#!/usr/bin/env ruby
#
# Web Crawler.
#
# Author: et [at] metasploit.com 2010
#
#

# Load openssl before rubygems to work around an SSL loading issue on Mac OS X
require 'openssl'
require 'rubygems'
require 'rinda/tuplespace'
require 'uri'

# sqlite3 is only needed when database storage (-d) is enabled
begin
	require 'sqlite3'
rescue LoadError
	puts "Error: sqlite3-ruby not found"
end

# Resolve the Metasploit install path relative to this script
msfbase = File.symlink?(__FILE__) ? File.readlink(__FILE__) : __FILE__
$:.unshift(File.join(File.dirname(msfbase), '..', 'lib'))

require 'rex'
require 'msf/ui'
require 'msf/base'

# Sleep time (secs) between requests
$sleeptime = 0

# Timeout (secs) for the queue take loop; the crawl ends once no new
# paths show up within this window
$taketimeout = 15

# Read timeout (-1 waits forever)
$readtimeout = -1

# Directory containing crawler modules
$crawlermodulesdir = File.join(File.dirname(msfbase), "..", "data", "msfcrawler")

# Database path
$dbpathmsf = File.join(Msf::Config.get_config_root, 'sqlite3.db')

# Store requests/responses in the database?
$dbs = false

# Number of crawler threads
$threadnum = 1

# File extensions that should not be crawled
$dontcrawl = ".exe,.zip,.tar,.bz2,.run,.asc,.gz,"

# Use proxy
$useproxy = false

# Proxy host
$proxyhost = '127.0.0.1'

# Proxy port
$proxyport = 8080

# Cookie jar
$cookiejar = {}

# Verbose output
$verbose = false


class HttpCrawler

	attr_accessor :ctarget, :cport, :cinipath, :cssl, :proxyhost, :proxyport, :useproxy

	def initialize(target,port,inipath,ssl,proxyhost,proxyport,useproxy)
		self.ctarget = target
		self.cport = port
		self.cssl = ssl
		self.useproxy = useproxy
		self.proxyhost = proxyhost
		self.proxyport = proxyport
		self.cinipath = (inipath.nil? or inipath.empty?) ? '/' : inipath

		# Initial request that seeds the crawl queue
		inireq = {
			'rhost' => self.ctarget,
			'rport' => self.cport,
			'uri' => self.cinipath,
			'method' => 'GET',
			'ctype' => 'text/plain',
			'ssl' => self.cssl,
			'query' => nil,
			'data' => nil
		}

		# Paths waiting to be visited, and paths already visited
		# keyed by request signature (see hashsig)
		@NotViewedQueue = Rinda::TupleSpace.new
		@ViewedQueue = Hash.new

		insertnewpath(inireq)

		puts "Loading modules: #{$crawlermodulesdir}"
		load_modules
		puts "OK"
	end

	def reqtemplate(target,port,ssl)
		hreq = {
			'rhost' => target,
			'rport' => port,
			'uri' => nil,
			'method' => nil,
			'ctype' => nil,
			'ssl' => ssl,
			'query' => nil,
			'data' => nil
		}

		return hreq
	end
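
	# Note: nil fields in a Rinda template act as wildcards, so the hash
	# above matches any queued request for this target/port/ssl. A minimal
	# illustration of that matching rule (values are made up):
	#
	#   ts = Rinda::TupleSpace.new
	#   ts.write({'rhost' => 'host', 'uri' => '/a'})
	#   ts.take({'rhost' => 'host', 'uri' => nil})  # returns the '/a' tuple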

	def storedb(hashreq,response,dbpath)
		# TODO: support postgres via the pg gem
		db = SQLite3::Database.new(dbpath)
		#db = Mysql.new("127.0.0.1", username, password, databasename)

		# Busy-wait until no other transaction is active
		while db.transaction_active?
			#puts "Waiting for db"
		end
		#puts "db: #{db.transaction_active?}"

		#CREATE TABLE "wmap_requests" (
		#   "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
		#   "host" varchar(255),
		#   "address" varchar(16),
		#   "address6" varchar(255),
		#   "port" integer,
		#   "ssl" integer,
		#   "meth" varchar(32),
		#   "path" text,
		#   "headers" text,
		#   "query" text,
		#   "body" text,
		#   "respcode" varchar(16),
		#   "resphead" text,
		#   "response" text,
		#   "created_at" datetime);

		# The block form commits automatically on success
		db.transaction do
			db.execute( "insert into wmap_requests (host,address,address6,port,ssl,meth,path,headers,query,body,respcode,resphead,response,created_at,updated_at) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
				hashreq['rhost'],
				hashreq['rhost'],
				hashreq['rhost'],
				hashreq['rport'].to_i,
				hashreq['ssl'] ? 1 : 0,
				hashreq['method'],
				SQLite3::Blob.new(hashreq['uri']),
				SQLite3::Blob.new(''),
				SQLite3::Blob.new(hashreq['query'] ? hashreq['query'] : ''),
				SQLite3::Blob.new(hashreq['data'] ? hashreq['data'] : ''),
				response.code.to_s,
				SQLite3::Blob.new(''),
				SQLite3::Blob.new(response.body.to_s),
				Time.new,
				Time.new
			)
		end

		db.close
	end
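
	# If this script is run standalone (outside a Metasploit wmap database),
	# the wmap_requests table must already exist. A sketch to create a
	# compatible one for testing (column set assumed from the schema comment
	# above, plus the updated_at column the insert references):
	#
	#   db = SQLite3::Database.new($dbpathmsf)
	#   db.execute("CREATE TABLE IF NOT EXISTS wmap_requests (" \
	#     "id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, host varchar(255)," \
	#     "address varchar(16), address6 varchar(255), port integer," \
	#     "ssl integer, meth varchar(32), path text, headers text, query text," \
	#     "body text, respcode varchar(16), resphead text, response text," \
	#     "created_at datetime, updated_at datetime)")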

	def run
		# Thread bookkeeping for the (currently disabled) threaded mode below
		i, a = 0, []

		begin
			loop do
				reqfilter = reqtemplate(self.ctarget,self.cport,self.cssl)

				# Blocks until a matching request is queued; raises
				# Rinda::RequestExpiredError after $taketimeout secs
				hashreq = @NotViewedQueue.take(reqfilter, $taketimeout)

				if !@ViewedQueue.include?(hashsig(hashreq))
					@ViewedQueue[hashsig(hashreq)] = Time.now

					if !File.extname(hashreq['uri']).empty? and $dontcrawl.include? File.extname(hashreq['uri'])
						if $verbose
							puts "URI not crawled #{hashreq['uri']}"
						end
					else
						####
						#if i <= $threadnum
						#	a.push(Thread.new {
						####
						prx = nil
						if self.useproxy
							prx = "HTTP:"+self.proxyhost.to_s+":"+self.proxyport.to_s
						end

						c = Rex::Proto::Http::Client.new(
							self.ctarget,
							self.cport.to_i,
							{},
							self.cssl,
							nil,
							prx
						)

						sendreq(c,hashreq)

						####
						#	})
						#	i += 1
						#else
						#	sleep(0.01) and a.delete_if {|x| not x.alive?} while not a.empty?
						#	i = 0
						#end
						####
					end
				else
					if $verbose
						puts "#{hashreq['uri']} already visited at #{@ViewedQueue[hashsig(hashreq)]}"
					end
				end
			end
		rescue Rinda::RequestExpiredError
			puts "END."
			return
		end
	end

	#
	# Modified version of load_protocols from psnuffle by Max Moser <mmo@remote-exploit.org>
	#
	def load_modules
		base = $crawlermodulesdir
		if (not File.directory?(base))
			raise RuntimeError,"The Crawler modules parameter is set to an invalid directory"
		end

		@crawlermodules = {}
		cmodules = Dir.new(base).entries.grep(/\.rb$/).sort
		cmodules.each do |n|
			f = File.join(base, n)
			m = ::Module.new
			begin
				m.module_eval(File.read(f, File.size(f)))
				m.constants.grep(/^Crawler(.*)/) do
					cmod = $1
					klass = m.const_get("Crawler#{cmod}")
					@crawlermodules[cmod.downcase] = klass.new(self)

					puts("Loaded crawler module #{cmod} from #{f}...")
				end
			rescue ::Exception => e
				puts("Crawler module #{n} failed to load: #{e.class} #{e} #{e.backtrace}")
			end
		end
	end
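
	# A minimal sketch of the crawler-module contract (class name and href
	# regex below are illustrative): modules live in data/msfcrawler,
	# subclass BaseParser, define a constant starting with "Crawler", and
	# implement parse(request, result), queueing discovered paths via
	# insertnewpath.
	#
	#   class CrawlerSimple < BaseParser
	#     def parse(request, result)
	#       result.body.to_s.scan(/href="([^"]+)"/i) do |match|
	#         insertnewpath(self.crawler.urltohash(match.first)) rescue nil
	#       end
	#     end
	#   end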

	def sendreq(nclient,reqopts={})
		begin
			r = nclient.request_raw(reqopts)
			resp = nclient.send_recv(r, $readtimeout)

			# Skip interim "100 Continue" responses
			while(resp and resp.code == 100)
				resp = nclient.reread_response(resp, $readtimeout)
			end

			if resp
				#
				# Quickfix for bug in packet.rb to_s, line 190,
				# in case modules or the crawler call to_s on de-chunked responses
				#
				resp.transfer_chunked = false

				if resp['Set-Cookie']
					#puts "Set Cookie: #{resp['Set-Cookie']}"
					#puts "Storing in cookie jar for host:port #{reqopts['rhost']}:#{reqopts['rport']}"
					#$cookiejar["#{reqopts['rhost']}:#{reqopts['rport']}"] = resp['Set-Cookie']
				end

				if $dbs
					storedb(reqopts,resp,$dbpathmsf)
				end

				puts ">> [#{resp.code}] #{reqopts['uri']}"

				if reqopts['query'] and !reqopts['query'].empty?
					puts ">>> [Q] #{reqopts['query']}"
				end

				if reqopts['data']
					puts ">>> [D] #{reqopts['data']}"
				end

				case resp.code
				when 200
					# Hand the response to every loaded crawler module
					@crawlermodules.each_key do |k|
						@crawlermodules[k].parse(reqopts,resp)
					end
				when 301..302
					puts "[#{resp.code}] Redirection to: #{resp['Location']}"
					if $verbose
						puts urltohash(resp['Location'])
					end
					insertnewpath(urltohash(resp['Location']))
				when 404
					puts "[404] Invalid link #{reqopts['uri']}"
				else
					puts "Unhandled #{resp.code}"
				end
			else
				puts "No response"
			end
			sleep($sleeptime)
		rescue
			puts "ERROR"
			if $verbose
				puts "#{$!}: #{$!.backtrace}"
			end
		end
	end

	#
	# Add a new path (uri) to the non-viewed queue
	#
	def insertnewpath(hashreq)
		if hashreq['rhost'] == self.ctarget and hashreq['rport'] == self.cport
			if !@ViewedQueue.include?(hashsig(hashreq))
				if @NotViewedQueue.read_all(hashreq).size > 0
					if $verbose
						puts "Already in queue to be viewed"
					end
				else
					if $verbose
						puts "Inserted: #{hashreq['uri']}"
					end

					@NotViewedQueue.write(hashreq)
				end
			else
				if $verbose
					puts "#{hashreq['uri']} already visited at #{@ViewedQueue[hashsig(hashreq)]}"
				end
			end
		end
	end

	#
	# Build a new request hash from a URL, inheriting the current
	# target/port/ssl when the URL is relative
	#
	def urltohash(url)
		uri = URI.parse(url)
		tssl = (uri.scheme == "https") ? true : false

		if (uri.host.nil? or uri.host.empty?)
			uritargethost = self.ctarget
			uritargetport = self.cport
			uritargetssl = self.cssl
		else
			uritargethost = uri.host
			uritargetport = uri.port
			uritargetssl = tssl
		end

		hashreq = {
			'rhost' => uritargethost,
			'rport' => uritargetport,
			'uri' => uri.path,
			'method' => 'GET',
			'ctype' => 'text/plain',
			'ssl' => uritargetssl,
			'query' => uri.query,
			'data' => nil
		}

		return hashreq
	end
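
	# e.g. urltohash("/login") keeps the current target, port and ssl,
	# while urltohash("https://other.example/x") yields rhost
	# "other.example", rport 443 and ssl true.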

	# Signature used to de-duplicate requests: the string form of the
	# whole request hash
	def hashsig(hashreq)
		hashreq.to_s
	end

end

class BaseParser
	attr_accessor :crawler

	def initialize(c)
		self.crawler = c
	end

	def parse(request,result)
		nil
	end

	#
	# Add a new path (uri) to the test queue
	#
	def insertnewpath(hashreq)
		self.crawler.insertnewpath(hashreq)
	end

	def hashsig(hashreq)
		self.crawler.hashsig(hashreq)
	end

	def targetssl
		self.crawler.cssl
	end

	def targetport
		self.crawler.cport
	end

	def targethost
		self.crawler.ctarget
	end

	def targetinipath
		self.crawler.cinipath
	end
end
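
# A minimal standalone usage sketch (host, port and path are illustrative;
# normally the option parsing below drives this):
#
#   crawler = HttpCrawler.new('testsite.example', 80, '/', false,
#                             $proxyhost, $proxyport, false)
#   crawler.run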


trap("INT") {
	exit()
}

$args = Rex::Parser::Arguments.new(
	"-t" => [ true,  "Target URI" ],
	"-d" => [ false, "Enable database" ],
	"-u" => [ false, "Use proxy" ],
	"-x" => [ true,  "Proxy host" ],
	"-p" => [ true,  "Proxy port" ],
	"-h" => [ false, "Display this help information" ],
	"-v" => [ false, "Verbose" ]
)

if ARGV.length < 1
	puts("\n" + " Usage: #{$0} <options>\n" + $args.usage)
	exit
end

turl = nil
$args.parse(ARGV) { |opt, idx, val|
	case opt
	when "-d"
		$dbs = true
	when "-t"
		$crun = true
		turl = val
	when "-u"
		$useproxy = true
	when "-v"
		$verbose = true
	when "-x"
		$proxyhost = val
	when "-p"
		$proxyport = val
	when "-h"
		puts("\n" + " Usage: #{$0} <options>\n" + $args.usage)
		exit
	end
}
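
# Example invocations (target URLs are illustrative):
#
#   ./msfcrawler.rb -t http://testsite.example/ -v
#   ./msfcrawler.rb -t https://testsite.example/app/ -d -u -x 127.0.0.1 -p 8080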

if $crun
	uri = URI.parse(turl)
	tssl = (uri.scheme == "https") ? true : false

	if (uri.host.nil? or uri.host.empty?)
		puts "Error: target must be of the form http(s)://target/path"
		exit
	end

	if $useproxy
		puts "Using proxy: #{$proxyhost}:#{$proxyport}"
	end

	mc = HttpCrawler.new(uri.host,uri.port,uri.path,tssl,$proxyhost,$proxyport,$useproxy)
	if $dbs
		puts "Database: #{$dbpathmsf}"
	else
		puts "[DATABASE DISABLED]"
	end
	puts "Target: #{mc.ctarget} Port: #{mc.cport} Path: #{mc.cinipath} SSL: #{mc.cssl}"
	mc.run
end