Work around some weirdness between report_website and report_host not accepting hostnames as :host by forcing :host to be the address

git-svn-id: file:///home/svn/framework3/trunk@11723 4d416f70-5f16-0410-b530-b9f4589650da
unstable
HD Moore 2011-02-08 17:32:37 +00:00
parent 81f659f8b1
commit 070f48e16a
3 changed files with 8 additions and 4 deletions

View File

@@ -115,6 +115,7 @@ module Auxiliary::HttpCrawler
t[:site] = report_web_site(:wait => true, :host => t[:host], :port => t[:port], :vhost => t[:vhost], :ssl => t[:ssl])
print_status("Crawling #{t.to_url}...")
begin
@current_vhost = t[:vhost]
@current_site = t[:site]

View File

@@ -1330,6 +1330,9 @@ class DBManager
if not (addr and port)
raise ArgumentError, "report_web_site requires service OR host/port/ssl"
end
# Force addr to be the address and not hostname
addr = Rex::Socket.getaddress(addr)
end
ret = {}

View File

@@ -48,7 +48,7 @@ class Metasploit3 < Msf::Auxiliary
# - The occurrence of any form (web.form :path, :type (get|post|path_info), :params)
#
def crawler_process_page(t, page, cnt)
msg = "[#{"%.5d" % cnt}/#{"%.5d" % max_page_count}] #{page.code || "ERR"} - #{@current_site.vhost} - #{page.url}"
msg = "[#{"%.5d" % cnt}/#{"%.5d" % max_page_count}] #{page.code || "ERR"} - #{t[:vhost]} - #{page.url}"
case page.code
when 301,302
if page.headers and page.headers["location"]
@@ -74,7 +74,7 @@ class Metasploit3 < Msf::Auxiliary
#
info = {
:web_site => @current_site,
:web_site => t[:site],
:path => page.url.path,
:query => page.url.query,
:code => page.code,
@@ -122,7 +122,7 @@ class Metasploit3 < Msf::Auxiliary
# Continue processing forms
#
forms = []
form_template = { :web_site => @current_site }
form_template = { :web_site => t[:site] }
form = {}.merge(form_template)
# This page has a query parameter we can test with GET parameters