metasploit-framework/modules/auxiliary/scanner/http/wmap_robots_txt.rb

##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Auxiliary

  # Exploit mixins should be called first
  include Msf::Exploit::Remote::HttpClient
  include Msf::Auxiliary::WMAPScanServer
  # Scanner mixin should be near last
  include Msf::Auxiliary::Scanner
  include Msf::Auxiliary::Report

  def initialize
    super(
      'Name'        => 'HTTP Robots.txt Content Scanner',
      'Version'     => '$Revision$',
      'Description' => 'Detect robots.txt files and analyze their content',
      'Author'      => ['et'],
      'License'     => MSF_LICENSE
    )

    register_options(
      [
        OptString.new('PATH', [ true, "The test path to find robots.txt file", '/'])
      ], self.class)
  end

  def run_host(target_host)
    tpath = datastore['PATH']

    # Ensure the test path ends with a slash before appending robots.txt
    if tpath[-1,1] != '/'
      tpath += '/'
    end

    begin
      turl = tpath + 'robots.txt'

      res = send_request_cgi({
        'uri'     => turl,
        'method'  => 'GET',
        'version' => '1.0',
      }, 10)

      # "llow:" matches both Allow: and Disallow: directives
      if res and res.body.include?("llow:")
        print_status("[#{target_host}] #{tpath}robots.txt found")

        # Capture the path portion of each Allow:/Disallow: line
        aregex = /llow:[ ]{0,2}(.*?)$/i

        result = res.body.scan(aregex).flatten.map{ |s| s.strip }.uniq

        print_status("[#{target_host}] #{tpath}robots.txt - #{result.join(", ")}")

        result.each do |u|
          report_note(
            :host  => target_host,
            :proto => 'HTTP',
            :port  => rport,
            :type  => 'ROBOTS_TXT',
            :data  => u
          )
        end
      end

    rescue ::Rex::ConnectionRefused, ::Rex::HostUnreachable, ::Rex::ConnectionTimeout
    rescue ::Timeout::Error, ::Errno::EPIPE
    end
  end
end
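
For reference, here is a minimal standalone sketch of the extraction step used in run_host: the same /llow:/ pattern applied to a hypothetical robots.txt body, showing the values that would end up in the ROBOTS_TXT notes. The sample content below is illustrative only and is not part of the module.

# Illustrative only: reproduces the module's parsing step on sample data.
sample = "User-agent: *\nDisallow: /admin/\nDisallow: /tmp/\nAllow: /public/\n"

# Same pattern as in the module: "llow:" catches both Allow: and Disallow:
# lines, and the capture group grabs the path up to the end of the line.
aregex = /llow:[ ]{0,2}(.*?)$/i
paths  = sample.scan(aregex).flatten.map { |s| s.strip }.uniq

puts paths.inspect   # => ["/admin/", "/tmp/", "/public/"]

Matching on the "llow:" substring keeps the check case-insensitive-friendly and directive-agnostic, at the cost of also picking up any other line that happens to contain that fragment.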