metasploit-framework/modules/auxiliary/gather/dns_cache_scraper.rb

##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##

require 'msf/core'
require 'net/dns/resolver'

class MetasploitModule < Msf::Auxiliary
  include Msf::Module::Deprecated
  include Msf::Auxiliary::Report

  deprecated(Date.new(2016, 6, 12), 'auxiliary/gather/enum_dns')

  def initialize(info = {})
    super(update_info(info,
      'Name'        => 'DNS Non-Recursive Record Scraper',
      'Description' => %q{
        This module can be used to scrape records that have been cached
        by a specific nameserver. The module allows the user to test
        every record from a specified file.
      },
      'Author'      => [
        'Brandon McCann "zeknox" <bmccann[at]accuvant.com>',
        'Rob Dixon "304geek" <rob.dixon[at]accuvant.com>'
      ],
      'License'     => MSF_LICENSE,
      'References'  => [
        ['URL', 'http://304geeks.blogspot.com/2013/01/dns-scraping-for-corporate-av-detection.html']
      ]))

    register_options([
      OptString.new('DOMAIN', [ false, "Domain name to query for"]),
      OptPath.new('WORDLIST', [ false, "Wordlist for domain name queries", ::File.join(Msf::Config.data_directory, "wordlists", "av-update-urls.txt")]),
      OptAddress.new('NS', [ true, "Specify the nameserver to use for queries" ]),
    ], self.class)

    register_advanced_options([
      OptBool.new('TCP_DNS', [false, "Run queries over TCP", false]),
      OptInt.new('DNS_TIMEOUT', [true, "DNS Timeout in seconds", 5])
    ], self.class)
  end

  # Scrape a single record from the target nameserver's cache. The query is
  # sent with recursion disabled, so the server will typically only answer
  # from records it already holds in its cache (DNS cache snooping).
  def scrape_dns(domain)
    use_tcp = datastore['TCP_DNS']

    # dns request with recursive disabled
    res = Net::DNS::Resolver.new(:nameservers => "#{datastore['NS']}", :recursive => false, :use_tcp => use_tcp)
    use_tcp ? res.tcp_timeout = datastore['DNS_TIMEOUT'] : res.udp_timeout = datastore['DNS_TIMEOUT']

    # query dns
    begin
      query = res.send(domain)
    rescue ResolverArgumentError
      print_error("Invalid domain: #{domain}")
      return
    rescue NoResponseError
      print_error("DNS Timeout Issue: #{domain}")
      return
    end

    # found or not found
    if query.answer.empty?
      vprint_status("#{domain} - Not Found")
      return
    end

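    # A cached answer means the target resolver serves non-recursive queries
    # from its cache; flag it so run() can report "DNS Cache Snooping".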
    @is_vulnerable = true

    print_good("#{domain} - Found")
    report_goods(domain)
  end

  # Read each line from the wordlist file and check it against the cache
  def read_file
    ::File.open("#{datastore['WORDLIST']}", "rb").each_line do |line|
      scrape_dns(line.chomp)
    end
  end

  # log results to database
  def report_goods(domain)
    if datastore['TCP_DNS']
      proto = "tcp"
    else
      proto = "udp"
    end

    report_note(
      :host   => datastore['NS'],
      :name   => "dns",
      :port   => 53,
      :proto  => proto,
      :type   => "dns.cache.scrape",
      :data   => "#{domain} cached",
      :update => :unique_data
    )
  end

  # main control method
  def run
    @is_vulnerable = false

    print_status("Making queries against #{datastore['NS']}")

    if datastore['DOMAIN'].blank?
      read_file
    else
      scrape_dns(datastore['DOMAIN'])
    end

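    # Only report the nameserver as vulnerable if at least one record was
    # found in its cache during this run.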
    report_vuln(
      :host => datastore['NS'],
      :name => "DNS Cache Snooping"
    ) if @is_vulnerable
  end
end
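
# Usage sketch (values are illustrative). With DOMAIN unset, every entry in
# WORDLIST (data/wordlists/av-update-urls.txt by default) is checked against
# the target resolver's cache:
#
#   msf > use auxiliary/gather/dns_cache_scraper
#   msf auxiliary(dns_cache_scraper) > set NS 192.0.2.53
#   msf auxiliary(dns_cache_scraper) > run
#
# Note: this module is deprecated in favor of auxiliary/gather/enum_dns.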