##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
#   http://metasploit.com/
##

require 'msf/core'
require 'net/http'

class Metasploit3 < Msf::Auxiliary
  include Msf::Auxiliary::Report
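
  # Typical usage from msfconsole (the module path below is assumed and
  # may differ between framework versions):
  #   msf > use auxiliary/gather/search_email_collector
  #   msf auxiliary(search_email_collector) > set DOMAIN example.com
  #   msf auxiliary(search_email_collector) > run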

  def initialize(info = {})
    super(update_info(info,
      'Name'        => 'Search Engine Domain Email Address Collector',
      'Description' => %q{
        This module uses Google, Bing and Yahoo to create a list of
        valid email addresses for the target domain.
      },
      'Author'      => [ 'Carlos Perez <carlos_perez[at]darkoperator.com>' ],
      'License'     => MSF_LICENSE))

    register_options(
      [
        OptString.new('DOMAIN', [ true, "The domain name to locate email addresses for" ]),
        OptBool.new('SEARCH_GOOGLE', [ true, 'Enable Google as a backend search engine', true ]),
        OptBool.new('SEARCH_BING', [ true, 'Enable Bing as a backend search engine', true ]),
        OptBool.new('SEARCH_YAHOO', [ true, 'Enable Yahoo! as a backend search engine', true ]),
        OptString.new('OUTFILE', [ false, "A filename to store the generated email list" ])
      ], self.class)

    register_advanced_options(
      [
        OptString.new('PROXY', [ false, "Proxy server to route connection. <host>:<port>", nil ]),
        OptString.new('PROXY_USER', [ false, "Proxy Server User", nil ]),
        OptString.new('PROXY_PASS', [ false, "Proxy Server Password", nil ])
      ], self.class)
  end
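
  # Routing through a proxy uses the advanced options, e.g.:
  #   set PROXY 127.0.0.1:8080
  #   set PROXY_USER someuser
  #   set PROXY_PASS somepass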

  # Search google.com for emails of the target domain
  def search_google(targetdom)
    print_status("Searching Google for email addresses from #{targetdom}")
    response = ""
    emails = []
    header = { 'User-Agent' => "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" }
    clnt = Net::HTTP::Proxy(@proxysrv, @proxyport, @proxyuser, @proxypass).new("www.google.com")
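    # Net::HTTP::Proxy returns an HTTP class that routes requests through
    # the given proxy; with a nil proxy address it behaves like plain
    # Net::HTTP, so the same code path works with or without PROXY set.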
    # Request 100 results per page; the start offsets walk the first 600
    # results ("0" added so the top-ranked results are not skipped).
    searches = ["0", "100", "200", "300", "400", "500"]
    searches.each { |num|
      resp = clnt.get2("/search?hl=en&lr=&ie=UTF-8&q=%40" + targetdom + "&start=#{num}&sa=N&filter=0&num=100", header)
      response << resp.body
    }
    print_status("Extracting emails from Google search results...")
    # Google wraps query matches in <em> tags, which can split an address
    # in half; strip them before scanning.
    response.gsub!(/<\/?em>/, "")
    # Escape the domain so its dots match literally rather than any character.
    response.scan(/[A-Z0-9._%+-]+@#{Regexp.escape(targetdom)}/i) do |t|
      emails << t.downcase # downcase so cross-engine dedup in run() works
    end
    return emails.uniq
  end
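
  # A minimal illustration of the extraction step above, on a hypothetical
  # response fragment (the same pattern is used for all three engines):
  #   "mail <em>alice</em>@example.com or bob@example.com"
  #     .gsub(/<\/?em>/, "")
  #     .scan(/[A-Z0-9._%+-]+@#{Regexp.escape("example.com")}/i)
  #   # => ["alice@example.com", "bob@example.com"]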

  # Search yahoo.com for emails of the target domain
  def search_yahoo(targetdom)
    print_status("Searching Yahoo for email addresses from #{targetdom}")
    response = ""
    emails = []
    header = { 'User-Agent' => "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/4.0.221.6 Safari/525.13" }
    clnt = Net::HTTP::Proxy(@proxysrv, @proxyport, @proxyuser, @proxypass).new("search.yahoo.com")
    # Yahoo paginates with a 1-based result offset (b=1, 101, 201, ...).
    searches = ["1", "101", "201", "301", "401", "501"]
    searches.each { |num|
      resp = clnt.get2("/search?p=%40#{targetdom}&n=100&ei=UTF-8&va_vt=any&vo_vt=any&ve_vt=any&vp_vt=any&vd=all&vst=0&vf=all&vm=p&fl=0&fr=yfp-t-152&xargs=0&pstart=1&b=#{num}", header)
      response << resp.body
    }
    print_status("Extracting emails from Yahoo search results...")
    # Yahoo highlights matches with <b> tags; strip them before scanning.
    response.gsub!(/<\/?b>/, "")
    response.scan(/[A-Z0-9._%+-]+@#{Regexp.escape(targetdom)}/i) do |t|
      emails << t.downcase
    end
    return emails.uniq
  end

  # Search bing.com for emails of the target domain
  def search_bing(targetdom)
    print_status("Searching Bing for email addresses from #{targetdom}")
    response = ""
    emails = []
    header = { 'User-Agent' => "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/4.0.221.6 Safari/525.13" }
    clnt = Net::HTTP::Proxy(@proxysrv, @proxyport, @proxyuser, @proxypass).new("www.bing.com")
    # Bing serves 10 results per page; step the 1-based "first" offset by 10
    # to cover the first 200 results.
    searches = 1
    while searches < 201
      begin
        resp = clnt.get2("/search?q=%40#{targetdom}&first=#{searches}", header)
        response << resp.body
      rescue
        # Ignore errors on individual pages and move on to the next one.
      end
      searches += 10
    end
    print_status("Extracting emails from Bing search results...")
    # Bing highlights matches with <strong> tags; strip them before scanning.
    response.gsub!(/<\/?strong>/, "")
    response.scan(/[A-Z0-9._%+-]+@#{Regexp.escape(targetdom)}/i) do |t|
      emails << t.downcase
    end
    return emails.uniq
  end

  # Write all emails found to the output file
  def write_output(data)
    print_status("Writing email address list to #{datastore['OUTFILE']}...")
    # Append in binary mode so repeated runs accumulate results.
    ::File.open(datastore['OUTFILE'], "ab") do |fd|
      fd.write(data)
    end
  end

  def run
    if datastore['PROXY']
      @proxysrv, @proxyport = datastore['PROXY'].split(":")
      @proxyuser = datastore['PROXY_USER']
      @proxypass = datastore['PROXY_PASS']
    else
      @proxysrv, @proxyport = nil, nil
    end
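    # Note: the port parsed from PROXY stays a String; Ruby's socket layer
    # accepts numeric strings, so no explicit to_i is needed here.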
    print_status("Harvesting emails...")

    target = datastore['DOMAIN']

    emails = []
    emails << search_google(target) if datastore['SEARCH_GOOGLE']
    emails << search_bing(target) if datastore['SEARCH_BING']
    emails << search_yahoo(target) if datastore['SEARCH_YAHOO']
    # Each search returns its own array, so flatten before deduplicating.
    emails.flatten!
    emails.uniq!
    emails.sort!

    print_status("Located #{emails.length} email addresses for #{target}")
    emails.each do |e|
      print_status("\t#{e}")
    end

    write_output(emails.join("\n")) if datastore['OUTFILE']
  end
end