Land #9424, Add SharknAT&To external scanner
commit 6caba521d3

LICENSE | 4 ++++

@@ -75,6 +75,10 @@ Files: lib/metasm.rb lib/metasm/* data/cpuinfo/*
 Copyright: 2006-2010 Yoann GUILLOT
 License: LGPL-2.1
 
+Files: lib/msf/core/modules/external/python/async_timeout/*
+Copyright: 2016-2017 Andrew Svetlov
+License: Apache 2.0
+
 Files: lib/net/dns.rb lib/net/dns/*
 Copyright: 2006 Marco Ceresa
 License: Ruby
@@ -170,6 +170,20 @@ class DataStore < Hash
     datastore_hash
   end
 
+  # Hack on a hack for the external modules
+  def to_nested_values
+    datastore_hash = {}
+    self.keys.each do |k|
+      # TODO arbitrary depth
+      if self[k].is_a? Array
+        datastore_hash[k.to_s] = self[k].map(&:to_s)
+      else
+        datastore_hash[k.to_s] = self[k].to_s
+      end
+    end
+    datastore_hash
+  end
+
   #
   # Persists the contents of the data store to a file
   #
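The new DataStore#to_nested_values keeps Array options as arrays of strings instead of flattening everything with to_s, so multi-host batches survive the trip across the JSON bridge. A short illustrative sketch of what then arrives on the Python side (the option names and values below are made up, not taken from this PR):

    # What a 'run' request's params might look like after to_nested_values:
    args = {
        'rhosts': ['192.0.2.1', '192.0.2.2'],  # Array values become lists of strings
        'rport': '49152',                      # everything else is stringified
        'VERBOSE': 'false',
    }

    # External modules convert back explicitly:
    port = int(args['rport'])
    verbose = args['VERBOSE'] == 'true'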
@@ -3,9 +3,7 @@ module Msf::Module::External
 
   def wait_status(mod)
     begin
-      while mod.running
-        m = mod.get_status
-        if m
+      while m = mod.get_status
         case m.method
         when :message
           log_output(m)
@@ -16,7 +14,6 @@ module Msf::Module::External
           break
         end
-        end
       end
     rescue Interrupt => e
       raise e
     rescue Exception => e
@@ -72,6 +69,20 @@ module Msf::Module::External
       service[:name] = data['name'] if data['name']
 
       report_service(service)
+    when 'vuln'
+      # Required
+      vuln = {host: data['host'], name: data['name']}
+
+      # Optional
+      vuln[:info] = data['info'] if data['info']
+      vuln[:refs] = data['refs'] if data['refs']
+      vuln[:port] = data['port'] if data['port']
+      vuln[:proto] = data['proto'] if data['proto']
+
+      # Metasploit magic
+      vuln[:refs] = self.references
+
+      report_vuln(vuln)
     else
       print_warning "Skipping unrecognized report type #{m.params['type']}"
     end
@@ -26,10 +26,11 @@ class Msf::Modules::External::Bridge
   end
 
   def get_status
-    if self.running
+    if self.running || !self.messages.empty?
      m = receive_notification
      if m.nil?
        close_ios
+       self.messages.close
        self.running = false
      end
 
@@ -130,8 +131,9 @@ class Msf::Modules::External::Bridge
        raise EOFError.new
      else
        fds = res[0]
-       # Preferentially drain and log stderr
-       if fds.include? err
+       # Preferentially drain and log stderr, EOF counts as activity, but
+       # stdout might have some buffered data left, so carry on
+       if fds.include?(err) && !err.eof?
          errbuf = err.readpartial(4096)
          elog "Unexpected output running #{self.path}:\n#{errbuf}"
        end
@@ -29,7 +29,13 @@ class Msf::Modules::External::Message
   end
 
   def to_json
-    JSON.generate({jsonrpc: '2.0', id: self.id, method: self.method, params: self.params.to_h})
+    params =
+      if self.params.respond_to? :to_nested_values
+        self.params.to_nested_values
+      else
+        self.params.to_h
+      end
+    JSON.generate({jsonrpc: '2.0', id: self.id, method: self.method, params: params})
   end
 
   protected
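With to_nested_values plugged into Message#to_json, the run request written to the external module's stdin is a single JSON-RPC line carrying nested string values. A hedged sketch of the receiving end (the id and field values here are illustrative only):

    import json

    # One line read from stdin by the external module:
    line = ('{"jsonrpc": "2.0", "id": "1", "method": "run", '
            '"params": {"rhosts": ["192.0.2.1"], "rport": "49152"}}')

    req = json.loads(line)
    assert req['method'] == 'run'
    req['params']['rhosts']   # -> ['192.0.2.1'], already plain strings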
@@ -0,0 +1,101 @@
# Vendored from https://github.com/aio-libs/async-timeout
# Copyright: 2016-2017 Andrew Svetlov
# License: Apache 2.0

import asyncio


__version__ = '2.0.0'


class timeout:
    """timeout context manager.

    Useful in cases when you want to apply timeout logic around block
    of code or in cases when asyncio.wait_for is not suitable. For example:

    >>> async with timeout(0.001):
    ...     async with aiohttp.get('https://github.com') as r:
    ...         await r.text()


    timeout - value in seconds or None to disable timeout logic
    loop - asyncio compatible event loop
    """
    def __init__(self, timeout, *, loop=None):
        self._timeout = timeout
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._task = None
        self._cancelled = False
        self._cancel_handler = None
        self._cancel_at = None

    def __enter__(self):
        return self._do_enter()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._do_exit(exc_type)

    @asyncio.coroutine
    def __aenter__(self):
        return self._do_enter()

    @asyncio.coroutine
    def __aexit__(self, exc_type, exc_val, exc_tb):
        self._do_exit(exc_type)

    @property
    def expired(self):
        return self._cancelled

    @property
    def remaining(self):
        if self._cancel_at is not None:
            return max(self._cancel_at - self._loop.time(), 0.0)
        else:
            return None

    def _do_enter(self):
        # Support Tornado 5- without timeout
        # Details: https://github.com/python/asyncio/issues/392
        if self._timeout is None:
            return self

        self._task = current_task(self._loop)
        if self._task is None:
            raise RuntimeError('Timeout context manager should be used '
                               'inside a task')

        if self._timeout <= 0:
            self._loop.call_soon(self._cancel_task)
            return self

        self._cancel_at = self._loop.time() + self._timeout
        self._cancel_handler = self._loop.call_at(
            self._cancel_at, self._cancel_task)
        return self

    def _do_exit(self, exc_type):
        if exc_type is asyncio.CancelledError and self._cancelled:
            self._cancel_handler = None
            self._task = None
            raise asyncio.TimeoutError
        if self._timeout is not None and self._cancel_handler is not None:
            self._cancel_handler.cancel()
            self._cancel_handler = None
        self._task = None

    def _cancel_task(self):
        self._task.cancel()
        self._cancelled = True


def current_task(loop):
    task = asyncio.Task.current_task(loop=loop)
    if task is None:
        if hasattr(loop, 'current_task'):
            task = loop.current_task()

    return task
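The vendored helper is used by the probe scanner below to bound the connect and read phases separately. A minimal usage sketch, assuming the package is importable as async_timeout the way it is laid out above:

    import asyncio
    from async_timeout import timeout

    async def read_banner(host, port):
        # Give up if the connect plus first read takes longer than 3 seconds
        try:
            async with timeout(3):
                reader, writer = await asyncio.open_connection(host, port)
                banner = await reader.read(1024)
                writer.close()
                return banner
        except asyncio.TimeoutError:
            return None

    # loop = asyncio.get_event_loop()
    # loop.run_until_complete(read_banner('192.0.2.1', 21))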
@@ -1,4 +1,7 @@
-import sys, os, json
+import json
+import os
+import sys
 
+
 def log(message, level='info'):
     rpc_send({'jsonrpc': '2.0', 'method': 'message', 'params': {
@@ -6,19 +9,23 @@ def log(message, level='info'):
         'message': message
     }})
 
-def report_host(ip, opts={}):
+
+def report_host(ip, **opts):
     host = opts.copy()
     host.update({'host': ip})
-    rpc_send({'jsonrpc': '2.0', 'method': 'report', 'params': {
-        'type': 'host', 'data': host
-    }})
+    report('host', host)
 
-def report_service(ip, opts={}):
+
+def report_service(ip, **opts):
     service = opts.copy()
     service.update({'host': ip})
-    rpc_send({'jsonrpc': '2.0', 'method': 'report', 'params': {
-        'type': 'service', 'data': service
-    }})
+    report('service', service)
+
+
+def report_vuln(ip, name, **opts):
+    vuln = opts.copy()
+    vuln.update({'host': ip, 'name': name})
+    report('vuln', vuln)
 
 
 def run(metadata, module_callback):
@@ -32,6 +39,13 @@ def run(metadata, module_callback):
         'message': 'Module completed'
     }})
 
+
+def report(kind, data):
+    rpc_send({'jsonrpc': '2.0', 'method': 'report', 'params': {
+        'type': kind, 'data': data
+    }})
+
+
 def rpc_send(req):
     print(json.dumps(req))
     sys.stdout.flush()
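The refactored helpers all funnel through report(), so a module author only passes keyword arguments for the optional fields. A small sketch of what a call emits, assuming the metasploit helper package above is on the module's import path:

    from metasploit import module

    # Keyword arguments become the optional fields of the report:
    module.report_vuln('192.0.2.1', 'wproxy', port=49152)

    # rpc_send() prints a single JSON-RPC notification to stdout, roughly
    # (key order may differ):
    # {"jsonrpc": "2.0", "method": "report",
    #  "params": {"type": "vuln", "data": {"port": 49152, "host": "192.0.2.1", "name": "wproxy"}}}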
@@ -0,0 +1,101 @@
import asyncio
import functools
import re

from async_timeout import timeout
from metasploit import module


def make_scanner(payload='', pattern='', onmatch=None, connect_timeout=3, read_timeout=10):
    return lambda args: start_scanner(payload, pattern, args, onmatch, connect_timeout=connect_timeout, read_timeout=read_timeout)


def start_scanner(payload, pattern, args, onmatch, **timeouts):
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run_scanner(payload, pattern, args, onmatch, **timeouts))


async def run_scanner(payload, pattern, args, onmatch, **timeouts):
    probes = [probe_host(host, int(args['rport']), payload, **timeouts) for host in args['rhosts']]
    async for (target, res) in Scan(probes):
        if isinstance(res, Exception):
            module.log('{}:{} - Error connecting: {}'.format(*target, res), level='error')
        elif res and re.search(pattern, res):
            module.log('{}:{} - Matches'.format(*target), level='good')
            module.log('{}:{} - Matches with: {}'.format(*target, res), level='debug')
            onmatch(target, res)
        else:
            module.log('{}:{} - Does not match'.format(*target), level='info')
            module.log('{}:{} - Does not match with: {}'.format(*target, res), level='debug')


class Scan:
    def __init__(self, runs):
        self.queue = asyncio.queues.Queue()
        self.total = len(runs)
        self.done = 0

        for r in runs:
            f = asyncio.ensure_future(r)
            args = r.cr_frame.f_locals
            target = (args['host'], args['port'])
            f.add_done_callback(functools.partial(self.__queue_result, target))

    def __queue_result(self, target, f):
        res = None

        try:
            res = f.result()
        except Exception as e:
            res = e

        self.queue.put_nowait((target, res))

    async def __aiter__(self):
        return self

    async def __anext__(self):
        if self.done == self.total:
            raise StopAsyncIteration

        res = await self.queue.get()
        self.done += 1
        return res


async def probe_host(host, port, payload, connect_timeout, read_timeout):
    buf = bytearray()

    try:
        async with timeout(connect_timeout):
            r, w = await asyncio.open_connection(host, port)
            remote = w.get_extra_info('peername')
            if remote[0] == host:
                module.log('{}:{} - Connected'.format(host, port), level='debug')
            else:
                module.log('{}({}):{} - Connected'.format(host, *remote), level='debug')
            w.write(payload)
            await w.drain()

        async with timeout(read_timeout):
            while len(buf) < 4096:
                data = await r.read(4096)
                if data:
                    module.log('{}:{} - Received {} bytes'.format(host, port, len(data)), level='debug')
                    buf.extend(data)
                else:
                    break
    except asyncio.TimeoutError:
        if buf:
            pass
        else:
            raise
    finally:
        try:
            w.close()
        except Exception:
            # Either we got something and the socket got in a bad state, or the
            # original error will point to the root cause
            pass

    return buf
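make_scanner bundles payload, pattern and callback into a callable that module.run can drive. A hedged sketch of wiring one up (the HTTP probe, pattern and service name below are placeholders, not part of this PR); the wproxy module further down is the real usage:

    from metasploit import module, probe_scanner

    def on_match(target, response):
        # target is a (host, port) tuple, response the bytes that matched
        module.report_service(target[0], port=target[1], proto='tcp', name='http')

    scan = probe_scanner.make_scanner(
        payload=b'HEAD / HTTP/1.0\r\n\r\n',   # bytes sent to every host
        pattern=b'^HTTP/1\\.[01]',            # regex applied to the buffered reply
        onmatch=on_match,
        connect_timeout=3,
        read_timeout=5,
    )
    # module.run(metadata, scan)  # metadata dict omitted in this sketch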
@@ -13,6 +13,8 @@ class Msf::Modules::External::Shim
      capture_server(mod)
    when 'dos'
      dos(mod)
+   when 'scanner.multi'
+     multi_scanner(mod)
    else
      # TODO have a nice load error show up in the logs
      ''
@@ -35,7 +37,7 @@ class Msf::Modules::External::Shim
    meta[:authors] = mod.meta['authors'].map(&:dump).join(",\n ")
 
    meta[:options] = mod.meta['options'].map do |n, o|
-     "Opt#{o['type'].capitalize}.new(#{n.dump},
+     "Opt#{o['type'].camelize}.new(#{n.dump},
        [#{o['required']}, #{o['description'].dump}, #{o['default'].inspect}])"
    end.join(",\n ")
    meta
@@ -69,6 +71,16 @@ class Msf::Modules::External::Shim
    render_template('capture_server.erb', meta)
  end
 
+ def self.multi_scanner(mod)
+   meta = mod_meta_common(mod)
+   meta[:date] = mod.meta['date'].dump
+   meta[:references] = mod.meta['references'].map do |r|
+     "[#{r['type'].upcase.dump}, #{r['ref'].dump}]"
+   end.join(",\n ")
+
+   render_template('multi_scanner.erb', meta)
+ end
+
  def self.dos(mod)
    meta = mod_meta_common(mod)
    meta[:date] = mod.meta['date'].dump
@@ -0,0 +1,34 @@
require 'msf/core/modules/external/bridge'
require 'msf/core/module/external'

class MetasploitModule < Msf::Auxiliary
  include Msf::Auxiliary::Scanner
  include Msf::Module::External

  def initialize
    super({
      <%= common_metadata meta %>
      'References' =>
        [
          <%= meta[:references] %>
        ],
      'DisclosureDate' => <%= meta[:date] %>,
    })

    register_options([
      <%= meta[:options] %>
    ])
  end

  def run_batch_size
    200
  end

  def run_batch(ips)
    mod = Msf::Modules::External::Bridge.open(<%= meta[:path] %>)
    datastore.delete('RHOSTS')
    datastore['rhosts'] = ips
    mod.run(datastore)
    wait_status(mod)
  end
end
@@ -0,0 +1,48 @@
#!/usr/bin/env python3

from metasploit import module, probe_scanner


metadata = {
    'name': 'Open WAN-to-LAN proxy on AT&T routers',
    'description': '''
        The Arris NVG589 and NVG599 routers configured with AT&T U-verse
        firmware 9.2.2h0d83 expose an un-authenticated proxy that allows
        connecting from WAN to LAN by MAC address.
    ''',
    'authors': [
        'Joseph Hutchins',  # Initial disclosure
        'Jon Hart <jon_hart[AT]rapid7.com>',  # Dummy payload and response pattern
        'Adam Cammack <adam_cammack[AT]rapid7.com>'  # Metasploit module
    ],
    'date': '2017-08-31',
    'references': [
        {'type': 'cve', 'ref': '2017-14117'},
        {'type': 'url', 'ref': 'https://www.nomotion.net/blog/sharknatto/'},
        {'type': 'url', 'ref': 'https://blog.rapid7.com/2017/09/07/measuring-sharknat-to-exposures/#vulnerability5port49152tcpexposure'},
        {'type': 'aka', 'ref': 'SharknAT&To'},
        {'type': 'aka', 'ref': 'sharknatto'}
    ],
    'type': 'scanner.multi',
    'options': {
        'rhosts': {'type': 'address_range', 'description': 'The target address', 'required': True, 'default': None},
        'rport': {'type': 'port', 'description': 'The target port', 'required': True, 'default': 49152},
    },
}


def report_wproxy(target, response):
    # We don't use the response here, but if we were a banner scraper we could
    # print or report it
    module.report_vuln(target[0], 'wproxy', port=target[1])


if __name__ == "__main__":
    study = probe_scanner.make_scanner(
        # Payload and pattern are given and applied straight to the socket, so
        # they need to be bytes-like
        payload=b'\x2a\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00',
        pattern=b'^\\*\xce.{3}$',
        onmatch=report_wproxy
    )
    module.run(metadata, study)
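The probe is eleven bytes and the reply is matched with a bytes regex over whatever probe_host buffered. A quick, self-contained sanity check of that pattern (the sample reply bytes are made up, not captured from a device):

    import re

    pattern = b'^\\*\xce.{3}$'

    # A made-up five-byte reply of the shape the pattern accepts:
    # 0x2a ('*'), 0xce, then exactly three arbitrary bytes
    sample = b'\x2a\xce\x01\x02\x03'
    assert re.search(pattern, sample)

    # Anything longer, shorter, or with a different prefix is rejected:
    assert not re.search(pattern, b'\x2a\xce\x01\x02\x03\x04')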