Add the fileNamePrefix parameter introduced in bloodhound so output files get properly zipped again

Branch: main
Author: dt
Date: 2024-03-19 21:24:09 +01:00
Parent: e12fef0620
Commit: bcd0896bc1
2 changed files with 10 additions and 17 deletions


@@ -289,7 +289,7 @@ class ldap(connection):
         # Re-connect since we logged off
         self.create_conn_obj()
-        self.output_filename = os.path.expanduser(f"~/.nxc/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}".replace(":", "-"))
+        self.output_filename = os.path.expanduser(f"~/.nxc/logs/{self.hostname}_{self.host}".replace(":", "-"))
 
     def print_host_info(self):
         self.logger.debug("Printing host info for LDAP")
@@ -1375,15 +1375,18 @@ class ldap(connection):
             num_workers=10,
             disable_pooling=False,
             timestamp=timestamp,
+            fileNamePrefix=self.output_filename.split("/")[-1],
             computerfile=None,
             cachefile=None,
             exclude_dcs=False,
         )
+        self.output_filename += f"_{timestamp}"
         self.logger.highlight(f"Compressing output into {self.output_filename}bloodhound.zip")
         list_of_files = os.listdir(os.getcwd())
         with ZipFile(self.output_filename + "bloodhound.zip", "w") as z:
             for each_file in list_of_files:
-                if each_file.startswith(timestamp) and each_file.endswith("json"):
+                if each_file.startswith(self.output_filename.split("/")[-1]) and each_file.endswith("json"):
                     z.write(each_file)
                     os.remove(each_file)
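Taken together, this change makes the zip sweep key off the per-target prefix instead of the timestamp alone: the prefix is handed to BloodHound as fileNamePrefix before the timestamp is appended to output_filename, so the JSON files on disk and the startswith() filter agree again. A minimal standalone sketch of that pattern, with illustrative prefix and file names rather than the exact ones nxc emits:

import os
from zipfile import ZipFile

# Illustrative values; nxc derives the real prefix from self.output_filename.
prefix = "dc01_10.0.0.5"
timestamp = "20240319212409"

# Pretend the collector already wrote its JSON output with that prefix.
for kind in ("users", "groups", "computers"):
    with open(f"{prefix}_{timestamp}_{kind}.json", "w") as f:
        f.write("{}")

# Sweep the working directory for files carrying the prefix and archive them.
# The zip itself starts with the prefix too but ends in ".zip", so it is skipped.
with ZipFile(f"{prefix}_{timestamp}bloodhound.zip", "w") as z:
    for name in os.listdir(os.getcwd()):
        if name.startswith(prefix) and name.endswith("json"):
            z.write(name)
            os.remove(name)  # drop the loose JSON once it is archived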


@@ -44,17 +44,7 @@ class BloodHound:
         # Create an object resolver
         self.ad.create_objectresolver(self.pdc)
 
-    def run(
-        self,
-        collect,
-        num_workers=10,
-        disable_pooling=False,
-        timestamp="",
-        computerfile="",
-        cachefile=None,
-        exclude_dcs=False,
-    ):
+    def run(self, collect, num_workers=10, disable_pooling=False, timestamp="", fileNamePrefix="", computerfile="", cachefile=None, exclude_dcs=False):
         start_time = time.time()
         if cachefile:
             self.ad.load_cachefile(cachefile)
@@ -82,7 +72,7 @@ class BloodHound:
             )
             # Initialize enumerator
             membership_enum = MembershipEnumerator(self.ad, self.pdc, collect, disable_pooling)
-            membership_enum.enumerate_memberships(timestamp=timestamp)
+            membership_enum.enumerate_memberships(timestamp=timestamp, fileNamePrefix=fileNamePrefix)
         elif "container" in collect:
             # Fetch domains for later, computers if needed
             self.pdc.prefetch_info(
@@ -92,7 +82,7 @@ class BloodHound:
             )
             # Initialize enumerator
             membership_enum = MembershipEnumerator(self.ad, self.pdc, collect, disable_pooling)
-            membership_enum.do_container_collection(timestamp=timestamp)
+            membership_enum.do_container_collection(timestamp=timestamp, fileNamePrefix=fileNamePrefix)
         elif do_computer_enum:
             # We need to know which computers to query regardless
             # We also need the domains to have a mapping from NETBIOS -> FQDN for local admins
@@ -102,7 +92,7 @@ class BloodHound:
             self.pdc.get_domains("acl" in collect)
         if "trusts" in collect or "acl" in collect or "objectprops" in collect:
             trusts_enum = DomainEnumerator(self.ad, self.pdc)
-            trusts_enum.dump_domain(collect, timestamp=timestamp)
+            trusts_enum.dump_domain(collect, timestamp=timestamp, fileNamePrefix=fileNamePrefix)
         if do_computer_enum:
             # If we don't have a GC server, don't use it for deconflictation
             have_gc = len(self.ad.gcs()) > 0
@@ -114,7 +104,7 @@ class BloodHound:
                 computerfile=computerfile,
                 exclude_dcs=exclude_dcs,
             )
-            computer_enum.enumerate_computers(self.ad.computers, num_workers=num_workers, timestamp=timestamp)
+            computer_enum.enumerate_computers(self.ad.computers, num_workers=num_workers, timestamp=timestamp, fileNamePrefix=fileNamePrefix)
         end_time = time.time()
         minutes, seconds = divmod(int(end_time - start_time), 60)
         self.logger.highlight("Done in %02dM %02dS" % (minutes, seconds))
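For reference, a sketch of how a caller would drive the widened signature; it assumes an already constructed BloodHound instance (here bloodhound) and uses a placeholder prefix and collection set, with only the keyword arguments mirroring the diff:

from datetime import datetime

timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
prefix = "dc01_10.0.0.5"  # hypothetical; nxc passes output_filename.split("/")[-1]

# `bloodhound` is assumed to be a fully initialized BloodHound object.
bloodhound.run(
    collect={"group", "localadmin", "session", "trusts"},
    num_workers=10,
    disable_pooling=False,
    timestamp=timestamp,
    fileNamePrefix=prefix,  # new parameter, threaded down to every enumerator
    computerfile=None,
    cachefile=None,
    exclude_dcs=False,
)

Because run() defaults fileNamePrefix to an empty string, existing callers that never pass it keep their old file naming; only callers like nxc that supply a prefix get the prefixed JSON output.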