Improvements for the spider_plus CME module including downloading files
parent 35d2619cfb
commit 81a03045d7
@@ -12,38 +12,54 @@ from impacket.smbconnection import SessionError

 CHUNK_SIZE = 4096

-suffixes = ["Bytes", "KB", "MB", "GB", "TB", "PB"]


-def humansize(nbytes):
-    i = 0
-    while nbytes >= 1024 and i < len(suffixes) - 1:
+def human_size(nbytes):
+    """
+    This function takes a number of bytes as input and converts it to a human-readable
+    size representation with appropriate units (e.g., KB, MB, GB, TB).
+    """
+    suffixes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
+
+    # Find the appropriate unit suffix and convert bytes to higher units
+    for i in range(len(suffixes)):
+        if nbytes < 1024 or i == len(suffixes) - 1:
+            break
         nbytes /= 1024.0
-        i += 1
-    f = ("%.2f" % nbytes).rstrip("0").rstrip(".")
-    return "%s %s" % (f, suffixes[i])
+
+    # Format the number of bytes with two decimal places and remove trailing zeros and decimal point
+    size_str = f"{nbytes:.2f}".rstrip("0").rstrip(".")
+
+    # Return the human-readable size with the appropriate unit suffix
+    return f"{size_str} {suffixes[i]}"

-def humaclock(time):
-    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time))
+def human_time(timestamp):
+    """This function takes a numerical timestamp (seconds since the epoch) and formats it
+    as a human-readable date and time in the format "YYYY-MM-DD HH:MM:SS".
+    """
+    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(timestamp))
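
# Illustrative usage (not part of the diff); the rendering depends on the local
# timezone, e.g. on a UTC host:
#   human_time(0)          -> "1970-01-01 00:00:00"
#   human_time(1672531200) -> "2023-01-01 00:00:00"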


 def make_dirs(path):
     """
-    Create the directory structure. We handle an exception `os.errno.EEXIST` that
-    may occured while the OS is creating the directories.
+    This function attempts to create directories at the given path. It handles the
+    exception `errno.EEXIST` that may occur if the directories already exist.
     """

     try:
         os.makedirs(path)
     except OSError as e:
         if e.errno != errno.EEXIST:
             raise
     pass
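
# Note (illustrative, not part of the diff): on Python 3.2+ the helper above is
# equivalent to the stdlib one-liner, which likewise tolerates pre-existing
# directories:
#   os.makedirs(path, exist_ok=True)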


-get_list_from_option = lambda opt: list(map(lambda o: o.lower(), filter(bool, opt.split(","))))
+def get_list_from_option(opt):
+    """
+    This function takes a comma-separated string and converts it to a list of lowercase strings.
+    It filters out empty strings from the input before converting.
+    """
+    return list(map(lambda o: o.lower(), filter(bool, opt.split(","))))
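
# Illustrative behaviour of the helper above (not part of the diff):
assert get_list_from_option("print$,IPC$") == ["print$", "ipc$"]
assert get_list_from_option("ico,,LNK,") == ["ico", "lnk"]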


 class SMBSpiderPlus:
@@ -51,20 +67,37 @@ class SMBSpiderPlus:
         self,
         smb,
         logger,
-        read_only,
-        exclude_dirs,
+        download_flag,
+        stats_flag,
         exclude_exts,
+        exclude_filter,
         max_file_size,
         output_folder,
     ):
         self.smb = smb
         self.host = self.smb.conn.getRemoteHost()
-        self.conn_retry = 5
+        self.max_connection_attempts = 5
         self.logger = logger
         self.results = {}

-        self.read_only = read_only
-        self.exclude_dirs = exclude_dirs
+        self.stats = {
+            "shares": list(),
+            "shares_readable": list(),
+            "shares_writable": list(),
+            "num_shares_filtered": 0,
+            "num_folders": 0,
+            "num_folders_filtered": 0,
+            "num_files": 0,
+            "file_sizes": list(),
+            "file_exts": set(),
+            "num_get_success": 0,
+            "num_get_fail": 0,
+            "num_files_filtered": 0,
+            "num_files_unmodified": 0,
+            "num_files_updated": 0,
+        }
+        self.download_flag = download_flag
+        self.stats_flag = stats_flag
+        self.exclude_filter = exclude_filter
         self.exclude_exts = exclude_exts
         self.max_file_size = max_file_size
         self.output_folder = output_folder

@@ -73,11 +106,14 @@ class SMBSpiderPlus:
         make_dirs(self.output_folder)

     def reconnect(self):
-        if self.conn_retry > 0:
-            self.conn_retry -= 1
-            self.logger.display(f"Reconnect to server {self.conn_retry}")
+        """This function performs a series of reconnection attempts, up to `self.max_connection_attempts`,
+        with a 3-second delay between each attempt. It renegotiates the session by creating a new
+        connection object and logging in again.
+        """
+        for i in range(1, self.max_connection_attempts + 1):
+            self.logger.display(f"Reconnection attempt #{i}/{self.max_connection_attempts} to server.")

-            # Renogociate the session
+            # Renegotiate the session
             time.sleep(3)
             self.smb.create_conn_obj()
             self.smb.login()

@@ -86,20 +122,21 @@ class SMBSpiderPlus:
             return False

     def list_path(self, share, subfolder):
+        """This function returns a list of paths for a given share/folder."""
         filelist = []
         try:
             # Get file list for the current folder
             filelist = self.smb.conn.listPath(share, subfolder + "*")
         except SessionError as e:
-            self.logger.debug(f'Failed listing files on share "{share}" in directory {subfolder}.')
+            self.logger.debug(f'Failed listing files on share "{share}" in folder "{subfolder}".')
             self.logger.debug(str(e))

             if "STATUS_ACCESS_DENIED" in str(e):
-                self.logger.debug(f'Cannot list files in directory "{subfolder}"')
+                self.logger.debug(f'Cannot list files in folder "{subfolder}".')
             elif "STATUS_OBJECT_PATH_NOT_FOUND" in str(e):
-                self.logger.debug(f"The directory {subfolder} does not exist.")
+                self.logger.debug(f"The folder {subfolder} does not exist.")
             elif self.reconnect():
                 filelist = self.list_path(share, subfolder)

@@ -107,6 +144,7 @@ class SMBSpiderPlus:
         return filelist

     def get_remote_file(self, share, path):
+        """This function will check if a path is readable in a SMB share."""
         try:
             remote_file = RemoteFile(self.smb.conn, path, share, access=FILE_READ_DATA)
             return remote_file

@@ -117,9 +155,10 @@ class SMBSpiderPlus:
         return None

     def read_chunk(self, remote_file, chunk_size=CHUNK_SIZE):
-        """
-        Read the next chunk of data from the remote file.
-        We retry 3 times if there is a SessionError that is not a `STATUS_END_OF_FILE`.
+        """This function reads the next chunk of data from the provided remote file using
+        the specified chunk size. If a `SessionError` is encountered,
+        it retries up to 3 times by reconnecting the SMB connection. If the maximum number
+        of retries is exhausted or an unexpected exception occurs, it returns an empty chunk.
         """

         chunk = ""

@@ -143,199 +182,375 @@ class SMBSpiderPlus:

         return chunk

-    def spider(self):
-        self.logger.debug("Enumerating shares for spidering")
+    def get_file_save_path(self, remote_file):
+        """This function processes the remote file path to extract the filename and the folder
+        path where the file should be saved locally. It converts forward slashes (/) and backslashes (\)
+        in the remote file path to the appropriate path separator for the local file system.
+        The folder path and filename are then obtained separately.
+        """
+
+        # Remove the backslash before the remote host part and replace slashes with the appropriate path separator
+        remote_file_path = str(remote_file)[2:].replace("/", os.path.sep).replace("\\", os.path.sep)
+
+        # Split the path to obtain the folder path and the filename
+        folder, filename = os.path.split(remote_file_path)
+
+        # Join the output folder with the folder path to get the final local folder path
+        folder = os.path.join(self.output_folder, folder)
+
+        return folder, filename
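
# Worked example (not part of the diff), assuming output_folder="/tmp/cme_spider_plus"
# and a remote file rendered as "\\HOST\share\docs\file.txt" by str(remote_file):
#   remote_file_path -> "HOST/share/docs/file.txt"   (on a POSIX host)
#   folder           -> "/tmp/cme_spider_plus/HOST/share/docs"
#   filename         -> "file.txt"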

+    def spider_shares(self):
+        """This function enumerates all available shares for the SMB connection, spiders
+        through the readable shares, and saves the metadata of the shares to a JSON file.
+        """
+        self.logger.info("Enumerating shares for spidering.")
         shares = self.smb.shares()

         try:
             # Get all available shares for the SMB connection
             for share in shares:
-                perms = share["access"]
-                name = share["name"]
+                share_perms = share["access"]
+                share_name = share["name"]
+                self.stats["shares"].append(share_name)

-                self.logger.debug(f'Share "{name}" has perms {perms}')
-
-                # We only want to spider readable shares
-                if not "READ" in perms:
+                self.logger.info(f'Share "{share_name}" has perms {share_perms}')
+                if "WRITE" in share_perms:
+                    self.stats["shares_writable"].append(share_name)
+                if "READ" in share_perms:
+                    self.stats["shares_readable"].append(share_name)
+                else:
+                    # We only want to spider readable shares
+                    self.logger.debug(f'Share "{share_name}" not readable.')
                     continue

-                # `exclude_dirs` is applied to the shares name
-                if name.lower() in self.exclude_dirs:
-                    self.logger.debug(f'Share "{name}" has been excluded.')
+                # `exclude_filter` is applied to the shares name
+                if share_name.lower() in self.exclude_filter:
+                    self.logger.info(f'Share "{share_name}" has been excluded.')
+                    self.stats["num_shares_filtered"] += 1
                     continue

                 try:
                     # Start the spider at the root of the share folder
-                    self.results[name] = {}
-                    self._spider(name, "")
+                    self.results[share_name] = {}
+                    self.spider_folder(share_name, "")
                 except SessionError:
                     traceback.print_exc()
-                    self.logger.fail(f"Got a session error while spidering")
+                    self.logger.fail("Got a session error while spidering.")
                     self.reconnect()

         except Exception as e:
             traceback.print_exc()
             self.logger.fail(f"Error enumerating shares: {str(e)}")

-        # Save the server shares metadatas if we want to grep on filenames
+        # Save the metadata.
         self.dump_folder_metadata(self.results)

+        # Print stats.
+        if self.stats_flag:
+            self.print_stats()
+
         return self.results
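
# Shape of self.results (and of the dumped JSON) after a run, with illustrative
# values only:
# {
#     "SYSVOL": {
#         "domain/Policies/GPT.INI": {
#             "atime_epoch": "2023-01-01 10:00:00",
#             "ctime_epoch": "2023-01-01 10:00:00",
#             "mtime_epoch": "2023-01-01 10:00:00",
#             "size": "23 B"
#         }
#     }
# }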

-    def _spider(self, share, subfolder):
-        self.logger.debug(f'Spider share "{share}" on folder "{subfolder}"')
+    def spider_folder(self, share_name, folder):
+        """This recursive function traverses through the contents of the specified share and folder.
+        It checks each entry (file or folder) against various filters, performs file metadata recording,
+        and downloads eligible files if the download flag is set.
+        """
+        self.logger.info(f'Spider share "{share_name}" in folder "{folder}".')

-        filelist = self.list_path(share, subfolder + "*")
-        if share.lower() in self.exclude_dirs:
-            self.logger.debug(f"The directory has been excluded")
-            return
+        filelist = self.list_path(share_name, folder + "*")

         # For each entry:
-        # - It's a directory then we spider it (skipping `.` and `..`)
+        # - It's a folder then we spider it (skipping `.` and `..`)
         # - It's a file then we apply the checks
         for result in filelist:
-            next_path = subfolder + result.get_longname()
-            next_path_lower = next_path.lower()
-            self.logger.debug(f'Current file on share "{share}": {next_path}')
+            next_filedir = result.get_longname()
+            if next_filedir in [".", ".."]:
+                continue
+            next_fullpath = folder + next_filedir
+            result_type = "folder" if result.is_directory() else "file"
+            self.stats[f"num_{result_type}s"] += 1

-            # Exclude the current result if it's in the exlude_dirs list
-            if any(map(lambda d: d in next_path_lower, self.exclude_dirs)):
-                self.logger.debug(f'The path "{next_path}" has been excluded')
+            # Check file-dir exclusion filter.
+            if any(d in next_filedir.lower() for d in self.exclude_filter):
+                self.logger.info(f'The {result_type} "{next_filedir}" has been excluded')
+                # Stats keys are the `num_*_filtered` counters defined in __init__.
+                self.stats[f"num_{result_type}s_filtered"] += 1
                 continue

-            if result.is_directory():
-                if result.get_longname() in [".", ".."]:
-                    continue
-                self._spider(share, next_path + "/")
+            if result_type == "folder":
+                self.logger.info(f'Current folder in share "{share_name}": "{next_fullpath}"')
+                self.spider_folder(share_name, next_fullpath + "/")
             else:
-                # Record the file metadata
-                self.results[share][next_path] = {
-                    "size": humansize(result.get_filesize()),
-                    #'ctime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_ctime())),
-                    "ctime_epoch": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(result.get_ctime_epoch())),
-                    #'mtime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_mtime())),
-                    "mtime_epoch": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(result.get_mtime_epoch())),
-                    #'atime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_atime())),
-                    "atime_epoch": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(result.get_atime_epoch())),
-                }
+                self.logger.info(f'Current file in share "{share_name}": "{next_fullpath}"')
+                self.parse_file(share_name, next_fullpath, result)
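
# Note (not part of the diff): the exclusion is a substring match on the entry
# name, so EXCLUDE_FILTER=backup also skips "Backups", "old_backup.zip", etc.:
assert any(d in "Backups".lower() for d in ["backup"])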

-            # The collection logic is here. You can add more checks based
-            # on the file size, content, name, date...
+    def parse_file(self, share_name, file_path, file_info):
+        """This function checks file attributes against various filters, records file metadata,
+        and downloads eligible files if the download flag is set.
+        """

-            # Check the file extension. We check here to prevent the creation
-            # of a RemoteFile object that perform a remote connection.
-            file_extension = next_path[next_path.rfind(".") + 1 :]
-            if file_extension in self.exclude_exts:
-                self.logger.debug(f'The file "{next_path}" has an excluded extension')
-                continue
+        # Record the file metadata
+        file_size = file_info.get_filesize()
+        file_creation_time = file_info.get_ctime_epoch()
+        file_modified_time = file_info.get_mtime_epoch()
+        file_access_time = file_info.get_atime_epoch()
+        self.results[share_name][file_path] = {
+            "size": human_size(file_size),
+            "ctime_epoch": human_time(file_creation_time),
+            "mtime_epoch": human_time(file_modified_time),
+            "atime_epoch": human_time(file_access_time),
+        }
+        self.stats["file_sizes"].append(file_size)

-            # If there is not results in the file but the size is correct,
-            # then we save it
-            if result.get_filesize() > self.max_file_size:
-                self.logger.debug(f"File {result.get_longname()} has size {result.get_filesize()}")
-                continue
+        # Check if proceeding with download attempt.
+        if not self.download_flag:
+            return

-            ## You can add more checks here: date, ...
-            if self.read_only == True:
-                continue
+        # Check file extension filter.
+        _, file_extension = os.path.splitext(file_path)
+        if file_extension:
+            self.stats["file_exts"].add(file_extension.lower())
+        if file_extension.lower() in self.exclude_exts:
+            self.logger.info(f'The file "{file_path}" has an excluded extension.')
+            self.stats["num_files_filtered"] += 1
+            return

-            # The file passes the checks, then we fetch it!
-            remote_file = self.get_remote_file(share, next_path)
+        # Check file size limits.
+        if file_size > self.max_file_size:
+            self.logger.info(f"File {file_path} has size {human_size(file_size)} > max size {human_size(self.max_file_size)}.")
+            self.stats["num_files_filtered"] += 1
+            return

-            if not remote_file:
-                self.logger.fail(f'Cannot open remote file "{next_path}".')
-                continue
+        # Check if the remote file is readable.
+        remote_file = self.get_remote_file(share_name, file_path)
+        if not remote_file:
+            self.logger.fail(f'Cannot read remote file "{file_path}".')
+            self.stats["num_get_fail"] += 1
+            return

-            try:
-                remote_file.open()
+        # Check if the file is already downloaded and up-to-date.
+        file_dir, file_name = self.get_file_save_path(remote_file)
+        download_path = os.path.join(file_dir, file_name)
+        needs_update_flag = False
+        if os.path.exists(download_path):
+            if file_modified_time <= os.stat(download_path).st_mtime and os.path.getsize(download_path) == file_size:
+                self.logger.info(f'File already downloaded "{file_path}" => "{download_path}".')
+                self.stats["num_files_unmodified"] += 1
+                return
+            else:
+                needs_update_flag = True

-                ## TODO: add checks on the file content here
-                self.save_file(remote_file)
+        # Download file.
+        download_success = False
+        try:
+            self.logger.info(f'Downloading file "{file_path}" => "{download_path}".')
+            remote_file.open()
+            self.save_file(remote_file, share_name)
+            remote_file.close()
+            download_success = True
+        except SessionError as e:
+            if "STATUS_SHARING_VIOLATION" in str(e):
+                pass
+        except Exception as e:
+            self.logger.fail(f'Failed to download file "{file_path}". Error: {str(e)}')

-                remote_file.close()
+        # Increment stats counters
+        if download_success:
+            self.stats["num_get_success"] += 1
+            if needs_update_flag:
+                self.stats["num_files_updated"] += 1
+        else:
+            self.stats["num_get_fail"] += 1

-            except SessionError as e:
-                if "STATUS_SHARING_VIOLATION" in str(e):
-                    pass
-            except Exception as e:
-                traceback.print_exc()
-                self.logger.fail(f"Error reading file {next_path}: {str(e)}")

+    def save_file(self, remote_file, share_name):
+        """This function reads the `remote_file` in chunks using the `read_chunk` method.
+        Each chunk is then written to the local file until the entire file is saved.
+        It handles cases where the file remains empty due to errors.
+        """

-    def save_file(self, remote_file):
-        # Reset the remote_file to point to the begining of the file
+        # Reset the remote_file to point to the beginning of the file.
         remote_file.seek(0, 0)

-        # remove the "\\" before the remote host part
-        file_path = str(remote_file)[2:]
-        # The remote_file.file_name contains '/'
-        file_path = file_path.replace("/", os.path.sep)
-        file_path = file_path.replace("\\", os.path.sep)
-        filename = file_path.split(os.path.sep)[-1]
-        directory = os.path.join(self.output_folder, file_path[: -len(filename)])
+        # Remote path (without the leading "\\"), needed by the error messages below.
+        remote_path = str(remote_file)[2:]
+        folder, filename = self.get_file_save_path(remote_file)
+        download_path = os.path.join(folder, filename)

-        # Create the subdirectories based on the share name and file path
-        self.logger.debug(f'Create directory "{directory}"')
-        make_dirs(directory)
+        # Create the subdirectories based on the share name and file path.
+        self.logger.debug(f'Create folder "{folder}"')
+        make_dirs(folder)

-        with open(os.path.join(directory, filename), "wb") as fd:
-            while True:
-                chunk = self.read_chunk(remote_file)
-                if not chunk:
-                    break
-                fd.write(chunk)
+        try:
+            with open(download_path, "wb") as fd:
+                while True:
+                    chunk = self.read_chunk(remote_file)
+                    if not chunk:
+                        break
+                    fd.write(chunk)
+        except Exception as e:
+            self.logger.fail(f'Error writing file "{remote_path}" from share "{share_name}": {e}')

+        # Check if the file is empty and should not be.
+        if os.path.getsize(download_path) == 0 and remote_file.get_filesize() > 0:
+            os.remove(download_path)
+            self.logger.fail(f'Unable to download file "{remote_path}".')
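
# Design note (not part of the diff): files are streamed in CHUNK_SIZE (4096-byte)
# reads via read_chunk(), so memory use stays flat even when MAX_FILE_SIZE is
# raised for very large downloads.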

     def dump_folder_metadata(self, results):
-        # Save the remote host shares metadatas to a json file
-        # TODO: use the json file as an input to save only the new or modified
-        # files since the last time.
-        path = os.path.join(self.output_folder, f"{self.host}.json")
-        with open(path, "w", encoding="utf-8") as fd:
-            fd.write(json.dumps(results, indent=4, sort_keys=True))
+        """This function takes the metadata results as input and writes them to a JSON file
+        in the `self.output_folder`. The results are formatted with indentation and
+        sorted keys before being written to the file.
+        """
+        metadata_path = os.path.join(self.output_folder, f"{self.host}.json")
+        try:
+            with open(metadata_path, "w", encoding="utf-8") as fd:
+                fd.write(json.dumps(results, indent=4, sort_keys=True))
+            self.logger.success(f'Saved share-file metadata to "{metadata_path}".')
+        except Exception as e:
+            self.logger.fail(f"Failed to save share metadata: {str(e)}")

+    def print_stats(self):
+        """This function prints the statistics during processing."""
+
+        # Share statistics.
+        shares = self.stats.get("shares", [])
+        if shares:
+            num_shares = len(shares)
+            shares_str = ", ".join(shares)
+            self.logger.display(f"SMB Shares: {num_shares} ({shares_str})")
+        shares_readable = self.stats.get("shares_readable", [])
+        if shares_readable:
+            num_readable_shares = len(shares_readable)
+            if len(shares_readable) > 10:
+                shares_readable_str = ", ".join(shares_readable[:10]) + "..."
+            else:
+                shares_readable_str = ", ".join(shares_readable)
+            self.logger.display(f"SMB Readable Shares: {num_readable_shares} ({shares_readable_str})")
+        shares_writable = self.stats.get("shares_writable", [])
+        if shares_writable:
+            num_writable_shares = len(shares_writable)
+            if len(shares_writable) > 10:
+                shares_writable_str = ", ".join(shares_writable[:10]) + "..."
+            else:
+                shares_writable_str = ", ".join(shares_writable)
+            self.logger.display(f"SMB Writable Shares: {num_writable_shares} ({shares_writable_str})")
+        num_shares_filtered = self.stats.get("num_shares_filtered", 0)
+        if num_shares_filtered:
+            self.logger.display(f"SMB Filtered Shares: {num_shares_filtered}")
+
+        # Folder statistics.
+        num_folders = self.stats.get("num_folders", 0)
+        self.logger.display(f"Total folders found: {num_folders}")
+        num_folders_filtered = self.stats.get("num_folders_filtered", 0)
+        if num_folders_filtered:
+            self.logger.display(f"Folders Filtered: {num_folders_filtered}")
+
+        # File statistics.
+        num_files = self.stats.get("num_files", 0)
+        self.logger.display(f"Total files found: {num_files}")
+        num_files_filtered = self.stats.get("num_files_filtered", 0)
+        if num_files_filtered:
+            self.logger.display(f"Files filtered: {num_files_filtered}")
+        if num_files == 0:
+            return
+
+        # File sizing statistics.
+        file_sizes = self.stats.get("file_sizes", [])
+        if file_sizes:
+            total_file_size = sum(file_sizes)
+            min_file_size = min(file_sizes)
+            max_file_size = max(file_sizes)
+            average_file_size = total_file_size / num_files
+            self.logger.display(f"File size average: {human_size(average_file_size)}")
+            self.logger.display(f"File size min: {human_size(min_file_size)}")
+            self.logger.display(f"File size max: {human_size(max_file_size)}")
+
+        # Extension statistics.
+        file_exts = list(self.stats.get("file_exts", []))
+        if file_exts:
+            num_unique_file_exts = len(file_exts)
+            if len(file_exts) > 10:
+                unique_exts_str = ", ".join(file_exts[:10]) + "..."
+            else:
+                unique_exts_str = ", ".join(file_exts)
+            self.logger.display(f"File unique exts: {num_unique_file_exts} ({unique_exts_str})")
+
+        # Download statistics.
+        if self.download_flag:
+            num_get_success = self.stats.get("num_get_success", 0)
+            if num_get_success:
+                self.logger.display(f"Downloads successful: {num_get_success}")
+            num_get_fail = self.stats.get("num_get_fail", 0)
+            if num_get_fail:
+                self.logger.display(f"Downloads failed: {num_get_fail}")
+            num_files_unmodified = self.stats.get("num_files_unmodified", 0)
+            if num_files_unmodified:
+                self.logger.display(f"Unmodified files: {num_files_unmodified}")
+            num_files_updated = self.stats.get("num_files_updated", 0)
+            if num_files_updated:
+                self.logger.display(f"Updated files: {num_files_updated}")
+            if num_files_unmodified and not num_files_updated:
+                self.logger.display("All files were unchanged.")
+            if num_files_filtered == num_files:
+                self.logger.display("All files were ignored.")
+            if num_get_fail == 0:
+                self.logger.success("All files processed successfully.")


 class CMEModule:
     """
     Spider plus module
     Module by @vincd
     Updated by @godylockz
     """

     name = "spider_plus"
-    description = "List files on the target server (excluding `DIR` directories and `EXT` extensions) and save them to the `OUTPUT` directory if they are smaller then `SIZE`"
+    description = "List files recursively (excluding `EXCLUDE_FILTER` and `EXCLUDE_EXTS` extensions) and save JSON share-file metadata to the `OUTPUT_FOLDER`. If `DOWNLOAD_FLAG`=True, download files smaller than `MAX_FILE_SIZE` to the `OUTPUT_FOLDER`."
     supported_protocols = ["smb"]
     opsec_safe = True  # Does the module touch disk?
-    multiple_hosts = True  # Does it make sense to run this module on multiple hosts at a time?
+    multiple_hosts = True  # Does the module support multiple hosts?

     def options(self, context, module_options):
         """
-        READ_ONLY       Only list files and put the name into a JSON (default: True)
-        EXCLUDE_EXTS    Extension file to exclude (Default: ico,lnk)
-        EXCLUDE_DIR     Directory to exclude (Default: print$,IPC$)
-        MAX_FILE_SIZE   Max file size allowed to dump (Default: 51200)
-        OUTPUT          Path of the remote folder where the dump will occur (Default: /tmp/cme_spider_plus)
+        DOWNLOAD_FLAG   Download all share folders/files (Default: False)
+        STATS_FLAG      Disable file/download statistics (Default: True)
+        EXCLUDE_EXTS    Case-insensitive extension filter to exclude (Default: ico,lnk)
+        EXCLUDE_FILTER  Case-insensitive filter to exclude folders/files (Default: print$,ipc$)
+        MAX_FILE_SIZE   Max file size to download (Default: 51200)
+        OUTPUT_FOLDER   Path of the local folder to save files (Default: /tmp/cme_spider_plus)
         """

-        self.read_only = module_options.get("READ_ONLY", True)
+        self.download_flag = False
+        if any("DOWNLOAD" in key for key in module_options.keys()):
+            self.download_flag = True
+        self.stats_flag = True
+        if any("STATS" in key for key in module_options.keys()):
+            self.stats_flag = False
         self.exclude_exts = get_list_from_option(module_options.get("EXCLUDE_EXTS", "ico,lnk"))
-        self.exlude_dirs = get_list_from_option(module_options.get("EXCLUDE_DIR", "print$,IPC$"))
-        self.max_file_size = int(module_options.get("SIZE", 50 * 1024))
-        self.output_folder = module_options.get("OUTPUT", os.path.join("/tmp", "cme_spider_plus"))
+        self.exclude_exts = [d.lower() for d in self.exclude_exts]  # force case-insensitive
+        self.exclude_filter = get_list_from_option(module_options.get("EXCLUDE_FILTER", "print$,ipc$"))
+        self.exclude_filter = [d.lower() for d in self.exclude_filter]  # force case-insensitive
+        self.max_file_size = int(module_options.get("MAX_FILE_SIZE", 50 * 1024))
+        self.output_folder = module_options.get("OUTPUT_FOLDER", os.path.join("/tmp", "cme_spider_plus"))

     def on_login(self, context, connection):
-        context.log.display("Started spidering plus with option:")
-        context.log.display("   DIR: {dir}".format(dir=self.exlude_dirs))
-        context.log.display("   EXT: {ext}".format(ext=self.exclude_exts))
-        context.log.display("  SIZE: {size}".format(size=self.max_file_size))
-        context.log.display("OUTPUT: {output}".format(output=self.output_folder))
+        context.log.display("Started module spidering_plus with the following options:")
+        context.log.display(f" DOWNLOAD_FLAG: {self.download_flag}")
+        context.log.display(f"    STATS_FLAG: {self.stats_flag}")
+        context.log.display(f"EXCLUDE_FILTER: {self.exclude_filter}")
+        context.log.display(f"  EXCLUDE_EXTS: {self.exclude_exts}")
+        context.log.display(f" MAX_FILE_SIZE: {human_size(self.max_file_size)}")
+        context.log.display(f" OUTPUT_FOLDER: {self.output_folder}")

         spider = SMBSpiderPlus(
             connection,
             context.log,
-            self.read_only,
-            self.exlude_dirs,
+            self.download_flag,
+            self.stats_flag,
             self.exclude_exts,
+            self.exclude_filter,
             self.max_file_size,
             self.output_folder,
         )

-        spider.spider()
+        spider.spider_shares()

@@ -372,13 +372,18 @@ class mssql(connection):

     @requires_admin
     def get_file(self):
-        self.logger.display(f"Copy {self.args.get_file[0]} to {self.args.get_file[1]}")
+        remote_path = self.args.get_file[0]
+        download_path = self.args.get_file[1]
+        self.logger.display(f'Copying "{remote_path}" to "{download_path}"')

         try:
             exec_method = MSSQLEXEC(self.conn)
             exec_method.get_file(self.args.get_file[0], self.args.get_file[1])
-            self.logger.success(f"File {self.args.get_file[0]} was transferred to {self.args.get_file[1]}")
+            self.logger.success(f'File "{remote_path}" was downloaded to "{download_path}"')
         except Exception as e:
-            self.logger.fail(f"Error reading file {self.args.get_file[0]}: {e}")
+            self.logger.fail(f'Error reading file "{remote_path}": {e}')
+            if os.path.getsize(download_path) == 0:
+                os.remove(download_path)

     # We hook these functions in the tds library to use CME's logger instead of printing the output to stdout
     # The whole tds library in impacket needs a good overhaul to preserve my sanity
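
# Illustrative CLI usage (hypothetical target, credentials, and paths; assumes
# --get-file takes the remote path followed by the local path):
#   cme mssql 10.10.10.10 -u user -p pass --get-file C:\Windows\Temp\out.txt ./out.txt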

@@ -1378,16 +1378,20 @@ class smb(connection):
             self.logger.fail(f"Error writing file to share {self.args.share}: {e}")

     def get_file(self):
-        self.logger.display(f"Copying {self.args.get_file[0]} to {self.args.get_file[1]}")
-        file_handle = self.args.get_file[1]
+        share_name = self.args.share
+        remote_path = self.args.get_file[0]
+        download_path = self.args.get_file[1]
+        self.logger.display(f'Copying "{remote_path}" to "{download_path}"')
         if self.args.append_host:
-            file_handle = f"{self.hostname}-{self.args.get_file[1]}"
-        with open(file_handle, "wb+") as file:
+            download_path = f"{self.hostname}-{remote_path}"
+        with open(download_path, "wb+") as file:
             try:
-                self.conn.getFile(self.args.share, self.args.get_file[0], file.write)
-                self.logger.success(f"File {self.args.get_file[0]} was transferred to {file_handle}")
+                self.conn.getFile(share_name, remote_path, file.write)
+                self.logger.success(f'File "{remote_path}" was downloaded to "{download_path}"')
             except Exception as e:
-                self.logger.fail(f"Error reading file {self.args.share}: {e}")
+                self.logger.fail(f'Error writing file "{remote_path}" from share "{share_name}": {e}')
+                if os.path.getsize(download_path) == 0:
+                    os.remove(download_path)

     def enable_remoteops(self):
         if self.remote_ops is not None and self.bootkey is not None:
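
# Illustrative CLI usage (hypothetical target and paths); remote path first,
# local path second, with the share selected via --share:
#   cme smb 10.10.10.10 -u user -p pass --share C$ --get-file Temp\out.txt ./out.txt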