#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import logging
import os
import socket  # needed for the socket.error handling in create_conn_obj
from io import StringIO

from nxc.config import process_secret
from nxc.protocols.mssql.mssqlexec import MSSQLEXEC
from nxc.connection import *
from nxc.helpers.logger import highlight
from nxc.helpers.bloodhound import add_user_bh
from nxc.helpers.powershell import create_ps_command

from impacket import tds
from impacket.krb5.ccache import CCache
from impacket.smbconnection import SMBConnection, SessionError
from impacket.tds import (
    SQLErrorException,
    TDS_LOGINACK_TOKEN,
    TDS_ERROR_TOKEN,
    TDS_ENVCHANGE_TOKEN,
    TDS_INFO_TOKEN,
    TDS_ENVCHANGE_VARCHAR,
    TDS_ENVCHANGE_DATABASE,
    TDS_ENVCHANGE_LANGUAGE,
    TDS_ENVCHANGE_CHARSET,
    TDS_ENVCHANGE_PACKETSIZE,
)

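# MSSQL protocol implementation for nxc: authentication and queries go through
# impacket's TDS client (tds.MSSQL), command execution and file transfer go
# through the MSSQLEXEC helper.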
class mssql(connection):
    def __init__(self, args, db, host):
        self.mssql_instances = None
        self.domain = None
        self.server_os = None
        self.hash = None
        self.os_arch = None
        self.nthash = ""

        connection.__init__(self, args, db, host)

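    # Overall flow: set up the logger, build the TDS connection, enumerate and print
    # host info, then authenticate and dispatch to modules or the requested command.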
    def proto_flow(self):
        self.proto_logger()
        if self.create_conn_obj():
            self.enum_host_info()
            self.print_host_info()
            if self.login():
                if hasattr(self.args, "module") and self.args.module:
                    self.call_modules()
                else:
                    self.call_cmd_args()

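    # Per-target logger; "hostname" starts out as "None" and is filled in by
    # enum_host_info() once SMB enumeration succeeds.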
    def proto_logger(self):
        self.logger = NXCAdapter(
            extra={
                "protocol": "MSSQL",
                "host": self.host,
                "port": self.args.port,
                "hostname": "None",
            }
        )

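    # Pull our local IP from the TDS socket, then (unless args.no_smb is set) grab the
    # domain, hostname and OS over an SMB null session and record the host in the database.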
    def enum_host_info(self):
        # this try pass breaks module http server, more info https://github.com/byt3bl33d3r/CrackMapExec/issues/363
        try:
            # Probably a better way of doing this, grab our IP from the socket
            self.local_ip = str(self.conn.socket).split()[2].split("=")[1].split(":")[0]
        except:
            pass

        if self.args.no_smb:
            self.domain = self.args.domain
        else:
            try:
                smb_conn = SMBConnection(self.host, self.host, None)
                try:
                    smb_conn.login("", "")
                except SessionError as e:
                    if "STATUS_ACCESS_DENIED" in e.getErrorString():
                        pass

                self.domain = smb_conn.getServerDNSDomainName()
                self.hostname = smb_conn.getServerName()
                self.server_os = smb_conn.getServerOS()
                self.logger.extra["hostname"] = self.hostname

                try:
                    smb_conn.logoff()
                except:
                    pass

                if self.args.domain:
                    self.domain = self.args.domain

                if self.args.local_auth:
                    self.domain = self.hostname
            except Exception as e:
                self.logger.fail(f"Error retrieving host domain: {e}; specify one manually with the '-d' flag")

        self.mssql_instances = self.conn.getInstances(0)
        self.db.add_host(
            self.host,
            self.hostname,
            self.domain,
            self.server_os,
            len(self.mssql_instances),
        )

        try:
            self.conn.disconnect()
        except:
            pass

    def print_host_info(self):
        self.logger.display(f"{self.server_os} (name:{self.hostname}) (domain:{self.domain})")
        # if len(self.mssql_instances) > 0:
        #     self.logger.display("MSSQL DB Instances: {}".format(len(self.mssql_instances)))
        #     for i, instance in enumerate(self.mssql_instances):
        #         self.logger.debug("Instance {}".format(i))
        #         for key in instance.keys():
        #             self.logger.debug(key + ":" + instance[key])

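    # Open the TDS connection; returns False on socket errors so proto_flow() can bail out early.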
    def create_conn_obj(self):
        try:
            self.conn = tds.MSSQL(self.host, self.args.port)
            self.conn.connect()
        except socket.error as e:
            self.logger.debug(f"Error connecting to MSSQL: {e}")
            return False
        return True

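    # Queries IS_SRVROLEMEMBER('sysadmin') and sets self.admin_privs when the current
    # login holds the sysadmin server role.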
    def check_if_admin(self):
        try:
            results = self.conn.sql_query("SELECT IS_SRVROLEMEMBER('sysadmin')")
            is_admin = int(results[0][""])
        except Exception as e:
            self.logger.fail(f"Error querying for sysadmin role: {e}")
            return False

        if is_admin:
            self.admin_privs = True
            self.logger.debug("User is admin")
        else:
            return False
        return True

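    # Kerberos authentication: reconnect, normalize any NTLM hash into impacket's LM:NT
    # format, then call kerberosLogin(); with an empty username and useCache the principal
    # is read from the ccache pointed to by KRB5CCNAME.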
    def kerberos_login(
        self,
        domain,
        username,
        password="",
        ntlm_hash="",
        aesKey="",
        kdcHost="",
        useCache=False,
    ):
        try:
            self.conn.disconnect()
        except:
            pass
        self.create_conn_obj()

        nthash = ""
        hashes = None
        if ntlm_hash != "":
            if ntlm_hash.find(":") != -1:
                hashes = ntlm_hash
                nthash = ntlm_hash.split(":")[1]
            else:
                # only nt hash
                hashes = f":{ntlm_hash}"
                nthash = ntlm_hash

        if not all("" == s for s in [self.nthash, password, aesKey]):
            kerb_pass = next(s for s in [self.nthash, password, aesKey] if s)
        else:
            kerb_pass = ""
        try:
            res = self.conn.kerberosLogin(
                None,
                username,
                password,
                domain,
                hashes,
                aesKey,
                kdcHost=kdcHost,
                useCache=useCache,
            )
            if res is not True:
                self.conn.printReplies()
                return False

            self.password = password
            if username == "" and useCache:
                ccache = CCache.loadFile(os.getenv("KRB5CCNAME"))
                principal = ccache.principal.toPrincipal()
                self.username = principal.components[0]
                username = principal.components[0]
            else:
                self.username = username
            self.domain = domain
            self.check_if_admin()

            used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}"
            domain = f"{domain}\\" if not self.args.local_auth else ""

            self.logger.success(f"{domain}{username}{used_ccache} {self.mark_pwned()}")
            if not self.args.local_auth:
                add_user_bh(self.username, self.domain, self.logger, self.config)
            return True
        except Exception as e:
            used_ccache = " from ccache" if useCache else f":{process_secret(kerb_pass)}"
            # domain already carries its trailing backslash here (or is empty for local auth),
            # so don't add a second one in the failure message
            domain = f"{domain}\\" if not self.args.local_auth else ""
            self.logger.fail(f"{domain}{username}{used_ccache} {e}")
            return False

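    # Username/password authentication; Windows (domain) auth is used unless local_auth
    # is requested, in which case SQL server authentication is attempted.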
    def plaintext_login(self, domain, username, password):
        try:
            self.conn.disconnect()
        except:
            pass
        self.create_conn_obj()

        try:
            # this is to prevent a decoding issue in impacket/ntlm.py:617 where it attempts to decode the domain
            if not domain:
                domain = ""
            res = self.conn.login(None, username, password, domain, None, not self.args.local_auth)
            if res is not True:
                self.handle_mssql_reply()
                return False

            self.password = password
            self.username = username
            self.domain = domain
            self.check_if_admin()
            self.db.add_credential("plaintext", domain, username, password)

            if self.admin_privs:
                self.db.add_admin_user("plaintext", domain, username, password, self.host)

            domain = f"{domain}\\" if not self.args.local_auth else ""
            out = f"{domain}{username}:{process_secret(password)} {self.mark_pwned()}"
            self.logger.success(out)
            if not self.args.local_auth:
                add_user_bh(self.username, self.domain, self.logger, self.config)
            return True
        except BrokenPipeError as e:
            self.logger.fail(f"Broken Pipe Error while attempting to login: {e}")
            return False
        except Exception as e:
            self.logger.fail(f"{domain}\\{username}:{process_secret(password)}")
            self.logger.exception(e)
            return False

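    # Pass-the-hash authentication: split an LM:NT pair if one was supplied, otherwise
    # treat the value as the NT hash, and log in with an empty password.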
    def hash_login(self, domain, username, ntlm_hash):
        lmhash = ""
        nthash = ""

        # This checks to see if we didn't provide the LM Hash
        if ntlm_hash.find(":") != -1:
            lmhash, nthash = ntlm_hash.split(":")
        else:
            nthash = ntlm_hash

        try:
            self.conn.disconnect()
        except:
            pass
        self.create_conn_obj()

        try:
            res = self.conn.login(
                None,
                username,
                "",
                domain,
                ":" + nthash if not lmhash else ntlm_hash,
                not self.args.local_auth,
            )
            if res is not True:
                self.conn.printReplies()
                return False

            self.hash = ntlm_hash
            self.username = username
            self.domain = domain
            self.check_if_admin()
            self.db.add_credential("hash", domain, username, ntlm_hash)

            if self.admin_privs:
                self.db.add_admin_user("hash", domain, username, ntlm_hash, self.host)

            out = f"{domain}\\{username} {process_secret(ntlm_hash)} {self.mark_pwned()}"
            self.logger.success(out)
            if not self.args.local_auth:
                add_user_bh(self.username, self.domain, self.logger, self.config)
            return True
        except BrokenPipeError as e:
            self.logger.fail(f"Broken Pipe Error while attempting to login: {e}")
            return False
        except Exception as e:
            self.logger.fail(f"{domain}\\{username}:{process_secret(ntlm_hash)} {e}")
            return False

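    # Run the query supplied in args.mssql_query and highlight each returned column/value pair.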
    def mssql_query(self):
        if self.conn.lastError:
            # Invalid connection
            return None
        query = self.args.mssql_query
        self.logger.info(f"Query to run:\n{query}")
        try:
            raw_output = self.conn.sql_query(query)
            self.logger.info("Executed MSSQL query")
            self.logger.debug(f"Raw output: {raw_output}")
            for data in raw_output:
                if isinstance(data, dict):
                    for key, value in data.items():
                        if key:
                            self.logger.highlight(f"{key}:{value}")
                        else:
                            self.logger.highlight(f"{value}")
                else:
                    self.logger.fail("Unexpected output")
        except Exception as e:
            self.logger.exception(e)
            return None

        return raw_output

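    # Run an OS command on the target through the MSSQLEXEC helper; output is
    # highlighted unless args.no_output is set.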
    @requires_admin
    def execute(self, payload=None, print_output=False):
        if not payload and self.args.execute:
            payload = self.args.execute

        self.logger.info(f"Command to execute:\n{payload}")
        try:
            exec_method = MSSQLEXEC(self.conn)
            raw_output = exec_method.execute(payload, print_output)
            self.logger.info("Executed command via mssqlexec")
            self.logger.debug(f"Raw output: {raw_output}")
        except Exception as e:
            self.logger.exception(e)
            return None

        if hasattr(self, "server"):
            self.server.track_host(self.host)

        if self.args.execute or self.args.ps_execute:
            self.logger.success("Executed command via mssqlexec")
            if self.args.no_output:
                self.logger.debug("Output set to disabled")
            else:
                for line in raw_output:
                    self.logger.highlight(line)

        return raw_output

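    # Wrap the payload with create_ps_command() and hand it to execute(); obfuscation
    # stays off by default because it breaks the MSSQLEXEC execution method.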
    @requires_admin
    def ps_execute(
        self,
        payload=None,
        get_output=False,
        methods=None,
        force_ps32=False,
        dont_obfs=True,
    ):
        if not payload and self.args.ps_execute:
            payload = self.args.ps_execute
        if not self.args.no_output:
            get_output = True

        # We're disabling PS obfuscation by default as it breaks the MSSQLEXEC execution method
        ps_command = create_ps_command(payload, force_ps32=force_ps32, dont_obfs=dont_obfs)
        return self.execute(ps_command, get_output)

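    # Upload a local file to the target via MSSQLEXEC and verify it exists afterwards.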
    @requires_admin
    def put_file(self):
        self.logger.display(f"Copy {self.args.put_file[0]} to {self.args.put_file[1]}")
        with open(self.args.put_file[0], "rb") as f:
            try:
                data = f.read()
                self.logger.display(f"Size is {len(data)} bytes")
                exec_method = MSSQLEXEC(self.conn)
                exec_method.put_file(data, self.args.put_file[1])
                if exec_method.file_exists(self.args.put_file[1]):
                    self.logger.success("File has been uploaded on the remote machine")
                else:
                    self.logger.fail("File does not exist on the remote system... error during upload")
            except Exception as e:
                self.logger.fail(f"Error during upload: {e}")

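    # Download a remote file via MSSQLEXEC; a zero-byte local file left behind by a
    # failed read is removed.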
    @requires_admin
    def get_file(self):
        remote_path = self.args.get_file[0]
        download_path = self.args.get_file[1]
        self.logger.display(f'Copying "{remote_path}" to "{download_path}"')

        try:
            exec_method = MSSQLEXEC(self.conn)
            exec_method.get_file(self.args.get_file[0], self.args.get_file[1])
            self.logger.success(f'File "{remote_path}" was downloaded to "{download_path}"')
        except Exception as e:
            self.logger.fail(f'Error reading file "{remote_path}": {e}')
            if os.path.getsize(download_path) == 0:
                os.remove(download_path)

    # We hook these functions in the tds library to use nxc's logger instead of printing the output to stdout
    # The whole tds library in impacket needs a good overhaul to preserve my sanity
    def handle_mssql_reply(self):
        for keys in self.conn.replies.keys():
            for i, key in enumerate(self.conn.replies[keys]):
                if key["TokenType"] == TDS_ERROR_TOKEN:
                    error = f"ERROR({key['ServerName'].decode('utf-16le')}): Line {key['LineNumber']:d}: {key['MsgText'].decode('utf-16le')}"
                    self.conn.lastError = SQLErrorException(f"ERROR: Line {key['LineNumber']:d}: {key['MsgText'].decode('utf-16le')}")
                    self.logger.fail(error)
                elif key["TokenType"] == TDS_INFO_TOKEN:
                    self.logger.display(f"INFO({key['ServerName'].decode('utf-16le')}): Line {key['LineNumber']:d}: {key['MsgText'].decode('utf-16le')}")
                elif key["TokenType"] == TDS_LOGINACK_TOKEN:
                    self.logger.display(f"ACK: Result: {key['Interface']} - {key['ProgName'].decode('utf-16le')} ({key['MajorVer']:d}{key['MinorVer']:d} {key['BuildNumHi']:d}{key['BuildNumLow']:d}) ")
                elif key["TokenType"] == TDS_ENVCHANGE_TOKEN:
                    if key["Type"] in (
                        TDS_ENVCHANGE_DATABASE,
                        TDS_ENVCHANGE_LANGUAGE,
                        TDS_ENVCHANGE_CHARSET,
                        TDS_ENVCHANGE_PACKETSIZE,
                    ):
                        record = TDS_ENVCHANGE_VARCHAR(key["Data"])
                        if record["OldValue"] == "":
                            record["OldValue"] = "None".encode("utf-16le")
                        elif record["NewValue"] == "":
                            record["NewValue"] = "None".encode("utf-16le")
                        if key["Type"] == TDS_ENVCHANGE_DATABASE:
                            _type = "DATABASE"
                        elif key["Type"] == TDS_ENVCHANGE_LANGUAGE:
                            _type = "LANGUAGE"
                        elif key["Type"] == TDS_ENVCHANGE_CHARSET:
                            _type = "CHARSET"
                        elif key["Type"] == TDS_ENVCHANGE_PACKETSIZE:
                            _type = "PACKETSIZE"
                        else:
                            _type = f"{key['Type']:d}"
                        self.logger.display(f"ENVCHANGE({_type}): Old Value: {record['OldValue'].decode('utf-16le')}, New Value: {record['NewValue'].decode('utf-16le')}")