Add functionality to update the technique administration YAML file to ATT&CK with sub-techniques, including an option to provide a local STIX path. Also updated the sample YAML file.
parent 08b30f0f35
commit f77aed3ef0

dettect.py (34 changed lines)
@@ -3,6 +3,7 @@ import os
import signal
from interactive_menu import *
from editor import DeTTECTEditor
import generic


def _init_menu():
@@ -62,6 +63,10 @@ def _init_menu():
    parser_data_sources.add_argument('-of', '--output-filename', help='set the output filename')
    parser_data_sources.add_argument('-ln', '--layer-name', help='set the name of the Navigator layer')
    parser_data_sources.add_argument('--health', help='check the YAML file(s) for errors', action='store_true')
    parser_data_sources.add_argument('--local-stix-path', help='path to a local STIX repository to use DeTT&CT offline '
                                                                'or to use a specific version of STIX objects.')
    parser_data_sources.add_argument('--update-to-sub-techniques', help='Update the technique administration YAML file '
                                                                        'to ATT&CK with sub-techniques.', action='store_true')

    # create the visibility parser
    parser_visibility = subparsers.add_parser('visibility', aliases=['v'],
@@ -92,13 +97,17 @@ def _init_menu():
    parser_visibility.add_argument('-of', '--output-filename', help='set the output filename')
    parser_visibility.add_argument('-ln', '--layer-name', help='set the name of the Navigator layer')
    parser_visibility.add_argument('--health', help='check the YAML file for errors', action='store_true')
    parser_visibility.add_argument('--local-stix-path', help='path to a local STIX repository to use DeTT&CT offline '
                                                              'or to use a specific version of STIX objects.')
    parser_visibility.add_argument('--update-to-sub-techniques', help='Update the technique administration YAML file '
                                                                      'to ATT&CK with sub-techniques.', action='store_true')

    # create the detection parser
    parser_detection = subparsers.add_parser('detection', aliases=['d'],
                                             help='detection coverage mapping based on techniques',
                                             description='Create a heat map based on detection scores, overlay '
                                                         'detections with visibility, generate a detection '
                                                         'improvement graph, output to Excel or check the health of '
                                                         'the technique administration YAML file.')
    parser_detection.add_argument('-ft', '--file-tech', help='path to the technique administration YAML file (used to '
                                                             'score the level of detection)', required=True)
@@ -124,11 +133,15 @@ def _init_menu():
    parser_detection.add_argument('-of', '--output-filename', help='set the output filename')
    parser_detection.add_argument('-ln', '--layer-name', help='set the name of the Navigator layer')
    parser_detection.add_argument('--health', help='check the YAML file(s) for errors', action='store_true')
    parser_detection.add_argument('--local-stix-path', help='path to a local STIX repository to use DeTT&CT offline '
                                                            'or to use a specific version of STIX objects.')
    parser_detection.add_argument('--update-to-sub-techniques', help='Update the technique administration YAML file '
                                                                     'to ATT&CK with sub-techniques.', action='store_true')

    # create the group parser
    parser_group = subparsers.add_parser('group', aliases=['g'],
                                         description='Create threat actor group heat maps, compare group(s) and '
                                                     'compare group(s) with visibility and detection coverage.',
                                         help='threat actor group mapping')
    parser_group.add_argument('-g', '--groups', help='specify the ATT&CK Groups to include separated using commas. '
                                                     'Group can be their ID, name or alias (default is all groups). '
@@ -163,11 +176,15 @@ def _init_menu():
    parser_group.add_argument('-of', '--output-filename', help='set the output filename')
    parser_group.add_argument('-ln', '--layer-name', help='set the name of the Navigator layer')
    parser_group.add_argument('--health', help='check the YAML file(s) for errors', action='store_true')
    parser_group.add_argument('--local-stix-path', help='path to a local STIX repository to use DeTT&CT offline '
                                                        'or to use a specific version of STIX objects.')
    parser_group.add_argument('--update-to-sub-techniques', help='Update the technique administration YAML file '
                                                                 'to ATT&CK with sub-techniques.', action='store_true')

    # create the generic parser
    parser_generic = subparsers.add_parser('generic', description='Generic functions which will output to stdout.',
                                           help='includes: statistics on ATT&CK data source and updates on techniques'
                                                ', groups and software', aliases=['ge'])

    parser_generic.add_argument('-ds', '--datasources', help='get a sorted count on how many ATT&CK Enterprise '
                                                             'techniques are covered by a particular Data Source',
@@ -181,6 +198,8 @@ def _init_menu():
    parser_generic.add_argument('--sort', help='sorting of the output from \'-u/--update\' on modified or creation '
                                               'date (default = modified)', choices=['modified', 'created'],
                                default='modified')
    parser_generic.add_argument('--local-stix-path', help='path to a local STIX repository to use DeTT&CT offline '
                                                          'or to use a specific version of STIX objects.')

    return menu_parser

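The same two options are registered on the data source, visibility, detection and group parsers. A minimal, self-contained sketch of that pattern follows; the program name, subcommands and the loop over subparsers below are illustrative, not the project's full menu (the commit itself simply repeats the add_argument() calls per parser, which keeps each parser's help text independent).

import argparse

# Sketch: the same optional flags added to several subparsers,
# mirroring the pattern used in _init_menu() above.
parser = argparse.ArgumentParser(prog='dettect-sketch')
subparsers = parser.add_subparsers(dest='subparser', required=True)

parser_ds = subparsers.add_parser('datasource', aliases=['ds'])
parser_v = subparsers.add_parser('visibility', aliases=['v'])

for sub in (parser_ds, parser_v):
    sub.add_argument('--local-stix-path', help='path to a local STIX repository')
    sub.add_argument('--update-to-sub-techniques', action='store_true',
                     help='update the technique administration YAML file to ATT&CK with sub-techniques')

args = parser.parse_args(['visibility', '--local-stix-path', './cti'])
print(args.local_stix_path, args.update_to_sub_techniques)  # ./cti False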
@@ -193,6 +212,13 @@ def _menu(menu_parser):
    """
    args = menu_parser.parse_args()

    if 'local_stix_path' in args and args.local_stix_path:
        generic.local_stix_path = args.local_stix_path

    if 'update_to_sub_techniques' in args and args.update_to_sub_techniques:
        from upgrade import upgrade_to_sub_techniques
        upgrade_to_sub_techniques(args.file_tech)

    if args.interactive:
        interactive_menu()

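The gating in _menu() relies on argparse.Namespace supporting membership tests, so a flag is only read when the chosen subcommand actually defines it. A small stand-alone sketch of the same pattern (subcommand names below are illustrative):

import argparse

# Only act on an optional flag if the chosen subcommand defines it and a value was given.
parser = argparse.ArgumentParser()
sub = parser.add_subparsers(dest='cmd')
sub.add_parser('generic').add_argument('--local-stix-path')
sub.add_parser('editor')  # this subcommand has no --local-stix-path

args = parser.parse_args(['editor'])

# argparse.Namespace supports 'in', so this check is safe for both subcommands:
if 'local_stix_path' in args and args.local_stix_path:
    print('using local STIX repository at', args.local_stix_path)
else:
    print('falling back to the online TAXII server')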
generic.py (73 changed lines)
@@ -4,12 +4,14 @@ import pickle
from io import StringIO
from datetime import datetime as dt
from ruamel.yaml import YAML
from upgrade import upgrade_yaml_file
from upgrade import upgrade_yaml_file, check_yaml_updated_to_sub_techniques
from constants import *
from health import check_yaml_file_health

# Due to performance reasons the import of attackcti is within the function that makes use of this library.

local_stix_path = None


def _save_attack_data(data, path):
    """
@@ -27,20 +29,32 @@ def _save_attack_data(data, path):

def load_attack_data(data_type):
    """
    Load the cached ATT&CK data from disk, if not expired (data file on disk is older than EXPIRE_TIME seconds).
    By default the ATT&CK data is loaded from the online TAXII server or from the local cache directory. The
    local cache directory will be used if the file is not expired (data file on disk is older than EXPIRE_TIME
    seconds). When the local_stix_path option is given, the ATT&CK data will be loaded from the given path of
    a local STIX repository.
    :param data_type: the desired data type, see DATATYPE_XX constants.
    :return: MITRE ATT&CK data object (STIX or custom schema)
    """
    if os.path.exists("cache/" + data_type):
        with open("cache/" + data_type, 'rb') as f:
            cached = pickle.load(f)
            write_time = cached[1]
            if not (dt.now() - write_time).total_seconds() >= EXPIRE_TIME:
                # the first item in the list contains the ATT&CK data
                return cached[0]

    from attackcti import attack_client
    mitre = attack_client()
    if local_stix_path is not None:
        if local_stix_path is not None and os.path.isdir(os.path.join(local_stix_path, 'enterprise-attack')) \
                and os.path.isdir(os.path.join(local_stix_path, 'pre-attack')) \
                and os.path.isdir(os.path.join(local_stix_path, 'mobile-attack')):
            mitre = attack_client(local_path=local_stix_path)
        else:
            print('[!] Not a valid local STIX path: ' + local_stix_path)
            quit()
    else:
        if os.path.exists("cache/" + data_type):
            with open("cache/" + data_type, 'rb') as f:
                cached = pickle.load(f)
                write_time = cached[1]
                if not (dt.now() - write_time).total_seconds() >= EXPIRE_TIME:
                    # the first item in the list contains the ATT&CK data
                    return cached[0]

        mitre = attack_client()

    attack_data = None
    if data_type == DATA_TYPE_STIX_ALL_RELATIONSHIPS:
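When no local STIX path is given, load_attack_data() keeps using the pickle cache written by _save_attack_data(): a list whose first element is the data and whose second element is the write timestamp, considered valid while it is younger than EXPIRE_TIME. A stand-alone sketch of that caching scheme (the EXPIRE_TIME value and cache path below are illustrative, not the project's constants):

import os
import pickle
from datetime import datetime as dt

EXPIRE_TIME = 7 * 24 * 60 * 60  # illustrative value; the project defines its own constant


def load_cached(path):
    """Return the cached object if the pickle on disk is younger than EXPIRE_TIME, else None."""
    if os.path.exists(path):
        with open(path, 'rb') as f:
            data, write_time = pickle.load(f)
        if (dt.now() - write_time).total_seconds() < EXPIRE_TIME:
            return data
    return None


def save_cache(data, path):
    """Store the object together with a write timestamp, mirroring the [data, time] layout above."""
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'wb') as f:
        pickle.dump([data, dt.now()], f)


if __name__ == '__main__':
    save_cache({'techniques': []}, 'cache/example')
    print(load_cached('cache/example'))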
@@ -167,7 +181,9 @@ def load_attack_data(data_type):
        attack_data = mitre.get_mobile_mitigations()
        attack_data = mitre.remove_revoked(attack_data)

    _save_attack_data(attack_data, "cache/" + data_type)
    # Only use cache when using online TAXII server:
    if local_stix_path is None:
        _save_attack_data(attack_data, "cache/" + data_type)

    return attack_data

@@ -863,8 +879,8 @@ def _check_file_type(filename, file_type=None):

def check_file(filename, file_type=None, health_is_called=False):
    """
    Calls three functions to perform the following checks: is the file a valid YAML file, needs the file to be upgrade,
    does the file contain errors.
    Calls four functions to perform the following checks: is the file a valid YAML file, needs the file to be upgraded,
    does the file contain errors or does the file need a sub-techniques upgrade.
    :param filename: path to a YAML file
    :param file_type: value to check against the 'file_type' key in the YAML file
    :param health_is_called: boolean that specifies if detailed errors in the file will be printed by the function 'check_yaml_file_health'
@@ -878,6 +894,10 @@ def check_file(filename, file_type=None, health_is_called=False):
        upgrade_yaml_file(filename, file_type, yaml_content['version'], load_attack_data(DATA_TYPE_STIX_ALL_TECH))
        check_yaml_file_health(filename, file_type, health_is_called)

        if file_type == FILE_TYPE_TECHNIQUE_ADMINISTRATION:
            if not check_yaml_updated_to_sub_techniques(filename):
                return None

        return yaml_content['file_type']

    return yaml_content  # value is None
@@ -1075,3 +1095,28 @@ def clean_filename(filename):
    :return: sanitized filename
    """
    return filename.replace('/', '').replace('\\', '').replace(':', '')[:200]


def get_technique_from_yaml(yaml_content, technique_id):
    """
    Generic function to look up a specific technique_id in the YAML content.
    :param yaml_content: YAML content with all techniques
    :param technique_id: technique_id to look for
    :return: the technique you're searching for. None if not found.
    """
    for tech in yaml_content['techniques']:
        if tech['technique_id'] == technique_id:
            return tech


def remove_technique_from_yaml(yaml_content, technique_id):
    """
    Function to delete a specific technique in the YAML content.
    :param yaml_content: YAML content with all techniques
    :param technique_id: technique_id to look for
    :return: none
    """
    for tech in yaml_content['techniques']:
        if tech['technique_id'] == technique_id:
            yaml_content['techniques'].remove(tech)
            return
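A small usage sketch of the two new helpers; the function bodies match the code above and the sample technique administration content is made up for illustration.

def get_technique_from_yaml(yaml_content, technique_id):
    # return the technique dict with the given ID, or None
    for tech in yaml_content['techniques']:
        if tech['technique_id'] == technique_id:
            return tech


def remove_technique_from_yaml(yaml_content, technique_id):
    # delete the technique with the given ID in place
    for tech in yaml_content['techniques']:
        if tech['technique_id'] == technique_id:
            yaml_content['techniques'].remove(tech)
            return


yaml_content = {'techniques': [{'technique_id': 'T1003', 'technique_name': 'OS Credential Dumping'},
                               {'technique_id': 'T1065', 'technique_name': 'Uncommonly Used Port'}]}

print(get_technique_from_yaml(yaml_content, 'T1003')['technique_name'])  # OS Credential Dumping
remove_technique_from_yaml(yaml_content, 'T1065')                        # deprecated in the sub-technique model
print([t['technique_id'] for t in yaml_content['techniques']])           # ['T1003']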
File diff suppressed because it is too large.
@@ -167,32 +167,35 @@ def _map_and_colorize_techniques_for_detections(my_techniques):
                if s == 3 else COLOR_D_4 if s == 4 else COLOR_D_5 if s == 5 else ''
            technique = get_technique(techniques, technique_id)

            for tactic in get_tactics(technique):
                x = dict()
                x['techniqueID'] = technique_id
                x['color'] = color
                x['comment'] = ''
                x['enabled'] = True
                x['tactic'] = tactic.lower().replace(' ', '-')
                x['metadata'] = []
                x['score'] = s
                cnt = 1
                tcnt = len([d for d in technique_data['detection'] if get_latest_score(d) >= 0])
                for detection in technique_data['detection']:
                    d_score = get_latest_score(detection)
                    if d_score >= 0:
                        location = ', '.join(detection['location'])
                        applicable_to = ', '.join(detection['applicable_to'])
                        x['metadata'].append({'name': '-Applicable to', 'value': applicable_to})
                        x['metadata'].append({'name': '-Detection score', 'value': str(d_score)})
                        x['metadata'].append({'name': '-Detection location', 'value': location})
                        x['metadata'].append({'name': '-Technique comment', 'value': detection['comment']})
                        x['metadata'].append({'name': '-Detection comment', 'value': get_latest_comment(detection)})
                        if cnt != tcnt:
                            x['metadata'].append({'name': '---', 'value': '---'})
                        cnt += 1
                x['metadata'] = make_layer_metadata_compliant(x['metadata'])
                mapped_techniques.append(x)
            if technique is not None:
                for tactic in get_tactics(technique):
                    x = dict()
                    x['techniqueID'] = technique_id
                    x['color'] = color
                    x['comment'] = ''
                    x['enabled'] = True
                    x['tactic'] = tactic.lower().replace(' ', '-')
                    x['metadata'] = []
                    x['score'] = s
                    cnt = 1
                    tcnt = len([d for d in technique_data['detection'] if get_latest_score(d) >= 0])
                    for detection in technique_data['detection']:
                        d_score = get_latest_score(detection)
                        if d_score >= 0:
                            location = ', '.join(detection['location'])
                            applicable_to = ', '.join(detection['applicable_to'])
                            x['metadata'].append({'name': '-Applicable to', 'value': applicable_to})
                            x['metadata'].append({'name': '-Detection score', 'value': str(d_score)})
                            x['metadata'].append({'name': '-Detection location', 'value': location})
                            x['metadata'].append({'name': '-Technique comment', 'value': detection['comment']})
                            x['metadata'].append({'name': '-Detection comment', 'value': get_latest_comment(detection)})
                            if cnt != tcnt:
                                x['metadata'].append({'name': '---', 'value': '---'})
                            cnt += 1
                    x['metadata'] = make_layer_metadata_compliant(x['metadata'])
                    mapped_techniques.append(x)
            else:
                print('[!] Technique ' + technique_id + ' is unknown in ATT&CK. Ignoring this technique.')
        except Exception as e:
            print('[!] Possible error in YAML file at: %s. Error: %s' % (technique_id, str(e)))
            quit()
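The per-technique Navigator layer entry assembled above can be sketched in isolation; the color value and the simplified metadata handling below are illustrative stand-ins for the project's constants and make_layer_metadata_compliant().

# Sketch of the per-technique Navigator layer entry built in the loop above.
def make_layer_entry(technique_id, tactic, score, color, detections):
    entry = {'techniqueID': technique_id,
             'color': color,
             'comment': '',
             'enabled': True,
             'tactic': tactic.lower().replace(' ', '-'),
             'score': score,
             'metadata': []}
    for idx, d in enumerate(detections, start=1):
        entry['metadata'].append({'name': '-Applicable to', 'value': ', '.join(d['applicable_to'])})
        entry['metadata'].append({'name': '-Detection score', 'value': str(d['score'])})
        if idx != len(detections):
            entry['metadata'].append({'name': '---', 'value': '---'})  # divider between detection objects
    return entry


example = make_layer_entry('T1003', 'Credential Access', 3, '#ff9900',
                           [{'applicable_to': ['all'], 'score': 3}])
print(example['tactic'], len(example['metadata']))  # credential-access 2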
@@ -225,34 +228,37 @@ def _map_and_colorize_techniques_for_visibility(my_techniques, my_data_sources,
            technique = get_technique(techniques, technique_id)
            color = COLOR_V_1 if s == 1 else COLOR_V_2 if s == 2 else COLOR_V_3 if s == 3 else COLOR_V_4 if s == 4 else ''

            for tactic in get_tactics(technique):
                x = dict()
                x['techniqueID'] = technique_id
                x['color'] = color
                x['comment'] = ''
                x['enabled'] = True
                x['tactic'] = tactic.lower().replace(' ', '-')
                x['metadata'] = []
                x['metadata'].append({'name': '-Available data sources', 'value': my_ds})
                x['metadata'].append({'name': '-ATT&CK data sources', 'value': ', '.join(get_applicable_data_sources_technique(technique['x_mitre_data_sources'],
                                                                                                                                applicable_data_sources))})
                x['metadata'].append({'name': '---', 'value': '---'})
                x['score'] = s
            if technique is not None:
                for tactic in get_tactics(technique):
                    x = dict()
                    x['techniqueID'] = technique_id
                    x['color'] = color
                    x['comment'] = ''
                    x['enabled'] = True
                    x['tactic'] = tactic.lower().replace(' ', '-')
                    x['metadata'] = []
                    x['metadata'].append({'name': '-Available data sources', 'value': my_ds})
                    x['metadata'].append({'name': '-ATT&CK data sources', 'value': ', '.join(get_applicable_data_sources_technique(technique['x_mitre_data_sources'],
                                                                                                                                    applicable_data_sources))})
                    x['metadata'].append({'name': '---', 'value': '---'})
                    x['score'] = s

                cnt = 1
                tcnt = len(technique_data['visibility'])
                for visibility in technique_data['visibility']:
                    applicable_to = ', '.join(visibility['applicable_to'])
                    x['metadata'].append({'name': '-Applicable to', 'value': applicable_to})
                    x['metadata'].append({'name': '-Visibility score', 'value': str(get_latest_score(visibility))})
                    x['metadata'].append({'name': '-Technique comment', 'value': visibility['comment']})
                    x['metadata'].append({'name': '-Visibility comment', 'value': get_latest_comment(visibility)})
                    if cnt != tcnt:
                        x['metadata'].append({'name': '---', 'value': '---'})
                    cnt += 1
                    cnt = 1
                    tcnt = len(technique_data['visibility'])
                    for visibility in technique_data['visibility']:
                        applicable_to = ', '.join(visibility['applicable_to'])
                        x['metadata'].append({'name': '-Applicable to', 'value': applicable_to})
                        x['metadata'].append({'name': '-Visibility score', 'value': str(get_latest_score(visibility))})
                        x['metadata'].append({'name': '-Technique comment', 'value': visibility['comment']})
                        x['metadata'].append({'name': '-Visibility comment', 'value': get_latest_comment(visibility)})
                        if cnt != tcnt:
                            x['metadata'].append({'name': '---', 'value': '---'})
                        cnt += 1

                x['metadata'] = make_layer_metadata_compliant(x['metadata'])
                mapped_techniques.append(x)
                    x['metadata'] = make_layer_metadata_compliant(x['metadata'])
                    mapped_techniques.append(x)
            else:
                print('[!] Technique ' + technique_id + ' is unknown in ATT&CK. Ignoring this technique.')

    for t in techniques:
        tech_id = get_attack_id(t)
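get_technique() and get_tactics() are not part of this diff; assuming get_tactics() derives tactic names from the STIX object's kill_chain_phases, the new None-guard matters because get_technique() returns None for IDs that are unknown in ATT&CK. An illustrative sketch under that assumption:

# Illustrative only: the helper below is an assumption about how tactics are read
# from a STIX technique object, and the sample objects are made up.
def get_tactics_sketch(technique):
    return [phase['phase_name'] for phase in technique.get('kill_chain_phases', [])]


known = {'name': 'OS Credential Dumping',
         'kill_chain_phases': [{'kill_chain_name': 'mitre-attack', 'phase_name': 'credential-access'}]}
unknown = None  # what a lookup returns for an ID that is not in ATT&CK

for tech in (known, unknown):
    if tech is not None:
        print(get_tactics_sketch(tech))
    else:
        print('[!] Technique is unknown in ATT&CK. Ignoring this technique.')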
upgrade.py (267 changed lines)
@@ -1,4 +1,7 @@
from constants import *
import simplejson
from io import StringIO
import os


def _load_techniques(yaml_file_lines):
@@ -140,7 +143,7 @@ def _upgrade_technique_yaml_10_to_11(file_lines, attack_tech_data):
            file_new_lines.append(l)
            tech_id = REGEX_YAML_TECHNIQUE_ID_GROUP.search(l).group(1)
            tech_name = get_technique(attack_tech_data, tech_id)['name']
            file_new_lines.append(indent_chars + 'technique_name: ' + tech_name+'\n')
            file_new_lines.append(indent_chars + 'technique_name: ' + tech_name + '\n')
        elif REGEX_YAML_DETECTION.match(l):
            file_new_lines.append(l)
            file_new_lines.append((indent_chars * 2) + "applicable_to: ['all']\n")
@@ -342,3 +345,265 @@ def _upgrade_technique_yaml_11_to_12(file_lines, attack_tech_data):
    new_lines = fix_date_and_remove_null(yaml_file, date_for_visibility, input_type='ruamel')

    return new_lines


def check_yaml_updated_to_sub_techniques(filename):
    """
    Checks if the YAML technique administration file is already updated to ATT&CK with sub-techniques by comparing the techniques to the crosswalk file.
    :param filename: YAML administration file
    :return:
    """
    from generic import init_yaml, backup_file, fix_date_and_remove_null, load_attack_data, get_technique, get_technique_from_yaml, remove_technique_from_yaml

    # Open the crosswalk file from MITRE:
    conversion_table = None
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'mitre-data/subtechniques-crosswalk.json'), 'r') as f:
        conversion_table = simplejson.load(f)

    # Open the techniques YAML file:
    _yaml = init_yaml()
    with open(filename, 'r') as yaml_file:
        yaml_content = _yaml.load(yaml_file)

    # Keep track of which techniques can be auto updated and which need manual updating:
    auto_updatable_techniques = []
    manual_update_techniques = []
    for item in conversion_table:
        for tech in item:
            for sub_tech in item[tech]:
                # Check if the technique is in the YAML file:
                yaml_technique = get_technique_from_yaml(yaml_content, tech)
                if yaml_technique is None:
                    break
                else:
                    # Only check technique IDs that changed into something else (another technique or a sub-technique):
                    if sub_tech['id'] != tech:
                        # No conversion possible: multiple techniques became one technique or one sub-technique:
                        if sub_tech['explanation'] in ["Created to consolidate behavior around encrypted C2",
                                                       "Created to consolidate behavior around encrypting and compressing collected data",
                                                       "Created to refine the idea behind Common and Uncommonly Used Port to focus the behavior on use of a non-standard port for C2 based on the protocol used",
                                                       "Existing technique that became a sub-technique. Consolidates Modify Existing Service and New Service techniques into one sub-technique"]:
                            manual_update_techniques.append(tech)

                        # No conversion: one technique became multiple sub-techniques:
                        elif sub_tech['explanation'] in ["Deprecated and split into separate Bash, VBScript, and Python sub-techniques of Command and Scripting Interpreter.",
                                                         "Deprecated and split into separate Component Object Model and Distributed Component Object Model sub-techniques.",
                                                         "Deprecated and split into separate Unquoted Path, PATH Environment Variable, and Search Order Hijacking sub-techniques."]:
                            manual_update_techniques.append(tech)

                        # No conversion: technique merged with another technique:
                        # T1017 is also merged into T1072; unfortunately the explanation doesn't tell this
                        elif sub_tech['explanation'] in ["Merged with and name change from Standard Non-Application Layer Protocol"] \
                                or 'Name change from Application Deployment Software' in sub_tech['explanation']:
                            manual_update_techniques.append(tech)

                        # Remove deprecated items:
                        elif sub_tech['id'] == 'N/A':
                            auto_updatable_techniques.append(tech)

                        # Technique IDs that are changed:
                        # T1070 changed to T1551
                        elif sub_tech['explanation'] == "Remains Technique":
                            auto_updatable_techniques.append(tech)

                        # Conversion from technique to sub-technique:
                        elif 'Existing technique that became a sub-technique' in sub_tech['explanation'] \
                                or 'Broken out from pre-defined behavior within Input Capture' in sub_tech['explanation'] \
                                or 'Broken out from pre-defined behavior within Process Injection' in sub_tech['explanation'] \
                                or 'Added due to manipulation of token information' in sub_tech['explanation'] \
                                or 'Added due to manipulation of tokens' in sub_tech['explanation']:
                            auto_updatable_techniques.append(tech)

    if len(auto_updatable_techniques) > 0:
        print('[!] File: \'' + filename + '\' needs to be updated to ATT&CK with sub-techniques. Use option --update-to-sub-techniques to perform the update.')
        return False
    elif len(auto_updatable_techniques) == 0 and len(manual_update_techniques) > 0:
        print('[!] File: \'' + filename +
              '\' needs some manual work to upgrade to ATT&CK with sub-techniques. See the list below for what needs to be changed.')
        print('')
        upgrade_to_sub_techniques(filename, notify_only=True)
        return False
    elif len(auto_updatable_techniques) == 0 and len(manual_update_techniques) == 0:
        return True
    else:
        return False

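The nested loops above imply a crosswalk layout of a list of objects, each mapping an old technique ID to a list of entries carrying an 'id' and an 'explanation'. The sample below is fabricated purely to illustrate the walk; it is not real crosswalk content.

import json

# Fabricated sample with the implied shape of subtechniques-crosswalk.json:
sample_crosswalk = json.loads('''
[
  {"T1999": [{"id": "T1999.001", "explanation": "Existing technique that became a sub-technique"}]},
  {"T1998": [{"id": "N/A", "explanation": "Deprecated"}]}
]
''')

for item in sample_crosswalk:
    for tech in item:
        for sub_tech in item[tech]:
            if sub_tech['id'] == 'N/A':
                print(tech, '-> deprecated, would be removed from the YAML file')
            elif sub_tech['id'] != tech:
                print(tech, '->', sub_tech['id'], '(' + sub_tech['explanation'] + ')')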
def upgrade_to_sub_techniques(filename, notify_only=False):
    """
    Upgrade the YAML technique administration file to ATT&CK with sub-techniques.
    :param filename: YAML administration file
    :param notify_only: only print which techniques need to change, without modifying the file
    :return:
    """
    from generic import init_yaml, backup_file, fix_date_and_remove_null, load_attack_data, get_technique, get_technique_from_yaml, remove_technique_from_yaml, ask_yes_no, local_stix_path

    if not notify_only and not ask_yes_no('DeTT&CT is going to update \'' + filename + '\' to ATT&CK with sub-techniques. A backup of this file will be generated. Do you want to continue:'):
        quit()

    # Open the crosswalk file from MITRE:
    conversion_table = None
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'mitre-data/subtechniques-crosswalk.json'), 'r') as f:
        conversion_table = simplejson.load(f)

    # Open the techniques YAML file:
    _yaml = init_yaml()
    with open(filename, 'r') as yaml_file:
        yaml_content = _yaml.load(yaml_file)

    # Get the MITRE ATT&CK techniques (e.g. to get the new name for renamed techniques):
    techniques = load_attack_data(DATA_TYPE_STIX_ALL_TECH_ENTERPRISE)

    # Check if the STIX object collection (TAXII server or local STIX objects) contains sub-techniques, by checking the existence of the first sub-technique (T1001.001):
    stix_sub_tech_check = get_technique(techniques, 'T1001.001')
    if stix_sub_tech_check is None:
        if local_stix_path:
            print('[!] The local STIX repository \'' + local_stix_path +
                  '\' doesn\'t contain ATT&CK sub-techniques. This is necessary to perform the update.')
        else:
            print('[!] The TAXII server doesn\'t contain ATT&CK sub-techniques. This is necessary to perform the update.')
        quit()

    # Keep an ignore list for techniques that have already been taken care of:
    ignore_list = []

    # Collect messages and show them at the end, grouped by comparable messages:
    become_subtech_msgs = []
    deprecated_msgs = []
    renamed_msgs = []
    subtech_added_msgs = []
    new_id_msgs = []
    warning_msgs = []
    for item in conversion_table:
        for tech in item:
            for sub_tech in item[tech]:
                # Check if the technique is in the YAML file:
                yaml_technique = get_technique_from_yaml(yaml_content, tech)

                # Only apply changes to techniques that are in the YAML file:
                if yaml_technique is not None and tech not in ignore_list:
                    # First check the source techniques that are equal to the destination techniques:
                    if sub_tech['id'] == tech:
                        # Do nothing for the items with "Remains Technique" because nothing changes.
                        if 'Remains Technique' in sub_tech['explanation'] \
                                or 'Remove from lateral-movement, Renamed, Name change from Logon Scripts and new sub-techniques added' in sub_tech['explanation'] \
                                or 'Remove from credential-access, New sub-techniques added' in sub_tech['explanation']:
                            pass

                        # Explanations we've missed:
                        else:
                            warning_msgs.append('[!] Explanation \'' + sub_tech['explanation'] +
                                                '\' in the subtechniques-crosswalk.json provided by MITRE not handled by DeTT&CT. Please check manually. Technique ' + tech)

                        # Perform the renames:
                        if 'renamed' in sub_tech['explanation'].lower():
                            new_name = get_technique(techniques, sub_tech['id'])['name']
                            if yaml_technique['technique_name'] != new_name:
                                renamed_msgs.append('[i] Technique ' + tech + ' is renamed from \'' + yaml_technique['technique_name'] +
                                                    '\' to \'' + new_name + '\'.')
                                yaml_technique['technique_name'] = new_name

                    # Then check the source techniques that are not equal to the destination techniques:
                    elif sub_tech['id'] != tech:
                        # No conversion possible: multiple techniques became one technique or one sub-technique:
                        if sub_tech['explanation'] in ["Created to consolidate behavior around encrypted C2",
                                                       "Created to consolidate behavior around encrypting and compressing collected data",
                                                       "Created to refine the idea behind Common and Uncommonly Used Port to focus the behavior on use of a non-standard port for C2 based on the protocol used",
                                                       "Existing technique that became a sub-technique. Consolidates Modify Existing Service and New Service techniques into one sub-technique"]:
                            text = 'sub-technique' if '.' in sub_tech['id'] else 'technique'
                            warning_msgs.append('[!] Technique ' + tech + ' has been consolidated with multiple other techniques into one ' +
                                                text + ': ' + sub_tech['id'] + '. You need to migrate this technique manually.')

                        # No conversion: one technique became multiple sub-techniques:
                        elif sub_tech['explanation'] in ["Deprecated and split into separate Bash, VBScript, and Python sub-techniques of Command and Scripting Interpreter.",
                                                         "Deprecated and split into separate Component Object Model and Distributed Component Object Model sub-techniques.",
                                                         "Deprecated and split into separate Unquoted Path, PATH Environment Variable, and Search Order Hijacking sub-techniques."]:
                            sub_ids = []
                            for i in item[tech]:
                                sub_ids.append(i['id'])
                            warning_msgs.append('[!] Technique ' + tech + ' is deprecated and split into multiple sub-techniques: ' + ', '.join(sub_ids) +
                                                '. You need to migrate this technique manually.')
                            ignore_list.append(tech)

                        # No conversion: technique merged with another technique:
                        # T1017 is also merged into T1072; unfortunately the explanation doesn't tell this
                        elif sub_tech['explanation'] in ["Merged with and name change from Standard Non-Application Layer Protocol"] \
                                or 'Name change from Application Deployment Software' in sub_tech['explanation']:
                            warning_msgs.append('[!] Technique ' + tech + ' is merged with ' + sub_tech['id'] +
                                                '. You need to migrate this technique manually.')

                        # Remove deprecated items:
                        elif sub_tech['id'] == 'N/A':
                            remove_technique_from_yaml(yaml_content, tech)
                            deprecated_msgs.append('[i] Technique ' + tech + ' is deprecated. Technique has been removed from the YAML file.')

                        # Technique IDs that are changed:
                        # T1070 changed to T1551
                        elif sub_tech['explanation'] == "Remains Technique":
                            yaml_technique['technique_id'] = sub_tech['id']
                            new_id_msgs.append('[i] The ID of technique ' + tech + ' is changed to ' + sub_tech['id'] + '.')

                        # Conversion from technique to sub-technique:
                        elif 'Existing technique that became a sub-technique' in sub_tech['explanation'] \
                                or 'Broken out from pre-defined behavior within Input Capture' in sub_tech['explanation'] \
                                or 'Broken out from pre-defined behavior within Process Injection' in sub_tech['explanation'] \
                                or 'Added due to manipulation of token information' in sub_tech['explanation'] \
                                or 'Added due to manipulation of tokens' in sub_tech['explanation']:
                            yaml_technique['technique_id'] = sub_tech['id']
                            yaml_technique['technique_name'] = get_technique(techniques, sub_tech['id'])['name']
                            become_subtech_msgs.append('[i] Technique ' + tech + ' has become sub-technique: ' +
                                                       sub_tech['id'] + '. Change applied in the YAML file.')

                        # Explanations we've missed:
                        else:
                            warning_msgs.append('[!] Explanation \'' + sub_tech['explanation'] +
                                                '\' in the subtechniques-crosswalk.json provided by MITRE not handled by DeTT&CT. Please check manually. Technique ' + tech)

                        # Perform the renames:
                        if 'renamed' in sub_tech['explanation'].lower():
                            new_name = get_technique(techniques, sub_tech['id'])['name']
                            print(tech)
                            if yaml_technique['technique_name'] != new_name:
                                renamed_msgs.append('[i] Technique ' + tech + ' is renamed from \'' + yaml_technique['technique_name'] +
                                                    '\' to \'' + new_name + '\'.')
                                yaml_technique['technique_name'] = new_name

    # Print the results:
    if len(become_subtech_msgs + deprecated_msgs + renamed_msgs + subtech_added_msgs + new_id_msgs) > 0:
        print("Informational messages (no action needed):")

        for item in become_subtech_msgs:
            print(item)
        for item in deprecated_msgs:
            print(item)
        for item in renamed_msgs:
            print(item)
        for item in subtech_added_msgs:
            print(item)
        for item in new_id_msgs:
            print(item)
        print('')

    if len(warning_msgs) > 0:
        print("Messages that need your attention:")
        for item in warning_msgs:
            print(item)
        print('')

    if len(become_subtech_msgs + deprecated_msgs + renamed_msgs + subtech_added_msgs + new_id_msgs + warning_msgs) == 0:
        print('[i] No techniques found that need to be updated to ATT&CK sub-techniques.')
    else:
        if not notify_only:
            # Create a backup of the YAML file:
            backup_file(filename)
            with open(filename, 'w') as fd:
                # ruamel does not support output to a variable. Therefore we make use of StringIO.
                string_io = StringIO()
                _yaml.dump(yaml_content, string_io)
                string_io.seek(0)
                new_lines = string_io.readlines()
                fd.writelines(new_lines)
            print('File written: ' + filename)

        # Quit DeTT&CT when manual work needs to be done:
        if len(warning_msgs) > 0:
            quit()
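The file-writing step above uses ruamel's round-trip loader, which dumps to a stream rather than returning a string; a small self-contained illustration of that StringIO workaround follows (the YAML snippet and output filename are arbitrary examples, not the project's sample file).

from io import StringIO
from ruamel.yaml import YAML

# Load, edit in place, dump into StringIO, then write the lines to the target file.
_yaml = YAML()
_yaml.preserve_quotes = True

yaml_content = _yaml.load('techniques:\n- technique_id: T1055\n  technique_name: Process Injection\n')
yaml_content['techniques'][0]['technique_id'] = 'T1055.011'  # example in-place edit

string_io = StringIO()
_yaml.dump(yaml_content, string_io)
string_io.seek(0)
new_lines = string_io.readlines()

with open('technique_administration_example.yaml', 'w') as fd:
    fd.writelines(new_lines)
print('File written: technique_administration_example.yaml')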