Code example #1
def get_stix_data(domain, version=None, remote=None):
    """
    download the ATT&CK STIX data for the given domain and version from MITRE/CTI (or for just the domain, ignoring
    version, if a remote Workbench is specified).
    :param domain: the domain of ATT&CK to fetch data from, e.g. "enterprise-attack"
    :param version: the version of ATT&CK to fetch data from, e.g. "v8.1". If omitted, returns the latest version
                    (not used for invocations that use remote)
    :param remote: optional URL of an ATT&CK Workbench instance. If specified, data will be retrieved from the target
                    Workbench instead of MITRE/CTI
    :returns: a MemoryStore containing the domain data
    """
    if remote:  # Using Workbench Instance
        if ':' not in remote[6:]:
            remote += ":3000"
        if not remote.startswith('http'):
            remote = 'http://' + remote
        url = f"{remote}/api/stix-bundles?domain={domain}&includeRevoked=true&includeDeprecated=true"
        stix_json = requests.get(url).json()
        return MemoryStore(stix_json)
    else:  # Using MITRE/CTI
        if version:
            url = f"https://raw.githubusercontent.com/mitre/cti/ATT%26CK-{version}/{domain}/{domain}.json"
        else:
            url = f"https://raw.githubusercontent.com/mitre/cti/master/{domain}/{domain}.json"

        stix_json = requests.get(url).json()
        return MemoryStore(stix_data=stix_json["objects"])
def getFrameworkOverviewLayers(controls, mappings, attackdata, domain,
                               frameworkname, version):
    """ingest the mappings, controls, and ATT&CK data, and return an array of layer JSONs grouped by control family"""
    # build list of control families
    familyIDToControls, familyIDToName, idToFamily = parseFamilyData(controls)

    outlayers = [{
        "outfile":
        f"{frameworkname}-overview.json",
        "layer":
        layer(
            f"{frameworkname} overview",
            f"{frameworkname} heatmap overview of control mappings, where scores are the number of associated controls",
            domain,
            toTechniquelist(controls, mappings, attackdata, familyIDToControls,
                            familyIDToName, idToFamily), version)
    }]
    for familyID in familyIDToControls:
        controlsInFamily = MemoryStore(stix_data=familyIDToControls[familyID])
        techniquesInFamily = toTechniquelist(controlsInFamily, mappings,
                                             attackdata, familyIDToControls,
                                             familyIDToName, idToFamily)
        if len(techniquesInFamily) > 0:  # don't build heatmaps with no mappings
            # build family overview mapping
            outlayers.append({
                "outfile":
                os.path.join("by_family",
                             familyIDToName[familyID].replace(" ", "_"),
                             f"{familyID}-overview.json"),
                "layer":
                layer(
                    f"{familyIDToName[familyID]} overview",
                    f"{frameworkname} heatmap for controls in the {familyIDToName[familyID]} family, where scores are the number of associated controls",
                    domain, techniquesInFamily, version)
            })
            # build layer for each control
            for control in familyIDToControls[familyID]:
                controlMs = MemoryStore(stix_data=control)
                control_id = control["external_references"][0]["external_id"]
                techniquesMappedToControl = toTechniquelist(
                    controlMs, mappings, attackdata, familyIDToControls,
                    familyIDToName, idToFamily)
                if len(techniquesMappedToControl) > 0:  # don't build heatmaps with no mappings
                    outlayers.append({
                        "outfile":
                        os.path.join(
                            "by_family",
                            familyIDToName[familyID].replace(" ", "_"),
                            f"{'_'.join(control_id.split(' '))}.json"),
                        "layer":
                        layer(f"{control_id} mappings",
                              f"{frameworkname} {control_id} mappings", domain,
                              techniquesMappedToControl, version)
                    })

    return outlayers
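
A minimal sketch of how the returned array might be written to disk, assuming each entry keeps the {"outfile": ..., "layer": ...} shape built above (the output directory name is illustrative):

import json
import os

def write_layers(outlayers, outdir="layers"):
    """write each generated layer JSON to its outfile path under outdir"""
    for entry in outlayers:
        path = os.path.join(outdir, entry["outfile"])
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, "w") as f:
            json.dump(entry["layer"], f, indent=4)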
Code example #3
def convert(filename, output='output.json'):
    count = 0
    with open(filename) as json_file:
        vList = []
        data = json.load(json_file)

        print("Loaded the file")
        for cves in data['CVE_Items']:
            count += 1
            # Getting the different fields
            name = cves['cve']['CVE_data_meta']['ID']
            description = cves['cve']['description']['description_data'][0]["value"]
            cdate = cves['publishedDate']
            mdate = cves['lastModifiedDate']
            creator = cves['cve']['CVE_data_meta']['ASSIGNER']

            # Creating the vulnerability with the extracted fields
            vuln = Vulnerability(name=name, created=cdate, modified=mdate, description=description)

            # Adding the vulnerability to the list of vulnerabilities
            vList.append(vuln)
    # Creating the bundle from the list of vulnerabilities
    bundle = Bundle(vList)
    # Creating a MemoryStore object from the bundle
    memorystore = MemoryStore(bundle)
    # Dumping this object to a file
    memorystore.save_to_file(output)

    print("Successfully converted " + str(count) + " vulnerabilities")
Code example #4
File: heatmap.py Project: jpmattin/attack-scripts
def generate():
    """parse the STIX on MITRE/CTI and return a layer dict with techniques with randomized scores"""
    # import the STIX data from MITRE/CTI
    stix = requests.get(
        "https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json"
    ).json()
    ms = MemoryStore(stix_data=stix["objects"])
    # get all techniques in STIX
    techniques = ms.query([Filter("type", "=", "attack-pattern")])
    # parse techniques into layer format
    techniques_list = []
    for technique in techniques:
        # skip deprecated and revoked
        if ("x_mitre_deprecated" in technique and
                technique["x_mitre_deprecated"]) or ("revoked" in technique
                                                     and technique["revoked"]):
            continue
        techniqueID = technique["external_references"][0][
            "external_id"]  # get the attackID
        techniques_list.append({
            "techniqueID": techniqueID,
            "score": random.randint(1, 100)  # random score
        })
    # return the techniques in a layer dict
    return {
        "name": "heatmap example",
        "version": "3.0",
        "sorting": 3,  # descending order of score
        "description":
        "An example layer where all techniques have a randomized score",
        "domain": "mitre-enterprise",
        "techniques": techniques_list,
    }
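
The returned dict is already shaped as an ATT&CK Navigator layer, so persisting it only needs json.dump; a short sketch (the file name is illustrative):

import json

layer = generate()
with open("random_heatmap_layer.json", "w") as f:
    json.dump(layer, f, indent=4)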
Code example #5
def rel_mem_store():
    cam = Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS)
    idy = Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
    ind = Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
    mal = Malware(id=MALWARE_ID, **MALWARE_KWARGS)
    rel1 = Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0])
    rel2 = Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1])
    rel3 = Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2])
    stix_objs = [cam, idy, ind, mal, rel1, rel2, rel3]
    yield MemoryStore(stix_objs)
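
A hypothetical test that consumes the fixture above, assuming rel_mem_store is registered with @pytest.fixture and the *_ID constants are defined as in the stix2 test suite:

def test_malware_relationships(rel_mem_store):
    # the malware object is the source or target of all three relationships above
    rels = rel_mem_store.relationships(MALWARE_ID)
    assert len(rels) == 3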
Code example #6
def get_attack(url: str, proxy_string: str, timeout: int) -> MemoryStore:
    """Fetch MITRE ATT&CK JSON data in STIX 2 format and return a STIX 2 memory store"""
    attack = worker.fetch_json(url, proxy_string, timeout)

    # Create memory store
    mem = MemoryStore()

    # Add all objects to the memory store
    for obj in parse(attack, allow_custom=True).objects:
        mem.add(obj)

    return mem
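
A slightly more compact variant is sketched below; it seeds the MemoryStore in one call instead of adding objects in a loop (same worker.fetch_json assumption as above):

def get_attack_compact(url: str, proxy_string: str, timeout: int) -> MemoryStore:
    """Fetch MITRE ATT&CK JSON data and seed a MemoryStore in a single call"""
    attack = worker.fetch_json(url, proxy_string, timeout)
    bundle = parse(attack, allow_custom=True)
    return MemoryStore(stix_data=bundle.objects, allow_custom=True)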
Code example #7
    def __init__(self):
        """Download and store in memory the STIX data on instantiation."""
        if self.kill_chain_name == "":
            raise ValueError(
                f"Kill chain name not specified in class {self.__class__.__name__}"
            )

        if self.url == "":
            raise ValueError(
                f"URL not specified in class {self.__class__.__name__}")

        logging.info(f"Downloading STIX data at: {self.url}")
        stix_json = requests.get(self.url).json()
        self._memory_store = MemoryStore(stix_data=stix_json["objects"])
Code example #8
def test_memory_store_save_load_file(mem_store):
    filename = 'memory_test/mem_store.json'
    mem_store.save_to_file(filename)
    contents = open(os.path.abspath(filename)).read()

    assert '"id": "indicator--d81f86b9-975b-bc0b-775e-810c5ad45a4f",' in contents
    assert '"id": "indicator--d81f86b8-975b-bc0b-775e-810c5ad45a4f",' in contents

    mem_store2 = MemoryStore()
    mem_store2.load_from_file(filename)
    assert mem_store2.get("indicator--d81f86b8-975b-bc0b-775e-810c5ad45a4f")
    assert mem_store2.get("indicator--d81f86b9-975b-bc0b-775e-810c5ad45a4f")

    shutil.rmtree(os.path.dirname(filename))
def test_memory_store_save_load_file_no_name_provided(fs_mem_store_no_name):
    filename = fs_mem_store_no_name  # the fixture yields the filename where the memory store was written

    # STIX2 contents of the store have already been written to the file
    # (this is done in the fixture 'fs_mem_store_no_name'), so it can be read in here
    contents = open(os.path.abspath(filename)).read()

    assert '"id": "indicator--00000000-0000-4000-8000-000000000001",' in contents
    assert '"id": "indicator--00000000-0000-4000-8000-000000000001",' in contents

    mem_store2 = MemoryStore()
    mem_store2.load_from_file(filename)
    assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001")
    assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001")
Code example #10
def test_memory_store_save_load_file(mem_store, fs_mem_store):
    filename = fs_mem_store  # the fixture fs_mem_store yields filename where the memory store was written to

    # STIX2 contents of mem_store have already been written to file
    # (this is done in fixture 'fs_mem_store'), so can already read-in here
    contents = open(os.path.abspath(filename)).read()

    assert '"id": "indicator--d81f86b9-975b-bc0b-775e-810c5ad45a4f",' in contents
    assert '"id": "indicator--d81f86b8-975b-bc0b-775e-810c5ad45a4f",' in contents

    mem_store2 = MemoryStore()
    mem_store2.load_from_file(filename)
    assert mem_store2.get("indicator--d81f86b8-975b-bc0b-775e-810c5ad45a4f")
    assert mem_store2.get("indicator--d81f86b9-975b-bc0b-775e-810c5ad45a4f")
Code example #11
def get_stix_data(domain, version=None):
    """
    download the ATT&CK STIX data for the given domain and version from MITRE/CTI.
    :param domain: the domain of ATT&CK to fetch data from, e.g. "enterprise-attack"
    :param version: the version of ATT&CK to fetch data from, e.g. "v8.1". If omitted, returns the latest version
    :returns: a MemoryStore containing the domain data
    """
    if version:
        url = f"https://raw.githubusercontent.com/mitre/cti/ATT%26CK-{version}/{domain}/{domain}.json"
    else:
        url = f"https://raw.githubusercontent.com/mitre/cti/master/{domain}/{domain}.json"

    stix_json = requests.get(url).json()
    return MemoryStore(stix_data=stix_json["objects"])
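
A minimal call-site sketch for the helper above (assumes stix2's Filter is imported alongside MemoryStore):

from stix2 import Filter

src = get_stix_data("enterprise-attack", version="v8.1")
techniques = src.query([Filter("type", "=", "attack-pattern")])
print(f"loaded {len(techniques)} techniques from ATT&CK v8.1")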
Code example #12
File: cvetostix2.py Project: siemonster/connectors
def convert(filename, output='output.json'):
    # Create the default author
    author = Identity(name='The MITRE Corporation',
                      identity_class='organization')
    count = 0
    with open(filename) as json_file:
        vulnerabilities_bundle = [author]
        data = json.load(json_file)

        print("Loaded the file")
        for cves in data['CVE_Items']:
            count += 1
            # Get the name
            name = cves['cve']['CVE_data_meta']['ID']

            # Create external references
            external_reference = ExternalReference(
                source_name='NIST NVD',
                url='https://nvd.nist.gov/vuln/detail/' + name)
            external_references = [external_reference]
            for reference in cves['cve']['references']['reference_data']:
                external_reference = ExternalReference(
                    source_name=reference['refsource'], url=reference['url'])
                external_references.append(external_reference)

            # Getting the different fields
            description = cves['cve']['description']['description_data'][0][
                "value"]
            cdate = cves['publishedDate']
            mdate = cves['lastModifiedDate']

            # Creating the vulnerability with the extracted fields
            vuln = Vulnerability(name=name,
                                 created=cdate,
                                 modified=mdate,
                                 description=description,
                                 created_by_ref=author,
                                 external_references=external_references)
            # Adding the vulnerability to the list of vulnerabilities
            vulnerabilities_bundle.append(vuln)
    # Creating the bundle from the list of vulnerabilities
    bundle = Bundle(vulnerabilities_bundle)
    # Creating a MemoryStore object from the bundle
    memorystore = MemoryStore(bundle)
    # Dumping this object to a file
    memorystore.save_to_file(output)

    print("Successfully converted " + str(count) + " vulnerabilities")
def getLayersByProperty(controls, mappings, attackdata, domain, frameworkname,
                        x_mitre, version):
    """get layers grouping the mappings according to values of the given property"""
    propertyname = x_mitre.split("x_mitre_")[1]  # remove prefix

    familyIDToControls, familyIDToName, idToFamily = parseFamilyData(controls)

    # group controls by the property
    propertyValueToControls = {}

    def addToDict(value, control):
        if value in propertyValueToControls:
            propertyValueToControls[value].append(control)
        else:
            propertyValueToControls[value] = [control]

    # iterate through controls, grouping by property
    isListType = False
    for control in controls.query([Filter("type", "=", "course-of-action")]):
        value = control.get(x_mitre)
        if not value: continue
        if isinstance(value, list):
            isListType = True
            for v in value:
                addToDict(v, control)
        else:
            addToDict(value, control)

    outlayers = []
    for value in propertyValueToControls:
        # controls for the corresponding values
        controlsOfValue = MemoryStore(stix_data=propertyValueToControls[value])
        techniques = toTechniquelist(controlsOfValue, mappings, attackdata,
                                     familyIDToControls, familyIDToName,
                                     idToFamily)
        if len(techniques) > 0:
            # build layer for this technique set
            outlayers.append({
                "outfile":
                os.path.join(f"by_{propertyname}", f"{value}.json"),
                "layer":
                layer(
                    f"{propertyname}={value} mappings",
                    f"techniques where the {propertyname} of associated controls {'includes' if isListType else 'is'} {value}",
                    domain, techniques, version)
            })

    return outlayers
Code example #14
File: msbtostix2.py Project: onekill1801/testdemo
def convert(parse_data, output='output.json'):
	# Create the default author
	author = Identity(name='The MS Bulletin Corporation', identity_class='organization')
	print(author)
	count = 0

	vulnerabilities_bundle = [author]
	# Getting modified date
	mdate = parse_data["rss"]["channel"]["lastBuildDate"]
	for msb in parse_data["rss"]["channel"]["item"]:
		count += 1
		# Get the name
		name = msb["title"]
		# Getting the create date
		cdate = msb["pubDate"]
		# Getting description
		description = msb["description"]
		 # Create external references
		external_references = ExternalReference(
			source_name="Microsoft Security Bulletin",
			url=msb["link"]
		)
		# Creating the vulnerability with the extracted fields
		vuln = Vulnerability(
			name=name,
			created=cdate,
			modified=mdate,
			description=description,
			created_by_ref=author,
			external_references=external_references
        )
        # Adding the vulnerability to the list of vulnerabilities
		vulnerabilities_bundle.append(vuln)
	# Creating the bundle from the list of vulnerabilities
	bundle = Bundle(vulnerabilities_bundle)
	# Creating a MemoryStore object from the bundle
	memorystore = MemoryStore(bundle)
	# Dumping this object to a file
	memorystore.save_to_file(output)

	print("Successfully converted " + str(count) + " vulnerabilities")
Code example #15
    def __init__(self, source='taxii', local=None):
        """
            Initialization - Creates a matrix generator object

            :param source: Source to utilize (taxii or local)
            :param local: string path to local cache of stix data
        """
        self.convert_data = {}
        self.collections = dict()  # initialized here so the 'local' branch below can populate it
        if source.lower() not in ['taxii', 'local']:
            print(
                '[MatrixGen] - Unable to generate matrix, source {} is not one of "taxii" or "local"'
                .format(source))
            raise ValueError

        if source.lower() == 'taxii':
            self.server = Server('https://cti-taxii.mitre.org/taxii')
            self.api_root = self.server.api_roots[0]
            self.collections = dict()
            for collection in self.api_root.collections:
                if collection.title != "PRE-ATT&CK":
                    tc = Collection(
                        'https://cti-taxii.mitre.org/stix/collections/' +
                        collection.id)
                    self.collections[collection.title.split(' ')
                                     [0].lower()] = TAXIICollectionSource(tc)
        elif source.lower() == 'local':
            if local is not None:
                hd = MemoryStore()
                hd.load_from_file(local)  # load_from_file populates the store in place
                if 'mobile' in local.lower():
                    self.collections['mobile'] = hd
                else:
                    self.collections['enterprise'] = hd
            else:
                print(
                    '[MatrixGen] - "local" source specified, but path to local source not provided'
                )
                raise ValueError
        self.matrix = {}
        self._build_matrix()
Code example #16
def load(url):
    """Load stix data from file"""
    src = MemoryStore()
    src.load_from_file(url)
    return src
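
A hypothetical call site for the loader above (the bundle path and query are illustrative):

from stix2 import Filter

src = load("enterprise-attack.json")
groups = src.query([Filter("type", "=", "intrusion-set")])
print(f"{len(groups)} groups in the local bundle")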
Code example #17
filename = sys.argv[1]
count = 0
with open(filename) as json_file:
    vList = []
    data = json.load(json_file)
    
    print("Loaded the file")
    for cves in data['CVE_Items']:
        count += 1
        # Getting the different fields
        name = cves['cve']['CVE_data_meta']['ID']
        description = cves['cve']['description']['description_data'][0]["value"]
        cdate = cves['publishedDate']
        mdate = cves['lastModifiedDate']
        creator = cves['cve']['CVE_data_meta']['ASSIGNER']

        # Creating the vulnerability with the extracted fields
        vuln = Vulnerability(name=name, created=cdate, modified=mdate, description=description)

        # Adding the vulnerability to the list of vulnerabilities
        vList.append(vuln)
# Creating the bundle from the list of vulnerabilities
bundle = Bundle(vList)
# Creating a MemoryStore object from the bundle
memorystore = MemoryStore(bundle)
# Dumping this object to a file
memorystore.save_to_file('output.json')
    
print("Successfully converted " + str(count) + " vulnerabilities")
    
        help=
        "if flag specified, will remove the contents of the output folder before writing layers"
    )
    parser.add_argument(
        "--build-directory",
        dest="buildDir",
        action="store_true",
        help=
        "if flag specified, will build a markdown file listing the output files for easy access in the Navigator"
    )

    args = parser.parse_args()

    print("downloading ATT&CK data... ", end="", flush=True)
    attackdata = MemoryStore(stix_data=requests.get(
        f"https://raw.githubusercontent.com/mitre/cti/ATT%26CK-{args.version}/{args.domain}/{args.domain}.json"
    ).json()["objects"])
    print("done")

    print("loading controls framework... ", end="", flush=True)
    with open(args.controls, "r") as f:
        controls = MemoryStore(stix_data=json.load(f)["objects"],
                               allow_custom=True)
    print("done")

    print("loading mappings... ", end="", flush=True)
    with open(args.mappings, "r") as f:
        mappings = MemoryStore(stix_data=json.load(f)["objects"])
    print("done")

    print("generating layers... ", end="", flush=True)
Code example #19
def generate(softwaretype="software"):
    """ generate and return a layer dict showing techniques used by software.
        If softwaretype is "malware" or "tool", only software of that type is shown; if it is "software", the layer shows both malware and tools.
    """
    # import the STIX data from MITRE/CTI
    stix = requests.get(
        "https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json"
    ).json()
    ms = MemoryStore(stix_data=stix["objects"])
    # software includes malware and tool types so perform two queries and merge the results
    software_filters = []
    if softwaretype == "malware" or softwaretype == "software":
        software_filters.append([Filter('type', '=', 'malware')])
    if softwaretype == "tool" or softwaretype == "software":
        software_filters.append([Filter('type', '=', 'tool')])

    software = list(chain.from_iterable(ms.query(f) for f in software_filters))

    # build a list of techniques used by software
    techniques_used = {}  #attackID => using software names
    for thesoftware in software:
        # filter out revoked and deprecated software
        if ("x_mitre_deprecated" in thesoftware
                and thesoftware["x_mitre_deprecated"]) or (
                    "revoked" in thesoftware and thesoftware["revoked"]):
            continue
        for relationship in ms.relationships(thesoftware["id"]):
            # skip all non-technique relationships
            if "attack-pattern" not in relationship["target_ref"]: continue
            technique = ms.get(relationship["target_ref"])
            # filter out deprecated and revoked techniques
            if ("x_mitre_deprecated" in technique
                    and technique["x_mitre_deprecated"]) or (
                        "revoked" in technique and technique["revoked"]):
                continue
            techniqueID = technique["external_references"][0]["external_id"]
            # store usage in techniques_used struct
            if techniqueID in techniques_used:
                techniques_used[techniqueID].append(thesoftware["name"])
            else:
                techniques_used[techniqueID] = [thesoftware["name"]]

    # format the techniques for the output layer
    techniques_list = []
    highest_usage = 0
    lowest_usage = 1
    for techniqueID in techniques_used:
        # determine the number of used techniques for the score
        count = len(techniques_used[techniqueID])
        highest_usage = max(highest_usage, count)
        lowest_usage = min(lowest_usage, count)
        # append technique struct to list of layer-formatted techniques
        techniques_list.append({
            "techniqueID":
            techniqueID,
            "comment":
            "executed by " + ", ".join(techniques_used[techniqueID]),
            "score":
            count,
        })
    # set up layer name and desc according to softwaretype
    if softwaretype != "software":
        plural = "tools" if softwaretype == "tool" else "malware"
        layername = f"Software ({softwaretype}) Execution"
        layerdescription = f"All techniques that can be executed by software of subtype {softwaretype}, where the score is the count of {plural} using the technique"
    else:
        layername = "Software Execution"
        layerdescription = f"All techniques that can be executed by software, where the score is the count of software using the technique"

    # construct and return the layer as a dict
    return {
        "name": layername,
        "description": layerdescription,
        "version": "3.0",
        "domain": "mitre-enterprise",
        "techniques": techniques_list,
        "sorting": 3,  # order in descending order of score (count)
        "gradient": {
            "colors": [
                "#fff7b3",  # low counts are yellow
                "#ff6666",  # high counts are red
            ],
            "minValue": lowest_usage,
            "maxValue": highest_usage
        },
    }
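
The deprecated/revoked check above recurs across several of these examples (#4, #21 and #24 use the same pattern); a small helper could factor it out, sketched here:

def is_deprecated_or_revoked(stix_object):
    """return True if an ATT&CK object should be skipped when building layers"""
    return stix_object.get("x_mitre_deprecated", False) or stix_object.get("revoked", False)

# e.g. inside the loops above:
#     if is_deprecated_or_revoked(technique):
#         continue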
Code example #20
#!/usr/bin/env python3
import os
import logging
import sys
import datetime
import certstream
import whois
import requests
import csv
import urllib.request
from tld import get_tld
from bs4 import BeautifulSoup
from stix2 import MemoryStore, Indicator

# do not touch, otherwise the site-database file will be overwritten
mem = MemoryStore()
GREEN = "\033[38;5;2m"  # Clean
RED = "\033[38;5;1m"  # Phishing
LIGHT_RED = "\033[38;5;9m"  # Serious danger
GRAY = "\033[38;5;7m"  # Being computed
WHITE = "\033[0m"  # Reset

fname = open("list-fr.csv", 'r')
file = csv.reader(fname)
dico = {
    '0': ['o'],
    'I': ['l', '1'],
    '8': ['b'],
    '1': ['l', 'i'],
    '5': ['s'],
    'i': ['j'],
Code example #21
def generate():
    """parse the STIX on MITRE/CTI and return a layer dict showing all techniques used by an APT group with phrase 'bear' in the group aliases."""
    # import the STIX data from MITRE/CTI
    stix = requests.get(
        "https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json"
    ).json()
    ms = MemoryStore(stix_data=stix["objects"])

    groups = ms.query([Filter("type", "=", "intrusion-set")])

    # find bear groups
    bear_groups = []  #list of groups with bear in name
    for group in groups:
        # filter out deprecated and revoked groups
        if ("x_mitre_deprecated" in group
                and group["x_mitre_deprecated"]) or ("revoked" in group
                                                     and group["revoked"]):
            continue
        # check all aliases for bear
        for alias in group["aliases"]:
            if re.match(".*bear.*", alias, re.IGNORECASE) is not None:
                bear_groups.append(group)
                break  # don't match the same group multiple times

    # find techniques used by bear groups
    techniques_used = {}  #attackID => using bear groups
    for bear in bear_groups:
        # construct the "bear" name for the comment
        # if bear occurs in multiple aliases, list them all
        bearnames = []
        for alias in bear["aliases"]:
            if re.match(".*bear.*", alias, re.IGNORECASE) is not None:
                bearnames.append(alias)
        bearname = bearnames[0]
        if len(bearnames) > 1:
            bearname += " (AKA " + ",".join(bearnames[1:]) + ")"

        # get techniques used by this group
        relationships = ms.relationships(bear["id"])
        for relationship in relationships:
            # skip all non-technique relationships
            if "attack-pattern" not in relationship["target_ref"]: continue
            technique = ms.get(relationship["target_ref"])
            # filter out deprecated and revoked techniques
            if ("x_mitre_deprecated" in technique
                    and technique["x_mitre_deprecated"]) or (
                        "revoked" in technique and technique["revoked"]):
                continue
            techniqueID = technique["external_references"][0]["external_id"]
            # store usage in techniques_used struct
            if techniqueID in techniques_used:
                techniques_used[techniqueID].append(bearname)
            else:
                techniques_used[techniqueID] = [bearname]

    # format the techniques for the output layer
    techniques_list = []
    for techniqueID in techniques_used:
        techniques_list.append({
            "techniqueID":
            techniqueID,
            "comment":
            "used by " + ", ".join(techniques_used[techniqueID]),
            "color":
            "#ff6666"
        })
    # construct and return the layer as a dict
    return {
        "name":
        "*Bear APTs",
        "versions": {
            "layer": "4.1",
            "navigator": "4.1"
        },
        "description":
        "All techniques used by an APT group with phrase 'bear' in the group aliases",
        "domain":
        "enterprise-attack",
        "techniques":
        techniques_list,
        "legendItems": [{
            "label": "Used by a group with the phrase 'bear' in the group aliases",
            "color": "#ff6666"
        }]
    }
Code example #22
    def __init__(self, source='taxii', resource=None):
        """
            Initialization - Creates a matrix generator object

            :param source: Source to utilize (taxii, remote, or local)
            :param resource: string path to local cache of stix data (local) or url of an ATT&CK Workbench (remote)
        """
        self.convert_data = {}
        self.collections = dict()
        if source.lower() not in ['taxii', 'local', 'remote']:
            print(
                '[MatrixGen] - Unable to generate matrix, source {} is not one of "taxii", "remote" or '
                '"local"'.format(source))
            raise ValueError

        if source.lower() == 'taxii':
            self.server = Server('https://cti-taxii.mitre.org/taxii')
            self.api_root = self.server.api_roots[0]
            for collection in self.api_root.collections:
                if collection.title != "PRE-ATT&CK":
                    tc = Collection(
                        'https://cti-taxii.mitre.org/stix/collections/' +
                        collection.id)
                    self.collections[collection.title.split(' ')
                                     [0].lower()] = TAXIICollectionSource(tc)
        elif source.lower() == 'local':
            if resource is not None:
                hd = MemoryStore()
                hd.load_from_file(resource)
                if 'mobile' in resource.lower():
                    self.collections['mobile'] = hd
                else:
                    self.collections['enterprise'] = hd
            else:
                print(
                    '[MatrixGen] - "local" source specified, but path to local source not provided'
                )
                raise ValueError
        elif source.lower() == 'remote':
            if resource is not None:
                if ':' not in resource[6:]:
                    print(
                        '[MatrixGen] - "remote" source missing port; assuming ":3000"'
                    )
                    resource += ":3000"
                if not resource.startswith('http'):
                    resource = 'http://' + resource
                for dataset in ['enterprise', 'mobile']:
                    hd = MemoryStore()
                    response = requests.get(
                        f"{resource}/api/stix-bundles?domain={dataset}-"
                        f"attack&includeRevoked=true&includeDeprecated=true")
                    response.raise_for_status()  # ensure we notice bad responses
                    _add(hd, json.loads(response.text), True, None)
                    self.collections[dataset] = hd
            else:
                print(
                    f'[MatrixGen] - WARNING: "remote" selected without providing a "resource" url. The use of '
                    f'"remote" requires the inclusion of a "resource" url to an ATT&CK Workbench instance. No matrix '
                    f'will be generated...')
        self.matrix = {}
        self._build_matrix()
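
Sketch of how this constructor might be invoked; the file path and Workbench URL are illustrative, and the rest of the MatrixGen class from attack-scripts is assumed to be present:

# local cache of an enterprise bundle
gen_local = MatrixGen(source='local', resource='enterprise-attack.json')

# ATT&CK Workbench instance (the port defaults to ":3000" if omitted)
gen_remote = MatrixGen(source='remote', resource='localhost:3000')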
Code example #23
                def load_dir(dir):
                    data_store = MemoryStore()
                    datafile = os.path.join(dir, domain + ".json")
                    data_store.load_from_file(datafile)

                    return load_datastore(data_store)
Code example #24
def generate(show_nodetect=False):
    """
        generate and return a layer dict showing techniques used by APT3 and APT29 as well as software used by those groups
        :param show_nodetect: if true, techniques that have no data-sources are highlighted as well
    """
    stix = requests.get(
        "https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json"
    ).json()
    ms = MemoryStore(stix_data=stix["objects"])
    apt3 = ms.get("intrusion-set--0bbdf25b-30ff-4894-a1cd-49260d0dd2d9")
    apt29 = ms.get("intrusion-set--899ce53f-13a0-479b-a0e4-67d46e241542")

    techniques_used = {
    }  # attackID => {apt3: boolean, apt29: boolean, software: Set, detection: boolean}

    for apt in [apt3, apt29]:

        def use_technique(technique, software=None):
            """helper function to record a technique as used"""
            techniqueID = technique["external_references"][0]["external_id"]
            # init struct if the technique has not been seen before
            if not techniqueID in techniques_used:
                techniques_used[techniqueID] = {
                    "APT3": False,
                    "APT29": False,
                    "software": set(),
                    "datasources": []
                }
            # record new data
            techniques_used[techniqueID][apt["name"]] = True
            if "x_mitre_data_sources" in technique and len(
                    technique["x_mitre_data_sources"]) > 0:
                techniques_used[techniqueID]["datasources"] = technique[
                    "x_mitre_data_sources"]
            if software:
                techniques_used[techniqueID]["software"].add(software["name"])

        # traverse relationships
        for relationship in ms.relationships(apt["id"]):
            target_obj = ms.get(relationship["target_ref"])
            # skip relationships with deprecated objects
            if ("x_mitre_deprecated" in target_obj
                    and target_obj["x_mitre_deprecated"]) or (
                        "revoked" in target_obj and target_obj["revoked"]):
                continue
            # technique type relationship
            if target_obj["type"] == "attack-pattern":
                # record technique usage
                use_technique(target_obj)
            # software type relationship, traverse to find software-used techniques
            if target_obj["type"] == "malware" or target_obj["type"] == "tool":
                software = target_obj
                for software_relationship in ms.relationships(software["id"]):
                    software_target_obj = ms.get(
                        software_relationship["target_ref"])
                    # skip relationships with deprecated objects
                    if ("x_mitre_deprecated" in software_target_obj
                            and software_target_obj["x_mitre_deprecated"]) or (
                                "revoked" in software_target_obj
                                and software_target_obj["revoked"]):
                        continue
                    if software_target_obj["type"] == "attack-pattern":
                        # record technique usage
                        use_technique(software_target_obj, software)

    # format the techniques for the output layer
    techniques_list = []

    def color_lookup(usage):
        if show_nodetect and not len(usage["datasources"]) > 0:
            return "#fc3b3b"
        if usage["APT3"] and usage["APT29"]:
            return "#74c476"
        if usage["APT3"]: return "#6baed6"
        if usage["APT29"]: return "#fce93b"

    for techniqueID in techniques_used:
        # determine the number of used techniques for the score
        comment = ""
        if show_nodetect:
            if len(techniques_used[techniqueID]["datasources"]) > 0:
                comment = f"considered detectable by a notional organization because it has data-sources {', '.join(techniques_used[techniqueID]['datasources'])}"
            else:
                comment = "considered undetectable by a notional organization because it has no data-sources"
        else:
            used = []
            if techniques_used[techniqueID]["APT3"]: used.append("APT3")
            if techniques_used[techniqueID]["APT29"]: used.append("APT29")
            used += list(techniques_used[techniqueID]["software"])
            comment = f"used by {', '.join(used)}"
        # append technique struct to list of layer-formatted techniques
        techniques_list.append({
            "techniqueID":
            techniqueID,
            "color":
            color_lookup(techniques_used[techniqueID]),
            "comment":
            comment,
        })

    # construct and return the layer as a dict
    # set up layer information according to show_nodetect
    name = "APT3 + APT29 with software"
    description = "This layer shows techniques (including techniques from software used by the groups) used by APT3 only in blue, APT29 only in yellow, and both APT3 and APT29 in green."
    legend = [{
        "label":
        "Used by APT3 or a software APT3 uses",
        "color":
        color_lookup({
            "APT3": True,
            "APT29": False,
            "datasources": ["placeholder"]
        })
    }, {
        "label":
        "Used by APT29 or a software APT29 uses",
        "color":
        color_lookup({
            "APT3": False,
            "APT29": True,
            "datasources": ["placeholder"]
        })
    }, {
        "label":
        "Used by both APT3 or a software APT3 uses and APT29 or a software APT29 uses",
        "color":
        color_lookup({
            "APT3": True,
            "APT29": True,
            "datasources": ["placeholder"]
        })
    }]
    # additional formatting when displaying notional detectability
    if show_nodetect:
        name += " and notional no detection"
        description += " The techniques in red denote techniques considered undetectable by a notional organization because they have no data-sources. Disclaimer: Data-sources in ATT&CK are sources of information that COULD be used to identify adversary actions; however, the exactness of that evidence varies greatly. Therefore the presence of a data source for a technique should only be considered a potential metric for detectability."
        legend.append({
            "label":
            "Used by either APT3 or APT29 but considered undetectable by a notional organization because it has no data-sources",
            "color":
            color_lookup({
                "APT3": True,
                "APT29": True,
                "datasources": []
            })
        })

    # layer struct
    return {
        "name": name,
        "version": "3.0",
        "description": description,
        "domain": "mitre-enterprise",
        "techniques": techniques_list,
        "legendItems": legend
    }
Code example #25
def convert(filename, output="output.json"):
    # Create the default author
    author = Identity(name="The MITRE Corporation",
                      identity_class="organization")
    count = 0
    with open(filename) as json_file:
        vulnerabilities_bundle = [author]
        data = json.load(json_file)
        for cves in data["CVE_Items"]:
            count += 1
            # Get the name
            name = cves["cve"]["CVE_data_meta"]["ID"]

            # Create external references
            external_reference = ExternalReference(
                source_name="NIST NVD",
                url="https://nvd.nist.gov/vuln/detail/" + name)
            external_references = [external_reference]
            for reference in cves["cve"]["references"]["reference_data"]:
                external_reference = ExternalReference(
                    source_name=reference["refsource"], url=reference["url"])
                external_references.append(external_reference)

            # Getting the different fields
            description = cves["cve"]["description"]["description_data"][0][
                "value"]
            base_score = (cves["impact"]["baseMetricV3"]["cvssV3"]["baseScore"]
                          if "baseMetricV3" in cves["impact"] else None)
            base_severity = (
                cves["impact"]["baseMetricV3"]["cvssV3"]["baseSeverity"]
                if "baseMetricV3" in cves["impact"] else None)
            attack_vector = (
                cves["impact"]["baseMetricV3"]["cvssV3"]["attackVector"]
                if "baseMetricV3" in cves["impact"] else None)
            integrity_impact = (
                cves["impact"]["baseMetricV3"]["cvssV3"]["integrityImpact"]
                if "baseMetricV3" in cves["impact"] else None)
            availability_impact = (
                cves["impact"]["baseMetricV3"]["cvssV3"]["availabilityImpact"]
                if "baseMetricV3" in cves["impact"] else None)
            cdate = cves["publishedDate"]
            mdate = cves["lastModifiedDate"]

            # Creating the vulnerability with the extracted fields
            vuln = Vulnerability(
                name=name,
                created=cdate,
                modified=mdate,
                description=description,
                created_by_ref=author,
                external_references=external_references,
                custom_properties={
                    "x_opencti_base_score": base_score,
                    "x_opencti_base_severity": base_severity,
                    "x_opencti_attack_vector": attack_vector,
                    "x_opencti_integrity_impact": integrity_impact,
                    "x_opencti_availability_impact": availability_impact,
                },
            )
            # Adding the vulnerability to the list of vulnerabilities
            vulnerabilities_bundle.append(vuln)
    # Creating the bundle from the list of vulnerabilities
    bundle = Bundle(vulnerabilities_bundle)
    # Creating a MemoryStore object from the bundle
    memorystore = MemoryStore(bundle)
    # Dumping this object to a file
    memorystore.save_to_file(output)
Code example #26
def mem_store():
    yield MemoryStore(STIX_OBJS1)
Code example #27
def load_dir(dir, new=False):
    data_store = MemoryStore()
    datafile = os.path.join(dir, domain + ".json")
    data_store.load_from_file(datafile)
    parse_subtechniques(data_store, new)
    return load_datastore(data_store)
Code example #28
File: react2stix.py Project: zinint/atc-react
from scripts.atcutils import ATCutils
from stix2 import MemoryStore, CustomObject, properties

ATCconfig = ATCutils.load_config("scripts/config.yml")
stix_mem = MemoryStore()

@CustomObject('x-react-stage', [ 
    ( 'name', properties.StringProperty(required=True)), 
    ( 'description', properties.StringProperty()),
    ( 'external_references', properties.ObjectReferenceProperty())] )
class ReactStage(object):
    def __init__(self, name=None, **kwargs):
        list_of_stages = ['Preparation','Identification','Containment','Eradication','Recovery','Lessons Learned']
        if name and name not in list_of_stages:
            raise ValueError("'%s' is not a recognized stage of RE&CT." % name)


@CustomObject( 'x-react-action', [ 
    ( 'name', properties.StringProperty(required=True)), 
    ( 'description', properties.StringProperty()), 
    ( 'external_references', properties.ObjectReferenceProperty()),
    ( 'kill_chain_phases', properties.ListProperty(properties.DictionaryProperty)) ] )
class ReactAction(object):
    def __init__(self, name=None, **kwargs):
        pass


@CustomObject('x-react-matrix', [ 
    ( 'name', properties.StringProperty(required=True)), 
    ( 'description', properties.StringProperty()), 
    ( 'tactic_refs', properties.ListProperty(properties.StringProperty)) ] )
Code example #29
def generate_dos_stix21_report():
    root_dir = os.path.dirname(os.path.abspath(__file__))
    import_path = os.path.join(root_dir, 'data\\')
    export_path = os.path.join(root_dir, 'results\\')
    # sys.stdout = open(export_path+'console_output_DoS_use_case', 'w')

    stix21_object_list_DOS = list()

    print('\nUSE CASE 2 -- DoS Attack:\n')

    imported_stix21_data = import_static_stix21_data()
    imported_sro_list = imported_stix21_data[1]

    print('\n-------------------------------------------')

    get_static_mitm_sco_list()

    print('-------------------------------------------\n')

    converted_logs_DOS1 = convert_log_entries(
        import_simulation_output(import_path, "use_case_2_plc.log"))
    converted_logs_DOS2 = convert_log_entries(
        import_simulation_output(import_path, "use_case_2_hmi.log"))
    converted_pcap_DOS = convert_pcap_frames(
        import_simulation_output(import_path,
                                 "use_case_2_network_traffic.json"))

    print('')

    print(get_all_ip_addr(converted_logs_DOS1))
    print('')

    pretty_print_list(get_timespan(converted_logs_DOS1))
    print('')

    print(get_all_severity_level(converted_logs_DOS1))
    print('')

    print(get_all_ip_addr(converted_logs_DOS2))
    print('')

    pretty_print_list(get_timespan(converted_logs_DOS2))
    print('')

    print(get_all_severity_level(converted_logs_DOS2))

    print('\n-------------------------------------------\n')

    print('Generated STIX2.1 SCOs from log entries:')

    ip1dos = converted_logs_DOS1[0].generate_ipv4_addr()
    ip2dos = converted_logs_DOS2[0].generate_ipv4_addr()
    process_dos = converted_logs_DOS1[0].generate_process()
    stix21_object_list_DOS.append(ip1dos)
    stix21_object_list_DOS.append(ip2dos)
    stix21_object_list_DOS.append(process_dos)

    print(ip1dos, ip2dos, process_dos)

    print('\n-------------------------------------------\n')

    pretty_print_list(get_timespan(converted_pcap_DOS))
    print('')

    print(get_all_protocols(converted_pcap_DOS))

    print('\nDisplaying the last 10 pcap entries:')
    for element in converted_pcap_DOS[-10:]:
        print(element)

    ack_traffic = list()
    for element in converted_pcap_DOS:
        if element.eth_src == '00:00:00:00:00:02' or element.eth_dst == '00:00:00:00:00:02':
            ack_traffic.append(element)

    print('\nDisplaying 5 SYN/ACK pcap entries:')
    pretty_print_list(ack_traffic[:5])

    print('\n-------------------------------------------\n')

    mac3_dos = converted_pcap_DOS[0].generate_mac_addr('src')
    stix21_object_list_DOS.append(mac3_dos)
    mac1_dos = converted_pcap_DOS[0].generate_mac_addr('dst')
    stix21_object_list_DOS.append(mac1_dos)
    mac2_dos = ack_traffic[0].generate_mac_addr('dst')
    stix21_object_list_DOS.append(mac2_dos)

    print('Generated STIX2.1 MAC addresses from pcap frames:')
    print(mac1_dos, mac2_dos, mac3_dos)

    print(
        '\nGenerated STIX2.1 network traffic SCOs from pcap frames (excerpt shown):'
    )

    network_traffic_DOS_list = list()
    for element in converted_pcap_DOS[:100]:
        network_traffic_DOS = element.generate_network_traffic(
            stix21_object_list_DOS)
        network_traffic_DOS_list.append(network_traffic_DOS)
        stix21_object_list_DOS.append(network_traffic_DOS)

    pretty_print_list(network_traffic_DOS_list[:5])

    print('\n-------------------------------------------\n')
    get_static_stix21_objects_dos_round_1()

    print('\n-------------------------------------------\n')
    ip2dos_updated = IPv4Address(id=ip2dos.id,
                                 value=ip2dos.value,
                                 resolves_to_refs=[mac2_dos.id, mac3_dos.id])
    stix21_object_list_DOS.remove(ip2dos)
    stix21_object_list_DOS.append(ip2dos_updated)

    print('Updated IPv4 address object (embedded relationship):\n{}\n'.format(
        ip2dos_updated))

    print('Custom selected and generated Infrastructure SDO and related SROs:')
    infrastructure_dos = Infrastructure(
        name='Conveyor belt digital twin',
        description=
        "Digital twin representing a conveyor belt with HMI and PLC. Target of the conducted attack"
    )
    stix21_object_list_DOS.append(infrastructure_dos)

    rel_infra_ip1_dos = Relationship(source_ref=infrastructure_dos,
                                     relationship_type='consists-of',
                                     target_ref=ip1dos)
    rel_infra_ip2_dos = Relationship(source_ref=infrastructure_dos,
                                     relationship_type='consists-of',
                                     target_ref=ip2dos_updated)
    rel_infra_process_dos = Relationship(source_ref=infrastructure_dos,
                                         relationship_type='consists-of',
                                         target_ref=process_dos)
    stix21_object_list_DOS.append(rel_infra_ip1_dos)
    stix21_object_list_DOS.append(rel_infra_ip2_dos)
    stix21_object_list_DOS.append(rel_infra_process_dos)

    print(infrastructure_dos, rel_infra_ip1_dos, rel_infra_ip2_dos,
          rel_infra_process_dos)

    print(
        '\nCustom generated Observed Data for IP addresses and spoofed SYN-flooding traffic:'
    )

    observed_data1_dos = ObservedData(
        first_observed=converted_logs_DOS2[0].timestamp,
        last_observed=converted_logs_DOS2[-1].timestamp,
        number_observed=1,
        object_refs=[ip2dos_updated]  # duplicate IP
    )
    stix21_object_list_DOS.append(observed_data1_dos)

    nw_traffic_dos_id_list = list()
    for element in network_traffic_DOS_list:
        nw_traffic_dos_id_list.append(element.id)

    observed_data2_dos = ObservedData(
        first_observed=converted_pcap_DOS[0].timestamp,
        last_observed=converted_pcap_DOS[len(converted_pcap_DOS) -
                                         1].timestamp,
        number_observed=100,
        object_refs=nw_traffic_dos_id_list  # SYN traffic excerpt
    )
    stix21_object_list_DOS.append(observed_data2_dos)

    print(observed_data1_dos, observed_data2_dos)

    print('\n-------------------------------------------\n')

    search_list1 = search_stix21_objects(imported_sro_list, "observed-data",
                                         'direct')
    print('The following direct relationships exist for Observed Data:')
    for entry in search_list1:
        print(entry)

    print(
        'Custom generated Indicators and relationships between Observed Data and Indicators:\n'
    )

    lhs1 = ObjectPath("ipv4-addr", ["resolves_to_refs[0]"])
    lhs1b = ObjectPath("ipv4-addr", ["resolves_to_refs[1]"])
    ob1 = EqualityComparisonExpression(lhs1,
                                       StringConstant('00:00:00:00:00:03'),
                                       True)
    ob1b = EqualityComparisonExpression(lhs1b,
                                        StringConstant('00:00:00:00:00:03'))
    pattern1_dos = ObservationExpression(AndBooleanExpression([ob1, ob1b]))

    indicator1_dos = Indicator(
        name='Spoofing indicator - duplicate IP address',
        description='IP address resolves to two different MAC addresses',
        pattern=pattern1_dos,
        pattern_type='stix',
        valid_from=datetime.datetime.now())
    stix21_object_list_DOS.append(indicator1_dos)

    print(indicator1_dos)

    lhs2 = ObjectPath('network-traffic', ['src_ref'])
    ob2 = EqualityComparisonExpression(lhs2,
                                       StringConstant('00:00:00:00:00:03'))
    lhs2a = ObjectPath('network-traffic', ['dst_ref'])
    ob2a = EqualityComparisonExpression(lhs2a,
                                        StringConstant('00:00:00:00:00:01'))
    lhs2b = ObjectPath('network-traffic', ['protocols[1]'])
    ob2b = EqualityComparisonExpression(lhs2b, StringConstant('tcp'))
    obe2 = ObservationExpression(AndBooleanExpression([ob2, ob2a, ob2b]))
    pattern2_dos = QualifiedObservationExpression(
        QualifiedObservationExpression(obe2, RepeatQualifier(100)),
        WithinQualifier(1))

    indicator2_dos = Indicator(
        name='SYN flooding indicator',
        description=
        'Highly repetitive tcp network traffic originating from malicious MAC address',
        pattern=pattern2_dos,
        pattern_type='stix',
        valid_from=datetime.datetime.now())
    stix21_object_list_DOS.append(indicator2_dos)

    print(indicator2_dos)

    rel_indicator_observed1_dos = Relationship(source_ref=indicator1_dos,
                                               relationship_type='based-on',
                                               target_ref=observed_data1_dos)
    rel_indicator_observed2_dos = Relationship(source_ref=indicator2_dos,
                                               relationship_type='based-on',
                                               target_ref=observed_data2_dos)

    stix21_object_list_DOS.append(rel_indicator_observed1_dos)
    stix21_object_list_DOS.append(rel_indicator_observed2_dos)

    print(rel_indicator_observed1_dos, rel_indicator_observed2_dos)

    print('\n-------------------------------------------\n')
    print(
        'Custom generated Attack Pattern, Tool and additional relationships:')

    attack_pattern_dos = AttackPattern(
        name='DoS SYN flooding attack',
        description=
        'The attacker executes a Denial of Service attack with TCP SYN requests consuming the resources of'
        ' its target',
        external_references=[
            ExternalReference(source_name='capec', external_id='CAPEC-125'),
            ExternalReference(source_name='capec', external_id='CAPEC-482')
        ],
        kill_chain_phases=KillChainPhase(
            kill_chain_name='lockheed-martin-cyber-kill-chain',
            phase_name='actions-on-objective'))
    stix21_object_list_DOS.append(attack_pattern_dos)

    tool_dos = Tool(name='hping3')
    stix21_object_list_DOS.append(tool_dos)

    print(attack_pattern_dos, tool_dos)

    rel_indicator_attack1_dos = Relationship(source_ref=indicator1_dos,
                                             relationship_type='indicates',
                                             target_ref=attack_pattern_dos)
    rel_indicator_attack2_dos = Relationship(source_ref=indicator2_dos,
                                             relationship_type='indicates',
                                             target_ref=attack_pattern_dos)
    rel_attack_tool_dos = Relationship(source_ref=attack_pattern_dos,
                                       relationship_type='uses',
                                       target_ref=tool_dos)
    stix21_object_list_DOS.append(rel_indicator_attack1_dos)
    stix21_object_list_DOS.append(rel_indicator_attack2_dos)
    stix21_object_list_DOS.append(rel_attack_tool_dos)

    print(rel_indicator_attack1_dos, rel_indicator_attack2_dos,
          rel_attack_tool_dos)

    DOS_id_list = list()
    for element in stix21_object_list_DOS:
        DOS_id_list.append(element.id)

    print('\n-------------------------------------------\n')
    print('Generated Report for the Digital Twin DoS simulation use case:')

    report_DOS = Report(
        name='Digital Twin based DoS attack simulation with SYN flooding',
        description=
        'This report describes a simulated DoS attack on a conveyor belt using a digital twin in'
        ' simulation mode. The attack is based on repeatedly spoofed TCP traffic.',
        published=datetime.datetime.now(),
        object_refs=DOS_id_list)
    stix21_object_list_DOS.append(report_DOS)

    print(report_DOS)

    bundle_DOS = Bundle(objects=stix21_object_list_DOS)

    print('\n-------------------------------------------')

    mem = MemoryStore()
    mem.add(bundle_DOS)
    # mem.save_to_file(export_path+'STIX21_output_DoS_use_case.json')

    print('-------------------------------------------')
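    # A hedged round-trip sketch (path is illustrative): persist the assembled
    # bundle, mirroring the commented-out save_to_file call above, and verify
    # that it can be re-loaded into a fresh MemoryStore.
    report_path = export_path + 'STIX21_output_DoS_use_case.json'
    mem.save_to_file(report_path)

    reloaded = MemoryStore()
    reloaded.load_from_file(report_path)
    print('Persisted and re-loaded {} STIX objects'.format(len(reloaded.query())))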