def dupehash(cls, apiobj):
            """Return a duplicate-detection hash string ("certuk:<digest>") for *apiobj*.

            Strategy depends on the observable's XSI type:
            - type listed in OBS_HASH_PATHS: sha1 over selected property values
              joined with the short name and type;
            - type known but unlisted: dict-hash over all properties (with the
              'xsi:type' key scrubbed so equal content hashes equally);
            - type undeterminable: defer to the superclass implementation.
            """
            properties = rgetattr(apiobj, ['object_', 'properties'], None)
            obs_type = rgetattr(properties, ['_XSI_TYPE'], None)
            if obs_type is not None:
                if obs_type in OBS_HASH_PATHS:
                    # we can produce a meaningful hash for selected properties
                    to_hash = "%s|%s|%s" % (cls.SHORT_NAME, obs_type, '|'.join(
                        [
                            str(rgetattr(properties,
                                         str(path).split('.'), ''))
                            for path in OBS_HASH_PATHS[obs_type]
                        ]))
                    hash_ = "certuk:%s" % hashlib.sha1(to_hash).hexdigest()
                else:
                    # we can't produce a meaningful hash on selected properties, so hash them all
                    to_hash = scrubcopy(
                        nested_get(apiobj.to_dict(), ['object', 'properties'],
                                   {}), ['xsi:type'])
                    hash_ = "certuk:%s" % dicthash_sha1(to_hash,
                                                        salt=cls.SHORT_NAME)
            else:
                # we don't know what this is, so let the existing code deal with it
                hash_ = super(DBObservablePatch, cls).dupehash(apiobj)

            return hash_
 def summary_value_generator(self, obj):
     """Return the user-agent string of the first HTTP request/response.

     Falls back to '(undefined)' when the observable has no
     http_request_response entries (previously this raised
     TypeError/IndexError on a missing or empty list) or when the
     user_agent header is absent.
     """
     responses = rgetattr(
         obj, ['_object', 'properties', 'http_request_response'])
     if not responses:
         # no request/response entries at all -> use the same placeholder
         # that a missing user_agent header would produce
         return '(undefined)'
     value = rgetattr(responses[0], [
         'http_client_request', 'http_request_header', 'parsed_header',
         'user_agent'
     ], '(undefined)')
     return str(value)
 def summary_value_generator(self, obj):
     """Summarise a network connection as its source and destination
     socket addresses, each rendered via get_socket_summary."""
     base_path = ['_object', 'properties']
     source = self.get_socket_summary(
         rgetattr(obj, base_path + ['source_socket_address']))
     destination = self.get_socket_summary(
         rgetattr(obj, base_path + ['destination_socket_address']))
     return ("Source Socket Address: " + source +
             ": Destination Socket Address: " + destination)
 def to_draft_handler(self, observable, tg, load_by_id, id_ns=''):
     """Build the draft dict for an HTTP Session observable."""
     draft = {}
     draft['objectType'] = 'HTTP Session'
     draft['id'] = rgetattr(observable, ['id_'], '')
     draft['id_ns'] = id_ns
     draft['title'] = rgetattr(observable, ['title'], '')
     draft['description'] = str(rgetattr(observable, ['description'], ''))
     # the user agent doubles as the draft's summary value
     draft['user_agent'] = self.summary_value_generator(observable)
     return draft
def _package_objects_to_consider(contents, local, type_, property_):
    """Select package contents of the given type, in the requested namespace,
    that have a non-empty value at *property_*.

    :param contents: dict of id -> inbox item
    :param local: truthy to keep only LOCAL_NAMESPACE objects, falsy for the rest
    :param type_: required api_object type string
    :param property_: property path (list) that must yield a non-empty value
    :return: dict of id -> list of matching inbox items
    """
    ids_to_objects_to_consider = {}
    for id_, io in contents.iteritems():
        # `io` IS contents[id_]; the original re-fetched it three times via
        # contents.get(id_, None), which was redundant work inside the loop.
        is_local = rgetattr(io, ['api_object', 'obj', 'id_ns'], '') == LOCAL_NAMESPACE
        correct_ns = is_local if local else (not is_local)
        if not correct_ns:
            continue
        if (rgetattr(io, ['api_object', 'ty'], '') == type_ and
                len(rgetattr(io, property_, '')) > 0):
            ids_to_objects_to_consider.setdefault(id_, []).append(io)
    return ids_to_objects_to_consider
    def get_socket(self, socket_object):
        """Extract ip address, hostname, port and protocol from a socket
        address object into a dict of strings (empty string when absent)."""
        field_paths = {
            "ip_address": ['ip_address', 'address_value'],
            "hostname": ['hostname', 'hostname_value'],
            "port": ['port', 'port_value'],
            "protocol": ['port', 'layer4_protocol'],
        }
        return {field: str(rgetattr(socket_object, path, ''))
                for field, path in field_paths.iteritems()}
def _merge_properties(api_object, id_, count, additional_file_hashes):
    """Accumulate *count* onto the object's sighting count, and back-fill
    any file hashes recorded for *id_* that the object lacks."""
    api_object.obj.sighting_count = _get_sighting_count(api_object.obj) + count
    extra_hashes = additional_file_hashes.get(id_)
    if extra_hashes is None:
        return
    file_properties = rgetattr(api_object, ['obj', 'object_', 'properties'], None)
    for hash_type, hash_value in extra_hashes.iteritems():
        # only fill in hashes that are absent - never overwrite existing values
        if getattr(file_properties, hash_type, None) is None:
            setattr(file_properties, hash_type, hash_value)
# Esempio n. 8 (scraper artifact: "Example no. 8")
# 0
    def to_draft(cls, inc, tg, load_by_id, id_ns=''):
        """Convert an incident into an editable draft dict.

        Extends the base-class draft with responders, coordinators,
        categories, timezone-normalised time values, external ids and
        (re-patched) markings/tlp.
        """
        draft = super(DBIncidentPatch, cls).to_draft(inc, tg, load_by_id, id_ns)
        if 'responder' in draft:  # fix unbalanced save / load keys in incident.py
            del draft['responder']

        draft['responders'] = [EdgeInformationSource.clone(responder).to_draft() for responder in inc.responders]
        draft['coordinators'] = [EdgeInformationSource.clone(coordinator).to_draft() for coordinator in
                                 inc.coordinators]

        draft['categories'] = [c.value for c in rgetattr(inc, ['categories'], [])]
        if inc.time:
            draft['time'] = inc.time.to_dict()
            # time entries may be plain strings or dicts carrying a 'value'
            # key; both forms are converted to the configured timezone
            for key, value in draft.get('time').iteritems():
                if isinstance(value, basestring):
                    new_value = DBIncidentPatch.convert_to_and_strip_config_timezone(value)
                    draft.get('time')[key] = new_value
                else:
                    value['value'] = DBIncidentPatch.convert_to_and_strip_config_timezone(value['value'])

        if inc.external_ids:
            draft['external_ids'] = []
            for ex_id in inc.external_ids:
                draft['external_ids'].append({'source': ex_id.source, 'id': ex_id.value})

        # Redoing to use correct patched function, can't guarantee correct function if monkey patched
        draft["markings"] = DBIncidentPatch.handling_to_draft(inc, "statement")
        draft["tlp"] = DBIncidentPatch.handling_to_draft(inc, "color")

        return draft
    class DBObservablePatch(DBObservable):
        """DBObservable with draft clean-ups (stray 'None' strings, hash-type
        casing) plus custom draft handlers for object types the stock
        to_draft implementation rejects."""

        def __init__(self, obj=None, item=None, id_=None):
            super(DBObservablePatch, self).__init__(obj, item, id_)

        # maps XSI object type name -> custom to_draft handler callable
        CUSTOM_DRAFT_HANDLERS = custom_draft_handler_map

        @staticmethod
        def _get_custom_to_draft_handler(object_type):
            # Unknown object types are a hard error rather than a silent skip.
            try:
                return DBObservablePatch.CUSTOM_DRAFT_HANDLERS[object_type]
            except KeyError:
                raise ValueError("Unexpected Object Type %s" % object_type)

        @classmethod
        def to_draft(cls, observable, tg, load_by_id, id_ns=''):
            # Try the stock draft conversion first; a plain ValueError from it
            # signals "unsupported type" and falls through to the custom
            # per-type handlers after the try block.
            try:
                draft = super(DBObservablePatch, cls).to_draft(observable,
                                                               tg,
                                                               load_by_id,
                                                               id_ns=id_ns)
                # When creating a draft from a fully formed object with None for some fields, the draft contains
                # fields with string 'None' which can cause parsing issues e.g. vlan_num
                for attr, value in draft.iteritems():
                    if value == 'None':
                        draft[attr] = ''

                # to_draft sets the hash_type to lower case. This is not what is expected during Inboxing or Validation!
                hash_type_map = {
                    'md5': 'MD5',
                    'md6': 'MD6',
                    'sha1': 'SHA1',
                    'sha224': 'SHA224',
                    'sha256': 'SHA256',
                    'sha384': 'SHA384',
                    'sha512': 'SHA512',
                    'ssdeep': 'SSDEEP',
                    'other': 'Other'
                }

                if 'hashes' in draft:
                    for hash_ in draft['hashes']:
                        hash_['hash_type'] = hash_type_map.get(
                            hash_['hash_type'].lower(), 'Other')

                return draft
            except ValueError, v:
                # Re-raise ValueError *subclasses* untouched; only an exact
                # ValueError means the stock path could not handle the type.
                if v.__class__ != ValueError:
                    raise

            object_type = rgetattr(observable,
                                   ['_object', '_properties', '_XSI_TYPE'],
                                   'None')
            draft_handler = DBObservablePatch._get_custom_to_draft_handler(
                object_type)
            return draft_handler(observable, tg, load_by_id, id_ns)
# Esempio n. 10 (scraper artifact: "Example no. 10")
# 0
    def to_draft(cls, indicator, tg, load_by_id, id_ns=''):
        """Convert an indicator to a draft dict, adding the first kill-chain
        phase and any Snort/Yara test mechanisms to the base draft."""
        draft = super(DBIndicatorPatch, cls).to_draft(indicator, tg,
                                                      load_by_id, id_ns)
        phases = rgetattr(indicator, ['kill_chain_phases'])
        if phases:
            # only the first phase is surfaced in the draft
            draft['kill_chain_phase'] = rgetattr(phases[0], ['phase_id'], '')

        mechanisms = rgetattr(indicator, ['test_mechanisms'])
        if mechanisms:
            converters = ((SnortTestMechanism, 'Snort'),
                          (YaraTestMechanism, 'Yara'))
            for mechanism in mechanisms:
                for mech_class, type_name in converters:
                    if isinstance(mechanism, mech_class):
                        entry = mech_class.to_dict(mechanism)
                        entry['type'] = type_name
                        draft.setdefault('test_mechanisms', []).append(entry)
        return draft
# Esempio n. 11 (scraper artifact: "Example no. 11")
# 0
def dedup(collection, idref_path='item.idref'):
    """In-place removal of duplicate entries from *collection*, keyed by the
    value found at *idref_path*, preserving first-seen order.

    :param collection: mutable sequence (supports item assignment and
        slice deletion); may be None or empty, in which case nothing happens
    :param idref_path: dotted attribute path used to extract the dedup key
    """
    # `not collection` already covers both None and the empty case;
    # the original's extra `len(collection) == 0` test was redundant.
    if not collection:
        return
    # split the path once instead of on every iteration
    path = idref_path.split('.')
    seen = set()
    write_idx = 0
    for edge in collection:
        idref = rgetattr(edge, path)
        if idref not in seen:
            seen.add(idref)
            # compact kept entries towards the front of the sequence
            collection[write_idx] = edge
            write_idx += 1
    # drop the now-stale tail
    del collection[write_idx:]
 def to_draft_handler(self, observable, tg, load_by_id, id_ns=''):
     """Build the draft dict for a Network Connection observable."""
     base_path = ['_object', 'properties']
     source = self.get_socket(
         rgetattr(observable, base_path + ['source_socket_address']))
     destination = self.get_socket(
         rgetattr(observable, base_path + ['destination_socket_address']))
     return {
         'objectType': 'Network Connection',
         'id': rgetattr(observable, ['id_'], ''),
         'id_ns': id_ns,
         'title': rgetattr(observable, ['title'], ''),
         'description': str(rgetattr(observable, ['description'], '')),
         'source_socket_address': source,
         'destination_socket_address': destination,
     }
def _generate_map_table_on_hash(contents, hashes, type_):
    """Group ids of the given type by hash and, for each hash shared by
    more than one id, map every duplicate id to the master id (the one
    with the lowest TLP)."""
    hash_to_ids = {}
    # sorted() makes master selection deterministic across runs
    for id_, hash_ in sorted(hashes.iteritems()):
        if rgetattr(contents.get(id_, None), ['api_object', 'ty'], '') == type_:
            hash_to_ids.setdefault(hash_, []).append(id_)

    map_table = {}
    for ids in hash_to_ids.itervalues():
        if len(ids) < 2:
            continue
        master = _get_id_with_lowest_tlp(contents, ids)
        for dup in ids:
            if dup != master:
                map_table[dup] = master
    return map_table
# Esempio n. 14 (scraper artifact: "Example no. 14")
# 0
 def handling_to_draft(construct, structure):
     """Collect the values of the requested marking attribute (*structure*)
     from all of the construct's handling marking structures.

     Marking structures that carry a `marking_model_name` are handling
     caveats; their `statement` is collected only when *structure* is
     "handling_caveat", keeping caveats out of ordinary markings.
     """
     results = []
     handling = rgetattr(construct, ['handling'], '')
     if handling:
         for marking in handling.markings:
             for marking_structure in marking.marking_structures:
                 if getattr(marking_structure, 'marking_model_name', None):
                     # stops handling caveats leaking into plain markings even
                     # though both live in simple marking structures
                     if structure == "handling_caveat":
                         results.append(getattr(marking_structure, 'statement', None))
                     continue
                 value = getattr(marking_structure, structure, None)
                 if value:
                     results.append(value)
     return results
# Esempio n. 15 (scraper artifact: "Example no. 15")
# 0
 def get_additional_file_hashes(original, duplicates):
     """Gather file hashes from *duplicates* of the file observable
     *original*, returning a dict keyed by the original's id."""
     additional_file_hashes = {}
     api_object = EdgeObject.load(original).to_ApiObject()
     if rgetattr(api_object, STIXDedup.PROPERTY_TYPE, None) != 'FileObjectType':
         # only file observables carry hashes worth merging
         return additional_file_hashes
     for dup in duplicates:
         try:
             api_obj, tlp, esms, etou = STIXDedup.load_eo(dup)
         except EdgeError:
             # a duplicate that fails to load is simply skipped
             continue
         inbox_item = InboxItem(api_object=api_obj,
                                etlp=tlp,
                                esms=esms,
                                etou=etou)
         add_additional_file_hashes(inbox_item, additional_file_hashes,
                                    original)
     return additional_file_hashes
def add_additional_file_hashes(io, additional_file_hashes, existing_id):
    """Record any file hashes present on inbox item *io* under
    additional_file_hashes[existing_id], without overwriting hashes
    already recorded there.

    Only acts when *io* is a FileObjectType observable.
    """
    def add_missing_file_hash(inbox_object, file_hashes, property_name):
        # the last segment of the property path names the hash type (e.g. 'md5')
        hash_type = property_name[-1]
        if hash_type not in file_hashes:
            hash_value = rgetattr(inbox_object, property_name, None)
            if hash_value is not None:
                file_hashes[hash_type] = hash_value

    if rgetattr(io, PROPERTY_TYPE, None) == 'FileObjectType':
        # setdefault replaces the original's explicit membership-check/init;
        # the six copy-pasted calls are collapsed into one loop
        file_hashes = additional_file_hashes.setdefault(existing_id, {})
        for property_name in (PROPERTY_MD5, PROPERTY_SHA1, PROPERTY_SHA224,
                              PROPERTY_SHA256, PROPERTY_SHA384,
                              PROPERTY_SHA512):
            add_missing_file_hash(io, file_hashes, property_name)
def _add_matching_file_observables(db, map_table, contents):
    """Extend *map_table* with file observables from *contents* that match
    file observables already in the database.

    :param db: database handle passed through to _find_matching_db_file_obs
    :param map_table: dict of new id -> existing id, updated in place
    :param contents: dict of id -> inbox item
    """
    # identify file observables in contents excluding any which are already in map_table
    new_file_obs = {id_: inbox_item for (id_, inbox_item) in contents.iteritems()
                    if inbox_item.api_object.ty == 'obs' and
                    id_ not in map_table and  # exclude perfect matches which have already been discovered via data hash
                    rgetattr(inbox_item, PROPERTY_TYPE, 'Unknown') == 'FileObjectType'}
    if not new_file_obs:
        # if we have no new file observables, we can bail out
        return

    existing_file_obs = _find_matching_db_file_obs(db, new_file_obs)

    for existing_file in existing_file_obs:
        existing_file_obj = EdgeObject(existing_file).to_ApiObject()
        for (new_id, new_file) in new_file_obs.iteritems():
            if new_id in map_table:
                # this observable was matched by an earlier existing file;
                # skip just this one and keep scanning the rest (was `break`,
                # which wrongly abandoned all remaining candidates for the
                # current existing file)
                continue
            if _is_matching_file(existing_file_obj, new_file.api_object):
                map_table[new_id] = existing_file['_id']
 def matches(existing, new, property_path):
     """Return True when both objects hold an equal, non-None value at the
     given property path."""
     # NOTE: need to ignore the `api_object` part of the property path here - hence `[1:]`
     trimmed_path = property_path[1:]
     existing_value = rgetattr(existing, trimmed_path, None)
     new_value = rgetattr(new, trimmed_path, None)
     if existing_value is None or new_value is None:
         return False
     return existing_value == new_value
 def extract_properties(inbox_items, property_path):
     """Return the distinct (stringified) values found at *property_path*
     across all inbox items, ignoring items where the value is absent.

     The original evaluated rgetattr twice per item (once for the filter,
     once for the value); this version resolves each path exactly once.
     """
     values = (rgetattr(item, property_path, None)
               for item in inbox_items.itervalues())
     return list({str(value) for value in values if value is not None})
 def custom_get_obs_value(obj):
     """Dispatch observable value extraction on the object's XSI type,
     deferring to the stock implementation for unregistered types."""
     xsi_type = rgetattr(obj, ['object_', 'properties', '_XSI_TYPE'], None)
     handler = custom_object_value_map.get(xsi_type)
     if handler is not None:
         return handler(obj)
     # no custom handler registered for this type: use the original behaviour
     return original_get_obs_value(obj)
 def add_missing_file_hash(inbox_object, file_hashes, property_name):
     """Record the hash named by the path's last segment into *file_hashes*,
     but only when that hash type is not already present."""
     hash_type = property_name[-1]
     if hash_type in file_hashes:
         # never overwrite a hash we already hold
         return
     hash_value = rgetattr(inbox_object, property_name, None)
     if hash_value is not None:
         file_hashes[hash_type] = hash_value
# Esempio n. 22 (scraper artifact: "Example no. 22")
# 0
def collapse_nested_values(value):
    """Return `value.value` when present, otherwise *value* unchanged.

    Flattens wrapper objects that carry their payload in a `value`
    attribute; plain values pass through untouched (rgetattr's third
    argument is the fallback).
    """
    return rgetattr(value, ['value'], value)