def clone_direct(request, id_):
    stix_id = id_
    try:
        if stix_id:
            edge_object = EdgeObject.load(stix_id, request.user.filters())
            if edge_object.ty == 'obs':
                return error_with_message(request, "Observables cannot be cloned")
            new_id = IDManager().get_new_id(edge_object.ty)
            draft = to_draft_wrapper(edge_object)
            draft['id'] = new_id
            draft['id_ns'] = LOCAL_NS
            Draft.upsert(edge_object.ty, draft, request.user)
            return redirect('/' + TYPE_TO_URL[edge_object.ty] + '/build/' + new_id, request)
        return error_with_message(
            request,
            "No clonable object found; please only choose "
            "the clone option from an object's summary or external publish page")
    except Exception as e:
        ext_ref_error = "not found"
        if e.message.endswith(ext_ref_error):
            return error_with_message(
                request,
                "Unable to load object as some external references were not found: "
                + e.message[0:-len(ext_ref_error)])
        else:
            return error_with_message(request, e.message)
def review_set_handling(request, data):
    try:
        edge_object = EdgeObject.load(data["rootId"])
        generic_object = edge_object.to_ApiObject()
        generic_object.obj.timestamp = datetime.now(tz.tzutc())
        append_handling(generic_object, data["handling"])
        ip = InboxProcessorForBuilders(user=request.user)
        ip.add(InboxItem(api_object=generic_object, etlp=edge_object.etlp))
        ip.run()
        return {'message': '', 'state': 'success', 'success': True}
    except InboxError as e:
        log_error(e, 'adapters/review/handling', 'Failed to set Handling')
        return {'message': e.message, 'state': 'error', 'success': False}
def load_eo(id_):
    eo = EdgeObject.load(id_)
    tlp = eo.etlp if hasattr(eo, 'etlp') else 'NULL'
    esms = eo.esms if hasattr(eo, 'esms') else []
    etou = eo.etou if hasattr(eo, 'etou') else []
    api_obj = eo.to_ApiObject()
    return api_obj, tlp, esms, etou
def __validate_observables(observables):
    validation_results = {}
    dummy_id = 1
    for observable in observables:
        id_ = observable.get('id')
        object_type = observable.get('objectType')
        if not id_:
            # No id, so we can safely assume this is something from the builder...
            observable_properties = ObservableStructureConverter.builder_to_simple(
                object_type, observable)
            validation_info = ObservableValidator.validate(**observable_properties)
            validation_results['Observable ' + str(dummy_id)] = validation_info.validation_dict
        else:
            namespace_validation = NamespaceValidationInfo.validate(r'obs', id_)
            if namespace_validation.is_local():
                real_observable = EdgeObject.load(id_)
                as_package, _ = real_observable.capsulize('temp')
                validation_info = PackageValidationInfo.validate(as_package)
                validation_results.update(validation_info.validation_dict)
            else:
                validation_results.update({id_: namespace_validation.validation_dict})
        dummy_id += 1
    return validation_results
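# A hypothetical sketch (not taken from the codebase) of the two observable shapes the
# validator above distinguishes: builder drafts carry no 'id' and are validated
# field-by-field, while previously saved observables carry a STIX id and are validated
# via their capsulized package. Field names other than 'id' and 'objectType' are
# illustrative assumptions only.
example_observables = [
    {'objectType': 'Address', 'address_value': '203.0.113.7'},  # builder draft, no id
    {'id': 'example:observable-00000000-0000-0000-0000-000000000000'},  # existing object
]
# Passed to the validator above, the draft would be keyed as 'Observable 1' in the
# results, while the existing observable would be keyed by its id (or by the ids of
# its package contents, if the id is local).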
def iterate_draft(draft_object, bl_ids, id_matches, hide_edge_ids, show_edge_ids, hidden_ids, request):
    def create_draft_observable_id(obs):
        d = create_draft_obs_hash(obs)
        return draft_object['id'].replace('indicator', 'observable') + DRAFT_ID_SEPARATOR + d

    def create_draft_obs_node(obs_id, title):
        summary = {'title': title, 'type': 'obs', 'value': '', '_id': obs_id, 'cv': '', 'tg': '',
                   'data': {'idns': '', 'etlp': '', 'summary': {'title': title}, 'hash': '', 'api': ''},
                   'created_by_organization': ''}
        return EdgeObject(summary)

    def create_draft_ind_node(ind_id, title):
        summary = {'title': title, 'type': 'ind', 'value': '', '_id': ind_id, 'cv': '', 'tg': '',
                   'data': {'idns': '', 'etlp': '', 'summary': {'title': title}, 'hash': '', 'api': ''},
                   'created_by_organization': ''}
        return EdgeObject(summary)

    stack = []
    for observable in draft_object['observables']:
        obs_id = observable['id'] if 'id' in observable else create_draft_observable_id(observable)
        if obs_id not in hidden_ids:
            if DRAFT_ID_SEPARATOR in obs_id:
                stack.append(
                    (1, 0, create_draft_obs_node(obs_id, observable_to_name(observable, True)), REL_TYPE_DRAFT))
            else:
                try:
                    stack.append((1, 0, EdgeObject.load(obs_id, request.user.filters()), REL_TYPE_EDGE))
                except:
                    stack.append((1, 0, create_external_reference_from_id(obs_id), REL_TYPE_EXT))
    stack.append((0, None, create_draft_ind_node(draft_object['id'], draft_object['title']), REL_TYPE_DRAFT))
    return create_graph(stack, bl_ids, id_matches, hide_edge_ids, show_edge_ids, hidden_ids, request)
def _edges_of_master(contents, master_id):
    if master_id in contents:
        return [e.idref for e in contents[master_id].api_object.edges()]
    else:
        return [e.idref for e in EdgeObject.load(master_id).to_ApiObject().edges()]
def _map_id_to_tlp(contents, map_table):
    tlp_levels = {}
    for dup, original in map_table.iteritems():
        dup_tlp = contents[dup].etlp
        original_tlp = EdgeObject.load(original).etlp
        if TLP_MAP[dup_tlp] < TLP_MAP[original_tlp]:
            tlp_levels[original] = dup_tlp
        else:
            tlp_levels[original] = original_tlp
    return tlp_levels
def observable_extract(request, output_format, obs_type_filter, id_, revision):
    revision = "latest"  # override, as it is not clear that using the requested revision makes sense here

    def text_writer(value, obs_type):
        if obs_type == obs_type_filter or obs_type_filter == "all":
            return value + os.linesep
        return ""

    def snort_writer(value, obs_type):
        if obs_type == obs_type_filter or obs_type_filter == "all":
            snort_val = generate_snort(value, obs_type, id_.split(':', 1)[1].split('-', 1)[1])
            if snort_val:
                return snort_val + os.linesep
        return ""

    def not_implemented_writer(*args):
        return ""

    result = ""
    if output_format == "text":
        writer = text_writer
    elif output_format == "SNORT":
        writer = snort_writer
    else:
        writer = not_implemented_writer
        result = "%s not implemented" % output_format

    # Walk the object graph from the requested id, emitting each leaf observable once.
    stack = [id_]
    history = set()
    while stack:
        node_id = stack.pop()
        if node_id in history:
            continue
        history.add(node_id)
        try:
            eo = EdgeObject.load(node_id, request.user.filters(),
                                 revision=revision if node_id == id_ else "latest")
        except EdgeError:
            continue
        stack.extend([edge.id_ for edge in eo.edges])
        if eo.ty != 'obs':
            continue
        if "observable_composition" in eo.apidata:
            continue
        result += writer(eo.summary['value'], eo.summary['type'])

    response = HttpResponse(content_type='text/plain')
    response['Content-Disposition'] = 'attachment; filename="%s_%s_%s.txt"' % (output_format, obs_type_filter, id_)
    response.write(result)
    return response
def rehash(timestamp):
    """A script to recalculate all observable data hashes according to CERT requirements
    (can safely be run multiple times)."""
    page_size = 5000
    cert_builder.apply_customizations()
    db = get_db()
    base_query = {
        'type': 'obs',
        'data.summary.type': {'$ne': 'ObservableComposition'}
    }
    if timestamp:
        base_query.update({'created_on': {'$gte': timestamp}})
    cursor = db.stix.find(base_query, {'_id': 1})
    bulk = db.stix.initialize_unordered_bulk_op()
    update_count = 0

    def bulk_execute(bulk):
        try:
            bulk.execute()
        except Exception:
            pass
        return db.stix.initialize_unordered_bulk_op()

    for row in cursor:
        update_count += 1
        stix_id = row['_id']
        eo = EdgeObject.load(stix_id)
        ao = eo.to_ApiObject()
        new_hash = ao.localhash()
        bulk.find({
            '_id': stix_id,
            'data.hash': {'$ne': new_hash}
        }).update({
            '$set': {'data.hash': new_hash}
        })
        if not update_count % page_size:
            bulk = bulk_execute(bulk)
    if update_count % page_size:
        bulk_execute(bulk)
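# A minimal usage sketch, assuming rehash() is invoked from a management shell with the
# Mongo connection already configured and that 'created_on' stores a datetime. The
# cut-off date below is purely illustrative; a falsy timestamp rehashes everything.
from datetime import datetime

rehash(datetime(2016, 1, 1))  # only observables created on or after this date
rehash(None)                  # full rehash of all non-composition observables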
def get_matches(id_, request):
    eo = EdgeObject.load(id_, request.user.filters())
    return [
        doc['_id'] for doc in get_db().stix.find(
            {
                'data.hash': eo.doc['data']['hash'],
                'type': eo.ty,
                '_id': {'$ne': eo.id_}
            },
            {'_id': 1})
    ]
def _update_existing_properties(additional_sightings, additional_file_hashes, user, tlp_levels):
    inbox_processor = InboxProcessorForBuilders(user=user)
    for id_, count in additional_sightings.iteritems():
        edge_object = EdgeObject.load(id_)
        api_object = edge_object.to_ApiObject()
        _merge_properties(api_object, id_, count, additional_file_hashes)
        inbox_processor.add(
            InboxItem(api_object=api_object,
                      etlp=tlp_levels[id_],
                      etou=edge_object.etou,
                      esms=edge_object.esms))
    inbox_processor.run()
def get_additional_file_hashes(original, duplicates):
    additional_file_hashes = {}
    api_object = EdgeObject.load(original).to_ApiObject()
    if rgetattr(api_object, STIXDedup.PROPERTY_TYPE, None) == 'FileObjectType':
        for dup in duplicates:
            try:
                api_obj, tlp, esms, etou = STIXDedup.load_eo(dup)
            except EdgeError:
                continue
            io = InboxItem(api_object=api_obj, etlp=tlp, esms=esms, etou=etou)
            add_additional_file_hashes(io, additional_file_hashes, original)
    return additional_file_hashes
def ajax_incident_timeline(request, id_):
    try:
        edge_object = EdgeObject.load(id_)
    except EdgeError as e:
        return JsonResponse({'message': e.message}, status=400)
    try:
        if edge_object.ty != 'inc':
            return JsonResponse(
                {'message': "Only timelines for Incidents can be viewed"}, status=400)
        if not edge_object.obj.time:
            return JsonResponse({'message': "No times found in this Incident"}, status=400)
        time_dict = edge_object.obj.time.to_dict()
        graph = dict()
        graph['nodes'] = []
        graph['links'] = []
        graph['title'] = "Incident : " + edge_object.obj.title
        graph['tzname'] = datetime.datetime.now(settings.LOCAL_TZ).tzname()
        for key, value in time_dict.iteritems():
            if isinstance(value, basestring):
                time_ms = unix_time_millis(get_local_datetime(time_dict[key]))
            else:
                # stored in value field
                time_ms = unix_time_millis(get_local_datetime(time_dict[key]['value']))
            graph['nodes'].append({"name": PRETTY_TIME_TYPE[key], "date": time_ms})
        return JsonResponse(graph, status=200)
    except Exception as e:
        ext_ref_error = "not found"
        if e.message.endswith(ext_ref_error):
            message = ("Unable to load object as some external references were not found: "
                       + e.message[0:-len(ext_ref_error)])
        else:
            message = e.message
        return JsonResponse({'message': message}, status=500)
def _update_existing_objects(ids_to_references, user, tlp_levels):
    inbox_processor = InboxProcessorForBuilders(user=user)
    for id_, tlp in tlp_levels.iteritems():
        edge_object = EdgeObject.load(id_)
        api_object = edge_object.to_ApiObject()
        if id_ in ids_to_references:
            references = ids_to_references[id_]
            if edge_object.ty == 'ttp':
                _merge_ttps(api_object.obj, references)
            elif edge_object.ty == 'tgt':
                _merge_tgts(api_object.obj, references)
        setattr(api_object.obj, 'timestamp', datetime.datetime.utcnow())
        setattr(api_object.obj, 'tlp', tlp)
        inbox_processor.add(
            InboxItem(api_object=api_object,
                      etlp=tlp,
                      etou=edge_object.etou,
                      esms=edge_object.esms))
    inbox_processor.run()
def move_existing_observables(obs_ids, source_draft_ind, target_draft_ind):
    existing_obs_to_move = [obs for obs in obs_ids if DRAFT_ID_SEPARATOR not in obs]
    obs_to_move = []
    for id_ in existing_obs_to_move:
        try:
            obs_to_move.append(EdgeObject.load(id_).to_draft())
        except EdgeError:
            continue
    target_draft_ind['observables'].extend(obs_to_move)

    new_source_draft_obs = []
    for obs in source_draft_ind['observables']:
        # If the obs doesn't have an ID then this suggests the obs is a draft
        if 'id' not in obs:
            new_source_draft_obs.append(obs)
        elif obs['id'] not in existing_obs_to_move:
            new_source_draft_obs.append(obs)
    source_draft_ind['observables'] = new_source_draft_obs
def capsulize_patch(self, pkg_id, enable_bfs=False):
    contents = []
    pkg = STIXPackage(id_=pkg_id, stix_header=generate_stix_header(self))

    def pkg_dispatch(eo):
        if isinstance(eo.obj, stixbase.DBStixBase):
            PACKAGE_ADD_DISPATCH[eo.ty](pkg, eo.obj._object)
        else:
            PACKAGE_ADD_DISPATCH[eo.ty](pkg, eo.obj)

    if enable_bfs:
        queue = [self.id_]
        completed_ids = set()
        while queue:
            eo_id = queue.pop()
            if eo_id in completed_ids:
                continue
            completed_ids.add(eo_id)
            if self.id_ == eo_id:
                # must do this as self may be a version other than latest
                eo = self
            else:
                try:
                    eo = EdgeObject.load(eo_id, self.filters)
                except EdgeError:
                    continue
            pkg_dispatch(eo)
            contents.append(eo)
            queue.extend([edge.id_ for edge in eo.edges])
    else:
        pkg_dispatch(self)
        contents.append(self)
    return pkg, contents
def child_ID_in_Parent_Edges(self, child_id, parent_id):
    return child_id in [
        edge.idref for edge in EdgeObject.load(parent_id).to_ApiObject().edges()
    ]
def create_graph(stack, bl_ids, id_matches, hide_edge_ids, show_edge_ids, hidden_ids, request):
    def show_edges():
        # relies on rel_type and node_id assigned in the enclosing loop below
        return ((REL_TYPE_BACKLINK != rel_type and REL_TYPE_MATCH != rel_type) or
                (node_id in show_edge_ids)) and (node_id not in hide_edge_ids)

    nodes = []
    links = []

    def create_external_reference_from_edge(edge):
        summary = {
            'title': edge.id_,
            'type': edge.ty,
            'value': '',
            '_id': edge.id_,
            'cv': '',
            'tg': '',
            'data': {
                'idns': '',
                'etlp': '',
                'summary': {'title': edge.id_},
                'hash': '',
                'api': {}
            },
            'created_by_organization': ''
        }
        return EdgeObject(summary)

    def get_node_type():
        return LINK_TO_NODE_TYPE[rel_type]

    id_to_idx = {}
    while stack:
        depth, parent_idx, node, rel_type = stack.pop()
        node_id = node.id_
        if node_id in hidden_ids:
            continue
        node_type = get_node_type()
        is_new_node = node_id not in id_to_idx
        if is_new_node:
            idx = len(nodes)
            id_to_idx[node_id] = idx
            title = node.summary.get("title", None)
            if title is None:
                title = build_title(node)
            if node_type in (NODE_TYPE_EXT, NODE_TYPE_DRAFT):
                backlinks, matches = False, False
            else:
                backlinks, matches = backlinks_exist(node_id), matches_exist(node_id, request)
            nodes.append(
                dict(id=node_id,
                     type=node.ty,
                     title=title,
                     depth=depth,
                     node_type=node_type,
                     has_backlinks=backlinks,
                     has_matches=matches,
                     has_edges=len(node.edges) != 0,
                     edges_shown=show_edges(),
                     matches_shown=node_id in id_matches,
                     backlinks_shown=node_id in bl_ids))
        else:
            idx = id_to_idx[node_id]
        if parent_idx is not None:
            links.append({
                "source": parent_idx,
                "target": idx,
                "rel_type": rel_type
            })
        if is_new_node:
            if show_edges():
                for edge in node.edges:
                    try:
                        stack.append((depth + 1, idx,
                                      EdgeObject.load(edge.id_, request.user.filters()),
                                      REL_TYPE_EDGE))
                    except EdgeError as e:
                        if e.message == edge.id_ + " not found":
                            obj = create_external_reference_from_edge(edge)
                            stack.append((depth + 1, idx, obj, REL_TYPE_EXT))
                        continue
            if node_id in bl_ids:
                for eoId in [val for doc in get_backlinks(node_id) for val in doc['value'].keys()]:
                    try:
                        stack.append((depth + 1, idx,
                                      EdgeObject.load(eoId, request.user.filters()),
                                      REL_TYPE_BACKLINK))
                    except:
                        stack.append((depth + 1, idx,
                                      create_external_reference_from_id(eoId),
                                      REL_TYPE_EXT))
            if node_id in id_matches:
                for eoId in get_matches(node_id, request):
                    try:
                        stack.append((depth + 1, idx,
                                      EdgeObject.load(eoId, request.user.filters()),
                                      REL_TYPE_MATCH))
                    except:
                        stack.append((depth + 1, idx,
                                      create_external_reference_from_id(eoId),
                                      REL_TYPE_EXT))
    return dict(nodes=nodes, links=links)
def get_additional_sightings_count(duplicates):
    count = 0
    for dup in duplicates:
        api_object = EdgeObject.load(dup).to_ApiObject()
        count += _get_sighting_count(api_object.obj)
    return count
def filtered_loader(idref):
    try:
        return EdgeObject.load(idref, self.filters)
    except EdgeError:
        return create_external_reference_from_id(idref)
def user_loader(idref):
    return EdgeObject.load(idref, request.user.filters())