def run(self):
    """Run the online dedup task: rehash observables since the last run,
    find duplicate Observables and merge each set into its original.

    Progress is reported via log_activity(); a failure merging one object
    is logged and skipped so it does not abort the rest of the run.
    """
    def build_activity_message(num_of_duplicates):
        # Human-readable one-line summary for the activity log.
        if num_of_duplicates:
            return 'Deduped %d Observables ' % (num_of_duplicates)
        else:
            return "No Observable duplicates found"

    messages = []
    last_run_at = self.config.task.last_run_at
    elapsed = StopWatch()
    try:
        try:
            rehash.rehash(last_run_at)
        except Exception as e:
            # rehash failed, let's just continue with the hashes as is --
            # but record why instead of swallowing the error silently.
            log_error(e, 'adapters/dedup/dedup',
                      'Rehash failed; continuing with existing hashes')
        original_to_duplicates = STIXDedup.find_duplicates(
            self.config.only_local_ns)
        for original, duplicates in original_to_duplicates.iteritems():
            try:
                self.merge_object(original, duplicates)
            except Exception as e:
                log_error(e, 'adapters/dedup/dedup',
                          'Failed to merge %s' % original)
        messages.append(build_activity_message(len(original_to_duplicates)))
        messages.insert(0, 'Online Dedup in %ds: ' % int(elapsed.sec()))
        log_activity('system', 'DEDUP', 'INFO', "\n \t".join(messages))
    except Exception as e:
        log_activity('system', 'DEDUP', 'ERROR', e.message)
def remap_duplicates_parents(self, parents, map_table):
    """Re-inbox the parents of deduped objects so their references point
    at the surviving original instead of the removed duplicates.

    :param parents: mapping of parent object id -> type; only the ids are
        needed here, the values are ignored
    :param map_table: duplicate-id -> original-id mapping consumed by
        api_obj.remap()
    """
    if not parents:
        return
    ip = InboxProcessor(user=self.user, trustgroups=None)
    for id_ in parents:  # only the keys are used; types are irrelevant here
        try:
            api_obj, tlp, esms, etou = STIXDedup.load_eo(id_)
        except EdgeError:
            # Parent may not exist in Edge but has a record in the STIX
            # BLs collection -- nothing to remap in that case.
            continue
        try:
            api_obj = api_obj.remap(map_table)
            ip.add(InboxItem(api_object=api_obj, etlp=tlp, esms=esms,
                             etou=etou))
        except InboxError as e:
            log_error(e, 'adapters/dedup/dedup',
                      'Adding parent %s to IP failed' % id_)
            continue
    try:
        ip.run()
    except InboxError as e:
        log_error(e, 'adapters/dedup/dedup',
                  'Remapping parent objects failed')
def review_set_handling(request, data):
    """Apply the handling markings in data["handling"] to the object
    identified by data["rootId"], re-inboxing the updated object on
    behalf of the requesting user.

    Returns a dict describing success or failure for the AJAX caller.
    """
    try:
        loaded = EdgeObject.load(data["rootId"])
        api_object = loaded.to_ApiObject()
        # Stamp the modification time before appending the new handling.
        api_object.obj.timestamp = datetime.now(tz.tzutc())
        append_handling(api_object, data["handling"])
        inbox = InboxProcessorForBuilders(user=request.user)
        inbox.add(InboxItem(api_object=api_object, etlp=loaded.etlp))
        inbox.run()
    except InboxError as e:
        log_error(e, 'adapters/review/handling', 'Failed to set Handling')
        return {'message': e.message, 'state': 'error', "success": False}
    return {'message': '', 'state': 'success', "success": True}
def generate_error_message(username, message, e, elapsed):
    """Record an incident-ingest failure and build the matching response.

    Logs *message* to the activity log and *e* to the error log, then
    returns a 500 JsonResponse carrying the message and elapsed time.
    """
    log_activity(username, 'INCIDENT INGEST', 'ERROR', message)
    log_error(e, 'adapters/incident/import', 'Import Failed')
    payload = {
        'duration': int(elapsed.ms()),
        'messages': [message],
        'state': 'error',
    }
    return JsonResponse(payload, status=500)
def get(cls):
    """Load the stored dedup task configuration.

    :returns: a config instance wrapping the stored periodic task
    :raises DoesNotExist: when no task named cls.TASK_NAME exists (the
        error is logged before re-raising)
    """
    try:
        task = PeriodicTaskWithTTL.objects.get(task=cls.TASK_NAME)
    except DoesNotExist as e:
        # Python-3-compatible 'as' syntax (works on 2.6+); behavior unchanged.
        log_error(e, 'adapters/dedup/config',
                  'Configuration for dedup task not found')
        raise
    return cls(task)
def ajax_get_retention_config(request, data):
    """Fetch the retention configuration for an AJAX request.

    NOTE(review): success/error_message/config_values are computed but not
    returned in the visible body -- confirm the response assembly was not
    truncated from this view of the file.
    """
    success = True
    error_message = ""
    config_values = {}
    try:
        ret_config = RetentionConfiguration.get()
        config_values = ret_config.to_dict()
    except Exception as e:
        success = False
        error_message = e.message
        log_error(e, 'Retention config')
def ajax_get_fts_config(request, data):
    """Fetch the full-text-search configuration for an AJAX request.

    NOTE(review): success/error_message/config_values are computed but not
    returned in the visible body -- confirm the response assembly was not
    truncated from this view of the file.
    """
    success = True
    error_message = ""
    config_values = {}
    try:
        fts_config = FtsConfiguration.get()
        config_values = fts_config.to_dict()
    except Exception as e:
        success = False
        error_message = e.message
        log_error(e, 'FTS config')
def ajax_reset_dedup_config(request, data):
    """Reset the dedup configuration to defaults and re-read it.

    Fixes vs. original: the error log tag said 'Fts config' (copy-paste
    from the FTS handler) and error_message was never populated on
    failure, unlike the sibling config handlers.
    """
    success = True
    error_message = ""
    config_values = {}
    try:
        DedupConfiguration.reset()
        dedup_config = DedupConfiguration.get()
        config_values = dedup_config.to_dict()
    except Exception as e:
        success = False
        error_message = e.message
        log_error(e, 'Dedup config')
def ajax_set_retention_config(request, data):
    """Update the retention configuration from an AJAX payload.

    A KeyError (missing config value) produces a targeted message; any
    other failure reports the raw exception message. Both are logged.
    """
    success = True
    error_message = ""
    try:
        RetentionConfiguration.set_from_dict(data)
    except KeyError as e:
        # Separate clause replaces the isinstance() check inside a broad
        # except; same messages, same logging.
        success = False
        error_message = 'value missing: %s' % e.message
        log_error(e, 'Retention config')
    except Exception as e:
        success = False
        error_message = e.message
        log_error(e, 'Retention config')
def remap_backlinks_for_original(original, duplicates):
    """Fold the duplicates' backlinks into the original's backlink record
    and delete the duplicates' own backlink documents.

    Failures updating or removing backlink documents are logged rather
    than raised, so dedup can continue with the remaining objects.
    """
    parents_of_original, parents_of_duplicate = STIXDedup.calculate_backlinks(
        original, duplicates)
    merged_parents = parents_of_original
    merged_parents.update(parents_of_duplicate)
    # Strip out references to duplicates in updated backlinks
    for duplicate_id in duplicates:
        merged_parents.pop(duplicate_id, None)
    try:
        get_db().stix_backlinks.update(
            {'_id': original},
            {'$set': {'value': merged_parents}},
            upsert=True)
    except PyMongoError as exc:
        log_error(exc, 'adapters/dedup/dedup', 'Updating backlinks failed')
    if parents_of_duplicate:
        try:
            get_db().stix_backlinks.remove({'_id': {'$in': duplicates}})
        except PyMongoError as exc:
            log_error(exc, 'adapters/dedup/dedup',
                      'Removing parent backlinks failed')
def ajax_publish(request, data):
    """Build a STIX package for data['root_id'] and push it via TAXII.

    NOTE(review): success/error_message/root_id appear unused at the end
    of the visible body -- confirm the response assembly was not truncated
    from this view of the file.
    """
    success = True
    error_message = ""
    root_id = None
    try:
        root_id = data['root_id']
        edge_object = PublisherEdgeObject.load_and_parse(root_id)
        package = PackageGenerator.build_package(edge_object)
        namespace_info = edge_object.ns_dict()
        Publisher.push_package(package, namespace_info)
    # Narrow down which exceptions we catch...?
    except Exception as e:
        message = ''
        # If the publisher captured a TAXII status response, include it in
        # the error log to aid diagnosis. (Typo 'Staus' fixed.)
        taxii_response = get_exception_stack_variable('tr')
        if taxii_response:
            message = '\nTAXII Status Message:\n' + json.dumps(
                taxii_response.to_dict())
        log_error(e, 'Publisher', message)
        success = False
        error_message = e.message
def raise_event(self, source, **event_args):
    """Invoke every registered handler with *source* and the event kwargs.

    A failing handler is logged and skipped so one bad subscriber cannot
    prevent the remaining handlers from running.
    """
    # Iterate the handler callables directly instead of looking each one
    # up by key inside the loop.
    for handler in self._handlers.values():
        try:
            handler(source, **event_args)
        except Exception as e:
            log_error(e, 'adapters/audit/event')
def create_jobs():
    """Install every scheduled task from the module-level `tasks` list.

    Each installer failure is logged and skipped so one broken task does
    not prevent the remaining jobs from being created.
    """
    for item in tasks:
        try:
            item['installer']()
        except Exception as e:
            log_error(e, 'adapters/cron/setup')
def ajax_import(request, username):
    """Inbox a POSTed STIX XML document on behalf of *username*,
    deduplicating on the way in, and return a JSON summary.

    Response codes: 202 on success, 400 for invalid/blocked XML or inbox
    errors, 405/406/415 for wrong method / Accept / Content-Type,
    403 for an unknown user, 500 for any other failure.
    """
    # Guard clauses: reject anything that is not an XML POST expecting JSON.
    if not request.method == 'POST':
        return JsonResponse({}, status=405)
    if not request.META.get('HTTP_ACCEPT') == 'application/json':
        return JsonResponse({}, status=406)
    if not request.META.get('CONTENT_TYPE') in {'application/xml', 'text/xml'}:
        # NOTE(review): an exact-match check -- a Content-Type with a
        # charset parameter would be rejected; confirm that is intended.
        return JsonResponse({}, status=415)
    try:
        request.user = Repository_User.objects.get(username=username)
    except DoesNotExist:
        return JsonResponse({}, status=403)
    elapsed = StopWatch()
    ip = None
    try:
        ip = DedupInboxProcessor(user=request.user, streams=[(request, None)])
        ip.run()
        duration = int(elapsed.ms())
        # Surface the processor's own message when no filter messages exist.
        if len(ip.filter_messages) == 0 and ip.message:
            ip.filter_messages.append(ip.message)
        log_activity(
            username, 'DEDUP', 'INFO',
            build_activity_message(ip.saved_count, duration,
                                   ip.filter_messages, ip.validation_result))
        return JsonResponse(
            {
                'count': ip.saved_count,
                'duration': duration,
                'messages': ip.filter_messages,
                'state': 'success',
                'validation_result': ip.validation_result
            }, status=202)
    except (XMLSyntaxError, EntitiesForbidden, InboxError) as e:
        # ip may still be None if its construction raised, hence the
        # isinstance checks before reading its attributes.
        count = ip.saved_count if isinstance(ip, DedupInboxProcessor) else 0
        duration = int(elapsed.ms())
        messages = [e.message]
        validation_result = ip.validation_result if isinstance(
            ip, DedupInboxProcessor) else {}
        log_activity(
            username, 'DEDUP', 'WARN',
            build_activity_message(count, duration, messages,
                                   validation_result))
        return JsonResponse(
            {
                'count': count,
                'duration': duration,
                'messages': messages,
                'state': 'invalid',
                'validation_result': validation_result
            }, status=400)
    except Exception as e:
        # Unexpected failure: record it in both activity and error logs.
        log_activity(username, 'DEDUP', 'ERROR', e.message)
        log_error(e, 'adapters/dedup/import', 'Import failed')
        return JsonResponse(
            {
                'duration': int(elapsed.ms()),
                'messages': [e.message],
                'state': 'error'
            }, status=500)
def configure_publisher_actions():
    """Ensure the publish success/fail audit actions exist.

    Any failure is logged rather than raised so setup can proceed.
    """
    try:
        create_action_if_not_exist(status.PUBLISH_SUCCESS)
        create_action_if_not_exist(status.PUBLISH_FAIL)
    except Exception as e:
        log_error(e, 'adapters/audit/setup')