def main(): """Main function for retrieve_events""" utility = Utility() args = utility.updated_hash() for i in args["api_keys"]: event = Event(i['key_id'], i['secret_key']) event.retrieve_events()
def generateDownloadLink(self, event, context):
    # This is a step function called from a state machine
    # Event type will always be "step-function"
    source_event = Event(event, context)
    data = source_event.parse()

    if data:
        target = Target(data.get('target'))
        if not target:
            self.logger.error("Target validation failed for: {}".format(target.name))
            return False

        results = Results(target.name, self.s3_client, self.bucket, self.base_results_path)
        status, output, download_url = results.generateURL()
        if download_url:
            return {'status': status, 'output': output, 'url': download_url}
        else:
            if status == 404:
                message = 'No results found for target'
            else:
                message = 'Unknown error'
            return {'status': status, 'message': message}
    else:
        self.logger.error("Unrecognized payload: {}".format(data))
        return False

def formatForSNS(self, event, context):
    # This is a step function called from a state machine
    # Event type will always be "step-function"
    source_event = Event(event, context)
    data = source_event.parse()

    if data:
        target = Target(data.get('target'))
        if not target:
            self.logger.error("Target validation failed for: {}".format(target.name))
            return False

        # Extract the output tracker dictionary and the signed URL
        output_tracker = event['responses']['Generatedownloadlink']['output']
        signed_url = event['responses']['Generatedownloadlink']['url']
        contents = (target.name, output_tracker, signed_url)
        formatter = Formatter(self.logger)
        subject, body = formatter.formatForEmail(contents)
        return {'subject': subject, 'body': body}
    else:
        self.logger.error("Unrecognized payload: {}".format(data))
        return False

def queue(self, event, context):
    source_event = Event(event, context)
    data = source_event.parse()

    if data:
        target = Target(data.get('target'))
        if not target:
            self.logger.error("Target validation failed for: {}".format(target.name))
            return Response({
                "statusCode": 400,
                "body": json.dumps({'error': 'Target was not valid or missing'})
            }).with_security_headers()

        # Use a UUID to identify this scan and return it to the caller
        scan_uuid = str(uuid.uuid4())
        self.sqs_client.send_message(
            QueueUrl=self.queueURL,
            MessageBody="portscan|" + target.name + "|" + scan_uuid
        )
        return Response({
            "statusCode": 200,
            "body": json.dumps({'uuid': scan_uuid})
        }).with_security_headers()
    else:
        self.logger.error("Unrecognized payload: {}".format(data))
        return Response({
            "statusCode": 400,
            "body": json.dumps({'error': 'Unrecognized payload'})
        }).with_security_headers()

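# A hedged sketch of how a downstream worker might unpack the message queued
# above. The consumer side is an assumption; only the "type|target|uuid"
# message body format comes from the code.
def parse_scan_message(message_body):
    scan_type, target_name, scan_uuid = message_body.split("|")
    return scan_type, target_name, scan_uuid

print(parse_scan_message("portscan|infosec.mozilla.org|f47ac10b-58cc-4372-a567-0e02b2c3d479"))
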
def pollScanResults(self, event, context):
    # This function takes a Tenable.io scan ID, queries the Tenable.io API
    # for the status of that scan, and, if it has completed, stores the
    # JSON and HTML results in S3.
    source_event = Event(event, context)
    data = source_event.parse()

    if data:
        target = Target(data.get('target'))
        if not target:
            self.logger.error("Target validation failed for: {}".format(target.name))
            return False

        scanID = event['responses']['Tenablescan']['id']
        scanner = TIOScanner(logger=self.logger)
        json_result = scanner.scanResult(scanID, result_format="json")
        html_result = scanner.scanResult(scanID, result_format="html")
        if json_result and html_result:
            send_to_s3(target.name + "_tenablescan", json_result, client=self.s3_client, bucket=self.s3_bucket)
            send_to_s3(target.name + "_tenablescan", html_result, client=self.s3_client, bucket=self.s3_bucket)
        return {'statusCode': 200}
    else:
        self.logger.error("Unrecognized payload: {}".format(data))
        return False

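# A hedged sketch of the step-function payload pollScanResults() expects: the
# state machine appears to collect each step's return value under
# event['responses'], so the {'id': ...} returned by the Tenable scan step
# surfaces like this. Key names come from the code; the surrounding structure
# is an assumption.
sample_event = {
    "target": "infosec.mozilla.org",
    "responses": {
        "Tenablescan": {"id": "42"},
    },
}
scan_id = sample_event['responses']['Tenablescan']['id']
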
def __init__(self, gene, threshold):
    Event.__init__(self, gene)
    self.etype = 'RI'
    self.construct_events(gene)
    if len(self.positive_ids) > 1 and threshold:
        self.merge_events(threshold=threshold, overlap_fun=self.overlap)

def init_event_flow():
    EVENTFLOW = EventFlow()
    global_var.set_value("EVENTFLOW", EVENTFLOW)
    EVENT = Event()
    global_var.set_value("EVENT", EVENT)
    EVENT.get_event_flow_module()
    EVENTFLOW.get_event_module()
    WriteLog.info(__name__, "Event flow initialization complete")

def main():
    global event, playbook, configs

    cirtaHome = os.path.dirname(os.path.realpath(__file__))
    configs = config(os.path.join(cirtaHome, "etc"))
    options = processArgs(configs)
    initLogging(configs, options)

    playbook = Playbook(configs, options)
    event = Event(cirta_id, configs, options, playbook, cirtaHome)

    printModeHeader(playbook, event)
    printCirtaID(event)

    if options.seed:
        seedAttributes(event)

    event.cirta_status = "running"
    log.state(event.getAttrs())

    launchInitializers(playbook, event)
    collectSourcesInput(playbook, event)
    launchSources(playbook, event, preAction=True)

    if playbook.POST_SOURCES and playbook.ACTIONS and launchActionsNow(playbook, event):
        playbook.actionsLaunched = True
        launchActions(playbook, event)

    launchSources(playbook, event, preAction=False)

    if playbook.POST_SOURCES and playbook.ACTIONS and not playbook.actionsLaunched:
        keepaliveWait()
        playbook.actionsLaunched = True
        launchActions(playbook, event)

    if not playbook.actionsLaunched:
        keepaliveWait()
        launchActions(playbook, event)

    if hasattr(event, "_backgroundedDS"):
        launchBackgroundedSources(playbook, event)

    if hasattr(event, "_backgroundedActions"):
        launchBackgroundedActions(playbook, event)

    checkStackTraces(event)
    event.cirta_status = "finished"
    log.state(event.getAttrs())
    log.info('msg="cirta execution finished"')

def downloadResults(self, event, context):
    # This is a lambda function called from API GW
    # Event type will always be "api-gw"
    source_event = Event(event, context)
    data = source_event.parse()

    if data:
        target = Target(data.get('target'))
        if not target:
            self.logger.error("Target validation failed for: {}".format(target.name))
            return Response({
                "statusCode": 400,
                "body": json.dumps({'error': 'Target was not valid or missing'})
            }).with_security_headers()

        results = Results(target.name, self.s3_client, self.bucket, self.base_results_path)
        # Always use the download route
        scan_results, status = results.download()
        if scan_results:
            return Response({
                "statusCode": status,
                "headers": {
                    "Content-Type": "application/gzip",
                    "Content-Disposition": "attachment; filename={}.tgz".format(target.name)
                },
                "body": base64.b64encode(scan_results.getvalue()).decode("utf-8"),
                "isBase64Encoded": True
            }).with_security_headers()
        else:
            if status == 404:
                resp_body = 'No results found for target'
            elif status == 500:
                resp_body = 'Unable to download scan results'
            else:
                resp_body = 'Unknown error'
            return Response({
                "statusCode": status,
                "body": json.dumps({'error': resp_body})
            }).with_security_headers()
    else:
        self.logger.error("Unrecognized payload: {}".format(data))
        return Response({
            "statusCode": 400,
            "body": json.dumps({'error': 'Unrecognized payload'})
        }).with_security_headers()

def import_events() -> list[Event]:
    events = []
    events_path = Path('events.json').resolve()
    with open(events_path, 'r') as f:
        data = f.read()
    events_json = json.loads(data)
    for event_json in events_json:
        event = Event(**event_json)
        # The date is stored as a string in the JSON; parse it on load.
        event.date = datetime.strptime(event_json['date'], '%Y-%m-%d %H:%M')
        events.append(event)
    return events

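# A runnable, self-contained sketch of the JSON shape import_events() expects,
# using a hypothetical stand-in for the real Event class (which evidently
# accepts the JSON keys as keyword arguments).
import json
from datetime import datetime
from pathlib import Path

class Event:
    """Hypothetical stand-in: stores any JSON keys as attributes."""
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

Path('events.json').write_text(json.dumps([
    {"name": "PyCon sprint", "date": "2021-05-15 09:00"},
]))

events = []
for event_json in json.loads(Path('events.json').read_text()):
    event = Event(**event_json)
    event.date = datetime.strptime(event_json['date'], '%Y-%m-%d %H:%M')
    events.append(event)

print(events[0].name, events[0].date)  # PyCon sprint 2021-05-15 09:00
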
def __init__(self, gene, threshold):
    Event.__init__(self, gene)
    if self.gene.strand == '-':
        self.etype = 'AF'
    else:
        self.etype = 'AL'
    self.positive_coords = {}
    self.negative_coords = {}
    self.construct_events(gene)
    if len(self.positive_ids) > 1 and threshold:
        self.merge_events(threshold=threshold, overlap_fun=self.overlap)

class Driver(object):
    def __init__(self, name, mode=0, freq=None, spec=None):
        self.__mode = mode
        self.__name = name
        self.__freq = freq
        self.__spec = spec
        self.event = Event()
        self.__index = name2index(name)

    def __str__(self):
        return self.__class__.__name__

    def get_name(self):
        return self.__name

    def get_mode(self):
        return self.__mode

    def get_index(self):
        return self.__index

    def get_profile(self):
        info = {'type': str(self), 'mode': self.__mode}
        if self.__freq:
            info.update({'freq': self.__freq})
        if self.__spec:
            info.update({'spec': self.__spec})
        return info

    def callback(self):
        self.event.set()

    def open(self):
        pass

    def close(self):
        pass

    def get(self):
        pass

    def put(self, *args, **kwargs):
        pass

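# A minimal sketch of how a concrete driver might build on Driver, assuming
# Event here is threading.Event (consistent with callback() calling set()).
# name2index() below is a hypothetical stub so the sketch runs standalone,
# and GPSDriver's behavior is invented for illustration.
from threading import Event

def name2index(name):
    return 0  # hypothetical stub

class GPSDriver(Driver):
    def get(self):
        # Block until callback() fires from a hardware/poll thread.
        if self.event.wait(timeout=1.0):
            self.event.clear()
            return {'fix': True}
        return None

driver = GPSDriver('gps0', mode=1, freq=1.0)
driver.callback()            # simulate a hardware notification
print(driver.get_profile())  # {'type': 'GPSDriver', 'mode': 1, 'freq': 1.0}
print(driver.get())          # {'fix': True}
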
def setEvent():
    global g_queue
    event_label = request.args.get('id', type=str)
    event = Event.from_str(event_label)
    print(Fore.MAGENTA + Style.BRIGHT + 'ACTION: {}'.format(event.name) + Style.RESET_ALL)
    response = g_queue.respond(event)
    print(Fore.MAGENTA + Style.BRIGHT + 'RESPONSE mimetype: {}; json: {}'.format(
        response.mimetype, response.json) + Style.RESET_ALL)
    return response

def runFromStepFunction(self, event, context):
    source_event = Event(event, context)
    data = source_event.parse()

    if data:
        target = Target(data.get('target'))
        if not target:
            self.logger.error("Target validation failed for: {}".format(target.name))
            return False

        # Run the scan here and return the ScanRef object
        scanner = TIOScanner(logger=self.logger)
        scanner_ref = scanner.scan(target.name)
        if scanner_ref:
            scanner_ref.launch(wait=False)
            return {'id': scanner_ref.id}
        else:
            return False
    else:
        self.logger.error("Unrecognized payload: {}".format(data))
        return False

def test_parse(self):
    target = "infosec.mozilla.org"
    partial_apigw_event = {"body": '{"target": "' + target + '"}'}
    partial_stepf_event = {"target": target}
    invalid_event = {"TEST": "TEST"}
    test_aws_context = None

    test_event_1 = Event(partial_apigw_event, test_aws_context)
    apigw_event = test_event_1.parse()
    assert test_event_1.type == "api-gw"
    assert apigw_event == partial_stepf_event

    test_event_2 = Event(partial_stepf_event, test_aws_context)
    step_function_event = test_event_2.parse()
    assert test_event_2.type == "step-function"
    assert partial_stepf_event == step_function_event

    test_event_3 = Event(invalid_event, test_aws_context)
    assert test_event_3.parse() is False

async def events(self, ctx, command: str = None, *args):
    if command == 'template' and len(args) == 0:
        await ctx.send(FORMAT_JSON.format(Event().to_json()))
    elif command == 'create' and len(args) == 1:
        await self.create_event(ctx, args[0])
    elif command == 'edit' and len(args) == 1:
        pass
    elif command == 'copy' and len(args) == 2:
        pass
    elif command == 'open' and len(args) == 1:
        pass
    elif command == 'close' and len(args) == 1:
        pass
    else:
        await ctx.send(embed=create_help_embed(self.help))

async def create_event(ctx, template_msg_link):
    split_link = template_msg_link.split("/")
    channel_id = int(split_link[-2])
    message_id = int(split_link[-1])
    message = await ctx.bot.get_channel(channel_id).fetch_message(message_id)
    content = message.content
    event_or_error_message = Event.from_json(
        ctx, content[content.index("{"):content.rindex("}") + 1])
    if isinstance(event_or_error_message, Event):
        event = event_or_error_message
        await ctx.send(FORMAT_JSON.format(event.to_json()))
    elif isinstance(event_or_error_message, str):
        error_message = event_or_error_message
        await ctx.send(embed=create_basic_embed(error_message, EMOJI_ERROR))

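# The split("/") above relies on Discord message links always ending in
# .../<channel_id>/<message_id>; a quick illustration (IDs are made up):
link = "https://discord.com/channels/111111111111111111/222222222222222222/333333333333333333"
parts = link.split("/")
channel_id, message_id = int(parts[-2]), int(parts[-1])
print(channel_id, message_id)
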
async def on_subscribe(self, observer: Observer):
    """
    Drain the queue and forward each event's value to the observer
    until the completed sentinel arrives.

    :param observer: the observer receiving the events
    """
    while True:
        event = await self.queue.get()
        if event is Event.completed():
            break
        await observer.on_next(event.value)
    await observer.on_completed()

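# A runnable mini-version of the drain loop above, with hypothetical stand-ins
# for Event and Observer. The `event is Event.completed()` identity check in
# the original implies completed() returns a singleton sentinel; that
# assumption is reproduced here.
import asyncio

class Event:
    _COMPLETED = None

    def __init__(self, value=None):
        self.value = value

    @classmethod
    def completed(cls):
        if cls._COMPLETED is None:
            cls._COMPLETED = cls()  # same object every call, so `is` works
        return cls._COMPLETED

class PrintObserver:
    async def on_next(self, value):
        print('next:', value)

    async def on_completed(self):
        print('completed')

async def main():
    queue = asyncio.Queue()
    for item in (Event(1), Event(2), Event.completed()):
        queue.put_nowait(item)

    observer = PrintObserver()
    while True:
        event = await queue.get()
        if event is Event.completed():
            break
        await observer.on_next(event.value)
    await observer.on_completed()

asyncio.run(main())
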
async def events(self, ctx, command: str = None, *args):
    if command == 'template' and len(args) == 0:
        await ctx.send(FORMAT_JSON.format(Event().to_json()))
    elif command == 'create' and len(args) == 1:
        await self.create_event(ctx, args[0])
    elif command == 'new' and len(args) == 0:
        # TODO: Document and implement this - interactive event creation session.
        pass
    elif command == 'edit' and len(args) == 1:
        pass
    elif command == 'copy' and len(args) == 2:
        pass
    elif command == 'open' and len(args) == 1:
        pass
    elif command == 'close' and len(args) == 1:
        pass
    else:
        prefix = get_prefix(self.bot, ctx.message)
        await ctx.send(embed=create_help_embed(self.help, prefix))

def __init__(self, driver):
    self._driver = driver
    self.onNewCardDetected = Event()
    self.onCardRemoved = Event()
    self.onCardStillPresent = Event()

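# A minimal sketch of the observer-style Event these attributes suggest; the
# subscribe (+=) and fire() API here is hypothetical, not the library's own.
class Event:
    def __init__(self):
        self._handlers = []

    def __iadd__(self, handler):
        self._handlers.append(handler)
        return self

    def fire(self, *args, **kwargs):
        for handler in self._handlers:
            handler(*args, **kwargs)

onNewCardDetected = Event()
onNewCardDetected += lambda uid: print('card detected:', uid)
onNewCardDetected.fire('04:A2:5F:11')
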
from lib.event import Event
from lib.hpfeeds import HPFeeds

print('\n')

###
print('> Create an event')
e = Event()
e.add('source.ip', '192.168.1.20')
e.add('source.port', '61432')
e.add('destination.ip', '192.168.1.200')
e.add('destination.port', '22')
print('Event: {}'.format(e))
print('--------\n')

###
print('> Test: value already defined')
e.add('destination.port', '23')
print('Event: {}'.format(e))
print('--------\n')

###
# print('> Test: invalid key')
# e.add('destination.xxx', 'whatever')
# print('Event: {}'.format(e))
# print('--------\n')

async def start_pushing():
    await asyncio.sleep(.1)
    await proxy_subject.proxy(Event(value))
    await asyncio.sleep(.1)
    await proxy_subject.proxy(Event(value))

def parse_page(self, events_json):
    result = []
    for event in events_json['events']['event']:
        curr_event = Event()
        curr_event.name = helpers.clean_string(event['title'])
        # Fall back to the alternate description if the primary one is empty.
        description = helpers.clean_string(event['description'])
        if not description:
            description = self.get_alt_description(event['id'])
        curr_event.description = description
        curr_event.date = datetime.datetime.strptime(event['start_time'], "%Y-%m-%d %H:%M:%S")
        curr_event.place = event['venue_name']
        curr_event.address1 = helpers.clean_address(event['venue_address'])
        curr_event.address2 = None
        curr_event.city = helpers.clean_city(event['city_name'])
        curr_event.state = event['region_abbr']
        curr_event.zipcode = event['postal_code']
        curr_event.cost = 0 if 'cost' not in event else event['cost']
        curr_event.link = event['url']
        curr_event.api = 'http://api.eventful.com/json/events/get?app_key=' + self.token + '&id=' + event['id']
        curr_event.source = self.source
        curr_event.api_id = event['id']
        result.append(curr_event)
    return result

if len(sys.argv) < 3:
    print("\tUsage: python %s <input-file> <queue1,queue2..>" % sys.argv[0])
    print()
    print("\tExample: python %s /tmp/events.txt events-queue" % sys.argv[0])
    print()
    exit()

file = open(sys.argv[1], 'r')
queues = sys.argv[2].split(',')
pipeline = Pipeline(None, queues)

for line in file:
    line = decode(line)
    try:
        event = Event.from_unicode(line)
    except:
        print("ERROR - lines from file need to follow Event format")
        continue

    # Normalize keys: replace spaces with underscores.
    for key in event.keys():
        value = event.value(key)
        event.clear(key)
        key = key.replace(' ', '_')
        event.add(key, value)

    time.sleep(0.01)
    pipeline.send(decode(event))

file.close()

def getResults(self, event, context):
    source_event = Event(event, context)
    data = source_event.parse()

    if data:
        target = Target(data.get('target'))
        if not target:
            self.logger.error("Target validation failed for: {}".format(target.name))
            return Response({
                "statusCode": 400,
                "body": json.dumps({'error': 'Target was not valid or missing'})
            }).with_security_headers()

        results = Results(target.name, self.s3_client, self.bucket, self.base_results_path)
        if source_event.type == "step-function":
            # Use the generateURL route
            download_url, status = results.generateDownloadURL()
            if download_url:
                return Response({
                    "statusCode": status,
                    "body": json.dumps({'url': download_url})
                }).with_security_headers()
            else:
                if status == 404:
                    resp_body = 'No results found for target'
                else:
                    resp_body = 'Unknown error'
                return Response({
                    "statusCode": status,
                    "body": json.dumps({'error': resp_body})
                }).with_security_headers()
        else:
            # Use the download route
            scan_results, status = results.download()
            if scan_results:
                return Response({
                    "statusCode": status,
                    "headers": {
                        "Content-Type": "application/gzip",
                        "Content-Disposition": "attachment; filename={}.tgz".format(target.name)
                    },
                    "body": base64.b64encode(scan_results.getvalue()).decode("utf-8"),
                    "isBase64Encoded": True
                }).with_security_headers()
            else:
                if status == 404:
                    resp_body = 'No results found for target'
                elif status == 500:
                    resp_body = 'Unable to download scan results'
                else:
                    resp_body = 'Unknown error'
                return Response({
                    "statusCode": status,
                    "body": json.dumps({'error': resp_body})
                }).with_security_headers()
    else:
        self.logger.error("Unrecognized payload: {}".format(data))
        return Response({
            "statusCode": 400,
            "body": json.dumps({'error': 'Unrecognized payload'})
        }).with_security_headers()

def test_defaults(self):
    test_event = Event()
    assert type(test_event) is Event
    assert test_event.event is None
    assert test_event.context is None
    assert test_event.type == "api-gw"

def get_event_loc(self, event_id):
    response = self._get('api2/eventlist', {
        'event_id': event_id,
        'id': self.token
    })
    event = ElementTree.fromstring(response.text.encode('utf-8')).find('event')

    result = Event()
    result.name = helpers.clean_string(event.find('title').text)
    result.description = ' '.join([
        helpers.clean_string(event.find('description').text),
        helpers.clean_string(event.find('e_description').text)
    ])
    result.place = helpers.clean_address(event.find('e_address1').text)
    result.address1 = helpers.clean_address(event.find('e_address2').text)
    result.city = helpers.clean_city(event.find('e_city').text)
    result.state = event.find('e_state').text
    result.zipcode = event.find('e_zip').text
    result.link = event.find('link').text
    result.api = [response.url]
    result.source = 'Brown Paper Tickets'
    result.api_id = event_id
    return result

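# A hedged reconstruction of the XML shape get_event_loc() parses, using only
# the element names accessed above; real Brown Paper Tickets responses may
# carry more fields.
from xml.etree import ElementTree

sample = """
<document>
  <event>
    <title>Sample Show</title>
    <description>Main description.</description>
    <e_description>Extra details.</e_description>
    <e_address1>The Venue</e_address1>
    <e_address2>123 Main St</e_address2>
    <e_city>Providence</e_city>
    <e_state>RI</e_state>
    <e_zip>02903</e_zip>
    <link>https://example.com/event</link>
  </event>
</document>
"""

event = ElementTree.fromstring(sample).find('event')
print(event.find('title').text)  # Sample Show
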
def process_event(event, sip_campaign_names):
    """ Process the event. """
    logging.info('Starting to process event: {}'.format(event['name']))
    start_time = time.time()

    # Store the SIP campaign names with a lowercase wiki tag version.
    campaign_dict = dict()
    for campaign in sip_campaign_names:
        campaign_dict[campaign.replace(' ', '').lower()] = campaign

    # Create the event object.
    try:
        e = Event(event, sip)
    except:
        logging.exception('Error creating the Event object: {}'.format(event['name']))
        return

    # Build the event.json file.
    try:
        e.setup(alert_uuids=event['alerts'])
    except:
        logging.exception('Error setting up the Event object: {}'.format(event['name']))
        return

    # Connect to the wiki page.
    wiki = ConfluenceEventPage(e.name_wiki, sip)

    # Add the wiki page URL to the event JSON.
    e.json['wiki_url'] = wiki.get_page_url()

    # If the event has changed or we are forcing a refresh, we need to update the wiki page.
    if e.changed or wiki.is_page_refresh_checked():

        # 99% of the same things need to be updated if the event has changed or if someone
        # forced a refresh using the wiki page. However, there are a couple differences between
        # the two, so if the event has changed somehow, we want to make sure that takes
        # precedence over the wiki refresh button.
        if e.changed:
            wiki_refresh = False
            logging.info('Event has changed. Updating wiki: {}'.format(e.json['name']))
        else:
            wiki_refresh = True
            logging.info('Forcing event refresh. Updating wiki: {}'.format(e.json['name']))

        """ FIGURE OUT THE EVENT SOURCE """

        wiki_labels = wiki.get_labels()
        for tag in config['core']['event_sources']:
            if tag in wiki_labels:
                event_source = config['core']['event_sources'][tag]
                break

        """ ADD ANY WHITELISTED INDICATORS FROM THE SUMMARY TABLE TO SIP """

        # Read the Indicator Summary table to see if there are any checked (whitelisted) indicators.
        good_indicators, whitelisted_indicators = wiki.read_indicator_summary_table()

        if whitelisted_indicators:
            logging.info('Detected newly whitelisted indicators: {}'.format(e.json['name']))

            # If there were any Hash indicators checked as whitelisted, we need to check if there are any related
            # Hash indicators that were NOT checked. If there were, we want to make sure to treat them as whitelisted.
            hash_cache = []
            for i in whitelisted_indicators:
                if i['type'].startswith('Hash - '):
                    # Loop over the indicators in the event JSON to find the matching indicator.
                    for json_indicator in e.json['indicators']:
                        if i['type'] == json_indicator['type'] and i['value'] == json_indicator['value']:
                            # Loop over the relationships (the corresponding hashes) and see if any of them
                            # are in the good indicators list (as in they were not checked as whitelisted on the wiki).
                            relationships = json_indicator['relationships']
                            for rel in relationships:
                                # Only continue if we haven't already verified this hash.
                                if not rel in hash_cache:
                                    hash_cache.append(rel)
                                    for good_indicator in good_indicators:
                                        if good_indicator['type'].startswith('Hash - ') and good_indicator['value'] == rel:
                                            # Add the good hash indicator to the whitelisted indicator list.
                                            logging.debug('Whitelisting "{}" indicator "{}" by association to: {}'.format(good_indicator['type'], rel, i['value']))
                                            whitelisted_indicators.append(good_indicator)

            if sip:
                # Add the whitelisted indicators to the SIP whitelist.
                for i in whitelisted_indicators:
                    # If this is a "URI - Path" or "URI - URL" indicator, check its relationships to see if its
                    # corresponding "URI - Domain Name" or "Address - ipv4-addr" indicator was also checked. If it was,
                    # we want to ignore the path and URL indicators since the domain/IP serves as a least common denominator.
                    # This prevents the SIP whitelist from ballooning in size and slowing things down over time.
                    skip = False
                    if i['type'] == 'URI - Path' or i['type'] == 'URI - URL':
                        # Loop over the indicators in the event JSON to find the matching indicator.
                        for json_indicator in e.json['indicators']:
                            if i['type'] == json_indicator['type'] and i['value'] == json_indicator['value']:
                                # Loop over the whitelisted indicators and see if any of them are a whitelisted (checked)
                                # domain name or IP address. If the domain/IP appears in the relationships (for the
                                # "URI - Path" indicators) or in the value (for "URI - URL" indicators), we can ignore it.
                                relationships = json_indicator['relationships']
                                for x in whitelisted_indicators:
                                    if x['type'] == 'URI - Domain Name' or x['type'] == 'Address - ipv4-addr':
                                        if any(x['value'] in rel for rel in relationships) or x['value'] in i['value']:
                                            logging.debug('Ignoring redundant "{}" indicator "{}" for SIP whitelist.'.format(i['type'], i['value']))
                                            skip = True

                    if not skip:
                        logging.warning('Adding "{}" indicator "{}" to SIP whitelist.'.format(i['type'], i['value']))
                        try:
                            data = {'references': [{'source': event_source, 'reference': wiki.get_page_url()}],
                                    'status': 'Deprecated',
                                    'confidence': 'low',
                                    'impact': 'low',
                                    'tags': ['whitelist:e2w'],
                                    'type': i['type'],
                                    'username': '******',
                                    'value': i['value']}
                            result = sip.post('indicators', data)
                        except ConflictError:
                            pass
                        except:
                            logging.exception('Error adding "{}" indicator "{}" to SIP whitelist'.format(i['type'], i['value']))

        """ READ MANUAL INDICATORS FROM WIKI PAGE AND ADD NEW ONES TO SIP """

        if sip:
            # Read whatever manual indicators are listed on the wiki page so they can be added to SIP and the event.
            manual_indicators = wiki.read_manual_indicators()

            for i in manual_indicators:
                # Add a "manual_indicator" tag to the indicators so that we can exclude them from the
                # monthly indicator pruning process.
                i['tags'].append('manual_indicator')

                # Try to add the indicator to SIP. A ConflictError will be raised if it already exists.
                try:
                    # Assemble the correct tags to add to this indicator.
                    ignore_these_tags = config['wiki']['ignore_these_tags']
                    add_these_tags = [tag for tag in e.json['tags'] if not tag in ignore_these_tags]
                    i['tags'] += add_these_tags

                    # Perform the API call to add the indicator.
                    data = {'references': [{'source': event_source, 'reference': wiki.get_page_url()}],
                            'confidence': 'low',
                            'impact': 'low',
                            'tags': i['tags'],
                            'type': i['type'],
                            'username': '******',
                            'value': i['value']}
                    try:
                        result = sip.post('indicators', data)
                        logging.warning('Added "{}" manual indicator "{}" to SIP: {}'.format(i['type'], i['value'], result['id']))
                    except ConflictError:
                        pass
                    except:
                        logging.exception('Error adding "{}" manual indicator "{}" to SIP'.format(i['type'], i['value']))
                except ConflictError:
                    # Since the indicator already exists, try to update it to make sure that it
                    # has all of the latest wiki page tags. Start by getting the existing indicator.
                    result = sip.get('/indicators?type={}&exact_value={}'.format(i['type'], urllib.parse.quote(i['value'])))
                    if result:
                        try:
                            id_ = result[0]['id']
                            data = {'tags': i['tags']}
                            sip.put('/indicators/{}'.format(id_), data)
                        except ConflictError:
                            pass
                        except:
                            logging.exception('Error updating tags on manual indicator: {}'.format(id_))
                except:
                    logging.exception('Error adding "{}" manual indicator "{}" to SIP'.format(i['type'], i['value']))

            # Check if there are any manual indicators in the event JSON that do not appear in this current
            # reading of the Manual Indicators section. This implies that someone removed something from the
            # table on the wiki page and refreshed the page. Presumably this means they did not actually want
            # that indicator, so the best we can do for now is to change its status to Informational.
            # TODO: Possible improvement to this would be to search for FA Queue ACE alerts for this indicator
            # and FP them, which would also set the indicator's status to Informational.
            old_manual_indicators = [i for i in e.json['indicators'] if 'manual_indicator' in i['tags']]
            for old_indicator in old_manual_indicators:
                if not [i for i in manual_indicators if i['type'] == old_indicator['type'] and i['value'] == old_indicator['value']]:
                    try:
                        # Find the indicator's SIP ID and disable it.
                        result = sip.get('indicators?type={}&exact_value={}'.format(old_indicator['type'], urllib.parse.quote(old_indicator['value'])))
                        if result:
                            id_ = result[0]['id']
                            data = {'status': 'Informational'}
                            result = sip.put('indicators/{}'.format(id_), data)
                            logging.error('Disabled deleted "{}" manual indicator "{}" in SIP: {}'.format(old_indicator['type'], old_indicator['value'], id_))
                    except:
                        logging.exception('Error disabling deleted "{}" manual indicator "{}" in SIP'.format(old_indicator['type'], old_indicator['value']))

        """ RE-SETUP THE EVENT """

        # Parse the event.
        try:
            e.setup(manual_indicators=manual_indicators, force=True)
        except:
            logging.exception('Error refreshing Event object: {}'.format(e.json['name']))
            return

        # Get the remediation status for the e-mails in the event.
        try:
            for email in e.json['emails']:
                email['remediated'] = False
                """
                key = ''
                if email['original_recipient']:
                    key = '{}:{}'.format(email['message_id'], email['original_recipient'])
                elif len(email['to_addresses']) == 1:
                    key = '{}:{}'.format(email['message_id'], email['to_addresses'][0])

                # Continue if we were able to create the MySQL "key" value for this e-mail.
                if key:
                    # Search the ACE database for the remediation status.
                    c = ace_db.cursor()
                    query = 'SELECT * FROM remediation WHERE `key`="{}"'.format(key)
                    c.execute(query)

                    # Fetch all of the rows.
                    rows = c.fetchall()
                    for row in rows:
                        result = row[6]

                        # A successful result string in the database looks like:
                        # (200) [{"address":"*****@*****.**","code":200,"message":"success"}]
                        if '"code":200' in result and '"message":"success"' in result:
                            email['remediated'] = True
                """
        except:
            logging.exception('Error getting remediation status for e-mail.')

        """ ADD SIP STATUS OF EACH INDICATOR TO THE EVENT JSON """

        if sip:
            # Used as a cache so we don't query SIP for the same indicator.
            queried_indicators = {}

            # Query SIP to get the status of the indicators.
            logging.debug('Querying SIP for indicator statuses.')
            for i in e.json['indicators']:
                # Skip this indicator if it is whitelisted.
                if i['status'] == 'Whitelisted' or i['whitelisted']:
                    continue

                type_value = '{}{}'.format(i['type'], i['value'])

                # Continue if we haven't already processed this type/value pair indicator.
                if not type_value in queried_indicators:
                    # Get the indicator status from SIP. Ignore any indicators that were already set to Informational.
                    if not i['status'] == 'Informational':
                        result = None
                        try:
                            result = sip.get('indicators?type={}&exact_value={}'.format(i['type'], urllib.parse.quote(i['value'])))
                        except RequestError as E:
                            if 'uri too large' in str(E).lower():
                                logging.warning("414 Request-URI Too Large for indicator value: {}".format(urllib.parse.quote(i['value'])))
                        if result:
                            id_ = result[0]['id']
                            result = sip.get('indicators/{}'.format(id_))
                            i['status'] = result['status']

                    # Add the indicator to the queried cache.
                    queried_indicators[type_value] = i['status']
                # We've already queried SIP for this type/value, so just set the status.
                else:
                    i['status'] = queried_indicators[type_value]

        """ RUN ALL OF THE CLEAN INDICATOR MODULES """

        e.clean_indicators()

        """ RUN ALL OF THE EVENT DETECTION MODULES """

        # Save a copy of the old event tags to compare against to see if any were manually removed.
        old_tags = e.json['tags'][:]
        e.event_detections()

        """ GATHER UP ALL OF THE EVENT TAGS """

        # Add the wiki tags to the event tags. This ensures that tags that we add to the wiki page
        # get added to the indicators in the Indicator Summary table.
        wiki_tags = wiki.get_labels()
        e.json['tags'] += wiki_tags
        e.json['tags'] = list(set(e.json['tags']))

        # Check if the event tags have a campaign name in them.
        if 'campaign' in e.json['tags']:
            # See if any of the event tags are a valid campaign name.
            for tag in e.json['tags']:
                if tag in campaign_dict:
                    # Set the campaign name in the event JSON.
                    e.json['campaign'] = {'sip': campaign_dict[tag], 'wiki': tag}

                    # Replace any campaign tag with the "apt:" version.
                    try:
                        e.json['tags'].append('apt:{}'.format(e.json['campaign']['wiki']))
                        e.json['tags'].remove(e.json['campaign']['wiki'])
                    except:
                        pass

        # Now check if any of the wiki tags were manually removed. This can happen if we have an FP
        # event detection, so we want to make sure we don't keep applying those FP tags to the page.
        # NOTE: We only do this check if the event has NOT changed and the wiki refresh button was checked.
        if wiki_refresh:
            for tag in e.json['tags'][:]:
                if not tag in wiki_tags:
                    try:
                        e.json['tags'].remove(tag)
                        logging.info('Tag manually removed from wiki page: {}'.format(tag))
                    except:
                        pass

        """ NOTIFY SLACK OF AN INCIDENT """

        if config['slack']['enabled']:
            if 'incidents' in e.json['tags'] and not e.json['slack_notify']:
                e.json['slack_notify'] = str(datetime.datetime.now())
                data = {'text': '<!channel> :rotating_light: Possible incident detected: {}'.format(e.json['wiki_url'])}
                try:
                    slack_webhook_url = config['slack']['slack_webhook_url']
                    proxy = config['network']['proxy']
                    if slack_webhook_url:
                        if proxy:
                            requests.post(config['slack']['slack_webhook_url'], json=data, proxies={'http': proxy, 'https': proxy})
                        else:
                            requests.post(config['slack']['slack_webhook_url'], json=data)
                except:
                    e.json['slack_notify'] = ''
                    logging.exception('Unable to notify Slack of incident')

        """ UPDATE THE WIKI PAGE """

        # Refresh the wiki page using the updated JSON.
        try:
            wiki.refresh_event_page(e.json)
        except:
            logging.exception('Error refreshing wiki page: {}'.format(e.json['name']))

        # Since we updated the wiki page, add the version to the event JSON. This is used
        # so that the intel processing button can not process a wiki page that has a newer
        # version without first refreshing the page.
        e.json['wiki_version'] = wiki.get_page_version()

        # Read the indicator summary table.
        good_indicators, whitelisted_indicators = wiki.read_indicator_summary_table()

    # Write out the event JSON.
    e.write_json()

    # If the intel processing checkbox is checked...
    if wiki.is_event_ready_for_sip_processing(e.json['wiki_version']):
        logging.info('Processing the event intel: {}'.format(e.json['name']))

        # Figure out the event source.
        wiki_labels = wiki.get_labels()
        for tag in config['core']['event_sources']:
            if tag in wiki_labels:
                source = config['core']['event_sources'][tag]
                break

        # Add each good indicator into SIP.
        if sip:
            good_indicators, whitelisted_indicators = wiki.read_indicator_summary_table()
            for i in good_indicators:
                tags = sorted(list(set(i['tags'] + e.json['tags'])))
                ignore_these_tags = config['wiki']['ignore_these_tags']
                for label in ignore_these_tags:
                    try:
                        tags.remove(label)
                    except:
                        pass
                try:
                    data = {'references': [{'source': source, 'reference': wiki.get_page_url()}],
                            'status': i['status'],
                            'confidence': 'low',
                            'impact': 'low',
                            'tags': tags,
                            'type': i['type'],
                            'username': '******',
                            'value': i['value']}
                    result = sip.post('indicators', data)
                except ConflictError:
                    pass
                except:
                    logging.exception('Error when adding "{}" indicator "{}" into SIP.'.format(i['type'], i['value']))

        # Close the event in ACE.
        try:
            ace_api.update_event_status(e.json['ace_event']['id'], 'CLOSED')
            logging.warning('Closed event in ACE: {}'.format(e.json['name']))

            # Update the wiki to reflect that the event was processed.
            wiki.update_event_processed()
        except:
            logging.exception('Error when closing the event in ACE: {}'.format(e.json['name']))

    logging.info('Finished event "{0:s}" in {1:.5f} seconds.'.format(event['name'], time.time() - start_time))

def parse_page(self, events_json):
    result = []
    map_client = MapClient()
    for event in events_json['discover_events']:
        if not event['address']:
            continue
        address_dict = map_client.breakdown_address(event['address'])
        if not address_dict:
            continue
        curr_event = Event()
        curr_event.name = helpers.clean_string(event['title'])
        curr_event.description = helpers.clean_string(event['description'])
        curr_event.date = parser.parse(event['start_time'])
        curr_event.place = event['location']
        curr_event.address1 = address_dict['address1']
        curr_event.address2 = None
        curr_event.city = address_dict['city']
        curr_event.state = address_dict['state']
        curr_event.zipcode = address_dict['zipcode']
        curr_event.cost = 0 if not event['price'] else event['price']
        curr_event.link = event['ticket_url']
        curr_event.api = 'https://www.universe.com/api/v2/event_id/' + str(event['id'])
        curr_event.source = self.source
        curr_event.api_id = event['id']
        result.append(curr_event)
    return result

G_CFG.mq.mq_dict["routing_key"] = args.routing_key if args.routing_key else args.coin
if args.exchange_name:
    G_CFG.mq.mq_dict["exchange_name"] = args.exchange_name
if args.vhost:
    G_CFG.mq.mq_dict["vhost"] = args.vhost
if args.mq_host:
    G_CFG.mq.mq_dict["host"] = args.mq_host
if args.mq_user:
    G_CFG.mq.mq_dict["username"] = args.mq_user
if args.mq_password:
    G_CFG.mq.mq_dict["password"] = args.mq_password
if args.mode:
    G_CFG.coin.coin_dict["mode"] = args.mode

G_CFG.mysql.mysql_dict["db"] = args.coin
G_CFG.log.log_dict["filename"] = f"log/{args.coin}.log"
G_CFG.message.message_dict["monitor_path"] = f"log/{args.coin}.txt"

mode = G_CFG.coin.coin_dict["mode"]
if mode not in ["prod", "dev"]:
    G_LOGGER.info("Unknown run mode")
    exit()

# Manually push block data
coin_push = Event.coin_push()
coin_name = args.coin
block_num = args.block_num
count = args.count
print('Manually pushing block data, coin: {}, start_block: {}, end_block: {}'.format(
    coin_name, block_num, block_num + count - 1))
coin_push.push_sync(block_num, count)

def __init__(self, real, balance, pair):
    self.balance = balance
    self.real = real
    self.e = Event()
    self.pair = pair

async def on_completed(self):
    await self.proxy(Event.completed())

async def on_next(self, value):
    await self.proxy(Event(value))