def push_event_for_push(push, recipients, title, type, extras):
    """Send a JPush notification to *recipients* and record one DB event each.

    Args:
        push: a jpush push object to configure and send.
        recipients: iterable of numeric user ids; each becomes an alias of
            the form "u" + 14 zero-padded digits.
        title: notification text; encoded to UTF-8 for the jpush payloads.
        type: event type value stored in the DB (parameter name kept for
            API compatibility even though it shadows the builtin).
        extras: dict of extra payload; sent with the push and stored as JSON.

    Side effects: sends the push (best-effort — failures are logged, not
    raised) and inserts one event row per recipient with the returned
    msg_id, or an empty string when the send failed.
    """
    push.audience = jpush.audience(
        {'alias': ['u%014d' % (recipient) for recipient in recipients]})
    # Lazy %-args: the string is only formatted if the record is emitted.
    logging.info('title=%s', title)
    ios_msg = jpush.ios(alert=title.encode('utf-8'), badge='+1', extras=extras)
    android_msg = jpush.android(alert=title.encode('utf-8'), extras=extras)
    push.notification = jpush.notification(
        alert=title.encode('utf-8'), android=android_msg, ios=ios_msg)
    push.platform = jpush.all_
    push.options = {'apns_production': config.jpush.production}
    msg_id = ''
    try:
        rsp = push.send()
        msg_id = rsp.payload["msg_id"]
        logging.info(rsp)
    except Exception:
        # Best-effort push: log with traceback (the original
        # logging.warning(e) dropped it), then fall through so events are
        # still persisted with an empty msg_id.
        logging.exception('push.send failed')
    for recipient in recipients:
        db.insert_event(recipient, msg_id, type, json.dumps(extras), 0)
def parse_logs_to_events(log_lines, regex=SYSLOG_RE):
    """Parse syslog-style lines and persist each match as a DB event.

    Lines that do not match *regex* are skipped. Returns the list of
    event ids produced by db.insert_event, in input order.
    """
    inserted = []
    for entry in log_lines:
        match = re.match(regex, entry)
        if match is None:
            continue
        # Exactly five capture groups are expected from the pattern.
        date_s, time_s, device, code, message = match.groups()
        inserted.append(db.insert_event(date_s, time_s, device, code, message))
    return inserted
def parse_logs_to_events(log_lines, regex=SYSLOG_RE):
    """Match each log line against *regex* and store the captures as events.

    Non-matching lines are ignored. Returns the ids of the events
    inserted into the DB, one per matching line.
    """
    event_ids = []
    for raw_line in log_lines:
        m = re.match(regex, raw_line)
        if not m:
            # Unparseable line: skipped, same as the original behavior.
            continue
        (datestamp, timestamp, device_name,
         error_code, error_message) = m.groups()
        event_ids.append(
            db.insert_event(datestamp, timestamp, device_name,
                            error_code, error_message))
    return event_ids
def webhook_handler():
    """Handle an incoming GitHub webhook request.

    Verifies the request signature, records the event type in the DB,
    and — when the event warrants it — kicks off a PR fetch. Always
    responds with an empty-body 200.
    """
    # Aborts this function early if signature does not match.
    enforce_signature(request)

    kind = request.headers['X-GitHub-Event']
    record_id = db.insert_event(kind)

    body = json.loads(request.get_data())
    fetch_needed = event_should_trigger_fetch(kind, body, record_id)
    print("Should fetch?", fetch_needed)

    if fetch_needed:
        # This is idempotent; if there is already an ongoing fetch,
        # it will just return without doing anything.
        print("About to re-fetch...")
        result = long_git_operations.do_pr_fetch()
        print("Finished re-fetch with response:", result)

    return "", 200, None
def func(event):
    """Insert *event* into storage unless one with the same id already exists.

    Returns the event unchanged in either case.
    """
    if not get_event(event.id):
        insert_event(event.id, event)
    return event
import db
import fb

# Drain the processing queue: for each queued id, look up its Facebook
# event profile, carry over the queued keywords, store the merged
# profile, then delete the queue entry and advance to the next one.
next_id = db.get_next_queue()
while "_id" in next_id:
    try:
        print(next_id["_id"])
        profile = fb.get_event_profile(next_id["_id"])
        profile["keywords"] = next_id["keywords"]
        db.insert_event(profile)
        db.delete_queue(next_id["_id"])
        next_id = db.get_next_queue()
    except Exception:
        # Bug fix: the original bare `except:` did not break, so a
        # persistent failure re-entered the loop with an unchanged
        # next_id and spun forever printing "Finished". It also caught
        # KeyboardInterrupt/SystemExit; catch Exception and stop instead.
        print("Finished")
        break