def index():
    """Receives messages from a push subscription from Pub/Sub.

    Parses the Azure DevOps event out of the message and inserts it into
    BigQuery.

    Returns:
        An empty body with HTTP 204 so Pub/Sub acks the push delivery
        even when the insert fails (failures are logged, not retried).

    Raises:
        Exception: If the request has no JSON payload or is not a
            well-formed Pub/Sub push message.
    """
    event = None
    envelope = request.get_json()

    # Check that data has been posted
    if not envelope:
        raise Exception("Expecting JSON payload")

    # Check that message is a valid pub/sub message
    if "message" not in envelope:
        raise Exception("Not a valid Pub/Sub Message")

    msg = envelope["message"]

    if "attributes" not in msg:
        raise Exception("Missing pubsub attributes")

    try:
        event = process_azuredevops_event(msg)
        # Only insert when the parser produced an event; unrecognized
        # messages are dropped silently.
        if event:
            shared.insert_row_into_bigquery(event)
    except Exception as e:
        # Swallow the error so Pub/Sub does not redeliver forever; emit a
        # structured log entry instead. The line must be the bare JSON
        # document (no "EXCEPTION raised" prefix) so the log ingester can
        # parse the "severity" field.
        entry = {
            "severity": "WARNING",
            "msg": "Data not saved to BigQuery",
            "errors": str(e),
            "json_payload": envelope,
        }
        # flush=True avoids losing the log line to stdout buffering.
        print(json.dumps(entry), flush=True)

    return "", 204
def index():
    """Handle a Pub/Sub push delivery carrying a GitHub webhook.

    Validates the push envelope, parses the GitHub event out of the
    message attributes when present, and writes the result to BigQuery.
    Always acknowledges the push with an empty 204 response; insert
    failures are logged rather than retried.
    """
    payload = request.get_json()

    # Reject requests that are not well-formed Pub/Sub push envelopes.
    if not payload:
        raise Exception("Expecting JSON payload")
    if "message" not in payload:
        raise Exception("Not a valid Pub/Sub Message")

    message = payload["message"]
    if "attributes" not in message:
        raise Exception("Missing pubsub attributes")

    event = None
    try:
        attributes = message["attributes"]

        # Webhook HTTP headers are forwarded as a JSON string attribute;
        # only GitHub-originated deliveries are parsed into an event.
        if "headers" in attributes:
            parsed_headers = json.loads(attributes["headers"])
            if "X-Github-Event" in parsed_headers:
                event = process_github_event(parsed_headers, message)

        shared.insert_row_into_bigquery(event)
    except Exception as err:
        failure = {
            "severity": "WARNING",
            "msg": "Data not saved to BigQuery",
            "errors": str(err),
            "json_payload": payload,
        }
        print(json.dumps(failure))
        # Flush the stdout to avoid log buffering.
        sys.stdout.flush()

    return "", 204
def index():
    """Receives messages from a push subscription from Pub/Sub.

    Parses the Cloud Build message and inserts it into BigQuery. Two
    message shapes are handled: build status messages carry a "buildId"
    attribute; anything else is treated as a Cloud Build notification.

    Returns:
        An empty body with HTTP 204 so Pub/Sub acks the push delivery
        even when the insert fails (failures are logged, not retried).

    Raises:
        Exception: If the request has no JSON payload or is not a
            well-formed Pub/Sub push message.
    """
    event = None
    envelope = request.get_json()
    # flush=True so the debug line is not lost to stdout buffering
    # (the sibling handlers flush explicitly; this one previously
    # did not flush at all).
    print(f"envelope received: {envelope}", flush=True)

    # Check that data has been posted
    if not envelope:
        raise Exception("CloudBuildParser: Expecting JSON payload")

    # Check that message is a valid pub/sub message
    if "message" not in envelope:
        raise Exception("Not a valid Pub/Sub Message")

    msg = envelope["message"]

    if "attributes" not in msg:
        raise Exception("Missing pubsub attributes")

    try:
        attr = msg["attributes"]

        # Process Cloud Build event
        if "buildId" in attr:
            event = process_cloud_build_event(attr, msg)
        else:
            event = process_cloud_build_notification(msg)

        shared.insert_row_into_bigquery(event)
    except Exception as e:
        # Log a structured warning instead of failing the request, so
        # Pub/Sub does not redeliver a message we cannot parse.
        entry = {
            "severity": "WARNING",
            "msg": "Data not saved to BigQuery",
            "errors": str(e),
            "json_payload": envelope,
        }
        print(json.dumps(entry), flush=True)

    return "", 204
def index():
    """Receives messages from a push subscription from Pub/Sub.

    Parses the message via the (mock) source-event parser and inserts
    the result into BigQuery. Always acknowledges the push with an
    empty 204 response; insert failures are logged rather than retried.
    """
    body = request.get_json()

    # Reject anything that is not a well-formed Pub/Sub push envelope.
    if not body:
        raise Exception("Expecting JSON payload")
    if "message" not in body:
        raise Exception("Not a valid Pub/Sub Message")

    message = body["message"]
    if "attributes" not in message:
        raise Exception("Missing pubsub attributes")

    event = None
    try:
        # [TODO: Replace mock function below]
        event = process_new_source_event(message)

        # [Do not edit below]
        shared.insert_row_into_bigquery(event)
    except Exception as err:
        log_entry = {
            "severity": "WARNING",
            "msg": "Data not saved to BigQuery",
            "errors": str(err),
            "json_payload": body,
        }
        print(json.dumps(log_entry))
        # Flush the stdout to avoid log buffering.
        sys.stdout.flush()

    return "", 204