from google.cloud import logging


def googleLoggerCall(logName, logData):
    """
    Send the data that needs to be logged to the Google Cloud Logging service.

    The data we send is normally a JSON structure (dict). However, to guard
    against the function being (mis-)used for another type of integration, we
    check whether logData is a dict or a string and call the Google logging
    client accordingly.

    :param logName: name of the log to write to
    :param logData: dict (JSON structure) or string payload to log
    :return: None
    """
    # Instantiate a Google Cloud Logging client and define the log target to
    # write the data to. The name of the log is supplied by the user as a
    # function variable when configuring the function in Oracle Cloud.
    logging_client = logging.Client()
    logger = logging_client.logger(logName)

    # We expect logData to be a dict holding a JSON structure. If it is a
    # string we use log_text instead of log_struct. This makes the logic more
    # robust and able to cope with data types other than JSON structures.
    if isinstance(logData, str):
        logger.log_text(logData)
    elif isinstance(logData, dict):
        logger.log_struct(logData)
    else:
        print("cannot determine what type of var this is")
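A minimal usage sketch, assuming the google-cloud-logging package is installed and Application Default Credentials are available; the log name and payloads below are only illustrative, not taken from the original function's configuration:

# Hypothetical log name and payloads; any log name plus a dict or str works.
googleLoggerCall("oci-fn-demo-log", {"event": "invocation", "status": "ok"})
googleLoggerCall("oci-fn-demo-log", "plain-text fallback entry")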
def upload_text_photo():
    photo = request.json['file']
    filename = request.json['filename']
    selected_language = request.json['language']
    audio_language = selected_language
    selected_language = selected_language.split('-')[0]

    name, blob_public_url = upload_photo_to_storage(photo, filename)

    client = logging.Client()
    logger = client.logger('log_name')

    text = get_text(name, logger)
    translated_text = translate_text(text, selected_language)
    audio = get_audio(translated_text, audio_language)

    audio_name = audio_language + name + ".mp3"
    audio_name, audio_url = upload_mp3_to_storage(audio, audio_name)
    audio = audio_url

    info = dict()
    info["original_text"] = text
    info["translated_text"] = translated_text
    info["audio"] = audio

    return jsonify(json.dumps(info)), 200, {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Headers': 'Content-Type',
    }
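A hypothetical client call for the handler above; the endpoint URL is an assumption (the route decorator is not shown), and base64 is only a guess at how upload_photo_to_storage expects the image to be encoded. The JSON keys match what the handler reads:

import base64
import requests

with open("menu.jpg", "rb") as fh:
    payload = {
        "file": base64.b64encode(fh.read()).decode("ascii"),  # encoding is an assumption
        "filename": "menu.jpg",
        "language": "nl-NL",
    }

# URL and port are placeholders for wherever the Flask app is served.
resp = requests.post("http://localhost:8080/upload_text_photo", json=payload)
print(resp.json())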
def stackdriver_log(sender, payload={}):
    if settings.GOOGLE_REQUEST_LOGGING is False:
        return
    if not hasattr(settings, '_gcp_logger'):
        import google.auth
        from google.cloud import logging

        google.auth.default()
        client = logging.Client()
        logger_name = '%s-api' % settings.APP_NAME
        log.debug("Enabled Stackdriver request logging.")
        settings._gcp_logger = client.logger(logger_name)
    if settings._gcp_logger is not None:
        settings._gcp_logger.log_struct(payload)
def stackdriver_log(sender, payload={}):
    if not hasattr(settings, '_gcp_logger'):
        try:
            import google.auth
            from google.cloud import logging

            google.auth.default()
            client = logging.Client()
            logger_name = '%s-api' % settings.APP_NAME
            settings._gcp_logger = client.logger(logger_name)
            log.debug("Enabled Stackdriver request logging.")
        except Exception as exc:
            log.debug("Disable Stackdriver: %s", exc)
            settings._gcp_logger = None
    if settings._gcp_logger is not None:
        settings._gcp_logger.log_struct(payload)
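A minimal call sketch, assuming a Django-style settings object with APP_NAME set (and GOOGLE_REQUEST_LOGGING = True for the variant that checks it); the payload fields are illustrative:

# Illustrative payload; any JSON-serialisable dict is accepted by log_struct.
stackdriver_log(sender=None, payload={"method": "GET", "path": "/api/health", "status": 200})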
def upload_photo():
    photo = request.json['file']
    filename = request.json['filename']
    selected_language = request.json['language']
    audio_language = selected_language
    selected_language = selected_language.split('-')[0]

    name, blob_public_url = upload_photo_to_storage(photo, filename)

    client = logging.Client()
    logger = client.logger('log_name')

    description, latitude, longitude = get_landmark(name, logger)

    # Create a Cloud Datastore client.
    datastore_client = datastore.Client()

    # The kind for the new entity.
    kind = 'Landmarks'
    # Create the Cloud Datastore key for the new entity.
    key = datastore_client.key(kind, name)

    place_id = get_place_id(description)
    formatted_address, formatted_phone_number, international_phone_number, types, website = get_details(place_id)

    wikipedia_extract = get_wikipedia_extract(description)
    wikipedia_extract = translate_text(wikipedia_extract, selected_language)

    audio = get_audio(wikipedia_extract, audio_language)
    audio_name = audio_language + description + ".mp3"
    audio_name, audio_url = upload_mp3_to_storage(audio, audio_name)
    audio = audio_url

    entity = datastore.Entity(key, exclude_from_indexes=['wikipedia_extract', 'audio'])
    entity['blob_name'] = name
    entity['image_public_url'] = blob_public_url
    entity['description'] = description
    entity['latitude'] = latitude
    entity['longitude'] = longitude
    entity['formatted_address'] = formatted_address
    entity['formatted_phone_number'] = formatted_phone_number
    entity['international_phone_number'] = international_phone_number
    entity['types'] = types
    entity['website'] = website
    entity['wikipedia_extract'] = wikipedia_extract
    entity['audio'] = audio

    # Save the new entity to Datastore.
    datastore_client.put(entity)

    info = dict()
    info["description"] = entity.get("description", "Unknown")
    info["latitude"] = entity.get("latitude", "Unknown")
    info["longitude"] = entity.get("longitude", "Unknown")
    info["url"] = entity.get("image_public_url", "Unknown")
    info["formatted_address"] = entity.get("formatted_address", "Unknown")
    info["formatted_phone_number"] = entity.get("formatted_phone_number", "Unknown")
    info["international_phone_number"] = entity.get("international_phone_number", "Unknown")
    info["types"] = entity.get("types", [])
    info["website"] = entity.get("website", "Unknown")
    info["wikipedia_extract"] = entity.get("wikipedia_extract", "Unknown")
    info["audio"] = audio

    return jsonify(json.dumps(info)), 200, {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Headers': 'Content-Type',
    }
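A read-back sketch against the 'Landmarks' kind that upload_photo() writes, assuming the same project and credentials; 'some-blob-name' is a placeholder for the blob name used as the entity key:

from google.cloud import datastore

client = datastore.Client()
key = client.key('Landmarks', 'some-blob-name')  # placeholder key name
entity = client.get(key)
if entity is not None:
    print(entity['description'], entity['image_public_url'])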
import os
import json
import datetime
import base64
import ast

import pymysql
from flask import Flask, render_template
from google.cloud import tasks_v2
from google.protobuf import timestamp_pb2
from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types
from google.cloud import logging

log_client = logging.Client()
logger = log_client.logger("cloudfunctions.googleapis.com%2Fcloud-functions")

ALLOWED_EXTENSIONS = {'xml'}

app = Flask(__name__)

bucket_config = {}
with open("xml_bucket.config.json") as fh:
    bucket_config = json.load(fh)


@app.route('/upload', methods=['POST', 'GET'])
def upload():
    return render_template('index.html', messg=mess22)
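A small, hypothetical structured entry written through the module-level logger above; the field names are illustrative and not taken from the original module:

# Illustrative fields; log_struct accepts any JSON-serialisable dict.
logger.log_struct({
    "event": "xml_upload_received",
    "allowed_extensions": sorted(ALLOWED_EXTENSIONS),
})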
from google.cloud import logging


def write_log(msg):
    # Write a plain-text entry to the 'time-tracker-log' log in Cloud Logging.
    logging_client = logging.Client()
    logger = logging_client.logger('time-tracker-log')
    logger.log_text(msg, severity='INFO')
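A minimal usage sketch, assuming Application Default Credentials are available in the environment; the message text is illustrative:

write_log("time-tracker job started")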
import os
import sys
import glob
import string
import random

import redis
from flask import Flask
from gcloud import storage, pubsub
from google.cloud import logging

PROJECT_ID = 'transcode-159215'
TOPIC = 'projects/{}/topics/message'.format(PROJECT_ID)

logclient = logging.Client()
logger = logclient.logger("ffmpeg-pool")

app = Flask(__name__)
app.config["SECRET_KEY"] = "test"
app.debug = True


def publish(msg):
    # Publish msg to the 'ffmpeg-pool' Pub/Sub topic, creating it if needed.
    pubsub_client = pubsub.Client(PROJECT_ID)
    topic = pubsub_client.topic("ffmpeg-pool")
    if not topic.exists():
        topic.create()
    topic.publish(msg)
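A minimal publish sketch; the message content is illustrative, and whether the legacy gcloud Topic.publish() expects text or bytes depends on the library version, so adjust the payload type accordingly:

# Hypothetical message identifying a transcode job.
publish("transcode:sample-video.mp4")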