Example #1
def init(app):
    logging.info('Initializing application')
    app.config['db'] = firestore.Client(project=PROJECT)

    app.config['FIREBASE_PROJECT_ID'] = PROJECT
    app.config['FIREBASE_AUTH_SIGN_IN_OPTIONS'] = PROVIDERS

    if os.getenv('GAE_ENV', '').startswith('standard') or os.getenv(
            'CLOUD_RUN', '') == "True":
        app.secret_key = get_secret(PROD_FLASK_SECRET)
        app.config['FIREBASE_API_KEY'] = get_secret(FIREBASE_API_KEY)
        app.config['IS_DEV'] = False
        app.debug = False
        app.config['UPLOAD_BUCKET'] = UPLOAD_BUCKET
        client = google.cloud.logging.Client()
        client.setup_logging()
        logging.basicConfig(level=logging.INFO)
    else:
        app.config['IS_DEV'] = True
        app.config['FIREBASE_API_KEY'] = "dev"
        if os.getenv('DEV_FLASK_SECRET'):
            app.secret_key = os.getenv('DEV_FLASK_SECRET')
        else:
            with open(DEV_FLASK_SECRET, 'rb') as secret_file:
                app.secret_key = secret_file.read()
        app.debug = True
        app.config['UPLOAD_BUCKET'] = "dev_uploads"
        logging.basicConfig(level=logging.INFO)
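A minimal usage sketch, not part of the original example: init() is the kind of helper that would be called from a Flask application factory. The create_app name below is hypothetical.

from flask import Flask


def create_app():
    app = Flask(__name__)
    init(app)  # selects prod vs. dev settings based on GAE_ENV / CLOUD_RUN
    return app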
Example #2
import logging

import google.cloud.logging


def setup_logging():
    logclient = google.cloud.logging.Client()
    logclient.get_default_handler()
    logclient.setup_logging()
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(filename)s %(levelname)s: %(message)s")
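After setup_logging() attaches the Cloud Logging handler to the root logger, ordinary stdlib logging calls are forwarded to Cloud Logging. A hedged sketch, assuming Application Default Credentials are available:

setup_logging()
logging.info('This record goes to Cloud Logging as well as the local handler.')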
Example #3
def connectCloudStuff(agent, do_cloud_logging):
    """ Connects to logging and firestore DB and returns the db connection"""

    # Initialize Google credentials
    credKey = "GOOGLE_APPLICATION_CREDENTIALS"
    if credKey not in os.environ:
        print("Creds:", agent.googleCredentialFile)
        os.environ[credKey] = agent.googleCredentialFile

    cred = credentials.Certificate(os.environ.get(credKey))
    firebase_admin.initialize_app(cred, {'projectId': agent.googleProjectId})
    db = firestore.client()

    # Initialize logging
    if do_cloud_logging:
        # logger = logging_client.logger("tkbuild-agent-" + agent.name )
        logging_client = google.cloud.logging.Client()

        logname = "tkbuild-agent-" + agent.name
        print("Log name is :", logname)

        logging_handler = google.cloud.logging.handlers.CloudLoggingHandler(
            logging_client, name=logname)
        google.cloud.logging.handlers.setup_logging(logging_handler)

        # Also echo to stdout
        rootLogger = logging.getLogger()
        #rootLogger.setLevel(logging.DEBUG)

        stdoutHandler = logging.StreamHandler(sys.stdout)
        #stdoutHandler.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(levelname)s: %(message)s')
        stdoutHandler.setFormatter(formatter)
        rootLogger.addHandler(stdoutHandler)

    else:
        print("Cloud logging is off")
        # Just run with stdout logging for testing
        logging.basicConfig(level=logging.INFO)

    # logging.debug("log debug")
    # logging.info("log info")
    # logging.warning("log warn")
    # logging.error("log error")

    logging.info(f"Agent: {agent.name}: {agent.desc}")
    testRepoProj = None
    for p in agent.projects.values():

        fetchRepoUrl = "(No Fetch Step Defined)"
        pfetch = p.getFetchWorkstep()
        if pfetch:
            fetchRepoUrl = pfetch.repoUrl
        logging.info(f"Project: {p.projectId} -- {fetchRepoUrl}")

    return db
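A hypothetical call site; the agent object and the flag come from the surrounding tkbuild configuration and are not defined in this excerpt:

db = connectCloudStuff(agent, do_cloud_logging=True)
jobs = db.collection('jobs')  # 'jobs' is an assumed collection name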
Example #4
# The opening of this fragment is missing; USE_CLOUD_LOGGING is a hypothetical
# flag assumed here only so the "else:" branch below has an "if" to pair with.
if USE_CLOUD_LOGGING:
    import google.cloud.logging

    from google.cloud import firestore

    # Instantiates a client
    logging_client = google.cloud.logging.Client()

    # Connects the logger to the root logging handler; by default this captures
    # all logs at INFO level and higher
    logging_client.setup_logging()
    logger = logging_client.logger(__name__)
    logger.log_struct = log_struct_flatten(logger.log_struct)
else:
    # must be imported after google.cloud.logging
    import logging
    import types
    from pprint import pformat
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    def log_text(self, text, **kw):
        level = logging.INFO
        if 'severity' in kw:
            level = getattr(logging, kw.pop('severity'))
        self.log(level, text, **kw)

    logger.log_text = log_text.__get__(logger)

    def log_struct(self, info, **kw):
        logger.log_text(pformat(info), **kw)

    logger.log_struct = log_struct.__get__(logger)
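Either branch leaves a logger with the same log_text / log_struct surface, so calling code does not need to know whether Cloud Logging is in use. A hedged usage sketch:

logger.log_text('worker started', severity='WARNING')
logger.log_struct({'event': 'job_done', 'items': 3})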
Example #5
import logging

from flask import Flask

import notifications.alter as alter
from rest.developer import developer_api
from rest.health_check import health_check_api
from rest.language import language_api
from rest.repository import repository_api
from rest.search import search_api

app = Flask(__name__)
app.register_blueprint(health_check_api)
app.register_blueprint(developer_api)
app.register_blueprint(repository_api)
app.register_blueprint(language_api)
app.register_blueprint(search_api)

logging.basicConfig(level=logging.DEBUG, format='%(message)s')
requests_logging = logging.getLogger("urllib3")
requests_logging.setLevel(logging.CRITICAL)
requests_logging.propagate = False


@app.errorhandler(Exception)
def handler_requests_exceptions(error):
    alter.to_slack({'text': 'Exception: {}'.format(error)})
    return 'Oops, something went wrong', 500


@app.after_request
def apply_headers_and_status_code(response):
    response.headers['Content-Type'] = 'application/json'
    return response
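A hedged sketch for running this app locally; in production a WSGI server such as Gunicorn would typically serve it, and the port below is an assumption:

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)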
Example #6
def logging_filter(record):
    """
    Filter logs so that only records from this module are shown.
    :param record:
    :return:
    """
    return 'discord_dictionary_bot' in record.name or 'discord_dictionary_bot' in record.pathname


def gcp_logging_filter(record):
    return 'google.cloud.logging_v2.handlers.transports.background_thread' not in record.name


# Set up logging
logging.basicConfig(
    format='%(asctime)s [%(name)s] [%(levelname)s] %(message)s',
    level=logging.DEBUG,
    datefmt='%m/%d/%Y %H:%M:%S')
logging.getLogger().handlers[0].addFilter(gcp_logging_filter)


def try_read_token(token_or_path: str) -> str:
    """
    Try to read from the given file. If the file can be read, return the file contents. Otherwise, return the argument.
    :param token_or_path:
    :return:
    """
    try:
        with open(token_or_path) as file:
            return file.read()
    except IOError:
        pass  # Ignore and assume the argument is a token string, not a file path
    return token_or_path
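A hypothetical call showing the point of try_read_token: the same argument may be a token string or a path to a file holding one. The environment variable name is an assumption:

import os

token = try_read_token(os.getenv('DISCORD_BOT_TOKEN', ''))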
Example #7
import json

from elasticsearch import Elasticsearch
from kafka import KafkaConsumer, KafkaProducer

from constants import *

es = Elasticsearch(
    # hosts=[{'host': ES_HOST, 'port': ES_PORT}]    
)

# Aliased so the stdlib "import logging" below does not shadow this module.
from google.cloud import logging as cloud_logging

logging_client = cloud_logging.Client()
# logger = logging_client.logger(LOGGER_NAME)
logging_client.get_default_handler()
logging_client.setup_logging()

import logging

logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG)


producer = KafkaProducer(bootstrap_servers=KAFKA_HOST,
                         key_serializer=lambda m: m.encode('utf-8'),
                         value_serializer=lambda m: json.dumps(m).encode('ascii'))
# print("KAFKA_OCR_FILE_TOPIC", KAFKA_OCR_FILE_TOPIC)
consumer = KafkaConsumer(KAFKA_OCR_FILE_TOPIC,
                         # auto_offset_reset='earliest',
                         bootstrap_servers=KAFKA_HOST)

for msg in consumer:
    logging.info("Received a new message in "+str(KAFKA_OCR_FILE_TOPIC)+" topic in OCR worker")
    print('Message key:', msg.key.decode('utf-8'))
    uuid = msg.key.decode('utf-8')
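For reference, a message that this consumer loop would pick up could be published with the serializers configured above; the key and payload here are purely illustrative:

producer.send(KAFKA_OCR_FILE_TOPIC,
              key='123e4567-e89b-12d3-a456-426614174000',
              value={'status': 'queued'})
producer.flush()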
Example #8
    doc='/doc/',
    version='1.0',
    default="EpiPro",  # Default namespace
    title="EpiPro REST API Documentation",  # Documentation Title
    description="This is a EpiPro App REST API.\r\n SENG3011 workshop project"
)  # Documentation Description

CORS(app)
app.register_blueprint(blueprint)

try:
    # This is used when running locally only. When deploying to Google App
    # Engine, a webserver process such as Gunicorn will serve the app. This
    # can be configured by adding an `entrypoint` to app.yaml.
    logging.basicConfig(filemode='w',
                        format='%(asctime)s - %(message)s',
                        datefmt='%d-%b-%y %H:%M:%S',
                        level=logging.INFO)
    logging.info('LET THE GAMES BEGIN! API STARTS')
    logging.info('==========================================')
    logger = logging.getLogger('werkzeug')
    handler = logging.FileHandler('./log/Api_log.log')
    logger.addHandler(handler)
    # Also add the handler to Flask's logger for cases
    #  where Werkzeug isn't used as the underlying WSGI server.
    app.logger.addHandler(handler)
except BaseException:
    pass  # Logging setup is best-effort; ignore any failure (e.g. a missing ./log directory)

client = MongoClient(config.MONGO_URI, config.MONGO_PORT)
db = client[config.MONGO_DB]
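A hedged sketch of using the Mongo handle created above; the collection name is an assumption rather than something taken from the original code:

reports = db['reports']
logging.info('Connected to %s (%d reports stored)', config.MONGO_DB,
             reports.count_documents({}))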
Example #9
import base64
from datetime import datetime
from os import environ
import re
import sys
import google.api_core.exceptions
import google.cloud.logging
from google.cloud import bigquery
from pytz import utc

from flask import Flask, request
import json

import logging

logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)

########################################################################################
# FLASK WEB SERVICE - TO USE WITH CLOUD RUN/DOCKER

app = Flask(__name__)


@app.route("/", methods=['GET', 'POST'])
def index():
    '''Main function called by Pub/Sub subscription'''

    logger.info("****** ROUTE FUNCTION RUNNING********")
    if request.method == 'GET':
        logger.info("****** GET METHOD TRIGGERED ********")