Example #1
def start_app():
    # If running in Google Cloud Run, use cloud logging
    if "K_SERVICE" in os.environ:
        # Setup Google Cloud logging
        # By default this captures all logs at INFO level and higher
        log_client = google.cloud.logging.Client()
        log_client.get_default_handler()
        log_client.setup_logging()
        logging.info("Using google cloud logging")
    else:
        logging.getLogger().setLevel(logging.INFO)
        logging.info("Using standard logging")

    logging.info("Starting app")
    data.Start()
    return app
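This snippet is excerpted from a larger module, so its imports and the app / data objects are defined elsewhere. A minimal, self-contained sketch of the same Cloud-Run-aware bootstrap (the Flask app and the data.Start() call are omitted; function and message names are illustrative):

import logging
import os

import google.cloud.logging


def configure_logging():
    # Cloud Run sets the K_SERVICE environment variable, so use it to choose
    # between Cloud Logging and plain local logging.
    if "K_SERVICE" in os.environ:
        client = google.cloud.logging.Client()
        # Attaches a Cloud Logging handler to the root logger (INFO and higher).
        client.setup_logging()
        logging.info("Using google cloud logging")
    else:
        logging.basicConfig(level=logging.INFO)
        logging.info("Using standard logging")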
Example #2
File: cloud.py Project: joeld42/tk_build
def connectCloudStuff(agent, do_cloud_logging):
    """ Connects to logging and firestore DB and returns the db connection"""

    # Initialize google stuff
    credKey = "GOOGLE_APPLICATION_CREDENTIALS"
    if not credKey in os.environ:
        print("Creds:", agent.googleCredentialFile)
        os.environ[credKey] = agent.googleCredentialFile

    cred = credentials.Certificate(os.environ.get(credKey))
    firebase_admin.initialize_app(cred, {'projectId': agent.googleProjectId})
    db = firestore.client()

    # Initialize logging
    if do_cloud_logging:
        # logger = logging_client.logger("tkbuild-agent-" + agent.name )
        logging_client = google.cloud.logging.Client()

        logname = "tkbuild-agent-" + agent.name
        print("Log name is :", logname)

        logging_handler = google.cloud.logging.handlers.CloudLoggingHandler(
            logging_client, name=logname)
        google.cloud.logging.handlers.setup_logging(logging_handler)

        # Also echo to stdout
        rootLogger = logging.getLogger()
        #rootLogger.setLevel(logging.DEBUG)

        stdoutHandler = logging.StreamHandler(sys.stdout)
        #stdoutHandler.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(levelname)s: %(message)s')
        stdoutHandler.setFormatter(formatter)
        rootLogger.addHandler(stdoutHandler)

    else:
        print("Cloud logging is off")
        # Just run with stdout logging for testing
        logging.basicConfig(level=logging.INFO)

    # logging.debug("log debug")
    # logging.info("log info")
    # logging.warning("log warn")
    # logging.error("log error")

    logging.info(f"Agent: {agent.name}: {agent.desc}")
    testRepoProj = None
    for p in agent.projects.values():

        fetchRepoUrl = "(No Fetch Step Defined)"
        pfetch = p.getFetchWorkstep()
        if pfetch:
            fetchRepoUrl = pfetch.repoUrl
        logging.info(f"Project: {p.projectId} -- {fetchRepoUrl}")

    return db
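A hedged sketch of how this helper might be invoked; the SimpleNamespace agent below is purely illustrative and only mirrors the attributes the function reads (a real tk_build agent is built from the project's own configuration, and valid credentials would be needed for the Firestore call to succeed):

from types import SimpleNamespace

agent = SimpleNamespace(
    name="build01",                              # illustrative agent name
    desc="example build agent",
    googleProjectId="my-gcp-project",            # assumed GCP project id
    googleCredentialFile="/path/to/creds.json",  # assumed service-account key path
    projects={},                                 # no projects configured in this sketch
)

db = connectCloudStuff(agent, do_cloud_logging=False)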
Example #3
def logging_handler(client):
    # [START create_default_handler]
    import logging
    handler = client.get_default_handler()
    cloud_logger = logging.getLogger('cloudLogger')
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    cloud_logger.error('bad news')
    # [END create_default_handler]

    # [START create_cloud_handler]
    from google.cloud.logging.handlers import CloudLoggingHandler
    handler = CloudLoggingHandler(client)
    cloud_logger = logging.getLogger('cloudLogger')
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    cloud_logger.error('bad news')
    # [END create_cloud_handler]

    # [START create_named_handler]
    handler = CloudLoggingHandler(client, name='mycustomlog')
Example #4
def logging_handler(client):
    # [START create_default_handler]
    import logging

    handler = client.get_default_handler()
    cloud_logger = logging.getLogger("cloudLogger")
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    cloud_logger.error("bad news")
    # [END create_default_handler]

    # [START create_cloud_handler]
    from google.cloud.logging.handlers import CloudLoggingHandler
    from google.cloud.logging_v2.handlers import setup_logging

    handler = CloudLoggingHandler(client)
    setup_logging(handler)
    # [END create_cloud_handler]

    # [START create_named_handler]
    handler = CloudLoggingHandler(client, name="mycustomlog")
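The example is truncated right after the named handler is created. A plausible continuation, mirroring the pattern shown just above (this is an assumption, not part of the original source), would attach it the same way:

setup_logging(handler)
cloud_logger = logging.getLogger("cloudLogger")
cloud_logger.setLevel(logging.INFO)
cloud_logger.error("bad news")  # written to the "mycustomlog" log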
Example #5
location = ''
# Payload file name that user uploads
payload_name = ''
# Name of PubSub host topic (unique)
topic_name = ''
# Name of PubSub host subscription (unique)
sub_name = ''
# User defined name of a particular experiment
experimentName = ''
# Where the mock ransomware payload is directed to begin execution
rootPairString = '-root=.\\files'
# Maximum number of retries allowed for a test before marked as failed
max_retries = 3
# Logging object
logger = None
# PubSub Host Subscriber object (pulls messages off sub_name)
subscriber = None
# PubSub Host Publisher object (publishes messages to various host VM topics)
publisher = None
# Future corresponding to asynchronous subscription callback (pulling messages off sub_name published by various VMs)
future = None

#Code used to setup GCP logger START
client = google.cloud.logging.Client()
handler = CloudLoggingHandler(client)
cloud_logger = logging.getLogger('cloudLogger')
#NOTE: Can set minimum log level here
cloud_logger.setLevel(logging.INFO)  # normally defaults to WARN
cloud_logger.addHandler(handler)
logger = cloud_logger
#Code used to setup GCP logger END
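As the comment above notes, a fresh logger inherits the root logger's default WARNING threshold, so without the setLevel(logging.INFO) call any logger.info(...) records would be dropped before reaching the Cloud Logging handler. A quick standard-library check (no Google Cloud dependency) illustrates this:

import logging

demo = logging.getLogger("levelDemo")
print(logging.getLevelName(demo.getEffectiveLevel()))  # WARNING (inherited from root)
demo.setLevel(logging.INFO)
print(logging.getLevelName(demo.getEffectiveLevel()))  # INFO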
Example #6
import notifications.alter as alter
from rest.developer import developer_api
from rest.health_check import health_check_api
from rest.language import language_api
from rest.repository import repository_api
from rest.search import search_api

app = Flask(__name__)
app.register_blueprint(health_check_api)
app.register_blueprint(developer_api)
app.register_blueprint(repository_api)
app.register_blueprint(language_api)
app.register_blueprint(search_api)

logging.basicConfig(level=logging.DEBUG, format='%(message)s')
requests_logging = logging.getLogger("urllib3")
requests_logging.setLevel(logging.CRITICAL)
requests_logging.propagate = False


@app.errorhandler(Exception)
def handler_requests_exceptions(error):
    alter.to_slack({'text': 'Exception: {}'.format(error)})
    return 'Oops, something went wrong', 500


@app.after_request
def apply_headers_and_status_code(response):
    response.headers['Content-Type'] = 'application/json'
    return response
Example #7
def main():
    # Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--discord-token',
        help=
        'Token to use when running the bot. You can either use the raw token string or a path to a text file containing the token.',
        dest='discord_bot_token',
        default='discord_token.txt')
    parser.add_argument('--ffmpeg-path',
                        help='Path to ffmpeg executable.',
                        dest='ffmpeg_path',
                        default='ffmpeg')
    parser.add_argument(
        '--google-credentials-path',
        help='Path to Google application credentials JSON file.',
        dest='google_credentials_path',
        default='google_credentials.json')
    parser.add_argument(
        '--dictionary-api',
        help=
        'A list of dictionary API\'s to use for fetching definitions. These should be in order of priority and separated by comma\'s. Available API\'s are'
        '\'google\', \'owlbot\', \'webster\', and \'rapid-words\'. Some API\'s require tokens that must be provided with the appropriate arguments.',
        dest='dictionary_api',
        default='google')
    parser.add_argument(
        '--owlbot-api-token',
        help=
        'The token to use for the Owlbot dictionary API. You can use either the raw token string or a path to a text file containing the token.',
        dest='owlbot_api_token',
        default='owlbot_api_token.txt')
    parser.add_argument(
        '--webster-api-token',
        help=
        'The token to use for the Merriam Webster dictionary API. You can use either the raw token string or a path to a text file containing the token.',
        dest='webster_api_token',
        default='webster_api_token.txt')
    parser.add_argument(
        '--rapid-words-api-token',
        help=
        'The token to use for the RapidAPI WordsAPI dictionary API. You can use either the raw token string or a path to a text file containing the token.',
        dest='rapid_words_api_token',
        default='rapid_words_api_token.txt')
    args = parser.parse_args()

    # Set Google API credentials
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = args.google_credentials_path

    # Set up GCP logging
    gcp_logging_client = google.cloud.logging.Client()
    gcp_logging_handler = CloudLoggingHandler(gcp_logging_client,
                                              name='discord-dictionary-bot')
    gcp_logging_handler.addFilter(gcp_logging_filter)
    logging.getLogger().addHandler(gcp_logging_handler)

    # Check which dictionary API we should use
    dictionary_apis = []
    for name in args.dictionary_api.split(','):

        if name == 'google':
            dictionary_apis.append(UnofficialGoogleAPI())
        elif name == 'owlbot':

            if 'owlbot_api_token' not in args:
                print(
                    f'You must specify an API token with --owlbot-api-token to use the owlbot dictionary API!'
                )
                return

            # Read owlbot API token from file
            owlbot_api_token = try_read_token(args.owlbot_api_token)

            dictionary_apis.append(OwlBotDictionaryAPI(owlbot_api_token))

        elif name == 'webster':

            if 'webster_api_token' not in args:
                print(
                    f'You must specify an API token with --webster-api-token to use the Merriam Webster dictionary API!'
                )
                return

            # Read API token from file
            webster_api_token = try_read_token(args.webster_api_token)

            dictionary_apis.append(MerriamWebsterAPI(webster_api_token))

        elif name == 'rapid-words':

            if 'rapid_words_api_token' not in args:
                print(
                    f'You must specify an API token with --rapid-words-api-token to use the Rapid API WordsAPI dictionary API!'
                )
                return

            # Read API token from file
            rapid_words_api_token = try_read_token(args.rapid_words_api_token)

            dictionary_apis.append(RapidWordsAPI(rapid_words_api_token))

        else:
            print(f'Invalid dictionary API: {args.dictionary_api}')
            return

    # Start client
    bot = DiscordBotClient(BackupDictionaryAPI(dictionary_apis),
                           args.ffmpeg_path)
    bot.run(try_read_token(args.discord_bot_token))
Example #8
def process_chunk(output, group):
    name = multiprocessing.current_process().name
    logger = logging.getLogger("ltv/{}".format(name))
    try:
        logger.debug("Processing a chunk to {}".format(output))
Example #9
import google.cloud.logging
import logging
import os

from battleforcastile_auth.custom_formatter import CustomFormatter

logger = logging.getLogger()
logger.setLevel(logging.INFO)

if os.getenv('PRODUCTION_MODE'):
    client = google.cloud.logging.Client()
    handler = client.get_default_handler()
    handler.setFormatter(CustomFormatter())
    logger.addHandler(handler)

Example #10
def bq_to_yahoo(
    event: dict,
    content,
) -> bool:
    # logging
    logging_client = google.cloud.logging.Client()
    handler = CloudLoggingHandler(logging_client)
    cloud_logger = logging.getLogger('cloudLogger')
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    # error reporting
    error_reporting_client = error_reporting.Client()

    # start
    func_name = sys._getframe().f_code.co_name
    cloud_logger.info('%s start.' % (func_name))

    # environment variable
    project_id = os.environ.get('project_id', 'all-project-264506')
    bq_dataset_id = os.environ.get('bq_dataset_id', 'mk_demo_project')
    start_date = os.environ.get('start_date', '20200701')
    end_date = os.environ.get('end_date', '20200731')
    bq_query = os.environ.get(
        'bq_query', 'SELECT segmentId, clientId, idfa, adid ' +
        'FROM `{project_id}.{bq_dataset_id}.yahoo_demo1` ' +
        'WHERE created_at BETWEEN {start_date} AND {end_date}').format(
            project_id=project_id,
            bq_dataset_id=bq_dataset_id,
            start_date=start_date,
            end_date=end_date,
        )

    try:
        success_count = 0
        # create model
        ebty = ExportBqDataToY(
            project_id,
            bq_dataset_id,
        )
        # get data from BigQuery
        bq_data = ebty.get_data_from_bq(bq_query)

        bq_data_length = len(list(bq_data))

        # send data by API
        for row in bq_data:
            url_param_idfa = {}
            url_param_adid = {}
            url_param_gclid = {}
            try:
                if row.idfa != '':
                    # IDFA
                    url_param_idfa['referrer'] = 'idfa_referrer'
                    url_param_idfa['key'] = 'idfa'
                    url_param_idfa['value'] = row.idfa
                    url_param_idfa['flag'] = str(row.segmentId)
                    ebty.send_api(url_param_idfa)
                if row.adid != '':
                    # AAID
                    url_param_adid['referrer'] = 'adid_referrer'
                    url_param_adid['key'] = 'adid'
                    url_param_adid['value'] = row.adid
                    url_param_adid['flag'] = str(row.segmentId)
                    ebty.send_api(url_param_adid)
                # GA client ID
                url_param_gclid['referrer'] = 'gaid_referrer'
                url_param_gclid['key'] = 'ga_client_id'
                url_param_gclid['value'] = row.clientId
                url_param_gclid['flag'] = str(row.segmentId)
                ebty.send_api(url_param_gclid)
            except Exception as e:
                msg = '%s: %s.' % (__file__, e)
                cloud_logger.error(msg)
                error_reporting_client.report(msg)
                continue
            else:
                success_count += 1
            # wait
            sleep(0.01)

    except Exception as e:
        msg = '%s: %s.' % (__file__, e)
        cloud_logger.error(msg)
        error_reporting_client.report(msg)
        return False

    # end
    cloud_logger.info('total send count:%d, success send count:%d.' % (
        bq_data_length,
        success_count,
    ))
    cloud_logger.info('%s end.' % (func_name))
    return True
Example #11
import google.cloud.logging
import logging

from spaceship.config import Config

# enable google cloud logging
# from: https://cloud.google.com/logging/docs/setup/python
if Config.IN_PRODUCTION:
    client = google.cloud.logging.Client()
    client.setup_logging(log_level=logging.INFO)

# set log level on the root logger
root = logging.getLogger()
root.setLevel(logging.INFO)

# in production, set gunicorn handlers to what google provided
if Config.IN_PRODUCTION:
    gunicorn_logger = logging.getLogger('gunicorn.error')
    gunicorn_logger.handlers = root.handlers

# if google logging is not available, set up our own (to stdout)
else:
    # send output to stdout
    handler = logging.StreamHandler()
    root.addHandler(handler)

    # our log format
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s %(name)s : %(message)s')
    handler.setFormatter(formatter)
Example #12

@app.route(API_VERSION + "/del_policy", methods=["GET"])
def del_policy():
    """
    Delete a policy
    Returns:

    """
    name = request.args.get("policy")
    with client.context():
        res = PolicyModel.query(PolicyModel.Name == name).get()
        if not res:
            return "not found", 404
        res.key.delete()
    return "ok", 200


@app.route("/")
def index():
    """
    Main Page
    :return:
    """
    return "ok", 200


if __name__ == "__main__":
    logging.getLogger("googleapiclient.discovery_cache").setLevel(logging.info)
    app.run(debug=False)
Example #13
File: main.py Project: FionaCLin/epipro
)  # Documentation Description

CORS(app)
app.register_blueprint(blueprint)

try:
    # This is used when running locally only. When deploying to Google App
    # Engine, a webserver process such as Gunicorn will serve the app. This
    # can be configured by adding an `entrypoint` to app.yaml.
    logging.basicConfig(filemode='w',
                        format='%(asctime)s - %(message)s',
                        datefmt='%d-%b-%y %H:%M:%S',
                        level=logging.INFO)
    logging.info('LET THE GAMES BEGIN! API STARTS')
    logging.info('==========================================')
    logger = logging.getLogger('werkzeug')
    handler = logging.FileHandler('./log/Api_log.log')
    logger.addHandler(handler)
    # Also add the handler to Flask's logger for cases
    #  where Werkzeug isn't used as the underlying WSGI server.
    app.logger.addHandler(handler)
except BaseException:
    pass

client = MongoClient(config.MONGO_URI, config.MONGO_PORT)
db = client[config.MONGO_DB]

parser = reqparse.RequestParser()

LOCATION = 'location'
KEY_TERMS = 'key_terms'
Example #14
            }
        },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'DEBUG',
            'stream': 'ext://sys.stdout',
            }
        },
    'root': {
        'level': 'DEBUG',
        'handlers': ['console'],
        }
    })

logging.getLogger('root').info("Logging init!")

# If `entrypoint` is not defined in app.yaml, App Engine will look for an app
# called `app` in `main.py`.
app = Flask(__name__)
auth = HTTPBasicAuth()


from auth_credentials_store.all_credentials import all_credentials
app.register_blueprint(all_credentials)
from quick_follow_up.follow_up import follow_up
app.register_blueprint(follow_up, url_prefix="/follow_up")


@auth.verify_password
def verify_password(username, password):
Example #15
def bq_to_yahoo(
        event: dict,
        content,
) -> bool:
    # logging
    logging_client = google.cloud.logging.Client()
    handler = CloudLoggingHandler(logging_client)
    cloud_logger = logging.getLogger('cloudLogger')
    cloud_logger.setLevel(logging.INFO)
    cloud_logger.addHandler(handler)
    # error reporting
    error_reporting_client = error_reporting.Client()
    # get parameters
    args = docopt(__doc__)
    conf_file_path = args['--conf_file_path']
    # start
    cloud_logger.info('%s start.' % (__file__))
    # check lock file
    lock_file_path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)) + '/../lock/',
        os.path.splitext(os.path.basename(conf_file_path))[0] + '.lock',
    )
    if os.path.isfile(lock_file_path):
        msg = '%s: msg:"%s".' % (__file__, 'already running.')
        cloud_logger.error(msg)
        error_reporting_client.report(msg)
        sys.exit(1)
    # make lock file
    Path(lock_file_path).touch()
    try:
        # setting
        with open(conf_file_path) as f:
            conf_data = yaml.full_load(f)
        bq_project_id = conf_data['bq']['project_id']
        bq_dataset_id = conf_data['bq']['dataset_id']
        bq_query = conf_data['bq']['query'].format(
            project_id=bq_project_id,
        )
        success_count = 0
        # create model
        ebdty = bq_to_yahoo_src.ExportBqDataToY(
            bq_project_id,
            bq_dataset_id,
        )
        # get data from BigQuery
        bq_data = ebdty.get_data_from_bq(bq_query)
        bq_data_length = len(list(bq_data))
        # send data by API
        for row in bq_data:
            url_param_idfa = {}
            url_param_aaid = {}
            url_param_gclid = {}
            try:
                if row.idfa != '':
                    # IDFA
                    url_param_idfa['referrer'] = 'idfa_referrer'
                    url_param_idfa['key'] = 'idfa'
                    url_param_idfa['value'] = row.idfa
                    url_param_idfa['flag'] = str(row.segmentId)
                    ebdty.send_api(url_param_idfa)
                if row.aaid != '':
                    # AAID
                    url_param_aaid['referrer'] = 'adid_referrer'
                    url_param_aaid['key'] = 'adid'
                    url_param_aaid['value'] = row.aaid
                    url_param_aaid['flag'] = str(row.segmentId)
                    ebdty.send_api(url_param_aaid)
                # GA client ID
                url_param_gclid['referrer'] = 'gaid_referrer'
                url_param_gclid['key'] = 'ga_client_id'
                url_param_gclid['value'] = row.clientId
                url_param_gclid['flag'] = str(row.segmentId)
                ebdty.send_api(url_param_gclid)
            except Exception as e:
                msg = '%s: %s.' % (__file__, e)
                cloud_logger.error(msg)
                error_reporting_client.report(msg)
            else:
                success_count += 1
            # wait
            sleep(0.01)
        # delete lock file
        os.remove(lock_file_path)
    except Exception as e:
        msg = '%s: %s.' % (__file__, e)
        cloud_logger.error(msg)
        error_reporting_client.report(msg)
        # delete lock file
        os.remove(lock_file_path)
        sys.exit(1)
    # end
    cloud_logger.info('total send count:%d, success send count:%d.' % (
        bq_data_length,
        success_count,
    ))
    cloud_logger.info('%s end.' % (__file__))
    sys.exit(0)
Example #16
            currentReq = currentRequest.get()
            TRACE = "projects/{}/traces/{}".format(client.project,
                                                   currentReq._traceID)
            currentReq.maxLogLevel = max(currentReq.maxLogLevel,
                                         record.levelno)
        except:
            TRACE = None

        self.transport.send(record,
                            message,
                            resource=self.resource,
                            labels=self.labels,
                            trace=TRACE)


logger = logging.getLogger()
logger.setLevel(logging.DEBUG)

handler = ViURDefaultLogger(client,
                            name="ViUR-Messages",
                            resource=Resource(type="gae_app", labels={}))
logger.addHandler(handler)

sh = logging.StreamHandler()
formatter = logging.Formatter(
    "%(levelname)-8s %(asctime)s %(filename)s:%(lineno)s] %(message)s")
sh.setFormatter(formatter)
logger.addHandler(sh)

for logger_name in EXCLUDED_LOGGER_DEFAULTS:
    logger = logging.getLogger(logger_name)
Example #17
# Imports the Google Cloud client library
import google.cloud.logging

# Instantiates a client
client = google.cloud.logging.Client()

# Connects the logger to the root logging handler; by default this captures
# all logs at INFO level and higher
cloud_handler = client.get_default_handler()

# Imports Python standard library logging
import logging

logger = logging.getLogger('Cloud Logger')
logger.setLevel(logging.INFO)
logger.addHandler(cloud_handler)
# Emits the data using the standard logging module
logger.warning('get default handler')
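Roughly the same wiring can be done in a single call with Client.setup_logging(), which attaches a Cloud Logging handler to the root logger instead of a separately named logger; a minimal sketch:

import logging

import google.cloud.logging

client = google.cloud.logging.Client()
# Attaches a handler to the root logger; by default this captures INFO and higher.
client.setup_logging()
logging.warning("get default handler")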
Example #18
import os
import sys

DRY_RUN = True
SUICIDE_FLAG = False
PROJECT_ID = os.environ["GLOUCD_PROJECT"]

pnconfig = PNConfiguration()

pnconfig.subscribe_key = 'sub-c-52a9ab50-291b-11e5-baaa-0619f8945a4f'
# pnconfig.reconnect_policy = PNReconnectionPolicy.LINEAR
pubnub = PubNub(pnconfig)

import logging

logger = logging.getLogger('bitflyer-collector')
if not DRY_RUN:
    logging_client = google.cloud.logging.Client(PROJECT_ID)
    logger.addHandler(logging_client.get_default_handler())

logger.setLevel(logging.INFO)


class StreamDataProcessing():
    def __init__(self):
        pass

    def stream_data(self, rows):
        pass

Example #19
    :param record:
    :return:
    """
    return 'discord_dictionary_bot' in record.name or 'discord_dictionary_bot' in record.pathname


def gcp_logging_filter(record):
    return 'google.cloud.logging_v2.handlers.transports.background_thread' not in record.name


# Set up logging
logging.basicConfig(
    format='%(asctime)s [%(name)s] [%(levelname)s] %(message)s',
    level=logging.DEBUG,
    datefmt='%m/%d/%Y %H:%M:%S')
logging.getLogger().handlers[0].addFilter(gcp_logging_filter)


def try_read_token(token_or_path: str) -> str:
    """
    Try to read from the given file. If the file can be read, return the file contents. Otherwise, return the argument.
    :param token_or_path:
    :return:
    """
    try:
        with open(token_or_path) as file:
            return file.read()
    except IOError:
        pass  # Ignore and assume the argument is a token string not a file path
    return token_or_path
Example #20
    # Instantiates a client
    logging_client = google.cloud.logging.Client()

    # Connects the logger to the root logging handler; by default this captures
    # all logs at INFO level and higher
    logging_client.setup_logging()
    logger = logging_client.logger(__name__)
    logger.log_struct = log_struct_flatten(logger.log_struct)
else:
    # must be imported after google.cloud.logging
    import logging
    import types
    from pprint import pformat
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    def log_text(self, text, **kw):
        level = logging.INFO
        if 'severity' in kw:
            level = getattr(logging, kw.pop('severity'))
        self.log(level, text, **kw)

    logger.log_text = log_text.__get__(logger)

    def log_struct(self, info, **kw):
        logger.log_text(pformat(info), **kw)

    logger.log_struct = log_struct.__get__(logger)

    logger.log_text("local logging mode")
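Because both branches leave logger with log_text and log_struct methods, the calling code can stay identical in local and cloud modes. A hedged usage sketch (assuming the cloud-side log_struct_flatten wrapper preserves the severity keyword):

# Locally this pretty-prints through the shims above; in the cloud branch it
# calls the real Cloud Logging logger methods.
logger.log_struct({"event": "job_finished", "items": 42}, severity="WARNING")
logger.log_text("plain message", severity="ERROR")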
Example #21
import google.cloud.logging
from flask import Flask, request, render_template
from chatterbot import ChatBot
from chatterbot.trainers import ListTrainer
from chatterbot.trainers import ChatterBotCorpusTrainer
import re
from logging.config import dictConfig
import random, logging
from google.cloud.logging.handlers import CloudLoggingHandler

import os

client = google.cloud.logging.Client()
handler = CloudLoggingHandler(client)
cloud_logger = logging.getLogger()
cloud_logger.addHandler(handler)

app = Flask(__name__)
# app.config["DEBUG"] = True

# logging.basicConfig(level=logging.INFO, format=, handlers=[
#     logging.FileHandler("log.log")
# ])
db_user = os.environ["DB_USER"]
db_pass = os.environ["DB_PASS"]
db_name = os.environ["DB_NAME"]
db_host = os.environ["DB_HOST"]

db = f"mongodb+srv://{db_user}:{db_pass}@{db_host}/{db_name}?retryWrites=true&w=majority"
bot = ChatBot(