Example #1
def create_sink(sink_name, destination_bucket, filter_):
    """Creates a sink to export logs to the given Cloud Storage bucket.

    The filter determines which logs this sink matches and will be exported
    to the destination. For example, a filter of 'severity>=INFO' will send
    all logs that have a severity of INFO or greater to the destination.
    See https://cloud.google.com/logging/docs/view/advanced_filters for more
    filter information.
    """
    logging_client = logging.Client()

    # The destination can be a Cloud Storage bucket, a Cloud Pub/Sub topic,
    # or a BigQuery dataset. In this case, it is a Cloud Storage Bucket.
    # See https://cloud.google.com/logging/docs/api/tasks/exporting-logs for
    # information on the destination format.
    destination = 'storage.googleapis.com/{bucket}'.format(
        bucket=destination_bucket)

    sink = logging_client.sink(sink_name, filter_, destination)

    if sink.exists():
        print('Sink {} already exists.'.format(sink.name))
        return

    sink.create()
    print('Created sink {}'.format(sink.name))
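A minimal usage sketch for the function above; the sink name, bucket, and filter are hypothetical placeholders, and the import assumes the google-cloud-logging client library that these examples use throughout:

from google.cloud import logging

# Hypothetical values for illustration only.
create_sink('my-error-sink', 'my-logs-bucket', 'severity>=ERROR')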
Example #2
def delete_sink(sink_name):
    """Deletes a sink."""
    logging_client = logging.Client()
    sink = logging_client.sink(sink_name)

    sink.delete()

    print('Deleted sink {}'.format(sink.name))
Example #3
def _write_to_cloud_logging(log_id, log_file_path):
    """Write log file content to cloud logging"""
    if not CLOUD_LOGGING_CLIENT:
        CLOUD_LOGGING_CLIENT = cloud_logging.Client()
    cloud_logger = CLOUD_LOGGING_CLIENT.logger(log_id)
    if log_file_path:
        with open(log_file_path, 'r') as log_file:
            cloud_logger.log_text(log_file.read())
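This snippet (and the near-identical one in Example #5) relies on a module-level client cache and an aliased import that are not shown; a minimal sketch of that assumed setup:

# Assumed module-level setup, not part of the original snippet.
from google.cloud import logging as cloud_logging

# Lazily populated client cache used by _write_to_cloud_logging().
CLOUD_LOGGING_CLIENT = None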
Example #4
def delete_logger(logger_name):
    """Deletes a logger and all its entries.

    Note that a deletion can take several minutes to take effect.
    """
    logging_client = logging.Client()
    logger = logging_client.logger(logger_name)

    logger.delete()

    print('Deleted all logging entries for {}'.format(logger.name))
Example #5
def _write_to_cloud_logging(log_id, log_file_path):
    """Write log file content to cloud logging"""
    # TODO(ethanbao): Turn conductor into a python object so that the logging
    # client can be instance variable not global variable.
    global CLOUD_LOGGING_CLIENT
    if not CLOUD_LOGGING_CLIENT:
        CLOUD_LOGGING_CLIENT = cloud_logging.Client()
    cloud_logger = CLOUD_LOGGING_CLIENT.logger(log_id)
    if log_file_path:
        with open(log_file_path, 'r') as log_file:
            cloud_logger.log_text(log_file.read())
Example #6
def example_sink(cloud_config):
    client = logging.Client()

    sink = client.sink(
        TEST_SINK_NAME, TEST_SINK_FILTER,
        'storage.googleapis.com/{bucket}'.format(
            bucket=cloud_config.storage_bucket))

    if sink.exists():
        sink.delete()

    sink.create()

    return sink
Example #7
def test_create(cloud_config, capsys):
    sink_name = TEST_SINK_NAME_TMPL.format(_random_id())

    try:
        export.create_sink(sink_name, cloud_config.storage_bucket,
                           TEST_SINK_FILTER)
    # Clean-up the temporary sink.
    finally:
        try:
            logging.Client().sink(sink_name).delete()
        except Exception:
            pass

    out, _ = capsys.readouterr()
    assert sink_name in out
Example #8
def example_sink(cloud_config):
    client = logging.Client()

    sink = client.sink(
        TEST_SINK_NAME_TMPL.format(_random_id()), TEST_SINK_FILTER,
        'storage.googleapis.com/{bucket}'.format(
            bucket=cloud_config.storage_bucket))

    sink.create()

    yield sink

    try:
        sink.delete()
    except Exception:
        pass
Example #9
def update_sink(sink_name, filter_):
    """Changes a sink's filter.

    The filter determines which logs this sink matches and will be exported
    to the destination. For example, a filter of 'severity>=INFO' will send
    all logs that have a severity of INFO or greater to the destination.
    See https://cloud.google.com/logging/docs/view/advanced_filters for more
    filter information.
    """
    logging_client = logging.Client()
    sink = logging_client.sink(sink_name)

    sink.reload()

    sink.filter_ = filter_
    sink.update()
    print('Updated sink {}'.format(sink.name))
Example #10
def list_sinks():
    """Lists all sinks."""
    logging_client = logging.Client()

    sinks = []
    token = None
    while True:
        new_sinks, token = logging_client.list_sinks(page_token=token)
        sinks.extend(new_sinks)
        if token is None:
            break

    if not sinks:
        print('No sinks.')

    for sink in sinks:
        print('{}: {} -> {}'.format(sink.name, sink.filter_, sink.destination))
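The example above paginates manually with page tokens, matching the older client interface used throughout this page. In more recent releases of google-cloud-logging, list_sinks() returns an iterator that fetches further pages on demand; a minimal sketch under that assumption:

from google.cloud import logging

def list_sinks_lazily():
    """Lists all sinks, letting the client library handle pagination."""
    logging_client = logging.Client()
    sinks = list(logging_client.list_sinks())

    if not sinks:
        print('No sinks.')

    for sink in sinks:
        print('{}: {} -> {}'.format(sink.name, sink.filter_, sink.destination))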
Example #11
def _print_log(pipeline_id):
    # Fetch the Cloud Logging entries when the execution fails. Wait 30
    # seconds, because it takes a while for the logs to become available.
    logger.critical(
        'The remote pipeline execution failed. Waiting 30 seconds before '
        'fetching the log for the remote pipeline execution.')
    time.sleep(30)
    client = logging.Client()
    pipeline_logger = client.logger(pipeline_id)
    entries, token = pipeline_logger.list_entries()
    for entry in entries:
        logger.error(entry.payload)

    logger.info(
        'You can always run the following command to fetch the log entry:\n'
        '    gcloud beta logging read "logName=projects/vkit-pipeline/logs/%s"'
        % pipeline_id, )
Example #12
def list_entries(logger_name):
    """Lists the most recent entries for a given logger."""
    logging_client = logging.Client()
    logger = logging_client.logger(logger_name)

    print('Listing entries for logger {}:'.format(logger.name))

    entries = []
    page_token = None

    while True:
        new_entries, page_token = logger.list_entries(page_token=page_token)
        entries.extend(new_entries)
        if not page_token:
            break

    for entry in entries:
        timestamp = entry.timestamp.isoformat()
        print('* {}: {}'.format(timestamp, entry.payload))
Example #13
def write_entry(logger_name):
    """Writes log entries to the given logger."""
    logging_client = logging.Client()

    # This log can be found in the Cloud Logging console under 'Custom Logs'.
    logger = logging_client.logger(logger_name)

    # Make a simple text log
    logger.log_text('Hello, world!')

    # Simple text log with severity.
    logger.log_text('Goodbye, world!', severity='ERROR')

    # Struct log. The struct can be any JSON-serializable dictionary.
    logger.log_struct({
        'name': 'King Arthur',
        'quest': 'Find the Holy Grail',
        'favorite_color': 'Blue'
    })

    print('Wrote logs to {}.'.format(logger.name))
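The structured and severity-tagged forms can also be combined; a brief sketch assuming log_struct accepts a severity keyword in the same way the log_text call above does:

from google.cloud import logging

def write_struct_with_severity(logger_name):
    """Writes a structured entry with an explicit severity."""
    logging_client = logging.Client()
    logger = logging_client.logger(logger_name)

    # severity is assumed to be accepted here, mirroring the log_text call.
    logger.log_struct({
        'name': 'King Arthur',
        'quest': 'Find the Holy Grail'
    }, severity='WARNING')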
Example #14
def get_client(project_id):
    """Builds an http client authenticated with the service account
    credentials."""
    # [START auth]
    credentials = GoogleCredentials.get_application_default()
    return logging.Client(project=project_id, credentials=credentials)
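GoogleCredentials here comes from the legacy oauth2client package, which the snippet does not import; a minimal sketch of the assumed imports and a call with a hypothetical project ID:

from google.cloud import logging
from oauth2client.client import GoogleCredentials

# Hypothetical project ID for illustration only.
client = get_client('my-project-id')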
Example #15
def setUpModule():
    _helpers.PROJECT = TESTS_PROJECT
    Config.CLIENT = logging.Client()
Example #16
import glob
import os
import socket
import sys
import time

import redis
# The logging client comes from google.cloud; the older gcloud package is
# kept only for storage and pubsub.
from gcloud import storage, pubsub
from google.cloud import logging

logclient = logging.Client()
logger = logclient.logger("ffmpeg-pool")

PROJECT_ID = 'transcode-159215'
TOPIC = 'projects/{}/topics/message'.format(PROJECT_ID)
psclient = None
pstopic = None
pssub = None


class RedisQueue(object):
    def __init__(self, name, namespace='queue'):
        self.__db = redis.Redis(
            host="redis-11670.c10.us-east-1-4.ec2.cloud.redislabs.com",
            port=11670)
        self.key = '%s:%s' % (namespace, name)

    def qsize(self):
        return self.__db.llen(self.key)

    def empty(self):
        return self.qsize() == 0
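    # The snippet above is cut off after empty(); a typical Redis-backed queue
    # also exposes put/get helpers. The methods below are an assumed
    # continuation for illustration, not part of the original code.
    def put(self, item):
        """Pushes an item onto the tail of the queue."""
        self.__db.rpush(self.key, item)

    def get(self, timeout=0):
        """Pops an item from the head of the queue, blocking up to timeout
        seconds (0 blocks indefinitely); returns None on timeout."""
        item = self.__db.blpop(self.key, timeout=timeout)
        return item[1] if item else None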
Example #17
def logger():
    client = logging.Client()
    return client
Example #18
def example_log():
    client = logging.Client()
    logger = client.logger(TEST_LOGGER_NAME)
    text = 'Hello, world.'
    logger.log_text(text)
    return text