Example #1
def __connect_gcl(logger_name: str) -> Logger:
    """
    Sets the ``GOOGLE_APPLICATION_CREDENTIALS`` environmental variable for
    connecting to Stackdriver-Logging and initiates a new logger with name
    **logger_name**.

    :param logger_name: The name of the logger.
    :return: A Stackdriver Logger.

    .. note:: A google credentials file should exist in the current
    directory with the name ``meetup-analysis.json``.
    """
    pprint(f"Initiating {logger_name}...")

    try:
        logging_client = logging.Client()
    except DefaultCredentialsError:
        google_env_var = "GOOGLE_APPLICATION_CREDENTIALS"
        credentials_path = os.path.join(os.getcwd(), '../meetup-analysis.json')
        if os.environ.get(google_env_var) is None:
            if os.path.exists(credentials_path):
                os.environ[google_env_var] = credentials_path
                pprint("Found google credentials.", pformat=BColors.OKGREEN)
            else:
                raise FileNotFoundError(
                    f'Could not find environment variable [{google_env_var}]'
                    f' or the credentials file [{credentials_path}]')
        logging_client = logging.Client()

    logger = logging_client.logger(logger_name)  # Initiate a new logger.

    return logger
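A hedged usage sketch for the helper above; the logger name and message are illustrative:

# Illustrative call (from within the same module, since the name is
# double-underscore prefixed); "meetup_events" is a made-up logger name.
logger = __connect_gcl("meetup_events")
logger.log_text("Meetup analysis pipeline started.")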
Example #2
def instantiate_client(_unused_client, _unused_to_delete):
    """Instantiate client."""

    # [START client_create_default]
    from google.cloud import logging
    client = logging.Client()
    # [END client_create_default]

    credentials = object()
    # [START client_create_explicit]
    from google.cloud import logging
    client = logging.Client(project='my-project', credentials=credentials)
    # [END client_create_explicit]
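The explicit variant above uses a placeholder credentials object; a minimal sketch of building real credentials, assuming a service-account key file at an illustrative path:

# Sketch: explicit credentials from a service-account JSON key.
# The file path is illustrative, not part of the original sample.
from google.oauth2 import service_account
from google.cloud import logging

credentials = service_account.Credentials.from_service_account_file(
    '/path/to/service-account.json')
client = logging.Client(project='my-project', credentials=credentials)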
Example #3
def test_create_sink(capfd):
    try:
        # run the function
        logs_operator = export_logs_utility(
            test_variables["sink_name"],
            test_variables["project_id"],
            test_variables["dataset_name"],
            test_variables["dataset_location"],
            "create",
            test_variables["filter_"],
        )

        logs_operator.create_sink()
        out, err = capfd.readouterr()

        logging_client = logging.Client()
        sink = logging_client.sink(logs_operator.sink_name)
        # assert that the sink exists
        assert sink.exists()
        # assert that the print message exists in the terminal output
        assert out == "Created sink {}\n".format(test_variables["sink_name"])
    except AssertionError:
        # assert that the sink exists
        assert sink.exists()
        assert out == "Sink {} already exists.\n".format(
            test_variables["sink_name"])
Example #4
def test_delete_sink(capfd):
    # run the function
    logs_operator = export_logs_utility(
        test_variables["sink_name"],
        test_variables["project_id"],
        test_variables["dataset_name"],
        test_variables["dataset_location"],
        "delete",
        test_variables["filter_"],
    )

    # create sink
    logs_operator.create_sink()
    logging_client = logging.Client()
    sink = logging_client.sink(logs_operator.sink_name)
    # assert that the sink exists
    assert sink.exists()

    # delete sink
    logs_operator.delete_sink()
    out, err = capfd.readouterr()

    # assert that the print message exists in the terminal output
    assert "Deleted sink {}\n".format(test_variables["sink_name"]) in out

    # assert that it does not exist
    assert not sink.exists()
Example #5
    def create_sink(self):
        """Creates a sink to export logs to the given Cloud Storage bucket.

        The filter determines which logs this sink matches and will be exported
        to the destination. For example a filter of 'severity>=INFO' will send
        all logs that have a severity of INFO or greater to the destination.
        See https://cloud.google.com/logging/docs/view/advanced_filters for more
        filter information.
        """
        logging_client = logging.Client()

        # The destination can be a Cloud Storage bucket, a Cloud Pub/Sub topic,
        # or a BigQuery dataset. In this case, it is a BigQuery dataset.
        # See https://cloud.google.com/logging/docs/api/tasks/exporting-logs for
        # information on the destination format.
        destination = f"bigquery.googleapis.com/projects/{self.project_id}/datasets/{self.dataset_name}"

        sink = logging_client.sink(self.sink_name, self.filter_, destination)

        if sink.exists():
            print("Sink {} already exists.".format(sink.name))
            return

        sink.create(unique_writer_identity=True)
        print("Created sink {}".format(sink.name))
Example #6
    def process(self, KVelement, offsetData):
        try:
            key, element = KVelement
            imgMetadata = element['imgMetadata']
            fileName = imgMetadata['fileName']
            if 'Nt' not in imgMetadata:
                assert imgMetadata['Nz'] == 1
                assert offsetData[key].size > 0
                imgMetadata['Nt'] = offsetData[key].size
            else:
                assert 'Nz' in imgMetadata
                # Nframes = imgMetadata['Nt']*imgMetadata['Nz']
                # assert Nframes == offsetData[key].size
            splitter = self._getSplitter(imgMetadata)
            IFD = element['IFD']
            sortedIFDs = offsetData[key]
            assert IFD in sortedIFDs
            n = searchsorted(sortedIFDs, IFD) + 1
            for chunk in splitter.iterChunks(n, element['frame']):
                yield beam.pvalue.TaggedOutput('chunks', chunk)
        except Exception:
            client = error_reporting.Client()
            client.report('File Not Processed: ' + fileName)
            client.report_exception()

            logging_client = logging.Client()
            log_name = 'TIF-reader'
            logger = logging_client.logger(log_name)
            logmessage = {'Error': 'File cannot be read', 'Filename': fileName}
            logger.log_struct(logmessage)
            yield beam.pvalue.TaggedOutput('readError',
                                           ('File Not Processed', fileName))
Example #7
File: add.py Project: andyakins/GCPOC
def post(instring):
    errorLog = error_reporting.Client(project='gcpoc-173120',
                                      service="GCPOCGetService",
                                      version="1.0.0")
    logClient = logging.Client()
    logger = logClient.logger('GCPOCPostLog')

    connection = pymysql.connect(
                            host=os.environ['GCPOC_DB_HOST'],
                            user=os.environ['GCPOC_DB_USER'],
                            password=os.environ['GCPOC_DB_PASSWORD'],
                            db=os.environ['GCPOC_DB_DATABASE'],
                            charset='utf8mb4',
                            cursorclass=pymysql.cursors.Cursor)
    try:
        with connection.cursor() as cursor:
            sql = "INSERT INTO gcpoc (instring) VALUES (%s)"
            cursor.execute(sql, (instring,))
            logger.log_text("Added %s to database" % instring)
        connection.commit()
    except Exception:
        errorLog.report_exception()
        return instring, 500
    finally:
        connection.close()
    return instring, 201
Example #8
    def __init__(self, **kwargs):
        StreamHandler.__init__(self)
        if 'log_name' not in kwargs.keys():
            raise ValueError("Missing log_name value")
        log_name = kwargs['log_name']
        if not isinstance(log_name, str):
            raise ValueError("Invalid value: log_name must be of type string")

        self.parse_json = False
        if 'parse_json' in kwargs.keys():
            parse_json = kwargs['parse_json']
            if not isinstance(parse_json, bool):
                raise ValueError(
                    "Invalid value: parse_json must be of type bool")
            self.parse_json = parse_json
        self.add_hostname = False
        if 'add_hostname' in kwargs.keys():
            add_hostname = kwargs['add_hostname']
            if not isinstance(add_hostname, bool):
                raise ValueError(
                    "Invalid value: add_hostname must be of type bool")
            self.add_hostname = add_hostname
            self.hostname = socket.gethostname()

        self.logger = logging.Client().logger(log_name)
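The snippet ends before the emit override a StreamHandler subclass would normally carry; a minimal sketch, assuming the add_hostname flag shapes the payload (JSON handling for parse_json is left out):

    def emit(self, record):
        # Sketch only: forward stdlib log records to the Cloud logger above.
        msg = self.format(record)
        if self.add_hostname:
            self.logger.log_struct({'hostname': self.hostname, 'message': msg})
        else:
            self.logger.log_text(msg)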
Example #9
def get_log_entries(unique_id):
    cloud_client = cloud_logging.Client()
    log_name = 'cloudfunctions.googleapis.com%2Fcloud-functions'
    cloud_logger = cloud_client.logger(log_name)

    start_time = time.time()
    is_existing = {'pdf': False, 'xml': False}
    while True:
        if time.time() - start_time > 60:
            raise TimeoutError('No matching logs found')
        else:
            logging.info('Refreshing logs...')
            entries = request_log(cloud_logger=cloud_logger,
                                  unique_id=unique_id)

            for entry in entries:
                logging.info('Found logging: {}'.format(entry.payload))
                if 'PDF invoice sent' in entry.payload:
                    is_existing['pdf'] = True
                elif 'XML invoice sent' in entry.payload:
                    is_existing['xml'] = True

            if is_existing['pdf'] and is_existing['xml']:
                logging.info('Both files successfully posted')
                break
            else:
                time.sleep(15.0 - ((time.time() - start_time) % 15.0))
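request_log is not shown in this example; a minimal sketch of what it might look like, using the library's Logger.list_entries (the textPayload match is an assumption about the log format):

def request_log(cloud_logger, unique_id):
    # Sketch: advanced-filter match on the unique id in the text payload.
    return cloud_logger.list_entries(
        filter_='textPayload:"{}"'.format(unique_id))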
Example #10
File: export.py Project: ianagpawa/nano
def create_sink(sink_name, destination_bucket, filter_):
    """Creates a sink to export logs to the given Cloud Storage bucket.

    The filter determines which logs this sink matches and will be exported
    to the destination. For example, a filter of 'severity>=INFO' will send
    all logs that have a severity of INFO or greater to the destination.
    See https://cloud.google.com/logging/docs/view/advanced_filters for more
    filter information.
    """
    logging_client = logging.Client()

    # The destination can be a Cloud Storage bucket, a Cloud Pub/Sub topic,
    # or a BigQuery dataset. In this case, it is a Cloud Storage Bucket.
    # See https://cloud.google.com/logging/docs/api/tasks/exporting-logs for
    # information on the destination format.
    destination = 'storage.googleapis.com/{bucket}'.format(
        bucket=destination_bucket)

    sink = logging_client.sink(
        sink_name,
        filter_,
        destination)

    if sink.exists():
        print('Sink {} already exists.'.format(sink.name))
        return

    sink.create()
    print('Created sink {}'.format(sink.name))
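A short usage sketch; the sink name, bucket, and filter are illustrative:

# Illustrative call: export INFO-and-above entries to a storage bucket.
create_sink('my-info-sink', 'my-log-bucket', 'severity>=INFO')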
Example #11
File: gcp.py Project: williamsedward/daq
    def __init__(self, config, callback_handler):
        self.config = config
        self._callback_handler = callback_handler
        if 'gcp_cred' not in config:
            LOGGER.info('No gcp_cred credential specified in config')
            self._pubber = None
            self._storage = None
            self._firestore = None
            self._client_name = None
            return
        cred_file = self.config['gcp_cred']
        LOGGER.info('Loading gcp credentials from %s', cred_file)
        # Normal execution assumes default credentials.
        # pylint: disable=protected-access
        (self._credentials, self._project) = google_auth._load_credentials_from_file(cred_file)
        self._client_name = self._parse_creds(cred_file)
        self._site_name = self._get_site_name()
        self._pubber = pubsub_v1.PublisherClient(credentials=self._credentials)
        LOGGER.info('Initialized gcp pub/sub %s:%s:%s', self._project,
                    self._client_name, self._site_name)
        self._firestore = self._initialize_firestore(cred_file)
        self._report_bucket_name = self.REPORT_BUCKET_FORMAT % self._project
        self._storage = storage.Client(project=self._project, credentials=self._credentials)
        self._ensure_report_bucket()
        self._config_callbacks = {}
        self._logging = logging.Client(credentials=self._credentials, project=self._project)

        LOGGER.info('Connection initialized at %s', get_timestamp())
Example #12
    def process(self, element):
        decoded_data = element.data.decode("utf-8")
        data = json.loads(decoded_data)

        api_url = f"{aq_baseurl}?city=Richmond&state=Virginia&country=USA&key={AQ_API_KEY}"
        r = requests.get(api_url)

        aq_dict = r.json()
        air_q = data['air_quality']
        air_q.append(aq_dict)
        data['air_quality'] = air_q
        currentTime = time.time()
        data['ma_arrival'] = currentTime
        uuid = data['uuid']
        enriched_bird_str = json.dumps(data).encode('utf-8')
        updated_element = element
        updated_element.data = enriched_bird_str

        client = logging.Client()
        log_name = data['uuid']
        logger = client.logger(log_name)
        logger.log_text(
            f"MID ATLANTIC, v.{version['__version__']}, uuid: {uuid}, Elapsed time since last step: {currentTime - data['ne_arrival']}"
        )

        yield updated_element
Example #13
    def get_handler(self):
        """Create a fully configured CloudLoggingHandler.

        Returns:
            (obj): Instance of `google.cloud.logging.handlers.
                                CloudLoggingHandler`
        """

        gcl_client = gcl_logging.Client(project=self.project_id,
                                        credentials=self.credentials)
        handler = gcl_handlers.CloudLoggingHandler(
            gcl_client,
            resource=self.resource,
            labels={
                'resource_id': self.instance_id,
                'resource_project': self.project_id,
                'resource_zone': self.zone,
                'resource_host': self.hostname,
            })
        handler.setFormatter(self.get_formatter())
        self._set_worker_thread_level()
        return handler
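A short usage sketch for the returned handler, wired into the stdlib root logger (log_setup is a hypothetical instance of the surrounding class):

import logging

handler = log_setup.get_handler()
root = logging.getLogger()
root.addHandler(handler)  # records now also flow to Cloud Logging
root.setLevel(logging.INFO)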
Example #14
    def _ParseLoggingArguments(self, options):
        """Parses the --logging flag.

    Args:
      options (argparse.Namespace): the parsed command-line arguments.
    Raises:
      errors.BadConfigOption: if the options are invalid.
    """
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(name)s - %(message)s')
        self._logger = logging.getLogger(self.__class__.__name__)

        if 'stackdriver' in options.logging:
            if not self._gcs_settings:
                raise errors.BadConfigOption(
                    'Please provide a valid --gs_keyfile to enable StackDriver '
                    'logging')
            gcp_credentials = service_account.Credentials.from_service_account_file(
                options.gs_keyfile)
            project_id = self._gcs_settings.get('project_id', None)

            gcp_logging_client = google_logging.Client(
                project=project_id, credentials=gcp_credentials)
            self._stackdriver_handler = CloudLoggingHandler(gcp_logging_client,
                                                            name='GiftStick')
            self._logger.addHandler(self._stackdriver_handler)

        if options.log_progress:
            if 'stackdriver' not in options.logging:
                raise errors.BadConfigOption(
                    'Progress logging requires Stackdriver logging to be enabled'
                )
            self._progress_logger = google_logging.logger.Logger(
                'GiftStick', gcp_logging_client)
Example #15
    def __init__(self, config, callback_handler):
        self.config = config
        self._callback_handler = callback_handler
        cred_file = self.config.get('gcp_cred')
        if not cred_file:
            LOGGER.info(
                'No gcp_cred file specified in config, disabling gcp use.')
            self._pubber = None
            self._storage = None
            self._firestore = None
            self._client_name = None
            return
        assert SUCCESSFUL_IMPORTS, "Missing google cloud python dependencies."
        LOGGER.info('Loading gcp credentials from %s', cred_file)
        # Normal execution assumes default credentials.
        (self._credentials,
         self._project) = google_auth.load_credentials_from_file(cred_file)
        self._client_name = self._parse_creds(cred_file)
        self._site_name = self._get_site_name()
        self._pubber = pubsub_v1.PublisherClient(credentials=self._credentials)
        LOGGER.info('Initialized gcp pub/sub %s:%s:%s', self._project,
                    self._client_name, self._site_name)
        self._firestore = self._initialize_firestore(cred_file)
        self._report_bucket_name = self.REPORT_BUCKET_FORMAT % self._project
        self._storage = storage.Client(project=self._project,
                                       credentials=self._credentials)
        self._bucket = self._ensure_report_bucket()
        self._config_callbacks = {}
        self._logging = logging.Client(credentials=self._credentials,
                                       project=self._project)

        LOGGER.info('Connection initialized at %s', get_timestamp())
Example #16
def configure_logging():
    """Configure DNA to use cloud logging.

  Invoke once at application startup, before any log calls.
  """
    client = cloud_logging.Client()
    client.setup_logging(log_level=logging.DEBUG)
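After this runs, ordinary stdlib logging calls are forwarded to Cloud Logging; a minimal sketch:

import logging

configure_logging()
logging.debug('captured, since the handler level is DEBUG')
logging.info('also forwarded to Cloud Logging')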
Example #17
File: talos.py Project: sedezee/TalosBot
def configure_logging():
    """
        Configure the loggers for Talos. Sets up the Talos loggers
        and discord.py loggers separately, so they can be easily configured
        independently.
    """
    fh = logging.FileHandler(utils.log_folder / "dtalos.log")
    dfh = logging.FileHandler(utils.log_folder / "dpy.log")
    sh = logging.StreamHandler(sys.stderr)
    gh = None
    try:
        import google.cloud.logging as glog
        client = glog.Client()
        gh = client.get_default_handler()
        gh.name = "dtalos"
        gh.setLevel(logging.WARNING)
    except (ImportError, OSError):
        print("Could not set up GCloud logging; install the google cloud dependencies")

    ff = logging.Formatter("%(levelname)s:%(name)s:%(message)s")

    dlog = logging.getLogger("discord")

    # Skip the GCloud handler if it was not created above.
    handlers = [fh, sh] if gh is None else [fh, sh, gh]
    utils.configure_logger(log,
                           handlers=handlers,
                           formatter=ff,
                           level=logging.INFO,
                           propagate=False)
    utils.configure_logger(dlog,
                           handlers=[dfh, sh],
                           formatter=ff,
                           level=logging.INFO,
                           propagate=False)
Example #18
def main(argv):
    del argv  # Unused
    creds, project_id = google.auth.default(scopes=SCOPE)

    logging_client = gcp_logging.Client(project=project_id, credentials=creds)
    logging_client.setup_logging(log_level=logging.INFO)

    movie_service = imdb.Service(FLAGS.min_votes, FLAGS.movie_file,
                                 FLAGS.rating_file)

    shutdown_handler = util.GracefulShutdown()
    signal.signal(signal.SIGTERM, shutdown_handler.exit)

    while not shutdown_handler.is_exit():
        logging.info("Start iteration.")

        sheets_service = sheets.BuildService(creds)

        titles = sheets.ReadMovieNames(sheets_service, FLAGS.sheet_id)
        ratings = movie_service.GetRating(titles)

        logging.info(
            str(sheets.WriteMovieInfo(ratings, sheets_service,
                                      FLAGS.sheet_id)))

        logging.info("End iteration.")
        time.sleep(FLAGS.polling_interval)

    logging.info("Exit")
Example #19
def search() -> tuple:
    errorLog = error_reporting.Client(project='gcpoc-173120',
                                      service="GCPOCGetService",
                                      version="1.0.0")
    logClient = logging.Client()
    logger = logClient.logger('GCPOCGetLog')

    connection = pymysql.connect(host=os.environ['GCPOC_DB_HOST'],
                                 user=os.environ['GCPOC_DB_USER'],
                                 password=os.environ['GCPOC_DB_PASSWORD'],
                                 db=os.environ['GCPOC_DB_DATABASE'],
                                 charset='utf8mb4',
                                 cursorclass=pymysql.cursors.Cursor)
    try:
        with connection.cursor() as cursor:
            sql = "SELECT instring from gcpoc"
            cursor.execute(sql)
            result = [item[0] for item in cursor.fetchall()]
            logger.log_text("Found %d items in database" % len(result))
    except Exception:
        errorLog.report_exception()
        return None, 500
    finally:
        connection.close()

    return result, 200
Example #20
def setup_logging():
    """Connects the default logger to Google Cloud Logging.
    Only logs at INFO level or higher will be captured.
    """
    client = logging.Client()
    # setup_logging() attaches the client's default handler to the root
    # logger, so standard logging calls are forwarded to Cloud Logging.
    client.setup_logging()
Example #21
def log(text, severity=LOG_SEVERITY_DEFAULT, log_name=app_name):
    logging_client = logging.Client()
    logger = logging_client.logger(log_name)

    return logger.log_text(text, severity=severity,
                           resource=google.cloud.logging.Resource(type="cloud_function",
                                                                  labels={}))
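The empty labels dict above means entries carry no function metadata; a hedged variant, assuming the standard labels of the cloud_function monitored-resource type (values are illustrative placeholders):

# Sketch: illustrative labels for the "cloud_function" resource type.
resource = google.cloud.logging.Resource(
    type="cloud_function",
    labels={
        "function_name": "my-function",
        "region": "us-central1",
    },
)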
Example #22
def add_stackdriver_sink(loguru_logger, log_name):
    """Google cloud log sink in "Global" i.e.
    https://console.cloud.google.com/logs/viewer?project=silken-impulse-217423&minLogLevel=0&expandAll=false&resource=global
    """
    global stackdriver_client
    stackdriver_logger = None
    if not in_test() and stackdriver_client is None and \
            not blconfig.disable_cloud_log_sinks:
        stackdriver_client = gcloud_logging.Client()
        stackdriver_logger = stackdriver_client.logger(log_name)

    def sink(message):
        record = message.record
        level = str(record['level'])
        if level == 'SUCCESS':
            severity = 'NOTICE'
        elif level == 'TRACE':
            # Nothing lower than DEBUG in stackdriver
            severity = 'DEBUG'
        elif level == 'EXCEPTION':
            severity = 'ERROR'
        elif level in VALID_STACK_DRIVER_LEVELS:
            severity = level
        else:
            severity = 'INFO'
        if stackdriver_logger is not None and not in_test():
            stackdriver_logger.log_text(message, severity=severity)

    loguru_logger.add(sink)
Example #23
    async def get_sinks(self, project_id: str):
        try:
            client = stackdriverlogging.Client(project=project_id)
            return await run_concurrently(
                lambda: [sink for sink in client.list_sinks()])
        except Exception as e:
            print_exception('Failed to retrieve sinks: {}'.format(e))
            return []
Example #24
    def log_event(self):
        """
        Saves the event to the specified Log Sink
        """
        record = self.format_event()
        self.logging_client = logging.Client()
        self.logger = self.logging_client.logger(self.log)
        self.logger.log_struct(record)
Example #25
File: export.py Project: ianagpawa/nano
def delete_sink(sink_name):
    """Deletes a sink."""
    logging_client = logging.Client()
    sink = logging_client.sink(sink_name)

    sink.delete()

    print('Deleted sink {}'.format(sink.name))
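sink.delete() raises NotFound when the sink is missing; a hedged defensive variant of the function above:

# Sketch: tolerate a missing sink instead of propagating NotFound.
from google.api_core.exceptions import NotFound

def delete_sink_if_exists(sink_name):
    logging_client = logging.Client()
    sink = logging_client.sink(sink_name)
    try:
        sink.delete()
        print('Deleted sink {}'.format(sink.name))
    except NotFound:
        print('Sink {} does not exist.'.format(sink_name))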
Example #26
def gcp_connect_service(service, credentials=None, region_name=None):

    logging.getLogger('googleapiclient.discovery_cache').setLevel(
        logging.ERROR)
    # Set logging level to ERROR for GCP services, as they otherwise generate a lot of warnings
    logging.getLogger().setLevel(logging.ERROR)

    try:

        if service == 'cloudresourcemanager':
            return discovery.build('cloudresourcemanager',
                                   'v1',
                                   cache_discovery=False,
                                   cache=MemoryCache())

        elif service == 'cloudresourcemanager-v2':
            return discovery.build('cloudresourcemanager',
                                   'v2',
                                   cache_discovery=False,
                                   cache=MemoryCache())

        elif service == 'cloudstorage':
            return storage.Client()

        elif service == 'cloudsql':
            return discovery.build('sqladmin',
                                   'v1beta4',
                                   cache_discovery=False,
                                   cache=MemoryCache())

        elif service == 'iam':
            return discovery.build('iam',
                                   'v1',
                                   cache_discovery=False,
                                   cache=MemoryCache())

        elif service == 'stackdriverlogging':
            return stackdriver_logging.Client()

        elif service == 'stackdrivermonitoring':
            return monitoring_v3.MetricServiceClient()

        elif service == 'computeengine':
            return discovery.build('compute',
                                   'v1',
                                   cache_discovery=False,
                                   cache=MemoryCache())

        elif service == 'kubernetesengine':
            return container_v1.ClusterManagerClient()

        else:
            printException('Service %s not supported' % service)
            return None

    except Exception as e:
        printException(e)
        return None
Example #27
    def _client(self) -> gcp_logging.Client:
        """The Cloud Library API client"""
        credentials, project = self._credentials_and_project
        client = gcp_logging.Client(
            credentials=credentials,
            project=project,
            client_info=ClientInfo(client_library_version='airflow_v' + version.version),
        )
        return client
Example #28
    def __init__(self, log_name):
        log_client = logging.Client()
        # we can't set the resource type, this will default to 'global'
        # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2673
        self.logger = log_client.logger(log_name)
        self.logger.log_text('Logging initialized.')

        rospy.Subscriber('/rosout_agg', rosgraph_msgs.msg.Log, self.callback)
        rospy.loginfo('forward "rosout_agg" to "%s"' % self.logger.path)
Example #29
    def _client(self) -> gcp_logging.Client:
        """The Cloud Library API client"""
        credentials, project = self._credentials_and_project
        client = gcp_logging.Client(
            credentials=credentials,
            project=project,
            client_info=CLIENT_INFO,
        )
        return client
Example #30
def create_logger():
	"""
	Creates a log in Stackdriver
	"""
	instance_name = get_hostname()
	logname = '%s.log' % instance_name
	logging_client = logging.Client()
	logger = logging_client.logger(logname)
	return logger