Example #1
def new_stats_exporter(options=None, interval=None):
    """Get a stats exporter and running transport thread.

    Create a new `StackdriverStatsExporter` with the given options and start
    periodically exporting stats to stackdriver in the background.

    Fall back to default auth if `options` is null. This will raise
    `google.auth.exceptions.DefaultCredentialsError` if default credentials
    aren't configured.

    See `opencensus.metrics.transport.get_exporter_thread` for details on the
    transport thread.

    :type options: :class:`Options`
    :param options: Options to pass to the exporter.

    :type interval: int or float
    :param interval: Seconds between export calls.

    :rtype: :class:`StackdriverStatsExporter`
    :return: The newly-created exporter.
    """
    if options is None:
        _, project_id = google.auth.default()
        options = Options(project_id=project_id)
    if str(options.project_id).strip() == "":
        raise ValueError(ERROR_BLANK_PROJECT_ID)

    ci = client_info.ClientInfo(client_library_version=get_user_agent_slug())
    client = monitoring_v3.MetricServiceClient(client_info=ci)
    exporter = StackdriverStatsExporter(client=client, options=options)

    transport.get_exporter_thread([stats.stats], exporter, interval=interval)
    return exporter
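As a point of reference, here is a minimal sketch (the version slug and the output shown in the comment are illustrative, not taken from the example) of how the ClientInfo built above surfaces in the client's user agent:

from google.api_core import client_info

# Hypothetical version slug; the example above derives it from
# get_user_agent_slug() instead.
ci = client_info.ClientInfo(
    client_library_version='opencensus-ext-stackdriver-0.7.0')
# to_user_agent() folds every set component into one string, e.g.
# 'gl-python/3.11 gccl/opencensus-ext-stackdriver-0.7.0 gax/2.14.0 grpc/1.59.0'
print(ci.to_user_agent())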
Example #2
def _make_bqstorage_client(use_bqstorage_api, credentials):
    if not use_bqstorage_api:
        return None

    try:
        from google.cloud import bigquery_storage_v1
    except ImportError as err:
        customized_error = ImportError(
            "The default BigQuery Storage API client cannot be used, install "
            "the missing google-cloud-bigquery-storage and pyarrow packages "
            "to use it. Alternatively, use the classic REST API by specifying "
            "the --use_rest_api magic option.")
        six.raise_from(customized_error, err)

    try:
        from google.api_core.gapic_v1 import client_info as gapic_client_info
    except ImportError as err:
        customized_error = ImportError(
            "Install the grpcio package to use the BigQuery Storage API.")
        six.raise_from(customized_error, err)

    return bigquery_storage_v1.BigQueryReadClient(
        credentials=credentials,
        client_info=gapic_client_info.ClientInfo(
            user_agent=IPYTHON_USER_AGENT),
    )
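A hedged usage sketch for the helper above, assuming application default credentials are configured (the helper itself comes from the example):

import google.auth

# Obtain default credentials, then build the BigQuery Storage client.
# Passing use_bqstorage_api=False would simply return None.
credentials, _ = google.auth.default()
bqstorage_client = _make_bqstorage_client(True, credentials)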
Example #3
def _make_bqstorage_client(use_bqstorage_api, credentials):
    if not use_bqstorage_api:
        return None

    try:
        from google.cloud import bigquery_storage_v1beta1
    except ImportError as err:
        customized_error = ImportError(
            "Install the google-cloud-bigquery-storage and pyarrow packages "
            "to use the BigQuery Storage API."
        )
        six.raise_from(customized_error, err)

    try:
        from google.api_core.gapic_v1 import client_info as gapic_client_info
    except ImportError as err:
        customized_error = ImportError(
            "Install the grpcio package to use the BigQuery Storage API."
        )
        six.raise_from(customized_error, err)

    return bigquery_storage_v1beta1.BigQueryStorageClient(
        credentials=credentials,
        client_info=gapic_client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
    )
Example #4
def test_constructor_defaults():
    info = client_info.ClientInfo()

    assert info.python_version is not None
    assert info.grpc_version is not None
    assert info.api_core_version is not None
    assert info.gapic_version is None
    assert info.client_library_version is None
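The defaults asserted above are detected from the running environment; a small sketch (output shape is illustrative):

from google.api_core import client_info

# python_version comes from the interpreter, api_core_version from the
# installed google-api-core, and grpc_version from grpcio (None when
# grpcio is not installed).
info = client_info.ClientInfo()
print(info.python_version, info.api_core_version, info.grpc_version)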
Example #5
def test_to_user_agent_minimal():
    info = client_info.ClientInfo(python_version='1',
                                  grpc_version='2',
                                  api_core_version='3')

    user_agent = info.to_user_agent()

    assert user_agent == 'gl-python/1 gax/3 grpc/2'
Example #6
def test_to_user_agent_minimal():
    info = client_info.ClientInfo(
        python_version="1", api_core_version="2", grpc_version=None
    )

    user_agent = info.to_user_agent()

    assert user_agent == "gl-python/1 gax/2"
Example #7
def test_to_user_agent_full():
    info = client_info.ClientInfo(python_version='1',
                                  grpc_version='2',
                                  api_core_version='3',
                                  gapic_version='4',
                                  client_library_version='5')

    user_agent = info.to_user_agent()

    assert user_agent == 'gl-python/1 gccl/5 gapic/4 gax/3 grpc/2'
Example #8
def get_client(project, namespace):
  """Returns a Cloud Datastore client."""
  _client_info = client_info.ClientInfo(
      client_library_version=__version__,
      gapic_version=__version__,
      user_agent=f'beam-python-sdk/{beam_version}')
  _client = client.Client(
      project=project, namespace=namespace, client_info=_client_info)
  # Avoid overwriting user setting. BEAM-7608
  if not os.environ.get(environment_vars.GCD_HOST, None):
    _client.base_url = 'https://batch-datastore.googleapis.com'  # BEAM-1387
  return _client
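A short usage sketch for the Beam helper above (project and namespace values are hypothetical):

# The returned object is a regular google.cloud.datastore Client, so the
# usual key/entity API applies.
client = get_client('my-project', 'my-namespace')
key = client.key('MyKind', 'example-name')
entity = client.get(key)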
Example #9
def test_constructor_options():
    info = client_info.ClientInfo(python_version='1',
                                  grpc_version='2',
                                  api_core_version='3',
                                  gapic_version='4',
                                  client_library_version='5')

    assert info.python_version == '1'
    assert info.grpc_version == '2'
    assert info.api_core_version == '3'
    assert info.gapic_version == '4'
    assert info.client_library_version == '5'
Example #10
def test_to_user_agent_full():
    info = client_info.ClientInfo(
        python_version="1",
        grpc_version="2",
        api_core_version="3",
        gapic_version="4",
        client_library_version="5",
    )

    user_agent = info.to_user_agent()

    assert user_agent == "gl-python/1 grpc/2 gax/3 gapic/4 gccl/5"
Example #11
    def output(self):
        if 'bucket' not in self.output_config:
            raise NotConfiguredException(
                'No destination bucket defined in GCS output.')
        if 'object' not in self.output_config:
            raise NotConfiguredException(
                'No destination object defined in GCS output.')

        bucket_template = self.jinja_environment.from_string(
            self.output_config['bucket'])
        bucket_template.name = 'bucket'
        destination_bucket = bucket_template.render()

        object_template = self.jinja_environment.from_string(
            self.output_config['object'])
        object_template.name = 'object'
        destination_object = object_template.render()

        self.logger.debug('Creating destination file in bucket.',
                          extra={
                              'url':
                              'gs://%s/%s' %
                              (destination_bucket, destination_object)
                          })

        contents_template = self.jinja_environment.from_string(
            self.output_config['contents'])
        contents_template.name = 'contents'
        contents = contents_template.render()
        if 'base64decode' in self.output_config and self.output_config[
                'base64decode']:
            contents = base64.decodebytes(contents.encode('ascii'))

        client_info = grpc_client_info.ClientInfo(
            user_agent='google-pso-tool/pubsub2inbox/1.1.0')
        project = self.output_config[
            'project'] if 'project' in self.output_config else None
        storage_client = storage.Client(client_info=client_info,
                                        project=project)

        bucket = storage_client.bucket(destination_bucket)
        blob = bucket.blob(destination_object)
        blob.upload_from_string(contents)

        self.logger.info('Object created in Cloud Storage bucket.',
                         extra={
                             'url':
                             'gs://%s/%s' %
                             (destination_bucket, destination_object),
                             'size':
                             len(contents)
                         })
Example #12
def test_constructor_w_client_info(mock_transport):
    from google.cloud.bigquery_storage_v1beta1 import client

    def transport_callable(credentials=None, default_class=None):
        return mock_transport

    client_under_test = client.BigQueryStorageClient(
        transport=transport_callable,
        client_info=client_info.ClientInfo(
            client_library_version="test-client-version"),
    )

    user_agent = client_under_test._client_info.to_user_agent()
    assert "test-client-version" in user_agent
Example #13
def test_constructor_options():
    info = client_info.ClientInfo(
        python_version="1",
        grpc_version="2",
        api_core_version="3",
        gapic_version="4",
        client_library_version="5",
    )

    assert info.python_version == "1"
    assert info.grpc_version == "2"
    assert info.api_core_version == "3"
    assert info.gapic_version == "4"
    assert info.client_library_version == "5"
Example #14
    def process(self):
        if 'bigquery' not in self.config:
            raise NotConfiguredException(
                'No BigQuery configuration specified in config!')

        bigquery_config = self.config['bigquery']
        if 'query' not in bigquery_config:
            raise NotConfiguredException(
                'No BigQuery query specified in configuration!')

        data = json.loads(self.data)
        self.jinja_environment.globals = {
            **self.jinja_environment.globals,
            **data
        }

        query_template = self.jinja_environment.from_string(
            bigquery_config['query'])
        query_template.name = 'query'
        query = query_template.render()

        dialect = 'legacy' if 'dialect' in bigquery_config and bigquery_config[
            'dialect'].lower() == 'legacy' else 'standard'
        self.logger.debug('Running BigQuery query.', extra={'query': query})

        client_info = grpc_client_info.ClientInfo(
            user_agent='google-pso-tool/pubsub2inbox/1.1.0')
        project = bigquery_config[
            'project'] if 'project' in bigquery_config else None
        client = bigquery.Client(client_info=client_info, project=project)
        labels = {}
        if 'labels' in bigquery_config:
            labels = bigquery_config['labels']
        job_options = bigquery.job.QueryJobConfig(
            use_legacy_sql=True if dialect == 'legacy' else False,
            labels=labels)

        query_job = client.query(query, job_config=job_options)
        results = query_job.result()
        records = []
        for row in results:
            record = {}
            for k in row.keys():
                record[k] = row.get(k)
            records.append(record)
        self.logger.debug('BigQuery execution finished.',
                          extra={'count': len(records)})
        return {
            'records': records,
        }
Example #15
def new_stats_exporter(options):
    """ new_stats_exporter returns an exporter that
        uploads stats data to Stackdriver Monitoring.
    """
    if str(options.project_id).strip() == "":
        raise Exception(ERROR_BLANK_PROJECT_ID)

    ci = client_info.ClientInfo(client_library_version=get_user_agent_slug())
    client = monitoring_v3.MetricServiceClient(client_info=ci)

    exporter = StackdriverStatsExporter(client=client, options=options)

    if options.default_monitoring_labels is not None:
        exporter.set_default_labels(options.default_monitoring_labels)
    return exporter
Example #16
    def __init__(
        self,
        project=None,
        credentials=None,
        read_only=False,
        admin=False,
        client_info=None,
        client_options=None,
        admin_client_options=None,
        channel=None,
    ):
        if client_info is None:
            client_info = client_info_lib.ClientInfo(
                client_library_version=bigtable.__version__, )
        if read_only and admin:
            raise ValueError("A read-only client cannot also perform"
                             "administrative actions.")

        # NOTE: We set the scopes **before** calling the parent constructor.
        #       It **may** use those scopes in ``with_scopes_if_required``.
        self._read_only = bool(read_only)
        self._admin = bool(admin)
        self._client_info = client_info
        self._emulator_host = os.getenv(BIGTABLE_EMULATOR)

        if self._emulator_host is not None:
            if credentials is None:
                credentials = AnonymousCredentials()
            if project is None:
                project = _DEFAULT_BIGTABLE_EMULATOR_CLIENT

        if channel is not None:
            warnings.warn(
                "'channel' is deprecated and no longer used.",
                DeprecationWarning,
                stacklevel=2,
            )

        self._client_options = client_options
        self._admin_client_options = admin_client_options
        self._channel = channel
        self.SCOPE = self._get_scopes()
        super(Client, self).__init__(
            project=project,
            credentials=credentials,
            client_options=client_options,
        )
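A sketch of constructing this client with an explicit ClientInfo (the project id and version string are illustrative):

from google.api_core import client_info as client_info_lib
from google.cloud import bigtable

# When client_info is omitted, the constructor above falls back to a
# ClientInfo carrying bigtable.__version__.
info = client_info_lib.ClientInfo(client_library_version='my-tool/1.0.0')
client = bigtable.Client(project='my-project', admin=True, client_info=info)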
Example #17
    def __init__(self, project=None, instance=None, database=None, credentials=None, with_pager=False,
                 inp=None, output=None):
        # setup environment variables
        # less option for pager
        if not os.environ.get(config.EnvironmentVariables.LESS):
            os.environ[config.EnvironmentVariables.LESS] = config.Constants.LESS_FLAG
        self.with_pager = with_pager
        self.logger = logging.getLogger('spanner-cli')
        self.logger.debug("Staring spanner-cli project=%s, instance=%s, database=%s", project, instance, database)
        self.project = project
        with warnings.catch_warnings(record=True) as warns:
            warnings.simplefilter("always")
            self.client = spanner.Client(
                project=self.project,
                credentials=credentials,
                client_info=client_info.ClientInfo(user_agent=__name__),
            )
            if len(warns) > 0:
                for w in warns:
                    self.logger.debug(w)
                    click.echo(message=w.message, err=True, nl=True)

        self.instance = self.client.instance(instance)
        self.database = self.instance.database(database)
        self.prompt_message = self.get_prompt_message()
        self.completer = SQLCompleter()
        self.open_history_file()
        self.rehash()
        self.session = PromptSession(
            message=self.prompt_message,
            lexer=PygmentsLexer(lexer.SpannerLexer),
            completer=DynamicCompleter(lambda: self.completer),
            style=style_from_pygments_cls(get_style_by_name(config.get_pygment_style())),
            history=self.history,
            auto_suggest=AutoSuggestFromHistory(),
            input_processors=[ConditionalProcessor(
                processor=HighlightMatchingBracketProcessor(
                    chars='[](){}'),
                filter=HasFocus(DEFAULT_BUFFER) & ~IsDone()  # pylint: disable=invalid-unary-operand-type
            )],
            input=inp,
            output=output,
        )

        self.formatter = tabular_output.TabularOutputFormatter('ascii')
Example #18
def load_configuration(file_name):
    if os.getenv('CONFIG'):
        logger = logging.getLogger('pubsub2inbox')
        secret_manager_url = os.getenv('CONFIG')
        logger.debug('Loading configuration from Secret Manager: %s' %
                     (secret_manager_url))
        client_info = grpc_client_info.ClientInfo(
            user_agent='google-pso-tool/pubsub2inbox/1.0.0')
        client = secretmanager.SecretManagerServiceClient(
            client_info=client_info)
        response = client.access_secret_version(name=secret_manager_url)
        configuration = response.payload.data.decode('UTF-8')
    else:
        with open(file_name) as config_file:
            configuration = config_file.read()

    cfg = yaml.load(configuration, Loader=yaml.SafeLoader)
    return cfg
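A usage sketch for the loader above (the Secret Manager version path is hypothetical):

import os

# With CONFIG unset, the named file is read directly; with CONFIG set, the
# configuration is fetched from Secret Manager and file_name is ignored.
os.environ['CONFIG'] = 'projects/my-project/secrets/pubsub2inbox/versions/latest'
cfg = load_configuration('config.yaml')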
Example #19
def new_stats_exporter(options):
    """ new_stats_exporter returns an exporter that
        uploads stats data to Stackdriver Monitoring.
    """
    if str(options.project_id).strip() == "":
        raise Exception(ERROR_BLANK_PROJECT_ID)

    ci = client_info.ClientInfo(client_library_version=get_user_agent_slug())
    client = monitoring_v3.MetricServiceClient(client_info=ci)

    exporter = StackdriverStatsExporter(client=client, options=options)

    if options.default_monitoring_labels is not None:
        exporter.set_default_labels(options.default_monitoring_labels)
    else:
        label = {}
        key = remove_non_alphanumeric(get_task_value())
        label[key] = OPENCENSUS_TASK_DESCRIPTION
        exporter.set_default_labels(label)
    return exporter
Example #20
def test_constructor_w_client_info():
    from google.cloud import bigquery_storage

    class MyTransport:
        def __init__(self, *args, **kwargs):
            self.args = args
            self.kwargs = kwargs

    transport_class_patcher = mock.patch.object(
        bigquery_storage.BigQueryReadClient,
        "get_transport_class",
        return_value=MyTransport,
    )

    with transport_class_patcher:
        client_under_test = bigquery_storage.BigQueryReadClient(
            client_info=client_info.ClientInfo(
                client_library_version="test-client-version"), )

    transport_client_info = client_under_test._transport.kwargs["client_info"]
    user_agent = transport_client_info.to_user_agent()
    assert "test-client-version" in user_agent
Example #21
    def _read_charging_codes(self):
        if len(self.charging_codes) == 0:
            client_info = grpc_client_info.ClientInfo(
                user_agent='google-pso-tool/turbo-project-factory/1.0.0')
            storage_client = storage.Client(client_info=client_info)
            bucket = storage_client.bucket(self.bucket)
            blob = bucket.blob(self.object_path)
            if blob.exists():
                blob_contents = io.BytesIO()
                storage_client.download_blob_to_file(blob, blob_contents)
                charging_codes = json.loads(
                    blob_contents.getvalue().decode('utf-8'))
                if isinstance(charging_codes, list):
                    for code in charging_codes:
                        self.charging_codes.append({
                            'id': code,
                            'title': code,
                            'description': None,
                            'group': ''
                        })
                else:
                    self.charging_codes = charging_codes
Example #22
def make_datastore_api(client):
    """Create an instance of the GAPIC Datastore API.

    :type client: :class:`~google.cloud.datastore.client.Client`
    :param client: The client that holds configuration details.

    :rtype: :class:`.datastore.v1.datastore_client.DatastoreClient`
    :returns: A datastore API instance with the proper credentials.
    """
    parse_result = six.moves.urllib_parse.urlparse(client._base_url)
    host = parse_result.netloc
    if parse_result.scheme == "https":
        channel = make_secure_channel(client._credentials, DEFAULT_USER_AGENT,
                                      host)
    else:
        channel = insecure_channel(host)

    return datastore_client.DatastoreClient(
        channel=channel,
        client_info=client_info.ClientInfo(client_library_version=__version__,
                                           gapic_version=__version__),
    )
Example #23
def test_to_grpc_metadata():
    info = client_info.ClientInfo()

    metadata = info.to_grpc_metadata()

    assert metadata == (client_info.METRICS_METADATA_KEY, info.to_user_agent())
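A minimal sketch of the metadata pair in practice (the exact user-agent value depends on the installed versions):

from google.api_core import client_info

info = client_info.ClientInfo()
key, value = info.to_grpc_metadata()
# key is client_info.METRICS_METADATA_KEY ('x-goog-api-client'); value is
# the same string that info.to_user_agent() returns.
print(key, value)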
Example #24
def process_message(config, data, event, context):
    logger = logging.getLogger('pubsub2inbox')

    # Ignore messages submitted before our retry period
    retry_period = '2 days ago'
    if 'retryPeriod' in config:
        retry_period = config['retryPeriod']
    retry_period_parsed = parsedatetime.Calendar().parse(retry_period)
    if len(retry_period_parsed) > 1:
        retry_earliest = datetime.fromtimestamp(mktime(retry_period_parsed[0]),
                                                timezone.utc)
    else:
        retry_earliest = datetime.fromtimestamp(mktime(retry_period_parsed),
                                                timezone.utc)
    message_time = parser.parse(context.timestamp)
    if (message_time - retry_earliest) < timedelta(0, 0):
        logger.warning('Ignoring message because it\'s past the retry period.',
                       extra={
                           'event_id': context.event_id,
                           'retry_period': retry_period,
                           'retry_earliest': retry_earliest.strftime('%c'),
                           'event_timestamp': message_time
                       })
        raise MessageTooOldException(
            'Ignoring message because it\'s past the retry period.')

    template_variables = {
        'data': data,
        'event': event,
        'context': context,
    }

    jinja_environment = get_jinja_environment()
    if 'processors' in config:
        for processor in config['processors']:
            logger.debug('Processing message using input processor: %s' %
                         processor)
            mod = __import__('processors.%s' % processor)
            processor_module = getattr(mod, processor)
            processor_class = getattr(processor_module,
                                      '%sProcessor' % processor.capitalize())
            processor_instance = processor_class(config, jinja_environment,
                                                 data, event, context)
            processor_variables = processor_instance.process()
            template_variables.update(processor_variables)

    jinja_environment.globals = template_variables

    if 'processIf' in config:
        processif_template = jinja_environment.from_string(config['processIf'])
        processif_template.name = 'processif'
        processif_contents = processif_template.render()
        if processif_contents.strip() == '':
            logger.info(
                'Will not send message because processIf evaluated to empty.')
            return

    if 'resendBucket' in config:
        if 'resendPeriod' not in config:
            raise NoResendConfigException(
                'No resendPeriod configured, even though resendBucket is set!')

        resend_key_hash = hashlib.sha256()
        if 'resendKey' not in config:
            default_resend_key = template_variables.copy()
            default_resend_key.pop('context')
            resend_key_hash.update(
                json.dumps(default_resend_key).encode('utf-8'))
        else:
            key_template = jinja_environment.from_string(config['resendKey'])
            key_template.name = 'resend'
            key_contents = key_template.render()
            resend_key_hash.update(key_contents.encode('utf-8'))

        resend_file = resend_key_hash.hexdigest()
        logger.debug('Checking for resend object in bucket...',
                     extra={
                         'bucket': config['resendBucket'],
                         'blob': resend_file
                     })
        client_info = grpc_client_info.ClientInfo(
            user_agent='google-pso-tool/pubsub2inbox/1.0.0')

        storage_client = storage.Client(client_info=client_info)
        bucket = storage_client.bucket(config['resendBucket'])
        resend_blob = bucket.blob(resend_file)
        if resend_blob.exists():
            resend_blob.reload()
            resend_period = config['resendPeriod']
            resend_period_parsed = parsedatetime.Calendar().parse(
                resend_period, sourceTime=resend_blob.time_created)
            if len(resend_period_parsed) > 1:
                resend_earliest = datetime.fromtimestamp(
                    mktime(resend_period_parsed[0]))
            else:
                resend_earliest = datetime.fromtimestamp(
                    mktime(resend_period_parsed))

            if datetime.now() >= resend_earliest:
                logger.debug('Resending the message now.',
                             extra={
                                 'resend_earliest': resend_earliest,
                                 'blob_time_created': resend_blob.time_created
                             })
                resend_blob.upload_from_string('')
            else:
                logger.info(
                    'Can\'t resend the message now, resend period not elapsed.',
                    extra={
                        'resend_earliest': resend_earliest,
                        'blob_time_created': resend_blob.time_created
                    })
                return
        else:
            try:
                resend_blob.upload_from_string('', if_generation_match=0)
            except Exception as exc:
                # Handle TOCTOU condition
                if 'conditionNotMet' in str(exc):
                    logger.warning(
                        'Message (re)sending already in progress (resend key already exists).',
                        extra={'exception': exc})
                    return
                else:
                    raise exc
                return

    if 'outputs' in config:
        for output_config in config['outputs']:
            if 'type' not in output_config:
                raise NoTypeConfiguredException(
                    'No type configured for output!')
            logger.debug('Processing message using output processor: %s' %
                         output_config['type'])

            output_type = output_config['type']
            mod = __import__('output.%s' % output_type)
            output_module = getattr(mod, output_type)
            output_class = getattr(output_module,
                                   '%sOutput' % output_type.capitalize())
            output_instance = output_class(config, output_config,
                                           jinja_environment, data, event,
                                           context)
            output_instance.output()
    else:
        raise NoOutputsConfiguredException('No outputs configured!')
Example #25
    def process(self):
        if 'recommendations' not in self.config:
            raise NotConfiguredException(
                'No Recommender configuration specified in config!')

        recommender_config = self.config['recommendations']

        for recommender in recommender_config['recommender_types']:
            if recommender not in self.recommenders:
                raise UnknownRecommenderException(
                    'Unknown recommender %s specified in config!' %
                    (recommender))

        data = json.loads(self.data)
        self.jinja_environment.globals = {
            **self.jinja_environment.globals,
            **data
        }

        projects = []
        if 'projects' in recommender_config:
            projects = self._jinja_var_to_list(recommender_config['projects'],
                                               'projects')
        folders = []
        if 'folders' in recommender_config:
            folders = self._jinja_var_to_list(recommender_config['folders'],
                                              'folders')
        organizations = []
        if 'organizations' in recommender_config:
            organizations = self._jinja_var_to_list(
                recommender_config['organizations'], 'organizations')
        billing_accounts = []
        if 'billingAccounts' in recommender_config:
            billing_accounts = self._jinja_var_to_list(
                recommender_config['billingAccounts'], 'billing_accounts')

        if len(projects) == 0 and len(folders) == 0 and len(
                organizations) == 0 and len(billing_accounts) == 0:
            raise NotConfiguredException(
                'No projects, organizations, folders or billing accounts specified in config!'
            )

        location_filters = self._jinja_var_to_list(
            recommender_config['locations'], 'locations')
        if len(location_filters) == 0:
            raise NotConfiguredException(
                'No location filters specified in config!')

        client_info = grpc_client_info.ClientInfo(
            user_agent='google-pso-tool/pubsub2inbox/1.1.0')
        client = RecommenderClient(client_info=client_info)

        credentials, project_id = google.auth.default(
            ['https://www.googleapis.com/auth/cloud-platform'])
        branded_http = google_auth_httplib2.AuthorizedHttp(credentials)
        branded_http = http.set_user_agent(
            branded_http, 'google-pso-tool/pubsub2inbox/1.1.0')

        compute_service = discovery.build('compute', 'v1', http=branded_http)
        if len(projects) == 0:
            raise NotConfiguredException(
                'Please specify at least one project to fetch regions and zones.'
            )
        all_zones = self.get_zones(compute_service, projects[0],
                                   location_filters)
        all_regions = self.get_regions(compute_service, projects[0],
                                       location_filters)
        all_locations = all_zones + all_regions
        self.logger.debug('Fetched all available locations.',
                          extra={'locations': all_locations})

        parents = []
        for project in self.expand_projects(projects):
            parents.append(('projects/%s' % project[1], project))
        for organization in organizations:
            parents.append(('organizations/%s' % organization, [organization]))
        for folder in folders:
            parents.append(('folders/%s' % folder, [folder]))
        for billing_account in billing_accounts:
            parents.append(
                ('billingAccounts/%s' % billing_account, [billing_account]))
        self.logger.debug('Determined all parents.',
                          extra={'parents': parents})

        recommendations = {}
        recommendations_rollup = {}
        if 'fetch_recommendations' in recommender_config:
            fetch_recommendations = self._jinja_expand_bool(
                recommender_config['fetch_recommendations'])
            if fetch_recommendations:
                recommendations = self.get_recommendations(
                    client, recommender_config['recommender_types'], parents,
                    all_locations, recommender_config['recommendation_filter']
                    if 'recommendation_filter' in recommender_config else None)
                recommendations_rollup = self.rollup_recommendations(
                    recommendations)

        insights = {}
        insights_rollup = {}
        if 'fetch_insights' in recommender_config:
            fetch_insights = self._jinja_expand_bool(
                recommender_config['fetch_insights'])
            if fetch_insights:
                insights = self.get_insights(
                    client, recommender_config['insight_types'], parents,
                    all_locations, recommender_config['insight_filter']
                    if 'insight_filter' in recommender_config else None)
                insights_rollup = self.rollup_insights(insights)

        self.logger.debug('Fetching recommendations and/or insights finished.')
        _ret = {
            'recommendations': recommendations,
            'recommendations_rollup': recommendations_rollup,
            'insights': insights,
            'insights_rollup': insights_rollup,
        }
        if 'vars' in recommender_config:
            return {**recommender_config['vars'], **_ret}
        return _ret
Example #26
                                     description='Group members'),
                             )),
        bigquery.SchemaField("chargingCode",
                             "STRING",
                             mode="REQUIRED",
                             description="Chargeback code"),
        bigquery.SchemaField("budget",
                             "INT64",
                             mode="NULLABLE",
                             description="Project budget"),
        bigquery.SchemaField("environment",
                             "STRING",
                             mode="REQUIRED",
                             description="Environment"),
        bigquery.SchemaField("status",
                             "STRING",
                             mode="REQUIRED",
                             description="Project status"),
    ],
    write_disposition=bigquery.job.WriteDisposition.WRITE_TRUNCATE)

client_info = grpc_client_info.ClientInfo(
    user_agent='google-pso-tool/turbo-project-factory/1.0.0')
client = bigquery.Client(client_info=client_info)
print('Writing projects to BigQuery (%s)...' % config['bigqueryDestination'],
      file=sys.stderr)
client.load_table_from_json(rows,
                            bigquery.table.Table(config['bigqueryDestination']),
                            job_config=job_config)
print('All done.', file=sys.stderr)
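One possible refinement, sketched here: load_table_from_json returns a LoadJob, so the caller can block until the load finishes (the snippet above does not wait; client, rows, config and job_config are reused from it):

job = client.load_table_from_json(
    rows,
    bigquery.table.Table(config['bigqueryDestination']),
    job_config=job_config)
job.result()  # Raises if the load job failed.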
Example #27
from google.cloud.firestore_v1.base_collection import BaseCollectionReference
from google.cloud.firestore_v1.base_document import BaseDocumentReference
from google.cloud.firestore_v1.base_transaction import BaseTransaction
from google.cloud.firestore_v1.base_batch import BaseWriteBatch
from google.cloud.firestore_v1.base_query import BaseQuery

DEFAULT_DATABASE = "(default)"
"""str: The default database used in a :class:`~google.cloud.firestore_v1.client.Client`."""
_DEFAULT_EMULATOR_PROJECT = "google-cloud-firestore-emulator"
_BAD_OPTION_ERR = ("Exactly one of ``last_update_time`` or ``exists`` "
                   "must be provided.")
_BAD_DOC_TEMPLATE: str = (
    "Document {!r} appeared in response but was not present among references")
_ACTIVE_TXN: str = "There is already an active transaction."
_INACTIVE_TXN: str = "There is no active transaction."
_CLIENT_INFO: Any = client_info.ClientInfo(client_library_version=__version__)
_FIRESTORE_EMULATOR_HOST: str = "FIRESTORE_EMULATOR_HOST"


class BaseClient(ClientWithProject):
    """Client for interacting with Google Cloud Firestore API.

    .. note::

        Since the Cloud Firestore API requires the gRPC transport, no
        ``_http`` argument is accepted by this class.

    Args:
        project (Optional[str]): The project which the client acts on behalf
            of. If not passed, falls back to the default inferred
            from the environment.
Example #28
    def output(self):
        if 'sourceBucket' not in self.output_config:
            raise NotConfiguredException(
                'No source bucket defined in GCS output.')
        if 'sourceObject' not in self.output_config:
            raise NotConfiguredException(
                'No source object defined in GCS output.')
        if 'destinationBucket' not in self.output_config:
            raise NotConfiguredException(
                'No destination bucket defined in GCS output.')
        if 'destinationObject' not in self.output_config:
            raise NotConfiguredException(
                'No destination object defined in GCS output.')

        bucket_template = self.jinja_environment.from_string(
            self.output_config['sourceBucket'])
        bucket_template.name = 'bucket'
        source_bucket = bucket_template.render()

        object_template = self.jinja_environment.from_string(
            self.output_config['sourceObject'])
        object_template.name = 'object'
        source_object = object_template.render()

        bucket_template = self.jinja_environment.from_string(
            self.output_config['destinationBucket'])
        bucket_template.name = 'bucket'
        destination_bucket = bucket_template.render()

        object_template = self.jinja_environment.from_string(
            self.output_config['destinationObject'])
        object_template.name = 'object'
        destination_object = object_template.render()

        self.logger.debug('Starting to copy source to destination.',
                          extra={
                              'source_url':
                              'gs://%s/%s' % (source_bucket, source_object),
                              'destination_url':
                              'gs://%s/%s' %
                              (destination_bucket, destination_object)
                          })

        client_info = grpc_client_info.ClientInfo(
            user_agent='google-pso-tool/pubsub2inbox/1.1.0')
        project = self.output_config[
            'project'] if 'project' in self.output_config else None
        storage_client = storage.Client(client_info=client_info,
                                        project=project)

        bucket = storage_client.bucket(source_bucket)
        source_blob = bucket.blob(source_object)

        bucket = storage_client.bucket(destination_bucket)
        destination_blob = bucket.blob(destination_object)
        token = None
        while True:
            self.logger.debug(
                'Copying file...',
                extra={
                    'token':
                    token,
                    'source_url':
                    'gs://%s/%s' % (source_bucket, source_object),
                    'destination_url':
                    'gs://%s/%s' % (destination_bucket, destination_object)
                })
            ret = destination_blob.rewrite(source_blob, token=token)
            token = ret[0]
            if token is None:
                break

        self.logger.info('Object copied from source to destination.',
                         extra={
                             'bytes_rewritten':
                             ret[1],
                             'total_bytes':
                             ret[2],
                             'source_url':
                             'gs://%s/%s' % (source_bucket, source_object),
                             'destination_url':
                             'gs://%s/%s' %
                             (destination_bucket, destination_object)
                         })