Example #1
import datetime
import logging

import googleapiclient.errors
from google.api_core.exceptions import GoogleAPIError
from google.cloud import asset_v1


def _main(project=None,
          bq_project=None,
          bq_dataset=None,
          bq_table=None,
          read_time=None,
          verbose=False):
    'Module entry point used by CLI and Cloud Function wrappers.'

    # _configure_logging is a helper defined elsewhere in the module,
    # presumably setting the log level from the verbose flag.
    _configure_logging(verbose)
    if not read_time:
        read_time = datetime.datetime.now()
    client = asset_v1.AssetServiceClient()
    parent = 'projects/%s' % project
    content_type = asset_v1.ContentType.RESOURCE
    output_config = asset_v1.OutputConfig()
    output_config.bigquery_destination.dataset = 'projects/%s/datasets/%s' % (
        bq_project, bq_dataset)
    # Suffix the table name with the read date so each day's export lands in
    # its own table; force=True overwrites the table if it already exists.
    output_config.bigquery_destination.table = '%s_%s' % (
        bq_table, read_time.strftime('%Y%m%d'))
    output_config.bigquery_destination.force = True
    try:
        response = client.export_assets(
            request={
                'parent': parent,
                'read_time': read_time,
                'content_type': content_type,
                'output_config': output_config
            })
        return response
    except (GoogleAPIError, googleapiclient.errors.HttpError) as e:
        logging.debug('API Error: %s', e, exc_info=True)
        raise RuntimeError(
            'Error exporting Asset Inventory entries (parent: %s)' % parent) from e
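
The function above is shared by CLI and Cloud Function wrappers that are not shown here. As a minimal sketch, a CLI wrapper could look like the following, using argparse; the flag names are illustrative, not taken from the original module:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Export Cloud Asset Inventory to BigQuery.')
    parser.add_argument('--project', required=True)
    parser.add_argument('--bq-project', required=True)
    parser.add_argument('--bq-dataset', required=True)
    parser.add_argument('--bq-table', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    _main(project=args.project,
          bq_project=args.bq_project,
          bq_dataset=args.bq_dataset,
          bq_table=args.bq_table,
          verbose=args.verbose)
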
Example #2
def export_assets(project_id, dump_file_path):
    # [START asset_quickstart_export_assets]
    from google.cloud import asset_v1

    # TODO project_id = 'Your Google Cloud Project ID'
    # TODO dump_file_path = 'Your asset dump file path'

    client = asset_v1.AssetServiceClient()
    parent = "projects/{}".format(project_id)
    output_config = asset_v1.OutputConfig()
    output_config.gcs_destination.uri = dump_file_path
    # export_assets returns a long-running operation; result() blocks until
    # the export to Cloud Storage completes.
    response = client.export_assets(request={
        "parent": parent,
        "output_config": output_config
    })
    print(response.result())
    # [END asset_quickstart_export_assets]
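
A usage sketch for the quickstart above; the project ID and the gs:// object path are placeholders:

export_assets("my-project-id", "gs://my-bucket/asset-dump.json")
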
Example #3
def hello_pubsub(event, context):
    """Background Cloud Function triggered by Pub/Sub; exports the asset
    inventory of the parent resource to a timestamped object in GCS."""
    import os
    import time

    from google.cloud import asset_v1

    parent_id = os.environ['PARENT']
    dump_file_path = os.environ['GCS_FILE_PATH']
    now = time.time()

    client = asset_v1.AssetServiceClient()
    output_config = asset_v1.OutputConfig()
    # Append the current Unix timestamp so each invocation writes a new object.
    output_config.gcs_destination.uri = dump_file_path + str(now)
    content_type = asset_v1.ContentType.RESOURCE

    response = client.export_assets(
        request={
            "parent": parent_id,
            "content_type": content_type,
            "output_config": output_config
        }
    )
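
Because the function reads its configuration from environment variables, it can be smoke-tested locally before deploying; the values below are placeholders:

import os

os.environ["PARENT"] = "projects/my-project-id"
os.environ["GCS_FILE_PATH"] = "gs://my-bucket/asset-dump-"
hello_pubsub(event={"data": ""}, context=None)
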
Example #4
from google.cloud import asset_v1


def sample_export_assets():
    # Create a client
    client = asset_v1.AssetServiceClient()

    # Initialize request argument(s)
    output_config = asset_v1.OutputConfig()
    output_config.gcs_destination.uri = "uri_value"

    request = asset_v1.ExportAssetsRequest(
        parent="parent_value",
        output_config=output_config,
    )

    # Make the request
    operation = client.export_assets(request=request)

    print("Waiting for operation to complete...")

    response = operation.result()

    # Handle the response
    print(response)
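
Here operation is a google.api_core long-running operation, and result() with no arguments waits indefinitely. For large inventories it can be safer to bound the wait; the 300-second timeout below is an arbitrary choice:

# Raises concurrent.futures.TimeoutError if the export has not finished in
# time; the export itself keeps running server-side.
response = operation.result(timeout=300)
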
Example #5
def export_assets_bigquery(project_id, dataset, table, content_type):
    # [START asset_quickstart_export_assets_bigquery]
    from google.cloud import asset_v1

    # TODO project_id = 'Your Google Cloud Project ID'
    # TODO dataset = 'Your BigQuery dataset path'
    # TODO table = 'Your BigQuery table name'
    # TODO content_type = "Content type to export"

    client = asset_v1.AssetServiceClient()
    parent = "projects/{}".format(project_id)
    output_config = asset_v1.OutputConfig()
    output_config.bigquery_destination.dataset = dataset
    output_config.bigquery_destination.table = table
    output_config.bigquery_destination.force = True
    response = client.export_assets(
        request={
            "parent": parent,
            "content_type": content_type,
            "output_config": output_config
        })
    print(response.result())
    # [END asset_quickstart_export_assets_bigquery]
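
A usage sketch for the BigQuery variant above; note that the dataset must be the full resource path, and all values below are placeholders:

from google.cloud import asset_v1

export_assets_bigquery(
    project_id="my-project-id",
    dataset="projects/my-project-id/datasets/my_dataset",
    table="assets_snapshot",
    content_type=asset_v1.ContentType.RESOURCE)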