Example #1
def channel_clear(config, task):

    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_Channels',
        Discovery_To_BigQuery('displayvideo',
                              'v1').method_schema('advertisers.channels.list'))
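The *_clear pattern above repeats throughout the examples that follow: recreate a BigQuery table whose schema is derived from the Discovery document of the API method that will later fill it. Purely to make that shared shape explicit, a hypothetical generic helper (resource_clear is not part of the source) could look like:

def resource_clear(config, task, table, api, version, method, iterate=False):
    # Hypothetical helper: same call shape as the *_clear functions in these
    # examples, with the table name and API method passed in as parameters.
    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        table,
        Discovery_To_BigQuery(api, version).method_schema(method, iterate=iterate))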
Example #2
def cm_site_clear(config, task):
    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'CM_Sites',
        Discovery_To_BigQuery('dfareporting',
                              'v3.4').method_schema('sites.list',
                                                    iterate=True))
Example #3
def custom_list_clear(config, task):

    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_Custom_Lists',
        Discovery_To_BigQuery('displayvideo',
                              'v1').method_schema('customLists.list'))
Example #4
def first_and_third_party_audience_clear(config, task):
    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_First_And_Third_Party_Audiences',
        Discovery_To_BigQuery(
            'displayvideo',
            'v1').method_schema('firstAndThirdPartyAudiences.list'))
Example #5
def location_list_clear(config, task):
    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_Location_Lists',
        Discovery_To_BigQuery(
            'displayvideo',
            'v1').method_schema('advertisers.locationLists.list'))
Example #6
def insertion_order_clear(config, task):
    table_create(
        config, task["auth_bigquery"], config.project, task["dataset"],
        "DV_InsertionOrders",
        Discovery_To_BigQuery(
            "displayvideo",
            "v1").method_schema("advertisers.insertionOrders.list"))
Example #7
def inventory_group_clear(config, task):

    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_Inventory_Groups',
        Discovery_To_BigQuery(
            'displayvideo', 'v1').method_schema('inventorySourceGroups.list'))
Example #8
def cm_placement_group_clear(config, task):
    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'CM_PlacementGroups',
        Discovery_To_BigQuery('dfareporting',
                              'v3.4').method_schema('placementGroups.list',
                                                    iterate=True))
Example #9
def cm_site_load(config, task):

    # load sites for each CM account in the user defined sheet
    def load_multiple():
        advertisers = [
            str(lookup_id(r)) for r in set(
                get_rows(config,
                         task['auth_cm'], {
                             'sheets': {
                                 'sheet': task['sheet'],
                                 'tab': 'CM Advertisers',
                                 'header': False,
                                 'range': 'A2:A'
                             }
                         },
                         unnest=True))
        ]

        for row in get_rows(
                config, task['auth_sheets'], {
                    'sheets': {
                        'sheet': task['sheet'],
                        'tab': 'CM Accounts',
                        'header': False,
                        'range': 'A2:A'
                    }
                }):
            if row:
                account_id = lookup_id(row[0])

                is_superuser, profile_id = get_profile_for_api(
                    config, task['auth_cm'], account_id)
                kwargs = {'profileId': profile_id, 'approved': True}
                if is_superuser:
                    kwargs['accountId'] = account_id

                yield from API_DCM(
                    config,
                    task['auth_cm'],
                    iterate=True,
                    internal=is_superuser).sites().list(**kwargs).execute()

    cm_site_clear(config, task)

    # write sites to database
    put_rows(
        config, task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                task['dataset'],
                'table':
                'CM_Sites',
                'schema':
                Discovery_To_BigQuery('dfareporting', 'v3.4').method_schema(
                    'sites.list', iterate=True),
                'format':
                'JSON'
            }
        }, load_multiple())
Example #10
def negative_keyword_list_clear(config, task):

    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_Negative_Keyword_Lists',
        Discovery_To_BigQuery(
            'displayvideo',
            'v1').method_schema('advertisers.negativeKeywordLists.list'))
Example #11
def dv_advertiser_clear(config, task):
    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_Advertisers',
        Discovery_To_BigQuery('displayvideo',
                              'v1').method_schema('advertisers.list'))

    sheets_clear(config, task['auth_sheets'], task['sheet'], 'DV Advertisers',
                 'B2:D')
Example #12
def line_item_clear(config, task):
    table_create(
        config, task["auth_bigquery"], config.project, task["dataset"],
        "DV_LineItems",
        Discovery_To_BigQuery(
            "displayvideo", "v1").method_schema("advertisers.lineItems.list"))

    sheets_clear(config, task["auth_sheets"], task["sheet"], "Line Items",
                 "B2:H")
Example #13
def google_api_execute(config, auth, api_call, results, errors, limit=None):
    """Execute the actual API call and write to the end points defined.

  The API call is completely defined at this point.
  The results and error definition is optional.

  Args:
    auth (string): either "user" or "service" to make the API call.
    api_call (dict): the JSON for the API call as defined in recipe.
    results (dict): defines where the data will be written
    errors (dict): defines where the errors will be written
    limit (int): Reduce the number of calls ( mostly for debugging )

  Returns (dict):
    None, all data is transfered between API / BigQuery

  Raises:
    ValueError: If a required key in the recipe is missing.
  """

    try:
        rows = API(config, api_call).execute()

        if results:
            # check if single object needs conversion to rows
            if isinstance(rows, dict):
                rows = [rows]

            # check if simple string API results
            elif results.get('bigquery', {}).get('format', 'JSON') == 'CSV':
                rows = [[r] for r in rows]

            rows = map(lambda r: Discovery_To_BigQuery.clean(r), rows)
            put_rows(config, auth, results, rows)

            if 'bigquery' in results:
                results['bigquery']['disposition'] = 'WRITE_APPEND'

    except HttpError as e:

        if errors:
            rows = [{
                'Error':
                str(e),
                'Parameters': [{
                    'Key': k,
                    'Value': str(v)
                } for k, v in api_call['kwargs'].items()]
            }]
            put_rows(config, auth, errors, rows)

            if 'bigquery' in errors:
                errors['bigquery']['disposition'] = 'WRITE_APPEND'

        else:
            raise e
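A minimal usage sketch for google_api_execute, based on how bqflow() (Example #22 below) assembles its arguments; the dataset, table, and kwargs values are illustrative assumptions, and in the full flow the results and errors definitions are first expanded by helpers such as google_api_build_results (Example #26 below):

# Sketch only: argument shapes for google_api_execute, with assumed values.
api_call = {
    'auth': 'user',
    'api': 'dfareporting',
    'version': 'v3.4',
    'function': 'sites.list',
    'kwargs': {'profileId': 1234567}  # hypothetical profile id
}
results = {'bigquery': {'dataset': 'my_dataset', 'table': 'BQFlow__RESULTS__sites', 'format': 'JSON'}}
errors = {'bigquery': {'dataset': 'my_dataset', 'table': 'BQFlow__ERRORS__sites', 'format': 'JSON'}}

google_api_execute(config, 'user', api_call, results, errors)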
Example #14
def cm_profile_clear(config, task):
    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'CM_Profiles',
        Discovery_To_BigQuery('dfareporting',
                              'v3.4').method_schema('userProfiles.list',
                                                    iterate=True))

    sheets_clear(config, task['auth_sheets'], task['sheet'], 'CM Profiles',
                 'B2:E')
Example #15
def inventory_source_clear(config, task):

    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_Inventory_Sources',
        Discovery_To_BigQuery('displayvideo',
                              'v1').method_schema('inventorySources.list'))

    sheets_clear(config, task['auth_sheets'], task['sheet'],
                 'Inventory Sources', 'A2:Z')
Example #16
def dv_algorithm_clear(config, task):
    table_create(
        config, task['auth_bigquery'], config.project, task['dataset'],
        'DV_Algorithms',
        Discovery_To_BigQuery(
            'displayvideo',
            'v1').method_schema('customBiddingAlgorithms.list'))

    sheets_clear(config, task['auth_sheets'], task['sheet'], 'DV Algorithms',
                 'B2:D')
Example #17
def dv_line_item_load(config, task):

    # load line items for each advertiser in the user defined sheet
    def dv_line_item_load_multiple():
        campaigns = set([
            lookup_id(row[0]) for row in get_rows(
                config, task["auth_sheets"], {
                    "sheets": {
                        "sheet": task["sheet"],
                        "tab": "DV Campaigns",
                        "header": False,
                        "range": "A2:A"
                    }
                })
        ])

        rows = get_rows(
            config, task["auth_sheets"], {
                "sheets": {
                    "sheet": task["sheet"],
                    "tab": "DV Advertisers",
                    "header": False,
                    "range": "A2:A"
                }
            })

        # filter which entityStatus values to include when listing line items
        for row in rows:
            for record in API_DV360(
                    config, task["auth_dv"], iterate=True
            ).advertisers().lineItems(
            ).list(advertiserId=lookup_id(row[0]),
                   filter=
                   'entityStatus="ENTITY_STATUS_PAUSED" OR entityStatus="ENTITY_STATUS_ACTIVE" OR entityStatus="ENTITY_STATUS_DRAFT"'
                   ).execute():
                if not campaigns or record['campaignId'] in campaigns:
                    yield record

    # write line_items to database
    put_rows(
        config, task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "table":
                "DV_LineItems",
                "schema":
                Discovery_To_BigQuery(
                    "displayvideo",
                    "v1").method_schema("advertisers.lineItems.list"),
                "format":
                "JSON"
            }
        }, dv_line_item_load_multiple())
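The long entityStatus filter string used here (and in several other DV360 loaders) is just a disjunction over three statuses; a small sketch, for illustration only, of assembling the identical string from a list:

# Sketch: building the DV360 entityStatus filter used by the loaders above.
statuses = ['ENTITY_STATUS_PAUSED', 'ENTITY_STATUS_ACTIVE', 'ENTITY_STATUS_DRAFT']
status_filter = ' OR '.join('entityStatus="%s"' % s for s in statuses)
# status_filter == 'entityStatus="ENTITY_STATUS_PAUSED" OR entityStatus="ENTITY_STATUS_ACTIVE" OR entityStatus="ENTITY_STATUS_DRAFT"'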
Example #18
def partner_clear(config, task):
    table_create(
        config,
        task['auth_bigquery'],
        config.project,
        task['dataset'],
        'DV_Partners',
        Discovery_To_BigQuery('displayvideo',
                              'v1').method_schema('partners.list'),
    )

    sheets_clear(config, task['auth_sheets'], task['sheet'], 'Partners',
                 'B2:Z')
Example #19
def creative_clear(config, task):
    table_create(
        config,
        task["auth_bigquery"],
        config.project,
        task["dataset"],
        "DV_Creatives",
        Discovery_To_BigQuery(
            "displayvideo", "v1").method_schema("advertisers.creatives.list"),
    )

    sheets_clear(config, task["auth_sheets"], task["sheet"], "Creatives",
                 "B2:Z")
Example #20
def vision_api(config, task):

  # Eventually add format detection or parameters to put_rows
  if 'bigquery' in task['responses']:
    task['responses']['bigquery']['format'] = 'JSON'

  schema = Discovery_To_BigQuery(
    'vision',
    'v1'
  ).resource_schema(
    'AnnotateImageResponse'
  )

  # prepend imageUri field to the schema so responses map back to requests
  schema.insert(0, {'description': 'Mapping back to request.', 'name': 'imageUri', 'type': 'STRING', 'mode': 'REQUIRED'})

  put_rows(
    config,
    task['auth'],
    task['responses'],
    vision_annotate(config, task),
    schema
  )
Example #21
def google_audience_clear(config, task):

  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_Google_Audiences',
    Discovery_To_BigQuery(
      'displayvideo',
      'v1'
    ).method_schema(
      'googleAudiences.list'
    )
  )
Example #22
def bqflow(config, task):

    if config.verbose: print('BQFLOW')

    endpoints = []

    # load dataset / table list
    for dataset, table, kind in table_list(config, task['auth'],
                                           config.project):
        if table.startswith('BQFlow__') and not table.startswith(
                'BQFlow__RESULTS__') and not table.startswith(
                    'BQFlow__ERRORS__'):
            print(table, kind)
            endpoints.append({'dataset': dataset, kind.lower(): table})

    for endpoint in endpoints:
        if 'table' in endpoint:
            _, api, function = endpoint['table'].split('__', 2)
            function = function.replace('__', '.')

            api_call = {
                'auth':
                'user',
                'api':
                api,
                'version':
                Discovery_To_BigQuery.preferred_version(api, task.get('key')),
                'function':
                function,
            }

            kwargs_list = get_rows(config,
                                   task['auth'],
                                   build_request(endpoint),
                                   as_object=True)

            results = build_results(config, task['auth'], api_call, endpoint)
            errors = build_errors(config, task['auth'], api_call, endpoint)

            for kwargs in kwargs_list:
                api_call['kwargs'] = kwargs

                if config.verbose: print('BQFLOW API CALL:', api_call)

                google_api_initilaize(config, api_call)
                google_api_execute(config, task['auth'], api_call, results,
                                   errors)
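bqflow() is driven entirely by a table naming convention: BQFlow__<api>__<method path with '.' replaced by '__'>. A short sketch of the parsing done above, using a hypothetical table name:

# Hypothetical endpoint table name following the convention bqflow() expects.
table = 'BQFlow__dfareporting__sites__list'

_, api, function = table.split('__', 2)  # 'BQFlow', 'dfareporting', 'sites__list'
function = function.replace('__', '.')   # 'sites.list'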
Example #23
def campaign_load(config, task):

  # load campaigns for each advertiser in the user defined sheet
  def campaign_load_multiple():
    for row in get_rows(
      config,
      task['auth_sheets'],
      { 'sheets': {
        'sheet': task['sheet'],
        'tab': 'Advertisers',
        'header':False,
        'range': 'A2:A'
      }}
    ):
      if row:
        yield from API_DV360(
          config,
          task['auth_dv'],
          iterate=True
        ).advertisers().campaigns().list(
          advertiserId=lookup_id(row[0]),
          filter='entityStatus="ENTITY_STATUS_PAUSED" OR entityStatus="ENTITY_STATUS_ACTIVE" OR entityStatus="ENTITY_STATUS_DRAFT"',
          fields='campaigns.displayName,campaigns.campaignId,campaigns.advertiserId,nextPageToken'
        ).execute()

  campaign_clear(config, task)

  # write to database
  put_rows(
    config,
    task['auth_bigquery'],
    { 'bigquery': {
      'dataset': task['dataset'],
      'table': 'DV_Campaigns',
      'schema': Discovery_To_BigQuery(
        'displayvideo',
        'v1'
      ).method_schema(
        'advertisers.campaigns.list'
      ),
      'format': 'JSON'
    }},
    campaign_load_multiple()
  )
Example #24
def dv_partner_load(config, task):

    dv_partner_clear(config, task)

    # write partners to BQ
    put_rows(
        config, task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                task['dataset'],
                'table':
                'DV_Partners',
                'schema':
                Discovery_To_BigQuery('displayvideo',
                                      'v1').method_schema('partners.list'),
                'format':
                'JSON'
            }
        },
        API_DV360(config, task['auth_dv'], iterate=True).partners().list(
            filter='entityStatus="ENTITY_STATUS_ACTIVE"').execute())

    # write partners to sheet
    put_rows(
        config, task['auth_sheets'], {
            'sheets': {
                'sheet': task['sheet'],
                'tab': 'DV Partners',
                'header': False,
                'range': 'B2'
            }
        },
        get_rows(
            config, task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    task['dataset'],
                    'query':
                    "SELECT CONCAT(displayName, ' - ', partnerId), entityStatus  FROM `%s.DV_Partners`"
                    % task['dataset'],
                    'legacy':
                    False
                }
            }))
Example #25
def cm_profile_load(config, task):

    cm_profile_clear(config, task)

    # write accounts to BQ
    put_rows(
        config, task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                task['dataset'],
                'table':
                'CM_Profiles',
                'schema':
                Discovery_To_BigQuery('dfareporting', 'v3.4').method_schema(
                    'userProfiles.list', iterate=True),
                'format':
                'JSON'
            }
        },
        API_DCM(config, task['auth_cm'],
                iterate=True).userProfiles().list().execute())

    # write accounts to sheet
    put_rows(
        config, task['auth_sheets'], {
            'sheets': {
                'sheet': task['sheet'],
                'tab': 'CM Profiles',
                'header': False,
                'range': 'B2'
            }
        },
        get_rows(
            config, task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    task['dataset'],
                    'query':
                    "SELECT CONCAT(accountName, ' - ', accountId), CONCAT(subAccountName, ' - ', subAccountId), profileId, userName FROM `%s.CM_Profiles`"
                    % task['dataset'],
                    'legacy':
                    False
                }
            }))
Example #26
def google_api_build_results(config, auth, api_call, results):
    """Builds the BigQuery table to house the Google API call results.

  Optional piece of the recipe, will create a BigQuery table for results.
  Takes results, which defines a bigquery endpoint, and adds fields.

  Args:
    auth (string): either "user" or "service" to make the BigQuery call.
    api_call (dict): the JSON for the API call as defined in recipe.
    results (dict): defines where the data will be written

  Returns (dict):
    A modified results JSON with additional API values added.

  Raises:
    ValueError: If a required key in the recipe is missing.
  """

    if 'bigquery' in results:

        if 'schema' not in results['bigquery']:
            results['bigquery']['schema'] = Discovery_To_BigQuery(
                api_call['api'],
                api_call['version'],
                api_call.get('key', None),
            ).method_schema(api_call['function'],
                            api_call.get('iterate', False))

        if 'format' not in results['bigquery']:
            results['bigquery']['format'] = 'JSON'

        results['bigquery']['skip_rows'] = 0

        table_create(config,
                     results['bigquery'].get('auth', auth),
                     config.project,
                     results['bigquery']['dataset'],
                     results['bigquery']['table'],
                     results['bigquery']['schema'],
                     overwrite=False)

    return results
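A small sketch of what google_api_build_results adds to a minimal results definition; the dataset and table names here are assumptions for illustration:

# Sketch only: a bare results definition expanded by google_api_build_results.
api_call = {'api': 'dfareporting', 'version': 'v3.4', 'function': 'sites.list', 'iterate': True}
results = {'bigquery': {'dataset': 'my_dataset', 'table': 'CM_Sites'}}

results = google_api_build_results(config, 'user', api_call, results)
# results['bigquery'] now also carries 'schema' (from Discovery_To_BigQuery),
# 'format' == 'JSON', and 'skip_rows' == 0, and the table has been created.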
Example #27
def insertion_order_load(config, task):

    # load multiple from user defined sheet
    def insertion_order_load_multiple():
        for row in get_rows(
                config, task["auth_sheets"], {
                    "sheets": {
                        "sheet": task["sheet"],
                        "tab": "Advertisers",
                        "header": False,
                        "range": "A2:A"
                    }
                }):
            if row:
                yield from API_DV360(
                    config, task["auth_dv"], iterate=True
                ).advertisers().insertionOrders().list(
                    advertiserId=lookup_id(row[0]),
                    filter=
                    'entityStatus="ENTITY_STATUS_PAUSED" OR entityStatus="ENTITY_STATUS_ACTIVE" OR entityStatus="ENTITY_STATUS_DRAFT"'
                ).execute()

    insertion_order_clear(config, task)

    # write to database
    put_rows(
        config, task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "table":
                "DV_InsertionOrders",
                "schema":
                Discovery_To_BigQuery(
                    "displayvideo",
                    "v1").method_schema("advertisers.insertionOrders.list"),
                "format":
                "JSON"
            }
        }, insertion_order_load_multiple())
Example #28
def cm_account_clear(config, task):
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'CM_Accounts',
    Discovery_To_BigQuery(
      'dfareporting',
      'v3.4'
    ).method_schema(
      'accounts.list',
      iterate=True
    )
  )

  sheets_clear(
    config,
    task['auth_sheets'],
    task['sheet'],
    'CM Accounts',
    'B2:D'
  )
Example #29
def cm_campaign_clear(config, task):
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'CM_Campaigns',
    Discovery_To_BigQuery(
      'dfareporting',
      'v3.4'
    ).method_schema(
      'campaigns.list',
      iterate=True
    )
  )

  sheets_clear(
    config,
    task['auth_sheets'],
    task['sheet'],
    'CM Campaigns',
    'B2:G'
  )
Example #30
def cm_account_load(config, task):

  # load accounts for each CM profile in the user defined sheet
  def load_multiple():
    for row in get_rows(
      config,
      task['auth_sheets'],
      { 'sheets': {
        'sheet': task['sheet'],
        'tab': 'CM Profiles',
        'header':False,
        'range': 'A2:A'
      }}
    ):
      if row:
        account_id = lookup_id(row[0])
        is_superuser, profile_id = get_profile_for_api(config, task['auth_cm'], account_id)
        kwargs = { 'profileId': profile_id, 'accountId': account_id } if is_superuser else { 'profileId': profile_id }
        yield from API_DCM(
          config,
          task['auth_cm'],
          iterate=True,
          internal=is_superuser
        ).accounts().list(**kwargs).execute()

  cm_account_clear(config, task)

  # write accounts to database
  put_rows(
    config,
    task['auth_bigquery'],
    { 'bigquery': {
      'dataset': task['dataset'],
      'table': 'CM_Accounts',
      'schema': Discovery_To_BigQuery(
        'dfareporting',
        'v3.4'
      ).method_schema(
        'accounts.list',
        iterate=True
      ),
      'format':'JSON'
    }},
    load_multiple()
  )

  # write accounts to sheet
  put_rows(
    config,
    task['auth_sheets'],
    { 'sheets': {
      'sheet': task['sheet'],
      'tab': 'CM Accounts',
      'header':False,
      'range': 'B2'
    }},
    get_rows(
      config,
      task['auth_bigquery'],
      { 'bigquery': {
        'dataset': task['dataset'],
        'query': "SELECT CONCAT(name, ' - ', id), active  FROM `%s.CM_Accounts`" % task['dataset'],
        'legacy': False
      }}
    )
  )