Example #1
def sdf():
    if project.verbose:
        print "SDF TO TABLE", project.task['out']['bigquery']['table']

    # Determine time partitioning and write disposition
    is_time_partition = project.task['out']['bigquery'].get(
        'is_time_partition', False)
    disposition = 'WRITE_TRUNCATE'
    if is_time_partition:
        disposition = 'WRITE_APPEND'

    # Read Filter Ids
    filter_id_rows = get_rows(project.task['auth'],
                              project.task['read']['filter_ids'])
    filter_ids = [
        filter_id_rows[i:i + FILTER_ID_CHUNK_SIZE]
        for i in range(0, len(filter_id_rows), FILTER_ID_CHUNK_SIZE)
    ]
    # Loop through requested file types
    for file_type in project.task['file_types']:
        current_filter_id_iteration = 0
        table_names = []

        # Create the destination table
        destination_table = '%s_%s' % (
            project.task['out']['bigquery']['table'], file_type.lower())
        create_table_if_not_exist(
            project.task['auth'],
            project.task['out']['bigquery']['project_id'],
            project.task['out']['bigquery']['dataset'], destination_table,
            is_time_partition)

        # Request FILTER_ID_CHUNK_SIZE filter ids at a time so the API doesn't time out
        for partial_filter_ids in filter_ids:
            rows = sdf_read(project.task['auth'], [file_type],
                            project.task['filter_type'], partial_filter_ids)

            if rows:
                schema = _sdf_schema(next(rows))
                table_suffix = '%s_%s' % (current_filter_id_iteration,
                                          file_type.lower())
                table_name = '%s%s' % (
                    project.task['out']['bigquery']['table'], table_suffix)
                filename = '%s_%s.csv' % (file_type, project.date)
                # Check to see if the table exists, if not create it
                create_table_if_not_exist(
                    project.task['auth'],
                    project.task['out']['bigquery']['project_id'],
                    project.task['out']['bigquery']['dataset'], table_name)

                if 'bigquery' in project.task['out']:
                    project.task['out']['bigquery']['schema'] = schema
                    project.task['out']['bigquery']['skip_rows'] = 0

                put_rows(project.task['auth'],
                         project.task['out'],
                         filename,
                         rows,
                         variant=table_suffix)

                table_names.append(table_name)

            current_filter_id_iteration += 1

        query = _construct_combine_query(
            file_type, table_names,
            project.task['out']['bigquery']['project_id'],
            project.task['out']['bigquery']['dataset'], destination_table)

        query_to_table(project.task['auth'],
                       project.task['out']['bigquery']['project_id'],
                       project.task['out']['bigquery']['dataset'],
                       destination_table,
                       query,
                       disposition=disposition,
                       legacy=False)

        # Delete all the temporary tables that were created
        for table_name in table_names:
            drop_table(project.task['auth'],
                       project.task['out']['bigquery']['project_id'],
                       project.task['out']['bigquery']['dataset'], table_name)
Example #2
def bid_strategy_load():

    # write bid_strategy to sheet
    rows = get_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "query":
                """SELECT * FROM (
          SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId) AS IO_Display,
         CAST(NULL AS STRING),
         I.bidStrategy.fixedBid.bidAmountMicros / 1000000,
         I.bidStrategy.fixedBid.bidAmountMicros / 1000000,
         I.bidStrategy.maximizeSpendAutoBid.performanceGoalType,
         I.bidStrategy.maximizeSpendAutoBid.performanceGoalType,
         I.bidStrategy.maximizeSpendAutoBid.maxAverageCpmBidAmountMicros / 1000000,
         I.bidStrategy.maximizeSpendAutoBid.maxAverageCpmBidAmountMicros / 1000000,
         I.bidStrategy.maximizeSpendAutoBid.customBiddingAlgorithmId,
         I.bidStrategy.maximizeSpendAutoBid.customBiddingAlgorithmId,
         I.bidStrategy.performanceGoalAutoBid.performanceGoalType,
         I.bidStrategy.performanceGoalAutoBid.performanceGoalType,
         I.bidStrategy.performanceGoalAutoBid.performanceGoalAmountMicros / 1000000,
         I.bidStrategy.performanceGoalAutoBid.performanceGoalAmountMicros / 1000000,
         I.bidStrategy.performanceGoalAutoBid.maxAverageCpmBidAmountMicros / 1000000,
         I.bidStrategy.performanceGoalAutoBid.maxAverageCpmBidAmountMicros / 1000000,
         I.bidStrategy.performanceGoalAutoBid.customBiddingAlgorithmId,
         I.bidStrategy.performanceGoalAutoBid.customBiddingAlgorithmId
       FROM `{dataset}.DV_InsertionOrders` AS I
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON I.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON I.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId) AS IO_Display,
         CONCAT(L.displayName, ' - ', L.lineItemId),
         L.bidStrategy.fixedBid.bidAmountMicros / 1000000,
         L.bidStrategy.fixedBid.bidAmountMicros / 1000000,
         L.bidStrategy.maximizeSpendAutoBid.performanceGoalType,
         L.bidStrategy.maximizeSpendAutoBid.performanceGoalType,
         L.bidStrategy.maximizeSpendAutoBid.maxAverageCpmBidAmountMicros / 1000000,
         L.bidStrategy.maximizeSpendAutoBid.maxAverageCpmBidAmountMicros / 1000000,
         L.bidStrategy.maximizeSpendAutoBid.customBiddingAlgorithmId,
         L.bidStrategy.maximizeSpendAutoBid.customBiddingAlgorithmId,
         L.bidStrategy.performanceGoalAutoBid.performanceGoalType,
         L.bidStrategy.performanceGoalAutoBid.performanceGoalType,
         L.bidStrategy.performanceGoalAutoBid.performanceGoalAmountMicros / 1000000,
         L.bidStrategy.performanceGoalAutoBid.performanceGoalAmountMicros / 1000000,
         L.bidStrategy.performanceGoalAutoBid.maxAverageCpmBidAmountMicros / 1000000,
         L.bidStrategy.performanceGoalAutoBid.maxAverageCpmBidAmountMicros / 1000000,
         L.bidStrategy.performanceGoalAutoBid.customBiddingAlgorithmId,
         L.bidStrategy.performanceGoalAutoBid.customBiddingAlgorithmId
       FROM `{dataset}.DV_LineItems` AS L
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON L.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
       ON L.insertionOrderId=I.insertionOrderId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON L.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId )
       ORDER BY IO_Display
       """.format(**project.task),
                "legacy":
                False
            }
        })

    put_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Bid Strategy",
                "range": "A2:U"
            }
        }, rows)
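A side note on the {dataset} placeholders: the query string is filled with str.format(**project.task), so any key present in the task dictionary can be referenced by name. A two-line illustration with a made-up task dict:

task = {'dataset': 'dv360_reporting', 'sheet': 'unused-by-this-template'}
query = 'SELECT * FROM `{dataset}.DV_Campaigns`'.format(**task)
# -> SELECT * FROM `dv360_reporting.DV_Campaigns`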
Example #3
def dv_algorithm_load(config, task):

    # load multiple partners from user defined sheet
    def load_multiple():
        for row in get_rows(
                config, task['auth_sheets'], {
                    'sheets': {
                        'sheet': task['sheet'],
                        'tab': 'DV Partners',
                        'header': False,
                        'range': 'A2:A'
                    }
                }):
            if row:
                yield from API_DV360(
                    config, task['auth_dv'],
                    iterate=True).customBiddingAlgorithms().list(
                        partnerId=lookup_id(row[0])).execute()

    dv_algorithm_clear(config, task)

    # write algorithms to database
    put_rows(
        config, task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                task['dataset'],
                'table':
                'DV_Algorithms',
                'schema':
                Discovery_To_BigQuery(
                    'displayvideo',
                    'v1').method_schema('customBiddingAlgorithms.list'),
                'format':
                'JSON'
            }
        }, load_multiple())

    # write algorithms to sheet
    put_rows(
        config, task['auth_sheets'], {
            'sheets': {
                'sheet': task['sheet'],
                'tab': 'DV Algorithms',
                'range': 'B2'
            }
        },
        get_rows(
            config, task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    task['dataset'],
                    'query':
                    """SELECT
           CONCAT(P.displayName, ' - ', P.partnerId),
           CONCAT(A.displayName, ' - ', A.advertiserId),
           CONCAT(B.displayName, ' - ', B.customBiddingAlgorithmId),
           B.entityStatus
           FROM `{dataset}.DV_Algorithms` AS B
           LEFT JOIN `{dataset}.DV_Partners` AS P
           ON B.partnerId=P.partnerId
           LEFT JOIN `{dataset}.DV_Advertisers` AS A
           ON B.advertiserId=A.advertiserId
        """.format(**task),
                    'legacy':
                    False
                }
            }))
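Note that load_multiple() is handed to put_rows as a generator, so algorithm rows stream from the DV360 API into BigQuery page by page instead of being collected in memory first. A generic sketch of that producer/consumer shape, with fetch_page() and sink() as hypothetical stand-ins for the API call and the writer:

def fetch_page(cursor):
    # Hypothetical paginated source: each page yields rows plus a next cursor.
    pages = {0: ([{'id': 1}, {'id': 2}], 1), 1: ([{'id': 3}], None)}
    return pages[cursor]

def iterate_rows():
    cursor = 0
    while cursor is not None:
        rows, cursor = fetch_page(cursor)
        yield from rows  # rows flow out as each page arrives

def sink(rows):
    for row in rows:  # consumes lazily, one row at a time
        print(row)

sink(iterate_rows())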
Example #4
def campaign_load():

    # load multiple partners from user defined sheet
    def campaign_load_multiple():
        rows = get_rows(
            project.task['auth_sheets'], {
                'sheets': {
                    'sheet': project.task['sheet'],
                    'tab': 'Advertisers',
                    'range': 'A2:A'
                }
            })

        for row in rows:
            yield from API_DV360(project.task['auth_dv'],
                                 iterate=True).advertisers().campaigns().list(
                                     advertiserId=lookup_id(row[0])).execute()

    # write campaigns to database
    put_rows(
        project.task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                project.task['dataset'],
                'table':
                'DV_Campaigns',
                'schema':
                Discovery_To_BigQuery(
                    'displayvideo',
                    'v1').method_schema('advertisers.campaigns.list'),
                'format':
                'JSON'
            }
        }, campaign_load_multiple())

    # write campaigns to sheet
    rows = get_rows(
        project.task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                project.task['dataset'],
                'query':
                """SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         C.entityStatus
         FROM `{dataset}.DV_Campaigns` AS C
         LEFT JOIN `{dataset}.DV_Advertisers` AS A
         ON C.advertiserId=A.advertiserId
         LEFT JOIN `{dataset}.DV_Partners` AS P
         ON A.partnerId=P.partnerId
         ORDER BY C.displayName
       """.format(**project.task),
                'legacy':
                False
            }
        })

    put_rows(
        project.task['auth_sheets'], {
            'sheets': {
                'sheet': project.task['sheet'],
                'tab': 'Campaigns',
                'range': 'B2'
            }
        }, rows)
Example #5
def bigquery():

    if 'query' in project.task.get('run', {}):
        if project.verbose: print("QUERY", project.task['run']['query'])
        run_query(
            project.task['auth'],
            project.id,
            project.task['run']['query'],
            project.task['run'].get('legacy', True),
            #project.task['run'].get('billing_project_id', None)
        )

    elif 'values' in project.task['from']:
        rows = get_rows(project.task['auth'], project.task['from'])

        rows_to_table(project.task['to'].get('auth', project.task['auth']),
                      project.id, project.task['to']['dataset'],
                      project.task['to']['table'], rows,
                      project.task.get('schema', []), 0)

    elif 'query' in project.task['from']:
        if 'table' in project.task['to']:
            if project.verbose:
                print("QUERY TO TABLE", project.task['to']['table'])

            if 'pre_process_query' in project.task['to']:
                print('executing statement')
                execute_statement(project.task['auth'],
                                  project.id,
                                  project.task['to']['dataset'],
                                  project.task['to']['pre_process_query'],
                                  use_legacy_sql=project.task['from'].get(
                                      'legacy', project.task['from'].get(
                                          'useLegacySql', True)))
            query_to_table(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['table'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                disposition=project.task.get('write_disposition',
                                             'WRITE_TRUNCATE'),
                legacy=project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql,
                target_project_id=project.task['to'].get(
                    'project_id', project.id))
        # NOT USED SO RIPPING IT OUT
        # Mauriciod: Yes, it is used, look at project/mauriciod/target_winrate.json
        elif 'storage' in project.task['to']:
            if project.verbose:
                print("QUERY TO STORAGE", project.task['to']['storage'])
            local_file_name = '/tmp/%s' % str(uuid.uuid1())
            rows = query_to_rows(project.task['auth'], project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'])

            # csv.writer requires text mode in Python 3
            with open(local_file_name, 'w', newline='') as f:
                writer = csv.writer(f)
                writer.writerows(rows)

            with open(local_file_name, 'rb') as f:
                object_put(project.task['auth'], project.task['to']['storage'], f)

            os.remove(local_file_name)
        elif 'sheet' in project.task['to']:
            if project.verbose:
                print("QUERY TO SHEET", project.task['to']['sheet'])
            rows = query_to_rows(project.task['auth'],
                                 project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'],
                                 legacy=project.task['from'].get(
                                     'legacy', True))

            # makes sure types are correct in sheet
            rows = rows_to_type(rows)

            sheets_clear(project.task['auth'], project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'))
            sheets_write(project.task['auth'], project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'), rows)
        elif 'sftp' in project.task['to']:
            rows = query_to_rows(project.task['auth'],
                                 project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'],
                                 legacy=project.task['from'].get(
                                     'use_legacy_sql', True))

            if rows:
                if project.verbose: print("QUERY TO SFTP")
                put_rows(project.task['auth'], project.task['to'], rows)
        else:
            if project.verbose:
                print("QUERY TO VIEW", project.task['to']['view'])
            query_to_view(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['view'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql
                project.task['to'].get('replace', False))
    else:
        if project.verbose:
            print("STORAGE TO TABLE", project.task['to']['table'])
        storage_to_table(
            project.task['auth'], project.id, project.task['to']['dataset'],
            project.task['to']['table'], project.task['from']['bucket'] + ':' +
            project.task['from']['path'], project.task.get('schema', []),
            project.task.get('skip_rows', 1),
            project.task.get('structure', 'CSV'),
            project.task.get('disposition', 'WRITE_TRUNCATE'))
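The query-to-storage branch above round-trips rows through a temporary CSV file before uploading. A standard-library sketch of the same pattern; the upload itself is left as a comment since object_put's signature is library-specific:

import csv
import os
import tempfile

def rows_to_temp_csv(rows):
    """Write rows to a temporary CSV (text mode, newline='') and return its path."""
    fd, path = tempfile.mkstemp(suffix='.csv')
    with os.fdopen(fd, 'w', newline='') as f:
        csv.writer(f).writerows(rows)
    return path

path = rows_to_temp_csv([['a', 1], ['b', 2]])
try:
    with open(path, 'rb') as f:
        pass  # the real code calls object_put(auth, bucket_path, f) here
finally:
    os.remove(path)  # always remove the temp file, even if the upload fails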
Example #6
def creative_load(config, task):

    # load multiple partners from user defined sheet
    def creative_load_multiple():
        rows = get_rows(
            config, task["auth_sheets"], {
                "sheets": {
                    "sheet": task["sheet"],
                    "tab": "Advertisers",
                    "header": False,
                    "range": "A2:A"
                }
            })

        for row in rows:
            yield from API_DV360(
                config, task["auth_dv"], iterate=True
            ).advertisers().creatives().list(
                advertiserId=lookup_id(row[0]),
                filter='entityStatus="ENTITY_STATUS_ACTIVE"',
                fields=
                'creatives.displayName,creatives.creativeId,creatives.entityStatus,creatives.creativeType,creatives.dimensions,creatives.reviewStatus,nextPageToken'
            ).execute(limit=CREATIVE_COUNT_LIMIT)

    # write creatives to database
    put_rows(
        config, task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "table":
                "DV_Creatives",
                "schema":
                Discovery_To_BigQuery(
                    "displayvideo",
                    "v1").method_schema("advertisers.creatives.list"),
                "format":
                "JSON"
            }
        }, creative_load_multiple())

    # write creatives to sheet
    rows = get_rows(
        config, task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "query":
                """SELECT
          CONCAT(P.displayName, ' - ', P.partnerId),
          CONCAT(A.displayName, ' - ', A.advertiserId),
          CONCAT(C.displayName, ' - ', C.creativeId),
          C.entityStatus,
          C.creativeType,
          C.dimensions.widthPixels,
          C.dimensions.heightPixels,
          C.reviewStatus.approvalStatus,
          C.reviewStatus.creativeAndLandingPageReviewStatus,
          C.reviewStatus.contentAndPolicyReviewStatus,
          COUNTIF(RS.status='REVIEW_STATUS_UNSPECIFIED') OVER() AS Exchanges_Unspecified,
          COUNTIF(RS.status='REVIEW_STATUS_PENDING') OVER() AS Exchanges_Pending,
          COUNTIF(RS.status='REVIEW_STATUS_REJECTED') OVER() AS Exchanges_Rejected,
          COUNTIF(RS.status='REVIEW_STATUS_APPROVED') OVER() AS Exchanges_Approved,
          COUNTIF(RP.status='REVIEW_STATUS_UNSPECIFIED') OVER() AS Publishers_Unspecified,
          COUNTIF(RP.status='REVIEW_STATUS_PENDING') OVER() AS Publishers_Pending,
          COUNTIF(RP.status='REVIEW_STATUS_REJECTED') OVER() AS Publishers_Rejected,
          COUNTIF(RP.status='REVIEW_STATUS_APPROVED') OVER() AS Publishers_Approved,
          FROM `{dataset}.DV_Creatives` AS C, UNNEST(reviewStatus.exchangeReviewStatuses) AS RS, UNNEST(reviewStatus.publisherReviewStatuses) AS RP
          LEFT JOIN `{dataset}.DV_Advertisers` AS A
          ON C.advertiserId=A.advertiserId
          LEFT JOIN `{dataset}.DV_Partners` AS P
          ON A.partnerId=P.partnerId
        """.format(**task),
                "legacy":
                False
            }
        })

    put_rows(
        config, task["auth_sheets"], {
            "sheets": {
                "sheet": task["sheet"],
                "tab": "Creatives",
                "header": False,
                "range": "B2"
            }
        }, rows)
Example #7
def frequency_cap_load(config, task):

  # write frequency_caps to sheet
  rows = get_rows(
      config,
      task["auth_bigquery"], {
          "bigquery": {
              "dataset":
                  task["dataset"],
              "query":
                  """SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CAST(NULL AS STRING),
         CAST(NULL AS STRING),
         IFNULL(C.frequencyCap.unlimited, FALSE),
         IFNULL(C.frequencyCap.unlimited, FALSE),
         C.frequencyCap.timeUnit,
         C.frequencyCap.timeUnit,
         C.frequencyCap.timeUnitCount,
         C.frequencyCap.timeUnitCount,
         C.frequencyCap.maxImpressions,
         C.frequencyCap.maxImpressions
       FROM `{dataset}.DV_Campaigns` AS C
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON C.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId),
         CAST(NULL AS STRING),
         IFNULL(I.frequencyCap.unlimited, FALSE),
         IFNULL(I.frequencyCap.unlimited, FALSE),
         I.frequencyCap.timeUnit,
         I.frequencyCap.timeUnit,
         I.frequencyCap.timeUnitCount,
         I.frequencyCap.timeUnitCount,
         I.frequencyCap.maxImpressions,
         I.frequencyCap.maxImpressions
       FROM `{dataset}.DV_InsertionOrders` AS I
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON I.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON I.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId),
         CONCAT(L.displayName, ' - ', L.lineItemId),
         IFNULL(L.frequencyCap.unlimited, FALSE),
         IFNULL(L.frequencyCap.unlimited, FALSE),
         L.frequencyCap.timeUnit,
         L.frequencyCap.timeUnit,
         L.frequencyCap.timeUnitCount,
         L.frequencyCap.timeUnitCount,
         L.frequencyCap.maxImpressions,
         L.frequencyCap.maxImpressions
       FROM `{dataset}.DV_LineItems` AS L
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON L.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
       ON L.insertionOrderId=I.insertionOrderId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON L.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       """.format(**task),
              "legacy":
                  False
          }
      })

  put_rows(
      config,
      task["auth_sheets"], {
          "sheets": {
              "sheet": task["sheet"],
              "tab": "Frequency Caps",
              "header":False,
              "range": "A2"
          }
      }, rows)
Example #8
def audit_load():

    bid_strategy_audit()
    integration_detail_audit()
    insertion_order_audit()
    frequency_cap_audit()
    line_item_audit()
    line_item_map_audit()
    pacing_audit()
    partner_cost_audit()
    segment_audit()

    # clear audit tab in sheet
    sheets_clear(project.task['auth_sheets'], project.task['sheet'], 'Audit',
                 'A2')

    # write audits to sheet
    rows = get_rows(
        project.task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                project.task['dataset'],
                'query':
                """SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_InsertionOrders`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_Segments`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_LineItems`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_LineItemMaps`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_Pacing`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_BidStrategy`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_FrequencyCaps`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_PartnerCosts`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_IntegrationDetails`
      """.format(**project.task),
                'legacy':
                False
            }
        })

    put_rows(project.task['auth_sheets'], {
        'sheets': {
            'sheet': project.task['sheet'],
            'tab': 'Audit',
            'range': 'A2'
        }
    }, rows)
Example #9
def insertion_order_audit(config, task):

  # Move Insertion Order To BigQuery
  rows = get_rows(
    config,
    task["auth_sheets"], {
      "sheets": {
        "sheet": task["sheet"],
        "tab": "Insertion Orders",
        "header":False,
        "range": "A2:U"
      }
    }
  )

  put_rows(
    config,
    task["auth_bigquery"], {
      "bigquery": {
        "dataset": task["dataset"],
        "table": "SHEET_InsertionOrders",
        "schema": [
          { "name": "Partner", "type": "STRING" },
          { "name": "Advertiser", "type": "STRING" },
          { "name": "Campaign", "type": "STRING" },
          { "name": "Insertion_Order", "type": "STRING" },
          { "name": "Action", "type": "STRING" },
          { "name": "Status", "type": "STRING" },
          { "name": "Status_Edit", "type": "STRING" },
          { "name": "Name", "type": "STRING" },
          { "name": "Name_Edit", "type": "STRING" },
          { "name": "Budget_Unit", "type": "STRING" },
          { "name": "Budget_Unit_Edit", "type": "STRING" },
          { "name": "Budget_Automation", "type": "STRING" },
          { "name": "Budget_Automation_Edit", "type": "STRING" },
          { "name": "Performance_Goal_Type", "type": "STRING" },
          { "name": "Performance_Goal_Type_Edit", "type": "STRING" },
          { "name": "Performance_Goal_Amount", "type": "FLOAT" },
          { "name": "Performance_Goal_Amount_Edit", "type": "FLOAT" },
          { "name": "Performance_Goal_Percent", "type": "FLOAT" },
          { "name": "Performance_Goal_Percent_Edit", "type": "FLOAT" },
          { "name": "Performance_Goal_String", "type": "STRING" },
          { "name": "Performance_Goal_String_Edit", "type": "STRING" },
        ],
        "format": "CSV"
      }
    },
    rows
  )

  # Create Insert View
  query_to_view(
    config,
    task["auth_bigquery"],
    config.project,
    task["dataset"],
    "INSERT_InsertionOrders",
    """SELECT
      REGEXP_EXTRACT(S_IO.Advertiser, r' - (\d+)$') AS advertiserId,
      REGEXP_EXTRACT(S_IO.Campaign, r' - (\d+)$') AS campaignId,
      S_IO.Insertion_Order AS displayName,
      S_IO.Status_Edit AS entityStatus,
      STRUCT(
        S_PC.Cost_Type_Edit As costType,
        S_PC.Fee_Type_Edit As feeType,
        S_PC.Invoice_Type_Edit AS invoiceType,
        S_PC.Fee_Amount_Edit AS feeAmount,
        S_PC.Fee_Percent_Edit * 1000 AS feePercentageMillis
      ) AS partnerCosts,
      STRUCT(
        S_P.Period_Edit As pacingPeriod,
        S_P.Type_Edit As pacingType,
        S_P.Daily_Budget_Edit AS dailyMaxMicros,
        S_P.Daily_Impressions_Edit AS dailyMaxImpressions
      ) AS pacing,
      STRUCT(
        S_FC.Unlimited_Edit AS unlimited,
        S_FC.Time_Unit_Edit AS timeUnit,
        S_FC.Time_Count_Edit AS timeUnitCount,
        S_FC.Max_impressions_Edit AS maxImpressions
      ) AS frequencyCap,
      STRUCT(
        S_ID.Integration_Code_Edit As integrationCode,
        S_ID.Details_Edit As details
      ) AS integrationDetails,
      STRUCT(
        S_IO.Performance_Goal_Type_Edit AS performanceGoalType,
        S_IO.Performance_Goal_Amount_Edit * 1000000 AS performanceGoalAmountMicros,
        S_IO.Performance_Goal_Percent_Edit * 1000000 AS performanceGoalPercentageMicros,
        S_IO.Performance_Goal_String_Edit AS performanceGoalString
      ) AS performanceGoal,
      STRUCT(
        S_IO.Budget_Unit_Edit AS budgetUnit,
        S_IO.Budget_Automation_Edit AS automationType,
        (SELECT ARRAY(
          SELECT
            STRUCT(
             S_S.Budget_Edit * 1000000 AS budgetAmountMicros,
             S_S.Description_Edit AS description,
             STRUCT (
               STRUCT (
                 EXTRACT(YEAR FROM CAST(S_S.Start_Date_Edit AS Date)) AS year,
                 EXTRACT(MONTH FROM CAST(S_S.Start_Date_Edit AS DATE)) AS month,
                 EXTRACT(DAY FROM CAST(S_S.Start_Date_Edit AS DATE)) AS day
               ) AS startDate,
               STRUCT (
                 EXTRACT(YEAR FROM CAST(S_S.End_Date_Edit AS Date)) AS year,
                 EXTRACT(MONTH FROM CAST(S_S.End_Date_Edit AS DATE)) AS month,
                 EXTRACT(DAY FROM CAST(S_S.End_Date_Edit AS DATE)) AS day
             ) AS endDate
           ) AS dateRange
          ) AS budgetSegments
          FROM `{dataset}.SHEET_Segments` AS  S_S
          WHERE S_IO.Insertion_Order=S_S.Insertion_Order
        )) AS budgetSegments
      ) AS budget,
      STRUCT(
        IF(S_BS.Fixed_Bid_Edit IS NOT NULL,
          STRUCT(
            S_BS.Fixed_Bid_Edit * 1000000 AS bidAmountMicros
          ),
          NULL
        ) AS fixedBid,
        IF(S_BS.Auto_Bid_Goal_Edit IS NOT NULL,
          STRUCT(
            S_BS.Auto_Bid_Goal_Edit AS performanceGoalType,
            S_BS.Auto_Bid_Amount_Edit * 1000000 AS maxAverageCpmBidAmountMicros,
            S_BS.Auto_Bid_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS maximizeSpendAutoBid,
        IF(S_BS.Performance_Goal_Type_Edit IS NOT NULL,
          STRUCT(
            S_BS.Performance_Goal_Type_Edit AS performanceGoalType,
            S_BS.Performance_Goal_Amount_Edit * 1000000 AS performanceGoalAmountMicros,
            S_BS.Performance_Goal_Average_CPM_Bid_Edit * 1000000 AS maxAverageCpmBidAmountMicros,
            S_BS.Performance_Goal_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS performanceGoalAutoBid
      )
      AS bidStrategy
      FROM `{dataset}.SHEET_InsertionOrders` As S_IO
      LEFT JOIN `{dataset}.SHEET_Segments` As S_S ON S_IO.Insertion_Order=S_S.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_PartnerCosts` As S_PC ON S_IO.Insertion_Order=S_PC.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_Pacing` As S_P ON S_IO.Insertion_Order=S_P.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_FrequencyCaps` As S_FC ON S_IO.Insertion_Order=S_FC.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_IntegrationDetails` As S_ID ON S_IO.Insertion_Order=S_ID.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_BidStrategy` As S_BS ON S_IO.Insertion_Order=S_BS.Insertion_Order
      LEFT JOIN `{dataset}.DV_InsertionOrders` As DV_IO ON S_IO.Insertion_Order=DV_IO.displayName
      WHERE S_IO.Action="INSERT"
      AND DV_IO IS NULL
    """.format(**task),
    legacy=False
  )

  # Create Audit View And Write To Sheets
  query_to_view(
    config,
    task["auth_bigquery"],
    config.project,
    task["dataset"],
    "AUDIT_InsertionOrders",
    """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Insertion Order' AS Operation,
            CASE
              WHEN Name_Edit IS NULL THEN 'Missing Name.'
              WHEN Budget_Unit_Edit IS NULL THEN 'Missing Budget Unit.'
              WHEN Budget_Automation_Edit IS NULL THEN 'Missing Budget Automation.'
              WHEN Performance_Goal_Type_Edit IS NULL THEN 'Missing Goal Type.'
              WHEN Performance_Goal_Amount_Edit IS NULL AND Performance_Goal_Percent_Edit IS NULL AND Performance_Goal_String_Edit IS NULL THEN 'Missing Goal Amount / Percent / String.'
              WHEN Performance_Goal_Amount_Edit IS NOT NULL
                AND Performance_Goal_Percent_Edit IS NOT NULL
                AND Performance_Goal_String_Edit IS NOT NULL THEN 'Amount / Percent / String all exist when there can only be 1.'
              WHEN Performance_Goal_Amount_Edit IS NOT NULL AND Performance_Goal_Percent_Edit IS NOT NULL THEN 'Cannot have both Goal Amount and Percent'
              WHEN Performance_Goal_Amount_Edit IS NOT NULL
                AND Performance_Goal_String_Edit IS NOT NULL THEN 'Cannot have both Goal Amount and String'
              WHEN Performance_Goal_Percent_Edit IS NOT NULL AND Performance_Goal_String_Edit IS NOT NULL THEN 'Cannot have both Percent Amount and String'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_InsertionOrders`
        )
        WHERE
          Error IS NOT NULL
      ),
      /* Check duplicate inserts */
      DUPLICATE_ERRORS AS (
        SELECT
          'Insertion_Order' AS Operation,
          'Duplicate Insertion Order name, insert will be ignored.' AS Error,
          'WARNING' AS Severity,
          COALESCE(S_IO.Insertion_Order, 'BLANK') AS Id
        FROM `{dataset}.SHEET_InsertionOrders` As S_IO
        LEFT JOIN `{dataset}.DV_InsertionOrders` AS DV_IO ON S_IO.Insertion_Order=DV_IO.displayName
        WHERE S_IO.Action="INSERT"
        AND DV_IO IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      UNION ALL
      SELECT * FROM DUPLICATE_ERRORS
      ;
    """.format(**task),
    legacy=False
  )

  query_to_view(
    config,
    task["auth_bigquery"],
    config.project,
    task["dataset"],
    "PATCH_InsertionOrders",
    """SELECT *
      FROM `{dataset}.SHEET_InsertionOrders`
      WHERE Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_InsertionOrders` WHERE Severity='ERROR')
    """.format(**task),
    legacy=False
  )
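The views above lean on the sheet convention of writing identifiers as 'Display Name - 12345' and recovering the id with REGEXP_EXTRACT(x, r' - (\d+)$'). A Python sketch of the same convention, presumably what the lookup_id helper used throughout these examples does (a guess, not the library's actual implementation):

import re

def lookup_id_sketch(display):
    """Extract the trailing numeric id from a 'Display Name - 12345' string."""
    match = re.search(r' - (\d+)$', display)
    return match.group(1) if match else None

assert lookup_id_sketch('My Advertiser - 1234567') == '1234567'
assert lookup_id_sketch('no id here') is None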
Example #10
def google_audience_load():

    # load multiple from user defined sheet
    def load_multiple():
        partners = get_rows(
            project.task['auth_sheets'], {
                'sheets': {
                    'sheet': project.task['sheet'],
                    'tab': 'Partners',
                    'header': False,
                    'range': 'A2:A'
                }
            })

        for partner in partners:
            yield from API_DV360(
                project.task['auth_dv'], iterate=True).googleAudiences().list(
                    partnerId=lookup_id(partner[0])).execute()

        advertisers = get_rows(
            project.task['auth_sheets'], {
                'sheets': {
                    'sheet': project.task['sheet'],
                    'tab': 'Advertisers',
                    'header': False,
                    'range': 'A2:A'
                }
            })

        for advertiser in advertisers:
            yield from API_DV360(
                project.task['auth_dv'], iterate=True).googleAudiences().list(
                    advertiserId=lookup_id(advertiser[0])).execute()

    google_audience_clear()

    # write to database
    put_rows(
        project.task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                project.task['dataset'],
                'table':
                'DV_Google_Audiences',
                'schema':
                Discovery_To_BigQuery(
                    'displayvideo',
                    'v1').method_schema('googleAudiences.list'),
                'format':
                'JSON'
            }
        }, load_multiple())

    # write to sheet
    put_rows(
        project.task['auth_sheets'], {
            'sheets': {
                'sheet': project.task['sheet'],
                'tab': 'Targeting Options',
                'header': False,
                'range': 'N2:N'
            }
        },
        get_rows(
            project.task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    project.task['dataset'],
                    'query':
                    """SELECT
           CONCAT(SUBSTR(I.googleAudienceType, 22), ': ', I.displayName, ' - ', I.googleAudienceId)
           FROM `{dataset}.DV_Google_Audiences` AS I
           ORDER BY 1
        """.format(**project.task),
                    'legacy':
                    False
                }
            }))
Example #11
def dcm():
  if project.verbose: print('DCM')

  # stores existing report json
  report = None

  # check if report is to be deleted
  if project.task.get('delete', False):
    if project.verbose: print('DCM DELETE', project.task['report'].get('name', None) or project.task['report'].get('body', {}).get('name', None) or project.task['report'].get('report_id', None))
    report_delete(
      project.task['auth'],
      project.task['report']['account'],
      project.task['report'].get('report_id', None),
      project.task['report'].get('name', None) or project.task['report'].get('body', {}).get('name', None),
    )

  # check if report is to be run
  if project.task.get('report_run_only', False):
    if project.verbose: print('DCM REPORT RUN', project.task['report'].get('name', None) or project.task['report'].get('report_id', None))
    report_run(
      project.task['auth'],
      project.task['report']['account'],
      project.task['report'].get('report_id', None),
      project.task['report'].get('name', None),
    )

  # check if report is to be created
  if 'body' in project.task['report']:
    if project.verbose: print('DCM BUILD', project.task['report']['body']['name'])

    if 'filters' in project.task['report']:
      project.task['report']['body'] = report_filter(
        project.task['auth'],
        project.task['report']['body'],
        project.task['report']['filters']
      )

    report = report_build(
      project.task['auth'],
      project.task['report']['body'].get('accountId') or project.task['report']['account'],
      project.task['report']['body']
    )

  # moving a report
  if 'out' in project.task:
    filename, report = report_file(
      project.task['auth'],
      project.task['report']['account'],
      project.task['report'].get('report_id', None),
      project.task['report'].get('name', None) or project.task['report'].get('body', {}).get('name', None),
      project.task['report'].get('timeout', 10),
    )

    if report:
      if project.verbose: print('DCM FILE', filename)

      # clean up the report
      rows = report_to_rows(report)
      rows = report_clean(rows)

      # if bigquery, remove header and determine schema
      schema = None
      if 'bigquery' in project.task['out']:
        schema = report_schema(next(rows))
        project.task['out']['bigquery']['schema'] = schema
        project.task['out']['bigquery']['skip_rows'] = 0

      # write rows using standard out block in json (allows customization across all scripts)
      if rows: put_rows(project.task['auth'], project.task['out'], rows)
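One detail worth noting in dcm(): schema = report_schema(next(rows)) deliberately consumes the header row, which is why skip_rows is then set to 0. When the first row must be inspected without being dropped, a small peek helper restores it with itertools.chain (a generic sketch, not part of the library):

from itertools import chain

def peek(iterable):
    """Return (first_item, iterable with the first item restored)."""
    iterator = iter(iterable)
    first = next(iterator)
    return first, chain([first], iterator)

rows = iter([['Header_A', 'Header_B'], [1, 2], [3, 4]])
header, rows = peek(rows)
print(header)      # ['Header_A', 'Header_B'], inspected but not lost
print(list(rows))  # all three rows, header included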
Example #12
def monthly_budget_mover():
    if project.verbose:
        print('MONTHLY BUDGET MOVER')

    # Get Spend report
    report_id = report_get(project.task['auth'],
                           name=project.task['report_name'])['queryId']
    #	report = report_to_list(project.task['auth'], report_id)

    # Get Insertion Order SDF
    # sdf = list(get_single_sdf_rows(
    #     project.task['auth'],
    #     project.task['sdf']['version'],
    #     project.task['sdf']['partner_id'],
    #     project.task['sdf']['file_types'],
    #     project.task['sdf']['filter_type'],
    #     project.task['sdf']['read']['filter_ids'],
    #    	'InsertionOrders'))

    # print('sdf============================================================')
    # print(sdf)
    # print('sdf============================================================')

    sdf = [
        [
            'Io Id', 'Campaign Id', 'Name', 'Timestamp', 'Status', 'Io Type',
            'Billable Outcome', 'Fees', 'Integration Code', 'Details',
            'Pacing', 'Pacing Rate', 'Pacing Amount', 'Frequency Enabled',
            'Frequency Exposures', 'Frequency Period', 'Frequency Amount',
            'Performance Goal Type', 'Performance Goal Value', 'Measure DAR',
            'Measure DAR Channel', 'Budget Type', 'Budget Segments',
            'Auto Budget Allocation', 'Geography Targeting - Include',
            'Geography Targeting - Exclude', 'Language Targeting - Include',
            'Language Targeting - Exclude', 'Device Targeting - Include',
            'Device Targeting - Exclude', 'Browser Targeting - Include',
            'Browser Targeting - Exclude', 'Digital Content Labels - Exclude',
            'Brand Safety Sensitivity Setting', 'Brand Safety Custom Settings',
            'Third Party Verification Services',
            'Third Party Verification Labels', 'Channel Targeting - Include',
            'Channel Targeting - Exclude', 'Site Targeting - Include',
            'Site Targeting - Exclude', 'App Targeting - Include',
            'App Targeting - Exclude', 'App Collection Targeting - Include',
            'App Collection Targeting - Exclude',
            'Category Targeting - Include', 'Category Targeting - Exclude',
            'Keyword Targeting - Include', 'Keyword Targeting - Exclude',
            'Keyword List Targeting - Exclude',
            'Audience Targeting - Similar Audiences',
            'Audience Targeting - Include', 'Audience Targeting - Exclude',
            'Affinity & In Market Targeting - Include',
            'Affinity & In Market Targeting - Exclude',
            'Custom List Targeting',
            'Inventory Source Targeting - Authorized Seller Options',
            'Inventory Source Targeting - Include',
            'Inventory Source Targeting - Exclude',
            'Inventory Source Targeting - Target New Exchanges',
            'Daypart Targeting', 'Daypart Targeting Time Zone',
            'Environment Targeting', 'Viewability Targeting Active View',
            'Position Targeting - Display On Screen',
            'Position Targeting - Video On Screen',
            'Position Targeting - Display Position In Content',
            'Position Targeting - Video Position In Content',
            'Position Targeting - Audio Position In Content',
            'Video Player Size Targeting', 'Demographic Targeting Gender',
            'Demographic Targeting Age',
            'Demographic Targeting Household Income',
            'Demographic Targeting Parental Status',
            'Connection Speed Targeting', 'Carrier Targeting - Include',
            'Carrier Targeting - Exclude', 'Insertion Order Optimization',
            'Bid Strategy Unit', 'Bid Strategy Do Not Exceed',
            'Apply Floor Price For Deals', 'Algorithm Id'
        ],
        [
            '3489402', '294948', 'Audit_creas_Mustang',
            '2020-03-06T22:04:11.821000', 'Paused', 'Standard', 'Impression',
            '(Media; 0.0; Display & Video 360 Fee; True;);', '', '', 'Flight',
            'Even', '0', 'True', '1', 'Lifetime', '0', 'None', '0', 'False',
            '', 'Amount',
            '(12000.0; 05/01/2020; 05/31/2020;);(12000.0; 06/01/2020; 06/30/2020;);',
            'False', '', '', '', '', '2; 502; 202; 302;', '', '', '', '',
            'Use custom',
            'Adult; Alcohol; Derogatory; Downloads & Sharing; Drugs; Gambling; '
            'Profanity; Religion; Sensitive social issues; Suggestive; Tobacco; '
            'Tragedy; Transportation Accidents; Violence; Weapons;', 'None',
            '', '',
            '2203109; 2998109; 2998110; 2998111; 2998112; 2998113; 2998114;',
            '', '', '', '', '', '', '', '', '', '', '', 'False', '', '', '',
            '', '', 'Authorized and Non-Participating Publisher',
            '1; 6; 8; 9; 10; 2; 11; 12; 13; 16; 17; 19; 20; 21; 23; 27; 31; 34; '
            '36; 37; 38; 41; 42; 43; 50; 52; 60;', '', 'True', '', '',
            'Web; App;', '', '', '', '', '', '', '', '', '', '', '', '', '',
            '', 'False', '', '', '', ''
        ],
        [
            '3502002', '294948', 'FR_Mindshare_Ford_Mustang_CTX_VOL_Avril17',
            '2020-03-06T12:07:04.366000', 'Paused', 'Standard', 'Impression',
            '(Media; 0.0; Display & Video 360 Fee; True;);', '', '', 'Flight',
            'Even', '0', 'False', '0', 'Minutes', '0', 'None', '0', 'False',
            '', 'Amount',
            '(12000.0; 05/01/2020; 05/31/2020;);(12000.0; 06/01/2020; 06/30/2020;);',
            'False', '2250;', '', '', '', '2;', '502; 202; 302;', '', '', '',
            'Use custom',
            'Adult; Alcohol; Derogatory; Downloads & Sharing; Drugs; Gambling; '
            'Politics; Profanity; Religion; Sensitive social issues; Suggestive;'
            ' Tobacco; Tragedy; Transportation Accidents; Violence; Weapons;',
            'None', '', '',
            '2203109; 2998109; 2998110; 2998111; 2998112; 2998113; 2998114;',
            '', '', '', '', '', '', '', '', '', '', '', 'False', '', '', '',
            '', '', 'Authorized and Non-Participating Publisher',
            '1; 6; 8; 9; 10; 2; 11; 12; 13; 16; 17; 19; 20; 21; 23; 27; 31; 34; '
            '36; 37; 38; 41; 42; 43; 50; 52; 60;', '', 'True', '', '',
            'Web; App;', '', '', '', '', '', '', '', '', '', '', '', '', '',
            '', 'False', '', '', '', ''
        ],
        [
            '3522675', '294948', 'FR_Mindshare_Ford_Sales_Juin_CTX_VOL_Juin17',
            '2020-03-06T10:44:48.709000', 'Paused', 'Standard', 'Impression',
            '(Media; 0.0; Display & Video 360 Fee; True;);', '', '', 'Flight',
            'Ahead', '0', 'False', '0', 'Minutes', '0', 'None', '0', 'False',
            '', 'Amount',
            '(12000.0; 05/01/2020; 05/31/2020;);(12000.0; 06/01/2020; 06/30/2020;);',
            'False', '2250;', '', '', '', '2;', '502; 202; 302;', '', '', '',
            'Use custom',
            'Adult; Alcohol; Derogatory; Downloads & Sharing; Drugs; Gambling; '
            'Profanity; Religion; Sensitive social issues; Suggestive; Tobacco; '
            'Tragedy; Transportation Accidents; Violence; Weapons;', 'None',
            '', '',
            '2203109; 2998109; 2998110; 2998111; 2998112; 2998113; 2998114;',
            '', '', '', '', '', '', '', '', '', '', '', 'False', '', '', '',
            '', '', 'Authorized and Non-Participating Publisher',
            '1; 6; 8; 9; 10; 2; 11; 12; 13; 16; 17; 19; 20; 21; 23; 27; 31; 34; '
            '36; 37; 38; 41; 42; 43; 50; 52; 60;', '', 'True', '', '',
            'Web; App;', '', '', '', '', '', '', '', '', '', '', '', '', '',
            '', 'False', '', '', '', ''
        ]
    ]

    report = [[
        'Advertiser_Currency', 'Insertion_Order_Id', 'Revenue_Adv_Currency'
    ], ['EUR', '3489402', '893.195881'], ['EUR', '3502002', '14893.195881'],
              ['EUR', '3522675', '893.195881']]
    print('report============================================================')
    print(report)
    print('report============================================================')

    # Prep out blocks depending on where the outputs should be stored
    if project.task['is_colab']:
        project.task['out_old_sdf'].pop('bigquery')
        project.task['out_new_sdf'].pop('bigquery')
        project.task['out_changes'].pop('bigquery')

    else:
        project.task['out_old_sdf'].pop('file')
        project.task['out_new_sdf'].pop('file')
        project.task['out_changes'].pop('file')

        # Build Schemas
        schema = make_schema(sdf[0])
        schema_changes = make_schema(CHANGES_SCHEMA)
        project.task['out_old_sdf']['bigquery']['schema'] = schema
        project.task['out_new_sdf']['bigquery']['schema'] = schema
        project.task['out_changes']['bigquery']['schema'] = schema_changes

    # Write old sdf to table
    put_rows(project.task['auth'], project.task['out_old_sdf'], iter(sdf))

    # Categorize the IOs to be aggregated together
    if project.task['budget_categories']:

        categories = remove_excluded_ios_from_categories(
            project.task['budget_categories'], project.task['excluded_ios'])

        categories_spend = aggregate_io_spend_to_categories(report, categories)

        categories_budget = aggregate_io_budget_to_categories(sdf, categories)

        category_budget_deltas = calc_budget_spend_deltas(
            categories_budget, categories_spend, categories)

        new_sdf, changes = apply_category_budgets(sdf, category_budget_deltas,
                                                  categories)

    # Don't split up the IOs by categories
    else:
        report_dict = convert_report_to_dict(report)
        new_sdf, changes = calc_new_sdf_no_categories(
            sdf, report_dict, project.task['excluded_ios'])

    if project.task['is_colab']:
        changes.insert(0, CHANGES_SCHEMA)

    # Write new sdf to table
    put_rows(project.task['auth'], project.task['out_new_sdf'], iter(new_sdf))

    # Write log file to table
    put_rows(project.task['auth'], project.task['out_changes'], iter(changes))
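The hardcoded sample rows show how SDF packs budget segments into a single cell such as '(12000.0; 05/01/2020; 05/31/2020;);'. A sketch of parsing that cell back into typed values; the format is inferred from the sample rows above, and real SDF exports may have more variants:

import re
from datetime import datetime

def parse_budget_segments(cell):
    """Parse '(amount; MM/DD/YYYY; MM/DD/YYYY;);...' into typed tuples."""
    segments = []
    pattern = r'\(([\d.]+); (\d{2}/\d{2}/\d{4}); (\d{2}/\d{2}/\d{4});\);'
    for amount, start, end in re.findall(pattern, cell):
        segments.append((float(amount),
                         datetime.strptime(start, '%m/%d/%Y').date(),
                         datetime.strptime(end, '%m/%d/%Y').date()))
    return segments

cell = '(12000.0; 05/01/2020; 05/31/2020;);(12000.0; 06/01/2020; 06/30/2020;);'
print(parse_budget_segments(cell))  # two (amount, start, end) tuples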
Example #13
def first_and_third_party_audience_load():

    # load multiple from user defined sheet
    def load_multiple():
        #partners = get_rows(
        #  project.task['auth_sheets'],
        #  { 'sheets': {
        #    'sheet': project.task['sheet'],
        #    'tab': 'Partners',
        #    'range': 'A2:A'
        #  }}
        #)

        #for partner in partners:
        #  yield from API_DV360(
        #    project.task['auth_dv'],
        #    iterate=True
        #  ).firstAndThirdPartyAudiences().list(
        #    partnerId=lookup_id(partner[0])
        #  ).execute()

        advertisers = get_rows(
            project.task['auth_sheets'], {
                'sheets': {
                    'sheet': project.task['sheet'],
                    'tab': 'Advertisers',
                    'range': 'A2:A'
                }
            })

        for advertiser in advertisers:
            yield from API_DV360(
                project.task['auth_dv'],
                iterate=True).firstAndThirdPartyAudiences().list(
                    advertiserId=lookup_id(advertiser[0])).execute()

    # write to database
    put_rows(
        project.task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                project.task['dataset'],
                'table':
                'DV_First_And_Third_Party_Audiences',
                'schema':
                Discovery_To_BigQuery(
                    'displayvideo',
                    'v1').method_schema('firstAndThirdPartyAudiences.list'),
                'format':
                'JSON'
            }
        }, load_multiple())

    # write to sheet
    put_rows(
        project.task['auth_sheets'], {
            'sheets': {
                'sheet': project.task['sheet'],
                'tab': 'Targeting Options',
                'range': 'Q2'
            }
        },
        get_rows(
            project.task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    project.task['dataset'],
                    'query':
                    """SELECT
           CONCAT(
             SUBSTR(A.firstAndThirdPartyAudienceType, 37), ' > ',
             A.audienceType, ' > ',
             COALESCE(A.audienceSource, 'UNSPECIFIED'), ' > ',
             A.displayName, ' - ', A.firstAndThirdPartyAudienceId
           ),
           FROM `{dataset}.DV_First_And_Third_Party_Audiences` AS A
           ORDER BY 1
        """.format(**project.task),
                    'legacy':
                    False
                }
            }))
Example #14
def cm_placement_load(config, task):

  # load multiple partners from user defined sheet
  def load_multiple():
    campaigns = [str(lookup_id(r)) for r in set(get_rows(
      config,
      task['auth_sheets'],
      { 'sheets': {
        'sheet': task['sheet'],
        'tab': 'CM Campaigns',
        'header': False,
        'range': 'A2:A'
      }},
      unnest=True
    ))]

    for row in get_rows(
      config,
      task['auth_sheets'],
      { 'sheets': {
        'sheet': task['sheet'],
        'tab': 'CM Accounts',
        'header': False,
        'range': 'A2:A'
      }}
    ):
      if row:
        account_id = lookup_id(row[0])

        is_superuser, profile_id = get_profile_for_api(config, task['auth_cm'], account_id)
        kwargs = { 'profileId': profile_id, 'campaignIds': campaigns, 'archived': False }
        if is_superuser:
          kwargs['accountId'] = account_id

        yield from API_DCM(
          config,
          task['auth_cm'],
          iterate=True,
          internal=is_superuser
        ).placements().list(**kwargs).execute()

  cm_placement_clear(config, task)

  # write placements to database
  put_rows(
    config,
    task['auth_bigquery'],
    { 'bigquery': {
      'dataset': task['dataset'],
      'table': 'CM_Placements',
      'schema': Discovery_To_BigQuery(
        'dfareporting',
        'v3.4'
      ).method_schema(
        'placements.list',
        iterate=True
      ),
      'format': 'JSON'
    }},
    load_multiple()
  )
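load_multiple() above also illustrates assembling keyword arguments conditionally before a single API call: accountId is only passed when the profile is a superuser profile. A minimal sketch of the idiom, with list_placements() as a hypothetical stand-in for the real placements().list() call:

def list_placements(profileId, campaignIds, archived, accountId=None):
    # hypothetical stand-in for the real API method
    return {'profileId': profileId, 'campaignIds': campaignIds,
            'archived': archived, 'accountId': accountId}

def build_call(profile_id, campaigns, is_superuser, account_id):
    kwargs = {'profileId': profile_id, 'campaignIds': campaigns, 'archived': False}
    if is_superuser:
        kwargs['accountId'] = account_id  # only meaningful for superuser profiles
    return list_placements(**kwargs)

print(build_call('123', ['456'], True, '789'))
print(build_call('123', ['456'], False, '789'))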
Example #15
def dv_line_item_audit(config, task):

    put_rows(
        config, task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "table":
                "SHEET_LineItems",
                "schema": [
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "Insertion_Order",
                        "type": "STRING"
                    },
                    {
                        "name": "Line_Item",
                        "type": "STRING"
                    },
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Status",
                        "type": "STRING"
                    },
                    {
                        "name": "Status_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Warning",
                        "type": "STRING"
                    },
                    {
                        "name": "Line_Item_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Line_Item_Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Flight_Data_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Flight_Data_Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Flight_Start_Date",
                        "type": "STRING"
                    },
                    {
                        "name": "Flight_Start_Date_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Flight_End_Date",
                        "type": "STRING"
                    },
                    {
                        "name": "Flight_End_Date_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Flight_Trigger",
                        "type": "STRING"
                    },
                    {
                        "name": "Flight_Trigger_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget_Allocation_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget_Allocation_Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget_Unit",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget_Unit_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget_Max",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Budget_Max_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Partner_Revenue_Model_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner_Revenue_Model_Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner_Revenue_Model_Markup_Percent",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Partner_Revenue_Model_Markup_Percent_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Conversion_Percent",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Conversion_Percent_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Targeting_Expansion_Level",
                        "type": "STRING"
                    },
                    {
                        "name": "Targeting_Expansion_Level_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Exclude_1P",
                        "type": "STRING"
                    },
                    {
                        "name": "Exclude_1P_Edit",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            config, task["auth_sheets"], {
                "sheets": {
                    "sheet": task["sheet"],
                    "tab": "Line Items",
                    "header": False,
                    "range": "A2:AI"
                }
            }))

    # Create Insert View
    query_to_view(config,
                  task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "INSERT_LineItems",
                  """SELECT
      REGEXP_EXTRACT(S_LI.Advertiser, r' - (\d+)$') AS advertiserId,
      REGEXP_EXTRACT(S_LI.Campaign, r' - (\d+)$') AS campaignId,
      REGEXP_EXTRACT(S_LI.Insertion_Order, r' - (\d+)$') AS insertionOrderId,
      S_LI.Line_Item AS displayName,
      S_LI.Line_Item_Type_Edit AS lineItemType,
      S_LI.Status_Edit AS entityStatus,
      STRUCT(
        S_PC.Cost_Type_Edit As costType,
        S_PC.Fee_Type_Edit As feeType,
        S_PC.Invoice_Type_Edit AS invoiceType,
        S_PC.Fee_Amount_Edit AS feeAmount,
        S_PC.Fee_Percent_Edit * 1000 AS feePercentageMillis
      ) AS partnerCosts,
      STRUCT(
        S_LI.Flight_Data_Type_Edit AS flightDateType,
        STRUCT (
          STRUCT (
            EXTRACT(YEAR FROM CAST(S_LI.Flight_Start_Date_Edit AS DATE)) AS year,
            EXTRACT(MONTH FROM CAST(S_LI.Flight_Start_Date_Edit AS DATE)) AS month,
            EXTRACT(DAY FROM CAST(S_LI.Flight_Start_Date_Edit AS DATE)) AS day
          ) AS startDate,
          STRUCT (
            EXTRACT(YEAR FROM CAST(S_LI.Flight_End_Date_Edit AS DATE)) AS year,
            EXTRACT(MONTH FROM CAST(S_LI.Flight_End_Date_Edit AS DATE)) AS month,
            EXTRACT(DAY FROM CAST(S_LI.Flight_End_Date_Edit AS DATE)) AS day
          ) AS endDate
        ) AS dateRange,
        S_LI.Flight_Trigger_Edit AS triggerId
      ) AS flight,
      STRUCT(
        S_LI.Budget_Allocation_Type_Edit AS budgetAllocationType,
        S_LI.Budget_Unit_Edit AS budgetUnit,
        S_LI.Budget_Max_Edit * 1000000 AS maxAmount
      ) AS budget,
      STRUCT(
        S_P.Period_Edit As pacingPeriod,
        S_P.Type_Edit As pacingType,
        S_P.Daily_Budget_Edit * 1000000 AS dailyMaxMicros,
        S_P.Daily_Impressions_Edit AS dailyMaxImpressions
      ) AS pacing,
      STRUCT(
        S_FC.Unlimited_Edit AS unlimited,
        S_FC.Time_Unit_Edit AS timeUnit,
        S_FC.Time_Count_Edit AS timeUnitCount,
        S_FC.Max_impressions_Edit AS maxImpressions
      ) AS frequencyCap,
      STRUCT(
        S_LI.Partner_Revenue_Model_Type_Edit AS markupType,
        S_LI.Partner_Revenue_Model_Markup_Percent_Edit * IF(S_LI.Partner_Revenue_Model_Type_Edit='PARTNER_REVENUE_MODEL_MARKUP_TYPE_CPM', 1000000, 1000) AS markupAmount
      ) AS partnerRevenueModel,
      STRUCT(
        S_LI.Conversion_Percent_Edit * 1000 AS postViewCountPercentageMillis,
        [] AS floodlightActivityConfigs
      ) AS conversionCounting,
      STRUCT(
        IF(S_BS.Fixed_Bid_Edit IS NOT NULL,
          STRUCT(
            S_BS.Fixed_Bid_Edit * 1000000 AS bidAmountMicros
          ),
          NULL
        ) AS fixedBid,
        IF(S_BS.Auto_Bid_Goal_Edit IS NOT NULL,
          STRUCT(
            S_BS.Auto_Bid_Goal_Edit AS performanceGoalType,
            S_BS.Auto_Bid_Amount_Edit * 1000000 AS maxAverageCpmBidAmountMicros,
            S_BS.Auto_Bid_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS maximizeSpendAutoBid,
        IF(S_BS.Performance_Goal_Type_Edit IS NOT NULL,
          STRUCT(
            S_BS.Performance_Goal_Type_Edit AS performanceGoalType,
            S_BS.Performance_Goal_Amount_Edit * 1000000 AS performanceGoalAmountMicros,
            S_BS.Performance_Goal_Average_CPM_Bid_Edit * 1000000 AS maxAverageCpmBidAmountMicros,
            S_BS.Performance_Goal_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS performanceGoalAutoBid
      ) AS bidStrategy,
      STRUCT(
        S_ID.Integration_Code_Edit AS integrationCode,
        S_ID.Details_Edit AS details
      ) AS integrationDetails,
      STRUCT(
        S_LI.Targeting_Expansion_Level_Edit AS targetingExpansionLevel,
        S_LI.Exclude_1P_Edit AS excludeFirstPartyAudience
      ) AS targetingExpansion
    FROM `{dataset}.SHEET_LineItems` AS S_LI
    LEFT JOIN `{dataset}.SHEET_PartnerCosts` AS S_PC ON S_LI.Line_Item=S_PC.Line_Item
    LEFT JOIN `{dataset}.SHEET_Pacing` AS S_P ON S_LI.Line_Item=S_P.Line_Item
    LEFT JOIN `{dataset}.SHEET_FrequencyCaps` AS S_FC ON S_LI.Line_Item=S_FC.Line_Item
    LEFT JOIN `{dataset}.SHEET_IntegrationDetails` AS S_ID ON S_LI.Line_Item=S_ID.Line_Item
    LEFT JOIN `{dataset}.SHEET_BidStrategy` AS S_BS ON S_LI.Line_Item=S_BS.Line_Item
    LEFT JOIN `{dataset}.DV_LineItems` AS DV_LI ON S_LI.Line_Item=DV_LI.displayName
    WHERE S_LI.Action="INSERT"
    AND DV_LI IS NULL
    """.format(**task),
                  legacy=False)

    # Create Audit View
    query_to_view(config,
                  task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "AUDIT_LineItems",
                  """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Line Item' AS Operation,
            CASE
              WHEN Budget_Allocation_Type_Edit IS NULL THEN 'Missing Budget Allocation Type.'
              WHEN Budget_Unit_Edit IS NULL THEN 'Missing Budget Unit.'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_LineItems`
        )
        WHERE
          Error IS NOT NULL
      ),
      /* Check duplicate inserts */
      DUPLICATE_ERRORS AS (
        SELECT
          'Line Item' AS Operation,
          'Duplicate Line Item name, insert will be ignored.' AS Error,
          'WARNING' AS Severity,
          COALESCE(S_LI.Line_Item, 'BLANK') AS Id
        FROM `{dataset}.SHEET_LineItems` As S_LI
        LEFT JOIN `{dataset}.DV_LineItems` AS DV_LI ON S_LI.Line_Item=DV_LI.displayName
        WHERE S_LI.Action="INSERT"
        AND DV_LI IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      UNION ALL
      SELECT * FROM DUPLICATE_ERRORS
    """.format(**task),
                  legacy=False)

    query_to_view(config,
                  task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "PATCH_LineItems",
                  """SELECT *
      FROM `{dataset}.SHEET_LineItems`
      WHERE Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_LineItems` WHERE Severity='ERROR')
    """.format(**task),
                  legacy=False)
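
dv_line_item_audit follows the three-view pattern used by all of the audits in this file: SHEET_* mirrors the spreadsheet, AUDIT_* flags rows with an ERROR or WARNING severity, and PATCH_* keeps only the rows that passed. The same filtering logic in plain Python, with row shapes simplified to illustrate the flow:

# Sketch of the AUDIT -> PATCH filter using plain dicts instead of
# BigQuery views; field names are simplified assumptions.
sheet_rows = [
    {'Line_Item': 'LI A - 1', 'Budget_Allocation_Type_Edit': 'FIXED'},
    {'Line_Item': 'LI B - 2', 'Budget_Allocation_Type_Edit': None},
]

# AUDIT: collect ids of rows that fail validation.
error_ids = {
    row['Line_Item'] for row in sheet_rows
    if row['Budget_Allocation_Type_Edit'] is None
}

# PATCH: everything not flagged as an ERROR is eligible for upload.
patch_rows = [row for row in sheet_rows if row['Line_Item'] not in error_ids]
assert [row['Line_Item'] for row in patch_rows] == ['LI A - 1']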
Example #16
def insertion_order_load(config, task):

  # load insertion orders for multiple advertisers from the user defined sheet
  def insertion_order_load_multiple():
    campaigns = set([lookup_id(row[0]) for row in get_rows(
      config,
      task["auth_sheets"],
      { "sheets": {
        "sheet": task["sheet"],
        "tab": "Campaigns",
        "header":False,
        "range": "A2:A"
      }}
    )])

    rows = get_rows(
      config,
      task["auth_sheets"],
      { "sheets": {
        "sheet": task["sheet"],
        "tab": "Advertisers",
        "header":False,
        "range": "A2:A"
      }}
    )

    # String for filtering which entityStatus enums we want to see in the sheet
    for row in rows:
      for record in API_DV360(
        config,
        task["auth_dv"],
        iterate=True
      ).advertisers().insertionOrders().list(
        advertiserId=lookup_id(row[0]),
        filter='entityStatus="ENTITY_STATUS_PAUSED" OR entityStatus="ENTITY_STATUS_ACTIVE" OR entityStatus="ENTITY_STATUS_DRAFT"'
      ).execute():
        if not campaigns or record['campaignId'] in campaigns:
          yield record

  # write insertion orders to database
  put_rows(
    config,
    task["auth_bigquery"],
    { "bigquery": {
      "dataset": task["dataset"],
      "table": "DV_InsertionOrders",
      "schema": Discovery_To_BigQuery(
        "displayvideo",
        "v1"
      ).method_schema("advertisers.insertionOrders.list"),
      "format": "JSON"
    }},
    insertion_order_load_multiple()
  )

  # write insertion orders to sheet
  put_rows(
    config,
    task["auth_sheets"],
    { "sheets": {
      "sheet": task["sheet"],
      "tab": "Insertion Orders",
      "header":False,
      "range": "A2"
    }},
    get_rows(
      config,
      task["auth_bigquery"],
      { "bigquery": {
        "dataset": task["dataset"],
        "query": """SELECT
            CONCAT(P.displayName, ' - ', P.partnerId),
            CONCAT(A.displayName, ' - ', A.advertiserId),
            CONCAT(C.displayName, ' - ', C.campaignId),
            CONCAT(I.displayName, ' - ', I.insertionOrderId),
            'PATCH',
            I.entityStatus,
            I.entityStatus,
            I.displayName,
            I.displayName,
            I.budget.budgetUnit,
            I.budget.budgetUnit,
            I.budget.automationType,
            I.budget.automationType,
            I.performanceGoal.performanceGoalType,
            I.performanceGoal.performanceGoalType,
            I.performanceGoal.performanceGoalAmountMicros / 1000000,
            I.performanceGoal.performanceGoalAmountMicros / 1000000,
            I.performanceGoal.performanceGoalPercentageMicros / 1000000,
            I.performanceGoal.performanceGoalPercentageMicros / 1000000,
            I.performanceGoal.performanceGoalString,
            I.performanceGoal.performanceGoalString
          FROM `{dataset}.DV_InsertionOrders` AS I
          LEFT JOIN `{dataset}.DV_Campaigns` AS C
          ON I.campaignId=C.campaignId
          LEFT JOIN `{dataset}.DV_Advertisers` AS A
          ON I.advertiserId=A.advertiserId
          LEFT JOIN `{dataset}.DV_Partners` AS P
          ON A.partnerId=P.partnerId
          ORDER BY I.displayName
        """.format(**task),
        "legacy": False
      }}
    )
  )
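
The filter argument above is a DV360 list-filter expression joining several entityStatus values with OR. If the status list ever needs to vary, the expression can be assembled instead of hard-coded; a small illustrative helper (not part of the original code):

# Illustrative helper: build the DV360 entityStatus filter string from a
# list, reproducing the expression hard-coded above.
def build_status_filter(statuses):
  return ' OR '.join('entityStatus="%s"' % status for status in statuses)

assert build_status_filter([
    'ENTITY_STATUS_PAUSED',
    'ENTITY_STATUS_ACTIVE',
    'ENTITY_STATUS_DRAFT',
]) == ('entityStatus="ENTITY_STATUS_PAUSED" OR '
       'entityStatus="ENTITY_STATUS_ACTIVE" OR '
       'entityStatus="ENTITY_STATUS_DRAFT"')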
Example #17
def inventory_source_load():

    # load multiple from user defined sheet
    def load_multiple():
        partners = get_rows(
            project.task['auth_sheets'], {
                'sheets': {
                    'sheet': project.task['sheet'],
                    'tab': 'Partners',
                    'range': 'A2:A'
                }
            })

        for partner in partners:
            yield from API_DV360(
                project.task['auth_dv'], iterate=True).inventorySources().list(
                    partnerId=lookup_id(partner[0])).execute()

        advertisers = get_rows(
            project.task['auth_sheets'], {
                'sheets': {
                    'sheet': project.task['sheet'],
                    'tab': 'Advertisers',
                    'range': 'A2:A'
                }
            })

        for advertiser in advertisers:
            yield from API_DV360(
                project.task['auth_dv'], iterate=True).inventorySources().list(
                    advertiserId=lookup_id(advertiser[0])).execute()

    # write to database
    put_rows(
        project.task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                project.task['dataset'],
                'table':
                'DV_Inventory_Sources',
                'schema':
                Discovery_To_BigQuery(
                    'displayvideo',
                    'v1').method_schema('inventorySources.list'),
                'format':
                'JSON'
            }
        }, load_multiple())

    # write to sheet
    put_rows(
        project.task['auth_sheets'], {
            'sheets': {
                'sheet': project.task['sheet'],
                'tab': 'Inventory Sources',
                'range': 'A2'
            }
        },
        get_rows(
            project.task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    project.task['dataset'],
                    'query':
                    """SELECT
           CONCAT(I.displayName, ' - ', I.inventorySourceId),
           inventorySourceType,
           status.entityStatus,
           status.sellerStatus,
           status.configStatus,
           exchange,
           publisherName,
           dealId
           FROM `{dataset}.DV_Inventory_Sources` AS I
           GROUP BY 1,2,3,4,5,6,7,8
           ORDER BY 1
        """.format(**project.task),
                    'legacy':
                    False
                }
            }))
Example #18
def fred():

    if 'series' in project.task:

        for parameters in project.task['series']:

            name = 'FRED_SERIES_%s' % parameters['series_id']
            rows = fred_series(project.task['api_key'],
                               project.task['frequency'], **parameters)

            if 'bigquery' in project.task['out']:
                project.task['out']['bigquery']['schema'] = [{
                    "name": "realtime_start",
                    "type": "DATE",
                    "mode": "REQUIRED"
                }, {
                    "name": "realtime_end",
                    "type": "DATE",
                    "mode": "REQUIRED"
                }, {
                    "name": "day",
                    "type": "DATE",
                    "mode": "REQUIRED"
                }, {
                    "name": "value",
                    "type": "FLOAT",
                    "mode": "REQUIRED"
                }]

    elif 'regions' in project.task:
        for parameters in project.task['regions']:

            name = 'FRED_SERIES_%s' % parameters['series_group']
            rows = fred_regional(project.task['api_key'],
                                 project.task['frequency'],
                                 project.task['region_type'], **parameters)

            if 'bigquery' in project.task['out']:
                project.task['out']['bigquery']['schema'] = [{
                    "name": "region",
                    "type": "STRING",
                    "mode": "REQUIRED"
                }, {
                    "name": "code",
                    "type": "INTEGER",
                    "mode": "REQUIRED"
                }, {
                    "name": "series_id",
                    "type": "STRING",
                    "mode": "REQUIRED"
                }, {
                    "name": "value",
                    "type": "FLOAT",
                    "mode": "REQUIRED"
                }]

    else:
        raise Exception(
            "MISSING CONFIGURATION: Specify either series_id or series_group.")

    put_rows(project.task['auth'], project.task['out'], name, rows)
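
fred expects project.task to carry either a 'series' or a 'regions' list along with the API key; the schema it injects into project.task['out']['bigquery'] is then consumed when the rows are written. A hypothetical task dict for the 'series' branch, showing the shape the function reads (all values are placeholders):

# Hypothetical task configuration for the 'series' branch of fred().
# Keys mirror the lookups in the function; values are placeholders.
task = {
    'auth': 'service',
    'api_key': 'YOUR_FRED_API_KEY',
    'frequency': 'm',
    'series': [
        {'series_id': 'GDP'},
        {'series_id': 'UNRATE'},
    ],
    'out': {'bigquery': {'dataset': 'fred', 'table': 'FRED_SERIES'}},
}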
Example #19
def frequency_cap_audit(config, task):
  rows = get_rows(
      config,
      task["auth_sheets"], {
          "sheets": {
              "sheet": task["sheet"],
              "tab": "Frequency Caps",
              "header":False,
              "range": "A2:M"
          }
      })

  put_rows(
      config,
      task["auth_bigquery"], {
          "bigquery": {
              "dataset": task["dataset"],
              "table": "SHEET_FrequencyCaps",
              "schema": [
                  { "name": "Partner", "type": "STRING" },
                  { "name": "Advertiser", "type": "STRING" },
                  { "name": "Campaign", "type": "STRING" },
                  { "name": "Insertion_Order", "type": "STRING" },
                  { "name": "Line_Item", "type": "STRING" },
                  { "name": "Unlimited", "type": "BOOLEAN" },
                  { "name": "Unlimited_Edit", "type": "BOOLEAN" },
                  { "name": "Time_Unit", "type": "STRING" },
                  { "name": "Time_Unit_Edit", "type": "STRING" },
                  { "name": "Time_Count", "type": "INTEGER" },
                  { "name": "Time_Count_Edit", "type": "INTEGER" },
                  { "name": "Max_impressions", "type": "INTEGER" },
                  { "name": "Max_impressions_Edit", "type": "INTEGER" },
              ],
              "format": "CSV"
          }
      }, rows)

  query_to_view(
      config,
      task["auth_bigquery"],
      config.project,
      task["dataset"],
      "AUDIT_FrequencyCaps",
      """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Frequency Caps' AS Operation,
            CASE WHEN Unlimited_Edit IS TRUE THEN
              CASE WHEN Time_Unit_Edit IS NOT NULL OR Time_Count_Edit IS NOT NULL OR Max_Impressions_Edit IS NOT NULL THEN 'Time Unit and the Other Options are Mutually Exclusive.'
              ELSE NULL
             END
            ELSE
              IF(Time_Unit_Edit IS NULL OR Time_Count_Edit IS NULL OR Max_Impressions_Edit IS NULL, 'If Time Unit is FALSE, the other options are required.', NULL)
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_FrequencyCaps`
        )
        WHERE
          Error IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      ;
    """.format(**task),
      legacy=False)

  query_to_view(
    config,
    task["auth_bigquery"],
    config.project,
    task["dataset"],
    "PATCH_FrequencyCaps",
    """SELECT *
      FROM `{dataset}.SHEET_FrequencyCaps`
      WHERE Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
      AND Campaign NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
    """.format(**task),
    legacy=False
  )
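
The CASE expression in AUDIT_FrequencyCaps encodes a single rule: when Unlimited is checked, the time-based fields must be empty, and when it is not, all of them are required. The same rule as a plain Python predicate (a sketch; row is a dict keyed by the *_Edit column names):

# Sketch of the frequency-cap validation rule from the SQL CASE above.
def frequency_cap_error(row):
  others = (row.get('Time_Unit_Edit'), row.get('Time_Count_Edit'),
            row.get('Max_Impressions_Edit'))
  if row.get('Unlimited_Edit'):
    if any(value is not None for value in others):
      return 'Time Unit and the Other Options are Mutually Exclusive.'
  elif any(value is None for value in others):
    return 'If Time Unit is FALSE, the other options are required.'
  return None

assert frequency_cap_error({'Unlimited_Edit': True,
                            'Time_Unit_Edit': 'TIME_UNIT_DAYS'}) is not None
assert frequency_cap_error({'Unlimited_Edit': False,
                            'Time_Unit_Edit': 'TIME_UNIT_DAYS',
                            'Time_Count_Edit': 1,
                            'Max_Impressions_Edit': 5}) is None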
Example #20
def integration_detail_audit():
  rows = get_rows(
      project.task["auth_sheets"], {
          "sheets": {
              "sheet": project.task["sheet"],
              "tab": "Integration Details",
              "range": "A2:I"
          }
      })

  put_rows(
      project.task["auth_bigquery"], {
          "bigquery": {
              "dataset": project.task["dataset"],
              "table": "SHEET_IntegrationDetails",
              "schema": [
                  { "name": "Partner", "type": "STRING" },
                  { "name": "Advertiser", "type": "STRING" },
                  { "name": "Campaign", "type": "STRING" },
                  { "name": "Insertion_Order", "type": "STRING" },
                  { "name": "Line_Item", "type": "STRING" },
                  { "name": "Integration_Code", "type": "STRING" },
                  { "name": "Integration_Code_Edit", "type": "STRING" },
                  { "name": "Details", "type": "STRING" },
                  { "name": "Details_Edit", "type": "STRING" },
              ],
              "format": "CSV"
          }
      }, rows)

  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "AUDIT_IntegrationDetails",
    """WITH
      /* Check if advertiser values are set */
      INPUT_ERRORS AS (
        SELECT
          'Integration Details' AS Operation,
          'Missing Advertiser.' AS Error,
          'ERROR' AS Severity,
          COALESCE(Line_Item, Insertion_Order, Advertiser, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_IntegrationDetails`
        WHERE Advertiser IS NULL
      )

      SELECT * FROM INPUT_ERRORS
    """.format(**project.task),
    legacy=False
  )

  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "PATCH_IntegrationDetails",
    """SELECT *
      FROM `{dataset}.SHEET_IntegrationDetails`
      WHERE Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_IntegrationDetails` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_IntegrationDetails` WHERE Severity='ERROR')
      AND Campaign NOT IN (SELECT Id FROM `{dataset}.AUDIT_IntegrationDetails` WHERE Severity='ERROR')
    """.format(**project.task),
    legacy=False
  )
Example #21
def inventory_group_load(config, task):

    # load multiple from user defined sheet
    def load_multiple():
        partners = get_rows(
            config, task['auth_sheets'], {
                'sheets': {
                    'sheet': task['sheet'],
                    'tab': 'Partners',
                    'header': False,
                    'range': 'A2:A'
                }
            })

        for partner in partners:
            yield from API_DV360(
                config, task['auth_dv'],
                iterate=True).inventorySourceGroups().list(
                    partnerId=lookup_id(partner[0])).execute()

        advertisers = get_rows(
            config, task['auth_sheets'], {
                'sheets': {
                    'sheet': task['sheet'],
                    'tab': 'Advertisers',
                    'header': False,
                    'range': 'A2:A'
                }
            })

        for advertiser in advertisers:
            yield from API_DV360(
                config, task['auth_dv'],
                iterate=True).inventorySourceGroups().list(
                    advertiserId=lookup_id(advertiser[0])).execute()

    inventory_group_clear(config, task)

    # write inventory groups to database
    put_rows(
        config, task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                task['dataset'],
                'table':
                'DV_Inventory_Groups',
                'schema':
                Discovery_To_BigQuery(
                    'displayvideo',
                    'v1').method_schema('inventorySourceGroups.list'),
                'format':
                'JSON'
            }
        }, load_multiple())

    # write inventory groups to sheet
    put_rows(
        config, task['auth_sheets'], {
            'sheets': {
                'sheet': task['sheet'],
                'tab': 'Targeting Options',
                'header': False,
                'range': 'M2'
            }
        },
        get_rows(
            config, task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    task['dataset'],
                    'query':
                    """SELECT
           CONCAT(I.displayName, ' - ', I.inventorySourceGroupId),
           FROM `{dataset}.DV_Inventory_Groups` AS I
           ORDER BY 1
        """.format(**task),
                    'legacy':
                    False
                }
            }))
Example #22
def integration_detail_load():

  # write integration_details to sheet
  rows = get_rows(
      project.task["auth_bigquery"], {
          "bigquery": {
              "dataset":
                  project.task["dataset"],
              "query":
                  """SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CAST(NULL AS STRING),
         CAST(NULL AS STRING),
         CAST(NULL AS STRING),
         A.integrationDetails.integrationCode,
         A.integrationDetails.integrationCode,
         A.integrationDetails.details,
         A.integrationDetails.details
       FROM `{dataset}.DV_Advertisers` AS A
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId),
         CAST(NULL AS STRING),
         I.integrationDetails.integrationCode,
         I.integrationDetails.integrationCode,
         I.integrationDetails.details,
         I.integrationDetails.details
       FROM `{dataset}.DV_InsertionOrders` AS I
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON I.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON I.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId),
         CONCAT(L.displayName, ' - ', L.lineItemId),
         L.integrationDetails.integrationCode,
         L.integrationDetails.integrationCode,
         L.integrationDetails.details,
         L.integrationDetails.details
       FROM `{dataset}.DV_LineItems` AS L
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON L.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
       ON L.insertionOrderId=I.insertionOrderId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON L.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       """.format(**project.task),
              "legacy":
                  False
          }
      })

  put_rows(
      project.task["auth_sheets"], {
          "sheets": {
              "sheet": project.task["sheet"],
              "tab": "Integration Details",
              "range": "A2"
          }
      }, rows)
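
The three SELECTs above are stitched together with UNION ALL, so each branch must produce the same number and types of columns; CAST(NULL AS STRING) pads the advertiser- and insertion-order-level rows where no deeper entity applies. A stripped-down illustration of that alignment (table and column names are placeholders):

# Stripped-down illustration of column alignment across UNION ALL.
# Table and column names are placeholders, not real recipe tables.
query = """
SELECT name, CAST(NULL AS STRING) AS child  -- parent rows: no child
FROM `dataset.Parents`
UNION ALL
SELECT parent_name, child_name              -- child rows: both columns set
FROM `dataset.Children`
"""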
Example #23
def cm_advertiser_load(config, task):

  # load advertisers for multiple CM accounts from the user defined sheet
  def load_multiple():
    for row in get_rows(
      config,
      task['auth_sheets'],
      { 'sheets': {
        'sheet': task['sheet'],
        'tab': 'CM Accounts',
        'header':False,
        'range': 'A2:A'
      }}
    ):
      if row:
        account_id = lookup_id(row[0])
        is_superuser, profile_id = get_profile_for_api(config, task['auth_cm'], account_id)
        kwargs = { 'profileId': profile_id, 'accountId': account_id } if is_superuser else { 'profileId': profile_id }
        yield from API_DCM(
          config,
          task['auth_cm'],
          iterate=True,
          internal=is_superuser
        ).advertisers().list(**kwargs).execute()

  cm_advertiser_clear(config, task)

  # write advertisers to database
  put_rows(
    config,
    task['auth_bigquery'],
    { 'bigquery': {
      'dataset': task['dataset'],
      'table': 'CM_Advertisers',
      'schema': Discovery_To_BigQuery(
        'dfareporting',
        'v3.4'
      ).method_schema(
        'advertisers.list',
        iterate=True
      ),
      'format':'JSON'
    }},
    load_multiple()
  )

  # write advertisers to sheet
  put_rows(
    config,
    task['auth_sheets'],
    { 'sheets': {
      'sheet': task['sheet'],
      'tab': 'CM Advertisers',
      'header':False,
      'range': 'B2'
    }},
    get_rows(
      config,
      task['auth_bigquery'],
      { 'bigquery': {
        'dataset': task['dataset'],
        'query': '''SELECT
          CONCAT(AC.name, ' - ', AC.id),
          CONCAT(AD.name, ' - ', AD.id),
          AD.status
          FROM `{dataset}.CM_Advertisers` AS AD
          LEFT JOIN `{dataset}.CM_Accounts` AS AC
          ON AD.accountId=AC.id
        '''.format(**task),
        'legacy': False
      }}
    )
  )
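
Like cm_placement_load earlier, cm_advertiser_load branches on get_profile_for_api: a CM360 superuser profile is not tied to one account, so the target accountId must be passed explicitly and the internal API variant used. Just that branching, condensed into a sketch (the tuple return shape is inferred from the calls above):

# Condensed sketch of the superuser branching used by the CM loaders.
# get_profile_for_api is assumed to return (is_superuser, profile_id).
def build_list_kwargs(is_superuser, profile_id, account_id):
  kwargs = {'profileId': profile_id}
  if is_superuser:
    # Superuser profiles must name the target account explicitly.
    kwargs['accountId'] = account_id
  return kwargs

assert build_list_kwargs(True, '123', '456') == {'profileId': '123',
                                                 'accountId': '456'}
assert build_list_kwargs(False, '123', '456') == {'profileId': '123'}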
Example #24
def pacing_audit():
    rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Pacing",
                "range": "A2:Z"
            }
        })

    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Pacing",
                "schema": [
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "Insertion_Order",
                        "type": "STRING"
                    },
                    {
                        "name": "Line_Item",
                        "type": "STRING"
                    },
                    {
                        "name": "Period",
                        "type": "STRING"
                    },
                    {
                        "name": "Period_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Daily_Budget",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Daily_Budget_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Daily_Impressions",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Daily_Impressions_Edit",
                        "type": "INTEGER"
                    },
                ],
                "format":
                "CSV"
            }
        }, rows)

    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_Pacing",
                  """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Pacing' AS Operation,
            CASE
              WHEN Period_Edit IS NULL THEN 'Missing Period.'
              WHEN Type_Edit IS NULL THEN 'Missing Type.'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_Pacing`
        )
        WHERE
          Error IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      ;
    """.format(**project.task),
                  legacy=False)

    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_Pacing",
                  """SELECT *
      FROM `{dataset}.SHEET_Pacing`
      WHERE (
        REGEXP_CONTAINS(Insertion_Order, r" - (\d+)$")
        OR REGEXP_CONTAINS(Line_Item, r" - (\d+)$")
      )
      AND Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_Pacing` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_Pacing` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Example #25
def bid_strategy_audit():
    rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Bid Strategy",
                "range": "A2:U"
            }
        })

    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_BidStrategy",
                "schema": [
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "Insertion_Order",
                        "type": "STRING"
                    },
                    {
                        "name": "Line_Item",
                        "type": "STRING"
                    },
                    {
                        "name": "Fixed_Bid",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Fixed_Bid_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Auto_Bid_Goal",
                        "type": "STRING"
                    },
                    {
                        "name": "Auto_Bid_Goal_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Auto_Bid_Amount",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Auto_Bid_Amount_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Auto_Bid_Algorithm",
                        "type": "STRING"
                    },
                    {
                        "name": "Auto_Bid_Algorithm_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Performance_Goal_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Performance_Goal_Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Performance_Goal_Amount",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Performance_Goal_Amount_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Performance_Goal_Average_CPM_Bid",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Performance_Goal_Average_CPM_Bid_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Performance_Goal_Algorithm",
                        "type": "STRING"
                    },
                    {
                        "name": "Performance_Goal_Algorithm_Edit",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        }, rows)

    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_BidStrategy",
                  """WITH
        /* Check if sheet values are set */ INPUT_ERRORS AS (
        SELECT
          *
        FROM (
          SELECT
            'Bid Strategy' AS Operation,
            CASE
              WHEN Insertion_Order IS NOT NULL AND Line_Item IS NOT NULL THEN
                CASE
                  WHEN Fixed_Bid_Edit IS NOT NULL AND Auto_Bid_Goal_Edit IS NULL AND Auto_Bid_Algorithm_Edit IS NOT NULL THEN 'Both Fixed Bid and Bid Algorithm exist.'
                  WHEN Fixed_Bid_Edit IS NULL AND Auto_Bid_Goal_Edit IS NOT NULL AND Auto_Bid_Algorithm_Edit IS NOT NULL THEN 'Both Bid Goal and Bid Algorithm exist.'
                  WHEN Fixed_Bid_Edit IS NOT NULL AND Auto_Bid_Goal_Edit IS NOT NULL AND Auto_Bid_Algorithm_Edit IS NULL THEN 'Both Fixed Bid and Bid Goal exist.'
                  WHEN Fixed_Bid_Edit IS NOT NULL AND Auto_Bid_Goal_Edit IS NOT NULL AND Auto_Bid_Algorithm_Edit IS NOT NULL THEN 'All bid fields exist.'
                  ELSE NULL
                END
            ELSE
            NULL
          END
            AS Error,
            'ERROR' AS Severity,
            COALESCE(Line_Item,
              Insertion_Order,
              'BLANK') AS Id
          FROM
            `{dataset}.SHEET_BidStrategy` )
        WHERE
          Error IS NOT NULL )
      SELECT
        *
      FROM
        INPUT_ERRORS ;
    """.format(**project.task),
                  legacy=False)

    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_BidStrategy",
                  """SELECT *
      FROM `{dataset}.SHEET_BidStrategy`
      WHERE (
        REGEXP_CONTAINS(Insertion_Order, r" - (\d+)$")
        OR REGEXP_CONTAINS(Line_Item, r" - (\d+)$")
      )
      AND Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_BidStrategy` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_BidStrategy` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Example #26
def pacing_load():

    # write pacings to sheet
    rows = get_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "query":
                """SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId),
         NULL,
         I.pacing.pacingPeriod,
         I.pacing.pacingPeriod,
         I.pacing.pacingType,
         I.pacing.pacingType,
         CAST(I.pacing.dailyMaxMicros AS INT64) / 1000000,
         CAST(I.pacing.dailyMaxMicros AS INT64) / 1000000,
         I.pacing.dailyMaxImpressions,
         I.pacing.dailyMaxImpressions
       FROM `{dataset}.DV_InsertionOrders` AS I
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON I.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON I.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId),
         CONCAT(L.displayName, ' - ', L.lineItemId),
         L.pacing.pacingPeriod,
         L.pacing.pacingPeriod,
         L.pacing.pacingType,
         L.pacing.pacingType,
         CAST(L.pacing.dailyMaxMicros AS INT64) / 1000000,
         CAST(L.pacing.dailyMaxMicros AS INT64) / 1000000,
         L.pacing.dailyMaxImpressions,
         L.pacing.dailyMaxImpressions
       FROM `{dataset}.DV_LineItems` AS L
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON L.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
       ON L.insertionOrderId=I.insertionOrderId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON L.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       """.format(**project.task),
                "legacy":
                False
            }
        })

    put_rows(project.task["auth_sheets"], {
        "sheets": {
            "sheet": project.task["sheet"],
            "tab": "Pacing",
            "range": "A2"
        }
    }, rows)
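
DV360 money fields such as pacing.dailyMaxMicros are expressed in micros, where 1,000,000 micros equal one currency unit, so loading to the sheet divides by 1,000,000 and patching multiplies back. The round trip in two helpers:

# Micros round trip: 1 currency unit == 1,000,000 micros.
MICROS_PER_UNIT = 1000000

def micros_to_units(micros):
  return int(micros) / MICROS_PER_UNIT

def units_to_micros(units):
  return int(units * MICROS_PER_UNIT)

assert micros_to_units(2500000) == 2.5
assert units_to_micros(2.5) == 2500000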
Example #27
def custom_list_load(config, task):

    # load multiple from user defined sheet
    def load_multiple():
        advertisers = get_rows(
            config, task['auth_sheets'], {
                'sheets': {
                    'sheet': task['sheet'],
                    'tab': 'Advertisers',
                    'header': False,
                    'range': 'A2:A'
                }
            })

        for advertiser in advertisers:
            yield from API_DV360(
                config, task['auth_dv'], iterate=True).customLists().list(
                    advertiserId=lookup_id(advertiser[0])).execute()

    custom_list_clear(config, task)

    # write to database
    put_rows(
        config, task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                task['dataset'],
                'table':
                'DV_Custom_Lists',
                'schema':
                Discovery_To_BigQuery('displayvideo',
                                      'v1').method_schema('customLists.list'),
                'format':
                'JSON'
            }
        }, load_multiple())

    # write to sheet
    put_rows(
        config, task['auth_sheets'], {
            'sheets': {
                'sheet': task['sheet'],
                'tab': 'Targeting Options',
                'header': False,
                'range': 'P2:P'
            }
        },
        get_rows(
            config, task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    task['dataset'],
                    'query':
                    """SELECT
           CONCAT(I.displayName, ' - ', I.customListId),
           FROM `{dataset}.DV_Custom_Lists` AS I
           GROUP BY 1
           ORDER BY 1
        """.format(**task),
                    'legacy':
                    False
                }
            }))
Example #28
def line_item_load():

    # load line items for multiple advertisers from the user defined sheet
    def load_multiple():
        rows = get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Advertisers",
                    "range": "A2:A"
                }
            })

        for row in rows:
            yield from API_DV360(project.task["auth_dv"],
                                 iterate=True).advertisers().lineItems().list(
                                     advertiserId=lookup_id(row[0])).execute()

    # write line_items to database
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "DV_LineItems",
                "schema":
                Discovery_To_BigQuery(
                    "displayvideo",
                    "v1").method_schema("advertisers.lineItems.list"),
                "format":
                "JSON"
            }
        }, load_multiple())

    # write line items to sheet
    rows = get_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "query":
                """SELECT
        CONCAT(P.displayName, ' - ', P.partnerId),
        CONCAT(A.displayName, ' - ', A.advertiserId),
        CONCAT(C.displayName, ' - ', C.campaignId),
        CONCAT(I.displayName, ' - ', I.insertionOrderId),
        CONCAT(L.displayName, ' - ', L.lineItemId),
        L.entityStatus,
        ARRAY_TO_STRING(L.warningMessages, '\\n'),
        FROM `{dataset}.DV_LineItems` AS L
        LEFT JOIN `{dataset}.DV_Advertisers` AS A
        ON L.advertiserId=A.advertiserId
        LEFT JOIN `{dataset}.DV_Campaigns` AS C
        ON L.campaignId=C.campaignId
        LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
        ON L.insertionOrderId=I.insertionOrderId
        LEFT JOIN `{dataset}.DV_Partners` AS P
        ON A.partnerId=P.partnerId
      """.format(**project.task),
                "legacy":
                False
            }
        })

    put_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Line Items",
                "range": "B2"
            }
        }, rows)
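
Every *_load function in this file wraps its API calls in a local generator and hands that generator to put_rows, so records stream to BigQuery without ever being held in memory as one list. The essential shape of the pattern (fetch_children is a hypothetical stand-in for the API_DV360(...).list(...).execute() iterator):

# Essential shape of the load_multiple pattern: fan out over parent ids
# and stream child records. fetch_children is a hypothetical stand-in
# for the API_DV360(...).list(...).execute() call.
def load_multiple(parent_ids, fetch_children):
  for parent_id in parent_ids:
    yield from fetch_children(parent_id)

rows = load_multiple(['1', '2'], lambda pid: [{'parent': pid}])
assert list(rows) == [{'parent': '1'}, {'parent': '2'}]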
Example #29
def line_item_map_audit():
  rows = get_rows(
      project.task['auth_sheets'], {
          'sheets': {
              'sheet': project.task['sheet'],
              'tab': 'Line Items Map',
              'range': 'A2:Z'
          }
      })

  put_rows(
      project.task['auth_bigquery'], {
          'bigquery': {
              'dataset': project.task['dataset'],
              'table': 'SHEET_LineItemMaps',
              'schema': [{
                  'name': 'Action',
                  'type': 'STRING'
              }, {
                  'name': 'Line_Item',
                  'type': 'STRING'
              }, {
                  'name': 'Creative',
                  'type': 'STRING'
              }],
              'format': 'CSV'
          }
      }, rows)

  query_to_view(
      project.task['auth_bigquery'],
      project.id,
      project.task['dataset'],
      'AUDIT_LineItemMaps',
      """WITH
      LINEITEM_ERRORS AS (
      SELECT
        'Line Items Map' AS Operation,
        'Missing Line Item.' AS Error,
        'ERROR' AS Severity,
        COALESCE(Line_Item, 'BLANK') AS Id
      FROM
        `{dataset}.SHEET_LineItemMaps` AS M
      LEFT JOIN
        `{dataset}.DV_LineItems` AS L
      ON
        M.Line_Item=CONCAT(L.displayName, ' - ', L.lineItemId)
      WHERE L IS NULL
      ),
      CREATIVE_ERRORS AS (
      SELECT
        'Line Items Map' AS Operation,
        'Missing Creative.' AS Error,
        'ERROR' AS Severity,
        COALESCE(Creative, 'BLANK') AS Id
      FROM
        `{dataset}.SHEET_LineItemMaps` AS M
      LEFT JOIN
        `{dataset}.DV_Creatives` AS C
      ON
        M.Creative=CONCAT(C.displayName, ' - ', C.creativeId)
      WHERE C IS NULL
      )
      SELECT * FROM LINEITEM_ERRORS
      UNION ALL
      SELECT * FROM CREATIVE_ERRORS
      ;
    """.format(**project.task),
      legacy=False)
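
Both CTEs in AUDIT_LineItemMaps use the LEFT JOIN ... WHERE alias IS NULL anti-join: sheet rows that find no match in the corresponding DV_* table are reported as missing. The same check in plain Python (names simplified):

# Anti-join sketch: report sheet values with no match in the API table.
sheet_values = ['LI A - 1', 'LI B - 2']
api_values = {'LI A - 1'}

missing = [value for value in sheet_values if value not in api_values]
assert missing == ['LI B - 2']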
Example #30
def negative_keyword_list_load():

    # load multiple from user defined sheet
    def load_multiple():
        advertisers = get_rows(
            project.task['auth_sheets'], {
                'sheets': {
                    'sheet': project.task['sheet'],
                    'tab': 'Advertisers',
                    'header': False,
                    'range': 'A2:A'
                }
            })

        for advertiser in advertisers:
            yield from API_DV360(
                project.task['auth_dv'],
                iterate=True).advertisers().negativeKeywordLists().list(
                    advertiserId=lookup_id(advertiser[0])).execute()

    negative_keyword_list_clear()

    # write to database
    put_rows(
        project.task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                project.task['dataset'],
                'table':
                'DV_Negative_Keywod_Lists',
                'schema':
                Discovery_To_BigQuery('displayvideo', 'v1').method_schema(
                    'advertisers.negativeKeywordLists.list'),
                'format':
                'JSON'
            }
        }, load_multiple())

    # write to sheet
    put_rows(
        project.task['auth_sheets'], {
            'sheets': {
                'sheet': project.task['sheet'],
                'tab': 'Targeting Options',
                'header': False,
                'range': 'J2:J'
            }
        },
        get_rows(
            project.task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    project.task['dataset'],
                    'query':
                    """SELECT
           CONCAT(A.displayName, ' - ', A.advertiserId, ' > ', L.displayName, ' - ', L.negativeKeywordListId),
           FROM `{dataset}.DV_Negative_Keywod_Lists` AS L
           LEFT JOIN `{dataset}.DV_Advertisers` AS A
           ON L.advertiserId=A.advertiserId
        """.format(**project.task),
                    'legacy':
                    False
                }
            }))
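
Across all of these examples, get_rows and put_rows take a source or destination descriptor whose single top-level key ('sheets' or 'bigquery') selects the connector. The two shapes as used in this file (keys copied from the calls above; values are placeholders, and which keys are honored depends on the connector):

# The two descriptor shapes used throughout these examples; values are
# placeholders.
sheets_destination = {
    'sheets': {
        'sheet': 'SHEET_URL_OR_NAME',
        'tab': 'Targeting Options',
        'header': False,
        'range': 'A2',
    }
}

bigquery_destination = {
    'bigquery': {
        'dataset': 'my_dataset',
        'table': 'DV_Example',
        'schema': [{'name': 'value', 'type': 'STRING'}],
        'format': 'JSON',
    }
}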