Ejemplo n.º 1
0
def copy_view(view_id):
    """Copy a BigQuery view from the source dataset to the target dataset.

    Fetches the view's SQL from the 'from' project/dataset, re-points every
    table reference at the 'to' project/dataset, and recreates the view there.
    """
    bq = project.task['bigquery']
    src, dst = bq['from'], bq['to']

    if project.verbose:
        print('ANONYMIZE VIEW', dst['dataset'], view_id)

    # Pull the view definition from the source table resource.
    view = API_BigQuery(project.task['auth']).tables().get(
        projectId=src['project'],
        datasetId=src['dataset'],
        tableId=view_id).execute()['view']

    # Legacy SQL references tables as [project:dataset. ; standard SQL uses `project.dataset.
    prefix_template = '[%s:%s.' if view['useLegacySql'] else '`%s.%s.'

    # Swap every source project/dataset prefix for the target one.
    query = view['query'].replace(
        prefix_template % (src['project'], src['dataset']),
        prefix_template % (dst['project'], dst['dataset']))

    query_to_view(project.task['auth'],
                  dst['project'],
                  dst['dataset'],
                  view_id,
                  query,
                  legacy=view['useLegacySql'],
                  replace=True)
Ejemplo n.º 2
0
def mapping():
    """Build a CASE-based tagging view from a mapping sheet.

    Copies a template sheet tab if missing, reads Dimension/Tag/Column/Keyword
    rows from it, and writes a legacy-SQL view over the input table that adds
    one CASE column per dimension labeling rows by keyword matches.
    """
    if project.verbose:
        print('MAPPING')

    # create the sheet from template if it does not exist
    sheets_tab_copy(project.task['auth'], TEMPLATE_SHEET, TEMPLATE_TAB,
                    project.task['sheet'], project.task['tab'])

    dimensions = {}
    defaults = {}
    rows = sheets_read(project.task['auth'], project.task['sheet'],
                       project.task['tab'], 'A1:D')

    # if rows don't exist, query is still created without mapping ( allows blank maps )
    if rows:
        # sanitize mapping
        # 0 = Dimension, 1 = Tag, 2 = Column, 3 = Keyword
        for row in rows[1:]:
            if project.verbose:
                print('ROW: ', row)
            # Sanitize cells; a list (not a lazy map object) keeps the
            # row[n] indexing below valid under Python 3.
            row = [RE_SQLINJECT.sub('', c.strip()) for c in row]
            if len(row) == 2:  # default
                defaults.setdefault(row[0], row[1])
            else:  # tag
                dimensions.setdefault(row[0], {})  # dimension
                dimensions[row[0]].setdefault(row[1], {})  # tag
                dimensions[row[0]][row[1]].setdefault(row[2], [])  # column
                dimensions[row[0]][row[1]][row[2]].extend(
                    [k.strip() for k in row[3].split(',') if k])  # keywords

    # construct query: one CASE column per dimension, matching any keyword
    query = 'SELECT\n  *,\n'
    for dimension, tags in dimensions.items():
        query += '  CASE\n'
        for tag, columns in tags.items():
            query += '    WHEN '
            for column, keywords in columns.items():
                for count, keyword in enumerate(keywords):
                    if count != 0:
                        query += 'OR '
                    query += '%s CONTAINS "%s" ' % (column, keyword)
            query += 'THEN "%s"\n' % tag
        query += '    ELSE "%s"\n  END AS %s,\n' % (defaults.get(
            dimension, ''), dimension)
    query += 'FROM [%s.%s]' % (project.task['in']['dataset'],
                               project.task['in']['table'])

    if project.verbose:
        print('QUERY: ', query)

    # write to view
    query_to_view(project.task['out']['auth'],
                  project.id,
                  project.task['out']['dataset'],
                  project.task['out']['view'],
                  query,
                  replace=True)
Ejemplo n.º 3
0
def view_combine(name, combos_table, main_table, shadow_table):
  """Create the Dynamic_Costs_<name>_Analysis view scaling DBM cost onto combos."""
  if project.verbose:
    print("DYNAMIC COSTS VIEW:", name)

  # Substitutions shared by both query variants; the unused shadow_table key
  # is simply ignored by %(name)s formatting in the no-shadow branch.
  params = {
    "project": project.id,
    "dataset": project.task['out']['dataset'],
    "combos_table": combos_table,
    "main_table": main_table,
    "shadow_table": shadow_table
  }

  if shadow_table:
    # Cost comes from the shadow report, matched by placement name prefix.
    query = """
      SELECT
        combos.*,
        (combos.Impressions / main.Impressions) * shadow.Dbm_Cost_Account_Currency AS Scaled_Dbm_Cost_Account_Currency
      FROM `%(project)s.%(dataset)s.%(combos_table)s` combos
      JOIN `%(project)s.%(dataset)s.%(main_table)s` main
      ON combos.Placement_Id = main.Placement_Id
      JOIN `%(project)s.%(dataset)s.%(shadow_table)s` shadow
      ON STARTS_WITH(shadow.Placement, combos.Placement)
      """ % params
  else:
    # No shadow report: scale the main report's own cost.
    query = """
      SELECT
        combos.*,
        (combos.Impressions / main.Impressions) * main.Dbm_Cost_Account_Currency AS Scaled_Dbm_Cost_Account_Currency
      FROM `%(project)s.%(dataset)s.%(combos_table)s` combos
      JOIN `%(project)s.%(dataset)s.%(main_table)s` main
      ON combos.Placement_Id = main.Placement_Id
      """ % params

  query_to_view(
    project.task['out']['auth'],
    project.id,
    project.task['out']['dataset'],
    'Dynamic_Costs_%s_Analysis' % name,
    query,
    False
  )
Ejemplo n.º 4
0
def census_write(query, table):
    """Write a census query to BigQuery as either a table or a view.

    Args:
        query: SQL string to run.
        table: destination table/view name in the task's 'to' dataset.
    """

    if project.verbose: print('%s: %s' % (table, query))

    if project.task['to']['type'] == 'table':
        # Fix: the 'table' branch previously duplicated the 'view' branch and
        # created a view; materialize an actual table instead (matches the
        # config/task variant of census_write).
        query_to_table(project.task['auth'],
                       project.id,
                       project.task['to']['dataset'],
                       table,
                       query,
                       legacy=False)

    elif project.task['to']['type'] == 'view':
        query_to_view(project.task['auth'],
                      project.id,
                      project.task['to']['dataset'],
                      table,
                      query,
                      legacy=False)
Ejemplo n.º 5
0
def census_write(config, task, query, table):
    """Write a census query to BigQuery as either a table or a view.

    Args:
        config: execution configuration carrying project and verbosity.
        task: recipe task dict with 'auth' and 'to' destination settings.
        query: SQL string to run.
        table: destination table/view name in the task's 'to' dataset.
    """

    if config.verbose:
        print('%s: %s' % (table, query))

    destination = task['to']['type']

    # Materialize a table or define a view depending on the destination type.
    if destination == 'table':
        query_to_table(config,
                       task['auth'],
                       config.project,
                       task['to']['dataset'],
                       table,
                       query,
                       legacy=False)
    elif destination == 'view':
        query_to_view(config,
                      task['auth'],
                      config.project,
                      task['to']['dataset'],
                      table,
                      query,
                      legacy=False)
Ejemplo n.º 6
0
def view_combine(config, task, name, combos_table, main_table, shadow_table):
    """Create the Dynamic_Costs_<name>_Analysis view scaling DBM cost onto combos."""
    if config.verbose:
        print('DYNAMIC COSTS VIEW:', name)

    # Substitutions shared by both query variants; the unused shadow_table key
    # is simply ignored by %(name)s formatting in the no-shadow branch.
    values = {
        'project': config.project,
        'dataset': task['out']['dataset'],
        'combos_table': combos_table,
        'main_table': main_table,
        'shadow_table': shadow_table
    }

    if shadow_table:
        # Cost comes from the shadow report, matched by placement name prefix.
        query = """
      SELECT
        combos.*,
        (combos.Impressions / main.Impressions) * shadow.Dbm_Cost_Account_Currency AS Scaled_Dbm_Cost_Account_Currency
      FROM `%(project)s.%(dataset)s.%(combos_table)s` combos
      JOIN `%(project)s.%(dataset)s.%(main_table)s` main
      ON combos.Placement_Id = main.Placement_Id
      JOIN `%(project)s.%(dataset)s.%(shadow_table)s` shadow
      ON STARTS_WITH(shadow.Placement, combos.Placement)
      """ % values
    else:
        # No shadow report: scale the main report's own cost.
        query = """
      SELECT
        combos.*,
        (combos.Impressions / main.Impressions) * main.Dbm_Cost_Account_Currency AS Scaled_Dbm_Cost_Account_Currency
      FROM `%(project)s.%(dataset)s.%(combos_table)s` combos
      JOIN `%(project)s.%(dataset)s.%(main_table)s` main
      ON combos.Placement_Id = main.Placement_Id
      """ % values

    query_to_view(config, task['out']['auth'], config.project,
                  task['out']['dataset'], 'Dynamic_Costs_%s_Analysis' % name,
                  query, False)
Ejemplo n.º 7
0
def preview_li_insert(config, task):
    """Insert DV360 line items previewed in the sheet.

    Loads the 'LI Preview' sheet tab into the SHEET_LI_Inserts BigQuery
    table, builds the INSERT_LI view shaping each INSERT row into a
    lineItems.create request body, calls the DV360 API once per row, and
    logs success or failure for each line item.
    """

    # Load the previewed line item rows from the sheet into BigQuery.
    put_rows(
        task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "table":
                "SHEET_LI_Inserts",
                "schema": [
                    {
                        "name": "status",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "error",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "action",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "advertiser",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "campaign",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "insertionOrder",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "displayName",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "lineItemType",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "entityStatus",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "bidAmount",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "budgetSegmentStartDate",
                        "type": "DATE",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "budgetSegmentEndDate",
                        "type": "DATE",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "lineItemBudgetAllocationType",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "pacingPeriod",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "pacingType",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "dailyMaxMicros",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "dailyMaxImpressions",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "frequencyCapUnlimited",
                        "type": "BOOLEAN",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "frequencyCapTimeUnit",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "frequencyCapTimeUnitCount",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "frequencyCapMaxImpressions",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "postViewCountPercentageMillis",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "postViewLookbackWindowDays",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "postClickLookbackWindowDays",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "biddingStrategyPerformanceGoalType",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "performanceGoalAmountMicros",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "maxAverageCpmBidAmountMicros",
                        "type": "INTEGER",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "customBiddingAlgorithm",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "floodlightActivityIds",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "inventorySourceIds",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "partnerCPMFeeCostType",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "partnerCPMFeeInvoiceType",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "partnerCPMFeeAmount",
                        "type": "FLOAT",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "partnerMediaFeeCostType",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "partnerMediaFeeInvoiceType",
                        "type": "STRING",
                        "mode": "NULLABLE"
                    },
                    {
                        "name": "partnerMediaFeePercent",
                        "type": "FLOAT",
                        "mode": "NULLABLE"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            task["auth_sheets"], {
                "sheets": {
                    "sheet": task["sheet"],
                    "tab": "LI Preview",
                    "header": False,
                    "range": "A2:AJ"
                }
            }))

    # Create the insert view: each INSERT row becomes (advertiserId, body),
    # where body is the lineItems.create request structure expanded below.
    # NOTE(review): the \d regex escapes live inside a non-raw triple-quoted
    # Python string; they pass through unchanged but emit SyntaxWarning on
    # newer Python versions — consider a raw string if this is modernized.
    query_to_view(task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "INSERT_LI",
                  """
      SELECT
        REGEXP_EXTRACT(advertiser, r' - (\d+)$') AS advertiserId,
        STRUCT(
          REGEXP_EXTRACT(advertiser, r' - (\d+)$') AS advertiserId,
          REGEXP_EXTRACT(campaign, r' - (\d+)$') AS campaignId,
          REGEXP_EXTRACT(insertionOrder, r' - (\d+)$') AS insertionOrderId,
          displayName,
          lineItemType,
          entityStatus,
          ARRAY((
            SELECT partnerCost FROM (
              SELECT
              IF(partnerCPMFeeAmount IS NOT NULL,
                STRUCT(
                  'PARTNER_COST_FEE_TYPE_CPM_FEE' AS feeType,
                  partnerCPMFeeCostType AS costType,
                  partnerCPMFeeInvoiceType AS invoiceType,
                  COALESCE(partnerCPMFeeAmount, 0) * 1000000 AS feeAmount
                ), NULL) AS partnerCost
                UNION ALL
                SELECT
              IF(partnerMediaFeePercent IS NOT NULL,
              STRUCT(
                'PARTNER_COST_FEE_TYPE_MEDIA_FEE' AS feeType,
                partnerMediaFeeCostType AS costType,
                partnerMediaFeeInvoiceType AS invoiceType,
                COALESCE(partnerMediaFeePercent, 0) * 1000 AS feePercentageMillis
              ), NULL) AS partnerCost
            ) WHERE partnerCost IS NOT NULL)
          ) AS partnerCosts,
          STRUCT( 'LINE_ITEM_FLIGHT_DATE_TYPE_INHERITED' AS flightDateType ) AS flight,
          STRUCT ( lineItemBudgetAllocationType AS budgetAllocationType ) AS budget,
          STRUCT (
            pacingPeriod,
            pacingType,
            IF(dailyMaxMicros IS NOT NULL, dailyMaxMicros * 1000000, NULL) AS dailyMaxMicros,
            IF(dailyMaxMicros IS NULL, dailyMaxImpressions, NULL) AS dailyMaxImpressions
          ) AS pacing,
          STRUCT ( CAST(frequencyCapUnlimited AS BOOL) AS unlimited,
            frequencyCapTimeUnit AS timeUnit,
            CAST(frequencyCapTimeUnitCount AS INT64) AS timeUnitCount,
            CAST(frequencyCapMaxImpressions AS INT64) AS maxImpressions
          ) AS frequencyCap,
          STRUCT ( 'PARTNER_REVENUE_MODEL_MARKUP_TYPE_TOTAL_MEDIA_COST_MARKUP' AS markupType ) AS partnerRevenueModel,
          STRUCT ( STRUCT ( CAST(bidAmount * 1000000 AS INT64) AS bidAmountMicros ) AS fixedBid ) AS bidStrategy,
          STRUCT(
            postViewCountPercentageMillis AS postViewCountPercentageMillis,
            ARRAY(
              SELECT
                STRUCT(
                  floodlightActivityId,
                  postClickLookbackWindowDays,
                  postViewLookbackWindowDays
                )
                FROM UNNEST(SPLIT(floodlightActivityIds)) AS floodlightActivityId
            ) AS floodlightActivityConfigs
          ) AS conversionCounting
        ) AS body
      FROM `{dataset}.SHEET_LI_Inserts`
      WHERE action = 'INSERT'
    """.format(**task),
                  legacy=False)

    # Write LIs to the DV360 API: one lineItems.create call per INSERT_LI row.
    for row in get_rows(
            task["auth_bigquery"],
        {"bigquery": {
            "dataset": task["dataset"],
            "table": "INSERT_LI",
        }},
            as_object=True):
        try:
            response = API_DV360(
                task['auth_dv']).advertisers().lineItems().create(
                    **row).execute()
            log_write('LI', row, response['lineItemId'], None)
        except Exception as e:
            # Record the per-row failure instead of aborting the whole batch.
            log_write('LI', row, None, str(e))

    # Flush the accumulated log entries.
    log_write(config)
Ejemplo n.º 8
0
def line_item_map_audit():
  """Audit the 'Line Items Map' sheet against DV360 line items and creatives.

  Copies the sheet rows into the SHEET_LineItemMaps BigQuery table, then
  creates the AUDIT_LineItemMaps view flagging rows whose referenced line
  item or creative cannot be matched to the DV_LineItems / DV_Creatives
  tables.
  """
  # Pull the raw mapping rows (Action, Line_Item, Creative) from the sheet.
  rows = get_rows(
      project.task['auth_sheets'], {
          'sheets': {
              'sheet': project.task['sheet'],
              'tab': 'Line Items Map',
              'range': 'A2:Z'
          }
      })

  # Land the sheet rows in BigQuery so the audit query can join against them.
  put_rows(
      project.task['auth_bigquery'], {
          'bigquery': {
              'dataset': project.task['dataset'],
              'table': 'SHEET_LineItemMaps',
              'schema': [{
                  'name': 'Action',
                  'type': 'STRING'
              }, {
                  'name': 'Line_Item',
                  'type': 'STRING'
              }, {
                  'name': 'Creative',
                  'type': 'STRING'
              }],
              'format': 'CSV'
          }
      }, rows)

  # NOTE(review): CREATIVE_ERRORS reports 'Missing Line Item.' and joins on
  # M.Line_Item even though it checks DV_Creatives — presumably it should use
  # the Creative column and a 'Missing Creative.' message; confirm against the
  # sheet layout before changing the SQL.
  query_to_view(
      project.task['auth_bigquery'],
      project.id,
      project.task['dataset'],
      'AUDIT_LineItemMaps',
      """WITH
      LINEITEM_ERRORS AS (
      SELECT
        'Line Items Map' AS Operation,
        'Missing Line Item.' AS Error,
        'ERROR' AS Severity,
        COALESCE(Line_Item, 'BLANK') AS Id
      FROM
        `{dataset}.SHEET_LineItemMaps` AS M
      LEFT JOIN
        `{dataset}.DV_LineItems` AS L
      ON
        M.Line_Item=CONCAT(L.displayName, ' - ', L.lineItemId)
      WHERE L IS NULL
      ),
      CREATIVE_ERRORS AS (
      SELECT
        'Line Items Map' AS Operation,
        'Missing Line Item.' AS Error,
        'ERROR' AS Severity,
        COALESCE(Line_Item, 'BLANK') AS Id
      FROM
        `{dataset}.SHEET_LineItemMaps` AS M
      LEFT JOIN
        `{dataset}.DV_Creatives` AS C
      ON
        M.Line_Item=CONCAT(C.displayName, ' - ', C.creativeId)
      WHERE C IS NULL
      )
      SELECT * FROM LINEITEM_ERRORS
      UNION ALL
      SELECT * FROM CREATIVE_ERRORS
      ;
    """.format(**project.task),
      legacy=False)
Ejemplo n.º 9
0
def segment_audit():
    """Audit edited budget segments from the sheet and stage safe patches.

    Loads the 'Segments' sheet tab into the SHEET_Segments BigQuery table,
    creates the AUDIT_Segments view (missing values, duplicates, past or
    excessive segments), then creates PATCH_Segments containing only rows
    with no ERROR-severity findings.
    """

    # Move Segments To BigQuery
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Segments",
                "schema": [
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "Insertion_Order",
                        "type": "STRING"
                    },
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Start_Date",
                        "type": "STRING"
                    },
                    {
                        "name": "Start_Date_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "End_Date",
                        "type": "STRING"
                    },
                    {
                        "name": "End_Date_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Budget_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Description",
                        "type": "STRING"
                    },
                    {
                        "name": "Description_Edit",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Segments",
                    "header": False,
                    "range": "A2:M"
                }
            }))

    # Create Audit View And Write To Sheets
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_Segments",
                  """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Segment' AS Operation,
            CASE
              WHEN Start_Date_Edit IS NULL THEN 'Missing Start Date.'
              WHEN End_Date_Edit IS NULL THEN 'Missing End Date .'
              WHEN Budget_Edit IS NULL THEN 'Missing Budget.'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_Segments`
        )
        WHERE
          Error IS NOT NULL
      ),

      /* Check if duplicate Segments */
      DUPLICATE_ERRORS AS (
        SELECT
          'Segment' AS Operation,
          'Duplicate Segment.' AS Error,
          'WARNING' AS Severity,
          COALESCE(Insertion_Order, 'BLANK') AS Id
        FROM (
          SELECT Insertion_Order, Start_Date_Edit, End_Date_Edit, COUNT(*) AS count
          FROM `{dataset}.SHEET_Segments`
          GROUP BY 1, 2, 3
          HAVING count > 1
        )
      ),

      /* Check if budget segments are current */
      SEGMENT_ERRORS AS (
        SELECT
          *
        FROM (
          SELECT
            'Segments' AS Operation,
            CASE
              WHEN SAFE_CAST(Budget_Edit AS FLOAT64) > 1000000 THEN 'Segment has excessive spend.'
              WHEN SAFE_CAST(Start_Date AS DATE) != SAFE_CAST(Start_Date_Edit AS DATE) AND SAFE_CAST(Start_Date_Edit AS DATE) < CURRENT_DATE() THEN 'Segment starts in past.'
              WHEN SAFE_CAST(End_Date AS DATE) != SAFE_CAST(End_Date_Edit AS DATE) AND SAFE_CAST(End_Date_Edit AS DATE) < CURRENT_DATE() THEN 'Segment ends in past.'
            ELSE
            NULL
          END
            AS Error,
            'ERROR' AS Severity,
            COALESCE(Insertion_Order, 'BLANK') AS Id
          FROM
            `{dataset}.SHEET_Segments`
          )
        WHERE
          Error IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      UNION ALL
      SELECT * FROM DUPLICATE_ERRORS
      UNION ALL
      SELECT * FROM SEGMENT_ERRORS
      ;
    """.format(**project.task),
                  legacy=False)

    # Stage only rows without ERROR-severity audit findings for patching.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_Segments",
                  """SELECT *
      FROM `{dataset}.SHEET_Segments`
      WHERE Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_Segments` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Ejemplo n.º 10
0
def targeting_combine():

    # read destination targeting
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Destination_Targeting",
                "schema": [
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "LineItem",
                        "type": "STRING"
                    },
                    {
                        "name": "Authorized_Seller",
                        "type": "STRING"
                    },
                    {
                        "name": "User_Rewarded_Content",
                        "type": "STRING"
                    },
                    {
                        "name": "Exchange",
                        "type": "STRING"
                    },
                    {
                        "name": "Sub_Exchange",
                        "type": "STRING"
                    },
                    {
                        "name": "Channel",
                        "type": "STRING"
                    },
                    {
                        "name": "Channel_Negative",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Inventory_Source",
                        "type": "STRING"
                    },
                    {
                        "name": "Inventory_Group",
                        "type": "STRING"
                    },
                    {
                        "name": "URL",
                        "type": "STRING"
                    },
                    {
                        "name": "URL_Negative",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "App",
                        "type": "STRING"
                    },
                    {
                        "name": "App_Negative",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "App_Category",
                        "type": "STRING"
                    },
                    {
                        "name": "App_Category_Negative",
                        "type": "BOOLEAN"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Destination Targeting",
                    "range": "A2:Z"
                }
            }))

    # read brand safety targeting
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Brand_Safety_Targeting",
                "schema": [
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "LineItem",
                        "type": "STRING"
                    },
                    {
                        "name": "Content_Label",
                        "type": "STRING"
                    },
                    {
                        "name": "Sensitive_Category",
                        "type": "STRING"
                    },
                    {
                        "name": "Negative_Keyword_List",
                        "type": "STRING"
                    },
                    {
                        "name": "Category",
                        "type": "STRING"
                    },
                    {
                        "name": "Category_Negative",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Keyword",
                        "type": "STRING"
                    },
                    {
                        "name": "Keyword_Negative",
                        "type": "BOOLEAN"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Brand Safety Targeting",
                    "range": "A2:Z"
                }
            }))

    # read demographic targeting
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Demographic_Targeting",
                "schema": [
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "LineItem",
                        "type": "STRING"
                    },
                    {
                        "name": "Age_Range",
                        "type": "STRING"
                    },
                    {
                        "name": "Gender",
                        "type": "STRING"
                    },
                    {
                        "name": "Parental_Status",
                        "type": "STRING"
                    },
                    {
                        "name": "Household_Income",
                        "type": "STRING"
                    },
                    {
                        "name": "Language",
                        "type": "STRING"
                    },
                    {
                        "name": "Language_Negative",
                        "type": "BOOLEAN"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Demographic Targeting",
                    "range": "A2:Z"
                }
            }))

    # read audience targeting
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Audience_Targeting",
                "schema": [
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "LineItem",
                        "type": "STRING"
                    },
                    {
                        "name": "Included_1P_And_3P",
                        "type": "STRING"
                    },
                    {
                        "name": "Included_1P_And_3P_Recency",
                        "type": "STRING"
                    },
                    {
                        "name": "Excluded_1P_And_3P",
                        "type": "STRING"
                    },
                    {
                        "name": "Excluded_1P_And_3P_Recency",
                        "type": "STRING"
                    },
                    {
                        "name": "Included_Google",
                        "type": "STRING"
                    },
                    {
                        "name": "Excluded_Google",
                        "type": "STRING"
                    },
                    {
                        "name": "Included_Custom",
                        "type": "STRING"
                    },
                    {
                        "name": "Included_Combined",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Audience Targeting",
                    "range": "A2:Z"
                }
            }))

    # read device targeting
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Device_Targeting",
                "schema": [
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "LineItem",
                        "type": "STRING"
                    },
                    {
                        "name": "Device_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Make_Model",
                        "type": "STRING"
                    },
                    {
                        "name": "Make_Model_Negative",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Operating_System",
                        "type": "STRING"
                    },
                    {
                        "name": "Operating_System_Negative",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Browser",
                        "type": "STRING"
                    },
                    {
                        "name": "Browser_Negative",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Environment",
                        "type": "STRING"
                    },
                    {
                        "name": "Carrier_And_ISP",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Device Targeting",
                    "range": "A2:Z"
                }
            }))

    # read geography targeting
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Geography_Targeting",
                "schema": [
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "LineItem",
                        "type": "STRING"
                    },
                    {
                        "name": "Day_Of_Week",
                        "type": "STRING"
                    },
                    {
                        "name": "Hour_Start",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Hour_End",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Timezone",
                        "type": "STRING"
                    },
                    {
                        "name": "Geo_Region",
                        "type": "STRING"
                    },
                    {
                        "name": "Geo_Region_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Geo_Region_Negative",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Proximity_Location_List",
                        "type": "STRING"
                    },
                    {
                        "name": "Proximity_Location_List_Radius_Range",
                        "type": "STRING"
                    },
                    {
                        "name": "Regional_Location_List",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Geography Targeting",
                    "range": "A2:Z"
                }
            }))

    # read viewability targeting
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_Viewability_Targeting",
                "schema": [
                    {
                        "name": "Action",
                        "type": "STRING"
                    },
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "LineItem",
                        "type": "STRING"
                    },
                    {
                        "name": "Video_Player_Size",
                        "type": "STRING"
                    },
                    {
                        "name": "In_Stream_Position",
                        "type": "STRING"
                    },
                    {
                        "name": "Out_Stream_Position",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "On_Screen_Position",
                        "type": "STRING"
                    },
                    {
                        "name": "Viewability",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            project.task["auth_sheets"], {
                "sheets": {
                    "sheet": project.task["sheet"],
                    "tab": "Viewability Targeting",
                    "range": "A2:Z"
                }
            }))

    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "SHEET_Combined_Targeting",
                  """SELECT
        L.advertiserId AS Advertiser_Lookup,
        *
      FROM (
      SELECT
        COALESCE(A.Action,B.Action,C.Action,D.Action,E.Action,F.Action,G.Action) AS Action,
        COALESCE(A.partner,B.Partner,C.Partner,D.partner,E.Partner,F.Partner,G.Partner) AS Partner,
        COALESCE(A.Advertiser,B.Advertiser,C.Advertiser,D.Advertiser,E.Advertiser,F.Advertiser,G.Advertiser) AS Advertiser,
        COALESCE(A.LineItem,B.LineItem,C.LineItem,D.LineItem,E.LineItem,F.LineItem,G.LineItem) AS LineItem,
        * EXCEPT (Action, Partner, Advertiser, LineItem)
      FROM `DV_Target_Demo.SHEET_Destination_Targeting` AS A
      FULL OUTER JOIN `DV_Target_Demo.SHEET_Brand_Safety_Targeting` AS B
      ON A.Action=B.Action
      AND A.Partner=B.Partner
      AND A.Advertiser=B.Advertiser
      AND A.LineItem=B.LineItem
      FULL OUTER JOIN `DV_Target_Demo.SHEET_Demographic_Targeting` AS C
      ON A.Action=C.Action
      AND A.Partner=C.Partner
      AND A.Advertiser=C.Advertiser
      AND A.LineItem=C.LineItem
      FULL OUTER JOIN `DV_Target_Demo.SHEET_Audience_Targeting` AS D
      ON A.Action=D.Action
      AND A.Partner=D.Partner
      AND A.Advertiser=D.Advertiser
      AND A.LineItem=D.LineItem
      FULL OUTER JOIN `DV_Target_Demo.SHEET_Device_Targeting` AS E
      ON A.Action=E.Action
      AND A.Partner=E.Partner
      AND A.Advertiser=E.Advertiser
      AND A.LineItem=E.LineItem
      FULL OUTER JOIN `DV_Target_Demo.SHEET_Geography_Targeting` AS F
      ON A.Action=F.Action
      AND A.Partner=F.Partner
      AND A.Advertiser=F.Advertiser
      AND A.LineItem=F.LineItem
      FULL OUTER JOIN `DV_Target_Demo.SHEET_Viewability_Targeting` AS G
      ON A.Action=G.Action
      AND A.Partner=G.Partner
      AND A.Advertiser=G.Advertiser
      AND A.LineItem=G.LineItem
      ) AS T
      LEFT JOIN `{dataset}.DV_LineItems` AS L
      ON CAST(REGEXP_EXTRACT(T.LineItem, r' - (\d+)$') AS INT64)=L.lineItemId
    """.format(**project.task),
                  legacy=False)
Ejemplo n.º 11
0
def preview_li_load(config, task):
    """Build the DV360 Line Item preview and audit views from sheet rules.

    Loads the 'LI Rules' sheet tab into BigQuery (SHEET_LI_Rules), joins it
    against CM placement/campaign tables and DV advertiser/campaign/IO/LI
    tables to create the PREVIEW_LI view, validates the sheet inputs into the
    AUDIT_LI view, then writes the preview rows annotated with audit
    severity/errors back to the 'LI Preview' sheet tab.

    Args:
      config: object exposing .project (BigQuery project id), passed through
        to preview_li_clear and query_to_view.
      task: dict of recipe parameters; reads the 'auth_bigquery',
        'auth_sheets', 'dataset', and 'sheet' keys.

    Returns:
      None. All output is side effects (BigQuery table/views and sheet rows).
    """

    # presumably resets prior preview/audit state — defined elsewhere; verify
    preview_li_clear(config, task)

    # download LI Rules
    # header=False with range A2:AQ skips the sheet's header row.
    put_rows(
        task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "table":
                "SHEET_LI_Rules",
                "schema": [
                    {
                        "name": "CM_Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "DV_Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget_Allocation",
                        "type": "STRING"
                    },
                    {
                        "name": "Pacing_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Pacing_Period",
                        "type": "STRING"
                    },
                    {
                        "name": "Pacing_Period_Max_Spend",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Pacing_Period_Max_Impressions",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Frequency_Cap_Unlimited",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Frequency_Cap_Time_Unit",
                        "type": "STRING"
                    },
                    {
                        "name": "Frequency_Cap_Time_Unit_Count",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Frequency_Cap_Max_Impressions",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Post_View_Count_Percent",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Performance_Goal_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Performance_Goal_Amount",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Max_Average_CPM_Amount",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Custom_Bidding_Algorithm",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            task["auth_sheets"], {
                "sheets": {
                    "sheet": task["sheet"],
                    "tab": "LI Rules",
                    "header": False,
                    "range": "A2:AQ"
                }
            }))

    # create LI preview ( main logic )
    # NOTE(review): the SQL below embeds r' - (\d+)$' inside a NON-raw Python
    # triple-quoted string; '\d' passes through unchanged today but raises an
    # invalid-escape warning on newer Pythons — consider raw strings.
    # NOTE(review): 'budgetSegmentEndtDate' is misspelled but consistently so
    # (defined and referenced with the same spelling), so the query works.
    query_to_view(task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "PREVIEW_LI",
                  """WITH
      cm AS (
        SELECT
          CM_P.name,
          CM_P.advertiserId,
          CM_C.id AS campaignId,
          CM_C.name AS campaignName,
          CM_P.compatibility,
          CM_PG.pricingSchedule.startDate AS budgetSegmentStartDate,
          CM_PG.pricingSchedule.endDate AS budgetSegmentEndDate,
          NULLIF(CAST(CM_PP.rateOrCostNanos / 1000000000 AS INT64), 0) AS bidAmount,
          CM_PG.name AS ioDisplayName,
          CM_P.name AS liDisplayName
        FROM `{dataset}.CM_PlacementGroups` AS CM_PG, UNNEST(childPlacementIds) AS childPlacementId, UNNEST(CM_PG.pricingSchedule.pricingPeriods) AS CM_PP
        JOIN `{dataset}.CM_Placements` CM_P
        ON childPlacementId = CM_P.id
        JOIN `{dataset}.CM_Campaigns` AS CM_C
        ON CM_P.campaignId = CM_C.id
        JOIN `{dataset}.CM_Sites` AS CM_S
        ON CM_PG.siteId = CM_S.id AND CM_S.name = 'Google DBM'
        WHERE
          pg_ProductCode IS NOT NULL
          AND p_ProductCode IS NOT NULL
      ),

      sheet AS (
        SELECT
          CONCAT(dv_a.displayName, ' - ', dv_a.advertiserid) AS DV_Advertiser,
          sheet.*
        FROM `{dataset}.SHEET_LI_Rules` as sheet
        LEFT JOIN `{dataset}.DV_Campaigns` AS dv_c
        ON CAST(REGEXP_EXTRACT(sheet.DV_Campaign, r' - (\d+)$') AS INT64) = dv_c.campaignId
        LEFT JOIN `{dataset}.DV_Advertisers` AS dv_a
        ON dv_a.advertiserid=dv_c.advertiserId
      ),

      li_flattened AS (
        SELECT
          lineItemId,
          displayName,
          MAX(postViewLookbackWindowDays) AS postViewLookbackWindowDays,
          MAX(postClickLookbackWindowDays) AS postClickLookbackWindowDays,
          ARRAY_TO_STRING(ARRAY_AGG(CAST(floodlightActivityConfig.floodlightActivityId AS STRING) IGNORE NULLS), ",") AS floodlightActivityIds,
          ARRAY_TO_STRING(ARRAY_AGG(CAST(inventorySourceId AS STRING) IGNORE NULLS), ",") AS inventorySourceIds
        FROM `{dataset}.DV_LineItems`
        LEFT JOIN UNNEST(conversionCounting.floodlightActivityConfigs) AS floodlightActivityConfig
        LEFT JOIN UNNEST(inventorySourceIds) AS inventorySourceId
        GROUP BY 1,2
      ),

      io_flattened AS (
        SELECT
          insertionOrderId,
          displayName,
          MIN(DATE(segments.dateRange.startDate.year, segments.dateRange.startDate.month, segments.dateRange.startDate.day)) AS budgetSegmentStartDate,
          MAX(DATE(segments.dateRange.endDate.year, segments.dateRange.endDate.month, segments.dateRange.endDate.day)) AS budgetSegmentEndtDate,
        FROM `{dataset}.DV_InsertionOrders`
        LEFT JOIN UNNEST(budget.budgetSegments) AS segments
        GROUP BY 1,2
      )

      SELECT
        'PREVIEW' AS action,
        sheet.DV_Advertiser,
        sheet.DV_Campaign,
        CONCAT(dv_io.displayName, ' - ', dv_io.insertionOrderId) as DV_InsertionOrder,
        cm.liDisplayName AS displayName,
        sheet.Type AS lineItemType,
        'ENTITY_STATUS_DRAFT' AS entityStatus,
        CAST(NULL AS INT64) AS bidAmount,
        dv_io.budgetSegmentStartDate,
        dv_io.budgetSegmentEndtDate,
        sheet.Budget_Allocation AS lineItemBudgetAllocationType,
        sheet.Pacing_Period AS pacingPeriod,
        sheet.Pacing_Type AS pacingType,
        sheet.Pacing_Period_Max_Spend AS dailyMaxMicros,
        sheet.Pacing_Period_Max_Impressions AS dailyMaxImpressions,
        sheet.Frequency_Cap_Unlimited AS frequencyCapUnlimited,
        sheet.Frequency_Cap_Time_Unit AS frequencyCapTimeUnit,
        sheet.Frequency_Cap_Time_Unit_Count AS frequencyCapTimeUnitCount,
        sheet.Frequency_Cap_Max_Impressions AS frequencyCapMaxImpressions,
        sheet.Post_View_Count_Percent AS postViewCountPercentageMillis,
        90 AS postViewLookbackWindowDays,
        90 AS postClickLookbackWindowDays,
        sheet.Performance_Goal_Type AS biddingStrategyPerformanceGoalType,
        sheet.Performance_Goal_Amount AS performanceGoalAmountMicros,
        sheet.Max_Average_CPM_Amount AS maxAverageCpmBidAmountMicros,
        sheet.Custom_Bidding_Algorithm,
        dv_li.floodlightActivityIds,
        dv_li.inventorySourceIds,
        CAST(NULL AS STRING) AS Partner_Cost_CPM_Fee_Cost_Type,
        CAST(NULL AS STRING) AS Partner_Cost_CPM_Fee_Invoice_Type,
        CAST(NULL AS STRING) AS Partner_Cost_CPM_Fee_Amount,
        CAST(NULL AS STRING) AS Partner_Cost_Media_Fee_Cost_Type,
        CAST(NULL AS STRING) AS Partner_Cost_Media_Fee_Invoice_Type,
        CAST(NULL AS STRING) AS Partner_Cost_Media_Fee_Percent
      FROM sheet
      LEFT JOIN cm
      ON CAST(REGEXP_EXTRACT(sheet.CM_Campaign, r' - (\d+)$') AS INT64) = cm.campaignId
      AND (
        (SPLIT(cm.name,'_')[OFFSET(0)] = 'VID' AND LOWER(SPLIT(sheet.Type , '_')[SAFE_OFFSET(3)]) = 'video')
        OR (NOT SPLIT(cm.name, '_')[OFFSET(0)] = 'VID' AND LOWER(SPLIT(sheet.Type, '_')[SAFE_OFFSET(3)]) = 'display')
      )
      LEFT JOIN io_flattened dv_io
      ON dv_io.displayName = cm.ioDisplayName
      LEFT JOIN li_flattened dv_li
      ON dv_li.displayName = cm.liDisplayName
    """.format(**task),
                  legacy=False)

    # create audit view
    # Flags rows with any missing required input (ERROR) and line items whose
    # display name already exists under the same campaign/IO (WARNING).
    query_to_view(task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "AUDIT_LI",
                  """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
          'LI Rules' AS Operation,
          'Missing Sheet input value.' AS Error,
          'ERROR' AS Severity,
          CAST(NULL AS STRING) AS DV_Advertiser,
          DV_Campaign,
          CAST(NULL AS STRING) AS DV_InsertionOrder,
          CM_Campaign AS DV_LineItem
        FROM `{dataset}.SHEET_LI_Rules`
        WHERE
          CM_Campaign IS NULL
          OR DV_Campaign IS NULL
          OR Type IS NULL
          OR Budget_Allocation IS NULL
          OR Pacing_Period IS NULL
          OR Pacing_Type IS NULL
          OR Pacing_Period_Max_Spend IS NULL
          OR Pacing_Period_Max_Impressions IS NULL
          OR Frequency_Cap_Unlimited IS NULL
          OR Frequency_Cap_Time_Unit IS NULL
          OR Frequency_Cap_Time_Unit_Count IS NULL
          OR Frequency_Cap_Max_Impressions IS NULL
          OR Post_View_Count_Percent IS NULL
          OR Performance_Goal_Type IS NULL
          OR Performance_Goal_Amount IS NULL
          OR Max_Average_CPM_Amount IS NULL
          OR Custom_Bidding_Algorithm IS NULL
      ),

      /* Check if duplicate LI */
      DUPLICATE_ERRORS AS (
        SELECT
          'LI Rules' AS Operation,
          'Duplicate Line Item.' AS Error,
          'WARNING' AS Severity,
           DV_Advertiser,
           DV_Campaign,
           DV_InsertionOrder,
           DV_R.displayName AS DV_LineItem
        FROM
          `{dataset}.PREVIEW_LI` AS DV_R
        LEFT JOIN (
          SELECT
          advertiserId,
          campaignId,
          insertionOrderId,
          displayName
          FROM `{dataset}.DV_LineItems`
          GROUP BY 1,2,3,4
        ) AS DV_LI
        ON DV_R.displayName = DV_LI.displayName
        AND CAST(REGEXP_EXTRACT(DV_R.DV_Campaign, r' - (\d+)$') AS INT64) = DV_LI.campaignId
        AND CAST(REGEXP_EXTRACT(DV_R.DV_InsertionOrder, r' - (\d+)$') AS INT64) = DV_LI.insertionOrderId
      )

      SELECT * FROM INPUT_ERRORS
      UNION ALL
      SELECT * FROM DUPLICATE_ERRORS
    """.format(**task),
                  legacy=False)

    # write io preview to sheet
    # Joins preview rows to aggregated audit messages; header=False with
    # range A2 preserves the sheet's existing header row.
    put_rows(
        task['auth_sheets'], {
            'sheets': {
                'sheet': task['sheet'],
                'tab': 'LI Preview',
                'header': False,
                'range': 'A2'
            }
        },
        get_rows(
            task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    task['dataset'],
                    'query':
                    """SELECT
          A.Severity,
          A.Error,
          P.*
          FROM `{dataset}.PREVIEW_LI` AS P
          LEFT JOIN (
            SELECT
              DV_Advertiser,
              DV_Campaign,
              DV_InsertionOrder,
              DV_LineItem,
              CASE
                WHEN 'ERROR' IN UNNEST(ARRAY_AGG(Severity)) THEN 'ERROR'
                WHEN 'WARNING' IN UNNEST(ARRAY_AGG(Severity)) THEN 'WARNING'
                ELSE 'OK'
              END AS Severity,
              ARRAY_TO_STRING(ARRAY_AGG(CONCAT(Severity, ': ', Error)), '\\n') AS Error,
            FROM `{dataset}.AUDIT_LI`
            GROUP BY 1,2,3,4
          ) AS A
          ON P.DV_Advertiser=A.DV_Advertiser
          AND P.DV_Campaign=A.DV_Campaign
          AND P.DV_InsertionOrder=A.DV_InsertionOrder
          AND P.displayName=A.DV_LineItem
        """.format(**task),
                }
            }))
Ejemplo n.º 12
0
def bigquery_query():
    """Execute a query and write results to a table, sheet, SFTP, or view.

    The destination is chosen by which key exists in project.task['to']:
      * 'table' -> materialize results into a BigQuery table.
      * 'sheet' -> write results to a Google Sheet tab.
      * 'sftp'  -> push results to an SFTP endpoint via put_rows.
      * otherwise -> create a BigQuery view from the query.

    TODO: Replace with get_rows and put_rows combination.

    See: scripts/bigquery_query.json
         scripts/bigquery_storage.json
         scripts/bigquery_to_sheet.json
         scripts/bigquery_view.json
    """

    def _legacy_sql(from_task):
        """Resolve the legacy SQL flag from any of its historical key names.

        The branches below used to consult different keys ('legacy' and
        'useLegacySql' for table/view, 'legacy' for sheet, 'use_legacy_sql'
        for sftp). Accepting all three in one priority order is a
        backward-compatible superset. Defaults to True when none is set.
        """
        # DEPRECATED key names: 'useLegacySql', 'use_legacy_sql'.
        for key in ('legacy', 'useLegacySql', 'use_legacy_sql'):
            if key in from_task:
                return from_task[key]
        return True

    if 'table' in project.task['to']:
        if project.verbose:
            print('QUERY TO TABLE', project.task['to']['table'])

        query_to_table(
            project.task['auth'],
            project.id,
            project.task['to']['dataset'],
            project.task['to']['table'],
            query_parameters(project.task['from']['query'],
                             project.task['from'].get('parameters')),
            disposition=project.task.get('write_disposition',
                                         'WRITE_TRUNCATE'),
            legacy=_legacy_sql(project.task['from']),
            target_project_id=project.task['to'].get('project_id', project.id))

    elif 'sheet' in project.task['to']:
        if project.verbose:
            print('QUERY TO SHEET', project.task['to']['sheet'])

        rows = query_to_rows(
            project.task['auth'],
            project.id,
            project.task['from']['dataset'],
            query_parameters(project.task['from']['query'],
                             project.task['from'].get('parameters')),
            legacy=_legacy_sql(project.task['from']))

        # makes sure types are correct in sheet
        rows = rows_to_type(rows)

        # the sheet may carry its own auth, falling back to the task auth
        sheet_auth = project.task['to'].get('auth', project.task['auth'])
        sheets_clear(sheet_auth, project.task['to']['sheet'],
                     project.task['to']['tab'],
                     project.task['to'].get('range', 'A2'))
        sheets_write(sheet_auth, project.task['to']['sheet'],
                     project.task['to']['tab'],
                     project.task['to'].get('range', 'A2'), rows)

    elif 'sftp' in project.task['to']:
        if project.verbose:
            print('QUERY TO SFTP')

        rows = query_to_rows(
            project.task['auth'],
            project.id,
            project.task['from']['dataset'],
            query_parameters(project.task['from']['query'],
                             project.task['from'].get('parameters')),
            legacy=_legacy_sql(project.task['from']))

        # best effort: skip the upload entirely when the query returns no rows
        if rows:
            put_rows(project.task['auth'], project.task['to'], rows)

    else:
        if project.verbose:
            print('QUERY TO VIEW', project.task['to']['view'])

        query_to_view(
            project.task['auth'],
            project.id,
            project.task['to']['dataset'],
            project.task['to']['view'],
            query_parameters(project.task['from']['query'],
                             project.task['from'].get('parameters')),
            _legacy_sql(project.task['from']),
            project.task['to'].get('replace', False))
Ejemplo n.º 13
0
def frequency_cap_audit():
    """Audit the 'Frequency Caps' sheet tab and stage patch rows in BigQuery.

    Reads sheet rows into the SHEET_FrequencyCaps table, builds the
    AUDIT_FrequencyCaps view with per-row input errors (Unlimited is mutually
    exclusive with the time-unit fields), then builds PATCH_FrequencyCaps with
    the rows that did not produce an audit ERROR.

    Reads project.task keys: 'auth_sheets', 'auth_bigquery', 'sheet', and
    'dataset'. Returns None; all output is BigQuery side effects.
    """
    # Raw sheet rows, columns A:M starting below the header row.
    rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Frequency Caps",
                "range": "A2:M"
            }
        })

    # Load the sheet rows into BigQuery; schema order must match columns A:M.
    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_FrequencyCaps",
                "schema": [
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "Insertion_Order",
                        "type": "STRING"
                    },
                    {
                        "name": "Line_Item",
                        "type": "STRING"
                    },
                    {
                        "name": "Unlimited",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Unlimited_Edit",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Time_Unit",
                        "type": "STRING"
                    },
                    {
                        "name": "Time_Unit_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Time_Count",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Time_Count_Edit",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Max_impressions",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Max_impressions_Edit",
                        "type": "INTEGER"
                    },
                ],
                "format":
                "CSV"
            }
        }, rows)

    # NOTE(review): the schema declares 'Max_impressions_Edit' (lowercase i)
    # while the SQL below references Max_Impressions_Edit — BigQuery column
    # references are case-insensitive, so this resolves; confirm intended.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_FrequencyCaps",
                  """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Frequency Caps' AS Operation,
            CASE WHEN Unlimited_Edit IS TRUE THEN
              CASE WHEN Time_Unit_Edit IS NOT NULL OR Time_Count_Edit IS NOT NULL OR Max_Impressions_Edit IS NOT NULL THEN 'Time Unit and the Other Options are Mutually Exclusive.'
              ELSE NULL
             END
            ELSE
              IF(Time_Unit_Edit IS NULL OR Time_Count_Edit IS NULL OR Max_Impressions_Edit IS NULL, 'If Time Unit is FALSE, the other options are required.', NULL)
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_FrequencyCaps`
        )
        WHERE
          Error IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      ;
    """.format(**project.task),
                  legacy=False)

    # NOTE(review): NOT IN against a subquery containing a NULL Id matches no
    # rows in BigQuery, and NULL Line_Item/Insertion_Order/Campaign values on
    # the left also drop rows — confirm every sheet row has these populated.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_FrequencyCaps",
                  """SELECT *
      FROM `{dataset}.SHEET_FrequencyCaps`
      WHERE Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
      AND Campaign NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Ejemplo n.º 14
0
def line_item_audit():
  """Audit Line Item sheet edits and stage insert/patch views in BigQuery.

  Reads the 'Line Items' tab from the sheet, loads it into the
  SHEET_LineItems table, then builds three views:
    * INSERT_LineItems - API-shaped bodies for line items marked INSERT
      that do not already exist in DV360.
    * AUDIT_LineItems  - input errors and duplicate-name warnings.
    * PATCH_LineItems  - sheet rows that passed the audit (no ERROR).

  Relies on the global project.task for auth, sheet, and dataset values.
  """

  # Pull the raw sheet rows; A2:Z skips the header row.
  rows = get_rows(
    project.task["auth_sheets"],
    { "sheets": {
      "sheet": project.task["sheet"],
      "tab": "Line Items",
      "range": "A2:Z"
    }}
  )

  # Stage the sheet into BigQuery with an explicit schema so the SQL
  # below can reference typed columns.
  put_rows(
    project.task["auth_bigquery"],
    { "bigquery": {
      "dataset": project.task["dataset"],
      "table": "SHEET_LineItems",
      "schema": [
        { "name": "Partner", "type": "STRING" },
        { "name": "Advertiser", "type": "STRING" },
        { "name": "Campaign", "type": "STRING" },
        { "name": "Insertion_Order", "type": "STRING" },
        { "name": "Line_Item", "type": "STRING" },
        { "name": "Action", "type": "STRING" },
        { "name": "Status", "type": "STRING" },
        { "name": "Status_Edit", "type": "STRING" },
        { "name": "Warning", "type": "STRING" },
        { "name": "Line_Item_Type", "type": "STRING" },
        { "name": "Line_Item_Type_Edit", "type": "STRING" },
        { "name": "Flight_Data_Type", "type": "STRING" },
        { "name": "Flight_Data_Type_Edit", "type": "STRING" },
        { "name": "Flight_Start_Date", "type": "STRING" },
        { "name": "Flight_Start_Date_Edit", "type": "STRING" },
        { "name": "Flight_End_Date", "type": "STRING" },
        { "name": "Flight_End_Date_Edit", "type": "STRING" },
        { "name": "Flight_Trigger", "type": "STRING" },
        { "name": "Flight_Trigger_Edit", "type": "STRING" },
        { "name": "Budget_Allocation_Type", "type": "STRING" },
        { "name": "Budget_Allocation_Type_Edit", "type": "STRING" },
        { "name": "Budget_Unit", "type": "STRING" },
        { "name": "Budget_Unit_Edit", "type": "STRING" },
        { "name": "Budget_Max", "type": "FLOAT" },
        { "name": "Budget_Max_Edit", "type": "FLOAT" },
        { "name": "Partner_Revenue_Model", "type": "STRING" },
        { "name": "Partner_Revenue_Model_Edit", "type": "STRING" },
        { "name": "Partner_Markup", "type": "FLOAT" },
        { "name": "Partner_Markup_Edit", "type": "FLOAT" },
        { "name": "Conversion_Percent", "type": "FLOAT" },
        { "name": "Conversion_Percent_Edit", "type": "FLOAT" },
        { "name": "Targeting_Expansion_Level", "type": "STRING" },
        { "name": "Targeting_Expansion_Level_Edit", "type": "STRING" },
        { "name": "Exclude_1P", "type": "STRING" },
        { "name": "Exclude_1P_Edit", "type": "STRING" },
      ],
      "format": "CSV"
    }},
    rows
  )

  # Create Insert View
  # Raw string (r""") keeps the \d regex escapes literal for BigQuery
  # without triggering Python's invalid-escape-sequence warning.
  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "INSERT_LineItems",
    r"""SELECT
      REGEXP_EXTRACT(S_LI.Advertiser, r' - (\d+)$') AS advertiserId,
      REGEXP_EXTRACT(S_LI.Campaign, r' - (\d+)$') AS campaignId,
      REGEXP_EXTRACT(S_LI.Insertion_Order, r' - (\d+)$') AS insertionOrderId,
      S_LI.Line_Item AS displayName,
      S_LI.Line_Item_Type_Edit AS lineItemType,
      S_LI.Status_Edit AS entityStatus,
      STRUCT(
        S_PC.Cost_Type_Edit As costType,
        S_PC.Fee_Type_Edit As feeType,
        S_PC.Invoice_Type_Edit AS invoiceType,
        S_PC.Fee_Amount_Edit AS feeAmount,
        S_PC.Fee_Percent_Edit * 1000 AS feePercentageMillis
      ) AS partnerCosts,
      STRUCT(
        S_LI.Flight_Data_Type_Edit AS flightDateType,
        STRUCT (
          STRUCT (
            EXTRACT(YEAR FROM CAST(S_LI.Flight_Start_Date_Edit AS Date)) AS year,
            EXTRACT(MONTH FROM CAST(S_LI.Flight_Start_Date_Edit AS DATE)) AS month,
            EXTRACT(DAY FROM CAST(S_LI.Flight_Start_Date_Edit AS DATE)) AS day
          ) AS startDate,
          STRUCT (
            EXTRACT(YEAR FROM CAST(S_LI.Flight_End_Date_Edit AS Date)) AS year,
            EXTRACT(MONTH FROM CAST(S_LI.Flight_End_Date_Edit AS DATE)) AS month,
            EXTRACT(DAY FROM CAST(S_LI.Flight_End_Date_Edit AS DATE)) AS day
          ) AS endDate
        ) AS dateRange,
        S_LI.Flight_Trigger_Edit AS triggerId
      ) AS flight,
      STRUCT(
        S_LI.Budget_Allocation_Type_Edit AS budgetAllocationType,
        S_LI.Budget_Unit_Edit AS budgetUnit,
        S_LI.Budget_Max_Edit * 100000 AS maxAmount
      ) AS budget,
      STRUCT(
        S_P.Period_Edit As pacingPeriod,
        S_P.Type_Edit As pacingType,
        S_P.Daily_Budget_Edit AS dailyMaxMicros,
        S_P.Daily_Impressions_Edit AS dailyMaxImpressions
      ) AS pacing,
      STRUCT(
        S_FC.Unlimited_Edit AS unlimited,
        S_FC.Time_Unit_Edit AS timeUnit,
        S_FC.Time_Count_Edit AS timeUnitCount,
        S_FC.Max_impressions_Edit AS maxImpressions
      ) AS frequencyCap,
      STRUCT(
        S_LI.Partner_Revenue_Model_Edit AS markupType,
        S_LI.Partner_Markup_Edit * 100000 AS markupAmount
      ) AS partnerRevenueModel,
      STRUCT(
        S_LI. Conversion_Percent_Edit * 1000 AS postViewCountPercentageMillis,
        [] AS floodlightActivityConfigs
      ) AS conversionCounting,
      STRUCT(
        IF(S_BS.Fixed_Bid_Edit IS NOT NULL,
          STRUCT(
            S_BS.Fixed_Bid_Edit * 100000 AS bidAmountMicros
          ),
          NULL
        ) AS fixedBid,
        IF(S_BS.Auto_Bid_Goal_Edit IS NOT NULL,
          STRUCT(
            S_BS.Auto_Bid_Goal_Edit AS performanceGoalType,
            S_BS.Auto_Bid_Amount_Edit * 100000 AS maxAverageCpmBidAmountMicros,
            S_BS.Auto_Bid_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS maximizeSpendAutoBid,
        IF(S_BS.Performance_Goal_Type_Edit IS NOT NULL,
          STRUCT(
            S_BS.Performance_Goal_Type_Edit AS performanceGoalType,
            S_BS.Performance_Goal_Amount_Edit * 100000 AS performanceGoalAmountMicros,
            S_BS.Performance_Goal_Average_CPM_Bid_Edit * 100000 AS maxAverageCpmBidAmountMicros,
            S_BS.Performance_Goal_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS performanceGoalAutoBid
      )
      AS bidStrategy,
      STRUCT(
        S_ID.Integration_Code_Edit AS integrationCode,
        S_ID.Details_Edit AS details
      ) AS integrationDetails,
      STRUCT(
        S_LI.Targeting_Expansion_Level_Edit AS targetingExpansionLevel,
        S_LI.Exclude_1P_Edit AS excludeFirstPartyAudience
      ) AS targetingExpansion
    FROM `{dataset}.SHEET_LineItems` AS S_LI
    LEFT JOIN `{dataset}.SHEET_PartnerCosts` AS S_PC ON S_LI.Line_Item=S_PC.Line_Item
    LEFT JOIN `{dataset}.SHEET_Pacing` AS S_P ON S_LI.Line_Item=S_P.Line_Item
    LEFT JOIN `{dataset}.SHEET_FrequencyCaps` AS S_FC ON S_LI.Line_Item=S_FC.Line_Item
    LEFT JOIN `{dataset}.SHEET_IntegrationDetails` AS S_ID ON S_LI.Line_Item=S_ID.Line_Item
    LEFT JOIN `{dataset}.SHEET_BidStrategy` AS S_BS ON S_LI.Line_Item=S_BS.Line_Item
    LEFT JOIN `{dataset}.DV_LineItems` AS DV_LI ON S_LI.Line_Item=DV_LI.displayName
    WHERE S_LI.Action="INSERT"
    AND DV_LI IS NULL
    """.format(**project.task),
    legacy=False
  )

  # Create Audit View: required-field errors plus duplicate-name warnings.
  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "AUDIT_LineItems",
    """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Line Item' AS Operation,
            CASE
              WHEN Budget_Allocation_Type_Edit IS NULL THEN 'Missing Budget Allocation Type.'
              WHEN Budget_Unit_Edit IS NULL THEN 'Missing Budget Unit.'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_LineItems`
        )
        WHERE
          Error IS NOT NULL
      ),
      /* Check duplicate inserts */
      DUPLICATE_ERRORS AS (
        SELECT
          'Line Item' AS Operation,
          'Duplicate Line Item name, insert will be ignored.' AS Error,
          'WARNING' AS Severity,
          COALESCE(S_LI.Line_Item, 'BLANK') AS Id
        FROM `{dataset}.SHEET_LineItems` As S_LI
        LEFT JOIN `{dataset}.DV_LineItems` AS DV_LI ON S_LI.Line_Item=DV_LI.displayName
        WHERE S_LI.Action="INSERT"
        AND DV_LI IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      UNION ALL
      SELECT * FROM DUPLICATE_ERRORS
    """.format(**project.task),
    legacy=False
  )

  # Create Patch View: only sheet rows that produced no ERROR severity.
  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "PATCH_LineItems",
    """SELECT *
      FROM `{dataset}.SHEET_LineItems`
      WHERE Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_LineItems` WHERE Severity='ERROR')
    """.format(**project.task),
    legacy=False
  )
Ejemplo n.º 15
0
def bigquery():
    """Dispatch a BigQuery task based on the project.task configuration.

    Routing, in order of precedence:
      * 'run' with a 'query': execute the query with no destination.
      * 'from' with 'values': write literal rows to a table.
      * 'from' with 'query': run the query and deliver the results to a
        table, storage object, sheet, sftp endpoint, or view depending on
        which key appears under 'to'.
      * otherwise: load a storage object (bucket:path) into a table.

    Relies on the global project for auth, task parameters, and verbosity.
    """

    if 'run' in project.task and 'query' in project.task.get('run', {}):
        if project.verbose: print("QUERY", project.task['run']['query'])
        run_query(
            project.task['auth'],
            project.id,
            project.task['run']['query'],
            project.task['run'].get('legacy', True),
            #project.task['run'].get('billing_project_id', None)
        )

    elif 'values' in project.task['from']:
        rows = get_rows(project.task['auth'], project.task['from'])

        rows_to_table(project.task['to'].get('auth', project.task['auth']),
                      project.id, project.task['to']['dataset'],
                      project.task['to']['table'], rows,
                      project.task.get('schema', []), 0)

    elif 'query' in project.task['from']:
        if 'table' in project.task['to']:
            if project.verbose:
                print("QUERY TO TABLE", project.task['to']['table'])

            # Optional setup statement executed before the main query.
            if 'pre_process_query' in project.task['to']:
                print('executing statement')
                execute_statement(project.task['auth'],
                                  project.id,
                                  project.task['to']['dataset'],
                                  project.task['to']['pre_process_query'],
                                  use_legacy_sql=project.task['from'].get(
                                      'legacy', project.task['from'].get(
                                          'useLegacySql', True)))
            query_to_table(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['table'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                disposition=project.task['write_disposition']
                if 'write_disposition' in project.task else 'WRITE_TRUNCATE',
                legacy=project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql,
                target_project_id=project.task['to'].get(
                    'project_id', project.id))
        # NOT USED SO RIPPING IT OUT
        # Mauriciod: Yes, it is used, look at project/mauriciod/target_winrate.json
        elif 'storage' in project.task['to']:
            if project.verbose:
                print("QUERY TO STORAGE", project.task['to']['storage'])
            local_file_name = '/tmp/%s' % str(uuid.uuid1())
            rows = query_to_rows(project.task['auth'], project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'])

            # csv.writer requires a text-mode handle in Python 3 (writing
            # str to a 'wb' handle raises TypeError); newline='' prevents
            # the csv module from doubling line endings on Windows.
            with open(local_file_name, 'w', newline='') as f:
                csv.writer(f).writerows(rows)

            # Upload as binary; with-blocks guarantee the handles close
            # even if the upload raises.
            with open(local_file_name, 'rb') as f:
                object_put(project.task['auth'], project.task['to']['storage'],
                           f)

            os.remove(local_file_name)
        elif 'sheet' in project.task['to']:
            if project.verbose:
                print("QUERY TO SHEET", project.task['to']['sheet'])
            rows = query_to_rows(project.task['auth'],
                                 project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'],
                                 legacy=project.task['from'].get(
                                     'legacy', True))

            # makes sure types are correct in sheet
            rows = rows_to_type(rows)

            sheets_clear(project.task['auth'], project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'))
            sheets_write(project.task['auth'], project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'), rows)
        elif 'sftp' in project.task['to']:
            rows = query_to_rows(project.task['auth'],
                                 project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'],
                                 legacy=project.task['from'].get(
                                     'use_legacy_sql', True))

            if rows:
                if project.verbose: print("QUERY TO SFTP")
                put_rows(project.task['auth'], project.task['to'], rows)
        else:
            if project.verbose:
                print("QUERY TO VIEW", project.task['to']['view'])
            query_to_view(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['view'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql
                project.task['to'].get('replace', False))
    else:
        if project.verbose:
            print("STORAGE TO TABLE", project.task['to']['table'])
        storage_to_table(
            project.task['auth'], project.id, project.task['to']['dataset'],
            project.task['to']['table'], project.task['from']['bucket'] + ':' +
            project.task['from']['path'], project.task.get('schema', []),
            project.task.get('skip_rows', 1),
            project.task.get('structure', 'CSV'),
            project.task.get('disposition', 'WRITE_TRUNCATE'))
Ejemplo n.º 16
0
def preview_io_insert(config, task):
    """Insert DV360 Insertion Orders from the 'IO Preview' sheet tab.

    Loads the preview rows into the SHEET_IO_Inserts table, builds the
    INSERT_IO view containing API-shaped request bodies for rows marked
    INSERT, then calls the DV360 API once per row and logs the outcome.

    Args:
      config: project configuration object (provides BigQuery project id).
      task: task dictionary with auth handles, sheet, and dataset values.
    """

    # download IO Inserts
    put_rows(
        config, task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "table":
                "SHEET_IO_Inserts",
                "schema": [{
                    "name": "status",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "error",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "action",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "advertiser",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "campaign",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "displayName",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "pacingPeriod",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "pacingType",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "dailyMaxMicros",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "dailyMaxImpressions",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "frequencyCapUnlimited",
                    "type": "BOOLEAN",
                    "mode": "NULLABLE"
                }, {
                    "name": "frequencyCapTimeUnit",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "frequencyCapTimeUnitCount",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "frequencyCapMaxImpressions",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "performanceGoalType",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "performanceGoalAmount",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "performanceGoalPercentageMicros",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "performanceGoalString",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "budgetUnit",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "budgetAutomationType",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "budgetSegmentAmount",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "budgetSegmentDescription",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "budgetSegmentStartDate",
                    "type": "DATE",
                    "mode": "NULLABLE"
                }, {
                    "name": "budgetSegmentEndDate",
                    "type": "DATE",
                    "mode": "NULLABLE"
                }, {
                    "name": "budgetSegmentCampaignBudgetId",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "biddingStrategyFixedBid",
                    "type": "INTEGER",
                    "mode": "NULLABLE"
                }, {
                    "name": "integrationCode",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }, {
                    "name": "integrationDetails",
                    "type": "STRING",
                    "mode": "NULLABLE"
                }],
                "format":
                "CSV"
            }
        },
        get_rows(
            config, task["auth_sheets"], {
                "sheets": {
                    "sheet": task["sheet"],
                    "tab": "IO Preview",
                    "header": False,
                    "range": "A2:AC"
                }
            }))

    # create insert view
    # Raw string (r""") keeps the \d regex escapes literal for BigQuery
    # without triggering Python's invalid-escape-sequence warning.
    query_to_view(config,
                  task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "INSERT_IO",
                  r"""
      SELECT
        REGEXP_EXTRACT(advertiser, r' - (\d+)$') AS advertiserId,
        STRUCT(
          REGEXP_EXTRACT(advertiser, r' - (\d+)$') AS advertiserId,
          REGEXP_EXTRACT(campaign, r' - (\d+)$') AS campaignId,
          displayName,
          'ENTITY_STATUS_DRAFT' AS entityStatus,
          STRUCT(
            pacingPeriod,
            pacingType,
            IF(dailyMaxMicros IS NOT NULL,  dailyMaxMicros * 1000000, NULL) AS dailyMaxMicros,
            IF(dailyMaxMicros IS NULL, dailyMaxImpressions, NULL) AS dailyMaxImpressions
          ) AS pacing,
          STRUCT(
            CAST(frequencyCapUnlimited AS bool) AS unlimited,
            frequencyCapTimeUnit AS timeUnit,
            frequencyCapTimeUnitCount AS timeUnitCount,
            frequencyCapMaxImpressions AS maxImpressions
          ) AS frequencyCap,
          STRUCT(
            performanceGoalType,
            CAST(NULLIF(CAST(performanceGoalAmount AS INT64) * 1000000, 0) AS STRING) AS performanceGoalAmountMicros
          ) AS performanceGoal,
          STRUCT(
            budgetUnit,
            budgetAutomationType AS automationType,
            [
              STRUCT(
                IF(
                  budgetSegmentAmount IS NOT NULL,
                  budgetSegmentAmount * 1000000,
                  NULL
                ) AS budgetAmountMicros,
                budgetSegmentDescription AS description,
                STRUCT(
                  STRUCT(
                    EXTRACT(YEAR FROM budgetSegmentStartDate) AS year,
                    EXTRACT(MONTH FROM budgetSegmentStartDate) AS month,
                    EXTRACT(DAY FROM budgetSegmentStartDate) AS day
                  ) AS startDate,
                  STRUCT(
                    EXTRACT(YEAR FROM budgetSegmentEndDate) AS year,
                    EXTRACT(MONTH FROM budgetSegmentEndDate) AS month,
                    EXTRACT(DAY FROM budgetSegmentEndDate) AS day
                  ) AS endDate
                ) AS dateRange,
                budgetSegmentCampaignBudgetId AS campaignBudgetId
              )
            ] AS budgetSegments
          ) AS budget,
          STRUCT(
            STRUCT(
              NULLIF(biddingStrategyFixedBid * 1000000, 0) AS bidAmountMicros
            ) AS fixedBid
          ) AS bidStrategy
        ) AS body
      FROM `{dataset}.SHEET_IO_Inserts`
      WHERE action = 'INSERT'
    """.format(**task),
                  legacy=False)

    # write IOs to API
    # Each view row maps directly onto the insertionOrders().create kwargs;
    # success and failure are both recorded via log_write.
    for row in get_rows(
            config,
            task["auth_bigquery"],
        {"bigquery": {
            "dataset": task["dataset"],
            "table": "INSERT_IO",
        }},
            as_object=True):
        try:
            response = API_DV360(
                config,
                task['auth_dv']).advertisers().insertionOrders().create(
                    **row).execute()
            log_write('IO', row, response['insertionOrderId'], None)
        except Exception as e:
            log_write('IO', row, None, str(e))

    log_write(config)
Ejemplo n.º 17
0
def preview_io_load(config, task):
    """Build the Insertion Order preview and audit from the 'IO Rules' tab.

    Clears any previous preview, loads the rules sheet into the
    SHEET_IO_Rules table, creates the PREVIEW_IO view (rules joined with
    CM placement data), creates the AUDIT_IO view (input, duplicate, and
    budget-segment checks), and writes the annotated preview back to the
    'IO Preview' sheet tab.

    Args:
      config: project configuration object (provides BigQuery project id).
      task: task dictionary with auth handles, sheet, and dataset values.
    """

    preview_io_clear(config, task)

    # download IO rules
    put_rows(
        config, task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                task["dataset"],
                "table":
                "SHEET_IO_Rules",
                "schema": [
                    {
                        "name": "CM_Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "DV_Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "Pacing_Period",
                        "type": "STRING"
                    },
                    {
                        "name": "Pacing_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Frequency_Cap_Unlimited",
                        "type": "BOOLEAN"
                    },
                    {
                        "name": "Frequency_Cap_Time_Unit",
                        "type": "STRING"
                    },
                    {
                        "name": "Frequency_Cap_Time_Unit_Count",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Frequency_Cap_Max_Impressions",
                        "type": "INTEGER"
                    },
                    {
                        "name": "Performance_Goal_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget_Unit",
                        "type": "STRING"
                    },
                    {
                        "name": "Budget_Automation_Type",
                        "type": "STRING"
                    },
                ],
                "format":
                "CSV"
            }
        },
        get_rows(
            config, task["auth_sheets"], {
                "sheets": {
                    "sheet": task["sheet"],
                    "tab": "IO Rules",
                    "header": False,
                    "range": "A2:K"
                }
            }))

    # create IO preview (main logic)
    # Raw string (r""") keeps the \d regex escapes literal for BigQuery
    # without triggering Python's invalid-escape-sequence warning.
    query_to_view(config,
                  task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "PREVIEW_IO",
                  r"""WITH
      cm AS (
        SELECT
          CM_C.id AS campaignId,
          NULLIF(CAST(CM_PP.rateOrCostNanos / 1000000000 AS INT64), 0) AS performanceGoalAmount,
          CM_PG.pricingSchedule.startDate AS budgetSegmentStartDate,
          CM_PG.pricingSchedule.endDate AS budgetSegmentEndDate,
          CM_PG.name AS displayName
        FROM `{dataset}.CM_Advertisers` AS CM_A
        LEFT JOIN `{dataset}.CM_Campaigns` AS CM_C
        ON CM_A.Id = CM_C.advertiserId
        LEFT JOIN `{dataset}.CM_Sites` AS CM_S
        ON CM_S.name = 'Google DBM'
        LEFT JOIN `{dataset}.CM_PlacementGroups` CM_PG
        ON CM_PG.advertiserId = CM_C.advertiserId
        AND CM_PG.campaignId = CM_C.Id
        AND CM_PG.siteId = CM_S.id,
        UNNEST(pricingSchedule.pricingPeriods) AS CM_PP
      )

      SELECT
        'PREVIEW' AS action,
        CONCAT(dv_a.displayName, ' - ', dv_a.advertiserId) as DV_Advertiser,
        sheet.DV_Campaign AS DV_Campaign,
        cm.displayName,
        sheet.Pacing_Period AS pacingPeriod,
        sheet.Pacing_Type AS pacingType,
        CAST(NULL AS INT64) AS dailyMaxMicros,
        CAST(NULL AS INT64) dailyMaxImpressions,
        sheet.Frequency_Cap_Unlimited AS frequencyCapUnlimited,
        sheet.Frequency_Cap_Time_Unit AS frequencyCapTimeUnit,
        sheet.Frequency_Cap_Time_Unit_Count AS frequencyCapTimeUnitCount,
        sheet.Frequency_Cap_Max_Impressions AS frequencyCapMaxImpressions,
        sheet.Performance_Goal_Type AS performanceGoalType,
        cm.performanceGoalAmount AS performanceGoalAmount,
        CAST(NULL AS INT64) AS performanceGoalPercentageMicros,
        CAST(NULL AS STRING) AS performanceGoalString,
        sheet.Budget_Unit AS budgetUnit,
        CAST(NULL AS STRING) AS budgetAutomationType,
        CAST(NULL AS INT64) AS budgetSegmentAmount,
        CAST(NULL AS STRING) AS budgetSegmentDescription,
        cm.budgetSegmentStartDate AS budgetSegmentStartDate,
        cm.budgetSegmentEndDate AS budgetSegmentEndDate,
        CAST(NULL AS INT64) AS budgetSegmentCampaignBudgetId,
        0 AS biddingStrategyFixedBid,
        CAST(NULL AS STRING) AS integrationCode,
        CAST(NULL AS STRING) AS integrationDetails
      FROM `{dataset}.SHEET_IO_Rules` as sheet
      LEFT JOIN cm
      ON CAST(REGEXP_EXTRACT(sheet.CM_Campaign, r' - (\d+)$') AS INT64) = cm.campaignId
      LEFT JOIN `{dataset}.DV_Campaigns` AS dv_c
      ON CAST(REGEXP_EXTRACT(sheet.DV_Campaign, r' - (\d+)$') AS INT64) = dv_c.campaignId
      LEFT JOIN `{dataset}.DV_Advertisers` AS dv_a
      ON dv_c.advertiserId = dv_a.advertiserId
    """.format(**task),
                  legacy=False)

    # create audits
    # DUPLICATE_ERRORS now filters to rows that actually matched an
    # existing DV insertion order; without the IS NOT NULL check the
    # LEFT JOIN flagged every preview row as a duplicate (compare the
    # equivalent check in the line item audit).
    query_to_view(config,
                  task["auth_bigquery"],
                  config.project,
                  task["dataset"],
                  "AUDIT_IO",
                  """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
          'IO Rules' AS Operation,
          'Missing Sheet input value.' AS Error,
          'ERROR' AS Severity,
          DV_Advertiser,
          DV_Campaign,
          displayName AS DV_InsertionOrder,
          CAST(NULL AS STRING) AS DV_LineItem
        FROM `{dataset}.PREVIEW_IO`
        WHERE
          displayName IS NULL
          OR pacingPeriod IS NULL
          OR pacingType IS NULL
          OR (dailyMaxMicros IS NULL AND dailyMaxImpressions IS NULL)
          OR frequencyCapUnlimited IS NULL
          OR frequencyCapTimeUnitCount IS NULL
          OR frequencyCapMaxImpressions IS NULL
          OR performanceGoalType IS NULL
          OR (performanceGoalAmount IS NULL AND performanceGoalPercentageMicros IS NULL)
          OR performanceGoalString IS NULL
          OR budgetUnit IS NULL
          OR budgetAutomationType IS NULL
          OR budgetSegmentAmount IS NULL
          OR budgetSegmentStartDate IS NULL
          OR budgetSegmentEndDate IS NULL
          OR biddingStrategyFixedBid IS NULL
      ),

      /* Check if duplicate IO */
      DUPLICATE_ERRORS AS (
        SELECT
          'IO Rules' AS Operation,
          'Duplicate Insertion Order.' AS Error,
          'WARNING' AS Severity,
            DV_R.DV_Advertiser AS DV_Advertiser,
            DV_R.DV_Campaign AS DV_Campaign,
            DV_R.displayName AS DV_InsertionOrder,
            CAST(NULL AS STRING) AS DV_LineItem
        FROM
          `{dataset}.PREVIEW_IO` AS DV_R
        LEFT JOIN (
          SELECT DISTINCT(displayName)
          FROM `{dataset}.DV_InsertionOrders` ) AS DV_IO
        ON DV_R.displayName = DV_IO.displayName
        WHERE DV_IO.displayName IS NOT NULL
      ),

      /* Check if budget segments are current */
      SEGMENT_ERRORS AS (
        SELECT *
        FROM (
          SELECT
            'IO Rules' AS Operation,
            CASE
              WHEN CAST(budgetSegmentAmount AS INT64) > 1 THEN 'Segment has excessive spend.'
              WHEN budgetSegmentStartDate IS NULL THEN 'Segment missing start date.'
              WHEN budgetSegmentEndDate IS NULL THEN 'Segment missing end date.'
              WHEN CAST(budgetSegmentStartDate AS DATE) < CURRENT_DATE() THEN 'Segment starts in the past.'
              WHEN CAST(budgetSegmentEndDate AS DATE) < CURRENT_DATE() THEN 'Segment ends in the past.'
            ELSE
            NULL
          END
            AS Error,
            'ERROR' AS Severity,
            DV_Advertiser,
            DV_Campaign,
            displayName AS DV_InsertionOrder,
            CAST(NULL AS STRING) AS DV_LineItem
          FROM `{dataset}.PREVIEW_IO`
        )
        WHERE Error IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      UNION ALL
      SELECT * FROM DUPLICATE_ERRORS
      UNION ALL
      SELECT * FROM SEGMENT_ERRORS

    """.format(**task),
                  legacy=False)

    # write io preview to sheet with audits
    put_rows(
        config, task['auth_sheets'], {
            'sheets': {
                'sheet': task['sheet'],
                'tab': 'IO Preview',
                'header': False,
                'range': 'A2'
            }
        },
        get_rows(
            config, task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    task['dataset'],
                    'query':
                    """SELECT
          A.Severity,
          A.Error,
          P.*
          FROM `{dataset}.PREVIEW_IO` AS P
          LEFT JOIN (
            SELECT
              DV_Advertiser,
              DV_Campaign,
              DV_InsertionOrder,
              DV_LineItem,
              CASE
                WHEN 'ERROR' IN UNNEST(ARRAY_AGG(Severity)) THEN 'ERROR'
                WHEN 'WARNING' IN UNNEST(ARRAY_AGG(Severity)) THEN 'WARNING'
                ELSE 'OK'
              END AS Severity,
              ARRAY_TO_STRING(ARRAY_AGG(CONCAT(Severity, ': ', Error)), '\\n') AS Error,
            FROM `{dataset}.AUDIT_IO`
            GROUP BY 1,2,3,4
          ) AS A
          ON P.DV_Advertiser=A.DV_Advertiser
          AND P.DV_Campaign=A.DV_Campaign
          AND P.displayName=A.DV_InsertionOrder
        """.format(**task),
                }
            }))
Ejemplo n.º 18
0
def bigquery() -> None:
    """Dispatch a BigQuery task based on the keys present in project.task.

    Task shapes, checked in order:
      * 'function'           -- run a named built-in SQL generator (only
                                pearson_significance_test is handled here).
      * 'run' + 'query'      -- execute a query for its side effects only.
      * 'from' + 'values'    -- write literal rows into a table.
      * 'from' + 'query'     -- run a query and route the result to a table,
                                sheet, SFTP destination, or view ('to' keys).
      * otherwise            -- load a Cloud Storage object into a table.

    All configuration is read from the module-level `project` object; results
    are written to the destination described by project.task['to'].
    """

    if 'function' in project.task:
        query = None

        # Only one generator function is recognized; unknown names are a no-op.
        if project.task['function'] == 'pearson_significance_test':
            query = pearson_significance_test()

        if query:
            # Fourth argument False = standard SQL for generated queries.
            run_query(project.task['auth'], project.id, query, False,
                      project.task['to']['dataset'])

    elif 'run' in project.task and 'query' in project.task.get('run', {}):
        if project.verbose:
            print('QUERY', project.task['run']['query'])
        # Fire-and-forget: the query's own DML/DDL performs the writes.
        run_query(
            project.task['auth'],
            project.id,
            query_parameters(project.task['run']['query'],
                             project.task['run'].get('parameters')),
            project.task['run'].get('legacy', True),
        )

    elif 'values' in project.task['from']:
        rows = get_rows(project.task['auth'], project.task['from'])

        # Trailing 0 = skip no header rows; empty schema lets BQ auto-detect.
        rows_to_table(project.task['to'].get('auth', project.task['auth']),
                      project.id, project.task['to']['dataset'],
                      project.task['to']['table'], rows,
                      project.task.get('schema', []), 0)

    elif 'query' in project.task['from']:

        if 'table' in project.task['to']:
            if project.verbose:
                print('QUERY TO TABLE', project.task['to']['table'])

            query_to_table(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['table'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                disposition=project.task['write_disposition']
                if 'write_disposition' in project.task else 'WRITE_TRUNCATE',
                legacy=project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql,
                target_project_id=project.task['to'].get(
                    'project_id', project.id))

        elif 'sheet' in project.task['to']:
            if project.verbose:
                print('QUERY TO SHEET', project.task['to']['sheet'])
            rows = query_to_rows(
                project.task['auth'],
                project.id,
                project.task['from']['dataset'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                legacy=project.task['from'].get('legacy', True))

            # makes sure types are correct in sheet
            rows = rows_to_type(rows)

            # Clear the target range first so stale rows do not linger below
            # a shorter result set.
            sheets_clear(project.task['to'].get('auth', project.task['auth']),
                         project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'))
            sheets_write(project.task['to'].get('auth', project.task['auth']),
                         project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'), rows)

        elif 'sftp' in project.task['to']:
            # NOTE(review): this branch reads 'use_legacy_sql' while the others
            # read 'legacy'/'useLegacySql' — appears intentional but confirm.
            rows = query_to_rows(
                project.task['auth'],
                project.id,
                project.task['from']['dataset'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                legacy=project.task['from'].get('use_legacy_sql', True))

            if rows:
                if project.verbose:
                    print('QUERY TO SFTP')
                put_rows(project.task['auth'], project.task['to'], rows)

        else:
            # Default query destination: a view named by project.task['to'].
            if project.verbose:
                print('QUERY TO VIEW', project.task['to']['view'])
            query_to_view(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['view'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql
                project.task['to'].get('replace', False))

    else:
        # No query and no values: treat 'from' as a Cloud Storage object.
        if project.verbose:
            print('STORAGE TO TABLE', project.task['to']['table'])
        storage_to_table(
            project.task['auth'], project.id, project.task['to']['dataset'],
            project.task['to']['table'], project.task['from']['bucket'] + ':' +
            project.task['from']['path'], project.task.get('schema', []),
            project.task.get('skip_rows', 1),
            project.task.get('structure', 'CSV'),
            project.task.get('disposition', 'WRITE_TRUNCATE'))
Ejemplo n.º 19
0
def bid_strategy_audit():
    """Audit the 'Bid Strategy' sheet tab and stage patch candidates in BigQuery.

    Steps:
      1. Read rows from the 'Bid Strategy' tab of the sheet in project.task.
      2. Write them to the {dataset}.SHEET_BidStrategy table.
      3. Create AUDIT_BidStrategy flagging rows where mutually exclusive bid
         fields (fixed bid / auto bid goal / bid algorithm) were all edited.
      4. Create PATCH_BidStrategy selecting rows whose ids parse and which have
         no ERROR severity audit entries.

    Uses module-level globals: project, get_rows, put_rows, query_to_view.
    """

    # Pull the editable rows; row 1 is the header so the range starts at A2.
    rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Bid Strategy",
                "range": "A2:Z"
            }
        })

    # Each editable field appears twice: current value and its "_Edit" pair.
    schema = [{"name": name, "type": kind} for name, kind in (
        ("Partner", "STRING"),
        ("Advertiser", "STRING"),
        ("Campaign", "STRING"),
        ("Insertion_Order", "STRING"),
        ("Line_Item", "STRING"),
        ("Fixed_Bid", "FLOAT"),
        ("Fixed_Bid_Edit", "FLOAT"),
        ("Auto_Bid_Goal", "STRING"),
        ("Auto_Bid_Goal_Edit", "STRING"),
        ("Auto_Bid_Amount", "FLOAT"),
        ("Auto_Bid_Amount_Edit", "FLOAT"),
        ("Auto_Bid_Algorithm", "STRING"),
        ("Auto_Bid_Algorithm_Edit", "STRING"),
        ("Performance_Goal_Type", "STRING"),
        ("Performance_Goal_Type_Edit", "STRING"),
        ("Performance_Goal_Amount", "FLOAT"),
        ("Performance_Goal_Amount_Edit", "FLOAT"),
        ("Performance_Goal_Average_CPM_Bid", "FLOAT"),
        ("Performance_Goal_Average_CPM_Bid_Edit", "FLOAT"),
        ("Performance_Goal_Algorithm", "STRING"),
        ("Performance_Goal_Algorithm_Edit", "STRING"),
    )]

    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset": project.task["dataset"],
                "table": "SHEET_BidStrategy",
                "schema": schema,
                "format": "CSV"
            }
        }, rows)

    # Audit view: flag rows where conflicting bid strategy edits coexist.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_BidStrategy",
                  """WITH
        /* Check if sheet values are set */ INPUT_ERRORS AS (
        SELECT
          *
        FROM (
          SELECT
            'Bid Strategy' AS Operation,
            CASE
              WHEN Insertion_Order IS NOT NULL AND Line_Item IS NOT NULL THEN
                CASE
                  WHEN Fixed_Bid_Edit IS NOT NULL AND Auto_Bid_Goal_Edit IS NULL AND Auto_Bid_Algorithm_Edit IS NOT NULL THEN 'Both Fixed Bid and Bid Algorithm exist.'
                  WHEN Fixed_Bid_Edit IS NULL AND Auto_Bid_Goal_Edit IS NOT NULL AND Auto_Bid_Algorithm_Edit IS NOT NULL THEN 'Both Bid Goal and Bid Algorithm exist.'
                  WHEN Fixed_Bid_Edit IS NOT NULL AND Auto_Bid_Goal_Edit IS NOT NULL AND Auto_Bid_Algorithm_Edit IS NULL THEN 'Both Fixed Bid and Bid Goal exist.'
                  WHEN Fixed_Bid_Edit IS NOT NULL AND Auto_Bid_Goal_Edit IS NOT NULL AND Auto_Bid_Algorithm_Edit IS NOT NULL THEN 'All bid fields exist.'
                  ELSE NULL
                END
            ELSE
            NULL
          END
            AS Error,
            'ERROR' AS Severity,
            COALESCE(Line_Item,
              Insertion_Order,
              'BLANK') AS Id
          FROM
            `{dataset}.SHEET_BidStrategy` )
        WHERE
          Error IS NOT NULL )
      SELECT
        *
      FROM
        INPUT_ERRORS ;
    """.format(**project.task),
                  legacy=False)

    # BUGFIX: raw string so the BigQuery regex escape \d is not parsed as an
    # invalid Python escape sequence (DeprecationWarning now, SyntaxError in a
    # future Python).  The bytes of the SQL text are unchanged.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_BidStrategy",
                  r"""SELECT *
      FROM `{dataset}.SHEET_BidStrategy`
      WHERE (
        REGEXP_CONTAINS(Insertion_Order, r" - (\d+)$")
        OR REGEXP_CONTAINS(Line_Item, r" - (\d+)$")
      )
      AND Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_BidStrategy` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_BidStrategy` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Ejemplo n.º 20
0
def pacing_audit():
    """Audit the 'Pacing' sheet tab and stage patch candidates in BigQuery.

    Loads the tab into {dataset}.SHEET_Pacing, builds AUDIT_Pacing (rows with
    a missing Period or Type edit) and PATCH_Pacing (rows with parseable ids
    and no ERROR severity audit entries).

    Uses module-level globals: project, get_rows, put_rows, query_to_view.
    """

    # Row 1 is the header; data starts at A2.
    rows = get_rows(project.task["auth_sheets"], {
        "sheets": {
            "sheet": project.task["sheet"],
            "tab": "Pacing",
            "range": "A2:Z"
        }
    })

    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset": project.task["dataset"],
                "table": "SHEET_Pacing",
                # Editable fields appear as (value, value_Edit) column pairs.
                "schema": [
                    { "name": "Partner", "type": "STRING" },
                    { "name": "Advertiser", "type": "STRING" },
                    { "name": "Campaign", "type": "STRING" },
                    { "name": "Insertion_Order", "type": "STRING" },
                    { "name": "Line_Item", "type": "STRING" },
                    { "name": "Period", "type": "STRING" },
                    { "name": "Period_Edit", "type": "STRING" },
                    { "name": "Type", "type": "STRING" },
                    { "name": "Type_Edit", "type": "STRING" },
                    { "name": "Daily_Budget", "type": "FLOAT" },
                    { "name": "Daily_Budget_Edit", "type": "FLOAT" },
                    { "name": "Daily_Impressions", "type": "INTEGER" },
                    { "name": "Daily_Impressions_Edit", "type": "INTEGER" },
                ],
                "format": "CSV"
            }
        }, rows)

    # Audit view: a pacing edit needs both a Period and a Type.
    query_to_view(
        project.task["auth_bigquery"],
        project.id,
        project.task["dataset"],
        "AUDIT_Pacing",
        """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Pacing' AS Operation,
            CASE
              WHEN Period_Edit IS NULL THEN 'Missing Period.'
              WHEN Type_Edit IS NULL THEN 'Missing Type.'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_Pacing`
        )
        WHERE
          Error IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      ;
    """.format(**project.task),
        legacy=False)

    # BUGFIX: raw string so the BigQuery regex escape \d is not parsed as an
    # invalid Python escape sequence; the SQL text itself is unchanged.
    query_to_view(
        project.task["auth_bigquery"],
        project.id,
        project.task["dataset"],
        "PATCH_Pacing",
        r"""SELECT *
      FROM `{dataset}.SHEET_Pacing`
      WHERE (
        REGEXP_CONTAINS(Insertion_Order, r" - (\d+)$")
        OR REGEXP_CONTAINS(Line_Item, r" - (\d+)$")
      )
      AND Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_Pacing` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_Pacing` WHERE Severity='ERROR')
    """.format(**project.task),
        legacy=False
    )
Ejemplo n.º 21
0
def insertion_order_audit():
    """Audit the 'Insertion Orders' sheet tab and build insert/patch views.

    Steps:
      1. Read rows from the 'Insertion Orders' tab into
         {dataset}.SHEET_InsertionOrders.
      2. Create INSERT_InsertionOrders: DV360 insertion-order payloads for
         sheet rows marked Action="INSERT" that do not already exist in DV360,
         joining the sibling SHEET_* tabs for costs, pacing, frequency caps,
         integration details, budget segments, and bid strategy.
      3. Create AUDIT_InsertionOrders: missing-field errors plus duplicate
         name warnings.
      4. Create PATCH_InsertionOrders: rows with no ERROR severity entries.

    Uses module-level globals: project, get_rows, put_rows, query_to_view.
    """

    # Move Insertion Order To BigQuery; row 1 is the header so start at A2.
    rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Insertion Orders",
                "range": "A2:Z"
            }
        })

    # Editable fields appear as (value, value_Edit) column pairs.
    schema = [{"name": name, "type": kind} for name, kind in (
        ("Partner", "STRING"),
        ("Advertiser", "STRING"),
        ("Campaign", "STRING"),
        ("Insertion_Order", "STRING"),
        ("Action", "STRING"),
        ("Status", "STRING"),
        ("Name", "STRING"),
        ("Name_Edit", "STRING"),
        ("Budget_Unit", "STRING"),
        ("Budget_Unit_Edit", "STRING"),
        ("Budget_Automation", "STRING"),
        ("Budget_Automation_Edit", "STRING"),
        ("Performance_Goal_Type", "STRING"),
        ("Performance_Goal_Type_Edit", "STRING"),
        ("Performance_Goal_Amount", "FLOAT"),
        ("Performance_Goal_Amount_Edit", "FLOAT"),
        ("Performance_Goal_Percent", "FLOAT"),
        ("Performance_Goal_Percent_Edit", "FLOAT"),
        ("Performance_Goal_String", "STRING"),
        ("Performance_Goal_String_Edit", "STRING"),
    )]

    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset": project.task["dataset"],
                "table": "SHEET_InsertionOrders",
                "schema": schema,
                "format": "CSV"
            }
        }, rows)

    # Create Insert View.
    # BUGFIX: raw string so the BigQuery regex escape \d is not parsed as an
    # invalid Python escape sequence; the SQL text itself is unchanged.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "INSERT_InsertionOrders",
                  r"""SELECT
      REGEXP_EXTRACT(S_IO.Advertiser, r' - (\d+)$') AS advertiserId,
      REGEXP_EXTRACT(S_IO.Campaign, r' - (\d+)$') AS campaignId,
      S_IO.Insertion_Order AS displayName,
      'ENTITY_STATUS_DRAFT' AS entityStatus,
      STRUCT(
        S_PC.Cost_Type_Edit As costType,
        S_PC.Fee_Type_Edit As feeType,
        S_PC.Invoice_Type_Edit AS invoiceType,
        S_PC.Fee_Amount_Edit AS feeAmount,
        S_PC.Fee_Percent_Edit * 1000 AS feePercentageMillis
      ) AS partnerCosts,
      STRUCT(
        S_P.Period_Edit As pacingPeriod,
        S_P.Type_Edit As pacingType,
        S_P.Daily_Budget_Edit AS dailyMaxMicros,
        S_P.Daily_Impressions_Edit AS dailyMaxImpressions
      ) AS pacing,
      STRUCT(
        S_FC.Unlimited_Edit AS unlimited,
        S_FC.Time_Unit_Edit AS timeUnit,
        S_FC.Time_Count_Edit AS timeUnitCount,
        S_FC.Max_impressions_Edit AS maxImpressions
      ) AS frequencyCap,
      STRUCT(
        S_ID.Integration_Code_Edit As integrationCode,
        S_ID.Details_Edit As details
      ) AS integrationDetails,
      STRUCT(
        S_IO.Performance_Goal_Type_Edit AS performanceGoalType,
        S_IO.Performance_Goal_Amount_Edit * 100000 AS performanceGoalAmountMicros,
        S_IO.Performance_Goal_Percent_Edit * 100000 AS performanceGoalPercentageMicros,
        S_IO.Performance_Goal_String_Edit AS performanceGoalString
      ) AS performanceGoal,
      STRUCT(
        S_IO.Budget_Unit_Edit AS budgetUnit,
        S_IO.Budget_Automation_Edit AS automationType,
        (SELECT ARRAY(
          SELECT
            STRUCT(
             S_S.Budget_Edit * 100000 AS budgetAmountMicros,
             S_S.Description_Edit AS description,
             STRUCT (
               STRUCT (
                 EXTRACT(YEAR FROM CAST(S_S.Start_Date_Edit AS Date)) AS year,
                 EXTRACT(MONTH FROM CAST(S_S.Start_Date_Edit AS DATE)) AS month,
                 EXTRACT(DAY FROM CAST(S_S.Start_Date_Edit AS DATE)) AS day
               ) AS startDate,
               STRUCT (
                 EXTRACT(YEAR FROM CAST(S_S.End_Date_Edit AS Date)) AS year,
                 EXTRACT(MONTH FROM CAST(S_S.End_Date_Edit AS DATE)) AS month,
                 EXTRACT(DAY FROM CAST(S_S.End_Date_Edit AS DATE)) AS day
             ) AS endDate
           ) AS dateRange
          ) AS budgetSegments
          FROM `{dataset}.SHEET_Segments` AS  S_S
          WHERE S_IO.Insertion_Order=S_S.Insertion_Order
        )) AS budgetSegments
      ) AS budget,
      STRUCT(
        IF(S_BS.Fixed_Bid_Edit IS NOT NULL,
          STRUCT(
            S_BS.Fixed_Bid_Edit * 100000 AS bidAmountMicros
          ),
          NULL
        ) AS fixedBid,
        IF(S_BS.Auto_Bid_Goal_Edit IS NOT NULL,
          STRUCT(
            S_BS.Auto_Bid_Goal_Edit AS performanceGoalType,
            S_BS.Auto_Bid_Amount_Edit * 100000 AS maxAverageCpmBidAmountMicros,
            S_BS.Auto_Bid_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS maximizeSpendAutoBid,
        IF(S_BS.Performance_Goal_Type_Edit IS NOT NULL,
          STRUCT(
            S_BS.Performance_Goal_Type_Edit AS performanceGoalType,
            S_BS.Performance_Goal_Amount_Edit * 100000 AS performanceGoalAmountMicros,
            S_BS.Performance_Goal_Average_CPM_Bid_Edit * 100000 AS maxAverageCpmBidAmountMicros,
            S_BS.Performance_Goal_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS performanceGoalAutoBid
      )
      AS bidStrategy
      FROM `{dataset}.SHEET_InsertionOrders` As S_IO
      LEFT JOIN `{dataset}.SHEET_Segments` As S_S ON S_IO.Insertion_Order=S_S.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_PartnerCosts` As S_PC ON S_IO.Insertion_Order=S_PC.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_Pacing` As S_P ON S_IO.Insertion_Order=S_P.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_FrequencyCaps` As S_FC ON S_IO.Insertion_Order=S_FC.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_IntegrationDetails` As S_ID ON S_IO.Insertion_Order=S_ID.Insertion_Order
      LEFT JOIN `{dataset}.SHEET_BidStrategy` As S_BS ON S_IO.Insertion_Order=S_BS.Insertion_Order
      LEFT JOIN `{dataset}.DV_InsertionOrders` As DV_IO ON S_IO.Insertion_Order=DV_IO.displayName
      WHERE S_IO.Action="INSERT"
      AND DV_IO IS NULL
    """.format(**project.task),
                  legacy=False)

    # Create Audit View And Write To Sheets
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_InsertionOrders",
                  """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Insertion Order' AS Operation,
            CASE
              WHEN Name_Edit IS NULL THEN 'Missing Name.'
              WHEN Budget_Unit_Edit IS NULL THEN 'Missing Budget Unit.'
              WHEN Budget_Automation_Edit IS NULL THEN 'Missing Budget Automation.'
              WHEN Performance_Goal_Type_Edit IS NULL THEN 'Missing Goal Type.'
              WHEN Performance_Goal_Amount_Edit IS NULL
              AND Performance_Goal_Percent_Edit IS NULL
              AND Performance_Goal_String_Edit IS NULL THEN 'Missing Goal Amount / Percent / String.'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_InsertionOrders`
        )
        WHERE
          Error IS NOT NULL
      ),
      /* Check duplicate inserts */
      DUPLICATE_ERRORS AS (
        SELECT
          'Insertion_Order' AS Operation,
          'Duplicate Insertion Order name, insert will be ignored.' AS Error,
          'WARNING' AS Severity,
          COALESCE(S_IO.Insertion_Order, 'BLANK') AS Id
        FROM `{dataset}.SHEET_InsertionOrders` As S_IO
        LEFT JOIN `{dataset}.DV_InsertionOrders` AS DV_IO ON S_IO.Insertion_Order=DV_IO.displayName
        WHERE S_IO.Action="INSERT"
        AND DV_IO IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      UNION ALL
      SELECT * FROM DUPLICATE_ERRORS
      ;
    """.format(**project.task),
                  legacy=False)

    # Patch view: anything without a blocking ERROR can be patched.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_InsertionOrders",
                  """SELECT *
      FROM `{dataset}.SHEET_InsertionOrders`
      WHERE Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_InsertionOrders` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Ejemplo n.º 22
0
def partner_cost_audit():
    """Audit the 'Partner Costs' sheet tab and stage patch candidates.

    Loads the tab into {dataset}.SHEET_PartnerCosts, builds AUDIT_PartnerCosts
    (missing cost/fee/invoice types, or fee amount vs. percent conflicts) and
    PATCH_PartnerCosts (rows with parseable ids and no ERROR severity entries).

    Uses module-level globals: project, get_rows, put_rows, query_to_view.
    """

    # Row 1 is the header; data starts at A2.
    rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Partner Costs",
                "range": "A2:Z"
            }
        })

    # Editable fields appear as (value, value_Edit) column pairs.
    schema = [{"name": name, "type": kind} for name, kind in (
        ("Partner", "STRING"),
        ("Advertiser", "STRING"),
        ("Campaign", "STRING"),
        ("Insertion_Order", "STRING"),
        ("Line_Item", "STRING"),
        ("Cost_Type", "STRING"),
        ("Cost_Type_Edit", "STRING"),
        ("Fee_Type", "STRING"),
        ("Fee_Type_Edit", "STRING"),
        ("Invoice_Type", "STRING"),
        ("Invoice_Type_Edit", "STRING"),
        ("Fee_Amount", "FLOAT"),
        ("Fee_Amount_Edit", "FLOAT"),
        ("Fee_Percent", "FLOAT"),
        ("Fee_Percent_Edit", "FLOAT"),
    )]

    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset": project.task["dataset"],
                "table": "SHEET_PartnerCosts",
                "schema": schema,
                "format": "CSV"
            }
        }, rows)

    # Audit view: required fields plus the amount-XOR-percent constraint.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_PartnerCosts",
                  """WITH
        /* Check if sheet values are set */ INPUT_ERRORS AS (
        SELECT
          *
        FROM (
          SELECT
            'Partner Costs' AS Operation,
            CASE
              WHEN Cost_Type_Edit IS NULL THEN 'Missing Cost Type.'
              WHEN Fee_Type_Edit IS NULL THEN 'Missing Fee Type.'
              WHEN Invoice_Type_Edit IS NULL THEN 'Missing Invoice Type.'
              WHEN Fee_Amount_Edit IS NULL
            AND Fee_Percent_Edit IS NULL THEN 'You must select a Fee Amount OR Fee Percent'
            ELSE IF
            (Fee_Amount_Edit IS NOT NULL
              AND Fee_Percent_Edit IS NOT NULL,
              'You must select a Fee Amount OR Fee Percent, not both',
              NULL)
          END
            AS Error,
            'ERROR' AS Severity,
            COALESCE(Line_Item,
              Insertion_Order,
              'BLANK') AS Id
          FROM
            `{dataset}.SHEET_PartnerCosts` )
        WHERE
          Error IS NOT NULL )
      SELECT
        *
      FROM
        INPUT_ERRORS ;
    """.format(**project.task),
                  legacy=False)

    # BUGFIX: raw string so the BigQuery regex escape \d is not parsed as an
    # invalid Python escape sequence; the SQL text itself is unchanged.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_PartnerCosts",
                  r"""SELECT *
      FROM `{dataset}.SHEET_PartnerCosts`
      WHERE (
        REGEXP_CONTAINS(Insertion_Order, r" - (\d+)$")
        OR REGEXP_CONTAINS(Line_Item, r" - (\d+)$")
      )
      AND Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_PartnerCosts` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_PartnerCosts` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Ejemplo n.º 23
0
def integration_detail_audit():
    """Sync the 'Integration Details' sheet tab to BigQuery and audit it.

    Loads the tab into {dataset}.SHEET_IntegrationDetails, then builds the
    AUDIT_IntegrationDetails view (rows missing an Advertiser) and the
    PATCH_IntegrationDetails view (rows with no blocking audit errors).

    Uses module-level globals: project, get_rows, put_rows, query_to_view.
    """

    # Sheet layout: identifier columns, then (value, value_Edit) field pairs.
    field_types = (
        ("Partner", "STRING"),
        ("Advertiser", "STRING"),
        ("Campaign", "STRING"),
        ("Insertion_Order", "STRING"),
        ("Line_Item", "STRING"),
        ("Integration_Code", "STRING"),
        ("Integration_Code_Edit", "STRING"),
        ("Details", "STRING"),
        ("Details_Edit", "STRING"),
    )

    # Row 1 is the header; data starts at A2.
    sheet_rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Integration Details",
                "range": "A2:I"
            }
        })

    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset": project.task["dataset"],
                "table": "SHEET_IntegrationDetails",
                "schema": [
                    {"name": column, "type": kind}
                    for column, kind in field_types
                ],
                "format": "CSV"
            }
        }, sheet_rows)

    # Rows without an advertiser cannot be patched; surface them as errors.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_IntegrationDetails",
                  """WITH
      /* Check if advertiser values are set */
      INPUT_ERRORS AS (
        SELECT
          'Integration Details' AS Operation,
          'Missing Advertiser.' AS Error,
          'ERROR' AS Severity,
          COALESCE(Line_Item, Insertion_Order, Advertiser, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_IntegrationDetails`
        WHERE Advertiser IS NULL
      )

      SELECT * FROM INPUT_ERRORS
    """.format(**project.task),
                  legacy=False)

    # Anything whose ids are not implicated in an ERROR is patch eligible.
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_IntegrationDetails",
                  """SELECT *
      FROM `{dataset}.SHEET_IntegrationDetails`
      WHERE Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_IntegrationDetails` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_IntegrationDetails` WHERE Severity='ERROR')
      AND Campaign NOT IN (SELECT Id FROM `{dataset}.AUDIT_IntegrationDetails` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)