Exemplo n.º 1
0
def url_fetch(config, task) -> Iterator[dict]:
    """Fetch each URL in the task and yield its status code and/or contents.

  Takes no parameters, it operates on recipe JSON directly. Core
  function is to call urlopen on each passed in URL.

  Args:
    config: StarThinker configuration object (only `verbose` is read here).
    task: (dict) recipe task with 'auth' and 'urls' keys plus optional
      'data' (POST payload), 'status' (bool), and 'read' (bool) flags.

  Yields:
    dict: record matching URL_SCHEMA with 'URL', 'URI', and optionally
    'Status' and 'Read' keys.
  """

    for url, uri in get_rows(config, task['auth'], task['urls']):

        if config.verbose:
            print('URL/URI', url, uri)

        record = {'URL': url, 'URI': None if uri is None else str(uri)}

        url_request = request.Request(url, data=task.get('data'))
        try:
            url_response = request.urlopen(url_request)

            if task.get('status', False):
                record['Status'] = url_response.status

            if task.get('read', False):
                record['Read'] = url_response.read()

        except InvalidURL:
            # Malformed URL never reaches the server: report as client error.
            if task.get('status', False):
                record['Status'] = 400

        except HTTPError as error:
            # Use .code, which exists on all Python 3 versions
            # (.status was only added in Python 3.9).
            if task.get('status', False):
                record['Status'] = error.code

        except Exception:
            # Any other failure (DNS, connection reset, etc.) is reported
            # as a generic server-side error.
            if task.get('status', False):
                record['Status'] = 500

        yield record
Exemplo n.º 2
0
def report_filter(auth, body, filters):
    """ Adds filters to a report body

  Filters cannot be easily added to the reports without templating, this allows
  filters to be passed as lists.
  Values are specified using get_rows(...) helper, see
  starthinker/util/data/__init__.py.
  To specify a filter, use the official filter name and a list of values.

  For example:

  ```
  filters = {
    "FILTER_PARTNER": {
      "values":789
    },
    "FILTER_ADVERTISER": {
      "values":[1234, 5678, 91011]
    }
  }
  ```

  Args:
    * auth: (string) Either user or service.
    * body: (json) the report body ( with or without filters )
    * filters: (json) a dictionary of filters to apply ( see above examples )

  Returns:
    * body: ( json ) modified report body
  """
    from copy import deepcopy

    # Deep copy so appending filters does not mutate the caller's body:
    # body.copy() is shallow and would share the nested 'params' dict.
    new_body = deepcopy(body)

    for f, d in filters.items():
        # Fix: use the auth passed by the caller instead of
        # project.task['auth'], honoring the function's own signature.
        for v in get_rows(auth, d):
            # setdefault on 'params' too, so bodies without it don't KeyError.
            new_body.setdefault('params', {}).setdefault('filters', []).append({
                'type': f,
                'value': v
            })

    return new_body
Exemplo n.º 3
0
 def load_multiple():
   """Yield CM360 account records for every profile listed on the sheet.

   Reads account ids from column A of the 'CM Profiles' tab and streams
   accounts.list results for each one, using superuser mode when available.
   """
   profile_source = { 'sheets': {
     'sheet': task['sheet'],
     'tab': 'CM Profiles',
     'header': False,
     'range': 'A2:A'
   }}

   for sheet_row in get_rows(config, task['auth_sheets'], profile_source):
     if not sheet_row:
       continue

     account_id = lookup_id(sheet_row[0])
     is_superuser, profile_id = get_profile_for_api(config, task['auth_cm'], account_id)

     # Superusers must also pass the account id explicitly.
     if is_superuser:
       list_kwargs = { 'profileId': profile_id, 'accountId': account_id }
     else:
       list_kwargs = { 'profileId': profile_id }

     yield from API_DCM(
       config,
       task['auth_cm'],
       iterate=True,
       internal=is_superuser
     ).accounts().list(**list_kwargs).execute()
Exemplo n.º 4
0
 def campaign_load_multiple():
   """Yield DV360 campaigns for every advertiser listed on the sheet.

   Reads advertiser ids from column A of the 'Advertisers' tab and streams
   campaigns.list results limited to paused, active, and draft campaigns.
   """
   advertiser_rows = get_rows(
     config,
     task['auth_sheets'],
     { 'sheets': {
       'sheet': task['sheet'],
       'tab': 'Advertisers',
       'header': False,
       'range': 'A2:A'
     }}
   )

   for advertiser_row in advertiser_rows:
     if not advertiser_row:
       continue

     yield from API_DV360(
       config,
       task['auth_dv'],
       iterate=True
     ).advertisers().campaigns().list(
       advertiserId=lookup_id(advertiser_row[0]),
       filter='entityStatus="ENTITY_STATUS_PAUSED" OR entityStatus="ENTITY_STATUS_ACTIVE" OR entityStatus="ENTITY_STATUS_DRAFT"',
       fields='campaigns.displayName,campaigns.campaignId,campaigns.advertiserId,nextPageToken'
     ).execute()
Exemplo n.º 5
0
def google_api():
    """Execute a Google API call described entirely by the recipe task.

    Builds an api_call descriptor from project.task, resolves result and
    error destinations, then runs the call once per kwargs entry.
    """

    if project.verbose:
        print('GOOGLE_API', project.task['api'], project.task['version'],
              project.task['function'])

    api_call = {
        'auth': project.task['auth'],
        'api': project.task['api'],
        'version': project.task['version'],
        'function': project.task['function'],
        'iterate': project.task.get('iterate', False),
        'key': project.key,
        'headers': project.task.get('headers'),
    }

    results = google_api_build_results(
        project.task['auth'], api_call, project.task.get('results', {}))

    errors = google_api_build_errors(
        project.task['auth'], api_call, project.task.get('errors', {}))

    # kwargs may be given inline (a single dict or a list of dicts),
    # fetched remotely, or default to one empty call.
    if 'kwargs' in project.task:
        inline = project.task['kwargs']
        kwargs_list = inline if isinstance(inline, (list, tuple)) else [inline]
    elif 'kwargs_remote' in project.task:
        kwargs_list = get_rows(project.task['auth'],
                               project.task['kwargs_remote'],
                               as_object=True)
    else:
        kwargs_list = [{}]

    for kwargs in kwargs_list:
        api_call['kwargs'] = kwargs
        # NOTE: 'google_api_initilaize' is the helper's actual (misspelled)
        # name elsewhere in the project; keep it as-is.
        google_api_initilaize(api_call, project.task.get('alias'))
        google_api_execute(project.task['auth'], api_call, results, errors)
Exemplo n.º 6
0
def partner_load():
  """Refresh DV360 partners: write them to BigQuery, then mirror to the sheet."""

  # pull all partners from DV360 and store them in BigQuery
  partner_rows = API_DV360(project.task['auth_dv'], iterate=True).partners().list().execute()

  bigquery_destination = {
      'bigquery': {
          'dataset': project.task['dataset'],
          'table': 'DV_Partners',
          'schema': Discovery_To_BigQuery('displayvideo', 'v1').method_schema('partners.list'),
          'format': 'JSON'
      }
  }
  put_rows(project.task['auth_bigquery'], bigquery_destination, partner_rows)

  # read a display-friendly projection back out of BigQuery
  sheet_rows = get_rows(
      project.task['auth_bigquery'], {
          'bigquery': {
              'dataset': project.task['dataset'],
              'query':
                  "SELECT CONCAT(displayName, ' - ', partnerId), entityStatus  FROM `%s.DV_Partners`"
                  % project.task['dataset'],
              'legacy': False
          }
      })

  # write the projection to the Partners tab
  put_rows(project.task['auth_sheets'], {
      'sheets': {
          'sheet': project.task['sheet'],
          'tab': 'Partners',
          'range': 'B2'
      }
  }, sheet_rows)
Exemplo n.º 7
0
def vision_annotate(config, task):
  """Annotate images via the Vision API in batches of 16 requests.

  Args:
    config: StarThinker configuration object (project and verbose are read).
    task: (dict) recipe task with 'auth' and 'requests' keys.

  Yields:
    dict: one Vision API response per request, with 'imageUri' added so each
    response can be matched back to its source image.
  """

  body = {
    "requests":[],
    "parent": 'projects/' + config.project
  }

  uri_buffer = []

  for request_index, request in enumerate(get_rows(config, task['auth'], task['requests'], as_object=True)):

    # buffer the URI so the response can be matched back to its image
    uri = request['image'].get('source', {}).get('imageUri', 'image %s' % request_index)
    uri_buffer.append(uri)

    if config.verbose:
      print('URI', uri)

    # content and source are mutually exclusive, prefer inline content
    if 'content' in request['image'] and 'source' in request['image']:
      del request['image']['source']

    body['requests'].append(request)

    # submit 16 requests at a time
    # (fix: the original if/else dropped every request whose index was a
    # multiple of 16 instead of adding it to the next batch)
    if len(body['requests']) == 16:
      for response_index, response in enumerate(API_Vision(config, task['auth'], iterate=True).images().annotate(body=body).execute()):
        response['imageUri'] = uri_buffer[response_index]
        yield response

      uri_buffer = []
      body['requests'] = []

  # clean up last requests
  # (fix: this flush was indented inside the loop and ran every iteration)
  if body['requests']:
    for response_index, response in enumerate(API_Vision(config, task['auth'], iterate=True).images().annotate(body=body).execute()):
      response['imageUri'] = uri_buffer[response_index]
      yield response
Exemplo n.º 8
0
def conversion_upload():
  """Entry point for conversion_upload task, which uploads conversions to CM360.

  Prints success or failure to STDOUT.
  Currently only does batchInsert, not batchUpdate.
  """

  # pull conversion rows from the task's configured source
  rows = get_rows(
    project.task['auth'],
    project.task['from'],
    as_object=False
  )

  if project.verbose:
    print('CONVERSION UPLOAD')

  statuses = conversions_upload(
    project.task['auth'],
    project.task['account_id'],
    project.task['activity_id'],
    project.task['conversion_type'], rows,
    project.task['encryptionInfo']
  )

  # statuses is iterated once; track emptiness while reporting each outcome
  has_rows = False
  for status in statuses:
    has_rows = True
    if 'errors' in status:
      if project.verbose:
        print( 'ERROR:', status['conversion']['ordinal'], '\n'.join([e['message'] for e in status['errors']]))
    else:
      if project.verbose:
        print('OK:', status['conversion']['ordinal'])

  if not has_rows:
    if project.verbose:
      print('NO ROWS')
Exemplo n.º 9
0
def sdf_download(config, auth, version, partner_id, file_types, filter_type,
                 filter_ids_obj):
    """Create a DV360 SDF download task, poll until done, and return the media.

    Args:
        config: StarThinker configuration object.
        auth: (string) Either user or service.
        version: (string) SDF version for the request body.
        partner_id: (string/int) DV360 partner to download SDF for.
        file_types: SDF file types to include in the download.
        filter_type: filter type for parentEntityFilter.
        filter_ids_obj: get_rows(...) source yielding the filter ids.

    Returns:
        The downloaded SDF media via download_media.

    Raises:
        Exception: if the download task cannot be created or reports an error.
    """
    # Read filter ids from the configured source.
    filter_ids = list(get_rows(config, auth, filter_ids_obj))

    body = {
        'version': version,
        'partnerId': partner_id,
        'parentEntityFilter': {
            'fileType': file_types,
            'filterType': filter_type,
            'filterIds': filter_ids
        },
        'idFilter': None
    }

    operation = API_DV360(config,
                          auth).sdfdownloadtasks().create(body=body).execute()

    if not operation or 'name' not in operation:
        # Fix: previously this branch printed 'error' and fell through to a
        # NameError on the undefined 'response' variable.
        raise Exception('SDF download task creation failed: %s' % operation)

    request = API_DV360(
        config,
        auth).sdfdownloadtasks().operations().get(name=operation['name'])

    # This is the eng recommended way of getting the operation
    while True:
        response = request.execute()
        if 'done' in response and response['done']:
            break
        time.sleep(30)

    if 'error' in response:
        raise Exception(response['error']['message'])

    return download_media(config, 'user', response['response']['resourceName'])
Exemplo n.º 10
0
def pacing_load():
  """Read DV360 pacing settings from BigQuery and write them to the sheet.

  Unions insertion order and line item pacing (each editable column is
  selected twice: once as current value, once as the editable copy) and
  writes the result starting at cell A2 of the 'Pacing' tab.
  """

  # read pacings from BigQuery (assumes DV_* tables were loaded earlier)
  rows = get_rows(
      project.task["auth_bigquery"], {
          "bigquery": {
              "dataset":
                  project.task["dataset"],
              "query":
                  """SELECT * FROM (
                  SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId) AS IO_Display,
         NULL,
         I.pacing.pacingPeriod,
         I.pacing.pacingPeriod,
         I.pacing.pacingType,
         I.pacing.pacingType,
         CAST(I.pacing.dailyMaxMicros AS INT64) / 100000,
         CAST(I.pacing.dailyMaxMicros AS INT64) / 100000,
         I.pacing.dailyMaxImpressions,
         I.pacing.dailyMaxImpressions
       FROM `{dataset}.DV_InsertionOrders` AS I
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON I.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON I.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId) AS IO_Display,
         CONCAT(L.displayName, ' - ', L.lineItemId),
         L.pacing.pacingPeriod,
         L.pacing.pacingPeriod,
         L.pacing.pacingType,
         L.pacing.pacingType,
         CAST(L.pacing.dailyMaxMicros AS INT64) / 100000,
         CAST(L.pacing.dailyMaxMicros AS INT64) / 100000,
         L.pacing.dailyMaxImpressions,
         L.pacing.dailyMaxImpressions
       FROM `{dataset}.DV_LineItems` AS L
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON L.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
       ON L.insertionOrderId=I.insertionOrderId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON L.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId )
       ORDER BY IO_Display
       """.format(**project.task),
              "legacy":
                  False
          }
      })

  # write pacings to the sheet
  put_rows(project.task["auth_sheets"], {
      "sheets": {
          "sheet": project.task["sheet"],
          "tab": "Pacing",
          "range": "A2"
      }
  }, rows)
Exemplo n.º 11
0
def pacing_patch(commit=False):
  """Build pacing patches from edited sheet values stored in BigQuery.

  Reads PATCH_Pacing rows, emits one patch per row whose edit columns differ
  from the current values, then either commits the patches or previews them.

  Args:
    commit: (bool) when True apply the patches, otherwise preview only.
  """

  patches = []

  rows = get_rows(
    project.task["auth_bigquery"],
    { "bigquery": {
      "dataset": project.task["dataset"],
      "table":"PATCH_Pacing",
    }},
    as_object=True
  )

  for row in rows:

    pacing = {}

    # Only fields whose edited value differs from the current value are sent.
    if row['Period'] != row['Period_Edit']:
      pacing.setdefault("pacing", {})["pacingPeriod"] = row['Period_Edit']

    if row['Type'] != row['Type_Edit']:
      pacing.setdefault("pacing", {})["pacingType"] = row['Type_Edit']

    if row['Daily_Budget'] != row['Daily_Budget_Edit']:
      pacing.setdefault("pacing", {})["dailyMaxMicros"] = int(float(row['Daily_Budget_Edit']) * 100000)

    if row['Daily_Impressions'] != row['Daily_Impressions_Edit']:
      pacing.setdefault("pacing", {})["dailyMaxImpressions"] = row['Daily_Impressions_Edit']

    if not pacing:
      continue

    patch = {
        "operation": "Pacing",
        "action": "PATCH",
        "partner": row['Partner'],
        "advertiser": row['Advertiser'],
        "campaign": row['Campaign'],
        "parameters": {
            "advertiserId": lookup_id(row['Advertiser']),
            "body": pacing
        }
    }

    # A line item edit targets the line item, otherwise the insertion order.
    if row['Line_Item']:
      patch["line_item"] = row['Line_Item']
      patch["parameters"]["lineItemId"] = lookup_id(row['Line_Item'])
    else:
      patch["insertion_order"] = row['Insertion_Order']
      patch["parameters"]["insertionOrderId"] = lookup_id(row['Insertion_Order'])

    patches.append(patch)

  patch_masks(patches)

  if commit:
    insertion_order_commit(patches)
    line_item_commit(patches)
  else:
    patch_preview(patches)
Exemplo n.º 12
0
def pacing_audit():
  """Validate edited pacing sheet values and stage patchable rows.

  Copies the 'Pacing' tab into SHEET_Pacing, builds AUDIT_Pacing with input
  errors, then builds PATCH_Pacing containing only rows that pass the audit.
  """
  # copy the sheet tab into BigQuery for validation
  rows = get_rows(project.task["auth_sheets"], {
      "sheets": {
          "sheet": project.task["sheet"],
          "tab": "Pacing",
          "range": "A2:Z"
      }
  })

  put_rows(
      project.task["auth_bigquery"], {
          "bigquery": {
              "dataset": project.task["dataset"],
              "table": "SHEET_Pacing",
              "schema": [
                  { "name": "Partner", "type": "STRING" },
                  { "name": "Advertiser", "type": "STRING" },
                  { "name": "Campaign", "type": "STRING" },
                  { "name": "Insertion_Order", "type": "STRING" },
                  { "name": "Line_Item", "type": "STRING" },
                  { "name": "Period", "type": "STRING" },
                  { "name": "Period_Edit", "type": "STRING" },
                  { "name": "Type", "type": "STRING" },
                  { "name": "Type_Edit", "type": "STRING" },
                  { "name": "Daily_Budget", "type": "FLOAT" },
                  { "name": "Daily_Budget_Edit", "type": "FLOAT" },
                  { "name": "Daily_Impressions", "type": "INTEGER" },
                  { "name": "Daily_Impressions_Edit", "type": "INTEGER" },
              ],
              "format": "CSV"
          }
      }, rows)

  # surface missing required edit values as ERROR severity rows
  query_to_view(
      project.task["auth_bigquery"],
      project.id,
      project.task["dataset"],
      "AUDIT_Pacing",
      """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Pacing' AS Operation,
            CASE
              WHEN Period_Edit IS NULL THEN 'Missing Period.'
              WHEN Type_Edit IS NULL THEN 'Missing Type.'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_Pacing`
        )
        WHERE
          Error IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      ;
    """.format(**project.task),
      legacy=False)

  # keep only rows with a valid ' - <id>' suffix and no audit errors
  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "PATCH_Pacing",
    """SELECT *
      FROM `{dataset}.SHEET_Pacing`
      WHERE (
        REGEXP_CONTAINS(Insertion_Order, r" - (\d+)$")
        OR REGEXP_CONTAINS(Line_Item, r" - (\d+)$")
      )
      AND Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_Pacing` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_Pacing` WHERE Severity='ERROR')
    """.format(**project.task),
    legacy=False
  )
Exemplo n.º 13
0
def advertiser_load():
    """Load DV360 advertisers for sheet-listed partners into BigQuery and the sheet."""

    # load advertisers for every partner listed on the user defined sheet
    def load_multiple():
        # Partner ids come from column A of the 'Partners' tab.
        for row in get_rows(
                project.task['auth_sheets'], {
                    'sheets': {
                        'sheet': project.task['sheet'],
                        'tab': 'Partners',
                        'header': False,
                        'range': 'A2:A'
                    }
                }):
            if row:
                yield from API_DV360(
                    project.task['auth_dv'], iterate=True).advertisers().list(
                        partnerId=lookup_id(row[0])).execute()

    advertiser_clear()

    # write advertisers to database
    put_rows(
        project.task['auth_bigquery'], {
            'bigquery': {
                'dataset':
                project.task['dataset'],
                'table':
                'DV_Advertisers',
                'schema':
                Discovery_To_BigQuery('displayvideo',
                                      'v1').method_schema('advertisers.list'),
                'format':
                'JSON'
            }
        }, load_multiple())

    # write advertisers to sheet
    put_rows(
        project.task['auth_sheets'], {
            'sheets': {
                'sheet': project.task['sheet'],
                'tab': 'Advertisers',
                'range': 'B2'
            }
        },
        get_rows(
            project.task['auth_bigquery'], {
                'bigquery': {
                    'dataset':
                    project.task['dataset'],
                    'query':
                    """SELECT
           CONCAT(P.displayName, ' - ', P.partnerId),
           CONCAT(A.displayName, ' - ', A.advertiserId),
           A.entityStatus
           FROM `{dataset}.DV_Advertisers` AS A
           LEFT JOIN `{dataset}.DV_Partners` AS P
           ON A.partnerId=P.partnerId
        """.format(**project.task),
                    'legacy':
                    False
                }
            }))
Exemplo n.º 14
0
def cm_account_load(config, task):
  """Load CM360 accounts for sheet-listed profiles into BigQuery and the sheet.

  Args:
    config: StarThinker configuration object.
    task: (dict) recipe task with sheet, BigQuery, and CM auth keys.
  """

  # load accounts for every profile listed on the user defined sheet
  def load_multiple():
    # Account ids come from column A of the 'CM Profiles' tab.
    for row in get_rows(
      config,
      task['auth_sheets'],
      { 'sheets': {
        'sheet': task['sheet'],
        'tab': 'CM Profiles',
        'header':False,
        'range': 'A2:A'
      }}
    ):
      if row:
        account_id = lookup_id(row[0])
        is_superuser, profile_id = get_profile_for_api(config, task['auth_cm'], account_id)
        # superusers must also pass the account id explicitly
        kwargs = { 'profileId': profile_id, 'accountId': account_id } if is_superuser else { 'profileId': profile_id }
        yield from API_DCM(
          config,
          task['auth_cm'],
          iterate=True,
          internal=is_superuser
        ).accounts().list(**kwargs).execute()

  cm_account_clear(config, task)

  # write accounts to database
  put_rows(
    config,
    task['auth_bigquery'],
    { 'bigquery': {
      'dataset': task['dataset'],
      'table': 'CM_Accounts',
      'schema': Discovery_To_BigQuery(
        'dfareporting',
        'v3.4'
      ).method_schema(
        'accounts.list',
        iterate=True
      ),
      'format':'JSON'
    }},
    load_multiple()
  )

  # write accounts to sheet
  put_rows(
    config,
    task['auth_sheets'],
    { 'sheets': {
      'sheet': task['sheet'],
      'tab': 'CM Accounts',
      'header':False,
      'range': 'B2'
    }},
    get_rows(
      config,
      task['auth_bigquery'],
      { 'bigquery': {
        'dataset': task['dataset'],
        'query': "SELECT CONCAT(name, ' - ', id), active  FROM `%s.CM_Accounts`" % task['dataset'],
        'legacy': False
      }}
    )
  )
Exemplo n.º 15
0
def frequency_cap_load():
    """Read DV360 frequency caps from BigQuery and write them to the sheet.

    Unions campaign, insertion order, and line item caps (each editable
    column appears twice: current value and editable copy) into the
    'Frequency Caps' tab starting at A2.
    """

    # read frequency caps from BigQuery (assumes DV_* tables already loaded)
    rows = get_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "query":
                """SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CAST(NULL AS STRING),
         CAST(NULL AS STRING),
         IFNULL(C.frequencyCap.unlimited, FALSE),
         IFNULL(C.frequencyCap.unlimited, FALSE),
         C.frequencyCap.timeUnit,
         C.frequencyCap.timeUnit,
         C.frequencyCap.timeUnitCount,
         C.frequencyCap.timeUnitCount,
         C.frequencyCap.maxImpressions,
         C.frequencyCap.maxImpressions
       FROM `{dataset}.DV_Campaigns` AS C
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON C.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId),
         CAST(NULL AS STRING),
         IFNULL(I.frequencyCap.unlimited, FALSE),
         IFNULL(I.frequencyCap.unlimited, FALSE),
         I.frequencyCap.timeUnit,
         I.frequencyCap.timeUnit,
         I.frequencyCap.timeUnitCount,
         I.frequencyCap.timeUnitCount,
         I.frequencyCap.maxImpressions,
         I.frequencyCap.maxImpressions
       FROM `{dataset}.DV_InsertionOrders` AS I
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON I.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON I.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId),
         CONCAT(L.displayName, ' - ', L.lineItemId),
         IFNULL(L.frequencyCap.unlimited, FALSE),
         IFNULL(L.frequencyCap.unlimited, FALSE),
         L.frequencyCap.timeUnit,
         L.frequencyCap.timeUnit,
         L.frequencyCap.timeUnitCount,
         L.frequencyCap.timeUnitCount,
         L.frequencyCap.maxImpressions,
         L.frequencyCap.maxImpressions
       FROM `{dataset}.DV_LineItems` AS L
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON L.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
       ON L.insertionOrderId=I.insertionOrderId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON L.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       """.format(**project.task),
                "legacy":
                False
            }
        })

    # write frequency caps to the sheet
    put_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Frequency Caps",
                "range": "A2"
            }
        }, rows)
Exemplo n.º 16
0
def partner_cost_load():
    """Read DV360 partner costs from BigQuery and write them to the sheet.

    Unions insertion order and line item partner costs (each editable column
    appears twice: current value and editable copy) into the 'Partner Costs'
    tab starting at A2.
    """

    # read partner costs from BigQuery (assumes DV_* tables already loaded)
    rows = get_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "query":
                """SELECT * FROM (
                  SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId) AS IO_Display,
         NULL,
         PC.costType,
         PC.costType,
         PC.feeType,
         PC.feeType,
         PC.invoiceType,
         PC.invoiceType,
         PC.feeAmount / 100000,
         PC.feeAmount / 100000,
         PC.feePercentageMillis / 1000,
         PC.feePercentageMillis / 1000
       FROM `{dataset}.DV_InsertionOrders` AS I, UNNEST(partnerCosts) AS PC
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON I.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON I.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId
       UNION ALL
       SELECT
         CONCAT(P.displayName, ' - ', P.partnerId),
         CONCAT(A.displayName, ' - ', A.advertiserId),
         CONCAT(C.displayName, ' - ', C.campaignId),
         CONCAT(I.displayName, ' - ', I.insertionOrderId) AS IO_Display,
         CONCAT(L.displayName, ' - ', L.lineItemId),
         PC.costType,
         PC.costType,
         PC.feeType,
         PC.feeType,
         PC.invoiceType,
         PC.invoiceType,
         PC.feeAmount / 100000,
         PC.feeAmount / 100000,
         PC.feePercentageMillis / 1000,
         PC.feePercentageMillis / 1000
       FROM `{dataset}.DV_LineItems` AS L, UNNEST(partnerCosts) AS PC
       LEFT JOIN `{dataset}.DV_Campaigns` AS C
       ON L.campaignId=C.campaignId
       LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
       ON L.insertionOrderId=I.insertionOrderId
       LEFT JOIN `{dataset}.DV_Advertisers` AS A
       ON L.advertiserId=A.advertiserId
       LEFT JOIN `{dataset}.DV_Partners` AS P
       ON A.partnerId=P.partnerId )
       ORDER BY IO_Display
       """.format(**project.task),
                "legacy":
                False
            }
        })

    # write partner costs to the sheet
    put_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Partner Costs",
                "range": "A2"
            }
        }, rows)
Exemplo n.º 17
0
def partner_cost_audit():
    """Validate edited partner cost sheet values and stage patchable rows.

    Copies the 'Partner Costs' tab into SHEET_PartnerCosts, builds
    AUDIT_PartnerCosts with input errors, then builds PATCH_PartnerCosts
    containing only rows that pass the audit.
    """
    # copy the sheet tab into BigQuery for validation
    rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Partner Costs",
                "range": "A2:Z"
            }
        })

    put_rows(
        project.task["auth_bigquery"], {
            "bigquery": {
                "dataset":
                project.task["dataset"],
                "table":
                "SHEET_PartnerCosts",
                "schema": [
                    {
                        "name": "Partner",
                        "type": "STRING"
                    },
                    {
                        "name": "Advertiser",
                        "type": "STRING"
                    },
                    {
                        "name": "Campaign",
                        "type": "STRING"
                    },
                    {
                        "name": "Insertion_Order",
                        "type": "STRING"
                    },
                    {
                        "name": "Line_Item",
                        "type": "STRING"
                    },
                    {
                        "name": "Cost_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Cost_Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Fee_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Fee_Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Invoice_Type",
                        "type": "STRING"
                    },
                    {
                        "name": "Invoice_Type_Edit",
                        "type": "STRING"
                    },
                    {
                        "name": "Fee_Amount",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Fee_Amount_Edit",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Fee_Percent",
                        "type": "FLOAT"
                    },
                    {
                        "name": "Fee_Percent_Edit",
                        "type": "FLOAT"
                    },
                ],
                "format":
                "CSV"
            }
        }, rows)

    # surface missing or conflicting edit values as ERROR severity rows
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_PartnerCosts",
                  """WITH
        /* Check if sheet values are set */ INPUT_ERRORS AS (
        SELECT
          *
        FROM (
          SELECT
            'Partner Costs' AS Operation,
            CASE
              WHEN Cost_Type_Edit IS NULL THEN 'Missing Cost Type.'
              WHEN Fee_Type_Edit IS NULL THEN 'Missing Fee Type.'
              WHEN Invoice_Type_Edit IS NULL THEN 'Missing Invoice Type.'
              WHEN Fee_Amount_Edit IS NULL
            AND Fee_Percent_Edit IS NULL THEN 'You must select a Fee Amount OR Fee Percent'
            ELSE IF
            (Fee_Amount_Edit IS NOT NULL
              AND Fee_Percent_Edit IS NOT NULL,
              'You must select a Fee Amount OR Fee Percent, not both',
              NULL)
          END
            AS Error,
            'ERROR' AS Severity,
            COALESCE(Line_Item,
              Insertion_Order,
              'BLANK') AS Id
          FROM
            `{dataset}.SHEET_PartnerCosts` )
        WHERE
          Error IS NOT NULL )
      SELECT
        *
      FROM
        INPUT_ERRORS ;
    """.format(**project.task),
                  legacy=False)

    # keep only rows with a valid ' - <id>' suffix and no audit errors
    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_PartnerCosts",
                  """SELECT *
      FROM `{dataset}.SHEET_PartnerCosts`
      WHERE (
        REGEXP_CONTAINS(Insertion_Order, r" - (\d+)$")
        OR REGEXP_CONTAINS(Line_Item, r" - (\d+)$")
      )
      AND Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_PartnerCosts` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_PartnerCosts` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Exemplo n.º 18
0
def bigquery():
    """Dispatch a BigQuery task based on the keys present in project.task.

    Routing (first match wins):
      * 'function'         -> build a canned query (only
                              'pearson_significance_test' is handled) and run
                              it into to.dataset.
      * 'run' with 'query' -> execute a parameterized query, discarding
                              results.
      * from.values        -> write literal rows from get_rows() into a table.
      * from.query         -> run a query and deliver results to a table,
                              sheet, or sftp destination, or materialize a
                              view, depending on the 'to' block.
      * otherwise          -> load a storage object (bucket:path) into a
                              table.

    Reads all configuration from the module-level ``project`` object; returns
    nothing.
    """

    if 'function' in project.task:
        query = None

        if project.task['function'] == 'pearson_significance_test':
            query = pearson_significance_test()

        if query:
            # Positional False is the legacy-SQL flag for run_query.
            run_query(project.task['auth'], project.id, query, False,
                      project.task['to']['dataset'])

    elif 'run' in project.task and 'query' in project.task.get('run', {}):
        if project.verbose:
            print('QUERY', project.task['run']['query'])
        # Executed for side effects only; results are discarded.
        run_query(
            project.task['auth'],
            project.id,
            query_parameters(project.task['run']['query'],
                             project.task['run'].get('parameters')),
            project.task['run'].get('legacy', True),
        )

    elif 'values' in project.task['from']:
        # Literal values supplied by the recipe (or indirected via get_rows).
        rows = get_rows(project.task['auth'], project.task['from'])

        rows_to_table(project.task['to'].get('auth', project.task['auth']),
                      project.id, project.task['to']['dataset'],
                      project.task['to']['table'], rows,
                      project.task.get('schema', []), 0)

    elif 'query' in project.task['from']:

        if 'table' in project.task['to']:
            if project.verbose:
                print('QUERY TO TABLE', project.task['to']['table'])

            query_to_table(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['table'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                disposition=project.task['write_disposition']
                if 'write_disposition' in project.task else 'WRITE_TRUNCATE',
                legacy=project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql,
                target_project_id=project.task['to'].get(
                    'project_id', project.id))

        elif 'sheet' in project.task['to']:
            if project.verbose:
                print('QUERY TO SHEET', project.task['to']['sheet'])
            rows = query_to_rows(
                project.task['auth'],
                project.id,
                project.task['from']['dataset'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                legacy=project.task['from'].get('legacy', True))

            # makes sure types are correct in sheet
            rows = rows_to_type(rows)

            sheets_clear(project.task['to'].get('auth', project.task['auth']),
                         project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'))
            sheets_write(project.task['to'].get('auth', project.task['auth']),
                         project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'), rows)

        elif 'sftp' in project.task['to']:
            # NOTE(review): this branch reads 'use_legacy_sql' while the other
            # branches read 'legacy'/'useLegacySql' — confirm this is intended.
            rows = query_to_rows(
                project.task['auth'],
                project.id,
                project.task['from']['dataset'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                legacy=project.task['from'].get('use_legacy_sql', True))

            if rows:
                if project.verbose:
                    print('QUERY TO SFTP')
                put_rows(project.task['auth'], project.task['to'], rows)

        else:
            if project.verbose:
                print('QUERY TO VIEW', project.task['to']['view'])
            query_to_view(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['view'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql
                project.task['to'].get('replace', False))

    else:
        if project.verbose:
            print('STORAGE TO TABLE', project.task['to']['table'])
        storage_to_table(
            project.task['auth'], project.id, project.task['to']['dataset'],
            project.task['to']['table'], project.task['from']['bucket'] + ':' +
            project.task['from']['path'], project.task.get('schema', []),
            project.task.get('skip_rows', 1),
            project.task.get('structure', 'CSV'),
            project.task.get('disposition', 'WRITE_TRUNCATE'))
Exemplo n.º 19
0
def bigquery():
    """Dispatch a BigQuery task based on the keys present in project.task.

    Routing (first match wins):
      * 'run' with 'query' -> execute a query, discarding results.
      * from.values        -> write literal rows from get_rows() into a table.
      * from.query         -> run a query and deliver results to a table,
                              storage object, sheet, or sftp destination, or
                              materialize a view, depending on the 'to' block.
      * otherwise          -> load a storage object (bucket:path) into a
                              table.

    Reads all configuration from the module-level ``project`` object; returns
    nothing.
    """

    if 'run' in project.task and 'query' in project.task.get('run', {}):
        if project.verbose: print("QUERY", project.task['run']['query'])
        run_query(
            project.task['auth'],
            project.id,
            project.task['run']['query'],
            project.task['run'].get('legacy', True),
            #project.task['run'].get('billing_project_id', None)
        )

    elif 'values' in project.task['from']:
        # Literal values supplied by the recipe (or indirected via get_rows).
        rows = get_rows(project.task['auth'], project.task['from'])

        rows_to_table(project.task['to'].get('auth', project.task['auth']),
                      project.id, project.task['to']['dataset'],
                      project.task['to']['table'], rows,
                      project.task.get('schema', []), 0)

    elif 'query' in project.task['from']:
        if 'table' in project.task['to']:
            if project.verbose:
                print("QUERY TO TABLE", project.task['to']['table'])

            # Optional setup statement executed before the main query.
            if 'pre_process_query' in project.task['to']:
                print('executing statement')
                execute_statement(project.task['auth'],
                                  project.id,
                                  project.task['to']['dataset'],
                                  project.task['to']['pre_process_query'],
                                  use_legacy_sql=project.task['from'].get(
                                      'legacy', project.task['from'].get(
                                          'useLegacySql', True)))
            query_to_table(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['table'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                disposition=project.task['write_disposition']
                if 'write_disposition' in project.task else 'WRITE_TRUNCATE',
                legacy=project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql,
                target_project_id=project.task['to'].get(
                    'project_id', project.id))

        elif 'storage' in project.task['to']:
            if project.verbose:
                print("QUERY TO STORAGE", project.task['to']['storage'])
            local_file_name = '/tmp/%s' % str(uuid.uuid1())
            rows = query_to_rows(project.task['auth'], project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'])

            # BUGFIX: csv.writer requires a text-mode handle on Python 3; the
            # previous 'wb' mode raised TypeError on write. newline='' avoids
            # blank rows on Windows. Context managers guarantee the handles
            # are closed even if writing or the upload raises.
            with open(local_file_name, 'w', newline='') as f:
                writer = csv.writer(f)
                writer.writerows(rows)

            with open(local_file_name, 'rb') as f:
                object_put(project.task['auth'],
                           project.task['to']['storage'], f)

            os.remove(local_file_name)

        elif 'sheet' in project.task['to']:
            if project.verbose:
                print("QUERY TO SHEET", project.task['to']['sheet'])
            rows = query_to_rows(project.task['auth'],
                                 project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'],
                                 legacy=project.task['from'].get(
                                     'legacy', True))

            # makes sure types are correct in sheet
            rows = rows_to_type(rows)

            sheets_clear(project.task['auth'], project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'))
            sheets_write(project.task['auth'], project.task['to']['sheet'],
                         project.task['to']['tab'],
                         project.task['to'].get('range', 'A2'), rows)

        elif 'sftp' in project.task['to']:
            rows = query_to_rows(project.task['auth'],
                                 project.id,
                                 project.task['from']['dataset'],
                                 project.task['from']['query'],
                                 legacy=project.task['from'].get(
                                     'use_legacy_sql', True))

            if rows:
                if project.verbose: print("QUERY TO SFTP")
                put_rows(project.task['auth'], project.task['to'], rows)

        else:
            if project.verbose:
                print("QUERY TO VIEW", project.task['to']['view'])
            query_to_view(
                project.task['auth'],
                project.id,
                project.task['to']['dataset'],
                project.task['to']['view'],
                query_parameters(project.task['from']['query'],
                                 project.task['from'].get('parameters')),
                project.task['from'].get(
                    'legacy', project.task['from'].get(
                        'useLegacySql', True)),  # DEPRECATED: useLegacySql
                project.task['to'].get('replace', False))
    else:
        if project.verbose:
            print("STORAGE TO TABLE", project.task['to']['table'])
        storage_to_table(
            project.task['auth'], project.id, project.task['to']['dataset'],
            project.task['to']['table'], project.task['from']['bucket'] + ':' +
            project.task['from']['path'], project.task.get('schema', []),
            project.task.get('skip_rows', 1),
            project.task.get('structure', 'CSV'),
            project.task.get('disposition', 'WRITE_TRUNCATE'))
Exemplo n.º 20
0
def dbm():
    """Run a DBM (DV360) report task: delete, create, and/or download a report.

    Steps, each gated on keys in project.task:
      * report.title -> copied to report.name (legacy translation).
      * 'delete'     -> remove the existing report by id or name.
      * report.type  -> create via individual parameters (LEGACY path).
      * report.body  -> create via the full report JSON body, with optional
                        filters expanded through get_rows().
      * 'out'        -> download the report file, clean its rows, and write
                        them via put_rows().

    Reads configuration from the module-level ``project`` object; returns
    nothing.
    """
    if project.verbose: print('DBM')

    # legacy translations ( changed report title to name )
    if 'title' in project.task['report']:
        project.task['report']['name'] = project.task['report']['title']

    # check if report is to be deleted
    if project.task.get('delete', False):
        if project.verbose: print('DBM DELETE', end='')
        report_delete(project.task['auth'],
                      project.task['report'].get('report_id', None),
                      project.task['report'].get('name', None))

    # check if report is to be created ( LEGACY, DO NOT USE, SEE body format below )
    # REASON: this call tried to pass all parts of the json as parameters, this does not scale
    #         the new body call simply passes the report json in, leaving flexibility in the JSON recipe
    if 'type' in project.task['report']:

        if project.verbose: print('DBM CREATE', end='')

        # Partner/advertiser filters may be indirected via get_rows sources.
        partners = get_rows(
            project.task['auth'], project.task['report']
            ['partners']) if 'partners' in project.task['report'] else []
        advertisers = get_rows(
            project.task['auth'], project.task['report']['advertisers']
        ) if 'advertisers' in project.task['report'] else []

        # NOTE(review): the default 'America/Los Angeles' is missing the IANA
        # underscore ('America/Los_Angeles') — confirm the API accepts this.
        report_create(
            project.task['auth'], project.task['report']['name'],
            project.task['report']['type'], partners, advertisers,
            project.task['report'].get('filters'),
            project.task['report'].get('dimensions'),
            project.task['report'].get('metrics'),
            project.task['report'].get('data_range'),
            project.task['report'].get('timezone', 'America/Los Angeles'),
            project.id, project.task['report'].get('dataset_id', None))

    # check if report is to be created
    if 'body' in project.task['report']:
        if project.verbose:
            print('DBM BUILD',
                  project.task['report']['body']['metadata']['title'])

        # filters can be passed using special get_rows handler, allows reading values from sheets etc...
        if 'filters' in project.task['report']:
            for f, d in project.task['report']['filters'].items():
                for v in get_rows(project.task['auth'], d):
                    project.task['report']['body']['params'].setdefault(
                        'filters', []).append({
                            "type": f,
                            "value": v
                        })

        # create the report
        report = report_build(project.task['auth'],
                              project.task['report']['body'])

    # moving a report
    if 'out' in project.task:

        filename, report = report_file(
            project.task['auth'],
            project.task['report'].get('report_id', None),
            project.task['report'].get('name', None),
            project.task['report'].get('timeout', 10), DBM_CHUNKSIZE)

        # if a report exists
        if report:
            if project.verbose: print('DBM FILE', filename)

            # clean up the report
            rows = report_to_rows(report)
            rows = report_clean(rows,
                                datastudio=project.task.get(
                                    'datastudio', False),
                                nulls=True)

            # write rows using standard out block in json ( allows customization across all scripts )
            if rows: put_rows(project.task['auth'], project.task['out'], rows)
Exemplo n.º 21
0
def barnacle():
    """Pull Campaign Manager entities for the configured accounts and write
    each collection to its output table via put_rows/put_json.

    Reads configuration from the module-level ``project`` object. The
    PROFILE_*, SITE_CONTACTS and REPORT_DELIVERIES globals are exported as-is;
    presumably they are filled by the preceding get_* calls — verify against
    their definitions.
    """
    if project.verbose: print('BARNACLE')

    accounts = set(get_rows("user", project.task['accounts']))

    out_auth = project.task['out']['auth']

    def _export(table, schema, data):
        # Route one row collection to its destination table.
        put_rows(out_auth, put_json(table, schema), data)

    _export('CM_Accounts', ACCOUNTS_SCHEMA, get_accounts(accounts))

    _export('CM_Profiles', PROFILES_SCHEMA, get_profiles(accounts))

    if project.verbose: print('DCM Profile Campaigns')
    _export('CM_Profile_Campaigns', PROFILE_CAMPAIGNS_SCHEMA,
            PROFILE_CAMPAIGNS)

    if project.verbose: print('DCM Profile Sites')
    _export('CM_Profile_Sites', PROFILE_SITES_SCHEMA, PROFILE_SITES)

    if project.verbose: print('DCM Profile Roles')
    _export('CM_Profile_Roles', PROFILE_ROLES_SCHEMA, PROFILE_ROLES)

    if project.verbose: print('DCM Profile Advertisers')
    _export('CM_Profile_Advertisers', PROFILE_ADVERTISERS_SCHEMA,
            PROFILE_ADVERTISERS)

    _export('CM_SubAccounts', SUBACCOUNTS_SCHEMA, get_subaccounts(accounts))

    _export('CM_Advertisers', ADVERTISERS_SCHEMA, get_advertisers(accounts))

    _export('CM_Campaigns', CAMPAIGNS_SCHEMA, get_campaigns(accounts))

    _export('CM_Sites', SITES_SCHEMA, get_sites(accounts))

    if project.verbose: print('DCM Site Contacts')
    _export('CM_Site_Contacts', SITE_CONTACTS_SCHEMA, SITE_CONTACTS)

    _export('CM_Roles', ROLES_SCHEMA, get_roles(accounts))

    if project.task.get('reports', False) == True:
        _export('CM_Reports', REPORTS_SCHEMA, get_reports(accounts))

        if project.verbose: print('DCM Deliveries')
        _export('CM_Report_Deliveries', REPORT_DELIVERIES_SCHEMA,
                REPORT_DELIVERIES)
Exemplo n.º 22
0
def frequency_cap_audit():
    """Audit the Frequency Caps sheet tab and stage patch candidates.

    Copies the 'Frequency Caps' sheet tab into the SHEET_FrequencyCaps
    BigQuery table, then builds two views:
      * AUDIT_FrequencyCaps - input errors found in the sheet values.
      * PATCH_FrequencyCaps - sheet rows free of ERROR-severity audit hits.

    Reads configuration from the module-level ``project`` object; returns
    nothing.
    """

    # Pull the editable tab from the sheet; columns A:M match the schema below.
    sheet_rows = get_rows(
        project.task["auth_sheets"],
        {"sheets": {
            "sheet": project.task["sheet"],
            "tab": "Frequency Caps",
            "range": "A2:M"
        }})

    # Each *_Edit column holds the user-entered override of its base column.
    schema = [{"name": column, "type": kind} for column, kind in (
        ("Partner", "STRING"),
        ("Advertiser", "STRING"),
        ("Campaign", "STRING"),
        ("Insertion_Order", "STRING"),
        ("Line_Item", "STRING"),
        ("Unlimited", "BOOLEAN"),
        ("Unlimited_Edit", "BOOLEAN"),
        ("Time_Unit", "STRING"),
        ("Time_Unit_Edit", "STRING"),
        ("Time_Count", "INTEGER"),
        ("Time_Count_Edit", "INTEGER"),
        ("Max_impressions", "INTEGER"),
        ("Max_impressions_Edit", "INTEGER"),
    )]

    put_rows(
        project.task["auth_bigquery"],
        {"bigquery": {
            "dataset": project.task["dataset"],
            "table": "SHEET_FrequencyCaps",
            "schema": schema,
            "format": "CSV"
        }},
        sheet_rows)

    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "AUDIT_FrequencyCaps",
                  """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Frequency Caps' AS Operation,
            CASE WHEN Unlimited_Edit IS TRUE THEN
              CASE WHEN Time_Unit_Edit IS NOT NULL OR Time_Count_Edit IS NOT NULL OR Max_Impressions_Edit IS NOT NULL THEN 'Time Unit and the Other Options are Mutually Exclusive.'
              ELSE NULL
             END
            ELSE
              IF(Time_Unit_Edit IS NULL OR Time_Count_Edit IS NULL OR Max_Impressions_Edit IS NULL, 'If Time Unit is FALSE, the other options are required.', NULL)
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, Insertion_Order, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_FrequencyCaps`
        )
        WHERE
          Error IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      ;
    """.format(**project.task),
                  legacy=False)

    query_to_view(project.task["auth_bigquery"],
                  project.id,
                  project.task["dataset"],
                  "PATCH_FrequencyCaps",
                  """SELECT *
      FROM `{dataset}.SHEET_FrequencyCaps`
      WHERE Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
      AND Insertion_Order NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
      AND Campaign NOT IN (SELECT Id FROM `{dataset}.AUDIT_FrequencyCaps` WHERE Severity='ERROR')
    """.format(**project.task),
                  legacy=False)
Exemplo n.º 23
0
def frequency_cap_patch(commit=False):
    """Build PATCH operations for frequency caps that changed in the sheet.

    Reads the PATCH_FrequencyCaps table, diffs each base column against its
    *_Edit twin, and builds one PATCH per changed row targeting the line
    item, insertion order, or campaign named on that row.

    Args:
      commit: when True, apply the patches via the *_commit helpers;
        otherwise only preview them.
    """
    patches = []

    rows = get_rows(
        project.task["auth_bigquery"],
        {"bigquery": {
            "dataset": project.task["dataset"],
            "table": "PATCH_FrequencyCaps",
        }},
        as_object=True)

    # Sheet column -> DV360 frequencyCap field; each is compared to *_Edit.
    edits = (
        ("Unlimited", "unlimited"),
        ("Time_Unit", "timeUnit"),
        ("Time_Count", "timeUnitCount"),
        ("Max_impressions", "maxImpressions"),
    )

    for row in rows:

        body = {}
        for column, field in edits:
            if row[column] != row[column + "_Edit"]:
                body.setdefault("frequencyCap",
                                {})[field] = row[column + "_Edit"]

        if not body:
            continue

        patch = {
            "operation": "Frequency Caps",
            "action": "PATCH",
            "partner": row['Partner'],
            "advertiser": row['Advertiser'],
            "parameters": {
                "advertiserId": lookup_id(row['Advertiser']),
                "body": body
            }
        }

        # Most specific target wins: line item, then insertion order, then
        # campaign.
        if row['Line_Item']:
            patch["line_item"] = row['Line_Item']
            patch["parameters"]["lineItemId"] = lookup_id(row['Line_Item'])
        elif row['Insertion_Order']:
            patch["insertion_order"] = row['Insertion_Order']
            patch["parameters"]["insertionOrderId"] = lookup_id(
                row['Insertion_Order'])
        else:
            patch["campaign"] = row['Campaign']
            patch["parameters"]["campaignId"] = lookup_id(row['Campaign'])

        patches.append(patch)

    patch_masks(patches)

    if commit:
        insertion_order_commit(patches)
        line_item_commit(patches)
        campaign_commit(patches)
    else:
        patch_preview(patches)
Exemplo n.º 24
0
def audit_load():
    """Run every audit and publish the combined results to the Audit tab.

    Each *_audit() call materializes an AUDIT_* view in BigQuery; the union
    of those views is then queried and written to the sheet's 'Audit' tab.
    """

    # Order matches the original call sequence.
    for audit in (bid_strategy_audit,
                  integration_detail_audit,
                  frequency_cap_audit,
                  line_item_map_audit,
                  pacing_audit,
                  partner_cost_audit,
                  segment_audit,
                  insertion_order_audit,
                  line_item_audit):
        audit()

    # Reset the Audit tab before writing fresh results.
    sheets_clear(project.task['auth_sheets'], project.task['sheet'], 'Audit',
                 'A2')

    # Union every AUDIT_* view into a single result set.
    audit_rows = get_rows(
        project.task['auth_bigquery'],
        {'bigquery': {
            'dataset': project.task['dataset'],
            'query': """SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_InsertionOrders`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_Segments`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_LineItems`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_LineItemMaps`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_Pacing`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_BidStrategy`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_FrequencyCaps`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_PartnerCosts`
      UNION ALL
        SELECT Operation, Severity, Id, Error
        FROM `{dataset}.AUDIT_IntegrationDetails`
      """.format(**project.task),
            'legacy': False
        }})

    put_rows(
        project.task['auth_sheets'],
        {'sheets': {
            'sheet': project.task['sheet'],
            'tab': 'Audit',
            'range': 'A2'
        }},
        audit_rows)
Exemplo n.º 25
0
def line_item_audit():
  """Audit the Line Items sheet tab and stage insert/patch candidates.

  Copies the 'Line Items' sheet tab into the SHEET_LineItems BigQuery table,
  then builds three views:
    * INSERT_LineItems - API-shaped bodies for line items named in the sheet
      but absent from DV_LineItems (joined against the other SHEET_* tabs).
    * AUDIT_LineItems  - input errors plus duplicate-insert warnings.
    * PATCH_LineItems  - sheet rows free of ERROR-severity audit hits.

  Reads configuration from the module-level ``project`` object; returns
  nothing.
  """

  # Pull the editable tab from the sheet; columns A:Z match the schema below.
  rows = get_rows(
    project.task["auth_sheets"],
    { "sheets": {
      "sheet": project.task["sheet"],
      "tab": "Line Items",
      "range": "A2:Z"
    }}
  )

  # Each *_Edit column holds the user-entered override of its base column.
  put_rows(
    project.task["auth_bigquery"],
    { "bigquery": {
      "dataset": project.task["dataset"],
      "table": "SHEET_LineItems",
      "schema": [
        { "name": "Partner", "type": "STRING" },
        { "name": "Advertiser", "type": "STRING" },
        { "name": "Campaign", "type": "STRING" },
        { "name": "Insertion_Order", "type": "STRING" },
        { "name": "Line_Item", "type": "STRING" },
        { "name": "Action", "type": "STRING" },
        { "name": "Status", "type": "STRING" },
        { "name": "Status_Edit", "type": "STRING" },
        { "name": "Warning", "type": "STRING" },
        { "name": "Line_Item_Type", "type": "STRING" },
        { "name": "Line_Item_Type_Edit", "type": "STRING" },
        { "name": "Flight_Data_Type", "type": "STRING" },
        { "name": "Flight_Data_Type_Edit", "type": "STRING" },
        { "name": "Flight_Start_Date", "type": "STRING" },
        { "name": "Flight_Start_Date_Edit", "type": "STRING" },
        { "name": "Flight_End_Date", "type": "STRING" },
        { "name": "Flight_End_Date_Edit", "type": "STRING" },
        { "name": "Flight_Trigger", "type": "STRING" },
        { "name": "Flight_Trigger_Edit", "type": "STRING" },
        { "name": "Budget_Allocation_Type", "type": "STRING" },
        { "name": "Budget_Allocation_Type_Edit", "type": "STRING" },
        { "name": "Budget_Unit", "type": "STRING" },
        { "name": "Budget_Unit_Edit", "type": "STRING" },
        { "name": "Budget_Max", "type": "FLOAT" },
        { "name": "Budget_Max_Edit", "type": "FLOAT" },
        { "name": "Partner_Revenue_Model", "type": "STRING" },
        { "name": "Partner_Revenue_Model_Edit", "type": "STRING" },
        { "name": "Partner_Markup", "type": "FLOAT" },
        { "name": "Partner_Markup_Edit", "type": "FLOAT" },
        { "name": "Conversion_Percent", "type": "FLOAT" },
        { "name": "Conversion_Percent_Edit", "type": "FLOAT" },
        { "name": "Targeting_Expansion_Level", "type": "STRING" },
        { "name": "Targeting_Expansion_Level_Edit", "type": "STRING" },
        { "name": "Exclude_1P", "type": "STRING" },
        { "name": "Exclude_1P_Edit", "type": "STRING" },
      ],
      "format": "CSV"
    }},
    rows
  )

  # Create Insert View
  # Rows flagged Action=INSERT and not already present in DV_LineItems are
  # shaped into API request bodies; ids are parsed from the ' - <id>' suffix.
  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "INSERT_LineItems",
    """SELECT
      REGEXP_EXTRACT(S_LI.Advertiser, r' - (\d+)$') AS advertiserId,
      REGEXP_EXTRACT(S_LI.Campaign, r' - (\d+)$') AS campaignId,
      REGEXP_EXTRACT(S_LI.Insertion_Order, r' - (\d+)$') AS insertionOrderId,
      S_LI.Line_Item AS displayName,
      S_LI.Line_Item_Type_Edit AS lineItemType,
      S_LI.Status_Edit AS entityStatus,
      STRUCT(
        S_PC.Cost_Type_Edit As costType,
        S_PC.Fee_Type_Edit As feeType,
        S_PC.Invoice_Type_Edit AS invoiceType,
        S_PC.Fee_Amount_Edit AS feeAmount,
        S_PC.Fee_Percent_Edit * 1000 AS feePercentageMillis
      ) AS partnerCosts,
      STRUCT(
        S_LI.Flight_Data_Type_Edit AS flightDateType,
        STRUCT (
          STRUCT (
            EXTRACT(YEAR FROM CAST(S_LI.Flight_Start_Date_Edit AS Date)) AS year,
            EXTRACT(MONTH FROM CAST(S_LI.Flight_Start_Date_Edit AS DATE)) AS month,
            EXTRACT(DAY FROM CAST(S_LI.Flight_Start_Date_Edit AS DATE)) AS day
          ) AS startDate,
          STRUCT (
            EXTRACT(YEAR FROM CAST(S_LI.Flight_End_Date_Edit AS Date)) AS year,
            EXTRACT(MONTH FROM CAST(S_LI.Flight_End_Date_Edit AS DATE)) AS month,
            EXTRACT(DAY FROM CAST(S_LI.Flight_End_Date_Edit AS DATE)) AS day
          ) AS endDate
        ) AS dateRange,
        S_LI.Flight_Trigger_Edit AS triggerId
      ) AS flight,
      STRUCT(
        S_LI.Budget_Allocation_Type_Edit AS budgetAllocationType,
        S_LI.Budget_Unit_Edit AS budgetUnit,
        S_LI.Budget_Max_Edit * 100000 AS maxAmount
      ) AS budget,
      STRUCT(
        S_P.Period_Edit As pacingPeriod,
        S_P.Type_Edit As pacingType,
        S_P.Daily_Budget_Edit AS dailyMaxMicros,
        S_P.Daily_Impressions_Edit AS dailyMaxImpressions
      ) AS pacing,
      STRUCT(
        S_FC.Unlimited_Edit AS unlimited,
        S_FC.Time_Unit_Edit AS timeUnit,
        S_FC.Time_Count_Edit AS timeUnitCount,
        S_FC.Max_impressions_Edit AS maxImpressions
      ) AS frequencyCap,
      STRUCT(
        S_LI.Partner_Revenue_Model_Edit AS markupType,
        S_LI.Partner_Markup_Edit * 100000 AS markupAmount
      ) AS partnerRevenueModel,
      STRUCT(
        S_LI. Conversion_Percent_Edit * 1000 AS postViewCountPercentageMillis,
        [] AS floodlightActivityConfigs
      ) AS conversionCounting,
      STRUCT(
        IF(S_BS.Fixed_Bid_Edit IS NOT NULL,
          STRUCT(
            S_BS.Fixed_Bid_Edit * 100000 AS bidAmountMicros
          ),
          NULL
        ) AS fixedBid,
        IF(S_BS.Auto_Bid_Goal_Edit IS NOT NULL,
          STRUCT(
            S_BS.Auto_Bid_Goal_Edit AS performanceGoalType,
            S_BS.Auto_Bid_Amount_Edit * 100000 AS maxAverageCpmBidAmountMicros,
            S_BS.Auto_Bid_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS maximizeSpendAutoBid,
        IF(S_BS.Performance_Goal_Type_Edit IS NOT NULL,
          STRUCT(
            S_BS.Performance_Goal_Type_Edit AS performanceGoalType,
            S_BS.Performance_Goal_Amount_Edit * 100000 AS performanceGoalAmountMicros,
            S_BS.Performance_Goal_Average_CPM_Bid_Edit * 100000 AS maxAverageCpmBidAmountMicros,
            S_BS.Performance_Goal_Algorithm_Edit AS customBiddingAlgorithmId
          ),
          NULL
        ) AS performanceGoalAutoBid
      )
      AS bidStrategy,
      STRUCT(
        S_ID.Integration_Code_Edit AS integrationCode,
        S_ID.Details_Edit AS details
      ) AS integrationDetails,
      STRUCT(
        S_LI.Targeting_Expansion_Level_Edit AS targetingExpansionLevel,
        S_LI.Exclude_1P_Edit AS excludeFirstPartyAudience
      ) AS targetingExpansion
    FROM `{dataset}.SHEET_LineItems` AS S_LI
    LEFT JOIN `{dataset}.SHEET_PartnerCosts` AS S_PC ON S_LI.Line_Item=S_PC.Line_Item
    LEFT JOIN `{dataset}.SHEET_Pacing` AS S_P ON S_LI.Line_Item=S_P.Line_Item
    LEFT JOIN `{dataset}.SHEET_FrequencyCaps` AS S_FC ON S_LI.Line_Item=S_FC.Line_Item
    LEFT JOIN `{dataset}.SHEET_IntegrationDetails` AS S_ID ON S_LI.Line_Item=S_ID.Line_Item
    LEFT JOIN `{dataset}.SHEET_BidStrategy` AS S_BS ON S_LI.Line_Item=S_BS.Line_Item
    LEFT JOIN `{dataset}.DV_LineItems` AS DV_LI ON S_LI.Line_Item=DV_LI.displayName
    WHERE S_LI.Action="INSERT"
    AND DV_LI IS NULL
    """.format(**project.task),
    legacy=False
  )

  # Create Audit View
  # ERRORs are missing required fields; WARNINGs are duplicate insert names.
  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "AUDIT_LineItems",
    """WITH
      /* Check if sheet values are set */
      INPUT_ERRORS AS (
        SELECT
        *
        FROM (
          SELECT
            'Line Item' AS Operation,
            CASE
              WHEN Budget_Allocation_Type_Edit IS NULL THEN 'Missing Budget Allocation Type.'
              WHEN Budget_Unit_Edit IS NULL THEN 'Missing Budget Unit.'
            ELSE
              NULL
            END AS Error,
            'ERROR' AS Severity,
          COALESCE(Line_Item, 'BLANK') AS Id
        FROM
          `{dataset}.SHEET_LineItems`
        )
        WHERE
          Error IS NOT NULL
      ),
      /* Check duplicate inserts */
      DUPLICATE_ERRORS AS (
        SELECT
          'Line Item' AS Operation,
          'Duplicate Line Item name, insert will be ignored.' AS Error,
          'WARNING' AS Severity,
          COALESCE(S_LI.Line_Item, 'BLANK') AS Id
        FROM `{dataset}.SHEET_LineItems` As S_LI
        LEFT JOIN `{dataset}.DV_LineItems` AS DV_LI ON S_LI.Line_Item=DV_LI.displayName
        WHERE S_LI.Action="INSERT"
        AND DV_LI IS NOT NULL
      )

      SELECT * FROM INPUT_ERRORS
      UNION ALL
      SELECT * FROM DUPLICATE_ERRORS
    """.format(**project.task),
    legacy=False
  )

  # Create Patch View
  # Only rows clear of ERROR-severity audit hits are eligible for patching.
  query_to_view(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "PATCH_LineItems",
    """SELECT *
      FROM `{dataset}.SHEET_LineItems`
      WHERE Line_Item NOT IN (SELECT Id FROM `{dataset}.AUDIT_LineItems` WHERE Severity='ERROR')
    """.format(**project.task),
    legacy=False
  )
Exemplo n.º 26
0
def barnacle():
  """Extract Campaign Manager (DCM) account structure into reporting tables.

  Reads which endpoints to pull from project.task['endpoints'] and writes
  each result set through the standard put_rows(...) output helper.  Some
  fetchers (profiles, sites, reports) also fill module-level accumulators
  (PROFILE_*, SITE_CONTACTS, REPORT_DELIVERIES) as a side effect — those are
  flushed right after their parent endpoint is written, so ordering matters.

  Takes no parameters and returns nothing; operates on recipe JSON directly.
  """

  # Converted Python 2 print statements to print() calls for consistency
  # with the rest of the file.
  if project.verbose: print('BARNACLE')

  # Account filter; get_rows allows the list to come from sheets, BQ, etc.
  accounts = set(get_rows("user", project.task['accounts']))

  if 'accounts' in project.task['endpoints']:
    # Accounts
    rows = get_accounts(accounts)
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Accounts', ACCOUNTS_SCHEMA),
      "DCM_Accounts.csv",
      rows
    )

  if 'profiles' in project.task['endpoints']:
    # Profiles
    rows = get_profiles(accounts)
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Profiles', PROFILES_SCHEMA),
      "DCM_Profiles.csv",
      rows
    )

    # Profiles Campaigns (accumulated while iterating profiles above)
    if project.verbose: print('DCM Profile Campaigns')
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Profile_Campaigns', PROFILE_CAMPAIGNS_SCHEMA),
      "DCM_Profile_Campaigns.csv",
      PROFILE_CAMPAIGNS
    )

    # Profiles Sites
    if project.verbose: print('DCM Profile Sites')
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Profile_Sites', PROFILE_SITES_SCHEMA),
      "DCM_Profile_Sites.csv",
      PROFILE_SITES
    )

    # Profiles Roles
    if project.verbose: print('DCM Profile Roles')
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Profile_Roles', PROFILE_ROLES_SCHEMA),
      "DCM_Profile_Roles.csv",
      PROFILE_ROLES
    )

    # Profiles Advertisers
    if project.verbose: print('DCM Profile Advertisers')
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Profile_Advertisers', PROFILE_ADVERTISERS_SCHEMA),
      "DCM_Profile_Advertisers.csv",
      PROFILE_ADVERTISERS
    )

  if 'subaccounts' in project.task['endpoints']:
    # Subaccounts
    rows = get_subaccounts(accounts)
    put_rows(
      project.task['out']['auth'],
      put_json('CM_SubAccounts', SUBACCOUNTS_SCHEMA),
      "DCM_SubAccounts.csv",
      rows
    )

  if 'advertisers' in project.task['endpoints']:
    # Advertisers
    rows = get_advertisers(accounts)
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Advertisers', ADVERTISERS_SCHEMA),
      "DCM_Advertisers.csv",
      rows
    )

  # Changelogs endpoint intentionally disabled; left for reference.
  #if 'changelogs' in project.task['endpoints']:
  #  # Changelogs
  #  rows = get_changelogs(accounts)
  #  put_rows(
  #    project.task['out']['auth'],
  #    put_json('CM_ChangeLogs', CHANGELOGS_SCHEMA),
  #    "DCM_ChangeLogs.csv",
  #    rows
  #  )

  if 'campaigns' in project.task['endpoints']:
    # Campaigns
    rows = get_campaigns(accounts)
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Campaigns', CAMPAIGNS_SCHEMA),
      "DCM_Campaigns.csv",
      rows
    )

  if 'sites' in project.task['endpoints']:
    # Sites
    rows = get_sites(accounts)
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Sites', SITES_SCHEMA),
      "DCM_Sites.csv",
      rows
    )

    # Sites Contacts (accumulated while iterating sites above)
    if project.verbose: print('DCM Site Contacts')
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Site_Contacts', SITE_CONTACTS_SCHEMA),
      "DCM_Site_Contacts.csv",
      SITE_CONTACTS
    )

  if 'roles' in project.task['endpoints']:
    # Roles
    rows = get_roles(accounts)
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Roles', ROLES_SCHEMA),
      "DCM_Roles.csv",
      rows
    )

  if 'reports' in project.task['endpoints']:
    # Reports
    rows = get_reports(accounts)
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Reports', REPORTS_SCHEMA),
      "DCM_Reports.csv",
      rows
    )

    # Reports Deliveries (accumulated while iterating reports above)
    # Fixed filename typo: was "DCM_Report_Deliveriess.csv".
    if project.verbose: print('DCM Deliveries')
    put_rows(
      project.task['out']['auth'],
      put_json('CM_Report_Deliveries', REPORT_DELIVERIES_SCHEMA),
      "DCM_Report_Deliveries.csv",
      REPORT_DELIVERIES
    )
Exemplo n.º 27
0
def partner_cost_patch(commit=False):
    """Build PATCH operations for partner costs edited in the sheet.

    Reads the PATCH_PartnerCosts table, folds the rows into one patch per
    line item (or, failing that, insertion order), drops patches whose
    editable columns are unchanged, then previews or commits the result.

    Args:
      commit: (bool) write the patches to DV360 when True, otherwise preview.
    """

    pending = {}
    edited = set()

    source = get_rows(
        project.task["auth_bigquery"],
        {"bigquery": {
            "dataset": project.task["dataset"],
            "table": "PATCH_PartnerCosts",
        }},
        as_object=True
    )

    for record in source:

        # A row targets a line item when present, otherwise its insertion order.
        key = record['Line_Item'] or record['Insertion_Order']

        if key not in pending:
            pending[key] = {
                "operation": "Partner Costs",
                "action": "PATCH",
                "partner": record['Partner'],
                "advertiser": record['Advertiser'],
                "campaign": record['Campaign'],
                "parameters": {
                    "advertiserId": lookup_id(record['Advertiser']),
                    "body": {
                        "partnerCosts": []
                    }
                }
            }

        patch = pending[key]

        if record['Line_Item']:
            patch["line_item"] = record['Line_Item']
            patch["parameters"]["lineItemId"] = lookup_id(record['Line_Item'])
        else:
            patch["insertion_order"] = record['Insertion_Order']
            patch["parameters"]["insertionOrderId"] = lookup_id(
                record['Insertion_Order'])

        # Currency amounts are micros * 10; percentages are millis.
        fee_amount = None
        if record['Fee_Amount_Edit']:
            fee_amount = int(float(record['Fee_Amount_Edit']) * 100000)

        fee_percent = None
        if record['Fee_Percent_Edit']:
            fee_percent = int(float(record['Fee_Percent_Edit']) * 1000)

        patch["parameters"]["body"]["partnerCosts"].append({
            "costType": record['Cost_Type_Edit'],
            "feeType": record['Fee_Type_Edit'],
            "invoiceType": record['Invoice_Type_Edit'],
            "feeAmount": fee_amount,
            "feePercentageMillis": fee_percent
        })

        # Remember keys where any editable column actually differs.
        pairs = (
            (record['Cost_Type'], record['Cost_Type_Edit']),
            (record['Fee_Type'], record['Fee_Type_Edit']),
            (record['Invoice_Type'], record['Invoice_Type_Edit']),
            (record['Fee_Amount'], record['Fee_Amount_Edit']),
            (record['Fee_Percent'], record['Fee_Percent_Edit']),
        )
        if any(before != after for before, after in pairs):
            edited.add(key)

    # Keep only patches whose partner costs changed (insertion order preserved).
    patches = [patch for key, patch in pending.items() if key in edited]

    patch_masks(patches)

    if commit:
        insertion_order_commit(patches)
        line_item_commit(patches)
    else:
        patch_preview(patches)
Exemplo n.º 28
0
def line_item_patch(commit=False):
  """Build DELETE and PATCH operations for line items edited in the sheet.

  Reads the PATCH_LineItems table (audit-filtered sheet rows), diffs each
  current column against its *_Edit twin, and assembles the minimal DV360
  patch body per line item.  Always renders a preview; writes to DV360 only
  when commit is True.

  Args:
    commit: (bool) commit the assembled patches to DV360 when True.
  """

  def date_edited(value):
    # Convert 'YYYY-MM-DD' into the API's {year, month, day} Date object.
    # NOTE(review): components stay strings here — confirm the API coerces
    # them, otherwise cast to int.
    y, m, d = value.split("-")
    return {"year": y, "month": m, "day": d}

  patches = []

  rows = get_rows(
    project.task["auth_bigquery"],
    { "bigquery": {
      "dataset": project.task["dataset"],
      "table":"PATCH_LineItems",
    }},
    as_object=True
  )

  for row in rows:

    if row['Action'] == "DELETE":
      patches.append({
        "operation": "Line Items",
        "action": "DELETE",
        "partner": row['Partner'],
        "advertiser": row['Advertiser'],
        "campaign": row['Campaign'],
        "line_item": row['Line_Item'],
        "parameters": {
          "advertiserId": lookup_id(row['Advertiser']),
          "lineItemId": lookup_id(row['Line_Item'])
        }
      })

    elif row['Action'] == "PATCH":
      # Accumulate only the fields that actually changed in the sheet.
      line_item = {}

      if row['Line_Item_Type'] != row['Line_Item_Type_Edit']:
        line_item["lineItemType"] = row['Line_Item_Type_Edit']

      if row['Status'] != row['Status_Edit']:
        line_item['entityStatus'] = row['Status_Edit']

      if row['Flight_Data_Type'] != row['Flight_Data_Type_Edit']:
        line_item.setdefault("flight", {})
        line_item["flight"]["flightDateType"] = row['Flight_Data_Type_Edit']

      if row['Flight_Start_Date'] != row['Flight_Start_Date_Edit']:
        line_item.setdefault("flight", {}).setdefault("dateRange", {})
        line_item["flight"]["dateRange"]["startDate"] = date_edited(row['Flight_Start_Date_Edit'])

      if row['Flight_End_Date'] != row['Flight_End_Date_Edit']:
        # BUGFIX: was setdefault("endDate", {}), which never created
        # flight.dateRange and raised KeyError whenever only the end date
        # changed; mirror the startDate branch above.
        line_item.setdefault("flight", {}).setdefault("dateRange", {})
        line_item["flight"]["dateRange"]["endDate"] = date_edited(row['Flight_End_Date_Edit'])

      if row['Flight_Trigger'] != row['Flight_Trigger_Edit']:
        line_item.setdefault("flight", {})
        line_item["flight"]["triggerId"] = row['Flight_Trigger_Edit']

      if row['Budget_Allocation_Type'] != row['Budget_Allocation_Type_Edit']:
        line_item.setdefault("budget", {})
        line_item["budget"]["budgetAllocationType"] = row['Budget_Allocation_Type_Edit']

      if row['Budget_Unit'] != row['Budget_Unit_Edit']:
        line_item.setdefault("budget", {})
        line_item["budget"]["budgetUnit"] = row['Budget_Unit_Edit']

      if row['Budget_Max'] != row['Budget_Max_Edit']:
        line_item.setdefault("budget", {})
        # Currency amounts are expressed in 1/100000 units in the API.
        line_item["budget"]["maxAmount"] = int(
          float(row['Budget_Max_Edit']) * 100000
        )

      if row['Partner_Revenue_Model'] != row['Partner_Revenue_Model_Edit']:
        line_item.setdefault("partnerRevenueModel", {})
        line_item["partnerRevenueModel"]["markupType"] = row['Partner_Revenue_Model_Edit']

      if row['Partner_Markup'] != row['Partner_Markup_Edit']:
        line_item.setdefault("partnerRevenueModel", {})
        line_item["partnerRevenueModel"]["markupAmount"] = int(
          float(row['Partner_Markup_Edit']) * 100000
        )

      if row['Conversion_Percent'] != row['Conversion_Percent_Edit']:
        line_item.setdefault("conversionCounting", {})
        # Percentages are expressed in millis (1/1000) in the API.
        line_item["conversionCounting"]["postViewCountPercentageMillis"] = int(
          float(row['Conversion_Percent_Edit']) * 1000
        )

      if row['Targeting_Expansion_Level'] != row['Targeting_Expansion_Level_Edit']:
        line_item.setdefault("targetingExpansion", {})
        line_item["targetingExpansion"]["targetingExpansionLevel"] = row['Targeting_Expansion_Level_Edit']

      if row['Exclude_1P'] != row['Exclude_1P_Edit']:
        line_item.setdefault("targetingExpansion", {})
        line_item["targetingExpansion"]["excludeFirstPartyAudience"] = row['Exclude_1P_Edit']

      # Only emit a patch when at least one field changed.
      if line_item:
        patches.append({
          "operation": "Line Items",
          "action": "PATCH",
          "partner": row['Partner'],
          "advertiser": row['Advertiser'],
          "campaign": row['Campaign'],
          "line_item": row['Line_Item'],
          "parameters": {
            "advertiserId": lookup_id(row['Advertiser']),
            "lineItemId": lookup_id(row['Line_Item']),
            "body": line_item
          }
        })

  # Compute update masks from the assembled bodies, then always preview.
  patch_masks(patches)
  patch_preview(patches)

  if commit:
    line_item_commit(patches)
Exemplo n.º 29
0
def dcm():
  if project.verbose: print('DCM')

  # stores existing report json
  report = None

  # check if report is to be deleted
  if project.task.get('delete', False):
    if project.verbose: print('DCM DELETE', project.task['report'].get('name', None) or project.task['report'].get('body', {}).get('name', None) or project.task['report'].get('report_id', None))
    report_delete(
      project.task['auth'],
      project.task['report']['account'],
      project.task['report'].get('report_id', None),
      project.task['report'].get('name', None) or project.task['report'].get('body', {}).get('name', None),
    )

  # check if report is to be run
  if project.task.get('report_run_only', False):
    if project.verbose: print('DCM REPORT RUN', project.task['report'].get('name', None) or project.task['report'].get('report_id', None))
    report_run(
      project.task['auth'],
      project.task['report']['account'],
      project.task['report'].get('report_id', None),
      project.task['report'].get('name', None),
    )

  # check if report is to be created - DEPRECATED
  if 'type' in project.task['report']:
    if project.verbose: print('DCM CREATE')
    report = report_create(
      project.task['auth'],
      project.task['report']['account'],
      project.task['report']['name'],
      project.task['report']
    )

  # check if report is to be created
  if 'body' in project.task['report']:
    if project.verbose: print('DCM BUILD', project.task['report']['body']['name'])

    # filters can be passed using special get_rows handler, allows reading values from sheets etc...
    if 'filters' in project.task['report']:
      for f, d in project.task['report']['filters'].items():
        for v in get_rows(project.task['auth'], d):
          # accounts are specified in a unique part of the report json
          if f in 'accountId':
            project.task['report']['body']['accountId'] = v
          # activities are specified in a unique part of the report json
          elif f in 'dfa:activity':
            project.task['report']['body']['reachCriteria']['activities'].setdefault('filters', []).append({
              "kind":"dfareporting#dimensionValue",
              "dimensionName": f,
              "id": v
            })
          # all other filters go in the same place
          else:
            project.task['report']['body']['criteria'].setdefault('dimensionFilters', []).append({
              "kind":"dfareporting#dimensionValue",
              "dimensionName": f,
              "id": v,
              "matchType": "EXACT"
            })

    report = report_build(
      project.task['auth'],
      project.task['report']['body'].get('accountId') or project.task['report']['account'],
      project.task['report']['body']
    )

  # moving a report
  if 'out' in project.task:
    filename, report = report_file(
      project.task['auth'],
      project.task['report']['account'],
      project.task['report'].get('report_id', None),
      project.task['report'].get('name', None) or project.task['report'].get('body', {}).get('name', None),
      project.task['report'].get('timeout', 10),
    )

    if report:
      if project.verbose: print('DCM FILE', filename)

      # clean up the report
      rows = report_to_rows(report)
      rows = report_clean(rows)

      # if bigquery, remove header and determine schema
      schema = None
      if 'bigquery' in project.task['out']:
        schema = report_schema(next(rows))
        project.task['out']['bigquery']['schema'] = schema
        project.task['out']['bigquery']['skip_rows'] = 0

      # write rows using standard out block in json ( allows customization across all scripts )
      if rows: put_rows(project.task['auth'], project.task['out'], rows)
Exemplo n.º 30
0
def line_item_load():
  """Load DV360 line items into BigQuery and render them to the sheet.

  Pulls line items for every advertiser listed on the sheet's Advertisers
  tab, writes the raw API JSON to the DV_LineItems table, then queries a
  flattened, edit-ready view of those rows into the 'Line Items' sheet tab.
  Takes no parameters and returns nothing; operates on recipe JSON directly.
  """

  # load multiple partners from user defined sheet
  def line_item_load_multiple():
    # Advertiser identifiers, one per row, from column A of the sheet.
    rows = get_rows(
        project.task["auth_sheets"], {
            "sheets": {
                "sheet": project.task["sheet"],
                "tab": "Advertisers",
                "range": "A2:A"
            }
        })

    # String for filtering which entityStatus enums we want to see in the sheet
    filter_string = 'entityStatus = "ENTITY_STATUS_ARCHIVED" OR entityStatus="ENTITY_STATUS_PAUSED" OR entityStatus="ENTITY_STATUS_ACTIVE" OR entityStatus="ENTITY_STATUS_DRAFT"'
    for row in rows:
      # Sheet cells hold 'Name - id'; lookup_id extracts the trailing id.
      yield from API_DV360(
          project.task["auth_dv"], iterate=True).advertisers().lineItems().list(
              advertiserId=lookup_id(row[0]),
              filter=filter_string).execute()

  # write line_items to database, schema derived from the discovery document
  put_rows(
      project.task["auth_bigquery"], {
          "bigquery": {
              "dataset":
                  project.task["dataset"],
              "table":
                  "DV_LineItems",
              "schema":
                  Discovery_To_BigQuery(
                      "displayvideo",
                      "v1").method_schema("advertisers.lineItems.list"),
              "format":
                  "JSON"
          }
      }, line_item_load_multiple())

  # write line_items to sheet; most columns are duplicated side by side so the
  # sheet shows a current value next to an editable copy for later diffing
  rows = get_rows(
    project.task["auth_bigquery"],
    { "bigquery": {
      "dataset": project.task["dataset"],
      "query": """SELECT
        CONCAT(P.displayName, ' - ', P.partnerId),
        CONCAT(A.displayName, ' - ', A.advertiserId),
        CONCAT(C.displayName, ' - ', C.campaignId),
        CONCAT(I.displayName, ' - ', I.insertionOrderId),
        CONCAT(L.displayName, ' - ', L.lineItemId),
        'PATCH',
        L.entityStatus,
        L.entityStatus,
        ARRAY_TO_STRING(L.warningMessages, '\\n'),

        L.lineItemType,
        L.lineItemType,

        L.flight.flightDateType,
        L.flight.flightDateType,
        CONCAT(L.flight.dateRange.startDate.year, '-', L.flight.dateRange.startDate.month, '-', L.flight.dateRange.startDate.day),
        CONCAT(L.flight.dateRange.startDate.year, '-', L.flight.dateRange.startDate.month, '-', L.flight.dateRange.startDate.day),
        CONCAT(L.flight.dateRange.endDate.year, '-', L.flight.dateRange.endDate.month, '-', L.flight.dateRange.endDate.day),
        CONCAT(L.flight.dateRange.endDate.year, '-', L.flight.dateRange.endDate.month, '-', L.flight.dateRange.endDate.day),
        L.flight.triggerId,
        L.flight.triggerId,

        L.budget.budgetAllocationType,
        L.budget.budgetAllocationType,
        L.budget.budgetUnit,
        L.budget.budgetUnit,
        L.budget.maxAmount / 100000,
        L.budget.maxAmount / 100000,

        L.partnerRevenueModel.markupType,
        L.partnerRevenueModel.markupType,
        CAST(L.partnerRevenueModel.markupAmount AS FLOAT64) / 100000,
        CAST(L.partnerRevenueModel.markupAmount AS FLOAT64) / 100000,

        CAST(L.conversionCounting.postViewCountPercentageMillis AS Float64) / 1000,
        CAST(L.conversionCounting.postViewCountPercentageMillis AS Float64) / 1000,

        L.targetingExpansion.targetingExpansionLevel,
        L.targetingExpansion.targetingExpansionLevel,
        L.targetingExpansion.excludeFirstPartyAudience,
        L.targetingExpansion.excludeFirstPartyAudience,

        FROM `{dataset}.DV_LineItems` AS L
        LEFT JOIN `{dataset}.DV_Advertisers` AS A
        ON L.advertiserId=A.advertiserId
        LEFT JOIN `{dataset}.DV_Campaigns` AS C
        ON L.campaignId=C.campaignId
        LEFT JOIN `{dataset}.DV_InsertionOrders` AS I
        ON L.insertionOrderId=I.insertionOrderId
        LEFT JOIN `{dataset}.DV_Partners` AS P
        ON A.partnerId=P.partnerId
        ORDER BY I.displayName, L.displayName
      """.format(**project.task),
      "legacy": False
    }}
  )

  # overwrite the tab starting at A2, leaving the header row intact
  put_rows(
    project.task["auth_sheets"],
    { "sheets": {
      "sheet": project.task["sheet"],
      "tab": "Line Items",
      "range": "A2"
    }},
    rows
  )