Example #1
0
def main():
  """Command line helper for debugging DV360 (DBM) reports.

  Depending on the flags given, prints a report's json definition, its
  derived schema, a sample of its rows, or the list of all visible reports.
  """

  description = textwrap.dedent('''\
      Command line to help debug DV360 reports and build reporting tools.

      Examples:
        To get list of reports: python helper.py --list -u [user credentials path]
        To get report json: python helper.py --report [id] -u [user credentials path]
        To get report schema: python helper.py --schema [id] -u [user credentials path]
        To get report sample: python helper.py --sample [id] -u [user credentials path]

  ''')

  parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description=description)

  # one switch per debugging mode, all optional
  for flag, text in (
      ('--report', 'report ID to pull json definition'),
      ('--schema', 'report ID to pull schema format'),
      ('--sample', 'report ID to pull sample data')):
    parser.add_argument(flag, help=text, default=None)
  parser.add_argument('--list', help='list reports', action='store_true')

  # hand the parser to the framework so the shared -u/-c/-s/-v flags are attached
  project.from_commandline(parser=parser, arguments=('-u', '-c', '-s', '-v'))
  auth = 'service' if project.args.service else 'user'

  if project.args.report:
    # fetch and pretty-print the raw report definition
    definition = API_DBM(auth).queries().getquery(queryId=project.args.report).execute()
    print(json.dumps(definition, indent=2, sort_keys=True))

  elif project.args.schema:
    # download the report file and derive a schema from its typed rows
    filename, report = report_file(auth, project.args.schema, None, 10)
    rows = rows_to_type(report_clean(report_to_rows(report)))
    print(json.dumps(get_schema(rows)[1], indent=2, sort_keys=True))

  elif project.args.sample:
    # download the report file and print the first 20 typed rows
    filename, report = report_file(auth, project.args.sample, None, 10)
    rows = rows_to_type(report_clean(report_to_rows(report)))
    for _ in rows_print(rows, row_min=0, row_max=20):
      pass

  else:
    # default: list every report visible to these credentials
    for definition in API_DBM(auth, iterate=True).queries().listqueries().execute():
      print(json.dumps(definition, indent=2, sort_keys=True))
Example #2
0
def dbm():
    """Process a single DBM report task: optionally delete, build, and download.

    Reads its configuration from project.task['report']; when
    project.task['out'] is present, downloads the finished report and hands
    its cleaned rows to put_rows.
    """
    if project.verbose:
        print('DBM')

    # alias to the task's report configuration ( same dict, mutations shared )
    task_report = project.task['report']

    # name is redundant if title is given, allow skipping name when only a body is supplied
    if 'body' in task_report and 'name' not in task_report:
        task_report['name'] = task_report['body']['metadata']['title']

    # deletion requested?
    if project.task.get('delete', False):
        if project.verbose:
            print('DBM DELETE')
        report_delete(
            project.task['auth'],
            task_report.get('report_id', None),
            task_report.get('name', None))

    # creation requested?
    if 'body' in task_report:
        if project.verbose:
            print('DBM BUILD', task_report['body']['metadata']['title'])

        # check if filters given ( returns a new body )
        if 'filters' in task_report:
            task_report['body'] = report_filter(
                project.task['auth'],
                task_report['body'],
                task_report['filters'])

        # create the report
        report = report_build(project.task['auth'], task_report['body'])

    # download requested?
    if 'out' in project.task:

        filename, report = report_file(
            project.task['auth'],
            task_report.get('report_id', None),
            task_report.get('name', None),
            task_report.get('timeout', 10))

        # only proceed if the report file exists
        if report:
            if project.verbose:
                print('DBM FILE', filename)

            # convert the raw report into cleaned rows
            rows = report_clean(report_to_rows(report))

            # write rows using standard out block in json ( allows customization across all scripts )
            if rows:
                put_rows(project.task['auth'], project.task['out'], rows)
def sov_process_client(report_name):
    """Download the client DBM report and aggregate impressions into SOV rows.

    Ported to Python 3: the original used bare ``print`` statements and the
    ``long`` builtin, which do not exist in Python 3; the rest of this file
    already uses Python 3 syntax.

    Args:
        report_name: name of the DBM report to download via report_file.

    Returns:
        List of SOV rows: [Report_Day, Year_Month, Advertiser_Type,
        Platform_Type, Creative_Type, State_Region, Designated_Market_Area,
        Client_Impressions, Peer_Impressions]. Empty if the report is not
        ready yet.
    """
    sov_rows = {}

    # Download DBM report: ['Advertiser', 'Advertiser ID', 'Advertiser Status', 'Advertiser Integration Code', 'Date', 'Month', 'Device Type', 'Creative Type', 'Region', 'Region ID', 'City', 'City ID', 'Impressions']
    #                        0             1                2                    3                              4       5        6              7                8         9            10      11         12
    filename, report = report_file(project.task['auth'], None, report_name, 60,
                                   DBM_CHUNKSIZE)

    # if a report exists
    if report:
        if project.verbose: print('CLIENT FILE', filename)

        # convert report to array
        rows = report_to_rows(report)
        rows = report_clean(rows, datastudio=True, nulls=True)
        rows = rows_header_trim(rows)

        # pull only needed fields ( see: SCHEMA )
        for row in rows:
            key = ''.join(row[4:-1])  # Everything except impressions

            # if the key is already tracked, just add the impressions
            if key in sov_rows:
                sov_rows[key][7] += int(row[12])

            # otherwise, create a new client row
            else:
                sov_rows[key] = [
                    row[4],  # 0 Report_Day
                    row[5],  # 1 Year_Month
                    'Client',  # 2 Advertiser_Type
                    row[6],  # 3 Platform_Type
                    row[7],  # 4 Creative_Type
                    RE_STATE.sub('', row[8]),  # 5 State_Region
                    row[10],  # 6 Designated_Market_Area
                    int(row[12]),  # 7 Client_Impressions
                    0  # 8 Peer_Impressions
                ]

    else:
        if project.verbose: print('SOV REPORT NOT READY YET')

    # return a list ( dict.values() was a list in Python 2, a view in Python 3 )
    return list(sov_rows.values())
Example #4
0
def dbm():
    """Process a DBM report task: delete, create (legacy or body form), download.

    Reads configuration from project.task['report']. When project.task['out']
    is present, downloads the finished report and hands its rows to put_rows.
    """
    if project.verbose: print('DBM')

    # legacy translations ( changed report title to name )
    if 'title' in project.task['report']:
        project.task['report']['name'] = project.task['report']['title']

    # check if report is to be deleted
    if project.task.get('delete', False):
        if project.verbose: print('DBM DELETE', end='')
        report_delete(project.task['auth'],
                      project.task['report'].get('report_id', None),
                      project.task['report'].get('name', None))

    # check if report is to be created ( LEGACY, DO NOT USE, SEE body format below )
    # REASON: this call tried to pass all parts of the json as parameters, this does not scale
    #         the new body call simply passes the report json in, leaving flexibility in the JSON recipe
    if 'type' in project.task['report']:

        if project.verbose: print('DBM CREATE', end='')

        # partner / advertiser lists may be sourced from sheets etc. via get_rows
        partners = get_rows(
            project.task['auth'], project.task['report']
            ['partners']) if 'partners' in project.task['report'] else []
        advertisers = get_rows(
            project.task['auth'], project.task['report']['advertisers']
        ) if 'advertisers' in project.task['report'] else []

        report_create(
            project.task['auth'], project.task['report']['name'],
            project.task['report']['type'], partners, advertisers,
            project.task['report'].get('filters'),
            project.task['report'].get('dimensions'),
            project.task['report'].get('metrics'),
            project.task['report'].get('data_range'),
            # FIX: IANA zone ids use underscores; 'America/Los Angeles' ( with a
            # space ) is not a valid timezone identifier.
            project.task['report'].get('timezone', 'America/Los_Angeles'),
            project.id, project.task['report'].get('dataset_id', None))

    # check if report is to be created
    if 'body' in project.task['report']:
        if project.verbose:
            print('DBM BUILD',
                  project.task['report']['body']['metadata']['title'])

        # filters can be passed using special get_rows handler, allows reading values from sheets etc...
        if 'filters' in project.task['report']:
            for f, d in project.task['report']['filters'].items():
                for v in get_rows(project.task['auth'], d):
                    project.task['report']['body']['params'].setdefault(
                        'filters', []).append({
                            "type": f,
                            "value": v
                        })

        # create the report
        report = report_build(project.task['auth'],
                              project.task['report']['body'])

    # moving a report
    if 'out' in project.task:

        filename, report = report_file(
            project.task['auth'],
            project.task['report'].get('report_id', None),
            project.task['report'].get('name', None),
            project.task['report'].get('timeout', 10), DBM_CHUNKSIZE)

        # if a report exists
        if report:
            if project.verbose: print('DBM FILE', filename)

            # clean up the report
            rows = report_to_rows(report)
            rows = report_clean(rows,
                                datastudio=project.task.get(
                                    'datastudio', False),
                                nulls=True)

            # write rows using standard out block in json ( allows customization across all scripts )
            if rows: put_rows(project.task['auth'], project.task['out'], rows)
  # NOTE(review): orphaned fragment — this duplicates the tail of main() ( see
  # the first example above ) in Python 2 syntax ( bare print statements ), but
  # its enclosing "def" line and the body of the final for-loop are missing
  # from this chunk. Not valid on its own; candidate for removal.
  parser.add_argument('--sample', help='report ID to pull sample data', default=None)
  parser.add_argument('--list', help='list reports', action='store_true')

  # initialize project
  project.from_commandline(parser=parser)
  auth = 'service' if project.args.service else 'user'

  # get report
  if project.args.report:
    report = API_DBM(auth).queries().getquery(queryId=project.args.report).execute()
    print json.dumps(report, indent=2, sort_keys=True)

  # get schema
  elif project.args.schema:
    filename, report = report_file(auth, project.args.schema, None, 10)
    rows = report_to_rows(report)
    rows = report_clean(rows)
    rows = rows_to_type(rows)
    print json.dumps(get_schema(rows)[1], indent=2, sort_keys=True)

  # get sample
  elif project.args.sample:
    filename, report = report_file(auth, project.args.sample, None, 10)
    rows = report_to_rows(report)
    rows = report_clean(rows)
    rows = rows_to_type(rows)
    for r in rows_print(rows, row_min=0, row_max=20): pass

  # get list
  else:
    for report in API_DBM(auth, iterate=True).queries().listqueries().execute():
def sov_process_peer(report_name):
    """Download the peer DBM report, aggregate impressions, and enforce anonymity.

    Ported to Python 3 ( print function, ``int`` instead of ``long``, ``//``
    to keep the original integer division ) and guarded against a zero
    impression total in the percent calculation.

    Args:
        report_name: name of the DBM report to download via report_file.

    Returns:
        List of SOV rows: [Report_Day, Year_Month, Advertiser_Type,
        Platform_Type, Creative_Type, State_Region, Designated_Market_Area,
        Client_Impressions, Peer_Impressions] with impressions credited to the
        peer column. Empty if the report is not ready yet.

    Raises:
        Exception: when the advertiser mix breaks anonymity ( one advertiser
        above mix_ratio_high percent, or fewer than 5 advertisers ).
    """
    sov_rows = {}
    sov_mix = {}
    mix_ratio_high = 50  # max percent of total impressions for any one advertiser
    warnings = []
    errors = []

    # Download DBM report: ['Advertiser', 'Advertiser ID', 'Advertiser Status', 'Advertiser Integration Code', 'Date', 'Month', 'Device Type', 'Creative Type', 'Region', 'Region ID', 'City', 'City ID', 'Impressions']
    #                        0             1                2                    3                              4       5        6              7                8         9            10      11         12
    filename, report = report_file(project.task['auth'], None, report_name, 60,
                                   DBM_CHUNKSIZE)

    # if a report exists
    if report:
        if project.verbose: print('CLIENT FILE', filename)

        # convert report to array
        rows = report_to_rows(report)
        rows = report_clean(rows, datastudio=True, nulls=True)
        rows = rows_header_trim(rows)

        for row in rows:
            key = ''.join(row[4:-1])  # Everything except impressions

            # track advertiser level mix ( row[1] is the Advertiser ID column )
            sov_mix[row[1]] = sov_mix.get(row[1], 0) + int(row[12])

            # if peer is in sov, then just add the impressions
            if key in sov_rows:
                sov_rows[key][8] += int(row[12])

            # otherwise, create a new anonymous peer row
            else:
                sov_rows[key] = [
                    row[4],  # 0 Report_Day
                    row[5],  # 1 Year_Month
                    'Peer',  # 2 Advertiser_Type
                    row[6],  # 3 Platform_Type
                    row[7],  # 4 Creative_Type
                    RE_STATE.sub('', row[8]),  # 5 State_Region
                    row[10],  # 6 Designated_Market_Area
                    0,  # 7 Client_Impressions
                    int(row[12])  # 8 Peer_Impressions
                ]

        # CHECK: Mix must be right, make sure we've got obfuscated data, no peer has more than 50%
        mix_total = sum(sov_mix.values())

        for account, impressions in sov_mix.items():
            # floor division preserves the original Python 2 integer division;
            # guard against a mix where every advertiser has zero impressions
            percent = (100 * impressions) // mix_total if mix_total else 0
            if project.verbose:
                print('EXPECTED MIX %d%% ACTUAL MIX: %s %d%%' % (
                    mix_ratio_high, account, percent))

            if impressions == 0:
                warnings.append('Warning advertiser %s has no impressions.' %
                                account)
            elif percent > mix_ratio_high:
                errors.append(
                    'Advertiser %s has too much weight %d%%, expected under %d%%, add other big peers!'
                    % (account, percent, mix_ratio_high))

        if len(sov_mix) < 5:
            errors.extend(warnings)
            errors.append(
                'Need at least 5 DBM advertisers with impressions to ensure anonymity!'
            )

        # raise all errors at once so user can clean up multiple errors at once
        if errors: raise Exception('\n'.join(errors))

    else:
        if project.verbose: print('SOV REPORT NOT READY YET')

    # return a list ( dict.values() was a list in Python 2, a view in Python 3 )
    return list(sov_rows.values())