def report_main(config, task, name, dateRange, schedule, advertiser, campaign, shadow=True):
  """Build, fetch, and store the 'Main Advertiser' Dynamic Costs report.

  Args:
    config: project configuration ( provides verbose flag ).
    task: recipe task with 'auth', 'account', and 'out' settings.
    name: label used in both the report name and the output table name.
    dateRange: CM360 dateRange criteria object.
    schedule: CM360 schedule object.
    advertiser: advertiser id the report is filtered to.
    campaign: campaign id the report is filtered to.
    shadow: when False, DBM cost is pulled in this report instead of a
      separate shadow advertiser report.

  Returns:
    Name of the BigQuery table the report was written to.
  """

  if config.verbose:
    print('DYNAMIC COSTS MAIN:', name)

  # if not using shadow advertiser, pull DBM cost here
  metrics = ['dfa:impressions', 'dfa:clicks']
  if not shadow:
    metrics.append('dfa:dbmCost')

  # restrict the report to a single advertiser and campaign
  filters = [{
    'kind': 'dfareporting#dimensionValue',
    'dimensionName': 'dfa:advertiser',
    'id': advertiser,
    'matchType': 'EXACT'
  }, {
    'kind': 'dfareporting#dimensionValue',
    'dimensionName': 'dfa:campaign',
    'id': campaign,
    'matchType': 'EXACT'
  }]

  body = {
    'kind': 'dfareporting#report',
    'type': 'STANDARD',
    'name': 'Dynamic Costs %s - Main Advertiser ( StarThinker )' % name,
    'schedule': schedule,
    'criteria': {
      'dateRange': dateRange,
      'dimensionFilters': filters,
      'dimensions': [{
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:placement'
      }, {
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:placementId'
      }],
      'metricNames': metrics
    }
  }

  # create the report if it does not already exist
  report = report_build(config, task['auth'], task['account'], body)

  # fetch the most recent ready report file ( waits up to 60 minutes )
  filename, filedata = report_file(config, task['auth'], task['account'],
                                   report['id'], None, 60, DCM_CHUNK_SIZE)

  # write report to a uniquely named table to avoid collisions
  table_name = 'Dynamic_Costs_%s_Main_Advertiser' % name
  write_report(config, task, filedata, task['out']['dataset'], table_name)

  return table_name
def report_shadow(config, task, name, dateRange, schedule, advertiser, campaign):
  """Build, fetch, and store the 'Shadow Advertiser' DBM cost report.

  Args:
    config: project configuration ( provides verbose flag ).
    task: recipe task with 'auth', 'account', and 'out' settings.
    name: label used in both the report name and the output table name.
    dateRange: CM360 dateRange criteria object.
    schedule: CM360 schedule object.
    advertiser: shadow advertiser id the report is filtered to.
    campaign: shadow campaign id the report is filtered to.

  Returns:
    Name of the BigQuery table the report was written to.
  """

  if config.verbose:
    print('DYNAMIC COSTS SHADOW:', name)

  # restrict the report to the shadow advertiser and campaign
  advertiser_filter = {
    'dimensionName': 'dfa:advertiser',
    'id': advertiser,
    'kind': 'dfareporting#dimensionValue',
    'matchType': 'EXACT'
  }
  campaign_filter = {
    'dimensionName': 'dfa:campaign',
    'id': campaign,
    'kind': 'dfareporting#dimensionValue',
    'matchType': 'EXACT'
  }

  body = {
    'kind': 'dfareporting#report',
    'type': 'STANDARD',
    'name': 'Dynamic Costs %s - Shadow Advertiser ( StarThinker )' % name,
    'schedule': schedule,
    'criteria': {
      'dateRange': dateRange,
      'dimensionFilters': [advertiser_filter, campaign_filter],
      'dimensions': [{
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:placement'
      }, {
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:placementId'
      }],
      # only DBM cost comes from the shadow advertiser
      'metricNames': ['dfa:dbmCost']
    }
  }

  # create the report if it does not already exist
  report = report_build(config, task['auth'], task['account'], body)

  # fetch the most recent ready report file
  filename, filedata = report_file(config, task['auth'], task['account'],
                                   report['id'])

  # write report to a uniquely named table to avoid collisions
  table_name = 'Dynamic_Costs_%s_Shadow_Advertiser' % name
  write_report(config, task, filedata, task['out']['dataset'], table_name)

  return table_name
def dcm_replicate_create(config, task, account, advertisers, name, template):
  """Create ( and optionally recreate ) one replicated report for an account.

  Args:
    config: project configuration.
    task: recipe task with 'auth' and 'report' settings.
    account: account id the report is created under.
    advertisers: advertiser ids injected into the report filter.
    name: name assigned to the created report.
    template: report body used as the starting point for the copy.
  """

  print('DCM REPLICATE CREATE', name)

  # optionally delete the prior report so it is rebuilt from the template
  if task['report'].get('delete', False):
    report_delete(config, task['auth'], account, None, name)

  # add account and advertiser filters ( report_filter returns a new dictionary )
  body = report_filter(config, task['auth'], template, {
    'accountId': {'values': account},
    'dfa:advertiser': {'values': advertisers}
  })
  body['name'] = name

  # create and run the report if it does not exist
  report = report_build(config, task['auth'], account, body)
def floodlight_report(config, task: dict, floodlight_id: int) -> int:
  """Create a daily monitoring report for one floodlight if it does not exist.

  Args:
    config: project configuration ( provides verbose flag ).
    task: recipe task with 'auth' and 'account' settings.
    floodlight_id: the floodlight being monitored.

  Returns:
    The id of the created ( or already existing ) report.
  """

  account_id, subaccount_id = parse_account(config, task['auth'],
                                            task['account'])

  name = 'Floodlight Monitor %s %s ( StarThinker )' % (account_id,
                                                       floodlight_id)

  if config.verbose:
    print('FLOODLIGHT MONITOR REPORT: ', name)

  # derive a file friendly name from the report name
  file_name = name.replace('( ', '').replace(' )', '').replace(' ', '_')

  # same kind for every dimension, only the field name varies
  dimensions = [
    {'kind': 'dfareporting#sortedDimension', 'name': 'dfa:%s' % field}
    for field in ('date', 'floodlightConfigId', 'activityGroupId',
                  'activityGroup', 'activityId', 'activity')
  ]

  today = date.today()

  # create report if it does not exist
  report = report_build(config, task['auth'], task['account'], {
    'kind': 'dfareporting#report',
    'type': 'FLOODLIGHT',
    'accountId': account_id,
    'name': name,
    'fileName': file_name,
    'format': 'CSV',
    'delivery': {
      'emailOwner': False
    },
    'floodlightCriteria': {
      'dateRange': {
        'kind': 'dfareporting#dateRange',
        'relativeDateRange': 'LAST_7_DAYS'
      },
      'dimensions': dimensions,
      'floodlightConfigId': {
        'dimensionName': 'dfa:floodlightConfigId',
        'kind': 'dfareporting#dimensionValue',
        'matchType': 'EXACT',
        'value': floodlight_id
      },
      'metricNames': ['dfa:floodlightImpressions'],
      'reportProperties': {
        'includeUnattributedCookieConversions': False,
        'includeUnattributedIPConversions': False
      }
    },
    # run daily for up to one year
    'schedule': {
      'active': True,
      'every': 1,
      'repeats': 'DAILY',
      'startDate': str(today),
      'expirationDate': str(today + timedelta(days=365)),
    },
  })

  return report['id']
def dcm(config, task):
  """Run a generic DCM report task: delete, run, build, and/or download a report.

  Each step is driven by keys present in the task dictionary:
    - task['delete']: delete the report identified by id or name.
    - task['report_run_only']: trigger a report run without downloading.
    - task['report']['body']: create the report ( optionally filtered ).
    - task['out']: download the latest file and write rows to the out block.

  Args:
    config: project configuration ( provides verbose flag ).
    task: recipe task with 'auth', 'report', and optional 'out' settings.
  """

  if config.verbose:
    print('DCM')

  # stores existing report json
  report = None

  # check if report is to be deleted
  if task.get('delete', False):
    if config.verbose:
      print(
        'DCM DELETE',
        task['report'].get('name', None) or
        task['report'].get('body', {}).get('name', None) or
        task['report'].get('report_id', None))

    # identified by report_id when given, otherwise by name
    report_delete(
      config,
      task['auth'],
      task['report']['account'],
      task['report'].get('report_id', None),
      task['report'].get('name', None) or
      task['report'].get('body', {}).get('name', None),
    )

  # check if report is to be run
  if task.get('report_run_only', False):
    if config.verbose:
      print(
        'DCM REPORT RUN',
        task['report'].get('name', None) or
        task['report'].get('report_id', None))

    report_run(
      config,
      task['auth'],
      task['report']['account'],
      task['report'].get('report_id', None),
      task['report'].get('name', None),
    )

  # check if report is to be created
  if 'body' in task['report']:
    if config.verbose:
      print('DCM BUILD', task['report']['body']['name'])

    # apply optional filters before building ( report_filter returns a new body )
    if 'filters' in task['report']:
      task['report']['body'] = report_filter(config, task['auth'],
                                             task['report']['body'],
                                             task['report']['filters'])

    # accountId inside the body takes precedence over the task account
    report = report_build(
      config, task['auth'],
      task['report']['body'].get('accountId') or task['report']['account'],
      task['report']['body'])

  # moving a report
  if 'out' in task:
    # fetch the latest ready file, waiting up to the configured timeout
    filename, report = report_file(
      config,
      task['auth'],
      task['report']['account'],
      task['report'].get('report_id', None),
      task['report'].get('name', None) or
      task['report'].get('body', {}).get('name', None),
      task['report'].get('timeout', 10),
    )

    # report_file returns no data when the file is not ready yet
    if report:
      if config.verbose:
        print('DCM FILE', filename)

      # clean up the report
      rows = report_to_rows(report)
      rows = report_clean(rows)

      # if bigquery, remove header and determine schema
      schema = None
      if 'bigquery' in task['out']:
        # consuming next(rows) pops the header row off the iterator
        schema = report_schema(next(rows))
        task['out']['bigquery']['schema'] = schema
        task['out']['bigquery']['skip_rows'] = 0

      # write rows using standard out block in json ( allows customization across all scripts )
      if rows:
        put_rows(config, task['auth'], task['out'], rows)
def run_floodlight_reports(config, task):
  """Create one floodlight report per config id, load each to BigQuery, union them.

  For every id in task['floodlightConfigIds'] a weekly FLOODLIGHT report is
  created ( or fetched if it exists ), its latest file is written to a
  z_Floodlight_CM_Report_<id> table, and finally all tables are unioned into
  CM_FLOODLIGHT_OUTPUT_TABLE.

  Args:
    config: project configuration ( provides verbose flag and project id ).
    task: recipe task with 'auth_cm', 'auth_bq', 'account', 'dataset',
      'floodlightConfigIds', and optional 'reportPrefix' / 'timeout' settings.
  """

  if config.verbose:
    print('Creating Floodlight reports')

  body = {
    "kind": "dfareporting#report",
    "name": '',  # this is updated below based on Floodlight Config ID
    "format": "CSV",
    "type": "FLOODLIGHT",
    "floodlightCriteria": {
      "dateRange": {
        "kind": "dfareporting#dateRange",
        "relativeDateRange": "LAST_60_DAYS"
      },
      "floodlightConfigId": {
        "kind": "dfareporting#dimensionValue",
        "dimensionName": "dfa:floodlightConfigId",
        "value": 0,  # updated below and replaced with Floodlight Config ID
        "matchType": "EXACT"
      },
      "reportProperties": {
        "includeUnattributedIPConversions": False,
        "includeUnattributedCookieConversions": True
      },
      "dimensions": [{
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:site"
      }, {
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:floodlightAttributionType"
      }, {
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:interactionType"
      }, {
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:pathType"
      }, {
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:browserPlatform"
      }, {
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:platformType"
      }, {
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:week"
      }, {
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:placementId"
      }, {
        "kind": "dfareporting#sortedDimension",
        "name": "dfa:floodlightConfigId"
      }],
      "metricNames": [
        "dfa:activityClickThroughConversions",
        "dfa:activityViewThroughConversions",
        "dfa:totalConversions",
        "dfa:totalConversionsRevenue"
      ]
    },
    "schedule": {
      "active": True,
      "repeats": "WEEKLY",
      "every": 1,
      "repeatsOnWeekDays": ["Sunday"]
    },
    "delivery": {
      "emailOwner": False
    }
  }

  reports = []

  # FIX: default to an empty list so a missing key does not raise TypeError
  floodlightConfigs = task.get('floodlightConfigIds', [])

  # create ( or fetch ) one report per floodlight config
  for configId in floodlightConfigs:
    body['name'] = task.get('reportPrefix', '') + "_" + str(configId)
    body['floodlightCriteria']['floodlightConfigId']['value'] = configId
    report = report_build(config, task['auth_cm'], task['account'], body)
    reports.append(report['id'])

  if config.verbose:
    print('Finished creating Floodlight reports - moving to BQ')

  queries = []

  for createdReportId in reports:
    # fetch the latest ready file, waiting up to the configured timeout
    filename, report = report_file(
      config,
      task['auth_cm'],
      task['account'],
      createdReportId,
      None,
      task.get('timeout', 10),
    )

    # report_file returns no data when the file is not ready yet
    if report:
      if config.verbose:
        print('Floodlight config report ', filename)

      # clean up the report
      rows = report_to_rows(report)
      rows = report_clean(rows)

      # determine schema from the header row ( next() pops it off the iterator )
      schema = report_schema(next(rows))

      out_block = {
        'bigquery': {
          'dataset': task['dataset'],
          'schema': schema,
          'skip_rows': 0,
          'table': 'z_Floodlight_CM_Report_' + str(createdReportId),
        }
      }

      # write rows using standard out block in json ( allows customization across all scripts )
      if rows:
        put_rows(config, task['auth_bq'], out_block, rows)
        queries.append('SELECT * FROM `{0}.{1}.{2}`'.format(
          config.project, out_block['bigquery']['dataset'],
          out_block['bigquery']['table']))

  # FIX: with no ready reports an empty UNION query would crash query_to_table
  if not queries:
    if config.verbose:
      print('No Floodlight report data ready, skipping join')
    return

  if config.verbose:
    print('Moved reports to BQ tables - starting join')

  finalQuery = ' UNION ALL '.join(queries)

  query_to_table(
    config,
    task['auth_bq'],
    config.project,
    task['dataset'],
    CM_FLOODLIGHT_OUTPUT_TABLE,
    finalQuery,
    legacy=False
  )

  if config.verbose:
    print('Finished with Floodlight Config reports')
def report_combos(config, task, name, dateRange, schedule, advertiser, campaign, dynamicProfile):
  """Build, fetch, and store the 'Dynamic Combos' report with all element fields.

  Args:
    config: project configuration ( provides verbose flag ).
    task: recipe task with 'auth', 'account', and 'out' settings.
    name: label used in both the report name and the output table name.
    dateRange: CM360 dateRange criteria object.
    schedule: CM360 schedule object.
    advertiser: advertiser id the report is filtered to.
    campaign: campaign id the report is filtered to.
    dynamicProfile: dynamic profile id the report is filtered to.

  Returns:
    Name of the BigQuery table the report was written to.
  """

  if config.verbose:
    print('DYNAMIC COSTS COMBOS:', name)

  # basic report schema, with no dynamic elements
  schema = {
    'kind': 'dfareporting#report',
    'type': 'STANDARD',
    'name': 'Dynamic Costs %s - Dynamic Combos ( StarThinker )' % name,
    'schedule': schedule,
    'criteria': {
      'dateRange': dateRange,
      'dimensionFilters': [{
        'kind': 'dfareporting#dimensionValue',
        'dimensionName': 'dfa:dynamicProfile',
        'id': dynamicProfile,
        'matchType': 'EXACT'
      }, {
        'kind': 'dfareporting#dimensionValue',
        'dimensionName': 'dfa:advertiser',
        'id': advertiser,
        'matchType': 'EXACT'
      }, {
        'kind': 'dfareporting#dimensionValue',
        'dimensionName': 'dfa:campaign',
        'id': campaign,
        'matchType': 'EXACT'
      }],
      'dimensions': [{
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:placement'
      }, {
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:placementId'
      }, {
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:activity'
      }, {
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:activityId'
      }],
      'metricNames': ['dfa:impressions', 'dfa:clicks', 'dfa:totalConversions']
    }
  }

  # add in all reasonable dynamic elements
  for i in range(1, 5 + 1):  # 5 elements/feeds
    for j in range(1, 6 + 1):  # 6 fields per element
      schema['criteria']['dimensions'].append({
        'kind': 'dfareporting#sortedDimension',
        'name': 'dfa:dynamicElement%iField%iValue' % (i, j)
      })

  # FIX: schema dump was an unconditional debug print, gate it behind verbose
  # like all other diagnostic output in this module
  if config.verbose:
    print(json.dumps(schema, indent=2))

  # create the report if it does not exist
  report = report_build(config, task['auth'], task['account'], schema)

  # fetch report file if it exists ( timeout = 0 means grab most recent ready )
  filename, filedata = report_file(config, task['auth'], task['account'],
                                   report['id'], None, 60, DCM_CHUNK_SIZE)

  # write report to a table ( avoid collisions as best as possible )
  table_name = 'Dynamic_Costs_%s_Dynamic_Combos' % name
  write_report(config, task, filedata, task['out']['dataset'], table_name)

  return table_name