def first_and_third_party_audience_clear(config, task):
  """Recreate the DV_First_And_Third_Party_Audiences table as an empty shell.

  The schema is discovered from the DV360 API so later writes have a
  guaranteed structure.
  """
  audience_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('firstAndThirdPartyAudiences.list')
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_First_And_Third_Party_Audiences',
    audience_schema
  )
def cm_placement_group_clear(config, task):
  """Recreate the CM_PlacementGroups table using the CM API list schema."""
  pg_schema = Discovery_To_BigQuery(
    'dfareporting',
    'v3.4'
  ).method_schema('placementGroups.list', iterate=True)
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'CM_PlacementGroups',
    pg_schema
  )
def channel_clear():
  """Recreate the DV_Channels table using the DV360 channels list schema.

  Uses the legacy module-level `project` object for credentials and ids.
  """
  channel_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('advertisers.channels.list')
  table_create(
    project.task['auth_bigquery'],
    project.id,
    project.task['dataset'],
    'DV_Channels',
    channel_schema
  )
def run_dv_360_queries(project):
  """Run the DV360 browser-report query chain, writing each result table.

  Creates the empty custom segments table, builds the DV360 segments table,
  then runs each SQL file in sequence into its destination table.

  Args:
    project: recipe/project object supplying `id`, `verbose` and
      `task['dataset']`.
  """
  def _sql_path(filename):
    # All query files live under SQL_DIRECTORY next to this module; this
    # removes the four-times-repeated os.path.join/dirname expression.
    return os.path.join(os.path.dirname(__file__), SQL_DIRECTORY + filename)

  # Create empty DV360 Custom Segments table for join until sheet is created
  table_create('service', project.id, project.task['dataset'],
               DV360_CUSTOM_SEGMENTS_TABLE)

  # Create DV360 Segments Table
  create_dv360_segments(project)

  # Clean DV360 Browser Report
  run_query_from_file(_sql_path(CLEAN_BROWSER_REPORT_FILENAME), project.id,
                      project.task['dataset'], CLEAN_BROWSER_REPORT_TABLE)

  # Browser Performance 2 years
  if project.verbose:
    print('RUN Browser Performance 2 years Query')
  run_query_from_file(_sql_path(BROWSER_PERFORMANCE_2YEARS_FILENAME),
                      project.id, project.task['dataset'],
                      BROWSER_PERFORMANCE_2YEARS_TABLE)

  # Safari Distribution 90 days
  if project.verbose:
    print('RUN Safari Distribution 90 days Query')
  run_query_from_file(_sql_path(SAFARI_DISTRIBUTION_90DAYS_FILENAME),
                      project.id, project.task['dataset'],
                      SAFARI_DISTRIBUTION_90DAYS_TABLE)

  # Browser Shares Multichart
  if project.verbose:
    print('RUN Dv360 Browser Share Multichart')
  run_query_from_file(_sql_path(DV360_BROWSER_SHARES_MULTICHART_FILENAME),
                      project.id, project.task['dataset'],
                      DV360_BROWSER_SHARES_MULTICHART_TABLE)
def cm_site_clear(config, task):
  """Recreate the CM_Sites table using the CM sites.list schema."""
  site_schema = Discovery_To_BigQuery(
    'dfareporting',
    'v3.4'
  ).method_schema('sites.list', iterate=True)
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'CM_Sites',
    site_schema
  )
def edit_clear():
  """Reset the edit preview/log tables and wipe the feedback sheet tabs."""
  # Recreate the two BigQuery staging tables.
  for table_name, table_schema in (
      ('EDIT_Preview', SCHEMA_PREVIEW),
      ('EDIT_Log', SCHEMA_LOG),
  ):
    table_create(
      project.task['auth_bigquery'],
      project.id,
      project.task['dataset'],
      table_name,
      table_schema
    )
  # Clear each feedback tab below its header row.
  for tab in ('Preview', 'Error', 'Success'):
    sheets_clear(
      project.task['auth_sheets'],
      project.task['sheet'],
      tab,
      'A2:Z'
    )
def location_list_clear():
  """Recreate the DV_Location_Lists table from the DV360 API schema."""
  ll_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('advertisers.locationLists.list')
  table_create(
    project.task['auth_bigquery'],
    project.id,
    project.task['dataset'],
    'DV_Location_Lists',
    ll_schema
  )
def combined_audience_clear():
  """Recreate the DV_Combined_Audiences table from the DV360 API schema."""
  ca_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('combinedAudiences.list')
  table_create(
    project.task['auth_bigquery'],
    project.id,
    project.task['dataset'],
    'DV_Combined_Audiences',
    ca_schema
  )
def insertion_order_clear(config, task):
  """Recreate the DV_InsertionOrders table from the DV360 API schema."""
  io_schema = Discovery_To_BigQuery(
    "displayvideo",
    "v1"
  ).method_schema("advertisers.insertionOrders.list")
  table_create(
    config,
    task["auth_bigquery"],
    config.project,
    task["dataset"],
    "DV_InsertionOrders",
    io_schema
  )
def sdf_to_bigquery(config, auth, sdf_zip_file, project_id, dataset, time_partitioned_table, create_single_day_table, table_suffix=''):
  """Load each CSV inside an SDF zip archive into BigQuery tables.

  For every file in the archive: derive a table name from the file name,
  infer a schema from the header row, optionally write a dated snapshot
  table, and write/append the rows to the rolling table.

  Args:
    config: configuration object; `config.verbose` gates log output.
    auth (string): credential name used for the BigQuery calls.
    sdf_zip_file: file object holding the zipped SDF download.
    project_id (string): BigQuery project to write to.
    dataset (string): BigQuery dataset to write to.
    time_partitioned_table (bool): if True the rolling table is created
      time-partitioned and rows are appended; otherwise it is truncated.
    create_single_day_table (bool): if True, also write a per-day snapshot
      table suffixed with YYYY_MM_DD.
    table_suffix (string): optional suffix appended to every table name.
  """
  with zipfile.ZipFile(sdf_zip_file, 'r', zipfile.ZIP_DEFLATED) as d:
    file_names = d.namelist()
    for file_name in file_names:
      if config.verbose:
        print('SDF: Loading: ' + file_name)

      with d.open(file_name) as sdf_file:
        rows = csv_to_rows(sdf_file.read().decode('utf-8'))
        if not rows:
          if config.verbose:
            print('SDF: Empty file ' + file_name)
          continue

        table_name = file_name.split('.')[0].replace(
            '-', '_') + table_suffix
        # NOTE(review): if csv_to_rows returns a one-shot iterator, this
        # next() consumes the header and the later skip_rows=1 would drop a
        # data row — confirm csv_to_rows semantics against its definition.
        schema = sdf_schema(next(rows))

        # Check if each SDF should have a dated table
        if create_single_day_table:
          table_name_dated = table_name + date.today().strftime(
              '%Y_%m_%d')

          # Create table and upload data
          # BUG FIX: `config` was missing as the first argument here; every
          # other table_create/table_exists/rows_to_table call in this
          # function passes it first.
          table_create(config, auth, project_id, dataset, table_name_dated)
          rows_to_table(config, auth, project_id, dataset, table_name_dated,
                        rows, schema=schema, skip_rows=1,
                        disposition='WRITE_TRUNCATE')

        # Create end result table if it doesn't already exist
        if not table_exists(config, auth, project_id, dataset, table_name):
          table_create(config, auth, project_id, dataset, table_name,
                       is_time_partition=time_partitioned_table)

        rows_to_table(config, auth, project_id, dataset, table_name, rows,
                      schema=schema, skip_rows=1,
                      disposition='WRITE_APPEND'
                      if time_partitioned_table else 'WRITE_TRUNCATE')
def run_dv_360_queries(project):
  """Run the DV360 browser-report query chain using predefined Queries.

  NOTE(review): this module also contains an earlier definition of
  run_dv_360_queries; this later one shadows it at import time.
  """
  # Create empty DV360 Custom Segments table for join until sheet is created
  table_create('service', project.id, project.task['dataset'],
               DV360_CUSTOM_SEGMENTS_TABLE)

  # Create DV360 Segments Table
  create_dv360_segments(project)

  # Clean DV360 Browser Report
  run_query_from_file(Queries.clean_browser_report, CLEAN_BROWSER_REPORT_TABLE)

  # Browser Performance 2 years
  if project.verbose:
    print('RUN Browser Performance 2 years Query')
  run_query_from_file(Queries.browser_2_year, BROWSER_PERFORMANCE_2YEARS_TABLE)

  # Safari Distribution 90 days
  if project.verbose:
    print('RUN Safari Distribution 90 days Query')
  run_query_from_file(Queries.safari_distribution_90days,
                      SAFARI_DISTRIBUTION_90DAYS_TABLE)

  # Browser Shares Multichart
  if project.verbose:
    print('RUN Dv360 Browser Share Multichart')
  run_query_from_file(Queries.browser_share_multichart,
                      DV360_BROWSER_SHARES_MULTICHART_TABLE)
def inventory_group_clear(config, task):
  """Recreate the DV_Inventory_Groups table from the DV360 API schema."""
  ig_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('inventorySourceGroups.list')
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_Inventory_Groups',
    ig_schema
  )
def custom_list_clear(config, task):
  """Recreate the DV_Custom_Lists table from the DV360 API schema."""
  cl_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('customLists.list')
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_Custom_Lists',
    cl_schema
  )
def google_api_build_errors(config, auth, api_call, errors):
  """Builds the BigQuery table to house the Google API call errors.

  Optional piece of the recipe, will create a BigQuery table for errors.
  Takes errors, which defines a bigquery endpoint, and adds fields.  If
  errors has no 'bigquery' key, it is returned unchanged.

  Args:
    config: configuration object; `config.project` supplies the BigQuery
      project id for the table create call.
    auth (string): either "user" or "service" to make the BigQuery call,
      used as a fallback when errors['bigquery'] has no 'auth' of its own.
    api_call (dict): the JSON for the API call as defined in recipe.
    errors (dict): defines where the data will be written.

  Returns (dict):
    A modified results JSON with additional API values added.

  Raises:
    ValueError: If a required key in the recipe is missing.
  """
  if 'bigquery' in errors:
    # Force the canonical error layout regardless of what the recipe set.
    errors['bigquery']['schema'] = ERROR_SCHEMA
    errors['bigquery']['format'] = 'JSON'
    errors['bigquery']['skip_rows'] = 0
    errors['bigquery']['disposition'] = 'WRITE_TRUNCATE'

    table_create(
      config,
      errors['bigquery'].get('auth', auth),
      config.project,
      errors['bigquery']['dataset'],
      errors['bigquery']['table'],
      errors['bigquery']['schema'],
      overwrite=False
    )

  return errors
def negative_keyword_list_clear():
  """Recreate the DV_Negative_Keywod_Lists table from the DV360 API schema.

  NOTE(review): the table name misspells 'Keyword' as 'Keywod'; it is kept
  byte-for-byte because downstream queries may reference the existing name.
  """
  nkl_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('advertisers.negativeKeywordLists.list')
  table_create(
    project.task['auth_bigquery'],
    project.id,
    project.task['dataset'],
    'DV_Negative_Keywod_Lists',
    nkl_schema
  )
def line_item_clear(config, task):
  """Recreate the DV_LineItems table and clear the Line Items sheet range."""
  li_schema = Discovery_To_BigQuery(
    "displayvideo",
    "v1"
  ).method_schema("advertisers.lineItems.list")
  table_create(
    config,
    task["auth_bigquery"],
    config.project,
    task["dataset"],
    "DV_LineItems",
    li_schema
  )
  sheets_clear(
    config,
    task["auth_sheets"],
    task["sheet"],
    "Line Items",
    "B2:H"
  )
def advertiser_clear():
  """Recreate the DV_Advertisers table and clear the Advertisers sheet tab."""
  adv_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('advertisers.list')
  table_create(
    project.task['auth_bigquery'],
    project.id,
    project.task['dataset'],
    'DV_Advertisers',
    adv_schema
  )
  sheets_clear(
    project.task['auth_sheets'],
    project.task['sheet'],
    'Advertisers',
    'B2:D'
  )
def dv_advertiser_clear(config, task):
  """Recreate the DV_Advertisers table and clear the DV Advertisers tab."""
  adv_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('advertisers.list')
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_Advertisers',
    adv_schema
  )
  sheets_clear(
    config,
    task['auth_sheets'],
    task['sheet'],
    'DV Advertisers',
    'B2:D'
  )
def line_item_clear():
  """Recreate the DV_LineItems table and clear the Line Items sheet range.

  Legacy variant using the module-level `project` object.
  """
  li_schema = Discovery_To_BigQuery(
    "displayvideo",
    "v1"
  ).method_schema("advertisers.lineItems.list")
  table_create(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "DV_LineItems",
    li_schema
  )
  sheets_clear(
    project.task["auth_sheets"],
    project.task["sheet"],
    "Line Items",
    "A2:AI"
  )
def inventory_source_clear(config, task):
  """Recreate the DV_Inventory_Sources table and clear its sheet tab."""
  is_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('inventorySources.list')
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_Inventory_Sources',
    is_schema
  )
  sheets_clear(
    config,
    task['auth_sheets'],
    task['sheet'],
    'Inventory Sources',
    'A2:Z'
  )
def insertion_order_clear():
  """Recreate the DV_InsertionOrders table and clear its sheet tab.

  Legacy variant using the module-level `project` object.
  """
  io_schema = Discovery_To_BigQuery(
    "displayvideo",
    "v1"
  ).method_schema("advertisers.insertionOrders.list")
  table_create(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "DV_InsertionOrders",
    io_schema
  )
  sheets_clear(
    project.task["auth_sheets"],
    project.task["sheet"],
    "Insertion Orders",
    "A2:Z"
  )
def cm_profile_clear(config, task):
  """Recreate the CM_Profiles table and clear the CM Profiles sheet tab."""
  profile_schema = Discovery_To_BigQuery(
    'dfareporting',
    'v3.4'
  ).method_schema('userProfiles.list', iterate=True)
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'CM_Profiles',
    profile_schema
  )
  sheets_clear(
    config,
    task['auth_sheets'],
    task['sheet'],
    'CM Profiles',
    'B2:E'
  )
def dv_algorithm_clear(config, task):
  """Recreate the DV_Algorithms table and clear the DV Algorithms tab."""
  algo_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('customBiddingAlgorithms.list')
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_Algorithms',
    algo_schema
  )
  sheets_clear(
    config,
    task['auth_sheets'],
    task['sheet'],
    'DV Algorithms',
    'B2:D'
  )
def partner_clear():
  """Recreate the DV_Partners table and clear the Partners sheet tab."""
  partner_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('partners.list')
  table_create(
    project.task['auth_bigquery'],
    project.id,
    project.task['dataset'],
    'DV_Partners',
    partner_schema,
  )
  sheets_clear(
    project.task['auth_sheets'],
    project.task['sheet'],
    'Partners',
    'B2:Z'
  )
def campaign_clear():
  """Recreate the DV_Campaigns table and clear the Campaigns sheet tab.

  Legacy variant using the module-level `project` object.
  """
  campaign_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('advertisers.campaigns.list')
  table_create(
    project.task['auth_bigquery'],
    project.id,
    project.task['dataset'],
    'DV_Campaigns',
    campaign_schema,
  )
  sheets_clear(
    project.task['auth_sheets'],
    project.task['sheet'],
    'Campaigns',
    'B2:Z'
  )
def creative_clear():
  """Recreate the DV_Creatives table and clear the Creatives sheet tab."""
  creative_schema = Discovery_To_BigQuery(
    "displayvideo",
    "v1"
  ).method_schema("advertisers.creatives.list")
  table_create(
    project.task["auth_bigquery"],
    project.id,
    project.task["dataset"],
    "DV_Creatives",
    creative_schema,
  )
  sheets_clear(
    project.task["auth_sheets"],
    project.task["sheet"],
    "Creatives",
    "B2:Z"
  )
def patch_clear(config, task):
  """Reset the patch preview/log tables and wipe the feedback sheet tabs."""
  # Recreate the two BigQuery staging tables.
  for table_name, table_schema in (
      ('PATCH_Preview', SCHEMA_PREVIEW),
      ('PATCH_Log', SCHEMA_LOG),
  ):
    table_create(
      config,
      task['auth_bigquery'],
      config.project,
      task['dataset'],
      table_name,
      table_schema
    )
  # Clear each feedback tab below its header row.
  for tab in ('Preview', 'Error', 'Success'):
    sheets_clear(config, task['auth_sheets'], task['sheet'], tab, 'A2:Z')
def campaign_clear(config, task):
  """Recreate the DV_Campaigns table from the DV360 API schema."""
  campaign_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('advertisers.campaigns.list')
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_Campaigns',
    campaign_schema
  )
def google_audience_clear(config, task):
  """Recreate the DV_Google_Audiences table from the DV360 API schema."""
  ga_schema = Discovery_To_BigQuery(
    'displayvideo',
    'v1'
  ).method_schema('googleAudiences.list')
  table_create(
    config,
    task['auth_bigquery'],
    config.project,
    task['dataset'],
    'DV_Google_Audiences',
    ga_schema
  )
def google_api_build_errors(auth, api_call, errors):
  """Builds the BigQuery table to house the Google API call errors.

  Legacy variant that reads the project id from the module-level `project`
  object.  If errors defines a 'bigquery' endpoint, its schema, format,
  skip_rows and disposition are forced to the error-logging defaults and the
  destination table is created if missing; otherwise errors is returned
  unchanged.

  Args:
    auth (string): either "user" or "service" to make the BigQuery call,
      used as a fallback when errors['bigquery'] has no 'auth' of its own.
    api_call (dict): the JSON for the API call as defined in recipe.
    errors (dict): defines where the data will be written.

  Returns (dict):
    The errors JSON, possibly augmented with BigQuery write settings.
  """
  if 'bigquery' in errors:
    # Force the canonical error layout regardless of what the recipe set.
    errors['bigquery']['schema'] = ERROR_SCHEMA
    errors['bigquery']['format'] = 'JSON'
    errors['bigquery']['skip_rows'] = 0
    errors['bigquery']['disposition'] = 'WRITE_TRUNCATE'

    table_create(
      errors['bigquery'].get('auth', auth),
      project.id,
      errors['bigquery']['dataset'],
      errors['bigquery']['table'],
      errors['bigquery']['schema'],
      overwrite=False
    )

  return errors