def _create_click_conversion(
        client: GoogleAdsClient,
        customer_id: str,
        action_name: str,
        gclid: str,
        utc_dt: datetime,
) -> UploadClickConversionsResponse:
    """Upload one click conversion (by gclid) to an existing conversion action.

    Looks up the conversion action named *action_name* for *customer_id*,
    builds a ClickConversion proto for *gclid* stamped with *utc_dt* (UTC),
    and uploads it with partial-failure mode enabled.

    Raises:
        MissingResourceError: no conversion action with that name exists.
        Exception: the upload response contains a partial-failure error.
    """
    conversion_action = get_conversion_action(client, customer_id, action_name)
    if conversion_action is None:
        raise MissingResourceError(
            f"Could not find conversion_action with name {action_name}")

    click_conversion = client.get_type("ClickConversion", version="v6")
    click_conversion.conversion_action = conversion_action.resource_name
    click_conversion.gclid = gclid
    # API expects "yyyy-mm-dd hh:mm:ss+|-hh:mm"; the offset is hard-coded to UTC.
    click_conversion.conversion_date_time = utc_dt.strftime(
        "%Y-%m-%d %H:%M:%S+00:00")

    upload_service = client.get_service("ConversionUploadService", version="v6")
    response = upload_service.upload_click_conversions(
        customer_id, [click_conversion], partial_failure=True)

    # With partial_failure=True the call itself succeeds even when individual
    # conversions are rejected, so the response must be inspected explicitly.
    if is_partial_failure_error_present(response):
        print('Click conversion failed.')
        print(response)
        raise Exception(response)
    return response
def map_language_to_string_value(
        client: GoogleAdsClient,
        language_id: str,
) -> Any:  # google.protobuf.StringValue
    """Wrap the language-constant resource name for *language_id* in a
    google.protobuf.StringValue proto, as required by keyword-plan requests."""
    language_service = client.get_service(
        "LanguageConstantService", version="v3")
    wrapper = client.get_type("StringValue")
    wrapper.value = language_service.language_constant_path(language_id)
    return wrapper
def map_locations_to_string_values(
        client: GoogleAdsClient,
        location_ids: List[str],
) -> List:
    """Convert geo-target criterion IDs into StringValue protos holding the
    corresponding geo-target-constant resource names."""
    gtc_service = client.get_service("GeoTargetConstantService", version="v3")

    def _wrap(location_id: str):
        # google.protobuf.StringValue
        proto = client.get_type("StringValue")
        proto.value = gtc_service.geo_target_constant_path(location_id)
        return proto

    return [_wrap(location_id) for location_id in location_ids]
def get_conversion_action(client: GoogleAdsClient, customer_id: str,
                          name: str) -> "ConversionAction":
    """Look up a conversion action by its display name.

    Args:
        client: configured GoogleAdsClient.
        customer_id: digits-only customer ID to search under.
        name: exact display name of the conversion action.

    Returns:
        The first matching ConversionAction row, or None when no conversion
        action with that name exists (callers must handle the None case).
    """
    ga_service = client.get_service("GoogleAdsService", version="v6")
    # Escape single quotes so a display name containing ' cannot terminate
    # the GAQL string literal early (syntax error / query injection).
    safe_name = name.replace("'", "\\'")
    query = f"""
            SELECT conversion_action.id, conversion_action.name
            FROM conversion_action
            WHERE conversion_action.name = '{safe_name}'
            """
    response = ga_service.search_stream(customer_id, query=query)
    try:
        # Flatten the streamed batches and take the first matching row.
        row = next(row for batch in response for row in batch.results)
        return row.conversion_action
    except StopIteration:
        return None
def create_conversion_action(client: GoogleAdsClient, customer_id: str,
                             name: str) -> MutateConversionActionsResponse:
    """Create an ENABLED, DEFAULT-category, upload-clicks conversion action
    named *name* under *customer_id* and return the mutate response."""
    operation = client.get_type("ConversionActionOperation", version="v6")

    new_action = operation.create
    new_action.name = name
    new_action.type = client.get_type("ConversionActionTypeEnum").UPLOAD_CLICKS
    new_action.category = client.get_type("ConversionActionCategoryEnum").DEFAULT
    new_action.status = client.get_type("ConversionActionStatusEnum").ENABLED

    service = client.get_service("ConversionActionService", version="v6")
    return service.mutate_conversion_actions(customer_id, [operation])
def query_ga_campaign(query: str, client: GoogleAdsClient, customer_id: str):
    """Run a GAQL query and return the selected fields as a pandas DataFrame.

    The field paths are recovered from the query's own SELECT clause; each
    result row is flattened into a dict keyed by the last path segment
    (e.g. 'campaign.name' -> 'name').

    Args:
        query: GAQL query of the form 'SELECT a.b, c.d FROM ...'.
        client: configured GoogleAdsClient.
        customer_id: customer ID, with or without dashes.

    Returns:
        pd.DataFrame with one row per result and one column per selected field.

    Raises:
        ValueError: the SELECT clause could not be parsed from *query*.
        SystemExit: a GoogleAdsException occurred (errors are printed first).
    """
    ga_service = client.get_service('GoogleAdsService', version='v4')
    # Customer IDs are often written as XXX-XXX-XXXX; the API wants digits only.
    customer_id = customer_id.replace('-', '')
    # Issues a search request using streaming.
    response = ga_service.search_stream(customer_id, query=query)

    # Parse the selected field paths out of the query text itself.
    # Guard explicitly: re.search returns None on no match, and the original
    # `re.search(...)[0]` would raise an opaque TypeError.
    select_match = re.search('(?<=SELECT )(.*)(?= FROM)', query)
    if select_match is None:
        raise ValueError(
            f'Could not parse SELECT clause from query: {query}')
    extract_values = select_match[0].split(', ')

    campaigns = []
    try:
        for batch in response:
            for row in batch.results:
                campaign = {}
                for extract_value in extract_values:
                    attr = get_nested_attr(row, extract_value)
                    col_name = extract_value.split('.')[-1]
                    # Plain ints come through directly; other (proto wrapper)
                    # types expose their payload on .value.
                    if type(attr) is int:
                        campaign[col_name] = attr
                    else:
                        campaign[col_name] = getattr(attr, 'value')
                campaigns.append(campaign)
    except GoogleAdsException as ex:
        print(f'Request with ID "{ex.request_id}" failed with status '
              f'"{ex.error.code().name}" and includes the following errors:')
        for error in ex.failure.errors:
            print(f'\tError with message "{error.message}".')
            if error.location:
                for field_path_element in error.location.field_path_elements:
                    print(f'\t\tOn field: {field_path_element.field_name}')
        sys.exit(1)

    df_campaigns = pd.DataFrame.from_dict(campaigns)
    return df_campaigns
def download_expansion(
        city: str,
        client: GoogleAdsClient,
        customer_id: str,
        only_expand_missing: bool,
        folder_expansion_raw: str,
        folder_expansion_parents: str,
        folder_keywords_google: str,
        list_source_priority_order: Tuple[str] = DEFAULT_SOURCE_PRIORITY_ORDER,
        folder_keywords: str = FOLDER_SEEDWORDS,
        language_id: str = DEFAULT_LANGUAGE_ID_ENGLISH,
) -> None:
    """Expand seed keywords for *city* via the KeywordPlanIdeaService.

    For each keyword source (in priority order) and each seed keyword, this
    requests keyword ideas from the Google Ads API (v3), then writes three
    artifacts per seed keyword:
      * a parent-mapping TXT per expanded keyword (folder_expansion_parents),
      * a TXT listing all expanded keywords (folder_keywords_google),
      * a CSV of ideas with metrics (folder_expansion_raw / per-source folder).

    Args:
        city: city key; its geo code is looked up in DEFAULT_CITIES.
        client: configured GoogleAdsClient.
        customer_id: Google Ads customer ID used for the idea requests.
        only_expand_missing: when True, skip seed keywords whose raw CSV
            already exists in the per-source raw folder.
        folder_expansion_raw: root folder for raw expansion CSVs.
        folder_expansion_parents: folder for expanded-keyword parent files.
        folder_keywords_google: root folder for expanded-keyword lists.
        list_source_priority_order: keyword sources to process, in order.
        folder_keywords: folder holding the seed-word inputs.
        language_id: language constant ID (defaults to English).

    Returns:
        None; all results are written to disk. Per-seed API failures are
        logged and skipped rather than raised.
    """
    # Empty geo code (unknown city) still produces a one-element list;
    # presumably the API call then targets nothing — TODO confirm upstream.
    location_id: str = DEFAULT_CITIES.get(city, {}).get(GOOGLE_GEO_CODE, "")
    list_location_ids: List[str] = [location_id]
    keyword_plan_idea_service = client.get_service("KeywordPlanIdeaService",
                                                   version="v3")
    keyword_competition_level_enum = (client.get_type(
        "KeywordPlanCompetitionLevelEnum",
        version="v3").KeywordPlanCompetitionLevel)
    keyword_plan_network = client.get_type("KeywordPlanNetworkEnum",
                                           version="v3").GOOGLE_SEARCH
    locations = map_locations_to_string_values(
        client=client,
        location_ids=list_location_ids,
    )
    language = map_language_to_string_value(
        client=client,
        language_id=language_id,
    )
    # Only keyword seeds are used; URL-based seeding is deliberately disabled.
    url_seed = None
    keyword_url_seed = None
    # Mapping of source name -> list of seed keywords read from disk.
    dict_keywords: dict = trends.generate_keywords(
        folder_keywords=folder_keywords,
    )
    source: str
    for source in list_source_priority_order:
        folder_expansion_raw_source: str = generate_source_folder(
            source=source,
            folder=folder_expansion_raw,
        )
        list_source_keywords: List[str] = dict_keywords[source]
        # NOTE(review): this is annotated as a Generator, and it is tested
        # with `in` once per seed keyword below; membership tests consume a
        # generator, so repeated checks may exhaust it and let later seeds
        # through even when their CSV exists — confirm import_paths_from_folder
        # actually returns a reusable sequence.
        list_already_expanded_filenames_in_raw_source: Generator[
            str, None, List[str]] = import_paths_from_folder(
                folder=folder_expansion_raw_source,
                list_paths_filter_conditions=(city, ),
            )
        seed_keyword: str
        for seed_keyword in list_source_keywords:
            # noinspection PyArgumentList
            nt_filename_expansion_raw: tuple = NT_filename_expansion_raw(
                city=city,
                keyword=seed_keyword,
            )
            filename_expansion_raw: str = generate_filename(
                nt_filename=nt_filename_expansion_raw,
                delimiter=HYPHEN,
                extension=CSV,
            )
            # Skip seeds already expanded on a previous run when requested.
            if only_expand_missing and filename_expansion_raw in list_already_expanded_filenames_in_raw_source:
                continue
            print(f"expansion : {city} : {seed_keyword}")
            keyword_seed = client.get_type("KeywordSeed", version="v3")
            keyword_protos = map_keywords_to_string_values(
                client=client,
                keywords=[seed_keyword],
            )
            keyword_seed.keywords.extend(keyword_protos)
            # Crude rate limiting between successive API requests.
            time.sleep(2)
            try:
                keyword_ideas = keyword_plan_idea_service.generate_keyword_ideas(
                    customer_id,
                    language,
                    locations,
                    keyword_plan_network,
                    url_seed=url_seed,
                    keyword_seed=keyword_seed,
                    keyword_and_url_seed=keyword_url_seed,
                )
            except GoogleAdsException as ex:
                # Log and move on to the next seed; a single failure must not
                # abort the whole expansion run.
                log_error(
                    error=f"{city}{HYPHEN}{seed_keyword}{HYPHEN}exception")
                log_exception(exception=ex)
                write_errors_to_disk(clear_task_origin=False, overwrite=False)
                continue
            list_expanded_keywords: List[str] = []
            list_individual_keyword_ideas_results: List[pd.DataFrame] = []
            for idea in keyword_ideas.results:
                # One single-row DataFrame per idea; concatenated below.
                dict_keyword_ideas_result = {}
                dict_keyword_ideas_result.update({CITY: city})
                expanded_keyword = idea.text.value
                dict_keyword_ideas_result.update({KEYWORD: expanded_keyword})
                dict_keyword_ideas_result.update({SOURCE: source})
                list_expanded_keywords.append(
                    f"{city}{HYPHEN}{expanded_keyword}")
                dict_keyword_ideas_result.update({
                    AVG_MONTHLY_SEARCH:
                    idea.keyword_idea_metrics.avg_monthly_searches.value
                })
                # Translate the competition enum number into its name.
                competition_value = keyword_competition_level_enum.Name(
                    idea.keyword_idea_metrics.competition)
                dict_keyword_ideas_result.update(
                    {COMPETITION_VALUE: competition_value})
                list_individual_keyword_ideas_results.append(
                    pd.DataFrame(
                        dict_keyword_ideas_result,
                        index=[0],
                    ))
                # Record which seed produced this expanded keyword.
                # noinspection PyArgumentList
                filename_expansion_parent: str = generate_filename(
                    nt_filename=NT_filename_expansion_parents(
                        expanded_keyword=expanded_keyword,
                    ),
                    extension=TXT,
                )
                write_list_to_file(
                    filename=f"{filename_expansion_parent}",
                    folder=folder_expansion_parents,
                    list_strings=[f"{city}{HYPHEN}{seed_keyword}"],
                )
            # Persist the full list of expanded keywords for this seed.
            # noinspection PyArgumentList
            output_keywords_google_filename: str = generate_filename(
                nt_filename=NT_filename_keywords_google(
                    seed_keyword=seed_keyword,
                ),
                extension=TXT,
            )
            write_list_to_file(
                filename=f"{output_keywords_google_filename}",
                folder=generate_source_folder(
                    source=source,
                    folder=folder_keywords_google,
                ),
                list_strings=list_expanded_keywords,
            )
            df_keyword_ideas_for_city: pd.DataFrame
            if len(list_individual_keyword_ideas_results) > 0:
                df_keyword_ideas_for_city = pd.concat(
                    list_individual_keyword_ideas_results,
                    ignore_index=True,
                )
            else:
                # No ideas returned: still write an empty CSV and log it.
                df_keyword_ideas_for_city = pd.DataFrame()
                log_error(
                    error=f"{city}{HYPHEN}{seed_keyword}{HYPHEN}{ERROR_EMPTY}")
            df_keyword_ideas_for_city.to_csv(
                f"{folder_expansion_raw_source}{filename_expansion_raw}",
                index=False,
            )