Example 1
              f'"{response.results[0].resource_name}".')
    except GoogleAdsException as ex:
        print(f'Request with ID "{ex.request_id}" failed with status '
              f'"{ex.error.code().name}" and includes the following errors:')
        for error in ex.failure.errors:
            print(f'\tError with message "{error.message}".')
            if error.location:
                for field_path_element in error.location.field_path_elements:
                    print(f"\t\tOn field: {field_path_element.field_name}")
        sys.exit(1)


if __name__ == "__main__":
    # GoogleAdsClient will read the google-ads.yaml configuration file in the
    # home directory if none is specified.
    google_ads_client = GoogleAdsClient.load_from_storage()

    parser = argparse.ArgumentParser(
        description="Update sitelink extension feed item with the specified "
        "link text.")
    # The following argument(s) should be provided to run the example.
    parser.add_argument(
        "-c",
        "--customer_id",
        type=str,
        required=True,
        help="The Google Ads customer ID",
    )
    parser.add_argument(
        "-f",
        "--feed_item_id",
Example 2
        account_assets = executor.map(
            lambda account: get_accounts_assets(client, str(account['id'])),
            accounts)
    for account, assets in zip(accounts, account_assets):
        account['assets'] = assets
    return accounts


def get_account_adgroup_structure(client, customer_id):
    """Account structre of the form account:adgroups."""
    builder = AccountAdGroupStructureBuilder(client, customer_id)
    return builder.build()


if __name__ == '__main__':
    googleads_client = GoogleAdsClient.load_from_storage(
        'app/config/google-ads.yaml')
    # create_mcc_struct(googleads_client,
    #                   'app/cache/account_struct.json',
    #                   'app/cache/asset_to_ag.json')
    # print(json.dumps(get_accounts(googleads_client), indent=2))
    # print(json.dumps(
    #     get_assets_from_adgroup(googleads_client, 8791307154, 79845268520),
    #     indent=2))

    print(
        json.dumps(get_accounts_assets(googleads_client, '9489090398'),
                   indent=2))
    # print(json.dumps(get_all_accounts_assets(googleads_client), indent=2))

    # print(get_accounts(googleads_client))
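
The executor.map call at the top of this example has lost its enclosing with statement to truncation. A generic sketch of the fan-out pattern it implies; fetch_all and its parameters are illustrative names, not part of the original:

from concurrent import futures

def fetch_all(fetch_one, items, max_workers=8):
    """Runs fetch_one over items on a thread pool, preserving order."""
    # executor.map yields results in input order, which is what makes
    # the zip(accounts, account_assets) pairing in the example safe.
    with futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        return list(executor.map(fetch_one, items))

# Usage against the example's helpers (the per-account calls are
# I/O-bound API requests, so threads are a good fit):
# account_assets = fetch_all(
#     lambda account: get_accounts_assets(client, str(account['id'])),
#     accounts)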
Example 3
import os
import json

# directory this source file lives in
src_path = os.path.dirname(os.path.realpath(__file__))

dir_path = os.path.join(src_path, '..')

# credentials dictionary
creds = {"google_ads": os.path.join(dir_path, "creds", "googleads.yaml")}

if not os.path.isfile(creds["google_ads"]):
    raise FileNotFoundError(
        "googleads.yaml doesn't exist. Please create the folder src/creds "
        "and put the googleads.yaml file there."
    )

resources = {"config": dir_path + "/config/config.json"}

# This logging configuration surfaces additional information when debugging
import logging
logging.basicConfig(level=logging.INFO,
                    format='[%(asctime)s - %(levelname)s] %(message).5000s')
logging.getLogger('google.ads.google_ads.client').setLevel(logging.INFO)

# Initialize the google_ads client
from google.ads.google_ads.client import GoogleAdsClient
gads_client = GoogleAdsClient.load_from_storage(creds["google_ads"])

# Initialize all global configurations
with open(resources["config"], "r") as config_json:
    config = json.load(config_json)
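
The realpath/'..' arithmetic above works, but pathlib expresses the same layout more directly. A sketch under the same folder-structure assumptions:

from pathlib import Path

# Project root, resolved relative to this source file.
SRC_PATH = Path(__file__).resolve().parent
DIR_PATH = SRC_PATH.parent

creds = {"google_ads": DIR_PATH / "creds" / "googleads.yaml"}
resources = {"config": DIR_PATH / "config" / "config.json"}

if not creds["google_ads"].is_file():
    raise FileNotFoundError(
        "googleads.yaml not found. Please create the folder src/creds "
        "and put the googleads.yaml file there."
    )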
Example 4
    for keyword in keywords:
        operation = client.get_type('KeywordPlanKeywordOperation',
                                    version='v2')
        operation.create.CopyFrom(keyword)
        operations.append(operation)

    response = keyword_plan_keyword_service.mutate_keyword_plan_keywords(
        customer_id, operations)

    # The numeric ID is the last segment of the returned resource name.
    resource_id = response.results[0].resource_name.split('/')[-1]
    return resource_id


if __name__ == '__main__':
    google_ads_client = GoogleAdsClient.load_from_storage(
        '/Users/Asset/iCloud/coding/zuri/google-ads-keyword-research/zuri-google-ads/google-ads.yaml'
    )

    params = {
        'location_ids': _DEFAULT_LOCATION_IDS,
        'language_id': _DEFAULT_LANGUAGE_ID,
        'keywords': '',
        'page_urls': '',
        'limit': _DEFAULT_LIMIT
    }
    fileobj = open('../config/config.txt')

    for line in fileobj:
        line = line.strip()
        key_value = line.split('=')
        if key_value[1]:
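
The parsing loop is cut off mid-branch. A minimal sketch of a key=value reader in the same spirit; the comment handling and the comma-splitting of list values are assumptions, not taken from the original:

def load_params(path, defaults):
    """Reads key=value lines into a copy of defaults (sketch only)."""
    params = dict(defaults)
    with open(path) as fileobj:
        for line in fileobj:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            # partition tolerates '=' inside the value, unlike split('=').
            key, _, value = line.partition('=')
            key, value = key.strip(), value.strip()
            if not value:
                continue  # keep the default when the value is empty
            if key == 'location_ids':
                params[key] = [v.strip() for v in value.split(',')]
            else:
                params[key] = value
    return params

# Usage: params = load_params('../config/config.txt', params)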
Example 5
client = None
googleads_client = None

# Check whether the config is valid; if so, init the clients and create the struct.
try:
    with open(CONFIG_FILE_PATH, 'r') as f:
        config_file = yaml.load(f, Loader=yaml.FullLoader)
except FileNotFoundError:
    config_file = {'config_valid': 0}

if config_file['config_valid']:
    setup.set_api_configs()
    client = adwords.AdWordsClient.LoadFromStorage(CONFIG_PATH /
                                                   'googleads.yaml')
    googleads_client = GoogleAdsClient.load_from_storage(CONFIG_PATH /
                                                         'google-ads.yaml')
    try:
        structure.create_mcc_struct(googleads_client, account_struct_json_path,
                                    asset_to_ag_json_path)
    except Exception as e:
        logging.exception('Error when trying to create struct')
        Service_Class.reset_cid(client)


@server.route('/')
def upload_frontend():
    return render_template('index.html')


@server.route('/config/', methods=['GET'])
def get_configs():
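
One note on the YAML gate at the top of this example: yaml.safe_load is the conventional choice for plain configuration files and needs no Loader argument, and config_file['config_valid'] raises KeyError when the file exists but lacks the flag. A sketch folding both points into a helper:

import yaml

def read_config(path):
    """Returns the parsed config, or a stub marked invalid (sketch)."""
    try:
        with open(path, 'r') as f:
            # safe_load restricts parsing to plain YAML tags, which is
            # all a config file needs; it returns None for an empty file.
            return yaml.safe_load(f) or {'config_valid': 0}
    except FileNotFoundError:
        return {'config_valid': 0}

# config_file = read_config(CONFIG_FILE_PATH)
# if config_file.get('config_valid'):
#     ...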
Example 6
def main(
    called_from_main: bool = False,
    list_cities: Tuple[str, ...] = tuple(DEFAULT_CITIES),
    partition_group: int = 1,
    partition_total: int = 1,
) -> None:
    set_error_file_origin(KEYWORD)
    set_error_folder(FOLDER_ERROR)
    set_partition_group(partition_group)
    set_partition_total(partition_total)
    with open(f"{KEYWORD}{HYPHEN}{PARAMETERS}{JSON}") as json_file:
        json_data: dict = json.load(json_file)
        count_expansion_parents: bool
        download: bool
        only_download_missing: bool
        aggregate: bool
        customer_id: str
        credentials: str
        list_partitioned_cities: Tuple[str, ...]
        list_source_priority_order: List[str]
        if called_from_main:
            download = json_data[DOWNLOAD]
            only_download_missing = json_data[PARAM_ONLY_DOWNLOAD_MISSING]
            aggregate = json_data[AGGREGATE]
            count_expansion_parents = json_data[PARAM_COUNT_EXPANSION_PARENTS]
            credentials = json_data[PARAM_CREDENTIALS]
            customer_id = json_data[CUSTOMER_ID]
            parameters: dict = json_data[KEYWORD]
            list_source_priority_order = json_data[PARAM_SOURCE_PRIORITY_ORDER]
            list_input_cities: List[str] = parameters[CITY]
            list_input_cities.sort()
            list_partitioned_cities = tuple(
                partition_list(
                    list_partition_candidates=list_input_cities,
                    partition_group=get_partition_group(),
                    partition_total=get_partition_total(),
                ))
        else:
            download = False
            only_download_missing = True
            aggregate = False
            count_expansion_parents = False
            credentials = ""
            customer_id = ""
            list_source_priority_order = []
            list_partitioned_cities = list_cities

    google_ads_client: GoogleAdsClient = GoogleAdsClient.load_from_storage(
        credentials)

    if download:
        set_error_task_origin(task_origin=DOWNLOAD)
        city: str
        for city in list_partitioned_cities:
            download_expansion(
                city=city,
                client=google_ads_client,
                customer_id=customer_id,
                only_expand_missing=only_download_missing,
                folder_expansion_raw=FOLDER_EXPANSION_RAW,
                folder_expansion_parents=FOLDER_EXPANSION_PARENTS,
                folder_keywords_google=FOLDER_SEEDWORDS_GOOGLE,
                list_source_priority_order=tuple(list_source_priority_order),
            )
            write_errors_to_disk(overwrite=False)

    if count_expansion_parents:
        set_error_task_origin(task_origin=FREQUENCY)
        dict_keywords: dict = trends.generate_keywords(
            folder_keywords=FOLDER_SEEDWORDS, )
        already_downloaded_keywords: List[str] = list(
            set((keyword for sub_dict_keywords in dict_keywords.values()
                 for keyword in sub_dict_keywords.keys())))

        list_parent_file_names: Generator[
            str, None, List[str]] = import_paths_from_folder(
                folder=FOLDER_EXPANSION_PARENTS, )
        dict_expansion_frequency: dict = {}

        parent_file_name: str
        for parent_file_name in list_parent_file_names:
            with open(f"{FOLDER_EXPANSION_PARENTS}{parent_file_name}") as parent_file:
                frequency = sum(1 for _ in parent_file)
            # rstrip(TXT) strips a trailing run of TXT's characters, not the
            # suffix itself, so remove the extension explicitly.
            expansion_word: str = (parent_file_name[:-len(TXT)]
                                   if parent_file_name.endswith(TXT)
                                   else parent_file_name)
            if expansion_word in dict_expansion_frequency:
                log_error(
                    error=f"duplicate_expansion_word{HYPHEN}{expansion_word}")
            dict_expansion_frequency.update({expansion_word: frequency})

        dict_expansion_frequency = dict(
            sorted(dict_expansion_frequency.items(),
                   key=lambda x: x[1],
                   reverse=True))
        try:
            with open(f"{FOLDER_SEEDWORDS}parent_frequency.csv",
                      'w') as parent_frequency_file:
                writer = csv.writer(parent_frequency_file)
                writer.writerow(
                    ["expanded_keyword", "frequency", "already_downloaded"])
                for key, value in dict_expansion_frequency.items():
                    writer.writerow(
                        [key, value, (key in already_downloaded_keywords)])
        except IOError:
            log_error(error="I/O error")
        write_errors_to_disk()

    if aggregate:
        set_error_task_origin(task_origin=AGGREGATE)
        is_valid_for_aggregation = check_partition_valid_for_aggregation(
            error_label=AGGREGATE,
            partition_group=get_partition_group(),
            partition_total=get_partition_total(),
        )
        if is_valid_for_aggregation:
            source: str
            for source in list_source_priority_order:
                log_error(f"{AGGREGATE} : {SOURCE} : {source}", log=True)
                aggregate_data_in_folder(
                    filename_label=source,
                    folder_input=generate_source_folder(
                        source=source,
                        folder=FOLDER_EXPANSION_RAW,
                    ),
                    folder_output_aggregate=FOLDER_EXPANSION_AGGREGATE,
                    list_cities=list_partitioned_cities,
                    bool_suppress_print=True,
                )
                write_errors_to_disk(clear_task_origin=False,
                                     bool_suppress_print=True,
                                     overwrite=False)

            log_error(f"{AGGREGATE} : {EXPANSION}", log=True)
            aggregate_data_in_folder(
                filename_label=KEYWORD,
                folder_input=FOLDER_EXPANSION_AGGREGATE,
                folder_output_aggregate=FOLDER_EXPANSION_AGGREGATE,
                list_cities=list_partitioned_cities,
            )
        write_errors_to_disk(overwrite=False)
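
One pitfall fixed above deserves a note: str.rstrip takes a set of characters, not a suffix, so rstrip(TXT) also eats legitimate trailing letters. A short demonstration:

# rstrip treats its argument as a character set, not a suffix:
"extract.txt".rstrip(".txt")        # -> "extrac" (the final 't' is eaten too)

# Stripping an exact suffix (str.removesuffix needs Python 3.9+):
"extract.txt".removesuffix(".txt")  # -> "extract"

# Pre-3.9 equivalent, as used in the fix above:
name = "extract.txt"
if name.endswith(".txt"):
    name = name[:-len(".txt")]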