Example 1
def _create_click_conversion(
    client: GoogleAdsClient,
    customer_id: str,
    action_name: str,
    gclid: str,
    utc_dt: datetime,
) -> UploadClickConversionsResponse:
    ca = get_conversion_action(client, customer_id, action_name)
    if ca is None:
        raise MissingResourceError(
            f"Could not find conversion_action with name {action_name}")
    cc = client.get_type("ClickConversion", version="v6")
    cc.conversion_action = ca.resource_name
    cc.gclid = gclid
    cc.conversion_date_time = utc_dt.strftime("%Y-%m-%d %H:%M:%S+00:00")

    conversion_upload_service = client.get_service("ConversionUploadService",
                                                   version="v6")

    res = conversion_upload_service.upload_click_conversions(
        customer_id, [cc], partial_failure=True)

    if is_partial_failure_error_present(res):
        print('Click conversion failed.')
        print(res)
        raise Exception(res)

    return res
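A minimal usage sketch (the configuration path, customer ID, action name, and GCLID below are placeholders; the surrounding module's imports are assumed):

from datetime import datetime, timezone

client = GoogleAdsClient.load_from_storage("google-ads.yaml")
response = _create_click_conversion(
    client=client,
    customer_id="1234567890",           # placeholder customer ID
    action_name="signup",               # placeholder conversion action name
    gclid="Cj0KCQjw...",                # GCLID captured from the ad click
    utc_dt=datetime.now(timezone.utc),  # conversion time, already in UTC
)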
Example 2
def map_language_to_string_value(
    client: GoogleAdsClient,
    language_id: str,
) -> Any:
    # google.protobuf.StringValue
    language = client.get_type("StringValue")
    language.value = client.get_service(
        "LanguageConstantService",
        version="v3").language_constant_path(language_id)
    return language
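The StringValue wrapper reflects the older, versioned get_type API used throughout these examples; current google-ads releases use proto-plus messages, so plain Python strings are assigned directly. A hedged sketch of the modern equivalent, assuming a recent client where path helpers live on GoogleAdsService:

def map_language_to_string(client: GoogleAdsClient, language_id: str) -> str:
    # The path helper returns a plain string such as "languageConstants/1000",
    # so no wrapper message is needed.
    return client.get_service("GoogleAdsService").language_constant_path(
        language_id)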
Example 3
def map_locations_to_string_values(
    client: GoogleAdsClient,
    location_ids: List[str],
) -> List:
    gtc_service = client.get_service("GeoTargetConstantService", version="v3")
    locations = []
    location_id: str
    for location_id in location_ids:
        # google.protobuf.StringValue
        location = client.get_type("StringValue")
        location.value = gtc_service.geo_target_constant_path(location_id)
        locations.append(location)
    return locations
Example 4
def init_clients():
    """Sets up googleads.yaml and google-ads.yaml and inits both clients.
  tries to create struct. if succesful, marks config_valid=1 in config.yaml
  to mark config is valid. Marks 0 otherwise."""

    setup.set_api_configs()

    status = 0

    global client
    global googleads_client

    try:

        client = adwords.AdWordsClient.LoadFromStorage(CONFIG_PATH /
                                                       'googleads.yaml')
        googleads_client = GoogleAdsClient.load_from_storage(CONFIG_PATH /
                                                             'google-ads.yaml')

        with open(CONFIG_FILE_PATH, 'r') as f:
            config = yaml.load(f, Loader=yaml.FullLoader)

        config['config_valid'] = 1

        with open(CONFIG_FILE_PATH, 'w') as f:
            yaml.dump(config, f)

    except Exception as e:
        logging.error(str(e))
        status = 1

    return status
Example 5
def create_conversion_action(client: GoogleAdsClient, customer_id: str,
                             name: str) -> MutateConversionActionsResponse:

    conversion_action_service = client.get_service("ConversionActionService",
                                                   version="v6")
    conversion_action_operation = client.get_type("ConversionActionOperation",
                                                  version="v6")

    action = conversion_action_operation.create
    action.name = name
    action.type = client.get_type("ConversionActionTypeEnum").UPLOAD_CLICKS
    action.category = client.get_type("ConversionActionCategoryEnum").DEFAULT
    action.status = client.get_type("ConversionActionStatusEnum").ENABLED

    res = conversion_action_service.mutate_conversion_actions(
        customer_id, [conversion_action_operation])

    return res
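A hedged sketch of how this pairs with get_conversion_action from Example 9, creating the action only when it does not already exist (the customer ID and name are placeholders):

customer_id = "1234567890"
if get_conversion_action(client, customer_id, "signup") is None:
    create_conversion_action(client, customer_id, "signup")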
Example 6
def map_keywords_to_string_values(
    client: GoogleAdsClient,
    keywords: List[str],
) -> List:
    keyword_protos: List = []
    keyword: str
    for keyword in keywords:
        # google.protobuf.StringValue
        string_val = client.get_type("StringValue")
        string_val.value = keyword
        keyword_protos.append(string_val)
    return keyword_protos
Example 7
    def _get_service(self):
        """Connects and authenticates with the Google Ads API using a service
        account and returns a GoogleAdsService instance."""
        with NamedTemporaryFile("w", suffix=".json") as secrets_temp:
            self._get_config()
            self._update_config_with_secret(secrets_temp)
            try:
                client = GoogleAdsClient.load_from_dict(self.google_ads_config)
                return client.get_service("GoogleAdsService",
                                          version=self.api_version)
            except GoogleAuthError as e:
                self.log.error("Google Auth Error: %s", e)
                raise
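_update_config_with_secret is not shown. A plausible sketch, assuming the hook holds the service-account key JSON as a string (the service_account_json attribute is hypothetical) and that the config uses the library's service-account key name (path_to_private_key_file in older releases, json_key_file_path in later ones):

    def _update_config_with_secret(self, secrets_temp) -> None:
        # Write the service-account key to the temp file and point the
        # in-memory config at it before load_from_dict reads it.
        secrets_temp.write(self.service_account_json)  # hypothetical attribute
        secrets_temp.flush()
        self.google_ads_config["path_to_private_key_file"] = secrets_temp.name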
Example 8
def get_keywords_data(customer_id, location_ids, language_id, keyword_texts,
                      page_url):
    # GoogleAdsClient will read the google-ads.yaml configuration file in the
    # home directory if none is specified.

    # Heroku env
    google_ads_client = GoogleAdsClient.load_from_storage(
        '/app/google-ads.yaml')
    # local Docker env
    # google_ads_client = GoogleAdsClient.load_from_storage('/home/KJA_APP/google-ads.yaml')
    return main(google_ads_client, customer_id, location_ids, language_id,
                keyword_texts, page_url)
Example 9
def get_conversion_action(client: GoogleAdsClient, customer_id: str,
                          name: str) -> Optional[ConversionAction]:
    ga_service = client.get_service("GoogleAdsService", version="v6")

    query = f"""
      SELECT conversion_action.id, conversion_action.name
      FROM conversion_action
      WHERE conversion_action.name = '{name}'
    """

    response = ga_service.search_stream(customer_id, query=query)
    try:
        row = next(row for batch in response for row in batch.results)
        return row.conversion_action
    except StopIteration:
        return None
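Note that name is interpolated directly into the GAQL string, so a value containing a single quote would break the query. A small hedged guard, assuming GAQL's backslash escaping for string literals:

def _escape_gaql_string(value: str) -> str:
    # Escape backslashes first, then single quotes.
    return value.replace("\\", "\\\\").replace("'", "\\'")

# usage: WHERE conversion_action.name = '{_escape_gaql_string(name)}'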
Example 10
def main():
    env = Env()

    conn = psycopg2.connect(
        dbname=env("PG_DATABASE"),
        user=env("PG_USER"),
        host=env("PG_HOST"),
        port=env("PG_PORT"),
        password=env("PG_PASSWORD"),
    )

    conf_path = env("GOOGLE_ADS_YAML_PATH")
    consumer = Consumer(conn, "signups_ga", "event_consumer_groups")
    client = GoogleAdsClient.load_from_storage(conf_path)
    customer_id = env("GOOGLE_CUSTOMER_ID")

    def work(o, n):
        return report_conversions(conn, client, customer_id, o, n)

    consumer.do_work(work)
Example 11
def query_ga_campaign(query: str, client: GoogleAdsClient, customer_id: str):
    ga_service = client.get_service('GoogleAdsService', version='v4')

    # Issues a search request using streaming.
    customer_id = customer_id.replace('-', '')
    response = ga_service.search_stream(customer_id, query=query)

    select_section = re.search('(?<=SELECT )(.*)(?= FROM)', query)[0]
    extract_values = select_section.split(', ')

    campaigns = []
    try:
        for batch in response:
            for row in batch.results:
                campaign = {}
                for extract_value in extract_values:
                    attr = get_nested_attr(row, extract_value)
                    col_name = extract_value.split('.')[-1]
                    if isinstance(attr, int):
                        campaign[col_name] = attr
                    else:
                        # Wrapped proto fields (e.g. StringValue) carry the
                        # payload in their .value attribute.
                        campaign[col_name] = getattr(attr, 'value')
                campaigns.append(campaign)

    except GoogleAdsException as ex:
        print(f'Request with ID "{ex.request_id}" failed with status '
            f'"{ex.error.code().name}" and includes the following errors:')
        for error in ex.failure.errors:
            print(f'\tError with message "{error.message}".')
            if error.location:
                for field_path_element in error.location.field_path_elements:
                    print(f'\t\tOn field: {field_path_element.field_name}')
        sys.exit(1)
    df_campaigns = pd.DataFrame.from_dict(campaigns)
    return df_campaigns
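get_nested_attr is not defined in this snippet; a minimal sketch of the usual implementation, walking a dotted path with getattr:

from functools import reduce

def get_nested_attr(obj, dotted_path: str):
    # get_nested_attr(row, "campaign.name") is equivalent to row.campaign.name
    return reduce(getattr, dotted_path.split("."), obj)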
Example 12
def main(
    called_from_main: bool = False,
    list_cities: Tuple[str, ...] = tuple(DEFAULT_CITIES),
    partition_group: int = 1,
    partition_total: int = 1,
) -> None:
    set_error_file_origin(KEYWORD)
    set_error_folder(FOLDER_ERROR)
    set_partition_group(partition_group)
    set_partition_total(partition_total)
    with open(f"{KEYWORD}{HYPHEN}{PARAMETERS}{JSON}") as json_file:
        json_data: dict = json.load(json_file)
        count_expansion_parents: bool
        download: bool
        only_download_missing: bool
        aggregate: bool
        customer_id: str
        credentials: str
        list_partitioned_cities: Tuple[str, ...]
        list_source_priority_order: List[str]
        if called_from_main:
            download = json_data[DOWNLOAD]
            only_download_missing = json_data[PARAM_ONLY_DOWNLOAD_MISSING]
            aggregate = json_data[AGGREGATE]
            count_expansion_parents = json_data[PARAM_COUNT_EXPANSION_PARENTS]
            credentials = json_data[PARAM_CREDENTIALS]
            customer_id = json_data[CUSTOMER_ID]
            parameters: dict = json_data[KEYWORD]
            list_source_priority_order = json_data[PARAM_SOURCE_PRIORITY_ORDER]
            list_input_cities: List[str] = parameters[CITY]
            list_input_cities.sort()
            list_partitioned_cities = tuple(
                partition_list(
                    list_partition_candidates=list_input_cities,
                    partition_group=get_partition_group(),
                    partition_total=get_partition_total(),
                ))
        else:
            download = False
            only_download_missing = True
            aggregate = False
            count_expansion_parents = False
            credentials = ""
            customer_id = ""
            list_source_priority_order = []
            list_partitioned_cities = list_cities

    google_ads_client: GoogleAdsClient = GoogleAdsClient.load_from_storage(
        credentials)

    if download:
        set_error_task_origin(task_origin=DOWNLOAD)
        city: str
        for city in list_partitioned_cities:
            download_expansion(
                city=city,
                client=google_ads_client,
                customer_id=customer_id,
                only_expand_missing=only_download_missing,
                folder_expansion_raw=FOLDER_EXPANSION_RAW,
                folder_expansion_parents=FOLDER_EXPANSION_PARENTS,
                folder_keywords_google=FOLDER_SEEDWORDS_GOOGLE,
                list_source_priority_order=tuple(list_source_priority_order),
            )
            write_errors_to_disk(overwrite=False)

    if count_expansion_parents:
        set_error_task_origin(task_origin=FREQUENCY)
        dict_keywords: dict = trends.generate_keywords(
            folder_keywords=FOLDER_SEEDWORDS, )
        already_downloaded_keywords: List[str] = list(
            set((keyword for sub_dict_keywords in dict_keywords.values()
                 for keyword in sub_dict_keywords.keys())))

        list_parent_file_names: Generator[
            str, None, List[str]] = import_paths_from_folder(
                folder=FOLDER_EXPANSION_PARENTS, )
        dict_expansion_frequency: dict = {}

        parent_file_name: str
        for parent_file_name in list_parent_file_names:
            # Count lines (one parent entry per line) without leaking the
            # file handle.
            with open(f"{FOLDER_EXPANSION_PARENTS}{parent_file_name}") as parent_file:
                frequency = sum(1 for _ in parent_file)
            # str.rstrip strips a character set rather than a suffix, so it
            # would mangle words ending in 't', 'x', or '.'; slice instead.
            expansion_word: str = (parent_file_name[:-len(TXT)]
                                   if parent_file_name.endswith(TXT)
                                   else parent_file_name)
            if expansion_word in dict_expansion_frequency:
                log_error(
                    error=f"duplicate_expansion_word{HYPHEN}{expansion_word}")
            dict_expansion_frequency.update({expansion_word: frequency})

        dict_expansion_frequency = dict(
            sorted(dict_expansion_frequency.items(),
                   key=lambda x: x[1],
                   reverse=True))
        try:
            with open(f"{FOLDER_SEEDWORDS}parent_frequency.csv",
                      'w') as parent_frequency_file:
                writer = csv.writer(parent_frequency_file)
                writer.writerow(
                    ["expanded_keyword", "frequency", "already_downloaded"])
                for key, value in dict_expansion_frequency.items():
                    writer.writerow(
                        [key, value, (key in already_downloaded_keywords)])
        except IOError:
            log_error(error="I/O error")
        write_errors_to_disk()

    if aggregate:
        set_error_task_origin(task_origin=AGGREGATE)
        is_valid_for_aggregation = check_partition_valid_for_aggregation(
            error_label=AGGREGATE,
            partition_group=get_partition_group(),
            partition_total=get_partition_total(),
        )
        if is_valid_for_aggregation:
            source: str
            for source in list_source_priority_order:
                log_error(f"{AGGREGATE} : {SOURCE} : {source}", log=True)
                aggregate_data_in_folder(
                    filename_label=source,
                    folder_input=generate_source_folder(
                        source=source,
                        folder=FOLDER_EXPANSION_RAW,
                    ),
                    folder_output_aggregate=FOLDER_EXPANSION_AGGREGATE,
                    list_cities=list_partitioned_cities,
                    bool_suppress_print=True,
                )
                write_errors_to_disk(clear_task_origin=False,
                                     bool_suppress_print=True,
                                     overwrite=False)

            log_error(f"{AGGREGATE} : {EXPANSION}", log=True)
            aggregate_data_in_folder(
                filename_label=KEYWORD,
                folder_input=FOLDER_EXPANSION_AGGREGATE,
                folder_output_aggregate=FOLDER_EXPANSION_AGGREGATE,
                list_cities=list_partitioned_cities,
            )
        write_errors_to_disk(overwrite=False)
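partition_list is not shown; from the call site it is assumed to split the sorted city list into partition_total groups and return the group numbered partition_group. A minimal sketch under that assumption:

def partition_list(list_partition_candidates, partition_group, partition_total):
    # 1-indexed groups: group g takes every item whose index is congruent
    # to g - 1 modulo partition_total.
    return [
        item
        for index, item in enumerate(list_partition_candidates)
        if index % partition_total == partition_group - 1
    ]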
Example 13
    def ewah_execute(self, context):
        # Task execution happens here
        def get_data_from_ads_output(fields_dict, values, prefix=None):
            if prefix is None:
                prefix = ''
            elif not prefix[-1] == '_':
                prefix += '_'
                # e.g. 2b prefix = 'ad_group_criterion_'
            data = {}
            for key, value in fields_dict.items():
                # e.g. 1 key = 'metrics', value = ['impressions', 'clicks']
                # e.g. 2a key = 'ad_group_criterion', value = [{'keyword': ['text', 'match_type']}]
                # e.g. 2b key = 'keyword', value = ['text', 'match_type']
                node = getattr(values, key)
                # e.g. 1 node = row.metrics
                # e.g. 2a node = row.ad_group_criterion
                # e.g. 2b node = row.ad_group_criterion.keyword
                for item in value:
                    # e.g. 1 item = 'clicks'
                    # e.g. 2a item = {'keyword': ['text', 'match_type']}
                    # e.g. 2b item = 'text'
                    if isinstance(item, dict):
                        data.update(
                            get_data_from_ads_output(
                                fields_dict=item,
                                values=node,
                                prefix=prefix +
                                key,  # e.g. 2a '' + 'ad_group_criterion'
                            ))
                    else:
                        # e.g. 1: {'' + 'metrics' + '_' + 'clicks': row.metrics.clicks.value}
                        # e.g. 2b: {'ad_group_criterion_' + 'keyword' + '_' + 'text': row.ad_group_criterion.keyword.text.value}
                        if hasattr(getattr(node, item), 'value'):
                            data.update({
                                prefix + key + '_' + item:
                                    getattr(node, item).value
                            })
                        else:
                            # some node ends don't respond to .value but are
                            #   already the value
                            data.update({
                                prefix + key + '_' + item:
                                getattr(node, item)
                            })
            return data

        self.data_until = airflow_datetime_adjustments(self.data_until)
        self.data_until = self.data_until or context['next_execution_date']
        if isinstance(self.data_from, timedelta):
            self.data_from = self.data_until - self.data_from
        else:
            self.data_from = airflow_datetime_adjustments(self.data_from)
            self.data_from = self.data_from or context['execution_date']

        conn = BaseHook.get_connection(self.source_conn_id).extra_dejson
        credentials = {}
        for key in self._REQUIRED_KEYS:
            if key not in conn:
                raise Exception(
                    '{0} must be in connection extra json!'.format(key))
            credentials[key] = conn[key]

        # build the query
        query = 'SELECT {0} FROM {1} WHERE segments.date {2} {3}'.format(
            ', '.join(self.fields_list),
            self.resource,
            "BETWEEN '{0}' AND '{1}'".format(
                self.data_from.strftime('%Y-%m-%d'),
                self.data_until.strftime('%Y-%m-%d'),
            ),
            ('AND ' + ' AND '.join(self.conditions)) if self.conditions else '',
        )

        self.log.info('executing this google ads query:\n{0}'.format(query))
        cli = GoogleAdsClient.load_from_dict(credentials)
        service = cli.get_service("GoogleAdsService", version="v3")
        search = service.search(
            self.client_id.replace('-', ''),
            query=query,
        )
        data = [row for row in search]

        # get into uploadable format
        upload_data = []
        while data:
            datum = data.pop(0)
            upload_data += [
                get_data_from_ads_output(
                    deepcopy(self.fields_dict),
                    datum,
                )
            ]

        self.upload_data(upload_data)
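To make the recursion concrete, here is the flattening applied to a hypothetical fields_dict (key names follow the prefix + key + '_' + item rule above):

# Input (hypothetical):
#   {'metrics': ['impressions', 'clicks'],
#    'ad_group_criterion': [{'keyword': ['text', 'match_type']}]}
#
# Flattened output keys:
#   metrics_impressions
#   metrics_clicks
#   ad_group_criterion_keyword_text
#   ad_group_criterion_keyword_match_type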
Example 14
        account_assets = executor.map(
            lambda account: get_accounts_assets(client, str(account['id'])),
            accounts)
    for account, assets in zip(accounts, account_assets):
        account['assets'] = assets
    return accounts


def get_account_adgroup_structure(client, customer_id):
    """Account structre of the form account:adgroups."""
    builder = AccountAdGroupStructureBuilder(client, customer_id)
    return builder.build()


if __name__ == '__main__':
    googleads_client = GoogleAdsClient.load_from_storage(
        'app/config/google-ads.yaml')
    # create_mcc_struct(googleads_client,
    #                   'app/cache/account_struct.json',
    #                   'app/cache/asset_to_ag.json')
    # print(json.dumps(get_accounts(googleads_client), indent=2))
    # print(json.dumps(
    #     get_assets_from_adgroup(googleads_client, 8791307154, 79845268520),
    #     indent=2))

    print(
        json.dumps(get_accounts_assets(googleads_client, '9489090398'),
                   indent=2))
    # print(json.dumps(get_all_accounts_assets(googleads_client), indent=2))

    # print(get_accounts(googleads_client))
Example 15
import os
import json

# directory this file currently lives in
src_path = os.path.dirname(os.path.realpath(__file__))

dir_path = os.path.join(src_path, '..')

# credentials dictionary
creds = {"google_ads": dir_path + "/creds/googleads.yaml"}

if not os.path.isfile(creds["google_ads"]):
    raise FileNotFoundError(
        "File doesn't exist. Please create the folder src/creds and put the googleads.yaml file there."
    )

resources = {"config": dir_path + "/config/config.json"}

# This logging configuration surfaces additional information when debugging
import logging
logging.basicConfig(level=logging.INFO,
                    format='[%(asctime)s - %(levelname)s] %(message).5000s')
logging.getLogger('google.ads.google_ads.client').setLevel(logging.INFO)

# Initialize the google_ads client
from google.ads.google_ads.client import GoogleAdsClient
gads_client = GoogleAdsClient.load_from_storage(creds["google_ads"])

# Initialize all global configurations
config = json.load(open(resources["config"], "r"))
Example 16
def setup_client():
    from google.ads.google_ads.client import GoogleAdsClient
    credentials = load_credentials()
    return GoogleAdsClient.load_from_dict(credentials)
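load_credentials is not shown; load_from_dict accepts the same keys as google-ads.yaml, so a hedged sketch looks like this (all values are placeholders):

def load_credentials() -> dict:
    return {
        "developer_token": "INSERT_DEVELOPER_TOKEN",
        "client_id": "INSERT_OAUTH2_CLIENT_ID",
        "client_secret": "INSERT_OAUTH2_CLIENT_SECRET",
        "refresh_token": "INSERT_REFRESH_TOKEN",
        "login_customer_id": "INSERT_LOGIN_CUSTOMER_ID",  # optional
    }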
Example 17
    for keyword in keywords:
        operation = client.get_type('KeywordPlanKeywordOperation',
                                    version='v2')
        operation.create.CopyFrom(keyword)
        operations.append(operation)

    response = keyword_plan_keyword_service.mutate_keyword_plan_keywords(
        customer_id, operations)

    ind = response.results[0].resource_name.split('/')[-1]
    return ind


if __name__ == '__main__':
    google_ads_client = GoogleAdsClient.load_from_storage(
        '/Users/Asset/iCloud/coding/zuri/google-ads-keyword-research/zuri-google-ads/google-ads.yaml'
    )

    params = {
        'location_ids': _DEFAULT_LOCATION_IDS,
        'language_id': _DEFAULT_LANGUAGE_ID,
        'keywords': '',
        'page_urls': '',
        'limit': _DEFAULT_LIMIT
    }
    fileobj = open('../config/config.txt')

    for line in fileobj:
        line = line.strip()
        key_value = line.split('=')
        if key_value[1]:
Example 18
client = None
googleads_client = None

# check if config is valid. if yes, init clients and create struct
try:
    with open(CONFIG_FILE_PATH, 'r') as f:
        config_file = yaml.load(f, Loader=yaml.FullLoader)
except FileNotFoundError:
    config_file = {'config_valid': 0}

if config_file['config_valid']:
    setup.set_api_configs()
    client = adwords.AdWordsClient.LoadFromStorage(CONFIG_PATH /
                                                   'googleads.yaml')
    googleads_client = GoogleAdsClient.load_from_storage(CONFIG_PATH /
                                                         'google-ads.yaml')
    try:
        structure.create_mcc_struct(googleads_client, account_struct_json_path,
                                    asset_to_ag_json_path)
    except Exception as e:
        logging.exception('Error when trying to create struct')
        Service_Class.reset_cid(client)


@server.route('/')
def upload_frontend():
    return render_template('index.html')


@server.route('/config/', methods=['GET'])
def get_configs():
Example 19
def main():
    client = GoogleAdsClient.load_from_env()
    mariadb_engine = sql_utils.create_engine(settings.MARIADB_CONFIG, db_name='output', db_type='mysql')
    LAG_TIME = settings.LAG_TIME
    # put google_campaign_report to mysql database
    if sql_utils.table_exists_notempty(mariadb_engine, 'output', 'google_campaign_report'):
        latest_date = sql_utils.get_latest_date_in_table(mariadb_engine, 'google_campaign_report')
    else:
        latest_date = datetime.today()
        LAG_TIME = 365
    from_date = (latest_date-timedelta(days=LAG_TIME)).strftime('%Y-%m-%d')
    to_date = (datetime.today()+timedelta(days=1)).strftime('%Y-%m-%d')

    google_campaign_report = get_campaign_report(client, settings.YOUSEE_CUSTOMER_ID, time_period_query=f'BETWEEN "{from_date}" AND "{to_date}"')
    google_campaign_report['platform'], google_campaign_report['campaign_type'], \
        google_campaign_report['brandorproduct'], google_campaign_report['campaign'] = \
            get_platform_type_brandorproduct_campaign_from_naming(google_campaign_report['name'])

    cols = ['name', 'platform', 'campaign_type', 'brandorproduct', 'campaign', 'date', 'start_date', 'clicks', 'impressions', 'cost', 'ctr']
    google_campaign_report = google_campaign_report[cols]
    dtype_trans = sql.get_dtype_trans(google_campaign_report)
    dtype_trans.update({'name': String(150)})
    dtype_trans.update({'campaign': String(80)})
    dtype_trans.update({'date': DateTime()})

    if sql_utils.table_exists_notempty(mariadb_engine, 'output', 'google_campaign_report'):
        sql_utils.delete_date_entries_in_table(mariadb_engine, from_date, 'google_campaign_report')
    google_campaign_report.to_sql('google_campaign_report', con=mariadb_engine, dtype=dtype_trans, if_exists='append', index=False)

    #mariadb_engine.execute('CREATE INDEX google_campaign_report_date_IDX USING BTREE ON `output`.google_campaign_report (date);')
    #mariadb_engine.execute('CREATE INDEX google_campaign_report_name_IDX USING HASH ON `output`.google_campaign_report (name, platform, campaign_type, brandorproduct, campaign);')

    # put google_device_campaign_report to mysql database
    if sql_utils.table_exists_notempty(mariadb_engine, 'output', 'google_device_campaign_report'):
        latest_date = sql_utils.get_latest_date_in_table(mariadb_engine, 'google_device_campaign_report')
    else:
        latest_date = datetime.today()
        LAG_TIME = 365
    from_date = (latest_date-timedelta(days=LAG_TIME)).strftime('%Y-%m-%d')
    to_date = (datetime.today()+timedelta(days=1)).strftime('%Y-%m-%d')

    google_device_campaign_report = get_campaign_report_by_device(client, settings.YOUSEE_CUSTOMER_ID, time_period_query=f'BETWEEN "{from_date}" AND "{to_date}"')
    google_device_campaign_report['platform'], google_device_campaign_report['campaign_type'], \
        google_device_campaign_report['brandorproduct'], google_device_campaign_report['campaign'] = \
            get_platform_type_brandorproduct_campaign_from_naming(google_device_campaign_report['name'])

    cols = ['name', 'platform', 'campaign_type', 'brandorproduct', 'campaign', 'date', 'device', 'start_date', 'clicks', 'impressions', 'cost', 'ctr']
    google_device_campaign_report = google_device_campaign_report[cols]
    dtype_trans = sql.get_dtype_trans(google_device_campaign_report)
    dtype_trans.update({'name': String(150)})
    dtype_trans.update({'campaign': String(80)})
    dtype_trans.update({'date': DateTime()})

    if sql_utils.table_exists_notempty(mariadb_engine, 'output', 'google_device_campaign_report'):
        sql_utils.delete_date_entries_in_table(mariadb_engine, from_date, 'google_device_campaign_report')
    google_device_campaign_report.to_sql('google_device_campaign_report', con=mariadb_engine, dtype=dtype_trans, if_exists='append', index=False)

    #mariadb_engine.execute('CREATE INDEX device_campaign_report_date_IDX USING BTREE ON `output`.google_device_campaign_report (date);')
    #mariadb_engine.execute('CREATE INDEX device_campaign_report_name_IDX USING HASH ON `output`.google_device_campaign_report (name, platform, campaign_type, brandorproduct, campaign);')
    #mariadb_engine.execute('CREATE INDEX device_campaign_report_device_IDX USING HASH ON `output`.google_device_campaign_report (device);')

    # put google_conversion_campaign_report to mysql database
    if sql_utils.table_exists_notempty(mariadb_engine, 'output', 'google_conversion_campaign_report'):
        latest_date = sql_utils.get_latest_date_in_table(mariadb_engine, 'google_conversion_campaign_report')
    else:
        latest_date = datetime.today()
        LAG_TIME = 365
    from_date = (latest_date-timedelta(days=LAG_TIME)).strftime('%Y-%m-%d')
    to_date = (datetime.today()+timedelta(days=1)).strftime('%Y-%m-%d')

    google_conversion_campaign_report = get_conversion_campaign_report(client, settings.YOUSEE_CUSTOMER_ID, time_period_query=f'BETWEEN "{from_date}" AND "{to_date}"')
    google_conversion_campaign_report['platform'], google_conversion_campaign_report['campaign_type'], \
        google_conversion_campaign_report['brandorproduct'], google_conversion_campaign_report['campaign'] = \
            get_platform_type_brandorproduct_campaign_from_naming(google_conversion_campaign_report['name'])


    cols = ['name', 'platform', 'campaign_type', 'brandorproduct', 'campaign', 'date', 'start_date', 'conversion_action_name', 'conversion_action_category', 'conversions', 'conversions_value']
    google_conversion_campaign_report = google_conversion_campaign_report[cols]
    dtype_trans = sql.get_dtype_trans(google_conversion_campaign_report)
    dtype_trans.update({'name': String(150)})
    dtype_trans.update({'campaign': String(80)})
    dtype_trans.update({'date': DateTime()})
    mariadb_engine = sql_utils.create_engine(settings.MARIADB_CONFIG, db_name='output', db_type='mysql')

    if sql_utils.table_exists_notempty(mariadb_engine, 'output', 'google_conversion_campaign_report'):
        sql_utils.delete_date_entries_in_table(mariadb_engine, from_date, 'google_conversion_campaign_report')
    google_conversion_campaign_report.to_sql('google_conversion_campaign_report', con=mariadb_engine, dtype=dtype_trans, if_exists='append', index=False)
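The three blocks above differ only in table name, report getter, and column list; a hedged refactor sketch that factors out the shared flow (every helper name is taken from the snippet itself):

def sync_report(engine, client, table, getter, cols, lag_time):
    # Incremental window if the table already has data, else a one-year backfill.
    if sql_utils.table_exists_notempty(engine, 'output', table):
        latest_date = sql_utils.get_latest_date_in_table(engine, table)
    else:
        latest_date, lag_time = datetime.today(), 365
    from_date = (latest_date - timedelta(days=lag_time)).strftime('%Y-%m-%d')
    to_date = (datetime.today() + timedelta(days=1)).strftime('%Y-%m-%d')

    report = getter(client, settings.YOUSEE_CUSTOMER_ID,
                    time_period_query=f'BETWEEN "{from_date}" AND "{to_date}"')
    (report['platform'], report['campaign_type'], report['brandorproduct'],
     report['campaign']) = \
        get_platform_type_brandorproduct_campaign_from_naming(report['name'])
    report = report[cols]

    dtype_trans = sql.get_dtype_trans(report)
    dtype_trans.update({'name': String(150), 'campaign': String(80),
                        'date': DateTime()})
    if sql_utils.table_exists_notempty(engine, 'output', table):
        sql_utils.delete_date_entries_in_table(engine, from_date, table)
    report.to_sql(table, con=engine, dtype=dtype_trans,
                  if_exists='append', index=False)

# e.g. sync_report(mariadb_engine, client, 'google_campaign_report',
#                  get_campaign_report, cols, settings.LAG_TIME)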
Example 20
def get_client():
    path = os.path.join(settings.BASE_DIR, 'static')
    client = GoogleAdsClient.load_from_storage(path + '/google-ads.yaml')
    return client
Example 21
def download_expansion(
    city: str,
    client: GoogleAdsClient,
    customer_id: str,
    only_expand_missing: bool,
    folder_expansion_raw: str,
    folder_expansion_parents: str,
    folder_keywords_google: str,
    list_source_priority_order: Tuple[str] = DEFAULT_SOURCE_PRIORITY_ORDER,
    folder_keywords: str = FOLDER_SEEDWORDS,
    language_id: str = DEFAULT_LANGUAGE_ID_ENGLISH,
) -> None:
    location_id: str = DEFAULT_CITIES.get(city, {}).get(GOOGLE_GEO_CODE, "")
    list_location_ids: List[str] = [location_id]

    keyword_plan_idea_service = client.get_service("KeywordPlanIdeaService",
                                                   version="v3")
    keyword_competition_level_enum = (client.get_type(
        "KeywordPlanCompetitionLevelEnum",
        version="v3").KeywordPlanCompetitionLevel)
    keyword_plan_network = client.get_type("KeywordPlanNetworkEnum",
                                           version="v3").GOOGLE_SEARCH
    locations = map_locations_to_string_values(
        client=client,
        location_ids=list_location_ids,
    )
    language = map_language_to_string_value(
        client=client,
        language_id=language_id,
    )

    url_seed = None
    keyword_url_seed = None

    dict_keywords: dict = trends.generate_keywords(
        folder_keywords=folder_keywords, )

    source: str
    for source in list_source_priority_order:
        folder_expansion_raw_source: str = generate_source_folder(
            source=source,
            folder=folder_expansion_raw,
        )
        list_source_keywords: List[str] = dict_keywords[source]
        # Materialize the paths up front: the membership test inside the loop
        # below would otherwise exhaust a generator after the first check.
        list_already_expanded_filenames_in_raw_source: List[str] = list(
            import_paths_from_folder(
                folder=folder_expansion_raw_source,
                list_paths_filter_conditions=(city, ),
            ))

        seed_keyword: str
        for seed_keyword in list_source_keywords:
            # noinspection PyArgumentList
            nt_filename_expansion_raw: tuple = NT_filename_expansion_raw(
                city=city,
                keyword=seed_keyword,
            )
            filename_expansion_raw: str = generate_filename(
                nt_filename=nt_filename_expansion_raw,
                delimiter=HYPHEN,
                extension=CSV,
            )

            if only_expand_missing and filename_expansion_raw in list_already_expanded_filenames_in_raw_source:
                continue

            print(f"expansion : {city} : {seed_keyword}")
            keyword_seed = client.get_type("KeywordSeed", version="v3")
            keyword_protos = map_keywords_to_string_values(
                client=client,
                keywords=[seed_keyword],
            )
            keyword_seed.keywords.extend(keyword_protos)
            time.sleep(2)
            try:
                keyword_ideas = keyword_plan_idea_service.generate_keyword_ideas(
                    customer_id,
                    language,
                    locations,
                    keyword_plan_network,
                    url_seed=url_seed,
                    keyword_seed=keyword_seed,
                    keyword_and_url_seed=keyword_url_seed,
                )
            except GoogleAdsException as ex:
                log_error(
                    error=f"{city}{HYPHEN}{seed_keyword}{HYPHEN}exception")
                log_exception(exception=ex)
                write_errors_to_disk(clear_task_origin=False, overwrite=False)
                continue

            list_expanded_keywords: List[str] = []
            list_individual_keyword_ideas_results: List[pd.DataFrame] = []
            for idea in keyword_ideas.results:
                dict_keyword_ideas_result = {}
                dict_keyword_ideas_result.update({CITY: city})
                expanded_keyword = idea.text.value
                dict_keyword_ideas_result.update({KEYWORD: expanded_keyword})
                dict_keyword_ideas_result.update({SOURCE: source})
                list_expanded_keywords.append(
                    f"{city}{HYPHEN}{expanded_keyword}")
                dict_keyword_ideas_result.update({
                    AVG_MONTHLY_SEARCH:
                    idea.keyword_idea_metrics.avg_monthly_searches.value
                })
                competition_value = keyword_competition_level_enum.Name(
                    idea.keyword_idea_metrics.competition)
                dict_keyword_ideas_result.update(
                    {COMPETITION_VALUE: competition_value})
                list_individual_keyword_ideas_results.append(
                    pd.DataFrame(
                        dict_keyword_ideas_result,
                        index=[0],
                    ))

                # noinspection PyArgumentList
                filename_expansion_parent: str = generate_filename(
                    nt_filename=NT_filename_expansion_parents(
                        expanded_keyword=expanded_keyword, ),
                    extension=TXT,
                )
                write_list_to_file(
                    filename=f"{filename_expansion_parent}",
                    folder=folder_expansion_parents,
                    list_strings=[f"{city}{HYPHEN}{seed_keyword}"],
                )

            # noinspection PyArgumentList
            output_keywords_google_filename: str = generate_filename(
                nt_filename=NT_filename_keywords_google(
                    seed_keyword=seed_keyword, ),
                extension=TXT,
            )
            write_list_to_file(
                filename=f"{output_keywords_google_filename}",
                folder=generate_source_folder(
                    source=source,
                    folder=folder_keywords_google,
                ),
                list_strings=list_expanded_keywords,
            )
            df_keyword_ideas_for_city: pd.DataFrame
            if len(list_individual_keyword_ideas_results) > 0:
                df_keyword_ideas_for_city = pd.concat(
                    list_individual_keyword_ideas_results,
                    ignore_index=True,
                )
            else:
                df_keyword_ideas_for_city = pd.DataFrame()
                log_error(
                    error=f"{city}{HYPHEN}{seed_keyword}{HYPHEN}{ERROR_EMPTY}")
            df_keyword_ideas_for_city.to_csv(
                f"{folder_expansion_raw_source}{filename_expansion_raw}",
                index=False,
            )
Example 22
        # Excerpt: body of an `except GoogleAdsException as error:` handler.
        print('Request with ID "{}" failed with status "{}" and includes the '
              'following errors:'.format(
                  error.request_id, error.error.code().name))
        for error in error.failure.errors:
            print('\tError with message "{}".'.format(error.message))
            if error.location:
                for field_path_element in error.location.field_path_elements:
                    print('\t\tOn field: {}'.format(
                        field_path_element.field_name))
        sys.exit(1)


if __name__ == '__main__':
    # GoogleAdsClient will read the google-ads.yaml configuration file in the
    # home directory if none is specified.
    google_ads_client = GoogleAdsClient.load_from_storage()

    parser = argparse.ArgumentParser(
        description='This code example adds a campaign label to a list of '
        'campaigns.')
    # The following argument(s) should be provided to run the example.
    parser.add_argument('-c', '--customer_id', type=str,
                        required=True, help='The Google Ads customer ID.')
    parser.add_argument('-l', '--label_id', type=str, required=True,
                        help='The ID of the label to attach to campaigns.')
    parser.add_argument('-i', '--campaign_ids', nargs='+', type=str,
                        required=True,
                        help='The campaign IDs to receive the label.')
    args = parser.parse_args()
    main(google_ads_client, args.customer_id, args.label_id, args.campaign_ids)
Example 23
    def ewah_execute(self, context):
        # Task execution happens here
        def get_data_from_ads_output(fields_dict, values, prefix=None):
            if prefix is None:
                prefix = ""
            elif not prefix[-1] == "_":
                prefix += "_"
                # e.g. 2b prefix = 'ad_group_criterion_'
            data = {}
            for key, value in fields_dict.items():
                # e.g. 1 key = 'metrics', value = ['impressions', 'clicks']
                # e.g. 2a key = 'ad_group_criterion', value = [{'keyword': ['text', 'match_type']}]
                # e.g. 2b key = 'keyword', value = ['text', 'match_type']
                node = getattr(values, key)
                # e.g. 1 node = row.metrics
                # e.g. 2a node = row.ad_group_criterion
                # e.g. 2b node = row.ad_group_criterion.keyword
                for item in value:
                    # e.g. 1 item = 'clicks'
                    # e.g. 2a item = {'keyword': ['text', 'match_type']}
                    # e.g. 2b item = 'text'
                    if isinstance(item, dict):
                        data.update(
                            get_data_from_ads_output(
                                fields_dict=item,
                                values=node,
                                prefix=prefix +
                                key,  # e.g. 2a '' + 'ad_group_criterion'
                            ))
                    else:
                        # e.g. 1: {'' + 'metrics' + '_' + 'clicks': row.metrics.clicks.value}
                        # e.g. 2b: {'ad_group_criterion_' + 'keyword' + '_' + 'text': row.ad_group_criterion.keyword.text.value}
                        if hasattr(getattr(node, item), "value"):
                            data.update({
                                prefix + key + "_" + item:
                                getattr(node, item).value
                            })
                        else:
                            # some node ends don't respond to .value but are
                            #   already the value
                            data.update({
                                prefix + key + "_" + item:
                                getattr(node, item)
                            })
            return data

        conn = self.source_conn.extra_dejson
        credentials = {}
        for key in self._REQUIRED_KEYS:
            if key not in conn:
                raise Exception(
                    "{0} must be in connection extra json!".format(key))
            credentials[key] = conn[key]

        # build the query
        query = "SELECT {0} FROM {1} WHERE segments.date {2} {3}".format(
            ", ".join(self.fields_list),
            self.resource,
            "BETWEEN '{0}' AND '{1}'".format(
                self.data_from.strftime("%Y-%m-%d"),
                self.data_until.strftime("%Y-%m-%d"),
            ),
            ("AND" + " AND ".join(self.conditions)) if self.conditions else "",
        )

        self.log.info("executing this google ads query:\n{0}".format(query))
        cli = GoogleAdsClient.load_from_dict(credentials)
        service = cli.get_service("GoogleAdsService", version="v3")
        search = service.search(
            self.client_id.replace("-", ""),
            query=query,
        )
        data = [row for row in search]

        # get into uploadable format
        upload_data = []
        fields_dict = deepcopy(self.fields_dict)
        fields_dict.update({self.resource: ["resource_name"]})
        while data:
            datum = data.pop(0)
            upload_data += [get_data_from_ads_output(
                fields_dict,
                datum,
            )]

        self.upload_data(upload_data)