def getTargetCpa(account_id):
    """Return the target CPA configured for *account_id*.

    Reads kpi_name/kpi_value from the accounts table and falls back to a
    default of 50 when the account is missing or the KPI is not "cpa".
    """
    default_cpa = 50
    settings = Settings()
    # account_id = "71211f98-45e6-475e-942f-e637289f6cce"
    # NOTE(review): account_id is interpolated directly into the SQL; fine
    # for internally-generated UUIDs, but a parameterized query would be safer.
    query = "select kpi_name,kpi_value from accounts where id = '%s'" % (
        account_id)
    df = pd.read_sql_query(query, settings.createEngine())
    if dfIsEmpty(df):
        # Bug fix: the format args were previously OUTSIDE the print() call
        # ("print(...) % (default_cpa)"), which raised TypeError on None.
        print("Can't find the target cpa for this account, using the default (%s)"
              % (default_cpa))
        return default_cpa
    df = df.head(1)
    kpi_name = list(df["kpi_name"].values)[0]
    kpi_value = list(df["kpi_value"].values)[0]
    if kpi_name is None:
        kpi_name = "cpa"  # default, #todo grab a default based on performance
        kpi_value = 50
    if kpi_name == "cpa":
        target_cpa = kpi_value
    else:
        target_cpa = 50
    return target_cpa
def main(self, account_id):
    """Download this account's report and return it as a DataFrame.

    Returns 1 when running in debug mode, 1000 on a connection failure
    outside production (in production the exception is re-raised).
    """
    if (Settings()).envvars["APP_DEBUG"] == "true":
        return 1
    date_range = self.getDateRange()
    report = Report(account_id, date_range, options)
    report.createAccountDirectory()
    report.createReportDirectory()
    try:
        report.downloadReport(account_id, options["where_string"])
    except (requests.exceptions.ConnectionError, TransportError):
        # Bug fix (consistency): both transport failures previously had two
        # copy-pasted, identical except blocks; merged into one handler.
        print("NO INTERNET CONNECTION")
        if Settings().envvars["APP_ENV"] == 'production':
            raise
        return 1000
    df = report.convertCsvToDataframe()
    df['Cost'] = df['Cost'] / 1000000  # cost arrives in micros
    df = df.sort_values('Day')
    df = df.reset_index()
    return df
def trimDfToTableColumns(df, table_name):
    """Restrict *df* to exactly the columns of the given SQL table,
    adding any table column missing from the frame as None."""
    if dfIsEmpty(df):
        raise Exception("Error: can't trim an empty dataframe!")
    engine = Settings().createEngine()
    # Select a row that cannot exist purely to learn the table's columns.
    probe = pd.read_sql(
        "select * from %s where id = '999999999'" % (table_name), engine)
    table_columns = list(probe.columns)
    for column in table_columns:
        if column not in df.columns:
            df[column] = None
    return df[table_columns]
def main(self):
    """Populate the adverts and advert_performance tables"""
    Log("info", "populating the adverts and advert_performance tables", "",
        self.account_id)
    settings = Settings()
    for date_range in settings.date_ranges:
        chunks = self.dataFrameFromAdPerformanceReports(date_range)
        self.deleteExisting(date_range)
        # Iterating the generator directly replaces the manual
        # next()/StopIteration loop — same termination behavior.
        for chunk in chunks:
            if functions.dfIsEmpty(chunk):
                continue
            try:
                chunk = self.processDf(chunk, date_range)
            except Exception as exception:
                Log("error", str(exception), traceback.format_exc())
                raise
            chunk = self.addInfo(chunk)
            self.writeToAdvertPerformanceTable(chunk, date_range)
def run_ganme():
    """Main entry point: initialise pygame, build the game objects, and
    run the event/update/draw loop forever."""
    config = Settings()              # game configuration values
    pygame.init()
    # Create the display window from the configured dimensions.
    screen = pygame.display.set_mode(
        (config.screen_width, config.screen_height))
    pygame.display.set_caption("最炫酷的游戏(按Q退出)")
    ship = Ship(screen, config)      # the player's ship
    bullets = Group()                # live bullets on screen
    aliens = Group()                 # the alien fleet
    create_wxrq(config, screen, aliens)
    while True:
        # Poll keyboard/mouse events, then advance state and redraw.
        check_event(ship, config, screen, bullets)
        ship.update()
        update_zidan(bullets)
        update_screen(ship, screen, config, bullets, aliens)
def main(account_id):
    """Build and store the ad n-gram performance data for one account."""
    settings = Settings()
    Log("info", "processing ad n-grams", "", account_id)
    df = createFullDataFrame(account_id, settings)
    if functions.dfIsEmpty(df):
        return
    deduped = df.drop_duplicates()
    table_name = "ad_n_gram_performance"
    # Replace-then-append: clear this account's old rows first.
    deleteFromTable(table_name, account_id, settings.createEngine())
    functions.append_df_to_sql_table(deduped, table_name)
def __init__(self, account_id, service_type, operations=None):
    """Capture the account context and resolve the credentials needed to
    act on this account's behalf."""
    settings = Settings()
    helpers = Helpers()
    self.settings = settings
    self.helpers = helpers
    # Debug mode is driven by the APP_DEBUG environment variable.
    self.debug_mode = settings.envvars["APP_DEBUG"] == "true"
    self.account_id = account_id
    self.service_type = service_type
    self.operations = operations
    # Credentials/identifiers looked up per account.
    self.refresh_token = helpers.getRefreshToken(account_id)
    self.client_customer_id = helpers.getClientCustomerID(settings, account_id)
def getYamlData(self):
    """Read in yaml data from the googleads.yaml file.

    Returns the parsed mapping, or None when parsing fails (the error is
    printed, preserving the previous best-effort style).
    """
    settings = Settings()
    # Bug fix: yaml_data was previously unbound when YAMLError was caught,
    # so the return statement raised UnboundLocalError.
    yaml_data = None
    with open(settings.yaml_path, 'r') as stream:
        try:
            yaml_data = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    return yaml_data
def check_batch_job_result(self, batch_job_id):
    """Fetch and parse the result of a batch job, or return None when no
    download URL is available yet."""
    download_url = self.check_batch_job(batch_job_id)
    if not download_url:
        return None
    helper = self.getClient().GetBatchJobHelper(
        version=Settings.api_version())
    raw = urlopen(download_url).read()
    return helper.ParseResponse(raw)
def addCalculatedMetricsToDataFrame(df):
    """Add the derived metrics (ctr, etc.) defined in
    Settings.calculatedMetrics to *df* and return it.

    Each spec appears to be (numerator, operator, denominator, scale_flag)
    — TODO confirm against the Settings definition.
    """
    settings = Settings()
    for metric, spec in settings.calculatedMetrics.items():
        if spec[1] == "/":
            df[metric] = df[spec[0]] / df[spec[2]]
            if spec[3]:  # presumably a "percentage" flag — scale by 100
                df[metric] = df[metric] * 100
    return df
def add_batch_job(self):
    """
    Add a new BatchJob to upload operations to.
    :return: The new BatchJob created by the request.
    """
    service = self.getClient().GetService('BatchJobService',
                                          version=Settings.api_version())
    # A BatchJob is created by mutating with a single empty ADD operation.
    operations = [{'operand': {}, 'operator': 'ADD'}]
    result = service.mutate(operations)
    return result['value'][0]
def getHtmlContent(username, account_name):
    """Render the 'account synced successfully' email template.

    :param username: display name inserted into the greeting.
    :param account_name: account name inserted into the body.
    :return: rendered HTML string.
    """
    email_html_template = "account_synced_successfully.html"
    template_path = os.path.abspath(
        os.path.join(Settings().python_dir, "email_templates",
                     email_html_template))
    # Bug fix: close the template file (previously leaked an open handle).
    with open(template_path) as template_file:
        template = Template(template_file.read())
    html_content = template.render(
        username=username,
        account_name=account_name,
    )
    return html_content
def fileSplit():
    """Split the combined 'Search keyword.csv' report into one CSV per
    account under scorer/reports/."""
    settings = Settings()
    file_name = "Search keyword.csv"
    report_path = os.path.join(settings.this_dir, "scorer", file_name)
    # (a stray no-op expression that merely evaluated the path was removed)
    df = pd.read_csv(report_path, encoding="utf-8", skiprows=2)
    for account in df["Account"].drop_duplicates().values:
        account_df = df[df.Account == account]
        write_path = os.path.join(settings.this_dir, "scorer", "reports",
                                  account + ".csv")
        try:
            account_df.to_csv(write_path, index=False, encoding="utf-8")
        except Exception as exc:
            # Still best-effort (previously a silent bare `except: pass`),
            # but now the failure is at least visible.
            print("Couldn't write %s: %s" % (write_path, exc))
def reportSpecificProcessing(df, account_id):
    """Stamp ids and date-range metadata onto the report frame and add
    the calculated metrics (ctr, etc.)."""
    df["date_range"] = "last_30_days"
    df["account_id"] = account_id
    df.reset_index(inplace=True)
    # One UUID per row, stored as a string id column.
    df["id"] = pd.Series([uuid.uuid1() for _ in range(len(df))]).astype(str)
    settings = Settings()
    for metric, spec in settings.calculatedMetrics.items():
        if spec[1] == "/":
            df[metric] = df[spec[0]] / df[spec[2]]
            if spec[3]:  # presumably a "percentage" flag — scale by 100
                df[metric] = df[metric] * 100
    return df
def writeToEntitiesTable(self, df, report, account_id):
    """Replace this account's rows in the entities table with the keyword
    rows from the longest configured date range."""
    settings = Settings()
    Database().executeQuery(
        "delete from %s where account_id = '%s'" % (
            self.entity_table_name, account_id))
    # We only need to write the keyword data in once; the last (longest)
    # date range covers all keywords.
    longest_range = settings.date_ranges[-1]
    subset = df[df.date_range == longest_range]
    subset['account_id'] = account_id
    subset = subset.reset_index(drop=True).drop_duplicates()
    report.writeDataframeToTable(subset, self.entity_table_name)
def getHtmlContent(self):
    """Render the 'emergency stop paused' budget-commander email body."""
    email_html_template = "budget_commander_emergency_stop_paused.html"
    template_path = os.path.abspath(
        os.path.join(Settings().python_dir, "email_templates",
                     email_html_template))
    # Bug fix: close the template file (previously leaked an open handle).
    with open(template_path) as template_file:
        template = Template(template_file.read())
    html_content = template.render(
        username=self.budget_commander.username,
        account_name=self.budget_commander.name,
        account_id=self.budget_commander.account_id,
        google_account_id=self.budget_commander.google_id,
        day_limit=round(self.day_limit, 2),
        today_cost=round(self.today_cost, 2),
        currency_symbol=self.budget_commander.currency_symbol,
    )
    return html_content
def getHtmlContent(self):
    """Render the budget-commander notification email for this account."""
    template_path = os.path.abspath(
        os.path.join(Settings().python_dir, "email_templates",
                     "budget_commander_email_notification.html"))
    # Bug fix: close the template file (previously leaked an open handle).
    with open(template_path) as template_file:
        template = Template(template_file.read())
    html_content = template.render(
        username=self.username,
        account_name=self.name,
        account_id=self.account_id,
        google_account_id=self.google_id,
        budget=float(self.budget),
        spend=float(self.this_month_spend),
        currency_symbol=self.currency_symbol,
    )
    return html_content
def __init__(self, account_id, date_range_string, options):
    """Hold per-report configuration: identifiers, table/column mappings
    pulled from *options*, and shared helpers."""
    # Identity of this report run.
    self.account_id = account_id
    self.date_range_string = date_range_string
    self.options = options
    self.save_report_as_name = date_range_string + ".csv"
    # Report/table wiring taken from the options dict.
    self.report_name = options["report_name"]
    self.performance_table_name = options["performance_table_name"]
    self.entity_table_name = options["entity_table_name"]
    self.entity_id_name = options["entity_id_name"]
    self.where_string = options["where_string"]
    self.queryColumnsToTableColumns = options["queryColumnsToTableColumns"]
    self.queryColumnsToDownloadColumns = options[
        "queryColumnsToDownloadColumns"]
    # Shared helpers and bookkeeping.
    self.settings = Settings()
    self.helpers = Helpers()
    self.moneyFields = ["cost"]  # columns delivered in micros
    self.rate_errors = 0
def send_batch_job_request_async(self):
    """Create a batch job, upload this instance's operations to it, and
    return the job id/status without waiting for completion."""
    helper = self.getClient().GetBatchJobHelper(
        version=Settings.api_version())
    batch_job = self.add_batch_job()
    print('Created BatchJob with ID "{0}", status "{1}"'.format(
        batch_job['id'], batch_job['status']))
    helper.UploadOperations(batch_job['uploadUrl']['url'], self.operations)
    return {
        'batch_job_id': batch_job['id'],
        'batch_job_status': batch_job['status'],
    }
def __init__(self, account_id, budget_group_id=None):
    """Load everything needed to evaluate an account's budget status:
    account info, user settings, and this/last month's spend.

    NOTE(review): statement order appears deliberate — e.g. the
    budget-commander table is created before settings are read; confirm
    before reordering.
    """
    self.account_id = account_id
    self.local_dates = LocalDates(account_id)
    self.budget_group_id = budget_group_id
    self.budget_group_info = self.getBudgetGroupInfo()
    self.envvars = (Settings()).getEnv()
    # Presumably ensures the table exists before the reads below — TODO confirm.
    self.createBudgetCommanderTable()
    self.account_info = self.getAccountInfo()
    self.name = self.account_info["name"]
    self.google_id = self.account_info["google_id"]
    self.currency_symbol = (Currency()).getSymbol(account_id)
    self.user_settings = self.getBudgetCommanderSettings()
    self.username = self.getUserName()
    self.budget = self.getBudget()
    self.this_month_spend = self.getThisMonthSpend()
    self.last_month_spend = self.getLastMonthSpend()
    # True when this month's spend is still within the configured budget.
    self.under_budget = self.accountIsUnderBudget(self.budget,
                                                  self.this_month_spend)
def keywordScore(account_name):
    """Score an account's keywords: read the report, derive metrics,
    apply targets/filters, and write the scored frame to CSV."""
    settings = Settings()
    targets = getTargets()
    # Sequential transformation pipeline over the keyword report.
    report = readKeywordReport(settings, account_name)
    report = renameColumns(report)
    report = dfMetricsToFloat(settings.metrics, report)
    report = addCalculatedMetrics(report, settings)
    report = addFilters(report, targets)
    report = addScore(report, targets)
    report = addMessages(report)
    writeToCsv(report, settings)
def get_batch_job(self, client, batch_job_id):
    """
    Retrieves the BatchJob with the given id.
    :param client: an instantiated AdWordsClient used to retrieve the BatchJob.
    :param batch_job_id: a long identifying the BatchJob to be retrieved.
    :return: The BatchJob associated with the given id.
    """
    service = client.GetService('BatchJobService', Settings.api_version())
    selector = {
        'fields': ['Id', 'Status', 'DownloadUrl'],
        'predicates': [{
            'field': 'Id',
            'operator': 'EQUALS',
            'values': [batch_job_id],
        }],
    }
    entries = service.get(selector)['entries']
    return entries[0]
def main(account_id):
    """Download ad performance for every configured date range, store it
    in the performance tables, then register the adverts in the entities
    table."""
    Log("info", "getting ad performance from the api", "", account_id)
    settings = Settings()
    for date_range in settings.date_ranges:
        report = Report(account_id, date_range, options)
        report.createAccountDirectory()
        report.createReportDirectory()
        report.downloadReport(account_id, options["where_string"])
        df = report.convertCsvToDataframe()
        if functions.dfIsEmpty(df):
            continue
        df = report.basicProcessing(df)
        df = reportSpecificProcessing(df, date_range, account_id)
        # Delete-then-write for both tables ("Exiting" is the helper's
        # actual spelling).
        deleteExitingData(account_id, date_range, 'ad_performance_reports')
        report.writeDataframeToTable(df, 'ad_performance_reports')
        deleteExitingData(account_id, date_range, 'advert_performance')
        report.writeDataframeToTable(df, 'advert_performance')
    # NOTE(review): everything below uses `df`/`report` from the LAST loop
    # iteration; if the final date range was empty we bail out here.
    if functions.dfIsEmpty(df):
        return
    if 'advert_id' not in df.columns:
        Log('error', 'advert_id not in df columns', df.columns, account_id)
        return
    df["id"] = df["advert_id"]
    df = addParentId(df, account_id)  # our UUID from the adverts table
    report.writeToEntitiesTable(
        df, report, account_id)  # add the final date range data to adverts
def send(from_email, to_emails, subject, html_content):
    """Send an email via SendGrid.

    Best effort: failures are printed, never raised (same policy as
    before).
    """
    SENDGRID_API_KEY = (Settings()).getEnv()["SENDGRID_API_KEY"]
    try:
        message = Mail(from_email=from_email,
                       to_emails=to_emails,
                       subject=subject,
                       html_content=html_content)
    except Exception as e:
        print(e)
        print(traceback.format_exc())
        # Bug fix: previously execution fell through and referenced the
        # unbound `message` variable below.
        return
    try:
        sg = SendGridAPIClient(SENDGRID_API_KEY)
        response = sg.send(message)
        # print(response.status_code)
        # print(response.body)
        # print(response.headers)
    except Exception as exception:
        print(exception)
def send_batch_job_request_sync(self):
    """Create a batch job, upload this instance's operations, block until
    the result is ready, and return the parsed response."""
    helper = self.getClient().GetBatchJobHelper(
        version=Settings.api_version())
    batch_job = self.add_batch_job()
    print('Created BatchJob with ID "{0}", status "{1}"'.format(
        batch_job['id'], batch_job['status']))
    helper.UploadOperations(batch_job['uploadUrl']['url'], self.operations)
    # Poll until the job finishes, then download and parse the result.
    download_url = self.get_batch_job_download_url_when_ready(
        batch_job['id'])
    response = helper.ParseResponse(urlopen(download_url).read())
    self.helpers.print_batch_job_response(response)
    return response
def getHtmlContent(self, new_status):
    """Render the monthly campaign-status-update email for *new_status*.

    :param new_status: 'Paused' or 'Enabled'.
    :raises ValueError: for any other status (previously an unknown
        status crashed later with UnboundLocalError on the template name).
    """
    templates = {
        'Paused': "budget_commander_monthly_campaign_status_update_paused.html",
        'Enabled': "budget_commander_monthly_campaign_status_update_enabled.html",
    }
    try:
        email_html_template = templates[new_status]
    except KeyError:
        raise ValueError("Unknown campaign status: %r" % (new_status,))
    template_path = os.path.abspath(
        os.path.join(Settings().python_dir, "email_templates",
                     email_html_template))
    # Close the template file promptly (previously leaked an open handle).
    with open(template_path) as template_file:
        template = Template(template_file.read())
    html_content = template.render(
        username=self.budget_commander.username,
        account_name=self.budget_commander.name,
        account_id=self.budget_commander.account_id,
        google_account_id=self.budget_commander.google_id,
        budget=float(self.budget),
        spend=float(self.budget_commander.this_month_spend),
        currency_symbol=self.budget_commander.currency_symbol,
        new_status=new_status)
    return html_content
def getClient(user_id=None, account_id=None, client_customer_id=None):
    """Returns the adwords client.

    Manager level if a user_id is provided; client (account) level if
    account_id and client_customer_id are provided.
    """
    helpers = Helpers()
    settings = Settings()  # kept: constructed as in the original
    if user_id is None:
        refresh_token = helpers.getRefreshToken(account_id)
    else:
        refresh_token = helpers.getRefreshTokenFromUserId(user_id)
    if not refresh_token:
        Log("info", "Can't determine refresh_token for user %s " % (user_id),
            '', account_id)
        return
    adwords_config = helpers.getYamlData()["adwords"]
    developer_token = adwords_config["developer_token"]
    oauth_client = oauth2.GoogleRefreshTokenClient(
        client_id=adwords_config["client_id"],
        client_secret=adwords_config["client_secret"],
        refresh_token=refresh_token)
    if client_customer_id is None:
        return adwords.AdWordsClient(developer_token, oauth_client)
    return adwords.AdWordsClient(developer_token,
                                 oauth_client,
                                 client_customer_id=client_customer_id)
def main(self, file, account_id=None):
    """Launch another python script as a subprocess.

    :param file: script path (joined onto the python dir on Windows,
        used as-is elsewhere).
    :param account_id: optional; appended to the command as ``-a <id>``.
    """
    if os.name == 'nt':  #running on windows
        path = os.path.join(Settings().python_dir, file)
    else:
        path = file
    # NOTE(review): shell=True with a string command is acceptable only if
    # `file` is always internally built; unsafe for user-supplied paths.
    command = 'python3 ' + path
    if account_id:
        command = command + " -a %s" % (account_id)
    print(command)
    try:
        # Log("info", "Attempting to run process.", command)
        subprocess.Popen(command, shell=True)
    except Exception as e:
        # NOTE(review): this fallback passes the same string with
        # shell=False, which fails on POSIX (string is treated as one
        # executable name) — presumably only exercised on Windows; confirm.
        subprocess.Popen(command, shell=False)
        Log("info", "command: %s" % (command), "", None)
        Log("error", e, traceback.format_exc(), None)
        print(e)
        exit(1)
def basicProcessing(self, df):
    """Common post-download processing: rename download columns to their
    table names, stamp metadata, convert micro-amounts to currency units,
    and add the calculated metrics (ctr, etc.)."""
    # Build a direct download-column -> table-column mapping.
    downloadColumnsToTableColumns = {}
    for col in self.queryColumnsToDownloadColumns:
        downloadColumn = self.queryColumnsToDownloadColumns[col]
        tableColumn = self.queryColumnsToTableColumns[col]
        downloadColumnsToTableColumns[downloadColumn] = tableColumn
    df.reset_index(inplace=True)
    df["created_at"] = datetime.now()
    df["updated_at"] = datetime.now()
    df["account_id"] = self.account_id
    df = df.rename(columns=downloadColumnsToTableColumns)
    # Money fields arrive in micros.
    for field in self.moneyFields:
        if field in list(df.columns):
            df[field] = df[field] / 1000000
    # Add the calculated metrics (ctr, etc).
    settings = Settings()
    for metric in settings.calculatedMetrics:
        spec = settings.calculatedMetrics[metric]
        first_metric, operator, second_metric = spec[0], spec[1], spec[2]
        if first_metric not in df.columns:
            continue
        if second_metric not in df.columns:
            continue
        if operator != "/":
            # Non-division operators are not computed, so skipping here
            # also avoids the previous KeyError risk on the scaling step.
            continue
        df[metric] = df[first_metric] / df[second_metric]
        # Bug fix (consistency with the other calculated-metric blocks in
        # this file): a `continue` right after the division previously
        # made this percentage scaling unreachable.
        if spec[3]:
            df[metric] = df[metric] * 100
    return df
def createDfWithAllDateRanges(self, account_id):
    """Download this report for every configured date range and return
    the results stacked into one DataFrame (None when everything was
    empty, as before)."""
    settings = Settings()
    frames = []
    report = None
    for date_range in settings.date_ranges:
        # THIS_MONTH has no data on the first of the month — skip it.
        if date_range == "THIS_MONTH" and LocalDates(
                account_id).is_first_of_month:
            continue
        report = Report(account_id, date_range, self.options)
        report.createAccountDirectory()
        report.createReportDirectory()
        report.downloadReport(account_id, report.where_string)
        df = report.convertCsvToDataframe()
        df["date_range"] = date_range
        if df.shape[0] == 0:
            print("%s df is empty" % (date_range))
            continue
        frames.append(df.copy())
    if not frames:
        Log("info", "%s report is empty" % (self.report_name), "",
            self.account_id)
        return
    # pd.concat replaces DataFrame.append, which is deprecated and was
    # removed in pandas 2.x; the stacked result is the same.
    all_df = pd.concat(frames)
    all_df = report.stringifyIds(all_df)
    return all_df.reset_index()