def main():
    """Run each morning after data processing has occurred.

    This is triggered to run after every_night.py. Reads the account id
    from the ``-a`` command-line flag, then runs the three budget-commander
    steps for that account.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-a")
    args = parser.parse_args()
    try:
        account_id = args.a
        if not account_id:
            print('Please specify an account id with -a')
            return
        if not Helpers().isActiveAccount(account_id):
            Log("info", "this account isn't active. Exiting", '', account_id)
            return
        NotifyViaEmail(account_id).main()
        MonthlyStop(account_id)
        ControlSpend(account_id).main()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer intercepted; the error is logged and then re-raised.
        Log("error", "error starting run_budget_commander.py from command line",
            traceback.format_exc())
        raise
def main():
    """Download data from the api.

    Process the data ready for the app, then fire the budget-commander run
    for the same account.
    """
    Log("info", "process_account running", "from process_account.py")
    parser = argparse.ArgumentParser()
    parser.add_argument("-a")
    args = parser.parse_args()
    try:
        account_id = args.a
        if not account_id:
            Log('info', 'Please specify an account id with -a')
            return
        if not Helpers().isActiveAccount(account_id):
            Log("info", "this account isn't active. Exiting", '', account_id)
            return
        download.main(account_id)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer intercepted; the error is logged and then re-raised.
        Log("error", "error starting every night from command line",
            traceback.format_exc())
        raise
    # Only reached when the download succeeded.
    Log("info", "firing run_budget_commander command", '', account_id)
    myProcess().main("run_budget_commander.py", account_id)
def main(self):
    """Populate the adverts and advert_performance tables."""
    Log("info", "populating the adverts and advert_performance tables", "",
        self.account_id)
    settings = Settings()
    for date_range in settings.date_ranges:
        chunks = self.dataFrameFromAdPerformanceReports(date_range)
        self.deleteExisting(date_range)
        # Iterate the chunk generator directly; the for-loop handles
        # StopIteration for us.
        for chunk in chunks:
            if functions.dfIsEmpty(chunk):
                continue
            try:
                chunk = self.processDf(chunk, date_range)
            except Exception as exception:
                Log("error", str(exception), traceback.format_exc())
                raise
            chunk = self.addInfo(chunk)
            self.writeToAdvertPerformanceTable(chunk, date_range)
def addUserAccounts(self, user_id, first_run):
    """Fetch and store the ad accounts belonging to one user.

    Errors are logged but not re-raised so one failing user does not stop
    a batch run over many users.
    """
    if not Helpers().isActiveUser(user_id):
        Log("info", "this user isn't active. Exiting", 'user_id: %s' % (user_id))
        return
    try:
        Log("info", "adding accounts for user id '%s'" % user_id)
        self.user_id = user_id
        df = self.getAccountsDf()
        if functions.dfIsEmpty(df):
            return
        # De-duplicate within this pull, then against what's already stored.
        df = df.drop_duplicates('google_id')
        df = self.dropDuplicates(df, first_run)
        if first_run and df.shape[0] == 0:
            Log('warning', "no unique google accounts were found for this user",
                "user_id (%s)" % (user_id), "")
        df.to_sql("accounts", Database().createEngine(),
                  index=False, if_exists="append")
    except Exception as exception:
        Log("error", str(exception) + " (User id: %s)" % (user_id),
            traceback.format_exc())
    Log("info", "finished adding account meta data")
def createNotification(account_id):
    """Create the "account synced" in-app notification and send the email.

    Only runs the first time an account is processed; on later runs it
    logs and exits.
    """

    def isFirstRun(account_id):
        # First run == the processed_at timestamp has never been written.
        query = "select ad_performance_report_processed_at from accounts where id = '%s'" % (
            account_id)
        results = Database().executeQuery(query)
        for result in results:
            return result[0] is None

    if not isFirstRun(account_id):
        Log('info', "the notification won't be created as this isn't the first run",
            '', account_id)
        return
    Log('info', "creating successful sync notification", '', account_id)
    user_id = Helpers().getUserIdFromAccountId(account_id)
    account_name = Helpers().getAccountNameFromId(account_id)
    account_google_id = Helpers().getAccountGoogleIdFromId(account_id)
    username = Helpers().getUsername(account_id)
    # NOTE(review): values are interpolated straight into the SQL. The ids are
    # internal, but account_name is user-controlled — consider a parameterized
    # query. ("Refresh" typo in the user-facing message fixed here.)
    query = r"""
    insert into notifications
    (id, type, notifiable_id, notifiable_type, data, created_at, updated_at)
    values
    ('%s', 'App\\Notifications\\AccountSynced', '%s','App\\User',
    '{"message":"%s was synced successfully! Refresh the page to access the account."}',
    now(), now())
    """ % (str(uuid.uuid4()), user_id, account_name)
    Database().executeQuery(query)
    sendEmail(account_name, account_google_id, username, account_id)
def main(self):
    """Decide whether a budget email needs to be sent for this account."""
    settings = self.user_settings
    if settings["pause_campaigns"]:
        print("paused campaigns is enabled. Exiting...")
        return
        # they'll get an email when the campaigns pause so there's no need
        # to send one here
    print("running email notifier")
    if self.budget is None:
        return
    print("under_budget: " + str(self.under_budget))
    print("email_sent: " + str(settings["email_sent"]))
    if not settings["notify_via_email"]:
        Log("info", "email notifications turned off", '', self.account_id)
        return
    if self.under_budget and settings["email_sent"]:
        # Back under budget after an email went out — reset the flag.
        self.markAsNotSent()
        return
    if not settings["email_sent"] and not self.under_budget:
        self.sendEmail()
        return
    Log("info", "no action", "NotifyViaEmail", self.account_id)
def main(self):
    """Pause campaigns for the month when spend exceeds the budget, and
    re-enable them when spend is back under budget (if the user allows it)."""
    self.store_excess_budget()
    commander = self.budget_commander
    if not commander.user_settings["pause_campaigns"]:
        Log("info", "pause_campaigns is disabled", '', self.account_id)
        return
    # Evaluate both conditions up-front (same call order as always).
    enabled_this_month = commander.campaignsAreEnabledMonth()
    over_budget = self.spendIsOverBudget()
    if over_budget and enabled_this_month:
        (PauseEnableCampaigns(self.account_id)).pauseForMonth()
        Log("info",
            "Budget commander monthly stop: campaigns paused for the month",
            "", self.account_id)
        self.sendEmail('Paused')
        return
    paused_this_month = commander.campaignsArePausedMonth()
    under_budget = self.spendIsUnderBudget()
    if (under_budget and paused_this_month
            and commander.user_settings["enable_campaigns"]):
        (PauseEnableCampaigns(self.account_id)).enableForMonth()
        Log("info",
            "Budget commander monthly stop: campaigns enabled for the month",
            "", self.account_id)
        self.sendEmail('Enabled')
        return
    Log("info", "Budget commander monthly stop: no actions", "",
        self.account_id)
def get_log():
    """Return the shared Log instance, creating it lazily under a lock.

    NOTE(review): this looks like a broken singleton — the guard tests
    ``obj.run`` but the cached value lives in ``MyLog.log``, and the lock
    acquired is ``obj.mutex`` while the one released is ``MyLog.mutex``.
    Confirm whether ``obj`` and ``MyLog`` refer to the same object; if not,
    the lock is never released on the object it was taken on.
    """
    if obj.run is not None:
        # only one thread can acquire the lock successfully
        obj.mutex.acquire()
        MyLog.log = Log()
        # release the lock
        MyLog.mutex.release()
    return MyLog.log
# Dead code preserved below: a disabled report-emailing block kept inside a
# string literal (plus a stray trailing quote from the original file).
'''
finally:
    logger.info("*********TEST END*********")
    # send test report by email
    if Email_on_off == 'on':
        self.email.send_email()
    elif Email_on_off == 'off':
        logger.info("Doesn't send report email to developer.")
    else:
        logger.info("Unknow state.")
    # # send test report by DingTalk
    # if DingTalk_on_off == 'on':
    #     #self.Dingtalk.send_message_to_robot()
    # elif DingTalk_on_off == 'off':
    #     logger.info("Doesn't send report DingTalk to developer.")
    # else:
    #     logger.info("Unknow state.")
'''
'''
class Testbuild3(Testtodolist):
    log = Log()

    def test_01(self):
        """
        Steps:
        1. Open the search results page
        2. Pull down to refresh once, then leave the search results page
        3. The home-page view event should be recorded once
        :return:
        """
        begin_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print(begin_date)
        build = TodolistBuild(self.driver)
        build.click_alert()
        build.click_meigou()
        time.sleep(5)
        build.click_search()
        time.sleep(2)
        build.switch_android_up()
        time.sleep(5)
        # Push the app to the background for 5s, then bring it back.
        self.driver.background_app(5)
        time.sleep(5)
def main(account_id):
    """Download keyword performance from the api and write it to the
    entities and performance tables.

    Removed the commented-out debug print that was left in the body.
    """
    Log("info", "getting keyword performance from the api", "", account_id)
    report = Report(account_id, "", options)
    df = report.createDfWithAllDateRanges(account_id)
    if functions.dfIsEmpty(df):
        return
    # Column headers at this point are exactly as named in the download.
    df["keyword_id"] = df.apply(
        lambda row: functions.addIdToDf(account_id, row["Keyword ID"],
                                        row["Ad group ID"]),
        axis=1)
    df["id"] = df["keyword_id"]
    df = addParentId(df, account_id)  # our UUID from the keywords table
    df = report.basicProcessing(df)
    df = reportSpecificProcessing(df)
    report.writeToEntitiesTable(df, report, account_id)
    report.writeToPerformanceTable(df, report, account_id)
class Testbuild(Testtodolist):
    log = Log()

    def test_01(self):
        """Home page -> tap a diary entry; verify the diary page_view event."""
        begin_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print(begin_date)
        build = TodolistBuild(self.driver)
        build.click_alert()
        time.sleep(10)
        build.click_home_diary()
        time.sleep(10)
        self.driver.background_app(5)
        time.sleep(10)
        result = mysql_test.query(action='page_view',
                                  event_time=begin_date,
                                  page_name='diary_detail')
        print(result)
        params = result[0]['params']
        assert params['referrer'] == 'home', 'referrer获取错误!'
        assert params['referrer_tab_name'] == '精选', 'referrer_tab_name获取错误!'
        assert len(result) == 1, f'埋点数量错误,预期为1个,实际为{len(result)}'
        print("page_view: %s" % params)
class Testbuild3(Testtodolist):
    log = Log()

    def test_01(self):
        """
        Steps:
        1. Meigou home page
        2. Category aggregation
        3. Search box
        4. Tap a hot-search word
        :return:
        """
        begin_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print(begin_date)
        testbuild = TodolistBuild(self.driver)
        testbuild.click_alert()
        testbuild.click_meigou()
        time.sleep(5)
        testbuild.click_czsl()
        testbuild.click_search()
        time.sleep(2)
        testbuild.click_hot_search()
        time.sleep(5)
        self.driver.background_app(5)
        time.sleep(5)
        # meigou search tracking event
        result = mysql_test.query(action='search_result_open',
                                  event_time=begin_date)
        # Fixed vacuous assertion: `result != ' '` is always true for a list,
        # so the check could never fail. Assert the result is non-empty.
        assert result, 'search_result_open事件为空!'
class Testbuild4(Testtodolist):
    log = Log()

    # NOTE(review): this method is named `testbuild`, which does not match
    # pytest's default `test_*` discovery pattern — confirm it is actually
    # collected and run.
    def testbuild(self):
        """
        Steps:
        1. Meigou home page
        2. Search box
        4. Tap a hot-search word
        5. Switch to the doctor tab
        :return:
        """
        begin_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        testbuild = TodolistBuild(self.driver)
        testbuild.click_alert()
        testbuild.click_meigou()
        testbuild.click_welfare_home_search()  # occasionally flaky
        testbuild.click_doctor()
        testbuild.click_hot_search()  # occasionally flaky
        time.sleep(5)
        self.driver.background_app(5)
        time.sleep(5)
        # search-box tracking event
        result = mysql_test.query(action='search_result_open',
                                  event_time=begin_date)
        # Fixed vacuous assertion: `result != ' '` is always true for a list,
        # so the check could never fail. Assert the result is non-empty.
        assert result, 'search_result_open事件为空!'
class Testbuild(Testtodolist):
    log = Log()

    def test_01(self):
        """Home category aggregation -> tap hyaluronic acid.

        Opening the home page and entering any second-level page should
        record exactly one home-page view event.
        """
        begin_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print(begin_date)
        build = TodolistBuild(self.driver)
        build.click_alert()
        time.sleep(10)
        build.click_bns()
        time.sleep(3)
        self.driver.background_app(5)
        time.sleep(10)
        result = mysql_test.query(action='page_view',
                                  event_time=begin_date,
                                  page_name='home')
        print(result)
        params = result[0]['params']
        assert params['page_name'] == 'home', 'page_name获取错误!'
        assert len(result) == 1, f'埋点数量错误,预期为1个,实际为{len(result)}'
class Testbuild(Testtodolist):
    log = Log()

    def test_01(self):
        """Meigou home page -> category aggregation (wrinkle/face-slimming).

        Verifies the welfare_list page_view event is recorded once.
        :return:
        """
        begin_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        build = TodolistBuild(self.driver)
        build.click_alert()
        build.click_meigou()
        build.click_czsl()
        time.sleep(10)
        self.driver.background_app(5)
        time.sleep(10)
        result = mysql_test.query(action='page_view',
                                  event_time=begin_date,
                                  page_name='welfare_list')
        print(result)
        params = result[0]['params']
        assert params['referrer'] == 'welfare_home', 'referrer获取错误!'
        assert params['page_name'] == 'welfare_list', 'page_name获取错误!'
        assert len(result) == 1, f'埋点数量错误,预期为1个,实际为{len(result)}'
class Testbuild3(Testtodolist):
    log = Log()

    def test_01(self):
        """
        Steps:
        1. Meigou home page
        2. Refresh
        3. The meigou home-page view event is recorded once
        :return:
        """
        begin_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        build = TodolistBuild(self.driver)
        build.click_alert()
        build.click_meigou()  # open the meigou home page
        build.switch_android_up()
        time.sleep(2)
        # Push the app to the background for 5s, then bring it back.
        self.driver.background_app(5)
        result = mysql_test.query(action='page_view',
                                  event_time=begin_date,
                                  page_name='welfare_home')
        print(result)
        params = result[0]['params']
        assert params['referrer'] == 'home', 'referrer获取错误!'
        assert params['page_name'] == 'welfare_home', 'page_name获取错误!'
        assert len(result) == 1, f'埋点数量错误,预期为1个,实际为{len(result)}'
def __init__(self, account_id):
    """Set up the emergency-stop check for an account and run it.

    Exits early (without running) when the user has disabled emergency
    stop or when this month's spend is already over budget.
    """
    self.account_id = account_id
    self.budget_commander = BudgetCommander(account_id)
    self.local_dates = LocalDates(account_id)
    commander = self.budget_commander
    if not commander.user_settings['emergency_stop']:
        Log("info", "Emergency stop is disabled.", "", self.account_id)
        return
    self.budget = commander.budget
    if commander.this_month_spend >= self.budget:
        Log("info",
            "this month spend (%s) is over this month's budget (%s). Exiting."
            % (commander.this_month_spend, self.budget),
            "", self.account_id)
        return
    self.costs = GetCostFromApi(account_id)
    self.today_cost = self.costs.today_cost
    self.day_budget_percentage = self.costs.day_budget_percentage
    self.day_limit = self.getDayLimit()
    self.main()
def __init__(self):
    """Initialise the email sender: logger plus SMTP placeholders."""
    super(sendEmail, self).__init__()
    # Fixed logger-name typo: was "snedEmail".
    self.L = Log("sendEmail", 'DEBUG').logger
    self.sender_email = 'EMAIL_SENDER'
    # NOTE(review): placeholder credentials — real values should come from
    # configuration, not source code.
    self.password = '******'
    self.smtpHost = 'smtpHost'
    self.receiver = 'receiver'
def store_excess_budget(self):
    """Only run on the 1st of the month *
    - Take the budget *
    - Take away last month's spend *
    - Any remaining budget is stored as the excess """
    def update_excess_budget(excess_budget):
        # Persist the computed excess on this account's budget_commander row.
        Log('info', 'Storing excess budget',
            "excess_budget: %s" % (excess_budget), self.account_id)
        Database().setValue('budget_commander', 'excess_budget',
                            excess_budget,
                            'where account_id = "%s"' % (self.account_id))

    # NOTE(review): this reset runs on *every* call, not only the 1st of the
    # month — the is_first_of_month guard below only protects the rollover
    # calculation. Confirm resetting on each run is intended.
    if not self.budget_commander.user_settings['rollover_spend']:
        Log('info', 'rollover_spend is disabled. Setting excess to 0', '',
            self.account_id)
        update_excess_budget(0)
        return
    if not self.local_dates.is_first_of_month:
        return
    if self.budget_commander.budget_group_id:
        #no rollover for budget groups
        return
    remaining = float(self.budget) - float(
        self.budget_commander.last_month_spend)
    if remaining < 0:
        # NOTE(review): returns 0 without persisting anything — if the intent
        # is "no excess when overspent", this probably should call
        # update_excess_budget(0) instead. The return value is unused.
        return 0
    update_excess_budget(remaining)
def main(account_id):
    """Fetch account performance reports from the api and load them."""
    Log("info", "getting account performance reports from the api", "",
        account_id)
    report = Report(account_id, "last_30_days", options)
    report.createAccountDirectory()
    report.createReportDirectory()
    report.downloadReport(account_id, options["where_string"])
    raw = report.convertCsvToDataframe()
    processed = report.basicProcessing(raw)
    processed = reportSpecificProcessing(processed, account_id)
    # Replace any previously loaded rows for this account before writing.
    deleteExitingData(account_id, options["performance_table_name"])
    report.writeDataframeToTable(processed, options["performance_table_name"])
class Testbuild(Testtodolist):
    log = Log()

    def test_01(self):
        """Home page, pull to refresh; exactly one home page_view event.
        :return:
        """
        begin_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print(begin_date)
        build = TodolistBuild(self.driver)
        build.click_alert()
        time.sleep(3)
        build.switch_android_up()
        time.sleep(2)
        self.driver.background_app(5)
        time.sleep(10)
        result = mysql_test.query(action='page_view',
                                  event_time=begin_date,
                                  page_name='home')
        print(result)
        params = result[0]['params']
        assert params['page_name'] == 'home', 'page_name获取错误!'
        assert len(result) == 1, f'埋点数量错误,预期为1个,实际为{len(result)}'
def addNew(self):
    """Add accounts for new users - users without any accounts."""
    first_run = True
    self.user_ids = self.getUserIdsToProcess(first_run)
    if not self.user_ids:
        Log('info', 'no new users to process')
        # Added early return: previously execution fell through to the loop,
        # which would raise TypeError if getUserIdsToProcess returned None.
        return
    for user_id in self.user_ids:
        self.addUserAccounts(user_id, first_run)
def addAll(self):
    """Add accounts for all users - to run daily. Used for getting new accounts"""
    first_run = False
    self.user_ids = self.getUserIdsToProcess(first_run)
    for uid in self.user_ids:
        self.addUserAccounts(uid, first_run)
    Log("info", "finished adding account meta data")
def accountLevel(self):
    """Compute winning elements aggregated across the whole account."""
    Log("info", "processing account winning elements", "", self.account_id)
    self.table_name = "account_winning_elements"
    # Ad components compared against each other at account level.
    self.lines = ["headline_1", "headline_2", "headline_3",
                  "description", "description_2"]
    self.group_by_column = "account_id"
    self.process()
def controlSpend(self):
    """Reduce keyword bids when the spend forecast would breach the limit.

    Returns the DataFrame of reduced bids when action was taken, else None.
    """
    self.storeOriginalBids()
    self.getTotalSpendForecast()
    Log('info', "Forecast: %s" % (self.total_spend_forecast, ), '',
        self.account_id)
    over_budget_pct = self.getForecastOverBudgetPercentage() * 100
    Log('info', "Spend Vs limit: %s" % (over_budget_pct), '',
        self.account_id)
    if self.forecastIsOverLimit() and self.spendIsUnderBudget():
        Log('info', "Reducing bids...", '', self.account_id)
        df = self.reduceBids()
        self.updateBids(df)            # push to mutations queue
        self.updateKeywordsTable(df)   # reflect the new bids in the keywords table
        return df
def __init__(self):
    """Aggregate records by the type of the supplied parameter.

    Input shape is [{}, {}, ...]; only the first matching record is kept
    and later duplicates are dropped; output shape is [{}, {}, ...].
    """
    self.__tmp_date = set()
    self.__operate_list_dict = list()
    self.__result = list()
    self.__key = ""
    self.L = Log("DataAggregate")
def main(df, account_id, settings):
    """Find the best-performing ad elements per campaign and store them.

    For each element type ("line") the top 3 values by CTR per campaign
    (above-median impressions only) are written to
    campaign_winning_elements. Returns the processed input DataFrame.
    """
    Log("info", "processing campaign winning element", "", account_id)
    if functions.dfIsEmpty(df):
        return
    # add the campaign id, add path_1_path_2
    df = processDf(df, account_id, settings)
    lines = ["headline_1", "headline_2", "description", "path_1_path_2"]
    # Accumulate per-campaign results in a list and concat once at the end.
    # This replaces the old `ldf.append(...)` inside a bare try/except
    # (DataFrame.append is removed in pandas 2.x; the bare except also
    # masked real errors).
    frames = []
    for line in lines:
        this_df = df[["clicks", "impressions", "campaign_id",
                      line]].groupby(["campaign_id", line]).sum()
        this_df["ctr"] = (this_df.clicks / this_df.impressions) * 100
        if functions.dfIsEmpty(this_df):
            # NOTE(review): `break` abandons the remaining lines; confirm
            # `continue` wasn't intended.
            break
        campaign_ids = list(this_df.index.levels[0])
        this_df = this_df.reset_index()
        for campaign_id in campaign_ids:
            tdf = this_df.copy()
            # Only keep values with above-median impressions for this campaign.
            quantile = tdf[(tdf.campaign_id == campaign_id)].impressions.quantile()
            tdf = tdf[(tdf.campaign_id == campaign_id)
                      & (tdf.impressions > quantile)
                      & (tdf[line] != "/")].sort_values(
                          "ctr", ascending=False).head(3).reset_index(drop=True)
            if tdf.shape[0] == 0:
                continue
            tdf["order"] = tdf.index + 1
            tdf["type"] = line
            tdf.rename(columns={line: "value"}, inplace=True)
            frames.append(tdf[["campaign_id", "value", "order", "type"]])
    if not frames:
        return
    ldf = pd.concat(frames).reset_index(drop=True)
    ldf.value = ldf.value.str.replace(" --", "")
    ldf["id"] = pd.Series([uuid.uuid1() for i in range(len(ldf))]).astype(str)
    # now let's add created_at and updated_at as today
    ldf["created_at"] = datetime.now()
    ldf["updated_at"] = datetime.now()
    date_range = "last_30_days"
    ldf["date_range"] = date_range
    ldf.to_sql("campaign_winning_elements", settings.createEngine(),
               if_exists='append', index=False)
    return df
def main(account_id):
    """Build the budget-vs-actual graph data for one account."""
    Log("info", "processing budget commander data", "", account_id)
    daily_performance = getAccountPerformanceByDay(
        "account_performance_reports", account_id, date_range)
    if functions.dfIsEmpty(daily_performance):
        return
    addBudgetActualGraphDataToDb(daily_performance, account_id)
def __init__(self, path: str):
    """Load the YAML file at ``path`` into ``self.yml``.

    ``self.yml`` stays ``None`` when the path does not exist or parsing
    fails; both cases are logged.
    """
    self.path = path
    self.log = Log().get_logger()
    self.yml = None
    if not os.path.exists(self.path):
        self.log.error(f"指定文件路径不存在:{self.path}")
        # Added early return: previously execution continued to open() and
        # crashed with an unlogged FileNotFoundError.
        return
    with open(path, 'r', encoding='utf-8') as f:
        try:
            self.yml = yaml.safe_load(f.read())
        except Exception as e:
            # Route parse errors through the logger instead of print().
            self.log.error(e)
def bestWorstPerformers(account_id, settings):
    """Store the single best and worst performing ad for an account.

    Considers only the top 20% of ads by cost, then — among those with
    above-median cost and above-median impressions — picks the highest-CTR
    ad as best and the lowest-CTR ad as worst. The duplicated selection
    logic has been factored into one helper.
    """
    Log("info", "processing best and worst performers (ads)", "", account_id)
    advert_performance = WinningElements(account_id).getAdvertsDataframe()
    if advert_performance is None:
        return
    if len(advert_performance) == 0:
        return
    cols = [
        "id", "cost", "date_range", "ctr", "cpa", "roas", "conversion_rate"
    ]
    advert_performance = advert_performance[cols + ["impressions"]]
    # order by cost desc then trim to the top 20% of rows
    advert_performance = advert_performance.sort_values("cost",
                                                        ascending=False)
    num_rows = int(advert_performance.shape[0] / 5)
    advert_performance = advert_performance.iloc[0:num_rows, :]
    advert_performance["advert_id"] = advert_performance["id"]
    advert_performance["account_id"] = account_id
    # now let's add created_at and updated_at as today
    advert_performance["created_at"] = datetime.now()
    advert_performance["updated_at"] = datetime.now()
    median_cost = advert_performance.cost.median()
    quantile_impressions = advert_performance.impressions.quantile()

    def _extreme_performer(ascending):
        # Pick the single significant-spend ad with the extreme CTR
        # (ascending=False -> best, ascending=True -> worst).
        extreme = advert_performance[
            (advert_performance.cost > median_cost)
            & (advert_performance.impressions > quantile_impressions
               )].sort_values("ctr",
                              ascending=ascending).head(1).reset_index(
                                  drop=True)
        extreme["id"] = pd.Series(
            [uuid.uuid1() for i in range(len(extreme))]).astype(str)
        return extreme.drop(columns=["impressions", "cost"])

    append_df_to_sql_table(_extreme_performer(False), "best_performers",
                           settings.createEngine())
    append_df_to_sql_table(_extreme_performer(True), "worst_performers",
                           settings.createEngine())