def get_jlb_count(shop_id):
    """Get the shop's Jingling coin (精灵币) count."""
    try:
        jlb_count = CacheAdpter.get(CacheKey.WEB_JLB_COUNT % shop_id, 'web', 'no_cache')
        if jlb_count == 'no_cache':
            from apps.web.point import PointManager
            jlb_count = PointManager.refresh_points_4shop(shop_id=shop_id)
        return jlb_count
    except Exception as e:
        log.error('get_jlb_count error, e=%s, shop_id=%s' % (e, shop_id))
        return 0
def schedule(self):
    '''Time-of-day discount schedule (分时折扣).'''
    if not hasattr(self, '_schedule'):
        schedule = CacheAdpter.get(CacheKey.WEB_CAMPAIGN_SCHEDULE % self.campaign_id, 'web', None)
        if not schedule:
            try:
                top_schedule = self.tapi.simba_campaign_schedule_get(campaign_id=self.campaign_id)
            except TopError as e:
                log.error('get campaign schedule error, shop_id=%s, campaign_id=%s, e=%s'
                          % (self.shop_id, self.campaign_id, e))
                top_schedule = None
            if top_schedule and hasattr(top_schedule, 'campaign_schedule'):
                schedule = top_schedule.campaign_schedule.schedule
                CacheAdpter.set(CacheKey.WEB_CAMPAIGN_SCHEDULE % self.campaign_id, schedule, 'web', 60 * 30)
            else:
                schedule = ''  # fall back to an empty schedule when the API call yields nothing
        self._schedule = schedule
    return self._schedule
def __init__(self, shop_id):
    self.user = self.get_user(shop_id)
    # Ads that qualify for display: 1. flagged to show, 2. already launched, 3. within the valid period
    self.main_ads = get_main_ads()
    self.show_ads = {}
    if self.user:
        self.nick = self.user.nick
        self.shop_id = self.user.shop_id
        self.aus = self.get_aus()
        self.current_subscribe = self.get_current_subscribe()
        # Bind the various data sets
        self.data = {}
        try:
            self.data = CacheAdpter.get(CacheKey.WEB_POPUP_BIND_DATA_CACHE % self.shop_id, 'web', {})
            if not self.data:
                for name in dir(self):
                    if 'bind_' in name:
                        getattr(self, name)()  # call every bind_* method to populate self.data
                CacheAdpter.set(CacheKey.WEB_POPUP_BIND_DATA_CACHE % self.shop_id, self.data, 'web', 60 * 30)
        except Exception as e:
            log.error('bind datas error, e=%s, shop_id=%s' % (e, self.shop_id))
def download_task_new(task):
    st = time.time()
    shop_id = task['shop_id']
    campaign_id = task['campaign_id']
    adgroup_id = task['adgroup_id']
    token = task['token']
    start_time = task['start_time']
    end_time = task['end_time']
    source = task['source']
    search_type = task['search_type']
    cache_key = task['cache_key']
    cache_db = task['cache_db']
    succeeded = True
    cached_result = CacheAdpter.get(cache_key, cache_db, None)
    if not cached_result:  # already cached: skip the download (unless a re-download flag cleared the cache)
        base_list, effect_list = [], []
        tapi = get_tapi(shop_id=shop_id)
        try:
            base_list = KeywordRpt.download_kwrpt_base(shop_id, campaign_id, adgroup_id, token,
                                                       start_time, end_time, search_type, source, tapi)
            if base_list:
                effect_list = KeywordRpt.download_kwrpt_effect(shop_id, campaign_id, adgroup_id, token,
                                                               start_time, end_time, search_type, source, tapi)
        except TopError as e:
            log.error("download keyword report failed, e=%s, shop_id=%s, adgroup_id=%s" % (e, shop_id, adgroup_id))
            succeeded = False
        rpt_list = KeywordRpt.merge_kwrpt(shop_id, campaign_id, adgroup_id, base_list, effect_list)
        if rpt_list:
            CacheAdpter.set(cache_key, rpt_list, cache_db, 12 * 60 * 60)  # cache the merged report data
    log.info("total cost %.5f seconds, cache_key=%s" % ((time.time() - st), cache_key))
    CacheAdpter.set("%s_status" % cache_key, succeeded, cache_db, 60 * 10)  # the contractor deletes this status key once it has confirmed the task result
def get_synonym_words(cat_id, word_list):
    SynonymWord.load_in_cache_if_not()
    result_list = []
    cat_id_list = Cat.get_cat_attr_func(cat_id, "cat_path_id").split(' ') + [0]
    syno_dict = {}
    for cid in cat_id_list:
        temp_dict = CacheAdpter.get(CacheKey.KWLIB_SYNOWORD % cid, 'web', {})
        for word, syno_word_list in temp_dict.items():
            # setdefault merges per-word synonym lists without extending the list object held in the cache
            syno_dict.setdefault(word, []).extend(syno_word_list)
    for wl in word_list:
        result_list.extend(syno_dict.get(wl, []))
    return result_list
class NewSelectWordWorker(Worker):
    '''Worker for the quick word-selection task.'''

    def __init__(self, prj_dict):
        self.prj_dict = prj_dict
        self.item_scorer = ItemScorer(prj_dict['label_conf_list'])
        self.cats = self.prj_dict['cats']
        self.filter_conf = self.prj_dict['filter_conf']
        self.filter_list = self.prj_dict['filter_list']
        self.price_list = self.prj_dict['price_list']
        self.remove_word_list = self.prj_dict['remove_words'].split(',') if self.prj_dict['remove_words'] else []
        self.cat_cpc = self.prj_dict['cat_cpc']

    def do_my_work(self):
        log.info('worker start, item_id=%s, key=%s' % (self.prj_dict['item_id'], self.prj_dict['data_key']))
        kw_list = CacheAdpter.get(self.prj_dict['from_key'], self.prj_dict['from_db'], [])
        if not kw_list:
            log.error('can not get group from memcache, group key=%s' % self.prj_dict['from_key'])
            kw_list = CacheAdpter.get(self.prj_dict['from_key'], self.prj_dict['from_db'], [])  # retry once
        group_dict = {}
        if kw_list:
            if ('click > 0' not in self.filter_conf and 'click>0' not in self.filter_conf) or kw_list[0][2] > 0:
                cat_id = self.prj_dict['from_key'].split('_')[0]
                try:
                    group_dict = group_kwlist(kw_list, self.item_scorer, int(cat_id), self.cat_cpc, self.cats,
                                              self.remove_word_list, self.filter_conf, self.filter_list,
                                              self.price_list)
                except Exception as e:
                    log.error('group_kwlist error: cat_id=%s, e=%s' % (cat_id, e))
        CacheAdpter.set(self.prj_dict['data_key'], group_dict, self.prj_dict['data_db'])
        log.info('worker finish, item_id=%s, key=%s' % (self.prj_dict['item_id'], self.prj_dict['data_key']))
def point_praise(request):
    """Award points for positive reviews (好评送积分)."""
    from apps.web.point import PointManager
    shop_id = request.user.shop_id
    # Fetch the point balance
    point_count = CacheAdpter.get(CacheKey.WEB_JLB_COUNT % shop_id, 'web', 'no_cache')
    if point_count == 'no_cache':
        point_count = PointManager.refresh_points_4shop(shop_id=shop_id)
    # Fetch random positive reviews (currently disabled):
    # appraises = Appraise.objects.filter().order_by('?')
    # appraises = appraises[:5]
    # return render_to_response('point_praise.html', {'point_count': point_count, 'appraises': appraises},
    #                           context_instance=RequestContext(request))
    return render_to_response('point_praise.html', {'point_count': point_count},
                              context_instance=RequestContext(request))
def load_to_mem(cls, is_need_refresh=False):
    now_time = datetime.datetime.now()
    if not is_need_refresh:
        # When not forcing a refresh, decide whether one is needed: at most once a minute,
        # re-read the element-word modification time from the cache and compare it with ours.
        if not cls.mem_modifi_time:
            is_need_refresh = True
        elif get_time_delta(cls.mem_modifi_time, now_time, 'SECONDS') > 60:
            cache_modifi_time = CacheAdpter.get(CacheKey.KWLIB_ELEMWORD_MODIFI_TIME, 'web', cls.mem_modifi_time)
            if cache_modifi_time > cls.mem_modifi_time:
                is_need_refresh = True
            else:
                cls.mem_modifi_time = datetime.datetime.now()
    if is_need_refresh:
        for word in word_coll.find():
            cls.word_dict[word['word']] = word['count']
        cls.mem_modifi_time = datetime.datetime.now()
        log.info('init all elemword into mem')
def sum_prj_result(self, sub_prj_list, db_name):
    candi_kw_dict = {}
    key_list = []
    for prj in sub_prj_list:
        key_list.append(prj['data_key'])
    log.info('sum project result item_id=%s' % self.item_id)
    worker_result_dict = CacheAdpter.get_many(key_list, db_name)
    # Merge the workers' result data
    for temp_dict in worker_result_dict.values():
        for k, v in temp_dict.items():
            candi_kw_dict.setdefault(k, []).extend(v)
    # Sort and select per configured filter
    result_list = []
    filter_index = 0
    for select_filter in self.select_conf.select_conf_list:
        filter_index += 1
        kw_list = candi_kw_dict.get(select_filter.candi_filter, [])[0:10000]  # hard cap: at most 10000 per category, which is plenty
        if not kw_list:
            continue
        kw_list.sort(globals()['sort_kwlist_by_%s' % select_filter.sort_mode])  # look up the module-level comparison function by name
        # Take the configured range
        range_list = select_filter.select_num.split('-')
        if float(range_list[0]) < 1.0:  # the range is given as percentages
            start_index = int(len(kw_list) * float(range_list[0]))
            end_index = int(len(kw_list) * float(range_list[1]))
        else:
            start_index = int(range_list[0]) - 1
            end_index = int(range_list[1])
        result_list.extend([kw + [str(filter_index)] for kw in kw_list[start_index:end_index]])
    # result_list = remove_same_words(result_list)  # TODO wuhuaqiao: de-duplication re-sorts the list and changes the original order
    log.info('select keyword from kwlib, result=%s' % len(result_list))
    return result_list
def get_prj_statu(self, sub_prj_list, db_name):
    key_list = []
    server_dict = {}
    for prj in sub_prj_list:
        if prj['statu'] == 'finished':
            continue
        key_list.append(prj['data_key'] + '_statu')
        server_dict[prj['host'] + ':' + str(prj['port'])] = 1
    log.info('server is working, unfinished servers: %s' % ','.join(server_dict.keys()))
    value_dict = CacheAdpter.get_many(key_list, db_name)
    if not value_dict:
        return
    for prj in sub_prj_list:
        if prj['statu'] == 'finished':
            continue
        value = value_dict.get(prj['data_key'] + '_statu', None)
        if value:
            prj['statu'] = value
def point_mall(request):
    """Points mall (积分商城)."""
    from apps.web.point import PointManager
    shop_id = request.user.shop_id
    customer = request.user.customer
    is_perfect_info = customer.is_perfect_info if customer else False
    # Fetch the point balance
    point_count = CacheAdpter.get(CacheKey.WEB_JLB_COUNT % shop_id, 'web', 'no_cache')
    if point_count == 'no_cache':
        point_count = PointManager.refresh_points_4shop(shop_id=shop_id)
    shop_items = MemberStore.query_present_templates()
    return render_to_response('point_mall.html',
                              {'is_perfect_info': is_perfect_info,
                               'shop_items': shop_items,
                               'point_count': point_count},
                              context_instance=RequestContext(request))
def sum_result(self):
    """Aggregate the workers' results."""
    candi_kw_dict = {}
    key_list = []
    for prj in self.sub_prj_list:
        key_list.append(prj['data_key'])
    worker_result_dict = CacheAdpter.get_many(key_list, self.db_name)
    # Merge the workers' result data
    for temp_dict in worker_result_dict.values():
        for k, v in temp_dict.items():
            candi_kw_dict.setdefault(k, []).extend(v)
    # Sort and select per configured filter
    result_list = []
    filter_index = 0
    for select_filter in self.select_conf.select_conf_list:
        filter_index += 1
        kw_list = candi_kw_dict.get(select_filter.candi_filter, [])[0:10000]  # hard cap: at most 10000 per category, which is plenty
        if not kw_list:
            continue
        kw_list.sort(globals()['sort_kwlist_by_%s' % select_filter.sort_mode])  # look up the module-level comparison function by name
        # Take the configured range
        range_list = select_filter.select_num.split('-')
        if float(range_list[0]) < 1.0:  # the range is given as percentages
            start_index = int(len(kw_list) * float(range_list[0]))
            end_index = int(len(kw_list) * float(range_list[1]))
        else:
            start_index = int(range_list[0]) - 1
            end_index = int(range_list[1])
        result_list.extend([kw + [str(filter_index)] for kw in kw_list[start_index:end_index]])
    return result_list
def get_cache(cls, shop_id):
    return CacheAdpter.get(cls.KEY % shop_id, 'web', None)
def set_cache(cls, shop_id, data):
    # data looks like {'session': 'xxxxx', 'expired': datetime.datetime(2015, 6, 20)}
    CacheAdpter.set(cls.KEY % shop_id, data, 'web', timeout=24 * 60 * 60 * 7)
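# The two class methods above, together with get_cache, give a 7-day read-through cache for the
# session record. The sketch below shows how they might be combined. It is an illustration only:
# `TokenCache` stands in for the class that owns KEY/get_cache/set_cache, and
# `fetch_session_from_api` is a hypothetical refresh hook; neither name appears in the snippets above.
import datetime

def get_valid_session(shop_id):
    """Return a non-expired session, refreshing the cache when it is missing or stale."""
    data = TokenCache.get_cache(shop_id)                # cached {'session': ..., 'expired': ...} or None
    if data and data['expired'] > datetime.datetime.now():
        return data['session']
    data = fetch_session_from_api(shop_id)              # hypothetical: obtain a fresh session record
    TokenCache.set_cache(shop_id, data)                 # cached for 7 days, matching set_cache above
    return data['session']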
        kw_lockers = KeywordLocker.objects.filter(
            Q(is_stop__in=[0, None], start_time__lte=time_str, end_time__gte=time_str, next_run_time__lte=now) &
            (Q(is_running__in=[0, None]) |
             Q(is_running=1, last_run_time__lte=now - datetime.timedelta(hours=1)))) \
            .order_by('-is_running', 'next_run_time', 'adgroup_id').limit(100)
        kw_id_list = [kl.keyword_id for kl in kw_lockers]
        kw_locker_coll.update({'_id': {'$in': kw_id_list}},
                              {'$set': {'is_running': 1, 'last_run_time': now}}, multi=True)
    except Exception as e:
        log.error("robrank_task get_valid_task error, e=%s" % e)
    finally:
        CacheAdpter.delete(CacheKey.ENGINE_ROBRANK_TASK_MUTUAL_LOCK, 'web')  # clear the mutual-exclusion lock to reopen the gate
    adg_dict = {}
    for kl in kw_lockers:
        adg_dict.setdefault(kl.adgroup_id, []).append(kl)
    # The last item may not yet carry all of its keywords, so filter it out and let the next run pick it up
    for adgroup_id, kl_list in adg_dict.items():
        shop_id = kl_list[0].shop_id
        task = RobRankTask(shop_id=shop_id, adgroup_id=adgroup_id, kw_locker_list=kl_list, opt_type='auto')
        task.init_data()
        if task.is_runnable():
            new_task_list.append(task)
        else:
def reload_single_cat_2memcache(cls, cat_id):
    cat = cls.get_cat_from_db(cat_id)
    CacheAdpter.set(CacheKey.KWLIB_CAT_INFO % str(cat_id), cat, 'web')
def load_all_cat_2memcache(cls):
    for cat in cls.objects.all():
        CacheAdpter.set(CacheKey.KWLIB_CAT_INFO % str(cat.cat_id), cat, 'web', Const.KWLIB_CAT_CACHE_TIME)
def del_cache_progress(shop_id):
    '''Delete the download-progress entry from the cache.'''
    download_task_key = CacheKey.SUBWAY_DOWNLOAD_TASK % shop_id
    CacheAdpter.delete(download_task_key, 'web')
filter_list = prj_dict['filter_list']
price_list = prj_dict['price_list']
remove_word_list = prj_dict['remove_words'].split(',') if prj_dict['remove_words'] else []
cat_cpc = prj_dict['cat_cpc']
log.info('worker start, item_id=%s, key=%s' % (prj_dict['item_id'], prj_dict['data_key']))
kw_list = CacheAdpter.get(prj_dict['from_key'], prj_dict['from_db'], [])
if not kw_list:
    log.error('can not get group from memcache, group key=%s' % prj_dict['from_key'])
    kw_list = CacheAdpter.get(prj_dict['from_key'], prj_dict['from_db'], [])  # retry once
group_dict = {}
if kw_list:
    if ('click > 0' not in filter_conf and 'click>0' not in filter_conf) or kw_list[0][2] > 0:
        cat_id = prj_dict['from_key'].split('_')[0]
        try:
            group_dict = group_kwlist(kw_list, item_scorer, int(cat_id), cat_cpc, cats, remove_word_list,
                                      filter_conf, filter_list, price_list)
        except Exception as e:
            log.error('group_kwlist error: cat_id=%s, e=%s' % (cat_id, e))
CacheAdpter.set(prj_dict['data_key'], group_dict, prj_dict['data_db'])
log.info('worker finish, item_id=%s, key=%s' % (prj_dict['item_id'], prj_dict['data_key']))
CacheAdpter.set('%s_status' % prj_dict['data_key'], True, prj_dict['data_db'], 10)
def load_in_cache_if_not():
    word_list = CacheAdpter.get(CacheKey.KWLIB_POINTLESSWORD, 'web', [])
    if word_list:
        return
    PointlessWord.update_memcache()
# Read the user's template theme and save it in the session
if Config.get_value('router.SYS_THEMEM', default=False):
    request.session['theme'] = Config.get_value('router.SYS_THEMEM')
else:
    request.session['theme'] = user.theme
# Read the user's phone number, SMS-reminder setting and nick
if customer:
    # customer = customer[0]
    request.session['remind'] = customer.remind
    request.session['phone'] = customer.phone
    request.session['seller'] = customer.seller
    # Write to the CRM cache
    if not is_backend and customer.consult_id:
        login_cache = CacheAdpter.get(CacheKey.LOGIN_USERS % (customer.consult_id, datetime.date.today()),
                                      'crm', {})
        shop_cache = login_cache.setdefault(customer.shop_id,
                                            [now, customer.nick, customer.phone, customer.qq, 0, ''])
        shop_cache[0] = now
        shop_cache[5] = plateform_type
        CacheAdpter.set(CacheKey.LOGIN_USERS % (customer.consult_id, datetime.date.today()),
                        login_cache, 'crm', 24 * 60 * 60)
# Delete the left-side menu cache on login
CacheAdpter.delete(CacheKey.WEB_MNT_MENU % user.shop_id, 'web')
CacheAdpter.delete(CacheKey.WEB_ISNEED_PHONE % user.shop_id, 'web')
        args = {'is_force': True, 'rpt_days': kwargs['rpt_days']} if is_force else {}
        if not dler.sync_all_rpt(**args):
            raise Exception('dl_rpt_failed', is_force)
        return True, ''
    except Exception as e:
        log.error('download data error, shop_id=%s, e=%s' % (shop_id, e.args))
        return False, e[0]
    finally:
        CacheAdpter.delete(cache_key, 'web')
else:
    return False, 'doing'

shop_id = int(request.user.shop_id)
is_force = False
cache_key = CacheKey.WEB_SYNC_DATA_FLAG % shop_id
is_recent_clicked = CacheAdpter.get(cache_key, 'web')
if is_recent_clicked:
    is_force = True
    CacheAdpter.delete(cache_key, 'web')  # forced download: delete the flag so the next request goes back to the automatic sync path
else:
    CacheAdpter.set(cache_key, True, 'web', 60 * 5)  # automatic download: mark the flag in the cache
result, reason = download_data(shop_id=shop_id, is_force=is_force,
def set_cache_progress(shop_id, progress):
    '''Record the download progress for this shop_id in the cache.'''
    download_task_key = CacheKey.SUBWAY_DOWNLOAD_TASK % shop_id
    CacheAdpter.set(download_task_key, progress, 'web', 60 * 60 * 2)
def get_cache_progress(shop_id):
    '''Read the download progress for this shop_id from the cache.'''
    download_task_key = CacheKey.SUBWAY_DOWNLOAD_TASK % shop_id
    return CacheAdpter.get(download_task_key, 'web', '')
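# The three helpers above (set_cache_progress / get_cache_progress / del_cache_progress) form a small
# progress channel between a download worker and whatever page polls it. The sketch below only
# illustrates how a worker might drive them: `steps` as a list of callables and the 0-100 percentage
# convention are assumptions, not part of the original code.
def download_with_progress(shop_id, steps):
    """Run download steps in order, publishing rough progress after each one."""
    total = len(steps)
    for i, step in enumerate(steps, 1):
        step()  # each step downloads one chunk of report data
        set_cache_progress(shop_id, int(i * 100.0 / total))  # assumed 0-100 convention
    return get_cache_progress(shop_id)
# A polling view would call get_cache_progress(shop_id) repeatedly and, once it has rendered the final
# state, clean up with del_cache_progress(shop_id).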
def download_task(task):
    """Ideally this function would only send the request and save the result to the cache.
    1. Pass only the session and let the API side build the request dynamically. In reality, sending a
       request requires app_key, app_secret and session to build the auth information.
    2. Rewrite the send/receive logic, because the original logic imposes things like a mandatory
       one-second wait, which slows everything down. It also wraps responses in TopObject; that wrapper
       is unnecessary -- raw JSON is more direct and avoids the conversion overhead of moving data in
       and out of memcached.
    In reality, to satisfy tapi's authorization checks it is hard to get around the issues above, and
    during automatic downloads it is hard to know whether the session comes from Qianniu (千牛) or from
    the web side.
    Final compromise: build the request with the original logic, unchanged.
    Extension: this task should accept dynamic parameters, i.e. it could also accelerate other data,
    such as adgroup reports.
    """
    st = time.time()
    shop_id = task['shop_id']
    campaign_id = task['campaign_id']
    adgroup_id = task['adgroup_id']
    token = task['token']
    start_time = end_time = task['date']
    source = task['source']
    search_type = task['search_type']
    cache_key = task['cache_key']
    cache_db = task['cache_db']
    succeeded = True
    cached_result = CacheAdpter.get(cache_key, cache_db, None)
    if not cached_result:  # already cached: skip the download (unless a re-download flag cleared the cache)
        tapi = get_tapi(shop_id=shop_id)
        base_list, effect_list = [], []
        try:
            # page_no and page_size can be hard-coded: the keyword report for a single adgroup, single day,
            # single source and single type never exceeds 200 rows
            top_base_objs = tapi.simba_rpt_adgroupkeywordbase_get(campaign_id=campaign_id, adgroup_id=adgroup_id,
                                                                  start_time=start_time, end_time=end_time,
                                                                  search_type=search_type, source=source,
                                                                  subway_token=token, page_no=1, page_size=200)
            if top_base_objs and hasattr(top_base_objs, 'rpt_adgroupkeyword_base_list') and top_base_objs.rpt_adgroupkeyword_base_list:
                base_list = top_base_objs.rpt_adgroupkeyword_base_list
        except Exception as e:
            log.error("keyword base report download failed, e=%s" % e)
            succeeded = False
        if base_list:
            try:
                top_effect_objs = tapi.simba_rpt_adgroupkeywordeffect_get(campaign_id=campaign_id, adgroup_id=adgroup_id,
                                                                          start_time=start_time, end_time=end_time,
                                                                          search_type=search_type, source=source,
                                                                          subway_token=token, page_no=1, page_size=200)
                if top_effect_objs and hasattr(top_effect_objs, 'rpt_adgroupkeyword_effect_list') and top_effect_objs.rpt_adgroupkeyword_effect_list:
                    effect_list = top_effect_objs.rpt_adgroupkeyword_effect_list
            except Exception as e:
                log.error("keyword effect report download failed, e=%s" % e)
                succeeded = False
        if base_list:
            rpt_list = []
            base_dict, effect_dict = collections.defaultdict(dict), collections.defaultdict(dict)
            for base in base_list:
                base_dict[base.keyword_id].update(KeywordRpt.parse_rpt(base, 'base'))
            for effect in effect_list:
                effect_dict[effect.keyword_id].update(KeywordRpt.parse_rpt(effect, 'effect'))
            for kw_id, base_rpt_dict in base_dict.items():
                rpt_list.extend(KeywordRpt.merge_rpt_dict(base_rpt_dict, effect_dict.get(kw_id, {}),
                                                          {'shop_id': shop_id, 'campaign_id': campaign_id,
                                                           'adgroup_id': adgroup_id, 'keyword_id': kw_id}))
            CacheAdpter.set(cache_key, rpt_list, cache_db, 12 * 60 * 60)  # cache the merged report data
    log.info("total cost %.5f seconds, cache_key=%s" % ((time.time() - st), cache_key))
    CacheAdpter.set("%s_status" % cache_key, succeeded, cache_db, 60 * 10)  # the contractor deletes this status key once it has confirmed the task result
def del_cache(cls, shop_id):
    CacheAdpter.delete(cls.KEY % shop_id, 'web')
def set_mnt_camp(campaign_id, flag, mnt_type, opter=3, opter_name='', **kwargs):
    """Managed-campaign setter: `flag` marks whether hosting (托管) is turned on or off."""
    from apps.subway.upload import update_campaign, set_cmp_mnt_status_log
    try:
        campaign = Campaign.objects.get(campaign_id=campaign_id)
    except DoesNotExist:
        log.info('can not find campaign, campaign_id=%s' % campaign_id)
        return None
    set_dict = {'mnt_status': flag}
    max_num_dict = {1: 500, 2: 10, 3: 50, 4: 10}
    max_num = max_num_dict.get(mnt_type, 10)
    if flag:
        # TODO: wangqi 20151019 check here whether the shop is allowed to create a managed campaign
        if not MntMnger.check_create_mnt(campaign.shop_id):
            raise Exception("no_permission")
        set_dict.update({'online_status': 'online',
                         'budget': kwargs['budget'],
                         'use_smooth': 'true'})
        if kwargs.get('area', 0) != 0:
            if campaign.area == 'all':
                area_list = Const.CAMP_AREA
            else:
                area_list = campaign.area.split(',')
            # The codes removed here are Hong Kong, Macao, Taiwan and overseas regions ({...} is a set literal)
            new_area_list = list(set(area_list) - {'599', '576', '578', '574'})
            set_dict.update({'area': ','.join(new_area_list)})
        if kwargs.get('platform', 0) != 0:
            set_dict.update({'search_channels': '1,2,4,8,16',
                             'nonsearch_channels': '1,2,8,16',
                             'outside_discount': 60,
                             'mobile_discount': 110})
        if kwargs.get('schedule', 0) != 0:
            # Hourly discount curve: the same curve Monday to Friday, a slightly different one at the weekend
            weekday_schedule = ('00:00-01:00:45,01:00-08:00:35,08:00-09:00:70,09:00-14:00:75,14:00-17:00:100,'
                                '17:00-19:00:80,19:00-20:30:100,20:30-23:00:105,23:00-24:00:100')
            weekend_schedule = ('00:00-01:00:50,01:00-08:00:35,08:00-10:00:65,10:00-14:00:75,14:00-17:00:100,'
                                '17:00-19:00:80,19:00-20:30:100,20:30-23:00:105,23:00-24:00:100')
            set_dict.update({'schedule': ';'.join([weekday_schedule] * 5 + [weekend_schedule] * 2)})
        # mnt_desc = mnt_type == 1 and '长尾托管' or '重点托管'
        # set_dict.update({'title': '开车精灵-%s%s' % (mnt_desc, kwargs['mnt_index'])})
        if kwargs.get('max_price', 0) > 0:
            set_dict.update({'max_price': kwargs['max_price']})
        if kwargs.get('mobile_max_price', 0) > 0:
            set_dict.update({'mobile_max_price': kwargs['mobile_max_price']})
        result_list, msg_list = update_campaign(shop_id=campaign.shop_id, campaign_id=campaign.campaign_id,
                                                opter=opter, opter_name=opter_name, **set_dict)
        create_args = {'shop_id': campaign.shop_id,
                       'campaign_id': campaign.campaign_id,
                       'mnt_index': kwargs['mnt_index'],
                       'mnt_type': mnt_type,
                       'max_num': max_num,
                       'mnt_cfg_list': []}
        if mnt_type in [2, 4]:
            create_args.update({'max_price': kwargs.get('max_price', 200),
                                'mobile_max_price': kwargs.get('mobile_max_price', 200),
                                'mnt_rt': kwargs.get('mnt_rt', 0),
                                'mnt_bid_factor': kwargs.get('mnt_bid_factor', 50),
                                'opt_wireless': kwargs.get('opt_wireless', 0)})
        elif mnt_type in [1, 3]:
            create_args.update({'max_price': kwargs.get('max_price', 200),
                                'mobile_max_price': kwargs.get('mobile_max_price', 200),
                                'mnt_bid_factor': kwargs.get('mnt_bid_factor', 50),
                                'opt_wireless': kwargs.get('opt_wireless', 0)})
        MntCampaign.objects.create(**create_args)
    else:
        msg_list = []
        # campaign.rpt_days = 7
        # if campaign.rpt_sum.roi < 1.5 and '开车精灵' in campaign.title or '开車精灵' in campaign.title:
        #     set_dict['title'] = '推广计划%s' % time.strftime('%m%d%H%M')
        campaign.rpt_sum = campaign.get_summed_rpt(rpt_days=15)
        if campaign.rpt_sum.roi < 1 and ('开车精灵-' in campaign.title or '开車精灵-' in campaign.title
                                         or '无线精灵-' in campaign.title or not flag):
            set_dict['title'] = campaign.title.replace('开车精灵-', '').replace('开車精灵-', '').replace('无线精灵-', '')
        result_list, msg_list = update_campaign(shop_id=campaign.shop_id, campaign_id=campaign.campaign_id,
                                                opter=opter, opter_name=opter_name, **set_dict)
        MntMnger.unmnt_campaign(shop_id=campaign.shop_id, campaign_id=campaign.campaign_id)
    CacheAdpter.delete(CacheKey.WEB_MNT_MENU % campaign.shop_id, 'web')
    warn_msg_dict = {}
    if msg_list:
        warn_dict = {'名称或状态': 'title_status', '日限额': 'budget', '投放地域': 'area',
                     '分时折扣': 'schedule', '平台设置': 'platform'}
        for msg in msg_list:
            for warn_str, warn_key in warn_dict.items():
                if warn_str in msg:
                    warn_msg_dict[warn_key] = msg
                    break
    return warn_msg_dict
def get_pointlessword_list(level=1):
    PointlessWord.load_in_cache_if_not()
    word_list = CacheAdpter.get(CacheKey.KWLIB_POINTLESSWORD, 'web', [[], []])
    return word_list[level - 1]
@classmethod
def reload_single_cat_2memcache(cls, cat_id):
    cat = cls.get_cat_from_db(cat_id)
    CacheAdpter.set(CacheKey.KWLIB_CAT_INFO % str(cat_id), cat, 'web')

@classmethod
def reload_cat_by_field(cls, cat_id, field_list):
    cat = None
    try:
        cat = cls.objects.only(*field_list).get(cat_id=cat_id)
    except Exception as e:
        log.error("can not get cat from db and the error = %s" % e)
    if cat:
        for key in field_list:
            attr_key = CacheKey.KWLIB_CAT_ATTR % (cat.cat_id, key)
            CacheAdpter.set(attr_key, getattr(cat, key), 'web', Const.KWLIB_ATTRIBUT_CACHE_TIME)
            # Disabled verify-and-retry loop, kept for reference:
            # set_stat = True
            # set_count = 5
            # while set_stat:
            #     if not set_count:
            #         break
            #     old_data = CacheAdpter.get(attr_key, 'web')
            #     CacheAdpter.set(attr_key, getattr(cat, key), 'web', Const.KWLIB_ATTRIBUT_CACHE_TIME)
            #     new_data = CacheAdpter.get(attr_key, 'web')
            #     if old_data == new_data:
            #         set_stat = True
            #     else:
            #         set_stat = False
            #     set_count -= 1
def work(self):
    self.do_my_work()
    CacheAdpter.set(self.prj_dict['data_key'] + '_statu', 'finished', self.prj_dict['data_db'])
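# work() above publishes a '<data_key>_statu' flag once a worker is done, and get_prj_statu() collects
# those flags with CacheAdpter.get_many. Below is a hedged sketch of the contractor-side polling loop;
# the wait_for_workers name and the timeout/poll_interval values are illustrative assumptions, not the
# original code.
import time

def wait_for_workers(contractor, sub_prj_list, db_name, timeout=300, poll_interval=2):
    """Poll worker statuses until every sub-project reports 'finished' or the timeout expires."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        contractor.get_prj_statu(sub_prj_list, db_name)  # pulls the '_statu' flags out of the cache
        if all(prj['statu'] == 'finished' for prj in sub_prj_list):
            return True
        time.sleep(poll_interval)
    return False  # some workers never reported back; the caller decides how to degrade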