def _free_pay_subscribe(cls, xfgroup, start_date, end_date, result_mapping):
    """Collect paid subscribes for *xfgroup* created inside the date window,
    grouped by creation date, recording each one's billing cycle in months.

    Subscribes with biz_type < 6 whose average monthly pay reaches 3000 are
    skipped (they are counted by the monthly-pay collector instead).
    """
    def _cycle_of(sub):
        # Round the subscription span to whole 30-day months.
        months = int(round((sub.end_date - sub.start_date).days / 30.0))
        if months == 0:
            return 0
        # High-value regular subscribes do not belong in this bucket.
        if sub.biz_type < 6 and sub.pay / months >= 3000:
            return 0
        return months

    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    qs = Subscribe.objects.values('id', 'shop_id', 'start_date', 'end_date',
                                  'create_time', 'pay', 'biz_type') \
        .exclude(biz_type = 3) \
        .filter(pay_type = 1, category__in = ['rjjh', 'vip', 'kcjl']) \
        .filter(create_time__gte = window_start, create_time__lte = window_end) \
        .filter(xfgroup = xfgroup, approval_status = 1, pay__gt = 0) \
        .order_by('-create_time')
    for raw in qs:
        sub = DictWrapper(raw)
        months = _cycle_of(sub)
        if months:
            result_mapping[sub.create_time.date()].append(DictWrapper({
                'id': sub.id,
                'shop_id': sub.shop_id,
                'cycle': months,
            }))
    return result_mapping
def __call__(self, staff_data):
    """Aggregate indicator values over every tracked psuser.

    Merges each user's per-indicator entity lists, replays the merged
    entities day by day across [start_time, end_time], unpacks each
    entity's JSON payload into dated data points, and feeds the flattened
    points to the indicator's ``sum_func``. The per-indicator results are
    stored on ``self.result`` and ``self`` is returned (fluent style).
    """
    # Merge every user's entities per indicator; OrderedDict preserves the
    # indicator order of self.indicators across users.
    data = collections.OrderedDict()
    for psuser in self.psusers:
        user_data = staff_data.get(psuser, {})
        for indicator in self.indicators:
            inicator_data = user_data.get(indicator, [])
            data.setdefault(indicator, []).extend(inicator_data)
    # Day count of the window, inclusive of both endpoints.
    size = (self.end_time - self.start_time).days + 1
    result = []
    for indicator in data:
        # Bucket the merged entities by the date they were recorded on.
        new_data = {}
        for entity in data.get(indicator, []):
            new_data.setdefault(entity.result_date, []).append(entity)
        values_list = []
        for index in xrange(size):
            cur_date = self.start_time.date() + datetime.timedelta(
                days=index)
            entity_list = new_data.get(cur_date, [])
            for entity in entity_list:
                # data_json holds a JSON list of raw points for that day.
                entity_data_list = json.loads(entity.data_json)
                if entity_data_list:
                    for entity_data in entity_data_list:
                        entity_data = DictWrapper(entity_data)
                        entity_data.date = entity.result_date
                        values_list.append(entity_data)
        # sum_func returns a 3-tuple; only the first value is kept.
        return_val, _, _ = indicator.sum_func(values_list)
        # result.append((indicator.name, return_val))
        result.append(return_val)
    self.result = result
    return self
def _monthly_pay_subscribe(cls, xfgroup, start_date, end_date, result_mapping):
    """Collect high-value subscribes (average monthly pay >= 3000) for
    *xfgroup* created inside the date window, grouped by creation date.
    """
    def _avg_monthly(sub):
        # Whole-month count using 30-day months; zero-length spans yield 0.
        months = int(round((sub.end_date - sub.start_date).days / 30.0))
        if months == 0:
            return 0
        return sub.pay / months

    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    qs = Subscribe.objects.values('id', 'shop_id', 'start_date', 'end_date',
                                  'create_time', 'pay') \
        .exclude(Q(biz_type=6) | (Q(biz_type=3) & Q(category__in=['rjjh', 'vip']))) \
        .filter(pay_type=1, category__in=['rjjh', 'vip', 'kcjl']) \
        .filter(create_time__gte=window_start, create_time__lte=window_end) \
        .filter(xfgroup=xfgroup, approval_status=1) \
        .order_by('-create_time')
    for raw in qs:
        sub = DictWrapper(raw)
        monthly = _avg_monthly(sub)
        if monthly >= 3000:
            result_mapping[sub.create_time.date()].append(DictWrapper({
                'id': sub.id,
                'shop_id': sub.shop_id,
                'monthly_pay': monthly,
            }))
    return result_mapping
def _user_cycle_increment(cls, psuser, start_date, end_date, result_mapping):
    """Collect *psuser*'s paid subscribes created inside the window and score
    each by subscription length: >=12 months -> 4, >=6 -> 2, >=3 -> 1.
    Zero-scored subscribes are dropped; results group by creation date.
    """
    def _mark_for(sub):
        months = int(round((sub.end_date - sub.start_date).days / 30.0))
        # First matching threshold wins (checked longest first).
        for threshold, mark in ((12, 4), (6, 2), (3, 1)):
            if months >= threshold:
                return mark
        return 0

    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    qs = Subscribe.objects.values("id", 'shop_id', 'start_date', 'end_date', "create_time") \
        .exclude(Q(biz_type = 6) | Q(category = "qn")) \
        .filter(create_time__gte = window_start, create_time__lte = window_end) \
        .filter(psuser = psuser) \
        .filter(pay__gt = 0) \
        .order_by('-create_time')
    for raw in qs:
        sub = DictWrapper(raw)
        mark = _mark_for(sub)
        if mark:
            result_mapping[sub.create_time.date()].append(
                DictWrapper({'id': sub.id, 'shop_id': sub.shop_id, 'mark': mark}))
    return result_mapping
def router_message(result, request=None): result_dict = {'result': {}} try: data = DictWrapper.load_dict(json.loads(result)) data.content = DictWrapper.load_dict(json.loads(data.content)) handle_result = MsgManager.handle(data.topic, data) log.info("handled %s by %s, result=%s" % (data, data.topic, handle_result)) result_dict = {'result': {'result': handle_result}} except Exception, e: log.error('handle %s error, e=%s' % (result, e)) result_dict = {'result': {'result': u'处理消息时出错'}}
def prepare_data_foruser(fields):
    """Prepare per-shop user records for filter operations.

    Splits *fields* into base fields (loaded directly) and derived fields
    (computed by CONFIG_MAPPING handlers), seeds every user record with the
    handlers' defaults, then runs each collector function once and merges
    its handlers' output into the matching shop's record.

    Returns the list of per-shop DictWrapper records.
    """
    base_fields, derived_fields = filter_fields(fields)
    derived_defaults = {}
    handlers = []
    for name in derived_fields:
        handler = CONFIG_MAPPING[name]
        handlers.append(handler)
        derived_defaults[name] = handler.default

    user_mapping = {}
    shop_nick_mapping = {}
    for raw_user in customers_initialize_data(base_fields):
        shop_id = raw_user['shop_id']
        user = DictWrapper(raw_user)
        user.update(derived_defaults)
        user_mapping[shop_id] = user
        shop_nick_mapping[user.nick] = shop_id

    # Group handlers by collector so each data source is queried only once.
    collector_groups = {}
    for handler in handlers:
        collector_groups.setdefault(handler.collector_handler, []).append(handler)

    for collect, group in collector_groups.items():
        # 1. fetch the raw rows for every field this group needs
        db_fields = get_all_db_fields(group)
        data_list = collect(db_fields, shop_nick_mapping)
        # 2. let each handler transform the row, then merge into the record
        for row in data_list:
            shop_id = row.pop('shop_id')
            target = user_mapping.get(shop_id)
            if target is None:
                # No base record for this shop; nothing to merge yet.
                continue
            merged = {}
            for handler in group:
                merged.update(handler.data_handler(row))
            target.update(merged)
    return user_mapping.values()
def _expiring_renew_subscribe(cls, xfgroup, start_date, end_date, result_mapping):
    """Collect renewals for *xfgroup*: subscribes created inside the window
    whose shop also has a different subscribe expiring in the same calendar
    month. Results are grouped by the renewal's creation date.
    """
    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    # Widen the expiry scan to whole calendar months around the window.
    first_month_start = datetime.date(window_start.year, window_start.month, 1)
    if window_end.month == 12:
        next_month_start = datetime.date(window_end.year + 1, 1, 1)
    else:
        next_month_start = datetime.date(window_end.year, window_end.month + 1, 1)

    expiring_qs = Subscribe.objects.only('id', 'shop_id', 'end_date') \
        .filter(consult_xfgroup=xfgroup) \
        .filter(end_date__gte=first_month_start, end_date__lt=next_month_start) \
        .exclude(biz_type=6) \
        .order_by('-end_date')
    # One expiring subscribe kept per shop.
    expiring_by_shop = {sub.shop_id: sub for sub in expiring_qs}

    renew_qs = Subscribe.objects.values('id', 'shop_id', 'create_time') \
        .filter(xfgroup=xfgroup) \
        .filter(create_time__gte=window_start, create_time__lte=window_end) \
        .exclude(biz_type=6)
    for raw in renew_qs:
        renew = DictWrapper(raw)
        prior = expiring_by_shop.get(renew.shop_id)
        if prior is None:
            continue
        # Only a *different* subscribe expiring in the creation month counts.
        if prior.id != renew.id and prior.end_date.month == renew.create_time.month:
            result_mapping[renew.create_time.date()].append(renew)
    return result_mapping
def get_dadg_list(shop_id, camp_ids):
    """Find (shop, campaign, item) triples owning more than one adgroup.

    Returns a list of DictWrapper shaped like:
    {'item_id': ..., 'campaign_id': ..., 'shop_id': ..., 'adg_list': [...]}
    """
    pipeline = [
        {'$match': {'shop_id': shop_id, 'campaign_id': {"$in": camp_ids}}},
        {'$group': {
            '_id': {'item_id': '$item_id',
                    'campaign_id': '$campaign_id',
                    'shop_id': "$shop_id"},
            'adgroup_total': {'$sum': 1},
            'adg_list': {'$addToSet': '$_id'},
        }},
        # Keep only duplicated groups.
        {'$match': {'adgroup_total': {"$gt": 1}}},
    ]
    duplicated = []
    for row in adg_coll.aggregate(pipeline, allowDiskUse=True)['result']:
        merged = {}
        merged.update(row['_id'])
        merged['adg_list'] = row['adg_list']
        duplicated.append(DictWrapper(merged))
    return duplicated
def Load_monitor_Event(cls, customer_mapping):
    """Attach monitoring-event history and summary stats to each customer.

    Sets on every customer: ``_monitor_events`` (newest first),
    ``last_monitor``, ``last_monitor_days`` and ``monitor_counter``
    (None / -1 / 0 respectively when the shop has no events).
    """
    qs = Monitor.objects.values('shop_id', 'create_time') \
        .filter(shop_id__in = customer_mapping.keys()) \
        .order_by('-create_time')
    for raw in qs:
        event = DictWrapper(raw)
        customer = customer_mapping.get(event.shop_id, None)
        if customer:
            if not hasattr(customer, '_monitor_events'):
                customer._monitor_events = []
            customer._monitor_events.append(event)

    now = datetime.datetime.now()
    for customer in customer_mapping.values():
        if not hasattr(customer, '_monitor_events'):
            # No events at all: attach neutral defaults.
            customer._monitor_events = []
            customer.last_monitor = None
            customer.last_monitor_days = -1
            customer.monitor_counter = 0
            continue
        customer.last_monitor = customer._monitor_events[0]
        customer.last_monitor_days = (
            now - customer.last_monitor.create_time).days
        # Several events on the same day are counted once.
        customer.monitor_counter = len(
            set(ev.create_time.date() for ev in customer._monitor_events))
    return customer_mapping
def Load_operate_Event(cls, customer_mapping):
    """Attach operate-event history to each customer.

    Sets ``operate_events`` (newest first) and ``last_operate_days``
    (None when the shop has no operate events).
    """
    cursor = event_coll.find(
        {'type': 'operate', 'shop_id': {"$in": customer_mapping.keys()}},
        {'shop_id': 1, 'create_time': 1},
    ).sort("create_time", pymongo.DESCENDING)
    events_by_shop = {}
    for raw in cursor:
        op = DictWrapper(raw)
        events_by_shop.setdefault(op.shop_id, []).append(op)

    now = datetime.datetime.now()
    for shop_id, customer in customer_mapping.iteritems():
        events = events_by_shop.get(shop_id, [])
        customer.operate_events = events
        if events:
            customer.last_operate_days = (now - events[0].create_time).days
        else:
            customer.last_operate_days = None
    return customer_mapping
def Load_Login(cls, customer_mapping):
    """Attach login history and summary stats to each customer.

    Sets on every customer: ``_login_events`` (newest first),
    ``last_login``, ``last_login_days`` and ``login_counter``
    (None / -1 / 0 respectively when the shop has no logins).

    NOTE: the volume here can be large; storing logins per day would
    shrink the scan (kept from the original author's remark).
    """
    qs = Login.objects.values('shop_id', 'create_time') \
        .filter(shop_id__in = customer_mapping.keys()) \
        .order_by('-create_time')
    for raw in qs:
        event = DictWrapper(raw)
        customer = customer_mapping.get(event.shop_id, None)
        if customer:
            if not hasattr(customer, '_login_events'):
                customer._login_events = []
            customer._login_events.append(event)

    now = datetime.datetime.now()
    for customer in customer_mapping.values():
        if not hasattr(customer, '_login_events'):
            # No logins at all: attach neutral defaults.
            customer._login_events = []
            customer.last_login = None
            customer.last_login_days = -1
            customer.login_counter = 0
            continue
        customer.last_login = customer._login_events[0]
        customer.last_login_days = (
            now - customer.last_login.create_time).days
        # Several logins on the same day are counted once.
        customer.login_counter = len(
            set(ev.create_time.date() for ev in customer._login_events))
    return customer_mapping
def get_kw_gdata_4select_word(kw_list):
    """Fetch keyword g-data from redis for word selection, renaming the raw
    cache fields to the names the selector expects. Keywords with zero
    page views are dropped.
    """
    raw_mapping = get_gdata_by_redis(kw_list)
    selected = {}
    for key, g in raw_mapping.iteritems():
        if not g['pv']:
            continue
        selected[key.decode('utf8')] = DictWrapper({
            'pv': g['pv'],
            'click': g['click'],
            'competition': g['cmpt'],
            'avg_price': g['cpc'],
            'ctr': round(g['ctr'], 2),
            'roi': g['roi'],
            'coverage': g['coverage'],
            'favtotal': g['favtotal'],
        })
    return selected
def get(self, xfgroup, some_date, indicator):
    """Fetch the single staff-performance row for (xfgroup, date, indicator)
    and return it as a DictWrapper of ``self.fields``; return None when the
    lookup fails for any reason.
    """
    try:
        sp = self.adapter.objects.get(xfgroup_id = xfgroup.id,
                                      result_date = some_date,
                                      identify = indicator.name)
        return DictWrapper({f: getattr(sp, f) for f in self.fields})
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; lookup misses still yield None.
        return None
def _active_customers(cls, xfgroup, start_date, end_date, result_mapping):
    """Collect shops whose hosted ('kcjl') campaign spend for *xfgroup* sums
    to at least 5000 per day (i.e. 50 yuan, per the original remark),
    grouped by report date.
    """
    rows = list(mcs_coll.find({
        'rpt_date': {
            '$gte': date_2datetime(start_date),
            '$lt': date_2datetime(end_date + datetime.timedelta(days=1)),
        },
        'xfgroup_id': xfgroup.id,
        'category': 'kcjl',
    }))
    # {date: {shop_id: summed cost}}
    daily_costs = {}
    for row in rows:
        per_shop = daily_costs.setdefault(row['rpt_date'].date(), {})
        per_shop[row['shop_id']] = per_shop.get(row['shop_id'], 0) + row['cost']
    for day, per_shop in daily_costs.items():
        for shop_id, cost in per_shop.items():
            if cost >= 5000:  # daily hosted spend >= 50 yuan
                result_mapping[day].append(
                    DictWrapper({'shop_id': shop_id, 'cost': cost}))
    return result_mapping
def load_upgrade_account():
    """Load accounts that filled in any shipping detail (address, receiver,
    phone or zip code), keyed by account ``_id``.
    """
    non_empty = {"$nin": [None, ""]}
    cursor = account_coll.find(
        {'$or': [
            {"receive_address": non_empty},
            {"receiver": non_empty},
            {"receiver_phone": non_empty},
            {"zip_code": non_empty},
        ]},
        {
            "_id": 1,
            "receive_address": 1,
            "receiver": 1,
            "receiver_phone": 1,
            "zip_code": 1,
        })
    return {doc['_id']: DictWrapper(doc) for doc in cursor}
def _load_all_entity(self, psuser, indicator, time_scope, is_force=False):
    """Load and flatten every data point recorded for *psuser* under
    *indicator* inside *time_scope*. Each point is wrapped in DictWrapper
    and stamped with the date it was recorded on. Returns the flat list
    (empty when the indicator has no show_func).
    """
    loader = Loader([psuser], *time_scope)
    staff_mapping = loader.loading([indicator], is_force)
    flattened = []
    per_user = staff_mapping.get(psuser)
    if indicator.show_func is not None:
        for ind, entities in per_user.iteritems():
            for entity in entities:
                # data_json carries a JSON list of raw points (may be empty).
                for point in (json.loads(entity.data_json) or []):
                    wrapped = DictWrapper(point)
                    wrapped.date = entity.result_date
                    flattened.append(wrapped)
    return flattened
def _bad_comments(cls, xfgroup, start_date, end_date, result_mapping):
    """Collect comment events of types 200/302/303/304 on article
    'ts-25811' (version 'kcjl') attributed to *xfgroup* inside the window,
    grouped by the comment's creation date.
    """
    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    cursor = event_coll.find(
        {
            'type': 'comment',
            'duty_xfgroup_id': xfgroup.id,
            'comment_type': {'$in': [200, 302, 303, 304]},
            'current_version': 'kcjl',
            'article_code': 'ts-25811',
            'create_time': {'$gte': window_start, '$lte': window_end},
        },
        {'shop_id': 1, 'comment_type': 1, 'create_time': 1})
    for raw in cursor:
        comment = DictWrapper(raw)
        result_mapping[comment.create_time.date()].append(comment)
    return result_mapping
def load_init_mapping(self):
    """Build the initial staff -> customers mapping for the report window.

    Returns ``(staff_customer_mapping, customer_mapping)`` where
    ``customer_mapping`` maps shop_id -> one shared DictWrapper customer
    stub, and ``staff_customer_mapping`` maps consult_id -> a list of those
    same stub objects. Only populated for the "CONSULT" position; otherwise
    both containers come back empty.
    """
    start_time, end_time = self.timer_scope
    start_date = start_time.date()
    end_date = end_time.date()
    staff_customer_mapping = collections.defaultdict(list)
    customer_mapping = {}
    if self.position == "CONSULT":
        # (shop_id, consult_id) pairs for subscribes active at either
        # boundary of the window.
        shop_staff_set = set([(sub_info['shop_id'], sub_info['consult_id']) \
            for sub_info in Subscribe.objects.values("shop_id", "consult_id")\
                .filter(consult__in = self.psuser_id_list)\
                .filter(Q(start_date__lte = start_date, end_date__gte = start_date) \
                    | Q(start_date__lte = end_date, end_date__gte = end_date))])
        # TODO: yangrongkai -- note: to keep reference points identical,
        # consider both one rep serving several shops and one shop served
        # by several reps; code here carefully, it is easy to introduce
        # hidden bugs. (Both maps must share the same stub objects.)
        customer_mapping = {shop_id : DictWrapper(shop_id = shop_id) \
            for shop_id, _ in shop_staff_set}
        for shop_id, consult_id in shop_staff_set:
            cust = customer_mapping.get(shop_id, None)
            if cust is not None:
                staff_customer_mapping[consult_id].append(cust)
    return staff_customer_mapping, customer_mapping
def _unsubscribes_apportion(cls, psuser, start_date, end_date, result_mapping):
    """Collect unsubscribe events refunded inside the window that involve
    *psuser* (as owner, saler or server) and compute a fixed apportion
    weight: +3 for being the owning psuser, +7 for being the saler or the
    server. Results go into result_mapping keyed by refund date.
    """
    start_time = datetime.datetime(start_date.year, start_date.month, start_date.day, 0, 0, 0)
    end_time = datetime.datetime(end_date.year, end_date.month, end_date.day, 23, 59, 59)
    unsub_cursor = event_coll.find(
        {'type':'unsubscribe',
         'refund_date':{"$gte":start_time, "$lte":end_time},
         '$or':[{'saler_id':psuser.id}, {'server_id':psuser.id}, {'psuser_id':psuser.id}],
         'refund_reason':{'$lt':5}
         # , 'refund_type':{'$lt':3}
        },
        {'shop_id': 1, 'psuser_id': 1, 'refund_date': 1, 'saler_id':1,
         'server_id':1, 'saler_apportion': 1, 'server_apportion':1}
    )
    for unsub in unsub_cursor:
        apportion = 0
        if 'psuser_id' in unsub and psuser.id == unsub['psuser_id']:
            apportion += 3
        # NOTE(review): the server_id check only runs when a non-empty
        # saler_id exists and differs from psuser; if a document has no
        # saler_id at all, a matching server_id earns nothing. Confirm
        # whether that asymmetry is intended.
        if 'saler_id' in unsub and unsub['saler_id']:
            if psuser.id == unsub['saler_id']:
                apportion += 7
            elif psuser.id == unsub['server_id']:
                apportion += 7
        result_mapping[unsub['refund_date'].date()].append(DictWrapper({
            '_id':unsub['_id'],
            'shop_id':unsub['shop_id'],
            'apportion':apportion
        }))
    return result_mapping
def _new_orders(cls, psuser, start_date, end_date, result_mapping):
    """Collect first-ever paid orders: *psuser*'s subscribes created inside
    the window, excluding any shop that already had a paid subscribe before
    the window began. Results are grouped by creation date.
    """
    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    subs_by_shop = {}
    qs = Subscribe.objects.values('id', 'shop_id', 'create_time') \
        .filter(create_time__gte = window_start, create_time__lte = window_end) \
        .filter(consult = psuser) \
        .filter(pay__gt = 0) \
        .order_by('-create_time')
    for raw in qs:
        sub = DictWrapper(raw)
        subs_by_shop.setdefault(sub.shop_id, []).append(sub)

    # Shops with earlier paid history are renewals, not new orders.
    earlier_qs = Subscribe.objects.values('shop_id') \
        .filter(shop_id__in = subs_by_shop.keys()) \
        .filter(create_time__lt = window_start) \
        .filter(pay__gt = 0)
    for row in earlier_qs:
        subs_by_shop.pop(row["shop_id"], None)

    for sub_list in subs_by_shop.values():
        for sub in sub_list:
            result_mapping[sub.create_time.date()].append(sub)
    return result_mapping
def _change_comments(cls, xfgroup, start_date, end_date, result_mapping):
    """Collect comment events of types 301-305 for *xfgroup* created inside
    the window, grouped by the comment's creation date.
    """
    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    cursor = event_coll.find(
        {
            'type': 'comment',
            'xfgroup_id': xfgroup.id,
            'comment_type': {'$in': [301, 302, 303, 304, 305]},
            'create_time': {'$gte': window_start, '$lte': window_end},
        },
        {'shop_id': 1, 'comment_type': 1, 'create_time': 1})
    for raw in cursor:
        comment = DictWrapper(raw)
        result_mapping[comment.create_time.date()].append(comment)
    return result_mapping
def _unsubscribes(cls, psuser, start_date, end_date, result_mapping):
    """Collect unsubscribe events refunded inside the window where *psuser*
    carries a positive apportion as saler and/or server; when the user held
    both roles the two shares are summed.

    Appends DictWrapper({'_id', 'shop_id', 'apportion'}) entries to
    result_mapping keyed by refund date.
    """
    start_time = datetime.datetime(start_date.year, start_date.month,
                                   start_date.day, 0, 0, 0)
    end_time = datetime.datetime(end_date.year, end_date.month,
                                 end_date.day, 23, 59, 59)
    unsub_cursor = event_coll.find(
        {'type': 'unsubscribe',
         'refund_date': {"$gte": start_time, "$lte": end_time},
         '$or': [
             {'saler_id': psuser.id, 'saler_apportion': {'$gt': 0}},
             {'server_id': psuser.id, 'server_apportion': {'$gt': 0}},
         ],
         'refund_reason': {'$lt': 5}
         # , 'refund_type':{'$lt':3}
        },
        {'shop_id': 1, 'refund_date': 1, 'saler_id': 1, 'server_id': 1,
         'saler_apportion': 1, 'server_apportion': 1})
    for unsub in unsub_cursor:
        apportion = 0
        # .get() guards against documents that matched the $or on only one
        # role and lack the other id field entirely (the original indexed
        # unsub['saler_id'] / unsub['server_id'] directly and could raise
        # KeyError on such documents).
        if psuser.id == unsub.get('saler_id'):
            apportion += unsub.get('saler_apportion', 0)
        if psuser.id == unsub.get('server_id'):
            apportion += unsub.get('server_apportion', 0)
        result_mapping[unsub['refund_date'].date()].append(DictWrapper({
            '_id': unsub['_id'],
            'shop_id': unsub['shop_id'],
            'apportion': apportion
        }))
    return result_mapping
def filter(self, xfgroups, start_date, end_date, indicators):
    """Query staff-performance rows for the given groups and indicators and
    arrange them as ``{xfgroup: {indicator: [entity, ...]}}``.
    """
    group_by_id = {xfg.id: xfg for xfg in xfgroups}
    indicator_by_name = {it.name: it for it in indicators}
    arranged = {}
    rows = self.adapter.query_staff_performance(
        group_by_id.keys(), indicator_by_name.keys(), start_date, end_date)
    for sp in rows:
        entity = DictWrapper({f: getattr(sp, f) for f in self.fields})
        per_group = arranged.setdefault(group_by_id[sp.xfgroup_id], {})
        per_group.setdefault(indicator_by_name[entity.identify], []).append(entity)
    return arranged
def export_keyword_gdata(start_index, read_size, cache_unit = 1000, is_overwrite = False):
    """Export keyword g-data (pv/click/competition/cpc) into the cache.

    Reads keywords in pages of *read_size* starting at *start_index*,
    processes each page in chunks of *cache_unit*, and writes each chunk
    through a GDataCacheAdapter. Unless *is_overwrite* is set, keywords
    already cached with a positive g_pv are skipped. Progress is appended
    to the log file after every chunk.
    """
    def get_gdata_cachekey(kw):
        # Cache key: "G_" + keyword with spaces stripped.
        mark = "G_%s"
        return mark % kw.replace(' ', '')
    cache_adapter = GDataCacheAdapter(size = cache_unit)
    total_count = keyword_coll.count()
    # Running counters: total keywords seen (offset by start_index) and
    # keywords actually written to the cache.
    sum_export = start_index + 0
    deal_count = 0
    msg = 'Total kw count : %s , time is %s' % (total_count, datetime.datetime.now())
    log_info_to_file(msg)
    for kw_list in get_keyword_list(start_index, read_size = read_size):
        cur_total = len(kw_list)
        # Number of cache_unit-sized chunks in this page (round up).
        cycle = int(math.ceil(cur_total * 1.0 / cache_unit))
        for index in xrange(cycle):
            origin_dict = {}
            alas_dict = {}
            for kw in kw_list[index * cache_unit:(index + 1) * cache_unit]:
                word = str(kw.word)
                origin_dict.update({word:kw})
                # alias cache-key -> plain word, for reverse lookup below.
                alas_dict.update({get_gdata_cachekey(word):word})
            g_data_dict = {}
            if not is_overwrite:
                # Fetch whatever is already cached for this chunk.
                cache_dict = cache_adapter.get_many(alas_dict.keys())
                for als_kw, g_data in cache_dict.items():
                    real_key = str(alas_dict[als_kw])
                    g_data_dict[real_key] = g_data
            store_dict = {}
            for kw, ori_data in origin_dict.items():
                word = str(kw)
                if word in g_data_dict:
                    cache_data = DictWrapper(g_data_dict[ word ])
                    # Already cached with real page views: skip.
                    if cache_data.g_pv > 0:
                        continue
                c_cache = {
                    "g_pv":ori_data.g_pv,
                    "g_click":ori_data.g_click,
                    "g_competition":ori_data.g_competition,
                    "g_cpc":ori_data.g_cpc
                }
                store_dict[ get_gdata_cachekey(word) ] = c_cache
            if store_dict:
                cache_adapter.set_many(store_dict)
                deal_count += len(store_dict)
            sum_export += len(origin_dict)
            msg = 'Has finished : %s%% , finished count: %s , deal count : %s' % (round(sum_export * 100.0 / total_count, 2), sum_export, deal_count)
            log_info_to_file(msg)
    msg = "has finished, current time is %s" % (datetime.datetime.now())
    log_info_to_file(msg)
def _reintros(cls, psuser, start_date, end_date, result_mapping):
    """Collect *psuser*'s 'reintro' events created inside the date window,
    grouped by creation date.
    """
    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    query = {'type': 'reintro', 'psuser_id': psuser.id,
             'create_time': {"$gte": window_start, "$lte": window_end}}
    for raw in event_coll.find(query, {'shop_id': 1, 'create_time': 1}):
        event = DictWrapper(raw)
        result_mapping[event.create_time.date()].append(event)
    return result_mapping
def _valid_contacts(cls, psuser, start_date, end_date, result_mapping):
    """Collect *psuser*'s visible 'contact' events created inside the date
    window, grouped by creation date.
    """
    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    query = {'type': 'contact', 'psuser_id': psuser.id, 'visible': 1,
             'create_time': {"$gte": window_start, "$lte": window_end}}
    for raw in event_coll.find(query, {'shop_id': 1, 'create_time': 1}):
        event = DictWrapper(raw)
        result_mapping[event.create_time.date()].append(event)
    return result_mapping
def _expire_orders(cls, psuser, start_date, end_date, result_mapping):
    """Collect *psuser*'s paid subscribes whose end_date falls inside the
    window, grouped by expiry date.
    """
    qs = Subscribe.objects.values('id', 'shop_id', 'end_date') \
        .filter(end_date__gte = start_date, end_date__lte = end_date) \
        .filter(consult = psuser) \
        .filter(pay__gt = 0) \
        .order_by('-create_time')
    for raw in qs:
        sub = DictWrapper(raw)
        result_mapping[sub.end_date].append(sub)
    return result_mapping
def _expire_orders(cls, xfgroup, start_date, end_date, result_mapping):
    """Collect paid subscribes of *xfgroup*'s consult group whose end_date
    falls inside the window, grouped by expiry date.
    """
    qs = Subscribe.objects.values('id', 'shop_id', 'end_date') \
        .filter(consult_xfgroup = xfgroup, pay__gt = 0) \
        .filter(end_date__gte = start_date, end_date__lte = end_date) \
        .order_by('-create_time')
    for raw in qs:
        sub = DictWrapper(raw)
        result_mapping[sub.end_date].append(sub)
    return result_mapping
def _real_renew_orders(cls, psuser, start_date, end_date, result_mapping):
    """Collect *psuser*'s approved subscribes created inside the window,
    excluding biz types 6 and 7; grouped by creation date.
    """
    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    qs = Subscribe.objects.values("id", 'shop_id', 'create_time', 'pay') \
        .filter(create_time__gte = window_start,
                create_time__lte = window_end,
                approval_status = 1) \
        .filter(psuser = psuser) \
        .exclude(biz_type__in = [6, 7]) \
        .order_by('-create_time')
    for raw in qs:
        sub = DictWrapper(raw)
        result_mapping[sub.create_time.date()].append(sub)
    return result_mapping
def _unknown_orders(cls, psuser, start_date, end_date, result_mapping):
    """Collect *psuser*'s biz_type 6 subscribes created inside the window,
    grouped by creation date.
    """
    window_start = datetime.datetime(start_date.year, start_date.month,
                                     start_date.day, 0, 0, 0)
    window_end = datetime.datetime(end_date.year, end_date.month,
                                   end_date.day, 23, 59, 59)
    qs = Subscribe.objects.values("id", 'shop_id', 'create_time', 'biz_type') \
        .filter(biz_type = 6,
                create_time__gte = window_start,
                create_time__lte = window_end) \
        .filter(psuser = psuser) \
        .order_by('-create_time')
    for raw in qs:
        sub = DictWrapper(raw)
        result_mapping[sub.create_time.date()].append(sub)
    return result_mapping