Example 1
    def run(self):
        try:
            is_login_ok = False
            is_oauth_request, is_auth_ok, oauth_error = self.check_is_oauth_and_result()
            if is_oauth_request:
                if is_auth_ok:
                    self.token = AccessToken.sync_access_token(
                        self.request.GET['code'])
                else:
                    return self.jump_limited_page(oauth_error)
            elif self.check_parms_integrity() \
                    and self.check_timestamp() \
                    and self.check_sign() \
                    and self.get_token():
                is_login_ok = True

            log.info("LOGIN main_port, nick=%s, from=%s" %
                     (self.nick, self.visitor_from))
            if is_auth_ok or is_login_ok:
                domain = NickPort.get_port_domain(nick=self.nick,
                                                  force_create=True)
                if not domain:
                    return self.jump_limited_page('服务器繁忙,请稍候再登录')
                else:
                    return self.redirect_2subport(domain)
            else:
                return self.redirect_2top_authorize()
        except Exception, e:
            log.error('Login error, e=%s, request=%s' %
                      (e, self.request.get_full_path()))
            return self.jump_limited_page('登录失败,请联系客服')
Example 2
    def get_mnt_info(shop_id):
        """获取左侧导航全自动引擎菜单"""
        try:
            mnt_list = CacheAdpter.get(CacheKey.WEB_MNT_MENU % shop_id, 'web',
                                       [])
            if not mnt_list:
                from apps.subway.models_campaign import Campaign
                from apps.mnt.models import MntCampaign, MNT_TYPE_CHOICES
                campid_mnttype_dict = dict(
                    MntCampaign.objects.filter(shop_id=shop_id).values_list(
                        'campaign_id', 'mnt_type').order_by('start_time'))
                campid_title_dict = dict(
                    Campaign.objects.filter(shop_id=shop_id).values_list(
                        'campaign_id', 'title'))
                mnt_desc_dict = dict(MNT_TYPE_CHOICES)
                for campaign_id, mnt_type in campid_mnttype_dict.items():
                    name = campid_title_dict[campaign_id]
                    mnt_type_name = mnt_desc_dict[mnt_type].replace('托管', '')
                    mnt_list.append({
                        'name': name,
                        'campaign_id': campaign_id,
                        'mnt_type_name': mnt_type_name
                    })

                CacheAdpter.set(CacheKey.WEB_MNT_MENU % shop_id, mnt_list,
                                'web', 60 * 60 * 6)
            return mnt_list
        except Exception, e:
            log.error('get_mnt_info error,e=%s, shop_id=%s' % (e, shop_id))
            return []
Example 3
 def execute(self):
     result = 0
     method_string = self.method + '(self.record_list,self.manager)'
     try:
         result = eval(method_string)
     except Exception, e:
         log.error('e=%s' % e)
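
The snippet above builds a call string and runs it through eval. The same name-based dispatch can be written with an explicit registry and no eval; the following is a minimal sketch under assumed names (method_a, DISPATCH and execute_by_name are illustrative, not part of the original code):

import logging

log = logging.getLogger(__name__)

def method_a(record_list, manager):
    # illustrative worker; the real methods are not shown in the source
    return len(record_list)

DISPATCH = {'method_a': method_a}

def execute_by_name(method_name, record_list, manager):
    # resolve the callable from a registry instead of eval'ing a string
    func = DISPATCH.get(method_name)
    if func is None:
        log.error('unknown method=%s' % method_name)
        return 0
    try:
        return func(record_list, manager)
    except Exception as e:
        log.error('e=%s' % e)
        return 0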
Example 4
 def auto_insert_record(self, psuser, shop_id, nick, rec_type, rec_value, create_time):
     try:
         plan_tree = list(pt_coll.find({
             'psuser_id': psuser.id,
             'status': 1,
             'start_time':{'$lte':create_time},
             'end_time':{'$gt':create_time - datetime.timedelta(days=1)}
         }))
         if plan_tree:
             plan_tree = plan_tree[0]
             # tree_obj = PlanTree.get_tree_template(plan_tree)
             # path = self.get_path_by_shop_id(shop_id, tree_obj)
             path = self.get_or_create_path_by_shop_id(shop_id, plan_tree['_id'], plan_tree)
             if path:
                 ptr_coll.insert({
                     'tree_id': plan_tree['_id'],
                     'path': path,
                     'shop_id': shop_id,
                     'nick': nick,
                     'rec_type': rec_type,
                     'rec_value': rec_value,
                     'psuser_id': 0,
                     'psuser_cn': '系统',
                     'create_time': create_time
                 })
     except Exception, e:
         log.error('build_tree.auto_insert_record error, e=%s' % e)
Example 5
    def get_catinfo(cls, cat_id):
        cat = None
        if cat_id != 0 and not cat_id:
            return cat

        try:
            str_cat_id = str(cat_id)
            cat = CacheAdpter.get(CacheKey.KWLIB_CAT_INFO % str_cat_id, 'web',
                                  None)
            if not cat:
                cat = cls.get_cat_from_db(cat_id)
                if not cat:
                    tmp = get_catinfo_new(1, category_id_list=[str_cat_id])
                    if tmp:
                        tmp = tmp[int(cat_id)]
                        tmp['_id'] = tmp.pop('cat_id')
                        temp_dict = {
                            'cat_child_list': [],
                            'danager_info': {},
                            'product_dict': {},
                            'sale_dict': {},
                            'selectconf_dict': {},
                            'brand_list': [],
                            'meta_list': [],
                            'forbid_list': [],
                            'exclude_list': []
                        }
                        cat_coll.insert(dict(tmp, **temp_dict))
                        cat = cls.get_cat_from_db(cat_id)
                CacheAdpter.set(CacheKey.KWLIB_CAT_INFO % str_cat_id, cat,
                                'web', Const.KWLIB_CAT_CACHE_TIME)
        except Exception, e:
            log.error(
                "get cat error , can not get the cat by cat_id = %s and the error = %s"
                % (cat_id, e))
Example 6
    def get_selectconf(item, select_type='', conf_name=''):
        '''
        Get the word-selection config by item id and selection scenario. Looks up the config from the item, its category, parent category, root category and finally the default, in that order.
        '''
        if not conf_name:
            conf_name = 'default_' + select_type + '_conf'
            if item.selectconf_dict.has_key(select_type):
                conf_name = item.selectconf_dict[select_type]
            else:
                cat_path_id = Cat.get_cat_attr_func(item.cat_id, "cat_path_id")
                if cat_path_id:
                    cat_list = cat_path_id.split(' ')
                    cat_list.reverse()
                    for cat_id in cat_list:
                        conf_dict = Cat.get_cat_attr_func(
                            cat_id, "selectconf_dict")
                        if conf_dict and conf_dict.has_key(select_type):
                            conf_name = conf_dict[select_type]
                            break
                else:
                    log.error('can not find cat path id, cat_id = %s' %
                              item.cat_id)
        try:
            conf = SelectConf.objects.get(conf_name=conf_name)
        except SelectConf.DoesNotExist:
            log.error(
                'Can not find SelectConf! select_type=%s, conf_name=%s ' %
                (select_type, conf_name))
            return None

        return conf
Example 7
    def check_status(self):  # Could this be a problem, i.e. hit the API too frequently and exceed the rate limit?

        def is_done(status_dict, task_count):
            if status_dict:
                all_successed = all(status_dict.values())
                if not all_successed:
                    raise WorkerFailed("download-failed")
                else:
                    if len(status_dict) == task_count:
                        return True
                    return False
            else:
                return False

        key_list = [("%s_status" % key) for key in self.key_list]
        status_dict = CacheAdpter.get_many(key_list, self.cache_db)
        has_done = is_done(status_dict, len(self.key_list))

        time_interval = 0.1
        # Give it a timeout anyway; on timeout treat the download as failed. (Global flow control also needs consideration, e.g. whether today's keyword reports have already finished downloading.)
        timeout = len(self.adgroup_id_list) * 60  # timeout scales with the number of adgroups to download (a hard upper bound would be even better)
        st_time = time.time()
        while (not has_done):
            time.sleep(time_interval)
            if time.time() - st_time >= timeout:
                # record the status
                log.error("timeout: total %s, finished %s, undone task=%s" % (len(self.key_list), len(status_dict), list(set(self.key_list) - set(status_dict.keys()))))
                raise WorkerTimeout("download-timeout")
            status_dict = CacheAdpter.get_many(key_list, self.cache_db)
            has_done = is_done(status_dict, len(self.key_list))

        CacheAdpter.delete_many(key_list, self.cache_db)  # once this task's status is confirmed, clear these flag bits so they do not affect the next check
        return True
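
The loop above re-reads the cache every 0.1 s until all tasks finish or the timeout expires. If the polling frequency is a concern (as the comment at the top of check_status asks), the same wait can use a capped back-off; a minimal, generic sketch (wait_until_done and its defaults are illustrative, not the project's API):

import time

def wait_until_done(poll, timeout, first_interval=0.1, max_interval=5.0):
    # poll: any zero-argument callable returning True once the work is finished.
    # Returns False on timeout so the caller can raise its own WorkerTimeout.
    interval = first_interval
    st_time = time.time()
    while not poll():
        if time.time() - st_time >= timeout:
            return False
        time.sleep(interval)
        interval = min(interval * 2, max_interval)  # back off, but cap the sleep
    return True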
Example 8
 def get_rob_rank_task(cls):
     new_task_list = []
     is_mutual = CacheAdpter.get(CacheKey.ENGINE_ROBRANK_TASK_MUTUAL_LOCK,
                                 'web')
     if not is_mutual:  # not locked yet: set the cache right away to shut the gate
         kw_lockers = []
         try:
             CacheAdpter.set(CacheKey.ENGINE_ROBRANK_TASK_MUTUAL_LOCK, True,
                             'web', 60 * 2)
             now = datetime.datetime.now()
             time_str = now.strftime('%H:%M')
             # The start/end time window for automatic keyword rank-grabbing is only checked when tasks are fetched. It is not re-checked per keyword, because by the time a keyword grabs its rank the window may have just been exceeded.
             kw_lockers = KeywordLocker.objects.filter(
                 Q(is_stop__in = [0, None], start_time__lte = time_str, end_time__gte = time_str, next_run_time__lte = now)
                 & (Q(is_running__in = [0, None]) | Q(is_running = 1, last_run_time__lte = now - datetime.timedelta(hours = 1)))) \
                 .order_by('-is_running', 'next_run_time', 'adgroup_id').limit(100)
             kw_id_list = [kl.keyword_id for kl in kw_lockers]
             kw_locker_coll.update({'_id': {'$in': kw_id_list}},
                                   {'$set': {'is_running': 1, 'last_run_time': now}},
                                   multi=True)
         except Exception, e:
             log.error("robrank_task get_valid_task error, e=%s" % e)
         finally:
Example 9
 def execute(cls, poison):
     signal.signal(signal.SIGINT, signal.SIG_IGN)
     rob_rank_list = []
     new_rob_rank_list = []
     try:
         while True:
             if rob_rank_list and rob_rank_list[0].check_time <= datetime.datetime.now():
                 task = rob_rank_list.pop(0)
                 if not task.rob_rank():
                     rob_rank_list.append(task)
                 continue
             if new_rob_rank_list:
                 task = new_rob_rank_list.pop(0)
                 if not task.rob_rank():
                     rob_rank_list.append(task)
                 continue
             if not poison.is_set():
                 new_task_list = cls.get_rob_rank_task()
                 if len(new_task_list):
                     new_rob_rank_list.extend(new_task_list)
                 else:
                     time.sleep(10)
                 continue
             if not rob_rank_list:
                 break
     except Exception, e:
         subject = '【紧急问题】自动抢排名任务出错'
         exstr = traceback.format_exc()
         content = str(exstr)
         cc_list = [settings.AUTO_TASK_EMAIL]
         send_mail(subject, content, settings.DEFAULT_FROM_EMAIL, cc_list)
         log.error('auto_rob_rank error, e=%s' % exstr)
Example 10
 def check_keyword_status(self):
     '''When the Taobao price-change API misbehaves in special cases, try the price change again. Normally, by the time we get here all keywords have already finished rank-grabbing.'''
     upd_arg_list = []
     for kl in self.kw_locker_list:
         if kl.result_flag not in kl.fininshed_status_list:
             new_price = min(kl.limit_price, max(kl.old_price, 5))
             if new_price == kl.cur_price:
                 kl.result_flag = 'done'
                 kl.save_process(desc='抢排名结束')
             else:
                 upd_arg_list.append([
                     self.campaign_id, self.adgroup_id, kl.keyword_id,
                     kl.word, {
                         'max_price': new_price,
                         'old_price': kl.cur_price
                     }
                 ])
                 kl.result_flag = 'failed'
                 kl.save_process(desc='尝试恢复初始出价%s元, 抢排名结束' %
                                 round(kl.old_price / 100.0, 2))
     if upd_arg_list:
         try:
             update_keywords(shop_id=self.shop_id,
                             kw_arg_list=upd_arg_list,
                             opter=self.opter,
                             opter_name='')
         except Exception, e:
             log.error(
                 'submit_keywords error, shop_id=%s, upd_arg_list=%s, e=%s'
                 % (self.shop_id, upd_arg_list, e))
Example 11
 def submit_keywords(self):
     upd_arg_list = []
     for kl in self.kw_locker_list:
         kl.do_failed_word()
         if kl.result_flag not in kl.fininshed_status_list:
             update_dict = {'old_price': kl.cur_price}
             if kl.platform == 'pc':
                 update_dict.update({
                     'max_price': kl.test_price,
                     'old_price': kl.cur_price
                 })
             else:
                 update_dict.update({
                     'max_mobile_price': kl.test_price,
                     'mobile_old_price': kl.cur_price
                 })
             upd_arg_list.append([
                 self.campaign_id, self.adgroup_id, kl.keyword_id, kl.word,
                 update_dict
             ])
     if upd_arg_list:
         updated_kw_list = []
         try:
             updated_kw_list, _ = update_keywords(shop_id=self.shop_id,
                                                  kw_arg_list=upd_arg_list,
                                                  opter=self.opter,
                                                  opter_name='')
         except Exception, e:
             log.error(
                 'submit_keywords error, shop_id=%s, upd_arg_list=%s, e=%s'
                 % (self.shop_id, upd_arg_list, e))
         for kl in self.kw_locker_list:
             kl.check_upded_price(updated_kw_list)
Example 12
    def pack_select_word_conf(select_conf_name):
        if not select_conf_name:
            return {}

        try:
            select_conf = SelectConf.objects.get(conf_name=select_conf_name)
            return {
                'conf_name': select_conf.conf_name,
                'conf_desc': select_conf.conf_desc,
                'candi_filter': json.dumps(select_conf.candi_filter),
                'label_define_list': select_conf.label_define_list,
                'select_conf_list': [conf._data for conf in select_conf.select_conf_list],
                'price_conf_list': [conf._data for conf in select_conf.price_conf_list],
                'delete_conf': select_conf.delete_conf
            }
        except Exception, e:
            log.error("get select conf error, name=%s, e=%s" %
                      (select_conf_name, e))
            return {}
Example 13
def get_cat_avg_cpc(request):
    '''Get the item's industry (category) averages.'''
    errMsg = ''
    try:
        shop_id = request.user.shop_id
        cat_id_list = request.POST.getlist('cat_id_list[]')
        cat_id_list = map(int, cat_id_list)
        if not cat_id_list:
            adg_id = int(request.POST.get('adg_id', 0))
            category_ids = Adgroup.objects.filter(shop_id = int(request.user.shop_id), adgroup_id = adg_id).values_list('category_ids')
            if len(category_ids):
                temp_list = category_ids[0].strip().split(' ')
                try:
                    cat_id_list.append(int(temp_list[-1]))
                except:
                    pass
        try:
            cat_path, danger_descr = Cat.get_cat_path(cat_id_list = cat_id_list, last_name = request.user.shop_type).values()[0]
        except Exception, e:
            cat_path = '未获取到值'
            danger_descr = ''
            log.error('mnt_get_cat_avg_cpc get_cat_path error, shop_id=%s, cat_id_list=%s, e = %s' % (shop_id, cat_id_list, e))
        try:
            cat_stat_info = CatStatic.get_market_data(cat_id_list = cat_id_list).values()[0]
            avg_cpc = round(cat_stat_info['cpc'] * 0.01, 2)
            avg_cpc = avg_cpc or 0.50
            avg_cpc_flag = 1
        except Exception, e:
            avg_cpc = 1
            avg_cpc_flag = 0
            log.error('mnt_get_cat_avg_cpc get_cat_stat_info error, shop_id=%s, e = %s' % (shop_id, e))
Example 14
def get_seller_cids(request):
    result_list = CacheAdpter.get(CacheKey.WEB_SHOP_SELLER_CIDS % request.user.shop_id, 'web', [])
    if not result_list:
        tapi = get_tapi(request.user)
        try:
            tobj = tapi.sellercats_list_get(nick = request.user.nick)
        except TopError, e:
            log.error("get_sellercats error, e=%s, shop_id=%s" % (e, request.user.shop_id))
            return {'errMsg': '', 'cat_list': result_list}

        cat_list = []
        # Available attributes: cid, parent_cid, name, pic_url, sort_order, created, modified, type
        if hasattr(tobj, 'seller_cats') and hasattr(tobj.seller_cats, 'seller_cat'):
            for top_cat in tobj.seller_cats.seller_cat:
                cat_list.append({'cat_id': top_cat.cid, 'cat_name': top_cat.name, 'parent_cat_id': top_cat.parent_cid, 'sort_order': top_cat.sort_order})

        cat_dict = {}
        child_cat_dict = collections.defaultdict(list)

        for cat in cat_list:
            if cat['parent_cat_id'] != 0:
                child_cat_dict[cat['parent_cat_id']].append(cat)
            else:
                cat_dict.update({cat['sort_order']: cat})

        for order, cat in cat_dict.items():
            if cat['cat_id'] in child_cat_dict:
                cat['child_cat_list'] = child_cat_dict[cat['cat_id']]
            else:
                cat['child_cat_list'] = []
            result_list.append(cat)

        CacheAdpter.set(CacheKey.WEB_SHOP_SELLER_CIDS % request.user.shop_id, result_list, 'web', 60 * 10)
Example 15
    def allot_2_workers(self, sub_prj_list, db_name):
        log.info('start: send msgs to workers')
        # First set each task's status to working
        prj_stat_dict = {}
        data_keys = []
        for prj in sub_prj_list:
            prj['statu'] = 'working'
            prj_stat_dict[prj['data_key'] + '_statu'] = 'working'
            data_keys.append(prj['data_key'])
        CacheAdpter.set_many(prj_stat_dict, db_name, 180)
        CacheAdpter.delete_many(data_keys, db_name)

        # Dispatch the tasks
        for prj in sub_prj_list:
            # hand out the job
            try:
                nt = NewThread(JAPI(host='%s:%s' %
                                    (prj['host'], prj['port'])).worker_work,
                               prj_dict=prj,
                               is_sync=False)
                nt.setDaemon(True)
                nt.start()
            except Exception, e:
                log.error('error=%s,prj=%s' % (e, prj))
                continue
Example 16
 def create_rob_rank_task(cls, user, adgroup_id, kw_cfg_list):
     error_msg = ''
     task = None
     shop_id = int(user.shop_id)
     try:
         if RANKING_CODE not in user.perms_code:
             error_msg = "version_limit"
         else:
             kw_locker_list = []
             for kw in kw_cfg_list:
                 kw_locker = KeywordLocker(
                     shop_id=shop_id,
                     adgroup_id=adgroup_id,
                     keyword_id=kw['keyword_id'],
                     word=kw['word'],
                     exp_rank_range=kw['exp_rank_range'],
                     limit_price=kw['limit_price'],
                     platform=kw['platform'],
                     nearly_success=kw['nearly_success'],
                     is_auto_robrank=False)
                 kw_locker_list.append(kw_locker)
             task = RobRankTask(shop_id=shop_id,
                                adgroup_id=adgroup_id,
                                kw_locker_list=kw_locker_list,
                                opt_type='manual')
     except Exception, e:
         log.error('shop_id=%s, adg_id=%s, e=%s' % (shop_id, adgroup_id, e))
         error_msg = 'others'
Example 17
 def do_my_work(self):
     log.info('worker start, item_id=%s, key=%s' %
              (self.prj_dict['item_id'], self.prj_dict['data_key']))
     kw_list = CacheAdpter.get(self.prj_dict['from_key'],
                               self.prj_dict['from_db'], [])
     if not kw_list:
         log.error('can not get group from memcache and the group is = %s' %
                   self.prj_dict['from_key'])
         kw_list = CacheAdpter.get(self.prj_dict['from_key'],
                                   self.prj_dict['from_db'], [])
     group_dict = {}
     if kw_list:
         if ('click > 0' not in self.filter_conf) and (
                 'click>0' not in self.filter_conf) or kw_list[0][2] > 0:
             cat_id = self.prj_dict['from_key'].split('_')[0]
             try:
                 group_dict = group_kwlist(kw_list, self.item_scorer,
                                           int(cat_id), self.cat_cpc,
                                           self.cats, self.remove_word_list,
                                           self.filter_conf,
                                           self.filter_list,
                                           self.price_list)
             except Exception, e:
                 log.error('group_kwlist error: cat_id=%s, e=%s' %
                           (cat_id, e))
Example 18
def get_gdata_by_redis(kw_list):
    result = {}
    try:
        result = get_gdata_word(word_list=kw_list).result.to_dict()
    except Exception, e:
        log.error('get gdata from redis error and the error is =%s' % e)
        pass
Example 19
 def publish_task(self):
     try:
         self.TASK_QUEUE.publish_tasks(self.task_list)
         return True
     except Exception as e:
         log.error("publish task error, e=%s" % e)
         return False
Example 20
    def account_data_collector(db_fields, days):
        # performance-heavy
        group_dict = {'_id':'$_id'}
        project_dict = {'shop_id':'$_id', '_id':0}
        for field in db_fields:
            field_mapping = DATA_AGGR_CONFIG[field]
            group_dict.update(field_mapping['group'])
            project_dict.update(field_mapping['project'])

        date_time = datetime.datetime.now() - datetime.timedelta(days = days + 1)
        pipeline = [
            {'$project': {'shop_id': '$id',
                          'rpt_list': {'$setUnion': ['$rpt_list',
                                                     [{'cost': 0,
                                                       'impressions': 0,
                                                       'click': 0,
                                                       'directpay': 0,
                                                       'indirectpay': 0,
                                                       'directpaycount': 0,
                                                       'indirectpaycount': 0,
                                                       'favitemcount': 0,
                                                       'favshopcount': 0,
                                                       'date': date_time,
                                                       }]]},
                          }},
            {'$unwind': '$rpt_list'},
            {'$match': {'rpt_list.date': {'$gte': date_time}}},
            {'$group': group_dict},
            {'$project': project_dict},
        ]
        try:
            return account_coll.aggregate(pipeline)['result']
        except Exception, e:
            log.error('account aggregate error, e=%s' % (e))
            return []
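
account_data_collector assembles its $group and $project stages from per-field entries in DATA_AGGR_CONFIG, which is not shown here. A minimal sketch of what one entry could look like, with the accumulator fields assumed from the defaults injected in the $setUnion stage above:

# Hedged sketch of DATA_AGGR_CONFIG; the real structure may differ. Each field
# contributes its piece of the $group stage (how to accumulate over rpt_list)
# and of the $project stage (how to expose the accumulated value).
DATA_AGGR_CONFIG = {
    'cost': {
        'group': {'cost': {'$sum': '$rpt_list.cost'}},
        'project': {'cost': '$cost'},
    },
    'click': {
        'group': {'click': {'$sum': '$rpt_list.click'}},
        'project': {'click': '$click'},
    },
}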
Example 21
 def get_subdata_by_redis(cls, word_list, sub_type):
     result = {}
     tb_list = []
     pc_key_list , mobile_key_list = [], []
     pc_result_list, mobile_result_list = [], []
     for word in word_list:
         pc_key, mobile_key = cls.get_sub_data_key(word)
         pc_key_list.append(pc_key)
         mobile_key_list.append(mobile_key)
     try:
         if sub_type == 0:
             mobile_result_list = cls.r_skeyword.mget(mobile_key_list)
             result, tb_list = cls.get_subdata_by_key(word_list, mobile_result_list)
         elif sub_type == 1:
             pc_result_list = cls.r_skeyword.mget(pc_key_list)
             result, tb_list = cls.get_subdata_by_key(word_list, pc_result_list)
         else:
             mobile_result_list = cls.r_skeyword.mget(mobile_key_list)
             mobile_dict, mobile_list = cls.get_subdata_by_key(word_list, mobile_result_list)
             pc_result_list = cls.r_skeyword.mget(pc_key_list)
             pc_dict, pc_list = cls.get_subdata_by_key(word_list, pc_result_list)
             tb_list = list(set(mobile_list + pc_list))
             result = cls.get_total_subdata(mobile_dict, pc_dict)
     except Exception, e:
         log.error("get redis sub data error and the error=%s" % e)
         return result, word_list
Example 22
    def struct_download(cls, shop_id, tapi):
        result = False
        try:
            account_dict = {'balance': 100}
            try:
                # tobj_balance = tapi.simba_account_balance_get()
                # if tobj_balance and hasattr(tobj_balance, 'balance'):
                #     account_dict.update({'balance':tobj_balance.balance})
                balance = tapi.get_account_balance()
                account_dict.update({'balance': balance})
            except Exception, e:
                log.error('get balance error, shop_id=%s, error=%s' %
                          (shop_id, e))

            if account_coll.find_one({'_id': shop_id}):
                account_coll.update({'_id': shop_id}, {'$set': account_dict})
            else:
                account_dict.update({
                    '_id': shop_id,
                    'cat_id': 0,
                    'consult_group_id': 0,
                    'consult_id': 0
                })
                account_coll.insert(account_dict)
            log.info('sync account OK, shop_id=%s' % shop_id)
            result = True
Example 23
 def get_syno_brand(word_list, syno_dict):
     syno_list = []
     try:
         for word in word_list:
             syno_list.extend(syno_dict.get(word, []))
     except Exception, e:
         log.error("get syno brand error and the error is = %s" % e)
Example 24
def show_chart(request):
    """获取账户图表"""
    shop_id = int(request.user.shop_id)
    chart_data = {}
    update_cache = int(request.POST.get('update_cache', '0'))
    try:
        start_date = request.POST.get('start_date')
        end_date = request.POST.get('end_date')
        errMsg = ''
        rpt_dict = RealtimeReport.get_summed_rtrpt(
            rpt_type='account',
            args_list=[shop_id],
            update_now=bool(update_cache))  # fetch real-time account data from the cache
        rtrpt_item = rpt_dict.get(shop_id, Account.Report())
        snap_dict = Account.Report.get_snap_list(
            query_dict={'shop_id': shop_id},
            start_date=start_date,
            end_date=end_date)
        snap_list = snap_dict.get(shop_id, [])
        snap_list.append(rtrpt_item)
        category_list, series_cfg_list = get_trend_chart_data(
            data_type=1, rpt_list=snap_list)
        chart_data = {
            'category_list': category_list,
            'series_cfg_list': series_cfg_list
        }
    except Exception, e:
        log.error("get account_chart error, shop_id=%s, error=%s" %
                  (shop_id, e))
        errMsg = '获取店铺数据失败,请刷新页面'
Example 25
    def download_kwrpt_base(cls, shop_id, campaign_id, adgroup_id, token,
                            start_time, end_time, search_type, source, tapi):
        """下载base报表"""
        page_size = 200
        page_no = 1
        base_list = []
        while (True):
            try:
                top_base_objs = tapi.simba_rpt_adgroupkeywordbase_get(
                    campaign_id=campaign_id,
                    adgroup_id=adgroup_id,
                    start_time=start_time,
                    end_time=end_time,
                    search_type=search_type,
                    source=source,
                    subway_token=token,
                    page_no=page_no,
                    page_size=page_size,
                    retry_count=settings.TAPI_RETRY_COUNT * 4,
                    retry_delay=1)
            except TopError, e:
                log.error(
                    'simba_rpt_adgroupkeywordbase_get TopError, shop_id=%s, adgroup_id=%s, start_time=%s, end_time=%s, e=%s'
                    % (shop_id, adgroup_id, start_time, end_time, e))
                raise e

            if top_base_objs and hasattr(
                    top_base_objs, 'rpt_adgroupkeyword_base_list'
            ) and top_base_objs.rpt_adgroupkeyword_base_list:
                base_list.extend(top_base_objs.rpt_adgroupkeyword_base_list)
                if len(top_base_objs.rpt_adgroupkeyword_base_list) < page_size:
                    break
                page_no += 1
            else:
                break
Example 26
def set_online_status(request):
    """修改计划状态 """
    shop_id = int(request.user.shop_id)
    camp_id_list = request.POST.getlist('camp_id_list[]')
    mode = int(request.POST['mode'])
    online_status = mode and 'online' or 'offline'
    opter, opter_name = analysis_web_opter(request)
    success_camp_ids, failed_camp_ids = [], []
    try:
        for camp_id in camp_id_list:
            result_list, _ = update_campaign(shop_id=shop_id,
                                             campaign_id=camp_id,
                                             online_status=online_status,
                                             opter=opter,
                                             opter_name=opter_name)
            if 'online_status' in result_list:
                success_camp_ids.append(str(camp_id))
            else:
                failed_camp_ids.append(str(camp_id))
        return {
            'errMsg': '',
            'mode': mode,
            'success_camp_ids': success_camp_ids
        }
    except Exception, e:
        log.error('update_camps_status error,e=%s, shop_id=%s' % (e, shop_id))
        return {'errMsg': '修改失败:淘宝接口不稳定,请稍后再试'}
Example 27
    def get_server_menu():
        '''Get the service-center menu. add by tianxiaohe 20151031'''
        try:
            server_menu_list = []
            if not server_menu_list:
                from apps.web.models import main_ad_coll
                page_list = main_ad_coll.find({
                    'ad_position': 'servermenu',
                    'ad_display': 1,
                    'ad_status': 2
                }).sort('ad_weight', -1)

                for page in page_list:
                    server_menu_list.append({
                        'id': str(page['_id']),
                        'obj_id': page.get('_id', 0),
                        'weight': page.get('ad_weight', ''),
                        'title': page.get('ad_title', '')
                    })
                CacheAdpter.set(CacheKey.WEB_AD_MENU, server_menu_list, 'web',
                                60 * 60 * 6)
            return server_menu_list
        except Exception, e:
            log.error('get_server_menu error, e=%s' % e)
            return []
Example 28
    def download_data(shop_id, is_force, **kwargs):
        cache_key = CacheKey.WEB_SYNC_DATA_MUTUAL % shop_id
        mutual_timeout = is_force and (60 * 40) or (60 * 10)
        is_mutual = CacheAdpter.get(cache_key, 'web')
        if not is_mutual:
            CacheAdpter.set(cache_key, True, 'web', mutual_timeout)
            try:
                dler = Downloader.objects.get(shop_id=shop_id)
                if not dler.sync_all_struct(is_force=is_force):
                    raise Exception('dl_struct_failed', is_force)

                args = is_force and {
                    'is_force': True,
                    'rpt_days': kwargs['rpt_days']
                } or {}
                if not dler.sync_all_rpt(**args):
                    raise Exception('dl_rpt_failed', is_force)

                return True, ''
            except Exception, e:
                log.error('download data error,shop_id=%s, e=%s' %
                          (shop_id, e.args))
                return False, e[0]

            finally:
Example 29
def calc_match(prj_dict):
    prj_dict = prj_dict
    item_scorer = ItemScorer(prj_dict['label_conf_list'])
    cats = prj_dict['cats']
    filter_conf = prj_dict['filter_conf']
    filter_list = prj_dict['filter_list']
    price_list = prj_dict['price_list']
    remove_word_list = prj_dict['remove_words'] and prj_dict[
        'remove_words'].split(',') or []
    cat_cpc = prj_dict['cat_cpc']

    log.info('worker start, item_id=%s, key=%s' %
             (prj_dict['item_id'], prj_dict['data_key']))
    kw_list = CacheAdpter.get(prj_dict['from_key'], prj_dict['from_db'], [])
    if not kw_list:
        log.error('can not get group from memcache and the group is = %s' %
                  prj_dict['from_key'])
        kw_list = CacheAdpter.get(prj_dict['from_key'], prj_dict['from_db'],
                                  [])
    group_dict = {}
    if kw_list:
        if ('click > 0' not in filter_conf) and (
                'click>0' not in filter_conf) or kw_list[0][2] > 0:
            cat_id = prj_dict['from_key'].split('_')[0]
            try:
                group_dict = group_kwlist(kw_list, item_scorer, int(cat_id),
                                          cat_cpc, cats, remove_word_list,
                                          filter_conf, filter_list, price_list)
            except Exception, e:
                log.error('group_kwlist error: cat_id=%s, e=%s' % (cat_id, e))
Example 30
 def item_img_delete(cls, tapi, shop_id, num_iid, img_id):
     '''Delete an item image.'''
     try:
         tobj = tapi.taobao_item_img_delete(num_iid=num_iid, id=img_id)
     except TopError, e:
         log.error("item_img_delete TopError, e=%s" % (e))
         return None