def __init__(self, **kwargs):
    """Build this report's source data for the requested window.

    Defaults to the 24-hour window ending today (yesterday's midnight to
    today's midnight).
    """
    query_start = kwargs.get(
        'start_query_time', get_today() - datetime.timedelta(days=1))
    query_end = kwargs.get('end_query_time', get_today())
    # Data sets this report pulls from DataBuilder.
    wanted = [
        'users', 'all_data_users', 'first_data_users',
        'experience_users', 'all_data_experience_users',
        'members', 'all_data_members',
        'self_members', 'all_data_self_members',
        'manual_service_members', 'all_data_manual_service_members',
        'user_access_log',
    ]
    self.db_builder = DataBuilder(query_start, query_end, ret_list=wanted)
    self.db_builder.build_data()
    self.origin_data = self.db_builder.origin_data
def get_today_reward_coin_count(self):
    """Sum of all coins awarded today (partner_taskcoinrecord.coin)."""
    # NOTE(review): SQL BETWEEN is inclusive at both ends, so a record
    # stamped exactly at tomorrow's midnight would be counted on both
    # days — confirm record_time never lands precisely on the boundary.
    window_start = get_today()
    window_end = get_today() + datetime.timedelta(days=1)
    sql_str = """select sum(coin) from partner_taskcoinrecord where partner_taskcoinrecord.record_time BETWEEN "{0}" AND "{1}"; """.format(
        window_start, window_end)
    return self.do_sql(sql_str)
def get_today_extra_reward_coin_count(self):
    """Sum of coins awarded today for the three 'extra_*' record types."""
    day_start = get_today()
    day_end = get_today() + datetime.timedelta(days=1)
    # NOTE(review): BETWEEN is inclusive on both ends — see the daily
    # reward query; confirm the boundary cannot double-count.
    sql_str = """select sum(coin) from partner_taskcoinrecord where (record_type = 'extra_taking_work' or record_type = 'extra_interview' or record_type = 'extra_download') and partner_taskcoinrecord.record_time BETWEEN "{0}" AND "{1}"; """.format(
        day_start, day_end)
    return self.do_sql(sql_str)
def get_today_commend_and_download_count(self):
    """Count today's downloads whose resume was uploaded the same day.

    Joins coin records of type 'download' to the matching task resume and
    requires record date == upload date.
    """
    begin = get_today()
    end = get_today() + datetime.timedelta(days=1)
    sql_str = """select count(*) from partner_taskcoinrecord , partner_usertaskresume where partner_taskcoinrecord.record_type = 'download' and partner_taskcoinrecord.task_id = partner_usertaskresume.task_id and partner_taskcoinrecord.upload_resume_id = partner_usertaskresume.resume_id and date_format(partner_taskcoinrecord.record_time,"%Y-%m-%d") = date_format(partner_usertaskresume.upload_time,"%Y-%m-%d") and partner_taskcoinrecord.record_time BETWEEN "{0}" AND "{1}"; """.format(
        begin, end)
    return self.do_sql(sql_str)
def __init__(self, **kwargs):
    """Assemble feed/resume activity data for the report window."""
    begin = kwargs.get(
        'start_query_time', get_today() - datetime.timedelta(days=1))
    finish = kwargs.get('end_query_time', get_today())
    needed = [
        'pub_feeds', 'statistic', 'feed_result', 'resumes',
        'down_resumes', 'send_cards', 'fav_resumes',
        'all_data_user_mark_logs', 'first_data_staffs',
    ]
    self.db_builder = DataBuilder(begin, finish, ret_list=needed)
    self.db_builder.build_data()
    self.origin_data = self.db_builder.origin_data
def __init__(self, **kwargs):
    """Load report data plus the staff username list for filtering."""
    window_start = kwargs.get(
        'start_query_time', get_today() - datetime.timedelta(days=1))
    window_end = kwargs.get('end_query_time', get_today())
    data_keys = [
        'pub_feeds', 'user_access_log', 'resumes', 'down_resumes',
        'send_cards', 'fav_resumes', 'all_data_user_mark_logs',
        'first_data_staff', 'statistic',
    ]
    self.db_builder = DataBuilder(
        window_start, window_end, ret_list=data_keys)
    self.db_builder.build_data()
    self.origin_data = self.db_builder.origin_data
    # Flat list of staff usernames, used to exclude staff activity.
    self.staff_list = self.origin_data.get(
        'first_data_staff').values_list('username', flat=True)
def __init__(self, **kwargs):
    """Collect feed/resume interaction data for the default daily window."""
    start = kwargs.get(
        'start_query_time', get_today() - datetime.timedelta(days=1))
    end = kwargs.get('end_query_time', get_today())
    data_sets = [
        'pub_feeds', 'user_access_log', 'resumes',
        'down_resumes', 'send_cards', 'fav_resumes',
        'all_data_user_mark_logs',
    ]
    self.db_builder = DataBuilder(start, end, ret_list=data_sets)
    self.db_builder.build_data()
    self.origin_data = self.db_builder.origin_data
def __init__(self, **kwargs):
    """Build only the task-system data set for the report window."""
    begin = kwargs.get(
        'start_query_time', get_today() - datetime.timedelta(days=1))
    until = kwargs.get('end_query_time', get_today())
    self.db_builder = DataBuilder(begin, until, ret_list=['task_system'])
    self.db_builder.build_data()
    self.origin_data = self.db_builder.origin_data
def __init__(self, **kwargs):
    """Gather weixin (WeChat) user/message data for the report window."""
    since = kwargs.get(
        'start_query_time', get_today() - datetime.timedelta(days=1))
    until = kwargs.get('end_query_time', get_today())
    wanted = [
        'users', 'weixin_users', 'weixin_msgs',
        'all_data_self_members', 'all_data_manual_service_members',
        'user_access_log',
    ]
    self.db_builder = DataBuilder(since, until, ret_list=wanted)
    self.db_builder.build_data()
    self.origin_data = self.db_builder.origin_data
def __init__(self, **kwargs):
    """Pull the task-system data set, defaulting to yesterday→today."""
    first = kwargs.get(
        'start_query_time', get_today() - datetime.timedelta(days=1))
    last = kwargs.get('end_query_time', get_today())
    self.db_builder = DataBuilder(
        first,
        last,
        ret_list=['task_system'],
    )
    self.db_builder.build_data()
    self.origin_data = self.db_builder.origin_data
def update_today_cache(cls):
    """Refresh each queued feed's per-day resume cache.

    Feeds queued under ``cls.update_cache_key`` get today's FeedResult
    resume ids merged into their ``<feed_id>_today_cache`` entry; feeds
    with no results today remain queued for a later pass.
    """
    pending = cache.get(cls.update_cache_key, [])
    today = get_today()
    tomorrow = get_tomommow()
    refreshed = []
    for feed_id in pending:
        feed_oid = get_oid(feed_id)
        cache_key = feed_id + '_today_cache'
        cached_ids = cache.get(cache_key, [])
        results = FeedResult.objects(
            feed=feed_oid,
            display_time__gte=today,
            display_time__lt=tomorrow,
        )
        found = [str(item.resume.id) for item in results]
        if found:
            # Union with whatever is already cached; timeout=0 = no expiry.
            merged = list(set(cached_ids) | set(found))
            cache.set(cache_key, merged, timeout=0)
            refreshed.append(feed_id)
    leftover = list(set(pending) - set(refreshed))
    cache.set(cls.update_cache_key, leftover, timeout=0)
def get(self, request, token):
    """Record one promotion-link click for the current user, today.

    Validates that *token* belongs to the user, then increments today's
    PromotionClickRecord (creating it on first click of the day).
    """
    user = request.user
    today = get_today()
    promotion_token = get_object_or_none(
        PromotionToken,
        promotion_user=user,
        token=token,
    )
    if not promotion_token:
        return JsonResponse({
            'status': 'token_error',
            'msg': 'token 不存在',
        })
    click_record = get_object_or_none(
        PromotionClickRecord,
        user=user,
        click_date=today,
    )
    if click_record:
        click_record.click_times += 1
    else:
        # NOTE(review): relies on the model defaulting click_date to today;
        # otherwise the lookup above will never find this row — confirm.
        click_record = PromotionClickRecord(
            user=user,
        )
    # Save on BOTH paths so the incremented counter is persisted too
    # (previously only the newly-created record was saved).
    click_record.save()
    return JsonResponse({
        'status': 'ok',
        'msg': u'记录成功',
    })
def get_partner_resume(username):
    """Return up to two of yesterday's task resumes for *username*'s feeds.

    Resumes with resume_status == 5 are excluded; each returned resume is
    annotated with its most recent work and education entries.
    """
    today = get_today()
    yesterday = get_yesterday()
    # Yesterday's uploads for this user's feed tasks, newest first, capped at 2.
    user_task_resume = UserTaskResume.objects.select_related(
        'task', 'task__feed', 'resume',
    ).filter(
        task__feed__user__username=username,
        upload_time__gte=yesterday,
        upload_time__lt=today,
    ).exclude(
        resume_status=5,
    ).order_by('-id')[:2]
    # NOTE(review): count() runs after the [:2] slice, so it is at most 2.
    # If the caller expects the total matching count, the slice must come
    # after counting — confirm intent.
    count = user_task_resume.count()
    display_task_resume = user_task_resume[:2]
    for tr in display_task_resume:
        # Annotate with the most recent work experience, if any.
        last_work_query = tr.resume.resume_works.all().order_by('-start_time')[:1]
        if last_work_query:
            tr.resume.last_work = last_work_query[0]
        # Annotate with the most recent education entry, if any.
        last_edu_query = tr.resume.resume_educations.all().order_by('-start_time')[:1]
        if last_edu_query:
            tr.resume.last_edu = last_edu_query[0]
    return {
        'partner_resumes': display_task_resume,
        'partner_resumes_count': count,
    }
def add_read_rate(day_num, user_feeds):
    """Read-rate stats for *user_feeds* over the trailing *day_num* days.

    Returns published and read FeedResult counts plus the percentage as a
    '12.34%' string (or all zeros when nothing was published).
    """
    since = get_today() - datetime.timedelta(days=day_num)
    feed_ids = [uf.feed.id for uf in user_feeds]
    total = FeedResult.objects.filter(
        feed__in=feed_ids,
        published=True,
        display_time__gt=since,
    ).count()
    # No published results: avoid a zero division, report all zeros.
    if total == 0:
        return {
            'all_feed_count': 0,
            'all_read_feed_count': 0,
            'read_rate': 0,
        }
    read_total = FeedResult.objects.filter(
        feed__in=feed_ids,
        user_read_status="read",
        display_time__gt=since,
    ).count()
    percent = "{:.2f}%".format(100 * (float(read_total) / float(total)))
    return {
        'all_feed_count': total,
        'all_read_feed_count': read_total,
        'read_rate': percent,
    }
def main(): today = get_today() tomorrow = get_tomommow() expire_feeds = Feed2.objects( feed_expire_time__gte=today, feed_expire_time__lt=tomorrow, feed_type=1, ) for feed in expire_feeds: if is_deleted_feed(feed): continue email = feed.username subject = get_email_subject(feed) message = render_to_string( 'email-template/activate_tips.html', {'feed': feed} ) result = asyn_bat_mail( email, subject, message, ) if result.get('status') == 'success': print '%s send email success' % email else: print '%s send email error, result %s' % (email, result)
def update_user_vip(self, user_vip, admin=False):
    """Activate *user_vip*: retire older vips, stamp times, purge pending applies."""
    user_vip.apply_status = 'success'
    user = user_vip.user
    # Guide switch flips only for auto-active roles or admin approval.
    if user_vip.vip_role.auto_active or admin:
        self.update_guide_switch(user)
    # Retire whatever vip is currently active for this user.
    UserVip.objects.filter(user=user, is_active=True).update(is_active=False)
    now = datetime.datetime.now()
    # Expiry counts whole months from midnight today, not from `now`.
    expire_time = get_today() + relativedelta(
        months=user_vip.vip_role.service_time)
    user_vip.is_active = True
    user_vip.active_time = now
    user_vip.expire_time = expire_time
    user_vip.save()
    # Drop any other still-pending applications.
    UserVip.objects.filter(apply_status='applying', user=user).delete()
    return user_vip
def update_user_vip(self, user_vip, admin=False):
    """Approve and activate a vip application for its user.

    Deactivates any currently-active vip, stamps activation/expiry on
    *user_vip*, and clears the user's remaining 'applying' rows.
    """
    user_vip.apply_status = 'success'
    user = user_vip.user
    should_switch = user_vip.vip_role.auto_active or admin
    if should_switch:
        self.update_guide_switch(user)
    active_vips = UserVip.objects.filter(user=user, is_active=True)
    active_vips.update(is_active=False)
    now = datetime.datetime.now()
    months = user_vip.vip_role.service_time
    user_vip.is_active = True
    user_vip.active_time = now
    # Expiry is anchored at midnight today plus the role's service months.
    user_vip.expire_time = get_today() + relativedelta(months=months)
    user_vip.save()
    UserVip.objects.filter(user=user, apply_status='applying').delete()
    return user_vip
def add_update_cache(feed):
    """Queue today's resume ids of *feed* into the feed update cache."""
    today = get_today()
    tomorrow = get_tomommow()
    results_today = FeedResult.objects(
        feed=feed.id,
        display_time__gte=today,
        display_time__lt=tomorrow,
    )
    sids = [str(r.resume.id) for r in results_today]
    FeedCacheUtils.add_update_cache(feed.id, sids)
def get(self, request):
    """Today's schedule badge: custom schedules plus interview alarms."""
    user = request.user
    day_start, day_end = get_today(), get_tomorrow()
    schedules = AdminSchedule.objects.filter(
        user=user,
        start_time__gt=day_start,
        start_time__lt=day_end,
    ).count()
    alarms = InterviewAlarm.objects.filter(
        buy_record__user__crm_client_info__admin=user,
        interview_time__gt=day_start,
        interview_time__lt=day_end,
    ).count()
    return JsonResponse({
        'all_count': schedules + alarms,
        'custom_schedule_count': schedules,
        'alarm_count': alarms,
    })
def __init__(self, **kwargs):
    """Assemble task/coin/resume-upload data for the report window."""
    from_time = kwargs.get(
        'start_query_time', get_today() - datetime.timedelta(days=1))
    to_time = kwargs.get('end_query_time', get_today())
    data_keys = [
        'tasks', 'all_data_tasks',
        'upload_resumes', 'all_data_upload_resumes',
        'task_coin_records', 'all_data_task_coin_records',
        'do_tasks', 'all_data_do_tasks',
    ]
    self.db_builder = DataBuilder(from_time, to_time, ret_list=data_keys)
    self.db_builder.build_data()
    self.origin_data = self.db_builder.origin_data
def add_update_cache(feed):
    """Register *feed*'s resumes displayed today with FeedCacheUtils."""
    window_start = get_today()
    window_end = get_tomommow()
    query = FeedResult.objects(
        feed=feed.id,
        display_time__gte=window_start,
        display_time__lt=window_end,
    )
    ids = []
    for fr in query:
        ids.append(str(fr.resume.id))
    FeedCacheUtils.add_update_cache(feed.id, ids)
def get_task_type(self):
    """Return the report granularities due today.

    'today' always runs; 'week' is added on Mondays and 'month' on the
    first day of the month (when get_today() is a midnight datetime).
    """
    task_type = ['today']
    today = get_today()
    # Monday check — exactly equivalent to the old
    # `today == today + timedelta(days=-today.weekday())` round-trip.
    if today.weekday() == 0:
        task_type.append('week')
    # Month start: compare against midnight on the 1st, as before.
    if today == datetime.datetime(today.year, today.month, 1):
        task_type.append('month')
    return task_type
def renew(cls):
    """Extend experience users expiring within 15 days by three months."""
    cutoff = get_today() + datetime.timedelta(days=15)
    expiring = UserVip.objects.select_related('user', ).filter(
        is_active=True,
        vip_role__code_name='experience_user',
        expire_time__lt=cutoff,
    )
    for vip in expiring:
        vip.expire_time += relativedelta(months=3)
        vip.save()
        PackageUtils.update_uservip_package(vip)
    # Queryset was fully iterated above, so count() reuses the cached rows.
    return expiring.count()
def query_reco_time(self, query_cond):
    """Narrow *query_cond* to the requested recommendation window.

    Skipped entirely for title-match queries. ``reco_time`` (days, taken
    from the request) is clamped to at most 30; zero or absent leaves the
    condition unchanged. Returns the (possibly updated) dict.
    """
    if get_int(self.request.GET.get('title_match', 0)):
        return query_cond
    # abs() guarantees a non-negative value, so only the zero case bails
    # (the old `reco_time < 0` test here was dead code).
    reco_time = abs(get_int(self.request.GET.get('reco_time', 0)))
    if not reco_time:
        return query_cond
    reco_time = min(reco_time, 30)
    start_date = get_today() - datetime.timedelta(days=reco_time)
    query_cond.update({'display_time__gte': start_date})
    return query_cond
def pinbot_daily_task(self):
    """Build (or refresh) yesterday's PinbotDailyReport and save it."""
    yesterday = get_today() - datetime.timedelta(days=1)
    report = get_object_or_none(PinbotDailyReport, report_date=yesterday)
    if not report:
        report = PinbotDailyReport(report_date=yesterday)
    self.user_daily_dash(report, yesterday)
    self.get_pkg_dash(report, yesterday)
    report.save()
    return report
def get(self, request):
    """Dump every UI-check counter recorded since START_DATE."""
    ret = {}
    days_span = (get_today() - self.START_DATE).days + 1
    for offset in xrange(days_span):
        date_key = (
            self.START_DATE + datetime.timedelta(days=offset)
        ).strftime('%Y-%m-%d')
        for ui_check in self.STAT_KEYS:
            cache_key = '{0}_{1}_{2}'.format(
                self.REDIS_KEY_PREFIX,
                date_key,
                ui_check,
            )
            ret[cache_key] = cache.get(cache_key, 0)
    return JsonResponse(ret)
def get(self, request):
    """Record (idempotently per day) that the user closed the easter egg."""
    username = request.user.username
    start, end = get_today(), get_tomorrow()
    already_closed = CloseEasterRecord.objects.filter(
        username=username,
        close_time__gte=start,
        close_time__lt=end,
    ).exists()
    if not already_closed:
        CloseEasterRecord.objects.create(username=username, )
    return JsonResponse({
        'status': 'ok',
        'msg': 'ok',
    })
def has_unmark_record(cls, user):
    """Whether *user* still has look-ups older than 7 days needing a mark.

    Covers never-marked resumes and ones whose report failed
    (accu_status == 3). Records before MARK_TIME are grandfathered out.
    """
    time_limit = get_today() - datetime.timedelta(days=7)
    # Marking only became mandatory at MARK_TIME; nothing to chase before it.
    if time_limit < MARK_TIME:
        return False
    # .exists() asks the database a boolean question instead of fetching
    # rows just to test truthiness (the old `True if qs else False`).
    return ResumeBuyRecord.objects.filter(
        Q(resume_mark=None) | Q(resume_mark__accu_status=3),
        user=user,
        finished_time__gt=MARK_TIME,
        finished_time__lte=time_limit,
        status='LookUp',
    ).exists()
def get_new_interview_count(self):
    """Count newly interview-marked downloads not logged before yesterday.

    A download counts when its current mark is an interview-stage mark and
    its buy_record has no matching mark log dated on/before yesterday.
    """
    condition = [
        'invite_interview', 'join_interview', 'break_invite',
        'send_offer', 'reject_offer'
    ]
    # NOTE(review): select_related usually takes a field name, not a model
    # name ('ResumeMarkSetting') — confirm this argument is intentional.
    mark_ids = self.origin_data.get('down_resumes').select_related(
        'ResumeMarkSetting'
    ).filter(
        current_mark__code_name__in=condition
    ).values_list('buy_record', flat=True)
    logged = self.origin_data.get('all_data_user_mark_logs').select_related(
        'ResumeMarkSetting'
    ).filter(
        mark__code_name__in=condition,
        mark_time__lte=get_today() - datetime.timedelta(days=1)
    ).values_list('resume_mark', flat=True)
    # Materialize once into a set: O(1) membership instead of re-iterating
    # the ValuesListQuerySet for every candidate (was O(n*m)).
    logged_set = set(logged)
    return sum(1 for val in mark_ids if val not in logged_set)
def add_pub_data(self, feed_oid, resume_oids, display_time):
    """Attach *resume_oids* to today's PubFeedData entry for *feed_oid*.

    Creates the entry on the first publish of the day; otherwise merges the
    resumes into the existing one (add_to_set) and, when publishing for
    tomorrow, pushes display_time forward. Returns False for empty input,
    True otherwise.
    """
    if not resume_oids:
        return False
    today = get_today()
    tomorrow = get_tomommow()
    # NOTE(review): assumes the feed exists — Feed2.objects(...)[0] raises
    # IndexError otherwise; confirm callers guarantee this.
    feed = Feed2.objects(id=feed_oid)[0]
    email = feed.username
    pub_admin = self.request.user.username
    # Look for a publish record already created today.
    pub_feed = PubFeedData.objects(
        feed=feed_oid,
        pub_time__gte=today,
        pub_time__lt=tomorrow,
    ).first()
    if pub_feed:
        # Only move display_time when explicitly publishing for tomorrow.
        if display_time == tomorrow:
            update_kwargs = {
                'set__display_time': tomorrow,
            }
        else:
            update_kwargs = {}
        PubFeedData.objects(
            feed=feed_oid,
            pub_time__gte=today,
            pub_time__lt=tomorrow,
        ).update(set__pub_admin=pub_admin,
                 add_to_set__resumes=resume_oids,
                 **update_kwargs)
    else:
        pub_feed = PubFeedData(
            email=email,
            pub_admin=pub_admin,
            feed=feed_oid,
            resumes=resume_oids,
            pub_time=datetime.datetime.now(),
            display_time=display_time,
        )
        pub_feed.save()
    return True
def main(): old_feed_results = FeedResult.objects( published=True, display_time=None, ).order_by('-calc_time') today = get_today() tomorrow = get_tomommow() caculate_time = today + datetime.timedelta(hours=14) for feed_result in old_feed_results: calc_time = feed_result.calc_time if calc_time < caculate_time: feed_result.display_time = today else: feed_result.display_time = tomorrow feed_result.user_read_status = get_read_status(feed_result) feed_result.save() print feed_result.id, calc_time, feed_result.user_read_status, feed_result.display_time
def get(self, request):
    """Close the easter egg for this user; at most one record per day."""
    username = request.user.username
    today = get_today()
    tomorrow = get_tomorrow()
    closed_today = CloseEasterRecord.objects.filter(
        username=username,
        close_time__gte=today,
        close_time__lt=tomorrow,
    )
    if not closed_today.exists():
        CloseEasterRecord.objects.create(
            username=username,
        )
    return JsonResponse({'status': 'ok', 'msg': 'ok'})
def get_new_interview_count(self):
    """New interview-stage marks among downloads, minus already-logged ones.

    Compares buy_record ids carrying an interview-related current mark
    against mark-log resume_mark ids recorded up to yesterday.
    """
    interview_marks = [
        'invite_interview', 'join_interview', 'break_invite',
        'send_offer', 'reject_offer'
    ]
    marked = self.origin_data.get('down_resumes').select_related(
        'ResumeMarkSetting').filter(
        current_mark__code_name__in=interview_marks).values_list(
        'buy_record', flat=True)
    cutoff = get_today() - datetime.timedelta(days=1)
    # Hoist the log ids into a set: membership drops from O(m) queryset
    # re-iteration per candidate to O(1).
    logged_before = set(
        self.origin_data.get('all_data_user_mark_logs').select_related(
            'ResumeMarkSetting').filter(
            mark__code_name__in=interview_marks,
            mark_time__lte=cutoff).values_list('resume_mark', flat=True))
    return len([val for val in marked if val not in logged_before])
def main(): old_feed_results = FeedResult.objects( published=True, display_time=None, ).order_by('-calc_time') today = get_today() tomorrow = get_tomommow() caculate_time = today + datetime.timedelta(hours=14) for feed_result in old_feed_results: calc_time = feed_result.calc_time if calc_time < caculate_time: feed_result.display_time = today else: feed_result.display_time = tomorrow feed_result.user_read_status = get_read_status(feed_result) feed_result.save() print feed_result.id, calc_time, feed_result.user_read_status, feed_result.display_time
def renew(cls):
    """Auto-renew soon-to-expire experience vips by another quarter."""
    threshold = get_today() + datetime.timedelta(days=15)
    candidates = UserVip.objects.select_related(
        'user',
    ).filter(
        is_active=True,
        vip_role__code_name='experience_user',
        expire_time__lt=threshold,
    )
    for record in candidates:
        record.expire_time += relativedelta(months=3)
        record.save()
        PackageUtils.update_uservip_package(record)
    # The loop above populated the queryset cache, so this counts the
    # renewed rows rather than re-querying.
    return candidates.count()
def get(self, request):
    """Bump today's counter for a whitelisted UI-check key."""
    ui_check = request.GET.get('ui_check', '')
    if ui_check in self.STAT_KEYS:
        date_key = get_today().strftime('%Y-%m-%d')
        cache_key = '{0}_{1}_{2}'.format(
            self.REDIS_KEY_PREFIX,
            date_key,
            ui_check,
        )
        # NOTE(review): get-then-set is not atomic; concurrent requests can
        # lose increments — cache.incr would be safer if the backend allows.
        count = cache.get(cache_key, 0)
        cache.set(cache_key, count + 1, None)
    return JsonResponse({
        'status': 'ok',
        'msg': 'ok',
    })
def main(): start_time = datetime.datetime(2014, 01, 01) today = get_today() while 1: if start_time >= today: print 'dash done, last dash time', start_time.strftime('%Y-%m-%d') break report = get_object_or_none( PinbotDailyReport, report_date=start_time ) if not report: report = PinbotDailyReport( report_date=start_time ) user_daily_dash(report, start_time) get_pkg_dash(report, start_time) report.save() print start_time.strftime('%Y-%m-%d'), 'dash success' start_time = start_time + datetime.timedelta(days=1)
def get(self, request):
    """Tell the client whether unmarked look-ups (>2 days old) remain."""
    time_limit = get_today() - datetime.timedelta(days=2)
    if time_limit < MARK_TIME:
        # Marking policy starts at MARK_TIME; nothing can be overdue yet.
        has_mark = False
    else:
        pending = ResumeBuyRecord.objects.filter(
            user=request.user,
            status='LookUp',
            finished_time__gt=MARK_TIME,
            finished_time__lte=time_limit,
        ).exclude(
            resume_mark__current_mark__end_status=True,
            resume_mark__accu_status__in=(0, 1, 2),
        )
        has_mark = bool(pending)
    return JsonResponse({
        'status': 'ok',
        'has_mark': has_mark,
        'redirect_url': '/transaction/unmark_resume/',
        'msg': 'ok',
    })
def get(self, request):
    """Check whether the user owes marks on look-ups older than two days."""
    cutoff = get_today() + datetime.timedelta(days=-2)
    if cutoff < MARK_TIME:
        # Nothing is overdue before the marking policy took effect.
        has_mark = False
    else:
        user = request.user
        overdue = ResumeBuyRecord.objects.filter(
            user=user,
            status='LookUp',
            finished_time__gt=MARK_TIME,
            finished_time__lte=cutoff,
        ).exclude(
            resume_mark__current_mark__end_status=True,
            resume_mark__accu_status__in=(0, 1, 2),
        )
        has_mark = True if overdue else False
    return JsonResponse({
        'status': 'ok',
        'has_mark': has_mark,
        'redirect_url': '/transaction/unmark_resume/',
        'msg': 'ok',
    })
def __init__(self):
    """Default run: report on yesterday, with every configured task."""
    # Which granularities are due today ('today', maybe 'week'/'month').
    self.task_type = self.get_task_type()
    # Full task catalogue; callers may narrow it via set_task_name().
    self.task_doc = TASK_MAP
    self.report_date = get_today() - datetime.timedelta(days=1)
class DriversTask(object):
    """Driver that builds daily/weekly/monthly report rows from TASK_MAP."""

    # NOTE(review): this dict is a class attribute evaluated at import time
    # (get_today()/get_pre_week()/get_pre_month() run once, when the module
    # loads) and set_report_date mutates it in place, so changes are shared
    # by every instance — confirm that is intended.
    task_type_date_range_map = {
        'today': {
            'start_query_time': get_today() - datetime.timedelta(days=1),
            'end_query_time': get_today()
        },
        'week': {
            'start_query_time': get_pre_week()[6],
            'end_query_time': get_pre_week()[0]
        },
        'month': {
            'start_query_time': get_pre_month()[0],
            'end_query_time': get_pre_month()[1]
        }
    }

    def __init__(self):
        # Default run: report on yesterday with every task in TASK_MAP.
        self.report_date = get_today() - datetime.timedelta(days=1)
        self.task_type = self.get_task_type()
        self.task_doc = TASK_MAP

    def set_report_date(self, report_date):
        """Re-target the 'today' window at *report_date* .. +1 day."""
        self.report_date = report_date
        self.task_type_date_range_map.get(
            'today')['start_query_time'] = report_date
        self.task_type_date_range_map.get(
            'today')['end_query_time'] = report_date + datetime.timedelta(
            days=1)

    def set_task_type(self, task_type):
        """Restrict the run to a single granularity ('today'/'week'/'month')."""
        self.task_type = [task_type]

    def set_task_name(self, task_name):
        """Restrict the run to one named task from TASK_MAP."""
        self.task_doc = {}
        self.task_doc[task_name] = TASK_MAP.get(task_name)

    def make_data(self):
        """Build and persist every selected report for each due granularity."""
        for task_type in self.task_type:
            task_doc = self.task_doc.copy()
            start_query_time = self.task_type_date_range_map.get(
                task_type)['start_query_time']
            end_query_time = self.task_type_date_range_map.get(
                task_type)['end_query_time']
            # Each granularity only keeps the tasks that belong to it.
            if task_type == 'today':
                task_doc.pop('month_data', '')
                task_doc.pop('week_data', '')
            if task_type == 'week':
                task_doc.pop('month_data', '')
                task_doc.pop('weixin_data', '')
                task_doc.pop('user_data', '')
                task_doc.pop('resume_data', '')
                task_doc.pop('resume_data_without_staff', '')
                task_doc.pop('partner', '')
                task_doc.pop('feed_data', '')
                task_doc.pop('core_data', '')
            if task_type == 'month':
                task_doc.pop('week_data', '')
                task_doc.pop('weixin_data', '')
                task_doc.pop('user_data', '')
                task_doc.pop('resume_data', '')
                task_doc.pop('resume_data_without_staff', '')
                task_doc.pop('partner', '')
                task_doc.pop('feed_data', '')
                task_doc.pop('core_data', '')
            print task_type, start_query_time, end_query_time
            for key, val in task_doc.items():
                hour = datetime.datetime.now().hour
                # Only build this report inside its configured crontab window.
                if not (hour >= TASK_MAP[key].get('crontab_time')[0]
                        and hour <= TASK_MAP[key].get('crontab_time')[1]):
                    continue
                self.model_class = TASK_MAP[key].get('model_class')
                # Instantiate the driver for this task over the query window.
                driver_class = TASK_MAP[key].get('driver_class')(
                    start_query_time=start_query_time,
                    end_query_time=end_query_time)
                self.db_result = {'report_date': self.report_date}
                # Each fun_list entry maps a driver method to a result field.
                for fun_key, fun_field in TASK_MAP[key].get(
                        'fun_list').items():
                    build_fun = getattr(driver_class, fun_key)
                    ret = build_fun()
                    self.db_result.update({fun_field: ret})
                print 'model_class={0} task={1}, key={2}'.format(
                    self.model_class, task_type, key)
                print self.db_result
                self.save_db()

    def save_db(self):
        """Insert or update the report row for self.report_date."""
        today_report = self.model_class.objects.filter(
            report_date=self.report_date,
        )
        if today_report:
            today_report.update(**self.db_result)
        else:
            report = self.model_class(**self.db_result)
            report.save()

    def get_task_type(self):
        """获取所有task类型"""
        # Returns the due granularities: always 'today', plus 'week' on
        # Mondays and 'month' on the first of the month.
        task_type = []
        task_type.append('today')
        today = get_today()
        # Monday check: subtracting weekday() days only yields today itself
        # when weekday() == 0.
        """判断当天是否为星期一"""
        if today == today + datetime.timedelta(days=-today.weekday()):
            task_type.append('week')
        # First-of-month check against midnight on the 1st.
        """判断当天是否为月初"""
        if today == datetime.datetime(today.year, today.month, 1):
            task_type.append('month')
        return task_type
def get_new_feed(self):
    """Count feeds created between yesterday and right now."""
    window_start = get_today() - datetime.timedelta(days=1)
    return self.origin_data.get('feeds').filter(
        add_time__gte=window_start,
        add_time__lt=datetime.datetime.now(),
    ).count()
def get_new_reg_weixin_user_count(self):
    """Weixin users whose underlying account registered since yesterday."""
    since = get_today() - datetime.timedelta(days=1)
    return self.origin_data.get('weixin_users').filter(
        user__date_joined__gte=since,
    ).count()
def get_new_reg_weixin_user_count(self):
    """Count weixin users joined within the last day."""
    cutoff = get_today() - datetime.timedelta(days=1)
    weixin_users = self.origin_data.get('weixin_users')
    return weixin_users.filter(user__date_joined__gte=cutoff).count()
os.environ['DJANGO_SETTINGS_MODULE'] = 'Pinbot.settings' from pin_utils.django_utils import ( get_today ) from Pinbot.settings import ( settings ) REPORT_SCHEMA = { 'wexin_daily_report': { 'crontab_time': (0, 18), 'dash_date': { 'first_time': datetime.datetime(2010, 01, 01), 'all_time': datetime.datetime(2015, 03, 28), 'start_query_time': get_today() - datetime.timedelta(days=1), 'end_query_time': get_today() }, 'report_date': get_today() - datetime.timedelta(days=1), 'report_table_name': 'WeixinDailyReportData', 'schema': { 'new_bind_count': { 'data_type': 'db', 'table': 'WeixinUser', 'fields': [ { 'is_bind': True, 'create_time__gt': 'user__date_joined' } ],
def modify_feed_result(request):
    """Adjust a feed's recommendation result, or manually add one.

    Two uses: 1) tweak/override a machine-recommended result;
    2) manually push a resume into a feed's results.
    Driven entirely by GET parameters (feed_id, resume_id, reco_index,
    feedback[]); a negative reco_index records "not interested".
    """
    get_data = request.GET
    feed_id = get_data.get('feed_id')
    resume_id = get_data.get('resume_id')
    if not (feed_id and resume_id):
        data = produce_return_json(data='feed_id or resume_id is None',
                                   status=False)
        return JsonResponse(data)
    reco_index = get_data.get('reco_index', 100)
    feedback_list = get_data.getlist('feedback[]', [])
    user = request.user
    username = user.username
    now = datetime.datetime.now()
    feed = ObjectId(feed_id)
    resume = ObjectId(resume_id)
    reco_index = int(reco_index)
    # Audit trail of every manual push/adjustment.
    manual_push_resume = ManualPushResume(
        username=username,
        feed=feed,
        resume=resume,
        reco_index=reco_index,
        op_time=now,
    )
    manual_push_resume.save()
    recommend = True if reco_index >= 0 else False
    resume_update_time = get_today()
    feed_results = FeedResult.objects.filter(feed=feed, resume=resume)
    if not feed_results:
        # Well suited for recommendation clicks coming from search results.
        feed_result = FeedResult(
            feed=feed,
            resume=resume,
            algorithm='search_result'
        )
        feed_results = [feed_result]
    for feed_result in feed_results:
        if not feed_result.is_recommended:
            feed_result.is_manual = True
        feed_result.is_recommended = recommend
        feed_result.resume_update_time = resume_update_time
        if reco_index >= 0:
            feed_result.calc_time = now
            feed_result.manual_ensure_time = now
        else:
            # The task system reads user_feedback_time: the moment the user
            # said "not interested".
            feed_result.user_feedback_time = now
        feed_result.reco_index += reco_index
        # Record the admin account; if the user clicked "unsuitable"
        # themselves, this stores the user's own account instead.
        feed_result.admin = username
        feed_result.feedback_list = feedback_list
        feed_result._data['is_staff'] = 1 if request.user.is_staff else 0
        # Add cls_score: staff approvals also pin the classifier score.
        if reco_index >= 0 and request.user.is_staff:
            feed_result.score['cls_score'] = 100
        feed_result.save()
    data = produce_return_json()
    return HttpResponse(data, 'application/json')
# coding: utf-8 import os os.environ['DJANGO_SETTINGS_MODULE'] = 'Pinbot.settings' import datetime from users.models import UserProfile from pin_utils.django_utils import ( get_today ) FROM_DATE = datetime.datetime(2015, 7, 9) TODAY = get_today() FILE_NAME = 'user_acc.csv' RANGE_DAYS = (TODAY - FROM_DATE).days CSV_FILE = 'keep.csv' def login_user_group_by_date(): with open(FILE_NAME, 'r') as login_file: login_user_list = login_file.readlines() login_user_mapper = { (FROM_DATE + datetime.timedelta(days=i)).strftime('%Y-%m-%d'): [] for i in xrange(RANGE_DAYS) }
# coding: utf-8 import os os.environ['DJANGO_SETTINGS_MODULE'] = 'Pinbot.settings' import datetime from users.models import UserProfile from pin_utils.django_utils import (get_today) FROM_DATE = datetime.datetime(2015, 7, 9) TODAY = get_today() FILE_NAME = 'user_acc.csv' RANGE_DAYS = (TODAY - FROM_DATE).days CSV_FILE = 'keep.csv' def login_user_group_by_date(): with open(FILE_NAME, 'r') as login_file: login_user_list = login_file.readlines() login_user_mapper = { (FROM_DATE + datetime.timedelta(days=i)).strftime('%Y-%m-%d'): [] for i in xrange(RANGE_DAYS) } for i in login_user_list: d, u, _ = i.split(',') d = d.strip()