def get(self, request):
    """Return the ref-log bar chart config as JSON, anchored at ?date or now."""
    raw_date = request.GET.get("date")
    anchor = pendulum.parse(raw_date) if raw_date else get_current_datetime()
    date_list = [dt.date() for dt in gen_datetime_list(anchor)]
    configs = DashBoardManger.gen_ref_log_bar_chart_configs(request.user.id, date_list)
    return JsonResponse(configs)
def get(self, request):
    """Return bar chart configs for a 10-day window around ?date (or now)."""
    raw_date = request.GET.get("date")
    anchor = pendulum.parse(raw_date) if raw_date else get_current_datetime()
    # Window covers 7 days before the anchor through 2 days after (10 dates).
    window = [anchor.add(days=offset).date() for offset in range(-7, 3)]
    return JsonResponse(UserRefLog.gen_bar_chart_configs(request.user.id, window))
def get_or_create_order(cls, user, amount):
    """Return an unexpired pending order for (user, amount), creating one if needed.

    NOTE: only Alipay is supported for now, so the payment channel is hard-coded.
    Serialized by a per-user lock so concurrent requests cannot create
    duplicate orders for the same user.
    """
    with lock.user_create_order_lock(user.id):
        now = get_current_datetime()
        # Reuse an existing unpaid order if it has not expired yet.
        order = cls.get_not_paid_order_by_amount(user, amount)
        if order and order.expired_at > now:
            return order
        with transaction.atomic():
            out_trade_no = cls.gen_out_trade_no()
            # Pre-create the trade at the gateway to obtain a payment QR code.
            trade = pay.trade_precreate(
                out_trade_no=out_trade_no,
                total_amount=amount,
                subject=settings.ALIPAY_TRADE_INFO.format(amount),
                timeout_express=cls.DEFAULT_ORDER_TIME_OUT,
                notify_url=cls.ALIPAY_CALLBACK_URL,
            )
            qrcode_url = trade.get("qr_code")
            # NOTE(review): local expiry is hard-coded to 10 minutes while the
            # gateway timeout uses DEFAULT_ORDER_TIME_OUT — confirm they agree.
            order = cls.objects.create(
                user=user,
                status=cls.STATUS_CREATED,
                out_trade_no=out_trade_no,
                amount=amount,
                qrcode_url=qrcode_url,
                expired_at=now.add(minutes=10),
            )
        return order
def gen_doughnut_config(dt_list):
    """Build the income doughnut-chart config (today's vs total finished orders)."""
    now = utils.get_current_datetime()

    def _finished_amount(start, end):
        # Sum finished orders created within [start, end]; fall back to "0".
        qs = sm.UserOrder.objects.filter(
            status=sm.UserOrder.STATUS_FINISHED,
            created_at__range=[start, end],
        )
        return qs.aggregate(amount=models.Sum("amount"))["amount"] or "0"

    today_amount = _finished_amount(now.start_of("day"), now.end_of("day"))
    total_amount = _finished_amount(
        dt_list[0].start_of("day"), dt_list[-1].end_of("day")
    )
    return {
        "title": f"总收入{total_amount}元",
        "labels": ["总收入", "今日收入"],
        "data": [
            int(decimal.Decimal(total_amount)),
            int(decimal.Decimal(today_amount)),
        ],
        "data_title": "收入分析",
    }
def get(self, request):
    """Return the traffic line-chart config for the requested node as JSON."""
    node_id = request.GET.get("node_id", 0)
    dt_list = gen_datetime_list(get_current_datetime())
    configs = DashBoardManger.gen_traffic_line_chart_configs(
        request.user.pk, node_id, dt_list
    )
    return JsonResponse(configs)
def get(self, request):
    """Return a 7-day traffic line-chart config (oldest day first) as JSON."""
    node_id = request.GET.get("node_id", 0)
    node_type = request.GET.get("node_type", "ss")
    now = get_current_datetime()
    # Oldest first: now-6 days ... now.
    last_week = [now.subtract(days=offset) for offset in (6, 5, 4, 3, 2, 1, 0)]
    configs = UserTrafficLog.gen_line_chart_configs(
        request.user.pk, node_type, node_id, last_week
    )
    return JsonResponse(configs)
def get_user_online_device_count(cls, user, minutes=10):
    """Count distinct client IPs seen for *user* within the last *minutes*.

    Args:
        user: user whose recent connection logs are inspected.
        minutes: lookback window in minutes (default 10).

    Returns:
        int: number of unique IPs, used as the online device count.
    """
    now = utils.get_current_datetime()
    # subtract() is clearer than add(minutes * -1); same instant.
    window_start = now.subtract(minutes=minutes)
    ips = {
        row["ip"]
        for row in cls.objects.filter(
            user=user, created_at__range=[window_start, now]
        ).values("ip")
    }
    return len(ips)
def post(self, request, node_id):
    """Ingest a vmess node's user-traffic report, updating counters in bulk.

    All writes are batched (bulk_create / bulk_update) and the node row is
    saved at most once, so per-row model signals are avoided.
    """
    node = VmessNode.get_or_none_by_node_id(node_id)
    if not node:
        return HttpResponseNotFound()
    log_time = get_current_datetime()
    node_total_traffic = 0
    need_clear_cache = False
    trafficlog_model_list = []
    user_model_list = []
    for log in request.json["user_traffics"]:
        user_id = log["user_id"]
        u = int(log["ut"] * node.enlarge_scale)
        d = int(log["dt"] * node.enlarge_scale)
        # Per-user traffic increments
        user = User.get_by_pk(user_id)
        user.download_traffic += d
        user.upload_traffic += u
        user.last_use_time = log_time
        user_model_list.append(user)
        if user.overflow:
            need_clear_cache = True
        # Per-user traffic log record.
        # NOTE(review): download_traffic is assigned `u` (from "ut") and
        # upload_traffic `d` (from "dt") — the same swap appears in the other
        # sync endpoints, so it may be a deliberate perspective flip; confirm
        # before "fixing".
        trafficlog_model_list.append(
            UserTrafficLog(
                node_type=UserTrafficLog.NODE_TYPE_VMESS,
                node_id=node_id,
                user_id=user_id,
                download_traffic=u,
                upload_traffic=d,
            )
        )
        # Node-level traffic increment
        node_total_traffic += u + d
    # Node traffic counter
    VmessNode.increase_used_traffic(node_id, node_total_traffic)
    # Traffic log records
    UserTrafficLog.objects.bulk_create(trafficlog_model_list)
    # TODO online IPs
    # Per-user traffic counters
    User.objects.bulk_update(
        user_model_list,
        ["download_traffic", "upload_traffic", "last_use_time"],
    )
    # Node online user count
    NodeOnlineLog.add_log(
        NodeOnlineLog.NODE_TYPE_VMESS, node_id, len(request.json["user_traffics"])
    )
    # check node && user traffic
    if node.overflow:
        node.enable = False
    if need_clear_cache or node.overflow:
        node.save()
    return JsonResponse(data={})
def handle(self, *args, **options):
    """Dispatch a job method named by --jobname on this management command.

    Raises:
        CommandError: if --jobname is missing or names no method here.
    """
    job_name = options.get("jobname")
    if not job_name:
        # Fixed ungrammatical message ("--jobname are required options").
        raise CommandError("--jobname is a required option")
    job_func = getattr(self, job_name, None)
    if not job_func:
        raise CommandError(f"job: {job_name} not found")
    now = get_current_datetime().strftime("%Y-%m-%d %H:%M")
    print(f"RUNNING JOB: {job_name} .... Time: {now}")
    job_func()
def gen_doughnut_config():
    """Build the active-user doughnut-chart config (online/registered/checked-in)."""
    online_count = pm.NodeOnlineLog.get_all_node_online_user_count()
    registered_today = sm.User.get_today_register_user().count()
    checked_in_today = sm.UserCheckInLog.get_checkin_user_count(
        utils.get_current_datetime().date()
    )
    return {
        "title": f"总用户数量{sm.User.objects.all().count()}人",
        "labels": ["在线人数", "今日注册", "今日签到"],
        "data": [online_count, registered_today, checked_in_today],
        "data_title": "活跃用户",
    }
def make_up_lost_orders(cls):
    """Re-check every still-pending, unexpired order against the gateway."""
    now = get_current_datetime()
    pending = cls.objects.filter(status=cls.STATUS_CREATED, expired_at__gte=now)
    for order in pending:
        try:
            with lock.order_lock(order.out_trade_no):
                order.refresh_from_db()
                if order.check_order_status():
                    print(f"补单:{order.user}={order.amount}")
        except LockError:
            # Runs from a periodic job; if the lock is busy, skip this round.
            pass
def get_recent_log_by_node_id(cls, proxy_node):
    """Return recent logs for *proxy_node*, keeping only the first row per IP."""
    # TODO optimize how IPs are stored
    now = utils.get_current_datetime()
    window = [now.subtract(seconds=c.NODE_TIME_OUT), now]
    seen_ips = set()
    unique_logs = []
    for log in cls.objects.filter(proxy_node=proxy_node, created_at__range=window):
        if log.ip in seen_ips:
            continue
        seen_ips.add(log.ip)
        unique_logs.append(log)
    return unique_logs
def sync_user_vmess_traffic_task(node_id, data):
    """Apply a vmess node's reported user traffic in bulk.

    Mirrors the vmess sync view: per-user counters, per-user traffic logs,
    node traffic and node online count, all via bulk writes.
    """
    node = m.VmessNode.get_or_none_by_node_id(node_id)
    if not node:
        return
    log_time = get_current_datetime()
    node_total_traffic = 0
    need_clear_cache = False
    trafficlog_model_list = []
    user_model_list = []
    for log in data:
        user_id = log["user_id"]
        u = int(log["ut"] * node.enlarge_scale)
        d = int(log["dt"] * node.enlarge_scale)
        # Per-user traffic increments
        user = m.User.get_by_pk(user_id)
        user.download_traffic += d
        user.upload_traffic += u
        user.last_use_time = log_time
        user_model_list.append(user)
        # Node cache rebuild needed if a user overflowed or lost level access.
        if user.overflow or user.level < node.level:
            need_clear_cache = True
        # Per-user traffic log record.
        # NOTE(review): `u` (upload) goes into download_traffic and `d` into
        # upload_traffic — same swap as the other sync endpoints; confirm
        # whether this perspective flip is intentional.
        trafficlog_model_list.append(
            m.UserTrafficLog(
                node_type=m.UserTrafficLog.NODE_TYPE_VMESS,
                node_id=node_id,
                user_id=user_id,
                download_traffic=u,
                upload_traffic=d,
            ))
        # Node-level traffic increment
        node_total_traffic += u + d
    # Node traffic counter
    m.VmessNode.increase_used_traffic(node_id, node_total_traffic)
    # Traffic log records
    m.UserTrafficLog.objects.bulk_create(trafficlog_model_list)
    # Per-user traffic counters
    m.User.objects.bulk_update(
        user_model_list,
        ["download_traffic", "upload_traffic", "last_use_time"],
    )
    # Node online user count
    m.NodeOnlineLog.add_log(m.NodeOnlineLog.NODE_TYPE_VMESS, node_id, len(data))
    # check node && user traffic
    if node.overflow:
        node.enable = False
    if need_clear_cache or node.overflow:
        node.save()
def check_and_disable_expired_users(cls):
    """Demote users whose paid level expired; email them if notice is enabled."""
    now = get_current_datetime()
    expired_users = list(cls.objects.filter(level__gt=0, level_expire_time__lte=now))
    if not expired_users:
        return
    for user in expired_users:
        user.level = 0
        user.save()
        print(f"Time: {now} user: {user} level timeout!")
    if settings.EXPIRE_EMAIL_NOTICE:
        EmailSendLog.send_mail_to_users(
            expired_users,
            f"您的{settings.TITLE}账号已到期",
            f"您的账号现被暂停使用。如需继续使用请前往 {settings.HOST} 充值",
        )
def calc_traffic_by_datetime(cls, dt: pendulum.DateTime, user_id=None, proxy_node=None):
    """Traffic total for the given date/user/node; only today's data hits the DB.

    Today's totals are still changing, so the cached helper is invoked via
    `.uncached` to force a fresh query; historical days go through the cache.
    """
    if dt.date() == utils.get_current_datetime().date():
        # NOTE(review): the uncached path passes `dt` as-is while the cached
        # path normalizes to start_of("day") — presumably fine since only the
        # cached call needs a stable key, but confirm the helper treats both
        # the same.
        return cls._calc_traffic_by_datetime.uncached(
            cls,
            dt,
            user_id,
            proxy_node.id if proxy_node else None,
        )
    return cls._calc_traffic_by_datetime(
        dt.start_of("day"),
        user_id,
        proxy_node.id if proxy_node else None,
    )
def create_or_update_stats(cls, dt: pendulum.DateTime):
    """Get-or-create the stats row for dt's date and refresh it unless historical."""
    date = dt.date()
    today = utils.get_current_datetime().date()
    log, _ = cls.objects.get_or_create(date=date)
    # Rows for days before today are frozen and never refreshed again.
    if date < today:
        return log
    log.new_user_count = sm.User.get_new_user_count_by_datetime(dt)
    log.active_user_count = pm.UserTrafficLog.get_active_user_count_by_datetime(dt)
    log.checkin_user_count = sm.UserCheckInLog.get_checkin_user_count(date)
    log.order_count = sm.UserOrder.get_success_order_count(dt)
    log.order_amount = decimal.Decimal(sm.UserOrder.get_success_order_amount(dt))
    log.total_used_traffic = pm.UserTrafficLog.calc_traffic_by_datetime(dt)
    log.save()
    return log
def get(self, request):
    """Render the admin user-status dashboard page."""
    today_register_user = User.get_today_register_user().values()[:10]
    # Attach each new user's inviter object (or the literal string "None").
    for row in today_register_user:
        try:
            row["inviter"] = User.objects.get(pk=row["inviter_id"])
        except User.DoesNotExist:
            row["inviter"] = "None"
    context = {
        "total_user_num": User.get_total_user_num(),
        "alive_user_count": NodeOnlineLog.get_all_node_online_user_count(),
        "today_checked_user_count": UserCheckInLog.get_checkin_user_count(
            utils.get_current_datetime().date()
        ),
        "today_register_user_count": len(today_register_user),
        "traffic_users": User.get_user_order_by_traffic(count=10),
        "rich_users_data": Donate.get_most_donated_user_by_count(10),
        "today_register_user": today_register_user,
    }
    return render(request, "my_admin/user_status.html", context=context)
def purchase_by_user(self, user):
    """Purchase this good for *user*; return True on success, False otherwise.

    On success: deducts balance, extends or resets the user's level and
    traffic, records the purchase, and credits the inviter's rebate if any.
    """
    if user.balance < self.money or not self.user_can_buy(user):
        return False
    # Validation passed — apply the upgrade.
    user.balance -= self.money
    now = get_current_datetime()
    days = pendulum.duration(days=self.days)
    if user.level == self.level and user.level_expire_time > now:
        # Renewing the same, still-active level: stack duration, add traffic.
        user.level_expire_time += days
        user.total_traffic += self.transfer
    else:
        # New or expired level: restart the clock and reset traffic counters.
        user.level_expire_time = now + days
        user.reset_traffic(self.transfer)
        user.level = self.level
    user.save(update_fields=[
        "level",
        "balance",
        "total_traffic",
        "upload_traffic",
        "download_traffic",
        "level_expire_time",
    ])
    # Record the purchase
    PurchaseHistory.add_log(good=self, user=user)
    inviter = User.get_or_none(user.inviter_id)
    if inviter and inviter != user:
        # Record the rebate and credit the inviter's balance.
        rebaterecord = RebateRecord(
            user_id=inviter.pk,
            consumer_id=user.pk,
            money=self.money * Decimal(settings.INVITE_PERCENT),
        )
        inviter.balance += rebaterecord.money
        inviter.save(update_fields=["balance"])
        rebaterecord.save()
    return True
def clean_traffic_log_task():
    """Delete all UserTrafficLog rows created more than seven days ago."""
    cutoff = get_current_datetime().subtract(days=7)
    removed, _ = UserTrafficLog.objects.filter(created_at__lt=cutoff).delete()
    print(f"UserTrafficLog removed count:{removed}")
def clean_online_ip_log_task():
    """Delete online-IP log rows created more than one day ago."""
    cutoff = get_current_datetime().subtract(days=1)
    removed, _ = UserOnLineIpLog.objects.filter(created_at__lt=cutoff).delete()
    print(f"UserOnLineIpLog removed count:{removed}")
def post(self, request, node_id):
    """
    This endpoint is write-heavy, so to avoid firing model signals every
    write goes through bulk operations:
    1. update node traffic
    2. update user traffic
    3. record per-node online IPs
    4. disable nodes that exceeded their traffic quota
    """
    ss_node = SSNode.get_or_none_by_node_id(node_id)
    if not ss_node:
        return HttpResponseNotFound()
    data = request.json["data"]
    node_total_traffic = 0
    log_time = get_current_datetime()
    active_tcp_connections = 0
    need_clear_cache = False
    user_model_list = []
    trafficlog_model_list = []
    online_ip_log_model_list = []
    for user_data in data:
        user_id = user_data["user_id"]
        u = int(user_data["upload_traffic"] * ss_node.enlarge_scale)
        d = int(user_data["download_traffic"] * ss_node.enlarge_scale)
        # Per-user traffic increments
        user = User.get_by_pk(user_id)
        user.download_traffic += d
        user.upload_traffic += u
        user.last_use_time = log_time
        user_model_list.append(user)
        if user.overflow:
            need_clear_cache = True
        # Per-user traffic log record.
        # NOTE(review): download_traffic gets `u` (upload) and upload_traffic
        # gets `d` (download) — same swap as the other sync endpoints; confirm
        # whether the flip is intentional before changing it.
        trafficlog_model_list.append(
            UserTrafficLog(
                node_type=UserTrafficLog.NODE_TYPE_SS,
                node_id=node_id,
                user_id=user_id,
                download_traffic=u,
                upload_traffic=d,
            )
        )
        # Node-level traffic increment
        node_total_traffic += u + d
        # active_tcp_connections
        active_tcp_connections += user_data["tcp_conn_num"]
        # online ip log
        for ip in user_data.get("ip_list", []):
            online_ip_log_model_list.append(
                UserOnLineIpLog(user_id=user_id, node_id=node_id, ip=ip)
            )
    # Per-user traffic counters
    User.objects.bulk_update(
        user_model_list,
        ["download_traffic", "upload_traffic", "last_use_time"],
    )
    # Node traffic counter
    SSNode.increase_used_traffic(node_id, node_total_traffic)
    # Traffic log records
    UserTrafficLog.objects.bulk_create(trafficlog_model_list)
    # Online IP records
    UserOnLineIpLog.objects.bulk_create(online_ip_log_model_list)
    # Node online user count
    NodeOnlineLog.add_log(
        NodeOnlineLog.NODE_TYPE_SS, node_id, len(data), active_tcp_connections
    )
    # check node && user traffic
    if ss_node.overflow:
        ss_node.enable = False
    if need_clear_cache or ss_node.overflow:
        ss_node.save()
    return JsonResponse(data={})
def clean_user_sub_log_task():
    """Delete UserSubLog rows created more than one month ago."""
    cutoff = get_current_datetime().subtract(months=1)
    removed, _ = m.UserSubLog.objects.filter(created_at__lt=cutoff).delete()
    print(f"UserSubLog removed count:{removed}")
def sync_user_traffic_task(node_id, data):
    """
    This task is write-heavy, so to avoid firing model signals every write
    goes through bulk operations:
    1. update node traffic
    2. update user traffic
    3. record per-node online IPs
    4. disable nodes that exceeded their traffic quota
    """
    node = ProxyNode.get_or_none(node_id)
    if not node:
        return
    node_total_traffic = 0
    log_time = get_current_datetime()
    tcp_connections_count = 0
    user_model_list = []
    trafficlog_model_list = []
    online_ip_log_model_list = []
    for user_data in data:
        user_id = user_data["user_id"]
        u = int(user_data["upload_traffic"] * node.enlarge_scale)
        d = int(user_data["download_traffic"] * node.enlarge_scale)
        # Per-user traffic increments
        user = m.User.get_by_pk(user_id)
        user.download_traffic += d
        user.upload_traffic += u
        user.last_use_time = log_time
        user_model_list.append(user)
        # Per-user traffic log record.
        # NOTE(review): `u`/`d` appear swapped here (download_traffic=u,
        # upload_traffic=d), matching the other sync endpoints — confirm
        # intent before changing.
        trafficlog_model_list.append(
            UserTrafficLog(
                proxy_node=node,
                user=user,
                download_traffic=u,
                upload_traffic=d,
            ))
        # Node-level traffic increment
        node_total_traffic += u + d
        # active_tcp_connections
        tcp_connections_count += user_data["tcp_conn_num"]
        # online ip log
        for ip in user_data.get("ip_list", []):
            online_ip_log_model_list.append(
                UserOnLineIpLog(user=user, proxy_node=node, ip=ip))
    # Node traffic counter; disable the node if it overflowed its quota.
    node.used_traffic += node_total_traffic
    if node.overflow:
        node.enable = False
    node.save(update_fields=["used_traffic", "enable"])
    # Per-user traffic counters
    m.User.objects.bulk_update(
        user_model_list,
        ["download_traffic", "upload_traffic", "last_use_time"],
    )
    # Traffic log records
    UserTrafficLog.objects.bulk_create(trafficlog_model_list)
    # Online IP records
    UserOnLineIpLog.objects.bulk_create(online_ip_log_model_list)
    # Node online user count
    NodeOnlineLog.add_log(node, len(data), tcp_connections_count)
def gen_daily_stats_task():
    """Refresh today's DailyStats row, then (re)generate yesterday's."""
    now = utils.get_current_datetime()
    for dt in (now, now.add(days=-1)):
        models.DailyStats.create_or_update_stats(dt)
def get_active_user_count_by_datetime(cls, dt: pendulum.DateTime):
    """Active-user count for a date; only today's value bypasses the cache."""
    is_today = dt.date() == utils.get_current_datetime().date()
    if is_today:
        # Today's number is still moving — force a fresh DB hit.
        return cls._get_active_user_count_by_datetime.uncached(cls, dt)
    return cls._get_active_user_count_by_datetime(dt.start_of("day"))
def online(self):
    """True if this record is recent enough (within NODE_TIME_OUT seconds)."""
    cutoff = utils.get_current_datetime().subtract(seconds=c.NODE_TIME_OUT)
    return self.created_at > cutoff
def clean_traffic_log(self):
    """Delete all UserTrafficLog rows dated more than seven days ago."""
    cutoff_date = get_current_datetime().subtract(days=7).date()
    removed, _ = UserTrafficLog.objects.filter(date__lt=cutoff_date).delete()
    print(f"UserTrafficLog removed count:{removed}")
def clean_node_online_log_task():
    """Delete node online-log rows created more than one day ago."""
    cutoff = get_current_datetime().subtract(days=1)
    removed, _ = NodeOnlineLog.objects.filter(created_at__lt=cutoff).delete()
    print(f"NodeOnlineLog removed count:{removed}")