def get(self, request, start_dt, end_dt, *args, **kwargs):
    """Kick off the async order task for the given date range.

    Parses the date-string URL parameters and schedules
    ``tasks.task_async_order`` for the current user's visitor id.

    Returns a DRF ``Response`` carrying the Celery task id so the
    client can poll for completion.
    """
    profile = request.user.get_profile()
    start_dt = parse_date(start_dt)
    end_dt = parse_date(end_dt)
    result = tasks.task_async_order.delay(start_dt, end_dt, profile.visitor_id)
    # BUG FIX: return the task id (a plain string) rather than the
    # AsyncResult object itself, which is not JSON-serializable; this
    # also matches the {'task_id': ...} shape used by the other
    # task-launching views in this file.
    return Response({"task_id": result.task_id})
def update_interval_refunds(request, dt_f, dt_t):
    """Schedule the refund-order update task over [dt_f, dt_t].

    Responds with a JSON body containing the Celery task id.
    """
    update_from = parse_date(dt_f)
    update_to = parse_date(dt_t)
    async_result = updateAllUserRefundOrderTask.delay(update_from=update_from,
                                                      update_to=update_to)
    payload = json.dumps({'task_id': async_result.task_id})
    return HttpResponse(payload, content_type='application/json')
def update_finish_trade_amount(request, dt_f, dt_t):
    """Schedule the task that recomputes finished-trade order amounts.

    Responds with a JSON body containing the Celery task id.
    """
    start = parse_date(dt_f)
    end = parse_date(dt_t)
    task = updateAllUserOrdersAmountTask.delay(dt_f=start, dt_t=end)
    return HttpResponse(json.dumps({'task_id': task.task_id}),
                        content_type='application/json')
def get(self, request, *args, **kwargs):
    """Summarize successful refunds between dt_f and dt_t.

    Each trade containing a successful refund is classified as a full
    refund (every order in the trade refunded) or a partial refund,
    further split by whether the trade was consigned.  The response
    also carries the raw rows produced by ``self.gen_refund_sql``.
    """
    start = parse_date(kwargs.get('dt_f'))
    # Push the upper bound one day forward so the whole of dt_t counts.
    end = parse_date(kwargs.get('dt_t')) + datetime.timedelta(1, 0, 0)

    refunded = Order.objects.filter(created__gte=start,
                                    created__lte=end,
                                    refund_status=pcfg.REFUND_SUCCESS)
    total = refunded.count()

    full_cnt = part_cnt = 0
    consign_full_cnt = consign_part_cnt = 0
    for trade_id in refunded.values_list('trade', flat=True).distinct():
        trade_obj = Trade.objects.get(id=trade_id)
        unrefunded = Order.objects.filter(trade=trade_obj) \
                                  .exclude(refund_status=pcfg.REFUND_SUCCESS)
        if unrefunded.count() > 0:
            # Some orders in this trade were not refunded -> partial.
            part_cnt += 1
            if trade_obj.consign_time:
                consign_part_cnt += 1
        else:
            full_cnt += 1
            if trade_obj.consign_time:
                consign_full_cnt += 1

    cursor = connection.cursor()
    cursor.execute(self.gen_refund_sql(format_datetime(start),
                                       format_datetime(end)))
    rows = cursor.fetchall()

    summary = {
        'result': rows,
        'total_refunds': total,
        'full_refunds': full_cnt,
        'part_refunds': part_cnt,
        'consign_part_refunds': consign_part_cnt,
        'consign_full_refunds': consign_full_cnt,
    }
    return Response({"object": summary})
def __init__(self, date):
    """Build a validation error for a date URL parameter.

    Anything that is not strict ISO ``YYYY-MM-DD`` — including
    parseable-but-unpadded values such as "2020-1-1" — is reported as
    a format error; dates that do parse canonically are assumed to be
    outside the supported data range.
    """
    bad_format = False
    try:
        # A round-trip through isoformat() catches non-canonical input
        # that parse_date would otherwise accept.
        bad_format = parse_date(date).isoformat() != date
    except ValueError:
        bad_format = True
    if bad_format:
        detail = "Dates must be in YYYY-MM-DD format"
    else:
        detail = "Date is outside the 5 years of data available"
    super().__init__(detail)
def extract_text(self, source_news, publish_date):
    """Download and parse *source_news*, populating this object's fields.

    Falls back to ``UTILS`` helpers when newspaper's own extraction
    comes up empty, and records an error label instead of raising on
    failure.

    Returns the value of ``self.jsonify()`` in every case.
    """
    # BUG FIX: initialize before the try so the generic handler below
    # can inspect `article` safely even when the constructor itself
    # raised (previously a NameError could mask the real error).
    article = None
    try:
        article = newspaper.Article(source_news)
        # Abort outright when the network is gone: retrying is pointless.
        # (sys.exit raises SystemExit, which inherits from BaseException
        # and therefore is NOT caught by the handlers below.)
        if is_connected() is False:
            print("Internet Connection disrupted! Exiting the process.")
            sys.exit(1)
        article.download()
        article.parse()
        self.title = article.title
        self.description = article.text
        self.meta_keywords = article.meta_keywords
        self.meta_description = article.meta_description
        # newspaper sometimes yields no keywords; fall back to our parser.
        keywords_list = self.get_keywords()
        if len(keywords_list) <= 0:
            self.meta_keywords = UTILS.parse_keywords(article)
        if self.description == "":
            self.description = self.clean_text(article.html)
        # Publish-date precedence: explicit timestamp argument, then
        # newspaper's detected date, then a best-effort parse.
        if is_date_timestamp(publish_date):
            self.publish_date = UTILS.convertTimeStampToDataTime(publish_date)
        elif article.publish_date is not None:
            self.publish_date = article.publish_date.date()
        else:
            self.publish_date = UTILS.parse_date(article)
        self.error = "False"
        return self.jsonify()
    except newspaper.ArticleException:
        self.title = None
        self.description = None
        self.meta_keywords = []
        self.meta_description = None
        self.publish_date = publish_date
        self.error = "Article Exception"
        return self.jsonify()
    except Exception:
        self.title = None
        self.description = None
        self.meta_keywords = []
        self.meta_description = None
        self.publish_date = publish_date
        # Derive an error label from newspaper's download message when
        # one exists; getattr guards against the attribute being absent.
        msg = getattr(article, 'download_exception_msg', None) if article is not None else None
        if msg is not None:
            # Keep the exception class name, i.e. everything before ':'.
            self.error = msg[:msg.find(":")]
        else:
            self.error = "newspaper Exception"
        return self.jsonify()
def get(self, request, *args, **kwargs):
    """Aggregate paid trades for the given seller nicks into chart data.

    Query params: ``df``/``dt`` (date range, defaults to today),
    ``nicks`` (comma-separated seller nicks), ``cat_by`` (time bucket),
    ``type`` (trade-type filter), ``lg_company`` (logistics-company
    filter) and ``xy`` (chart orientation).
    """
    content = request.GET
    df = content.get('df')
    dt = content.get('dt')
    nicks = content.get('nicks', '')
    cat_by = content.get('cat_by', 'hour')
    trade_type = content.get('type', 'all')
    logistic_company = content.get('lg_company')
    xy = content.get('xy', 'horizon')
    nicks_list = nicks.split(',')
    if df and dt:
        start_dt = parse_date(df)
        end_dt = parse_date(dt)
        start_dt = datetime.datetime(start_dt.year, start_dt.month, start_dt.day, 0, 0, 0)
        end_dt = datetime.datetime(end_dt.year, end_dt.month, end_dt.day, 23, 59, 59)
    else:
        # No explicit range: default to the whole of today.
        now = datetime.datetime.now()
        start_dt = datetime.datetime(now.year, now.month, now.day, 0, 0, 0)
        end_dt = datetime.datetime(now.year, now.month, now.day, 23, 59, 59)
    queryset = Trade.objects.filter(seller_nick__in=nicks_list,
                                    status__in=pcfg.ORDER_SUCCESS_STATUS)
    queryset = queryset.filter(pay_time__gte=start_dt, pay_time__lte=end_dt) \
        .extra(select={'pay_time': "date_format(pay_time,'%%y-%%m-%%d')"})
    if logistic_company:
        queryset = queryset.filter(logistics_company=logistic_company)
    if trade_type != 'all':
        queryset = queryset.filter(type=trade_type)
    orders_data_chts = []
    if queryset.count() != 0:
        if xy == 'vertical':
            categories = [cat_by]
        elif cat_by == 'year':
            categories = ['year']
        elif cat_by == 'month':
            categories = ['year', 'month']
        elif cat_by == 'day':
            categories = ['year', 'month', 'day']
        elif cat_by == 'week':
            categories = ['year', 'week']
        else:
            categories = ['year', 'month', 'day', 'hour']
        series = {
            'options': {'source': queryset, 'categories': "pay_time",
                        'legend_by': 'seller_nick'},
            'terms': {
                'total_trades': {'func': Count('id'), 'legend_by': 'seller_nick'},
                'total_sales': {'func': Sum('payment'), 'legend_by': 'seller_nick'},
                'post_fees': {'func': Sum('post_fee'), 'legend_by': 'seller_nick'},
                # 'commission_fees':{'func':Sum('commission_fee'),'legend_by':'seller_nick'},
                # 'buyer_obtain_point_fees':{'func':Sum('buyer_obtain_point_fee'),'legend_by':'seller_nick'},
            }
        }
        series_options = [{
            'options': {'type': 'column', 'stacking': True, 'yAxis': 0},
            'terms': ['total_trades',
                      {'total_sales': {'type': 'line', 'stacking': False, 'yAxis': 1}},
                      {'post_fees': {'type': 'line', 'stacking': False, 'yAxis': 1}},
                      # {'commission_fees':{'type':'area','stacking':False,'yAxis':1}},
                      # {'buyer_obtain_point_fees':{'type':'column','stacking':False,'yAxis':4}},
                      ]},
        ]
        chart_options = {
            'chart': {'zoomType': 'xy', 'renderTo': "container1"},
            'title': {'text': nicks},
            'xAxis': {'title': {'text': 'per %s' % (cat_by)},
                      'labels': {'rotation': 45, 'align': 'right',
                                 'style': {'font': 'normal 12px Verdana, sansserif'}}},
            'yAxis': [{'title': {'text': u'\u8ba2\u5355\u6570'}},
                      {'title': {'text': u'\u4ea4\u6613\u989d'}, 'opposite': True},
                      {'title': {'text': u'\u90ae\u8d39'}, 'opposite': True},
                      # {'title': {'text': u'\u4f53\u91d1'},'opposite': True},
                      # {'title': {'text': u'\u79ef\u5206'},},
                      ]
        }
        # BUG FIX: charting (PivotDataPool / PivotChart) is disabled,
        # but a bare ``orders_data_chts.append()`` call was left behind
        # with its entire argument commented out — append() with zero
        # arguments raises TypeError at runtime.  Leave the chart list
        # empty until the charting backend is re-enabled.
        # ordersdata = PivotDataPool(series=[series],
        #                            sortf_mapf_mts=(None, map_datetime2daystr, True))
        # orders_data_chts.append(PivotChart(datasource=ordersdata,
        #                                    series_options=series_options,
        #                                    chart_options=chart_options))
    chart_data = {'df': format_date(start_dt), 'dt': format_date(end_dt),
                  'nicks': nicks, 'cat_by': cat_by, 'type': trade_type,
                  'xy': xy, 'charts': orders_data_chts}
    return Response(chart_data)
def get(self, request, *args, **kwargs):
    """List the other items bought by customers of ``outer_id``.

    For every distinct buyer of the given item (optionally restricted
    to specific SKUs) within the ``df``/``dt`` window, accumulate the
    quantities of all items those buyers purchased, and return the top
    ``limit`` items by quantity.
    """
    content = request.GET
    df = content.get('df')
    dt = content.get('dt')
    outer_id = content.get('outer_id', '')
    outer_sku_ids = content.get('sku_ids')
    limit = content.get('limit', 10)
    # Accept either a num_iid (resolved to its outer_id) or an outer_id.
    try:
        item = Item.objects.get(num_iid=outer_id)
    except Item.DoesNotExist:
        pass
    else:
        outer_id = item.outer_id
    if df and dt:
        start_dt = parse_date(df)
        end_dt = parse_date(dt)
        start_dt = datetime.datetime(start_dt.year, start_dt.month, start_dt.day, 0, 0, 0)
        end_dt = datetime.datetime(end_dt.year, end_dt.month, end_dt.day, 23, 59, 59)
    else:
        # No explicit range: default to the whole of today.
        dt = datetime.datetime.now()
        start_dt = datetime.datetime(dt.year, dt.month, dt.day, 0, 0, 0)
        end_dt = datetime.datetime(dt.year, dt.month, dt.day, 23, 59, 59)
    order_item_list = []
    if outer_id:
        merge_orders = Order.objects.filter(
            outer_id=outer_id, created__gte=start_dt, created__lte=end_dt
        ).exclude(status__in=(pcfg.TRADE_CLOSED_BY_TAOBAO, pcfg.WAIT_BUYER_PAY,
                              pcfg.TRADE_CLOSED))
        if outer_sku_ids:
            sku_ids = outer_sku_ids.split(',')
            merge_orders = merge_orders.filter(outer_sku_id__in=sku_ids)
        # BUG FIX / cleanup: the original tracked buyers with a bare
        # ``except:`` around set.remove() plus re-adds, and tested dict
        # membership via the Python-2-only dict.has_key().  A plain
        # seen-set guard and the ``in`` operator are equivalent: each
        # buyer's purchase history is accumulated exactly once.
        seen_buyers = set()
        relative_orders_dict = {}
        for order in merge_orders:
            buyer_nick = order.buyer_nick
            if buyer_nick in seen_buyers:
                continue
            seen_buyers.add(buyer_nick)
            relat_orders = Order.objects.filter(
                buyer_nick=buyer_nick, created__gte=start_dt, created__lte=end_dt
            ).exclude(status__in=(pcfg.TRADE_CLOSED_BY_TAOBAO, pcfg.WAIT_BUYER_PAY,
                                  pcfg.TRADE_CLOSED))
            for o in relat_orders:
                relat_outer_id = o.outer_id
                if relat_outer_id in relative_orders_dict:
                    relative_orders_dict[relat_outer_id]['cnum'] += o.num
                else:
                    relative_orders_dict[relat_outer_id] = {'pic_path': o.pic_path,
                                                            'title': o.title,
                                                            'cnum': o.num}
        relat_order_list = sorted(relative_orders_dict.items(),
                                  key=lambda d: d[1]['cnum'], reverse=True)
        for rel_outer_id, info in relat_order_list[0:int(limit)]:
            order_item_list.append([rel_outer_id, info['pic_path'],
                                    info['title'], info['cnum']])
    return Response({'df': format_date(start_dt), 'dt': format_date(end_dt),
                     'outer_id': outer_id, 'limit': limit,
                     'order_items': order_item_list})
def get(self, request, *args, **kwargs):
    """Chart per-SKU sales quantities of one item across seller nicks.

    URL kwargs supply the date range (``dt_f``/``dt_t``) and the item's
    ``num_iid``; query params select nicks, time bucketing, the payment
    status filter, chart orientation and the time base.

    Raises Http404 when no matching orders exist.
    """
    dt_f = kwargs.get('dt_f')
    dt_t = kwargs.get('dt_t')
    num_iid = kwargs.get('num_iid')
    nicks = request.GET.get('nicks', None)
    cat_by = request.GET.get('cat_by', 'hour')
    pay_type = request.GET.get('type', 'all')
    xy = request.GET.get('xy', 'horizontal')
    base = request.GET.get('base', 'created')
    nicks_list = nicks.split(',')
    dt_f = parse_date(dt_f)
    # Push the upper bound one day forward so the whole of dt_t counts.
    dt_t = parse_date(dt_t) + datetime.timedelta(1, 0, 0)
    try:
        item = Item.objects.get(num_iid=num_iid)
    except Item.DoesNotExist:
        outer_id = num_iid
    else:
        outer_id = item.outer_id
    try:
        # BUG FIX: this previously called Product.objects.get() with no
        # lookup arguments, which returns an arbitrary single row or
        # raises MultipleObjectsReturned; look up by the outer_id we
        # just resolved.  Any lookup failure falls back to the
        # placeholder name, as before.
        product = Product.objects.get(outer_id=outer_id)
    except Exception:
        product_name = '商品名未知'
    else:
        product_name = product.name
    queryset = Order.objects.filter(seller_nick__in=nicks_list, outer_id=outer_id)
    if base == 'consign':
        queryset = queryset.filter(trade__consign_time__gte=dt_f,
                                   trade__consign_time__lt=dt_t)
    else:
        queryset = queryset.filter(trade__created__gte=dt_f,
                                   trade__created__lt=dt_t)
    if pay_type == 'pay':
        queryset = queryset.filter(status__in=pcfg.ORDER_SUCCESS_STATUS)
    elif pay_type == 'finish':
        queryset = queryset.filter(status=pcfg.ORDER_FINISH_STATUS)
    if queryset.count() == 0:
        raise Http404('no nick found')
    if xy == 'vertical':
        categories = [cat_by]
    elif cat_by == 'year':
        categories = ['year']
    elif cat_by == 'month':
        categories = ['year', 'month']
    elif cat_by == 'day':
        categories = ['year', 'month', 'day']
    elif cat_by == 'week':
        categories = ['year', 'week']
    else:
        categories = ['year', 'month', 'day', 'hour']
    series = {
        'options': {'source': queryset, 'categories': categories,
                    'legend_by': ['seller_nick', 'outer_sku_id']},
        'terms': {
            'sku_nums': {'func': Sum('num'),
                         'legend_by': ['seller_nick', 'outer_sku_id']},
        }
    }
    # ordersdata = PivotDataPool(series=[series], sortf_mapf_mts=(None, map_int2str, True))
    series_options = [{
        'options': {'type': 'area', 'stacking': True, 'yAxis': 0},
        'terms': ['sku_nums', ]},
    ]
    chart_options = {
        'chart': {'zoomType': 'xy', 'renderTo': "container1"},
        'title': {'text': product_name},
        'xAxis': {'title': {'text': 'per %s' % (cat_by)},
                  'labels': {'rotation': 45, 'align': 'right',
                             'style': {'font': 'normal 12px Verdana, sansserif'}}},
        'yAxis': [{'title': {'text': u'\u9500\u552e\u6570\u91cf'}},
                  ]
    }
    # Charting is disabled; the placeholder keeps the response shape.
    orders_data_cht = None
    # PivotChart(
    #     datasource=ordersdata,
    #     series_options=series_options,
    #     chart_options=chart_options)
    # NOTE(review): filtering ProductSku.product (a relation) by the
    # outer_id string looks suspicious — preserved as-is; confirm.
    product_sku = ProductSku.objects.filter(product=outer_id)
    sku_list = []
    for psku in product_sku:
        sku_list.append({'sku_outer_id': psku.outer_id,
                         'sku_values': psku.properties_alias})
    chart_data = {"charts": [orders_data_cht], 'skus': sku_list}
    if self.request.GET.get('format') == 'table':
        class ChartEncoder(json.JSONEncoder):
            pass
            # def default(self, obj):
            #     if isinstance(obj, (Chart, PivotChart)):
            #         return obj.hcoptions  # Serializer().serialize
            #     return DjangoJSONEncoder.default(self, obj)
        chart_data = json.loads(json.dumps(chart_data, cls=ChartEncoder))
    return Response(chart_data)
def commissions(request, concierge, template="concierges/commission.html"):
    """Render a concierge's commission report for a half-month period.

    The period comes from explicit ``start_date``/``end_date`` query
    params, from a single ``day`` (whose containing half-month is
    used), or defaults to the most recently completed half-month.
    """
    label = ''
    date_format = '%B %d, %Y'
    today = date.today()
    start_date = parse_date(request.GET.get('start_date', ''))
    end_date = parse_date(request.GET.get('end_date', ''))
    # BUG FIX: 'day' previously had no default, so a missing parameter
    # passed None to parse_date while its two siblings passed ''.
    day = parse_date(request.GET.get('day', ''))
    # Default range is the previous half of whatever is today.
    if not start_date and not end_date and not day:
        if today.day > 15:
            start_date = date(today.year, today.month, 1)
            end_date = date(today.year, today.month, 15)
        else:
            today_minus_15 = today - timedelta(days=15)
            first_day, last_day = first_and_last_date_in_month(today_minus_15)
            # NOTE(review): this yields the 16th..last day of the
            # previous month, while the original comment described the
            # 1st-15th — logic preserved as-is; confirm intent.
            start_date = date(today_minus_15.year, today_minus_15.month, 16)
            end_date = last_day
    # If we have a day, find the half of month the day is in.
    elif day:
        first_day, last_day = first_and_last_date_in_month(day)
        if day.day <= 15:
            start_date = first_day
            end_date = date(day.year, day.month, 15)
        else:
            start_date = date(day.year, day.month, 16)
            end_date = last_day
    elif start_date and end_date:
        label = '%s to %s' % (start_date.strftime(date_format),
                              end_date.strftime(date_format))
    total_commission, orders = concierge.order_report(start_date, end_date)
    # Build the six most recent half-month periods for the period picker,
    # newest first (insert(0, ...) reverses the walk backwards in time).
    recent_periods = []
    for x in xrange(0, 6):
        new_month = today.month - x
        new_year = today.year
        if new_month < 1:
            new_month = 12 - abs(new_month)
            new_year = new_year - 1
        beg_date = date(new_year, new_month, 1)
        mid_date = date(new_year, new_month, 16)
        if mid_date < today:
            recent_periods.insert(0, [mid_date, mid_date.strftime('%B'), '2nd Half'])
        if beg_date < today:
            recent_periods.insert(0, [beg_date, beg_date.strftime('%B'), '1st Half'])
    if not label:
        half = 'First Half' if start_date.day <= 15 else 'Second Half'
        label = '%s of %s' % (half, start_date.strftime('%B'))
    ctx = RequestContext(request, {
        'recent_periods': recent_periods,
        'label': label,
        'start_date': start_date,
        'end_date': end_date,
        'concierge': concierge,
        'total_commission': total_commission,
        'orders': orders,
        'tour_types': TourType.objects.filter(
            active=True, featured=True,
            default_site_skin__is_concierge_cta=True),
    })
    return render_to_response(template, context_instance=ctx)