def index(request):
    """
    Renders the first 20 most recent pending and approved/denied
    redemptions.
    """
    # Do not just use timezone.now() - that returns the current UTC
    # time. We need the store's local time, made timezone aware.
    today = timezone.make_aware(datetime.now(),
        SESSION.get_store_timezone(request.session))
    # end of yesterday (23:59:59 store-local), used for comparisons
    today = today + relativedelta(days=-1)
    today = today.replace(hour=23, minute=59, second=59)

    data = {
        "workbench_nav": True,
        "settings": SESSION.get_settings(request.session),
        "today": today,
    }

    redemps = SESSION.get_redemptions_pending(request.session)
    past_redemps = SESSION.get_redemptions_past(request.session)

    # initially display the first 20 pending/history chronologically
    redemps.sort(key=lambda r: r.createdAt, reverse=True)
    past_redemps.sort(key=lambda r: r.updatedAt, reverse=True)

    data['pending_redemptions'] = redemps[:PAGINATION_THRESHOLD]
    data['past_redemptions'] = past_redemps[:PAGINATION_THRESHOLD]

    data["pag_threshold"] = PAGINATION_THRESHOLD
    data["pag_page"] = 1
    data["pending_redemptions_count"] = len(redemps)
    data["history_redemptions_count"] = len(past_redemps)

    return render(request, 'manage/workbench.djhtml', data)
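# The same "end of yesterday, store-local" computation reappears in
# get_page below; a hypothetical helper (not in the codebase) could
# factor it out - a minimal sketch:
from datetime import datetime
from dateutil.relativedelta import relativedelta
from django.utils import timezone

def end_of_yesterday(tzinfo):
    """23:59:59 of the previous day, aware in the given timezone."""
    today = timezone.make_aware(datetime.now(), tzinfo)
    return (today + relativedelta(days=-1)).replace(
        hour=23, minute=59, second=59)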
def _wrapped_view(request, *args, **kwargs):
    if test_func(request):
        if SESSION.get_store(request.session) and \
                SESSION.get_store(request.session).active:
            # may not want to import parse.session here due
            # to cyclic imports
            timezone.activate(
                SESSION.get_store_timezone(request.session))
            try:
                return view_func(request, *args, **kwargs)
            except KeyError:
                return logout(request, "manage_login")
        else:
            return logout(request, "manage_login")

    # if http_response is provided, content_type is json, and
    # request.is_ajax(), then this request is from comet.js
    if request.is_ajax() and http_response and \
            content_type == "application/json":
        # no need to update the session - if it got here then the
        # session is empty
        return HttpResponse(json.dumps(http_response),
            content_type=content_type)

    path = request.build_absolute_uri()
    # If the login url has the same scheme and net location, then
    # just use the path as the "next" url.
    login_scheme, login_netloc = urlparse.urlparse(
        login_url or settings.LOGIN_URL)[:2]
    current_scheme, current_netloc = urlparse.urlparse(path)[:2]
    if ((not login_scheme or login_scheme == current_scheme) and
            (not login_netloc or login_netloc == current_netloc)):
        path = request.get_full_path()

    from django.contrib.auth.views import redirect_to_login
    # no need to update the session - if it got here then the
    # session is empty
    return redirect_to_login(path, login_url, redirect_field_name)
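# _wrapped_view is the inner function of a decorator factory: it
# closes over test_func, view_func, login_url, redirect_field_name,
# http_response, and content_type from an enclosing scope. A minimal
# sketch of how such a factory is presumably assembled - the name and
# defaults here are assumptions, not the project's actual API:
import functools

def login_required_factory(test_func, login_url=None,
                           redirect_field_name="next",
                           http_response=None, content_type=None):
    def decorator(view_func):
        @functools.wraps(view_func)
        def _wrapped_view(request, *args, **kwargs):
            # body as defined above, closing over the factory args
            return view_func(request, *args, **kwargs)
        return _wrapped_view
    return decorator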
def update_subscription(request):
    """
    This view is also used for explicit upgrades.
    """
    do_upgrade = request.GET.get("do_upgrade") is not None
    if do_upgrade:
        data = {'account_nav': True, 'upgrade': True}
    else:
        data = {'account_nav': True, 'update': True}

    store = SESSION.get_store(request.session)
    subscription = SESSION.get_subscription(request.session)

    if request.method == 'POST':
        form = SubscriptionForm(request.POST)
        form.subscription = subscription  # to validate cc_number
        all_forms_valid = form.is_valid()
        if all_forms_valid:
            # upgrade the account if date_passed_user_limit is set;
            # fetch the most up-to-date subscription first
            subscription = Subscription.objects().get(
                objectId=subscription.objectId)
            upgraded = False
            if subscription.date_passed_user_limit or do_upgrade:
                level = subscription.get("subscriptionType")
                if level == 0:
                    subscription.set("subscriptionType", 1)
                    subscription.date_passed_user_limit = None
                    upgraded = True
                elif level == 1:
                    subscription.date_passed_user_limit = None
                    subscription.set("subscriptionType", 2)
                    upgraded = True

            # subscription.update() is called in store_cc
            subscription.update_locally(request.POST.dict(), False)

            d = datetime(int(request.POST['date_cc_expiration_year']),
                int(request.POST['date_cc_expiration_month']), 1)
            subscription.set("date_cc_expiration",
                make_aware_to_utc(d,
                    SESSION.get_store_timezone(request.session)))

            def invalid_card():
                # mask the cc_number, keeping the last 4 digits
                if form.initial.get("cc_number"):
                    form.initial['cc_number'] = "*" * 12 + \
                        form.initial.get('cc_number')[-4:]
                errs = form._errors.setdefault("cc_number", ErrorList())
                errs.append("Invalid credit card. Please make sure "
                    "that you provide correct credit card information "
                    "and that you have sufficient funds, then try "
                    "again.")
                data['form'] = form
                return render(request,
                    'manage/subscription_update.djhtml', data)

            res = True
            # only store_cc if it is all digits (a new card)
            if str(form.data['cc_number']).isdigit():
                res = subscription.store_cc(form.data['cc_number'],
                    form.data['cc_cvv'], False)
            if not res:
                return invalid_card()

            # if monthly billing previously failed, retry the charge
            if subscription.date_charge_failed:
                sub_cost = sub_type[subscription.get(
                    "subscriptionType")]["monthly_cost"]
                invoice = subscription.charge_cc(sub_cost,
                    EMAIL_MONTHLY_SUBJECT, MONTHLY)
                if invoice:
                    subscription.date_last_billed = \
                        subscription.date_last_billed + \
                        relativedelta(days=30)
                    subscription.date_charge_failed = None
                    subscription.update()
                    send_email_receipt_monthly_success(
                        request.session['account'], store,
                        subscription, invoice)
                else:
                    return invalid_card()

            if upgraded:
                max_users = sub_type[
                    subscription.subscriptionType]["max_users"]
                if max_users == UNLIMITED:
                    max_users = "Unlimited"
                package = {
                    "sub_type": sub_type[
                        subscription.subscriptionType - 1]["name"],
                    "new_sub_type": sub_type[
                        subscription.subscriptionType]["name"],
                    "new_sub_type_cost": sub_type[
                        subscription.subscriptionType]["monthly_cost"],
                    "new_max_patronStore_count": max_users,
                }
                send_email_account_upgrade(
                    request.session['account'], store, package)

            # Important that this is last since invalid_card may be
            # returned!
            subscription.update()

            # update the session cache
            request.session['store'] = store
            request.session['subscription'] = subscription

            # notify other dashboards of these changes
            payload = {
                COMET_RECEIVE_KEY_NAME: COMET_RECEIVE_KEY,
                "updatedSubscription": subscription.jsonify()
            }
            comet_receive(store.objectId, payload)

            # if coming from the message edit limit reached
            if do_upgrade:
                if request.session.get('from_limit_reached') and \
                        request.session.get('message_b4_upgrade'):
                    # redirect back to message_edit view to process
                    return redirect(reverse('message_edit',
                        args=(0,)) + "?%s" %
                        urllib.urlencode({'send_message': '1'}))

            if do_upgrade:
                return redirect(reverse('store_index') + "?%s" %
                    urllib.urlencode({'success':
                        'Your subscription has been upgraded.'}))
            else:
                return redirect(reverse('store_index') + "?%s" %
                    urllib.urlencode({'success':
                        'Your subscription has been updated.'}))

    else:
        form = SubscriptionForm()
        form.initial = subscription.__dict__.copy()
        # mask the cc_number, keeping the last 4 digits
        if form.initial.get("cc_number"):
            form.initial['cc_number'] = "*" * 12 + \
                form.initial.get('cc_number')[-4:]
        if do_upgrade:
            from_limit_reached = \
                request.session.get("from_limit_reached")
            if from_limit_reached:
                data['from_limit_reached'] = from_limit_reached

    # update the session cache
    request.session['store'] = store
    request.session['subscription'] = subscription

    data['form'] = form
    return render(request, 'manage/subscription_update.djhtml', data)
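# The "mask all but the last 4 digits" pattern appears twice in
# update_subscription; a hypothetical helper could centralize it.
# Minimal sketch, assuming cc_number is a string of at least 4 chars:
def mask_cc_number(cc_number):
    """Replace all but the last 4 digits with asterisks."""
    return "*" * 12 + cc_number[-4:]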
def make_aware(date, session):
    """ Returns an aware datetime object in the store's timezone. """
    return timezone.make_aware(date,
        SESSION.get_store_timezone(session))
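# make_aware_to_utc is imported from parse.utils (per the comment in
# trends_graph, it makes a naive local datetime aware and converts it
# to UTC "in 1 step"). A plausible sketch of that behavior, assuming
# pytz-style timezone objects - the real implementation lives in
# parse.utils:
import pytz

def make_aware_to_utc_sketch(naive_dt, tz):
    """Localize naive_dt to tz, then convert to UTC."""
    return tz.localize(naive_dt).astimezone(pytz.utc)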
def graph(request):
    store_timezone = SESSION.get_store_timezone(request.session)
    employee_ids = request.GET.getlist('employee[]')
    start = request.GET.get('start')
    end = request.GET.get('end')

    start = datetime.strptime(start, "%m/%d/%Y")
    start = start.replace(hour=0, minute=0, second=0, microsecond=0)
    end = datetime.strptime(end, "%m/%d/%Y")
    end = end.replace(hour=23, minute=59, second=59, microsecond=0)
    # need to make aware and then convert to utc for querying
    start_aware = make_aware_to_utc(start, store_timezone)
    end_aware = make_aware_to_utc(end, store_timezone)

    columns = [{"id": "", "label": "Date", "type": "string"}]

    # build the list of employees from the list in the session cache
    employees_approved_list = SESSION.get_employees_approved_list(
        request.session)
    employees = []
    for m in employees_approved_list:
        if m.objectId in employee_ids:
            employees.append(m)

    for emp in employees:
        columns.append({"id": "", "label": emp.get('first_name') +
            ' ' + emp.get('last_name'), "type": "number"})

    punch_map = {}
    # since a punch no longer contains a pointer to an employee,
    # the query must be made in the punches for each employee...
    for emp in employees:
        ps = emp.get('punches', createdAt__gte=start_aware,
            createdAt__lte=end_aware)
        if ps:
            for punch in ps:
                key = timezone.localtime(punch.createdAt,
                    store_timezone).strftime("%m/%d") + '-' + \
                    emp.objectId
                if key in punch_map:
                    punch_map[key] = punch_map[key] + \
                        punch.get('punches')
                else:
                    punch_map[key] = punch.get('punches')

    rows = []
    for single_date in rputils.daterange(start, end):
        # note: unlike trends_graph, this formats the UTC-converted
        # date while the punch_map keys above use store-local time
        str_date = make_aware_to_utc(single_date,
            store_timezone).strftime("%m/%d")
        c = [{"v": str_date}]
        for emp in employees:
            try:
                punch_count = punch_map[str_date + '-' + emp.objectId]
            except KeyError:
                punch_count = 0
            c.append({"v": punch_count})
        rows.append({'c': c})

    return HttpResponse(json.dumps({'cols': columns, 'rows': rows}),
        content_type="application/json")
def trends_graph(request, data_type=None, start=None, end=None):
    """ Handles requests for the trends graph. """
    store = SESSION.get_store(request.session)
    # We need the store's timezone to convert everything to UTC,
    # because start and end arrive as local times; to convert them to
    # UTC we must first make them timezone aware.
    # parse.utils.make_aware_to_utc does this in 1 step. Everything
    # is converted to UTC for use in queries.
    store_timezone = SESSION.get_store_timezone(request.session)

    start = datetime.strptime(start, "%Y-%m-%d")
    end = datetime.strptime(end, "%Y-%m-%d")
    start = start.replace(hour=0, minute=0, second=0, microsecond=0)
    end = end.replace(hour=23, minute=59, second=59, microsecond=0)
    start_aware = make_aware_to_utc(start, store_timezone)
    end_aware = make_aware_to_utc(end, store_timezone)

    rows, columns = [], []

    if data_type == 'punches':
        # graph data for punches for all days between start and end;
        # columns describes the fields every row must have, and rows
        # pairs each date with a punch count
        columns = [
            {"id": "", "label": "Date", "type": "string"},
            {"id": "", "label": "Punches", "type": "number"},
        ]
        # get the Punches
        punches = store.get('punches', createdAt__lte=end_aware,
            createdAt__gte=start_aware, order='createdAt', limit=900)
        # have to clear the punches cache attr of store filled
        # by the above query
        store.punches = None

        # create a dictionary for easy lookup
        punch_map = {}
        if punches:
            for punch in punches:
                # The keys in the punch map are the month/day of the
                # punch's createdAt, converted to the store's local
                # time for when the data is sent back to the client.
                key = timezone.localtime(punch.createdAt,
                    store_timezone).strftime("%m/%d")
                if key in punch_map:
                    # add to the punch count for the existing key
                    punch_map[key] = punch_map[key] + \
                        punch.get('punches')
                else:
                    # initialize the key in the punch map
                    punch_map[key] = punch.get('punches')

        for single_date in rputils.daterange(start, end):
            # populate the rows with the corresponding punch counts;
            # str_date is a day between start and end with the same
            # format as a key in punch_map
            str_date = single_date.strftime("%m/%d")
            try:
                punch_count = punch_map[str_date]
            except KeyError:
                punch_count = 0
            # the first item in the row is the date,
            # the second is the corresponding punch_count
            row = [{"v": str_date}, {"v": punch_count}]
            rows.append({'c': row})

    elif data_type == 'facebook':
        # graph data for facebook posts for all days between start
        # and end; rows pairs each date with a post count
        columns = [
            {"id": "", "label": "Date", "type": "string"},
            {"id": "", "label": "Posts", "type": "number"},
        ]
        # get the FacebookPosts (using the aware datetimes here to
        # match the punches branch; the original passed the naive
        # start/end, which contradicts the comment above)
        posts = store.get("facebookPosts", createdAt__lte=end_aware,
            createdAt__gte=start_aware, limit=900)
        # have to clear the facebookPosts cache attr of store filled
        # by the above query
        store.facebookPosts = None

        # create a dictionary for easy lookup
        post_map = {}
        if posts:
            for post in posts:
                # The keys in the post map are the month/day of the
                # post's createdAt, converted to the store's local
                # time for when the data is sent back to the client.
                key = timezone.localtime(post.createdAt,
                    store_timezone).strftime("%m/%d")
                if key in post_map:
                    # add to the post count for the existing key
                    post_map[key] = post_map[key] + 1
                else:
                    # initialize the post count
                    post_map[key] = 1

        for single_date in rputils.daterange(start, end):
            # populate the rows with the corresponding post counts
            str_date = single_date.strftime("%m/%d")
            try:
                post_count = post_map[str_date]
            except KeyError:
                post_count = 0
            row = [{"v": str_date}, {"v": post_count}]
            rows.append({'c': row})

    else:
        # graph data for unique patrons for all days between start
        # and end; rows pairs each date with a cumulative patron count
        columns = [
            {"id": "", "label": "Date", "type": "string"},
            {"id": "", "label": "Patrons", "type": "number"},
        ]
        for single_date in rputils.daterange(start, end):
            str_date = single_date.strftime("%m/%d")
            # FIXME To get the cumulative count for each day, we make
            # a count query per day. Optimization?
            d = single_date.replace(hour=23, minute=59, second=59)
            d_aware = make_aware_to_utc(d, store_timezone)
            patron_count = store.get('patronStores', count=1,
                limit=0, createdAt__lte=d_aware)
            row = [{"v": str_date}, {"v": patron_count}]
            rows.append({'c': row})

    # return the graph data
    return HttpResponse(json.dumps({'cols': columns, 'rows': rows}),
        content_type="application/json")
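# For reference, the payload produced by graph and trends_graph
# follows the Google Charts DataTable JSON layout: cols describe the
# fields, and each row is a {"c": [...]} list of {"v": value} cells.
# Values below are illustrative only:
SAMPLE_TRENDS_PAYLOAD = {
    "cols": [
        {"id": "", "label": "Date", "type": "string"},
        {"id": "", "label": "Punches", "type": "number"},
    ],
    "rows": [
        {"c": [{"v": "01/01"}, {"v": 4}]},
        {"c": [{"v": "01/02"}, {"v": 0}]},
    ],
}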
def breakdown_graph(request, data_type=None, filter=None, range=None):
    """ Handles requests for the breakdown graph. """
    store = SESSION.get_store(request.session)
    store_timezone = SESSION.get_store_timezone(request.session)

    (start, end) = rputils.calculate_daterange(range)
    start = start.replace(hour=0, minute=0, second=0, microsecond=0)
    end = end.replace(hour=23, minute=59, second=59, microsecond=0)
    # need to make aware and then convert to utc for querying
    start_aware = make_aware_to_utc(start, store_timezone)
    end_aware = make_aware_to_utc(end, store_timezone)

    results = []
    if data_type == 'punches':
        if filter == 'gender':
            results.append(["Range", "Unknown", "Male", "Female"])
            # WARNING! max punches returned is 1000!
            unknown, male, female = 0, 0, 0
            male_punches = relational_query(store.objectId, "Store",
                "Punches", "Punch", "Patron", "Patron",
                {"gender": "male"},
                {'createdAt__lte': end_aware,
                 'createdAt__gte': start_aware})
            female_punches = relational_query(store.objectId, "Store",
                "Punches", "Punch", "Patron", "Patron",
                {"gender": "female"},
                {'createdAt__lte': end_aware,
                 'createdAt__gte': start_aware})
            if male_punches:
                male_punches = male_punches['results']
                # aggregate the punches
                for p in male_punches:
                    male += p.get('punches')
            if female_punches:
                female_punches = female_punches['results']
                for p in female_punches:
                    female += p.get('punches')
            rows = [start.strftime("%m/%d/%Y") + ' - ' +
                    end.strftime("%m/%d/%Y"), unknown, male, female]
            results.append(rows)
        elif filter == 'age':
            results.append(["Range", "<20", "20-29", "30-39",
                            "40-49", ">50"])
            now = datetime.now()
            rows = [start.strftime("%m/%d/%Y") + ' - ' +
                    end.strftime("%m/%d/%Y"), 0, 0, 0, 0, 0]
            age_ranges = [(1, 0, -20), (2, -20, -30), (3, -30, -40),
                          (4, -40, -50), (5, -50, -200)]
            for (idx, start_age, end_age) in age_ranges:
                start_dob = now + relativedelta(years=end_age)
                start_dob = start_dob.replace(hour=0, minute=0,
                    second=0)
                end_dob = now + relativedelta(years=start_age)
                end_dob = end_dob + relativedelta(days=-1)
                end_dob = end_dob.replace(hour=23, minute=59,
                    second=59)
                # need to make aware and then convert to utc for
                # querying
                start_dob_aware = make_aware_to_utc(start_dob,
                    store_timezone)
                end_dob_aware = make_aware_to_utc(end_dob,
                    store_timezone)
                punches = relational_query(store.objectId, "Store",
                    "Punches", "Punch", "Patron", "Patron",
                    {'date_of_birth__lte': end_dob_aware,
                     'date_of_birth__gte': start_dob_aware},
                    {'createdAt__lte': end_aware,
                     'createdAt__gte': start_aware})
                punch_count = 0
                if punches:
                    punches = punches['results']
                    for punch in punches:
                        punch_count += punch['punches']
                rows[idx] = punch_count
            results.append(rows)

    elif data_type == 'facebook':
        if filter == 'gender':
            results.append(["Range", "Unknown", "Male", "Female"])
            results.append([
                start.strftime("%m/%d/%Y") + ' - ' +
                end.strftime("%m/%d/%Y"),
                0,
                relational_query(store.objectId, "Store",
                    "FacebookPosts", "FacebookPost", "Patron",
                    "Patron", {"gender": "male"},
                    {'createdAt__lte': end_aware,
                     'createdAt__gte': start_aware}, count=True),
                relational_query(store.objectId, "Store",
                    "FacebookPosts", "FacebookPost", "Patron",
                    "Patron", {"gender": "female"},
                    {'createdAt__lte': end_aware,
                     'createdAt__gte': start_aware}, count=True),
            ])
        elif filter == 'age':
            results.append(["Range", "<20", "20-29", "30-39",
                            "40-49", ">50"])
            now = datetime.now()
            rows = [start.strftime("%m/%d/%Y") + ' - ' +
                    end.strftime("%m/%d/%Y"), 0, 0, 0, 0, 0]
            age_ranges = [(1, 0, -20), (2, -20, -30), (3, -30, -40),
                          (4, -40, -50), (5, -50, -200)]
            for (idx, start_age, end_age) in age_ranges:
                start_dob = now + relativedelta(years=end_age)
                start_dob = start_dob.replace(hour=0, minute=0,
                    second=0)
                end_dob = now + relativedelta(years=start_age)
                end_dob = end_dob + relativedelta(days=-1)
                end_dob = end_dob.replace(hour=23, minute=59,
                    second=59)
                # need to make aware and then convert to utc for
                # querying
                start_dob_aware = make_aware_to_utc(start_dob,
                    store_timezone)
                end_dob_aware = make_aware_to_utc(end_dob,
                    store_timezone)
                rows[idx] = relational_query(store.objectId, "Store",
                    "FacebookPosts", "FacebookPost", "Patron",
                    "Patron",
                    {'date_of_birth__lte': end_dob_aware,
                     'date_of_birth__gte': start_dob_aware},
                    {'createdAt__lte': end_aware,
                     'createdAt__gte': start_aware}, count=True)
            results.append(rows)

    else:  # patrons
        if filter == 'gender':
            results.append(["Range", "Unknown", "Male", "Female"])
            results.append([
                start.strftime("%m/%d/%Y") + ' - ' +
                end.strftime("%m/%d/%Y"),
                0,
                relational_query(store.objectId, "Store",
                    "PatronStores", "PatronStore", "Patron", "Patron",
                    {"gender": "male"},
                    {'createdAt__lte': end_aware,
                     'createdAt__gte': start_aware}, count=True),
                relational_query(store.objectId, "Store",
                    "PatronStores", "PatronStore", "Patron", "Patron",
                    {"gender": "female"},
                    {'createdAt__lte': end_aware,
                     'createdAt__gte': start_aware}, count=True),
            ])
        elif filter == 'age':
            results.append(["Range", "<20", "20-29", "30-39",
                            "40-49", ">50"])
            now = datetime.now()
            rows = [start.strftime("%m/%d/%Y") + ' - ' +
                    end.strftime("%m/%d/%Y"), 0, 0, 0, 0, 0]
            age_ranges = [(1, 0, -20), (2, -20, -30), (3, -30, -40),
                          (4, -40, -50), (5, -50, -200)]
            for (idx, start_age, end_age) in age_ranges:
                start_dob = now + relativedelta(years=end_age)
                start_dob = start_dob.replace(hour=0, minute=0,
                    second=0)
                end_dob = now + relativedelta(years=start_age)
                end_dob = end_dob + relativedelta(days=-1)
                end_dob = end_dob.replace(hour=23, minute=59,
                    second=59)
                # need to make aware and then convert to utc for
                # querying
                start_dob_aware = make_aware_to_utc(start_dob,
                    store_timezone)
                end_dob_aware = make_aware_to_utc(end_dob,
                    store_timezone)
                rows[idx] = relational_query(store.objectId, "Store",
                    "PatronStores", "PatronStore", "Patron", "Patron",
                    {'date_of_birth__lte': end_dob_aware,
                     'date_of_birth__gte': start_dob_aware},
                    {'createdAt__lte': end_aware,
                     'createdAt__gte': start_aware}, count=True)
            results.append(rows)

    return HttpResponse(json.dumps(results),
        content_type="application/json")
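# The date-of-birth window computation above is repeated verbatim in
# all three age branches; a hypothetical helper could factor it out.
# Minimal sketch (not in the codebase):
def dob_window(now, start_age, end_age, store_timezone):
    """UTC-aware (start_dob, end_dob) bounds for one age bracket.

    start_age/end_age are the negative year offsets used in
    age_ranges, e.g. (0, -20) for the "<20" bracket.
    """
    start_dob = (now + relativedelta(years=end_age)).replace(
        hour=0, minute=0, second=0)
    end_dob = (now + relativedelta(years=start_age) +
               relativedelta(days=-1)).replace(
        hour=23, minute=59, second=59)
    return (make_aware_to_utc(start_dob, store_timezone),
            make_aware_to_utc(end_dob, store_timezone))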
def get_page(request):
    """ Returns generated HTML to plug into the tables. """
    if request.method == "GET":
        type = request.GET.get("type")
        page = int(request.GET.get("page")) - 1

        if type == "pending-redemptions":
            template = "manage/redemptions_pending_chunk.djhtml"
            pending_redemps = \
                SESSION.get_redemptions_pending(request.session)
            # sort
            header_map = {
                "redemption_time": "createdAt",  # IMPORTANT DIFF!
                "redemption_customer_name": "customer_name",
                "redemption_title": "title",
                "redemption_punches": "num_punches",
            }
            header = request.GET.get("header")
            if header:  # header can only be date
                reverse = request.GET.get("order") == "desc"
                pending_redemps.sort(
                    key=lambda r: r.__dict__[header_map[header]],
                    reverse=reverse)
            # set the chunk
            start = page * PAGINATION_THRESHOLD
            end = start + PAGINATION_THRESHOLD
            data = {"pending_redemptions": pending_redemps[start:end]}
            request.session["redemptions_pending"] = pending_redemps

        elif type == "history-redemptions":
            template = "manage/redemptions_history_chunk.djhtml"
            past_redemps = \
                SESSION.get_redemptions_past(request.session)
            # sort
            header_map = {
                "redemption_time-past": "updatedAt",  # IMPORTANT DIFF!
                "redemption_customer_name-past": "customer_name",
                "redemption_title-past": "title",
                "redemption_punches-past": "num_punches",
            }
            header = request.GET.get("header")
            if header:
                reverse = request.GET.get("order") == "desc"
                past_redemps.sort(
                    key=lambda r: r.__dict__[header_map[header]],
                    reverse=reverse)
            request.session["redemptions_past"] = past_redemps
            # set the chunk
            start = page * PAGINATION_THRESHOLD
            end = start + PAGINATION_THRESHOLD
            data = {"past_redemptions": past_redemps[start:end]}

        # don't forget the today for comparison!
        # (end of yesterday, 23:59:59 store-local)
        today = timezone.make_aware(datetime.now(),
            SESSION.get_store_timezone(request.session))
        today = today + relativedelta(days=-1)
        today = today.replace(hour=23, minute=59, second=59)
        data["today"] = today

        return render(request, template, data)

    return HttpResponse("Bad request")
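# A hedged sketch of how a client pages these tables via the Django
# test client. The URL name 'get_page' is an assumption - substitute
# whatever urls.py actually maps to this view:
from django.test import Client
from django.core.urlresolvers import reverse  # django.urls on newer Django

def fetch_history_page(page):
    client = Client()
    return client.get(reverse('get_page'), {
        "type": "history-redemptions",
        "page": str(page),
        "header": "redemption_time-past",  # sorts on updatedAt
        "order": "desc",
    })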
def edit(request, message_id):
    """
    Renders the message edit template for a new message and handles
    the send-message form.
    """
    data = {'messages_nav': True, 'message_id': message_id,
            "filters": FILTERS}
    store = SESSION.get_store(request.session)

    # number of patron stores
    mp = SESSION.get_patronStore_count(request.session)
    # make sure cache attr is None for future queries!
    store.patronStores = None

    data['mp_slider_value'] = int(ceil(float(mp) * 0.50))
    data['mp_slider_min'] = 1
    data['mp_slider_max'] = mp

    # redirect if no patrons
    if not store.get("patronStores", count=1, limit=0):
        return redirect(reverse("messages_index"))

    # user submitted a form through a POST request, or user is coming
    # from an upgrade sequence out of subscription_update
    if request.method == 'POST' or (request.method == "GET" and
            request.GET.get("send_message") and
            "message_b4_upgrade" in request.session):
        if request.method == "GET":
            # coming from an upgrade sequence from subscription_update
            postDict = request.session['message_b4_upgrade'].copy()
            # clean up temp vars in the session
            del request.session['message_b4_upgrade']
            del request.session['from_limit_reached']
        else:
            # user submitted the form through a POST request
            postDict = request.POST.dict().copy()

        # populate a message form with the POST data for validation
        form = MessageForm(postDict)
        if form.is_valid():
            # form is valid so continue to send the message
            subscription = SESSION.get_subscription(request.session)
            subType = subscription.get('subscriptionType')

            # refresh the message count -
            # make sure we get the one in the cloud
            if 'message_count' in request.session:
                del request.session['message_count']
            message_count = SESSION.get_message_count(request.session)

            # get max_messages from the user's subscriptionType,
            # or the highest level if god_mode is on
            if subscription.god_mode:
                max_messages = sub_type[2]['max_messages']
            else:
                max_messages = sub_type[subType]['max_messages']

            # the limit is reached if the number of messages sent
            # this billing cycle has passed the amount for that
            # subscription type
            limit_reached = message_count >= max_messages

            # the limit is always enforced in production; otherwise
            # it is ignored if message_limit_off is in the session
            if limit_reached and (PRODUCTION_SERVER or
                    (not PRODUCTION_SERVER and
                     "message_limit_off" not in request.session)):
                data['limit_reached'] = limit_reached
                if subType != 2:
                    # not the highest subscription level, so an
                    # upgrade is still possible - save the dict
                    # to the session
                    request.session['message_b4_upgrade'] = \
                        request.POST.dict().copy()
                elif subType == 2:
                    # the highest subscription level, so no more
                    # upgrades can occur - therefore maxed out
                    data['maxed_out'] = True
            else:
                # limit has not yet been reached - send the message;
                # build the message from session and POST data
                message = Message(
                    sender_name=store.get('store_name'),
                    store_id=store.objectId)
                message.update_locally(postDict, False)

                # check if attach offer is selected
                if 'attach_offer' in postDict:
                    # message has an offer - extract it from the post
                    # data, ensuring proper datetime format
                    d = parser.parse(postDict['date_offer_expiration'])
                    d = make_aware_to_utc(d,
                        SESSION.get_store_timezone(request.session))
                    message.set('date_offer_expiration', d)
                    message.set('message_type', OFFER)
                else:
                    # delete offer information in case attach offer is
                    # not checked but the submitted form still
                    # contained offer information
                    message.set('offer_title', None)
                    message.set('date_offer_expiration', None)
                    message.set('message_type', BASIC)

                # actually create the message in Parse
                message.create()
                # put the message in the template context for
                # rendering
                data['message'] = message
                # add to the store's relation
                store.add_relation("SentMessages_", [message.objectId])

                # prepare the parameters for the cloud call
                params = {
                    "store_id": store.objectId,
                    "store_name": store.get('store_name'),
                    "subject": message.get('subject'),
                    "message_id": message.objectId,
                    "filter": message.filter,
                }
                # process the filter option
                if message.filter == "idle":
                    # pass in the correct idle_date, which is today
                    # minus the days specified by idle_latency
                    idle_days = postDict['idle_latency']
                    d = timezone.now() + relativedelta(
                        days=-1 * int(idle_days))
                    params.update({"idle_date": d.isoformat()})
                elif message.filter == "most_loyal":
                    # pass in the number of patrons
                    params.update(
                        {"num_patrons": postDict['num_patrons']})

                # update store and message_count in the session cache
                request.session['message_count'] = message_count
                request.session['store'] = store
                # save the session - cloud_call may take a while!
                request.session.save()

                # make the cloud call
                res = cloud_call("retailer_message", params)
                if "error" not in res and res.get("result"):
                    message.set("receiver_count",
                        res.get("result").get("receiver_count"))

                # notify other tabs and windows logged into this store
                # about the new message sent
                payload = {
                    COMET_RECEIVE_KEY_NAME: COMET_RECEIVE_KEY,
                    "newMessage": message.jsonify()
                }
                comet_receive(store.objectId, payload)

                # the new message is saved in comet_receive, so make
                # sure we have the latest session to save!
                request.session.clear()
                request.session.update(
                    SessionStore(request.session.session_key))

                return HttpResponseRedirect(message.get_absolute_url())

        elif 'num_patrons' in form.errors:
            # form is invalid due to the number-of-patrons input
            # for the most_loyal filter
            data['error'] = ("Number of customers must be a "
                "whole number and greater than 0.")
        else:
            # form has some errors
            data['error'] = "The form you submitted has errors."

    else:
        # check if the incoming request is for an account upgrade
        if request.GET.get("do_upgrade"):
            # flag the upgrade view and redirect to upgrade account
            request.session["from_limit_reached"] = True
            return HttpResponseRedirect(
                reverse("subscription_update") + "?do_upgrade=1")

        if message_id in (0, '0'):
            # this is a new message so just instantiate a new form
            form = MessageForm()
        else:
            # this is an existing message that the user wants to view.
            # Inserting these success and error messages into the
            # template should be done in a cleaner way - this was done
            # by the first guy; I just didn't bother changing it.
            if request.GET.get("error"):
                data['error'] = request.GET.get("error")
            if request.GET.get("success"):
                data['success'] = request.GET.get("success")

            # get it from the messages_sent_list in the session cache
            messages_sent_list = SESSION.get_messages_sent_list(
                request.session)
            for m in messages_sent_list:
                if m.objectId == message_id:
                    data['message'] = m
            # use get() - the key is absent when no message matched
            if data.get('message'):
                # message found, so fill the form with its data
                form = MessageForm(data['message'].__dict__.copy())
            else:
                # message not found, so just instantiate a new form
                form = MessageForm()

    # update the store session cache
    request.session['store'] = store
    # inject the form into the template context for rendering
    data['form'] = form
    return render(request, 'manage/message_edit.djhtml', data)
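# The limit-enforcement condition above simplifies by boolean
# algebra: A or (not A and X) == A or X. A hypothetical pure helper
# showing the equivalent check (not part of the codebase):
def message_limit_enforced(limit_reached, production, session):
    """True when the send should be blocked: always in production,
    and outside production unless message_limit_off is set."""
    return limit_reached and (
        production or "message_limit_off" not in session)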