def get_traffic_dates(thing):
    """Retrieve the start and end of a Promoted Link or PromoCampaign.

    The end is clamped to the current hour (in g.tz) so the returned
    range never extends into the future.
    """
    current_hour = datetime.datetime.now(g.tz).replace(
        minute=0, second=0, microsecond=0)
    start, end = promote.get_total_run(thing)
    return start, min(current_hour, end)
def finalize_completed_campaigns(daysago=1):
    """Bill and (if necessary) refund campaigns that ended `daysago` days ago.

    PromoCampaign.end_date is a utc datetime with year, month, day only.
    Campaigns that already have a `refund_amount` attribute are skipped
    (they were finalized previously).

    Raises:
        ValueError: if the traffic needed to bill the campaigns is not
            yet available.
    """
    now = datetime.datetime.now(g.tz)
    date = now - datetime.timedelta(days=daysago)
    date = date.replace(hour=0, minute=0, second=0, microsecond=0)

    q = PromoCampaign._query(
        PromoCampaign.c.end_date == date,
        # exclude no transaction and freebies
        PromoCampaign.c.trans_id > 0,
        data=True)
    campaigns = list(q)

    # nothing ended on that date; avoid min() on an empty sequence below
    if not campaigns:
        return

    # check that traffic is up to date
    earliest_campaign = min(campaigns, key=lambda camp: camp.start_date)
    start, end = promote.get_total_run(earliest_campaign)
    missing_traffic = get_missing_traffic(start.replace(tzinfo=None),
                                          date.replace(tzinfo=None))
    if missing_traffic:
        raise ValueError("Can't finalize campaigns finished on %s. "
                         "Missing traffic from %s" % (date, missing_traffic))

    # fix: the original comprehension iterated `links` (not yet defined);
    # the link ids come from the campaigns being finalized
    links = Link._byID([camp.link_id for camp in campaigns], data=True)

    for camp in campaigns:
        if hasattr(camp, 'refund_amount'):
            # already finalized on a previous run
            continue

        link = links[camp.link_id]
        billable_impressions = promote.get_billable_impressions(camp)
        billable_amount = promote.get_billable_amount(camp,
                                                      billable_impressions)

        if billable_amount >= camp.bid:
            # the campaign delivered fully; nothing to refund
            text = ('%s completed with $%s billable (%s impressions @ $%s).'
                    % (camp, billable_amount, billable_impressions, camp.cpm))
            PromotionLog.add(link, text)
            refund_amount = 0.
        else:
            # under-delivered: refund the unbilled portion of the bid
            refund_amount = camp.bid - billable_amount
            user = Account._byID(link.author_id, data=True)
            try:
                authorize.refund_transaction(user, camp.trans_id,
                                             camp._id, refund_amount)
            except authorize.AuthorizeNetException as e:
                # leave refund_amount unset on the campaign so a later run
                # retries the refund
                text = ('%s $%s refund failed' % (camp, refund_amount))
                PromotionLog.add(link, text)
                g.log.debug(text + ' (response: %s)' % e)
                continue
            text = ('%s completed with $%s billable (%s impressions @ $%s).'
                    ' %s refunded.' % (camp, billable_amount,
                                       billable_impressions, camp.cpm,
                                       refund_amount))
            PromotionLog.add(link, text)

        camp.refund_amount = refund_amount
        camp._commit()
def make_tables(self):
    """Build the hourly traffic history for this promotion.

    Populates self.history, self.total_impressions, self.total_clicks,
    self.total_ctr (only when impressions were recorded),
    self.is_preliminary, and self.max_points. Returns the computed
    history rows (or None when the promotion has no run dates).
    """
    start, end = promote.get_total_run(self.thing)
    if not (start and end):
        self.history = []
        return

    # only look at the trailing 31 days of a long-running promotion
    start = max(start, end - datetime.timedelta(days=31))

    fullname = self.thing._fullname
    imps = traffic.AdImpressionsByCodename.promotion_history(
        fullname, start, end)
    clicks = traffic.ClickthroughsByCodename.promotion_history(
        fullname, start, end)

    # promotion might have no clicks, zip_timeseries needs valid columns
    if imps and not clicks:
        clicks = [(imps[0][0], (0, 0))]

    self.total_impressions = 0
    self.total_clicks = 0
    rows = []
    for date, data in traffic.zip_timeseries(imps, clicks,
                                             order="ascending"):
        uniq_imps, all_imps, uniq_clicks, all_clicks = data
        uniq_ctr = _clickthrough_rate(uniq_imps, uniq_clicks)
        overall_ctr = _clickthrough_rate(all_imps, all_clicks)
        self.total_impressions += all_imps
        self.total_clicks += all_clicks
        rows.append((date, data + (uniq_ctr, overall_ctr)))
    self.history = rows

    if self.total_impressions > 0:
        self.total_ctr = _clickthrough_rate(self.total_impressions,
                                            self.total_clicks)

    # XXX: _is_promo_preliminary correctly expects tz-aware datetimes
    # because it's also used with datetimes from promo code. this hack
    # relies on the fact that we're storing UTC w/o timezone info.
    # TODO: remove this when traffic is correctly using timezones.
    self.is_preliminary = _is_promo_preliminary(end.replace(tzinfo=g.tz))

    # we should only graph a sane number of data points (not everything)
    self.max_points = traffic.points_for_interval("hour")

    return rows
def get_tables(self):
    """Assemble the hourly impression/click history for this promotion.

    Sets self.history, self.total_impressions, self.total_clicks,
    self.total_ctr (only when impressions were recorded),
    self.is_preliminary, and self.max_points; returns the history rows
    (or None when the promotion has no run dates).
    """
    start, end = promote.get_total_run(self.thing)
    if not (start and end):
        self.history = []
        return

    # restrict long campaigns to their final 31 days of traffic
    month_ago = end - datetime.timedelta(days=31)
    if start < month_ago:
        start = month_ago

    fullname = self.thing._fullname
    imps = traffic.AdImpressionsByCodename.promotion_history(
        fullname, start, end)
    clicks = traffic.ClickthroughsByCodename.promotion_history(
        fullname, start, end)

    # promotion might have no clicks, zip_timeseries needs valid columns
    if imps and not clicks:
        clicks = [(imps[0][0], (0, 0))]

    history = traffic.zip_timeseries(imps, clicks, order="ascending")

    self.total_impressions = 0
    self.total_clicks = 0
    computed_history = []
    for date, data in history:
        uniq_imps, impressions, uniq_clicks, click_count = data
        uniq_ctr = self.calculate_clickthrough_rate(uniq_imps, uniq_clicks)
        overall_ctr = self.calculate_clickthrough_rate(impressions,
                                                       click_count)
        self.total_impressions += impressions
        self.total_clicks += click_count
        computed_history.append((date, data + (uniq_ctr, overall_ctr)))
    self.history = computed_history

    if self.total_impressions > 0:
        self.total_ctr = (
            (float(self.total_clicks) / self.total_impressions) * 100.)

    # the results are preliminary until 1 day after the promotion ends
    now = datetime.datetime.utcnow()
    self.is_preliminary = now < end + datetime.timedelta(days=1)

    # we should only graph a sane number of data points (not everything)
    self.max_points = traffic.points_for_interval("hour")

    return computed_history
def make_tables(self):
    """Build the traffic history table for one page of this promotion.

    When self.period is set, shows a window of that length within the
    promotion's run (bounded by self.after/self.before) and sets
    self.prev/self.next paging URLs where earlier/later traffic exists;
    otherwise shows the whole run. Populates self.history,
    self.total_impressions, self.total_clicks, self.total_ctr (only when
    impressions were recorded), self.is_preliminary, and self.max_points.
    Returns the computed history rows (or None when the promotion has no
    run dates).
    """
    # clamp the visible end of the run to the current whole hour so we
    # never query the traffic system for future data
    now = datetime.datetime.utcnow().replace(minute=0, second=0,
                                             microsecond=0)
    promo_start, promo_end = promote.get_total_run(self.thing)
    promo_end = min(now, promo_end)
    if not promo_start or not promo_end:
        self.history = []
        return

    if self.period:
        start = self.after
        end = self.before

        # derive whichever window edge wasn't supplied from the other
        # one; with neither supplied, anchor the window at the end of
        # the run
        if not start and not end:
            end = promo_end
            start = end - self.period
        elif not end:
            end = start + self.period
        elif not start:
            start = end - self.period

        if start > promo_start:
            # earlier traffic exists: build the "previous page" URL
            p = request.get.copy()
            p.update({'after':None, 'before':start.strftime('%Y%m%d%H')})
            self.prev = '%s?%s' % (request.path, urllib.urlencode(p))
        else:
            start = promo_start
        if end < promo_end:
            # later traffic exists: build the "next page" URL
            p = request.get.copy()
            p.update({'after':end.strftime('%Y%m%d%H'), 'before':None})
            self.next = '%s?%s' % (request.path, urllib.urlencode(p))
        else:
            end = promo_end
    else:
        # no paging: show the promotion's entire run
        start, end = promo_start, promo_end

    fullname = self.thing._fullname
    imps = traffic.AdImpressionsByCodename.promotion_history(fullname,
                                                             start, end)
    clicks = traffic.ClickthroughsByCodename.promotion_history(fullname,
                                                               start, end)

    # promotion might have no clicks, zip_timeseries needs valid columns
    if imps and not clicks:
        clicks = [(imps[0][0], (0, 0))]

    history = traffic.zip_timeseries(imps, clicks, order="ascending")

    computed_history = []
    self.total_impressions, self.total_clicks = 0, 0
    for date, data in history:
        # columns: unique imps, imps, unique clicks, clicks
        u_imps, imps, u_clicks, clicks = data
        u_ctr = _clickthrough_rate(u_imps, u_clicks)
        ctr = _clickthrough_rate(imps, clicks)
        self.total_impressions += imps
        self.total_clicks += clicks
        computed_history.append((date, data + (u_ctr, ctr)))
    self.history = computed_history

    if self.total_impressions > 0:
        self.total_ctr = _clickthrough_rate(self.total_impressions,
                                            self.total_clicks)

    # XXX: _is_promo_preliminary correctly expects tz-aware datetimes
    # because it's also used with datetimes from promo code. this hack
    # relies on the fact that we're storing UTC w/o timezone info.
    # TODO: remove this when traffic is correctly using timezones.
    end_aware = end.replace(tzinfo=g.tz)
    self.is_preliminary = _is_promo_preliminary(end_aware)

    # we should only graph a sane number of data points (not everything)
    self.max_points = traffic.points_for_interval("hour")

    return computed_history