def get_predicted_pageviews(srs, start, end):
    """Return predicted daily pageviews per subreddit over the date range.

    Accepts a single subreddit or a list; the return shape matches the
    input (a single date->pageviews dict, or a dict of those keyed by
    subreddit name).
    """
    subreddits, was_single = tup(srs, ret_is_single=True)
    names = [subreddit.name for subreddit in subreddits]

    # default subreddits require a different inventory factor
    content_langs = [g.site_lang]
    default_srids = Subreddit.top_lang_srs(content_langs,
                                           limit=g.num_default_reddits,
                                           filter_allow_top=True,
                                           over18=False,
                                           ids=True)

    # prediction does not vary by date
    daily_inventory = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_names=names)
    dates = get_date_range(start, end)

    predictions = {}
    for subreddit in subreddits:
        is_default = (not isinstance(subreddit, FakeSubreddit) and
                      subreddit._id in default_srids)
        factor = DEFAULT_INVENTORY_FACTOR if is_default else INVENTORY_FACTOR
        per_day = int(daily_inventory.get(subreddit.name, 0) * factor)
        predictions[subreddit.name] = dict.fromkeys(dates, per_day)

    if was_single:
        return predictions[subreddits[0].name]
    else:
        return predictions
def get_predicted_by_date(sr_name, start, stop=None):
    """Return dict mapping datetime objects to predicted pageviews."""
    # lowest pageviews any day the last 2 weeks
    min_daily = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_name).get(sr_name, 0)

    # expand out to the requested range of dates; with no stop date,
    # predict a single day
    num_days = (stop - start).days if stop else 1
    return OrderedDict(
        (start + timedelta(offset), min_daily) for offset in range(num_days))
def get_predicted_by_date(sr_name, start, stop=None):
    """Return dict mapping datetime objects to predicted pageviews."""
    # an empty name means the frontpage
    if not sr_name:
        sr_name = DefaultSR.name.lower()

    # lowest pageviews any day the last 2 weeks
    min_daily = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_name).get(sr_name, 0)

    # expand out to the requested range of dates; with no stop date,
    # predict a single day
    num_days = (stop - start).days if stop else 1
    return OrderedDict(
        (start + timedelta(offset), min_daily) for offset in range(num_days))
def get_predicted_by_date(sr_name, start, stop=None):
    """Return an OrderedDict mapping dates to predicted pageviews.

    For now, use lowest pageviews in the subreddit any day the last two
    weeks as a simple heuristic.
    """
    min_daily = PromoMetrics.get(CassKeys.MIN_DAILY, sr_name).get(sr_name, 0)

    # expand the flat estimate across the requested dates
    num_days = (stop - start).days if stop else 1  # default is one day
    predicted = OrderedDict()
    for offset in range(num_days):
        predicted[start + timedelta(offset)] = min_daily
    return predicted
def get_predicted_by_date(sr_name, start, stop=None):
    """Predict pageviews for each date in [start, stop).

    For now, use lowest pageviews in the subreddit any day the last two
    weeks as a simple heuristic. Without a stop date, a single day is
    predicted.
    """
    # lowest pageviews any day the last 2 weeks
    min_daily = PromoMetrics.get(CassKeys.MIN_DAILY, sr_name).get(sr_name, 0)

    num_days = (stop - start).days if stop else 1
    return OrderedDict(
        (start + timedelta(offset), min_daily) for offset in range(num_days))
def get_predicted_pageviews(srs, start, end):
    """Return predicted daily pageviews over [start, end) per subreddit.

    Accepts one subreddit or a list; the return shape matches the input.
    """
    subreddits, was_single = tup(srs, ret_is_single=True)
    names = [subreddit.name for subreddit in subreddits]

    # prediction does not vary by date
    daily_inventory = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_names=names)
    dates = get_date_range(start, end)

    by_name = {}
    for subreddit in subreddits:
        per_day = int(daily_inventory.get(subreddit.name, 0) *
                      INVENTORY_FACTOR)
        by_name[subreddit.name] = dict.fromkeys(dates, per_day)

    return by_name[subreddits[0].name] if was_single else by_name
def get_predicted_pageviews(srs, location=None):
    """
    Return predicted number of pageviews for sponsored headlines.

    Predicted geotargeted impressions are estimated as:

    geotargeted impressions = (predicted untargeted impressions) *
        (fp impressions for location / fp impressions)
    """
    subreddits, was_single = tup(srs, ret_is_single=True)
    names = [subreddit.name for subreddit in subreddits]

    # default subreddits require a different inventory factor
    default_srids = LocalizedDefaultSubreddits.get_global_defaults()

    # scale predictions by the location's share of frontpage traffic
    if location:
        no_location = Location(None)
        metrics = LocationPromoMetrics.get(DefaultSR, [no_location, location])
        location_pageviews = metrics[(DefaultSR, location)]
        all_pageviews = metrics[(DefaultSR, no_location)]
        if all_pageviews:
            location_factor = float(location_pageviews) / float(all_pageviews)
        else:
            location_factor = 0.
    else:
        location_factor = 1.0

    # prediction does not vary by date
    daily_inventory = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_names=names)

    predictions = {}
    for subreddit in subreddits:
        is_default = (not isinstance(subreddit, FakeSubreddit) and
                      subreddit._id in default_srids)
        default_factor = (DEFAULT_INVENTORY_FACTOR if is_default
                          else INVENTORY_FACTOR)
        base_pageviews = daily_inventory.get(subreddit.name, 0)
        predictions[subreddit.name] = int(
            base_pageviews * default_factor * location_factor)

    return predictions[subreddits[0].name] if was_single else predictions
def get_predicted_pageviews(srs):
    """Return predicted daily pageviews per subreddit.

    Accepts one subreddit or a list; the return shape matches the input
    (a single int, or a dict of ints keyed by subreddit name).
    """
    subreddits, was_single = tup(srs, ret_is_single=True)
    names = [subreddit.name for subreddit in subreddits]

    # default subreddits require a different inventory factor
    default_srids = LocalizedDefaultSubreddits.get_global_defaults()

    # prediction does not vary by date
    daily_inventory = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_names=names)

    predictions = {}
    for subreddit in subreddits:
        is_default = (not isinstance(subreddit, FakeSubreddit) and
                      subreddit._id in default_srids)
        factor = DEFAULT_INVENTORY_FACTOR if is_default else INVENTORY_FACTOR
        predictions[subreddit.name] = int(
            daily_inventory.get(subreddit.name, 0) * factor)

    return predictions[subreddits[0].name] if was_single else predictions
def get_available_pageviews(srs, start, end, datestr=False, ignore=None,
                            inventory_factor=1.00):
    """Return pageviews still available for sale, per subreddit per day.

    Available inventory is the predicted daily inventory scaled by
    ``inventory_factor``, minus pageviews already sold, floored at zero.

    srs -- a single subreddit or a list of subreddits; the return shape
        matches the input (one date->available dict, or a dict of those
        keyed by subreddit name).
    start, end -- bounds of the date range to examine.
    datestr -- when True, key the per-day dicts by 'mm/dd/yyyy' strings
        instead of date objects.
    ignore -- passed through to get_sold_pageviews (entries to exclude
        from the sold totals).
    inventory_factor -- multiplier applied to the predicted daily
        inventory. Replaces the previous hard-coded ``* 1.00``; the
        default preserves the original behavior.
    """
    srs, is_single = tup(srs, ret_is_single=True)
    sr_names = [sr.name for sr in srs]
    daily_inventory = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_names=sr_names)
    sold_by_sr_by_date = get_sold_pageviews(srs, start, end, ignore)

    def datekey(dt):
        # string keys are handy for JSON-bound consumers
        return dt.strftime('%m/%d/%Y') if datestr else dt

    ret = {}
    for sr in srs:
        sold_by_date = sold_by_sr_by_date[sr.name]
        sr_ad_inventory = int(daily_inventory.get(sr.name, 0) *
                              inventory_factor)
        ret[sr.name] = {
            datekey(date): max(0, sr_ad_inventory - sold)
            for date, sold in sold_by_date.iteritems()
        }
    if is_single:
        return ret[srs[0].name]
    else:
        return ret