def test_periods(self):
    """The ``periods`` property mirrors the underlying dict keys."""
    bl = BucketedList({
        '2014': [1, 2, 3],
        '2013': [4, 5, 6],
    })
    self.assertEqual(bl.periods, bl.keys())
    # Dict key order is arbitrary (pre-3.7 CPython / Python 2), so asserting
    # an exact ordering like ['2014', '2013'] is flaky; compare order-free.
    self.assertEqual(sorted(bl.periods), ['2013', '2014'])
def test_extend(self):
    """extend() concatenates shared periods and adopts missing ones."""
    first = BucketedList({'2014': [1, 2], '2013': [3, 4], '1999': [5, 6]})
    second = BucketedList({'2014': [1, 2], '2013': [3, 4], '2011': [5, 6]})
    first.extend(second)
    expected = {
        '2014': [1, 2, 1, 2],
        '2013': [3, 4, 3, 4],
        '2011': [5, 6],
        '1999': [5, 6],
    }
    self.assertEqual(first, expected)
def _mrr(start, end):
    """Build per-owner MRR series between *start* and *end*.

    Reads the ``bucketed`` request arg ('quarter' by default) to choose the
    time bucketing, sums the trate/frate delta of each period, and returns a
    list of ``{'key': owner, 'values': [(period, value), ...]}`` dicts.
    """
    bucket = request.args.get('bucketed', 'quarter')
    owners = QueryOwners(['CTP', '%WP', '%WU'], start, end, owners=None).bucketed(bucket)
    # Zero out any period that exists in one owner's series but not another,
    # so every series spans the same time range.
    pset = BucketedList.period_set(*owners.values())
    # NOTE: was `map(lambda bl: ..., ...)` — map() for side effects is a
    # no-op under Python 3's lazy map; use an explicit loop instead.
    for bucketed_list in owners.values():
        bucketed_list.fill_missing_periods(pset)
    # Used as a map across bucketed lists to sum each period's rate deltas.
    sum_rate_delta = partial(sum_delta, 'trate', 'frate')
    series = []
    for name in owners:
        bucketed_list = owners[name]
        bucketed_list.period_map(sum_rate_delta)
        series.append({
            'key': name,
            'values': [(key, bucketed_list[key]) for key in sorted(bucketed_list)],
        })
    return series
def _paid_account_count(start, end):
    """Count standard and pro paid accounts per time bucket.

    Returns a list of ``{'key': name, 'values': [(period, count), ...]}``
    dicts, one for 'standard' and one for 'pro'.
    """
    bucket_func = attrgetter(request.args.get('bucketed', 'quarter'))
    std = query_std_count(start, end)
    tb_std = bucket_func(Timebucket(std, 'updated'))()
    pro = query_pro_count(start, end)
    tb_pro = bucket_func(Timebucket(pro, 'updated'))()
    bucketed_lists = OrderedDict((
        ('standard', tb_std),
        ('pro', tb_pro),
    ))

    def counts(alist):
        # Works for any iterable, including ones without len().
        return sum(1 for entry in alist)

    # Collapse each period's entries down to a simple count.
    for bl in bucketed_lists.values():
        bl.period_map(counts)
    # Zero out any bucketed period missing from one series but present in
    # the other.  NOTE: was `map(lambda bl: ..., ...)` — map() for side
    # effects is a no-op under Python 3's lazy map; loop explicitly.
    pset = BucketedList.period_set(*bucketed_lists.values())
    for bl in bucketed_lists.values():
        bl.fill_missing_periods(pset)
    series = []
    for name, bl in bucketed_lists.items():
        series.append({
            'key': name,
            'values': [(key, bl[key]) for key in sorted(bl)],
        })
    return series
def _downgrades_plus_churn_count(start, end):
    """Count churn and downgrade events per time bucket.

    Returns a list of ``{'key': name, 'values': [(period, count), ...]}``
    dicts, one for 'churn' and one for 'downgrade'.
    """
    bucket_func = attrgetter(request.args.get('bucketed', 'quarter'))
    downgrade = query_downgrades(start, end)
    tb_downgrades = bucket_func(Timebucket(downgrade, 'updated'))()
    churn = query_lostbiz(start, end)
    tb_churn = bucket_func(Timebucket(churn, 'updated'))()
    bucketed_lists = OrderedDict((
        ('churn', tb_churn),
        ('downgrade', tb_downgrades),
    ))

    def counts(alist):
        # Works for any iterable, including ones without len().
        return sum(1 for entry in alist)

    # Collapse each period's entries down to a simple count.
    for bl in bucketed_lists.values():
        bl.period_map(counts)
    # Zero out any bucketed period missing from one series but present in
    # the other.  NOTE: was `map(lambda bl: ..., ...)` — map() for side
    # effects is a no-op under Python 3's lazy map; loop explicitly.
    pset = BucketedList.period_set(*bucketed_lists.values())
    for bl in bucketed_lists.values():
        bl.fill_missing_periods(pset)
    series = []
    for name, bl in bucketed_lists.items():
        series.append({
            'key': name,
            'values': [(key, bl[key]) for key in sorted(bl)],
        })
    return series
def _new_biz_plus_upsell(start, end):
    """Sum new-business and upsell rate changes per time bucket.

    Returns a list of ``{'key': name, 'values': [(period, total), ...]}``
    dicts, one for 'newbiz' and one for 'upsell'.
    """
    bucket_func = attrgetter(request.args.get('bucketed', 'quarter'))
    # BUG FIX: the queries were swapped — `newbiz` was populated from
    # query_upsell() and `upsell` from query_newbiz(), so each series was
    # labelled with the other's data.
    newbiz = query_newbiz(start, end)
    sum_newbiz = bucket_func(Timebucket(newbiz, 'updated'))()
    upsell = query_upsell(start, end)
    sum_upsell = bucket_func(Timebucket(upsell, 'updated'))()
    bucketed_lists = OrderedDict((
        ('newbiz', sum_newbiz),
        ('upsell', sum_upsell),
    ))
    # Collapse each period down to the summed rate change.
    for bl in bucketed_lists.values():
        bl.period_map(sum_rate_change)
    # Zero out any bucketed period missing from one series but present in
    # the other.  NOTE: was `map(lambda bl: ..., ...)` — map() for side
    # effects is a no-op under Python 3's lazy map; loop explicitly.
    pset = BucketedList.period_set(*bucketed_lists.values())
    for bl in bucketed_lists.values():
        bl.fill_missing_periods(pset)
    series = []
    for name, bl in bucketed_lists.items():
        series.append({
            'key': name,
            'values': [(key, bl[key]) for key in sorted(bl)],
        })
    return series
def test_construction(self):
    """A BucketedList is a dict subclass supporting item assignment."""
    bl = BucketedList()
    self.assertTrue(isinstance(bl, (dict, BucketedList)))
    bl['2014'] = [1, 2, 3]
    bl['2013'] = [4, 5, 6]
    for period, expected in (('2014', [1, 2, 3]), ('2013', [4, 5, 6])):
        self.assertEqual(bl[period], expected)
def test_period_map(self):
    """period_map() replaces each period's value with func(value)."""
    # Reducing callable: each period's list collapses to its sum.
    bl = BucketedList({'2014': [1, 2], '2013': [3, 4], '1999': [5, 6]})
    bl.period_map(sum)
    for period, expected in (('2014', 3), ('2013', 7), ('1999', 11)):
        self.assertEqual(bl[period], expected)
    # List-transforming callable: each period's list is mapped element-wise.
    bl = BucketedList({'2014': [1, 2], '2013': [3, 4], '1999': [5, 6]})
    increment_all = partial(map, lambda x: x + 1)
    bl.period_map(increment_all)
    for period, expected in (('2014', [2, 3]), ('2013', [4, 5]), ('1999', [6, 7])):
        self.assertEqual(bl[period], expected)
def test_fill_empty_periods(self):
    """fill_missing_periods() adds absent periods using a fill value."""
    bl1 = BucketedList({'2014': [1, 2], '2013': [3, 4], '1999': [5, 6]})
    bl2 = BucketedList({'2014': [1, 2], '2013': [3, 4], '2011': [5, 6]})
    pset = BucketedList.period_set(bl1, bl2)
    all_periods = set(('2014', '2013', '2011', '1999'))
    # Default fill value is an empty list.
    bl1.fill_missing_periods(pset)
    self.assertEqual(set(bl1.periods), all_periods)
    self.assertEqual(bl1['2014'], [1, 2])
    self.assertEqual(bl1['2013'], [3, 4])
    self.assertEqual(bl1['2011'], [])
    self.assertEqual(bl1['1999'], [5, 6])
    # An explicit fill value is stored verbatim for missing periods only.
    bl2.fill_missing_periods(pset, 0)
    self.assertEqual(set(bl2.periods), all_periods)
    self.assertEqual(bl2['2014'], [1, 2])
    self.assertEqual(bl2['2013'], [3, 4])
    self.assertEqual(bl2['2011'], [5, 6])
    self.assertEqual(bl2['1999'], 0)
def _product_average_deal_size(start, end):
    """Average positive deal size for standard and pro products per bucket.

    Returns a list of ``{'key': name, 'values': [(period, avg), ...]}``
    dicts, one for 'avg standard' and one for 'avg pro'.
    """
    bucket_func = attrgetter(request.args.get('bucketed', 'quarter'))
    std = query_std(start, end)
    avg_std = bucket_func(Timebucket(std, 'updated'))()
    pro = query_pro(start, end)
    avg_pro = bucket_func(Timebucket(pro, 'updated'))()
    bucketed_lists = OrderedDict((
        ('avg standard', avg_std),
        ('avg pro', avg_pro),
    ))
    # Zero out any bucketed period missing from one series but present in
    # the other.  NOTE: was `map(lambda bl: ..., ...)` — map() for side
    # effects is a no-op under Python 3's lazy map; loop explicitly.
    pset = BucketedList.period_set(*bucketed_lists.values())
    for bl in bucketed_lists.values():
        bl.fill_missing_periods(pset)

    def average(alist):
        # Mean rate_delta over entries with positive movement; 0 when the
        # period has no positive entries (avoids ZeroDivisionError).
        deltas = [item.rate_delta for item in alist if item.rate_delta > 0]
        if not deltas:
            return 0
        return sum(deltas) / float(len(deltas))

    # Calculate the average deal size and mutate each bucket in place.
    for bl in bucketed_lists.values():
        bl.period_map(average)
    series = []
    for name, bl in bucketed_lists.items():
        series.append({
            'key': name,
            'values': [(key, bl[key]) for key in sorted(bl)],
        })
    return series
def _net_value(start, end):
    """Running net value per time bucket, seeded with pre-*start* history.

    Returns a single-series list: ``[{'key': 'net value',
    'values': [(period, running_total), ...]}]``.
    """
    bucket_func = attrgetter(request.args.get('bucketed', 'quarter'))
    # Accumulate all rate movement before `start` so the series begins at
    # the historical running total rather than at zero.
    prev = query_state(['CTP', '%WP', '%WD', '%WU', '%WF'],
                       datetime(2008, 1, 1), start, g2only=True)
    prev_acc = sum(val.rate_delta for val in prev)
    value = query_state(['CTP', '%WP', '%WD', '%WU', '%WF'], start, end)
    net_value = bucket_func(Timebucket(value, 'updated'))()
    bucketed_lists = OrderedDict((('net value', net_value), ))

    def accumulated_net(bl, accumulated=0):
        # Replace each period's entry list with the running total of
        # rate_delta, in chronological (sorted-key) order.
        for period in sorted(bl.periods):
            accumulated += sum(val.rate_delta for val in bl[period])
            bl[period] = accumulated

    # Mutate the bucketed-list components in place.
    for bl in bucketed_lists.values():
        accumulated_net(bl, accumulated=prev_acc)
    # NOTE: was `map(lambda bl: ..., ...)` — map() for side effects is a
    # no-op under Python 3's lazy map; loop explicitly.
    pset = BucketedList.period_set(*bucketed_lists.values())
    for bl in bucketed_lists.values():
        bl.fill_missing_periods(pset)
    series = []
    for name, bl in bucketed_lists.items():
        series.append({
            'key': name,
            'values': [(key, bl[key]) for key in sorted(bl)],
        })
    return series
def _conversion_rate(start, end):
    """Trial counts plus overall/dev/pro conversion-rate series.

    Emits one bar series of raw trial counts followed by three line series
    whose values are conversions divided by trials for each period.
    """
    bucket_func = attrgetter(request.args.get('bucketed', 'quarter'))
    trials = query_state(['SUT'], start, end)
    bl_trials = bucket_func(Timebucket(trials, 'updated'))()
    converted = query_state(['TWP', 'FWP'], start, end)
    bl_converted = bucket_func(Timebucket(converted, 'updated'))()
    # Segment conversions by Standard/Pro product.
    dev_conv = query_product_state(['TWP', 'FWP'], ['development'], start, end)
    bl_dev_conv = bucket_func(Timebucket(dev_conv, 'updated'))()
    pro_conv = query_product_state(['TWP', 'FWP'], ['production'], start, end)
    bl_pro_conv = bucket_func(Timebucket(pro_conv, 'updated'))()
    bucketed_lists = OrderedDict((
        ('trial', bl_trials),
        ('conversions', bl_converted),
        ('dev conversion', bl_dev_conv),
        ('pro conversion', bl_pro_conv),
    ))
    # Zero out any bucketed period missing from one series but present in
    # another.  NOTE: was `map(lambda bl: ..., ...)` — map() for side
    # effects is a no-op under Python 3's lazy map; loop explicitly.
    pset = BucketedList.period_set(*bucketed_lists.values())
    for bl in bucketed_lists.values():
        bl.fill_missing_periods(pset)

    def counts(alist):
        # Works for any iterable, including ones without len().
        return sum(1 for entry in alist)

    # Collapse each period's entries down to a simple count.
    for bl in bucketed_lists.values():
        bl.period_map(counts)
    series = []
    trial_keys = sorted(bl_trials)
    series.append({
        'key': 'trials',
        'values': [{'x': i, 'y': bl_trials[key]}
                   for i, key in enumerate(trial_keys)],
        # BUG FIX: labels previously read the leaked loop variable `bl`
        # (the last bucketed list) instead of the trials series itself.
        'labels': trial_keys,
        'bar': True,
    })
    # Every series after the raw trials becomes a conversion rate.
    # (list(...) keeps the [-3:] slice working on Python 3's items() view.)
    for name, bl in list(bucketed_lists.items())[-3:]:
        for key in bl:
            # Guard against divide-by-zero for periods with no trials.
            if bl_trials[key]:
                bl[key] = float(bl.get(key, 0)) / bl_trials[key]
            else:
                bl[key] = float(bl.get(key, 0))
        keys = sorted(bl)
        series.append({
            'key': name,
            'values': [{'x': i, 'y': bl[key]} for i, key in enumerate(keys)],
            'labels': keys,
        })
    return series
def test_get_period_set(self):
    """period_set() unions the period keys of every given BucketedList."""
    one = BucketedList({'2014': [1, 2], '2013': [3, 4], '1999': [5, 6]})
    two = BucketedList({'2014': [1, 2], '2013': [3, 4], '2011': [5, 6]})
    expected = set(('2014', '2013', '2011', '1999'))
    self.assertEqual(BucketedList.period_set(one, two), expected)