def _delta(self, method, data, **worker_args):
    """Compute delta statistics for every reported KPI and variant.

    Dispatches to the statistical worker selected by ``method`` and runs it
    for each KPI in ``self.report_kpi_names``, comparing every variant in
    ``self.variant_names`` against the control variant.

    :param method: analysis method; one of ``'fixed_horizon'``,
        ``'group_sequential'``, ``'bayes_factor'``, ``'bayes_precision'``
    :param data: experiment data; must contain an ``entity`` column with
        unique entries (presumably one row per entity — confirm with caller)
    :param worker_args: keyword arguments forwarded to the worker factory;
        if ``'multi_test_correction'`` is present, ``num_tests`` is set to
        the number of reported KPIs for the correction
    :return: dict with keys ``warnings``, ``errors``, ``expan_version``,
        ``control_variant`` and ``kpis`` (per-KPI, per-variant delta
        statistics including statistical power)
    :raises ValueError: if entities in ``data`` are not unique
    :raises NotImplementedError: if ``method`` is not a supported method
    """
    # Entities must be unique: duplicated entities would double-count
    # observations in the per-variant aggregations below.
    if data.entity.duplicated().any():
        raise ValueError('Entities in data should be unique')

    # Dispatch table: method name -> worker factory.
    worker_table = {
        'fixed_horizon': statx.make_delta,
        'group_sequential': es.make_group_sequential,
        'bayes_factor': es.make_bayes_factor,
        'bayes_precision': es.make_bayes_precision
    }
    if method not in worker_table:
        # Name the offending method and the supported ones instead of a
        # bare NotImplementedError.
        raise NotImplementedError(
            "Method '{}' is not supported. Supported methods are: {}".format(
                method, ', '.join(sorted(worker_table))))

    # Multiple-testing correction needs to know how many tests will run:
    # one per reported KPI.
    if 'multi_test_correction' in worker_args:
        worker_args['num_tests'] = len(self.report_kpi_names)

    worker = worker_table[method](**worker_args)

    result = {
        'warnings': [],
        'errors': [],
        'expan_version': __version__,
        'control_variant': self.control_variant_name
    }

    kpis = []
    for kpi in self.report_kpi_names:
        res_kpi = {'name': kpi, 'variants': []}

        # Control-side values are weighted once per KPI and reused for
        # every treatment comparison below.
        control = self.get_kpi_by_name_and_variant(
            data, kpi, self.control_variant_name)
        control_weight = self._get_weights(data, kpi, self.control_variant_name)
        control_data = control * control_weight

        # NOTE(review): self.variant_names appears to include the control
        # variant itself, so control-vs-control statistics are emitted too —
        # confirm this is intended.
        for variant in self.variant_names:
            treatment = self.get_kpi_by_name_and_variant(data, kpi, variant)
            treatment_weight = self._get_weights(data, kpi, variant)
            treatment_data = treatment * treatment_weight

            # Capture warnings raised by the worker so they can be
            # surfaced in the result instead of being printed.
            with warnings.catch_warnings(record=True) as w:
                statistics = worker(x=treatment_data, y=control_data)

                # Add statistical power of this comparison.
                power = statx.compute_statistical_power(
                    treatment_data, control_data)
                statistics['statistical_power'] = power

            if w:
                # Only the most recent warning is reported per comparison.
                result['warnings'].append(
                    'kpi: {}, variant: {}: {}'.format(
                        kpi, variant, w[-1].message))

            res_kpi['variants'].append({
                'name': variant,
                'delta_statistics': statistics
            })

        kpis.append(res_kpi)

    result['kpis'] = kpis
    return result
def test_compute_statistical_power(self):
    """Power should match the value pre-computed by hand via power analysis.

    NOTE(review): this calls compute_statistical_power with seven scalar
    arguments, while other call sites pass two sample arrays — presumably
    both signatures are supported; confirm against the statx module.
    """
    precision_digits = 2
    alpha, beta = 0.05, 0.2
    sigma = 1
    mean_treatment, mean_control = 1, 0
    n_treatment, n_control = 13, 12

    # One-sided critical value at the chosen significance level.
    z_crit = stats.norm.ppf(1 - alpha)

    observed_power = statx.compute_statistical_power(
        mean_treatment, sigma, n_treatment,
        mean_control, sigma, n_control,
        z_crit)

    # Expected power is 1 - beta by construction of the hand computation.
    self.assertAlmostEqual(observed_power, 1 - beta, precision_digits)