def refresh_config(self):
    '''
    Re-read the data collector config file and process any changes.

    Loads YAML from self.config_filepath, validates the 'data_collector'
    section with assertions, and either installs it as self.config (first
    run) or applies key-by-key updates to the existing config.

    Validation failures are logged (via the AssertionError / KeyError
    handlers) rather than raised, so a bad config never kills the process.
    '''
    try:
        logging.debug("reading config file from '%s'", self.config_filepath)
        # read in the config from the given path
        with open(self.config_filepath, 'r') as fin:
            # safe_load: never construct arbitrary python objects from a
            # config file (yaml.load without a Loader is deprecated/unsafe)
            conf = yaml.safe_load(fin)
        dc_conf = conf['data_collector']

        # counters may live in a separate file referenced by path,
        # or inline in the top-level config
        if 'counters' in dc_conf:
            dc_conf['counters'] = normalise_path(dc_conf['counters'])
            assert os.path.exists(os.path.dirname(dc_conf['counters']))
            with open(dc_conf['counters'], 'r') as fin:
                counters_conf = yaml.safe_load(fin)
            dc_conf['counters'] = counters_conf['counters']
        else:
            dc_conf['counters'] = conf['counters']

        dc_conf['state'] = normalise_path(dc_conf['state'])
        assert os.path.exists(os.path.dirname(dc_conf['state']))

        assert dc_conf['name'] != ''
        assert dc_conf['noise_weight'] >= 0
        assert dc_conf['tally_server_info']['ip'] is not None
        assert dc_conf['tally_server_info']['port'] > 0

        assert dc_conf['event_source'] is not None
        assert dc_conf['event_source'] > 0

        # every counter needs a non-negative noise sigma
        for key in dc_conf['counters']:
            assert dc_conf['counters'][key]['sigma'] >= 0.0

        assert 'share_keepers' in dc_conf

        if self.config is None:
            self.config = dc_conf
            logging.info("using config = %s", str(self.config))
        else:
            changed = False
            for k in dc_conf:
                if k not in self.config or dc_conf[k] != self.config[k]:
                    # use .get(): a brand-new key has no old value, and
                    # self.config[k] would raise KeyError here, aborting the
                    # update loop and mislogging "missing required keys"
                    logging.info("updated config for key {} from {} to {}"
                                 .format(k, self.config.get(k), dc_conf[k]))
                    self.config[k] = dc_conf[k]
                    changed = True
            if not changed:
                logging.debug('no config changes found')
    except AssertionError:
        logging.warning("problem reading config file: invalid data")
        log_error()
    except KeyError:
        logging.warning("problem reading config file: missing required keys")
        log_error()
def refresh_config(self):
    '''
    Re-read the share keeper config file and process any changes.

    Loads YAML from self.config_filepath, validates the 'share_keeper'
    section (generating an RSA keypair at the default path if no key is
    configured), and either installs it as self.config (first run) or
    applies key-by-key updates to the existing config.

    Validation failures are logged, not raised, so a bad config never
    kills the process.
    '''
    try:
        logging.debug("reading config file from '%s'", self.config_filepath)
        # read in the config from the given path
        with open(self.config_filepath, 'r') as fin:
            # safe_load: never construct arbitrary python objects from a
            # config file (yaml.load without a Loader is deprecated/unsafe)
            conf = yaml.safe_load(fin)
        sk_conf = conf['share_keeper']

        # if key path is not specified, look at default path, or generate a new key
        if 'key' in sk_conf:
            sk_conf['key'] = normalise_path(sk_conf['key'])
            assert os.path.exists(sk_conf['key'])
        else:
            sk_conf['key'] = normalise_path('privcount.rsa_key.pem')
            if not os.path.exists(sk_conf['key']):
                generate_keypair(sk_conf['key'])

        # the share keeper is identified by the digest of its public key
        sk_conf['name'] = get_public_digest(sk_conf['key'])

        sk_conf['state'] = normalise_path(sk_conf['state'])
        assert os.path.exists(os.path.dirname(sk_conf['state']))

        assert sk_conf['tally_server_info']['ip'] is not None
        assert sk_conf['tally_server_info']['port'] > 0

        if self.config is None:
            self.config = sk_conf
            logging.info("using config = %s", str(self.config))
        else:
            changed = False
            for k in sk_conf:
                if k not in self.config or sk_conf[k] != self.config[k]:
                    # use .get(): a brand-new key has no old value, and
                    # self.config[k] would raise KeyError here, aborting the
                    # update loop and mislogging "missing required keys"
                    logging.info("updated config for key {} from {} to {}"
                                 .format(k, self.config.get(k), sk_conf[k]))
                    self.config[k] = sk_conf[k]
                    changed = True
            if not changed:
                logging.debug('no config changes found')
    except AssertionError:
        logging.warning("problem reading config file: invalid data")
        log_error()
    except KeyError:
        logging.warning("problem reading config file: missing required keys")
        log_error()
def __init__(self, config_filepath):
    '''
    Remember the (normalised) config file path; all other state
    starts out empty until a config is loaded and clients check in.
    '''
    self.config_filepath = normalise_path(config_filepath)
    # no config has been loaded yet
    self.config = None
    # no clients have checked in, and no collection phase is running
    self.clients = {}
    self.collection_phase = None
    self.num_completed_collection_phases = 0
    # we have been idle since startup
    self.idle_time = time()
def stopFactory(self):
    '''
    Twisted factory teardown hook.

    NOTE(review): the bare `return` below (marked TODO) disables the
    state-export code, so this method currently does nothing when called.
    '''
    # TODO
    return
    # -- unreachable until the early return above is removed --
    state_filepath = normalise_path(self.config['state'])
    if self.collection_phase is not None or len(self.clients) > 0:
        # export everything that would be needed to survive an app restart
        state = {'clients': self.clients, 'collection_phase': self.collection_phase, 'idle_time': self.idle_time}
        with open(state_filepath, 'w') as fout:
            pickle.dump(state, fout)
def stopFactory(self):
    '''
    Twisted factory teardown hook.

    NOTE(review): the bare `return` below (marked TODO) disables the
    keystore-export code, so this method currently does nothing when called.
    '''
    # TODO
    return
    # -- unreachable until the early return above is removed --
    state_filepath = normalise_path(self.config['state'])
    if self.keystore is not None:
        # export everything that would be needed to survive an app restart
        state = {'keystore': self.keystore}
        with open(state_filepath, 'w') as fout:
            pickle.dump(state, fout)
def startFactory(self):
    '''
    Twisted factory startup hook.

    NOTE(review): the bare `return` below (marked TODO) disables the
    state-loading code, so this method currently does nothing when called.
    '''
    # TODO
    return
    # -- unreachable until the early return above is removed --
    # load any state we may have from a previous run
    state_filepath = normalise_path(self.config['state'])
    if os.path.exists(state_filepath):
        with open(state_filepath, 'r') as fin:
            # NOTE(review): unpickling fully trusts the on-disk state file
            state = pickle.load(fin)
            self.keystore = state['keystore']
def startFactory(self):
    '''
    Twisted factory startup hook.

    NOTE(review): the bare `return` below (marked TODO) disables the
    state-loading code, so this method currently does nothing when called.
    '''
    # TODO
    return
    # -- unreachable until the early return above is removed --
    # load any state we may have from a previous run
    state_filepath = normalise_path(self.config['state'])
    if os.path.exists(state_filepath):
        with open(state_filepath, 'r') as fin:
            # NOTE(review): unpickling fully trusts the on-disk state file
            state = pickle.load(fin)
            self.clients = state['clients']
            self.collection_phase = state['collection_phase']
            self.idle_time = state['idle_time']
def write_results(self, path_prefix):
    '''
    Tally the collected counter shares and write the results under
    path_prefix as two JSON files: a legacy "tallies" file holding only
    the counts, and an "outcome" file holding the counts plus context.
    Clears self.final_counts once the files have been written.
    Logs a warning and writes nothing if the phase is still running,
    there are no counts, or the tally fails.
    '''
    # this should already have been done, but let's make sure
    path_prefix = normalise_path(path_prefix)

    # refuse to write anything while the collection phase is running
    if not self.is_stopped():
        logging.warning("trying to write results before collection phase is stopped")
        return
    if not self.final_counts:
        logging.warning("no tally results to write!")
        return

    # combine every client's blinded shares into plaintext totals
    counter = SecureCounters(self.counters_config, self.modulus)
    if not counter.tally_counters(self.final_counts.values()):
        logging.warning("problem tallying counters, did all counters and bins match!?")
        return
    counts = counter.detach_counts()

    begin = int(round(self.starting_ts))
    end = int(round(self.stopping_ts))

    # For backwards compatibility, write out a "tallies" file
    # This file only has the counts
    tallies_path = os.path.join(path_prefix, "privcount.tallies.{}-{}.json".format(begin, end))
    with open(tallies_path, 'a') as fout:
        json.dump(counts, fout, sort_keys=True, indent=4)

    # Write out an "outcome" file that adds context to the counts
    # This makes it easier to interpret results later on
    outcome = {'Tally': counts, 'Context': self.get_result_context()}
    outcome_path = os.path.join(path_prefix, "privcount.outcome.{}-{}.json".format(begin, end))
    with open(outcome_path, 'a') as fout:
        json.dump(outcome, fout, sort_keys=True, indent=4)
    logging.info("tally was successful, outcome of phase of {} was written to file '{}'".format(format_interval_time_between(begin, 'from', end), outcome_path))

    self.final_counts = {}
def refresh_config(self):
    '''
    Re-read the tally server config file and process any changes.

    Loads YAML from self.config_filepath, validates the 'tally_server'
    section (counters, key/cert, paths, periods, thresholds), clamps the
    event and checkin periods to at most half the collect period, and
    either installs the result as self.config (first run) or applies
    key-by-key updates to the existing config.

    Validation failures are logged (via the AssertionError / KeyError
    handlers) rather than raised, so a bad config never kills the process.
    '''
    try:
        logging.debug("reading config file from '%s'", self.config_filepath)
        # read in the config from the given path
        with open(self.config_filepath, 'r') as fin:
            # safe_load: never construct arbitrary python objects from a
            # config file (yaml.load without a Loader is deprecated/unsafe)
            conf = yaml.safe_load(fin)
        ts_conf = conf['tally_server']

        # counters may live in a separate file referenced by path,
        # or inline in the top-level config
        if 'counters' in ts_conf:
            ts_conf['counters'] = normalise_path(ts_conf['counters'])
            assert os.path.exists(os.path.dirname(ts_conf['counters']))
            with open(ts_conf['counters'], 'r') as fin:
                counters_conf = yaml.safe_load(fin)
            ts_conf['counters'] = counters_conf['counters']
        else:
            ts_conf['counters'] = conf['counters']

        # if key path is not specified, look at default path, or generate a new key
        if 'key' in ts_conf and 'cert' in ts_conf:
            ts_conf['key'] = normalise_path(ts_conf['key'])
            assert os.path.exists(ts_conf['key'])
            ts_conf['cert'] = normalise_path(ts_conf['cert'])
            assert os.path.exists(ts_conf['cert'])
        else:
            ts_conf['key'] = normalise_path('privcount.rsa_key.pem')
            ts_conf['cert'] = normalise_path('privcount.rsa_key.cert')
            if not os.path.exists(ts_conf['key']) or not os.path.exists(ts_conf['cert']):
                generate_keypair(ts_conf['key'])
                generate_cert(ts_conf['key'], ts_conf['cert'])

        # results default to the current directory
        if 'results' in ts_conf:
            ts_conf['results'] = normalise_path(ts_conf['results'])
        else:
            ts_conf['results'] = normalise_path('./')
        assert os.path.exists(os.path.dirname(ts_conf['results']))

        ts_conf['state'] = normalise_path(ts_conf['state'])
        assert os.path.exists(os.path.dirname(ts_conf['state']))

        # Must be configured manually
        assert 'collect_period' in ts_conf
        # Set the default periods
        ts_conf.setdefault('event_period', 60)
        ts_conf.setdefault('checkin_period', 60)

        # The event period should be less than or equal to half the
        # collect period, otherwise privcount sometimes takes an extra
        # event period to produce results
        event_max = ts_conf['collect_period'] / 2
        if ts_conf['event_period'] > event_max:
            logging.warning("event_period %d too large for collect_period %d, reducing to %d",
                            ts_conf['event_period'], ts_conf['collect_period'], event_max)
            ts_conf['event_period'] = event_max

        # The checkin period must be less than or equal to half the
        # collect period, otherwise privcount never finishes.
        checkin_max = ts_conf['collect_period'] / 2
        if ts_conf['checkin_period'] > checkin_max:
            logging.warning("checkin_period %d too large for collect_period %d, reducing to %d",
                            ts_conf['checkin_period'], ts_conf['collect_period'], checkin_max)
            ts_conf['checkin_period'] = checkin_max

        # It should also be less than or equal to the event period,
        # so that the TS is up to date with client statuses every
        # event loop.
        checkin_max_log = ts_conf['event_period']
        if ts_conf['checkin_period'] > checkin_max_log:
            logging.info("checkin_period %d greater than event_period %d, client statuses might be delayed",
                         ts_conf['checkin_period'], ts_conf['event_period'])

        assert ts_conf['listen_port'] > 0
        assert ts_conf['sk_threshold'] > 0
        assert ts_conf['dc_threshold'] > 0
        assert ts_conf['collect_period'] > 0
        assert ts_conf['event_period'] > 0
        assert ts_conf['checkin_period'] > 0
        assert ts_conf['continue'] == True or ts_conf['continue'] == False

        # check the hard-coded counter values are sane
        assert counter_modulus() > 0
        assert min_blinded_counter_value() == 0
        assert max_blinded_counter_value() > 0
        assert max_blinded_counter_value() < counter_modulus()
        assert min_tally_counter_value() < 0
        assert max_tally_counter_value() > 0
        assert max_tally_counter_value() < counter_modulus()
        assert -min_tally_counter_value() < counter_modulus()

        # every histogram counter must declare its bins
        for key in ts_conf['counters']:
            if 'Histogram' in key:
                assert 'bins' in ts_conf['counters'][key] and ts_conf['counters'][key]['bins'] is not None

        if self.config is None:
            self.config = ts_conf
            logging.info("using config = %s", str(self.config))
        else:
            changed = False
            for k in ts_conf:
                if k not in self.config or ts_conf[k] != self.config[k]:
                    # use .get(): a brand-new key has no old value, and
                    # self.config[k] would raise KeyError here, aborting the
                    # update loop and mislogging "missing required keys"
                    logging.info("updated config for key {} from {} to {}"
                                 .format(k, self.config.get(k), ts_conf[k]))
                    self.config[k] = ts_conf[k]
                    changed = True
            if not changed:
                logging.debug('no config changes found')
    except AssertionError:
        logging.warning("problem reading config file: invalid data")
        log_error()
    except KeyError:
        logging.warning("problem reading config file: missing required keys")
        log_error()
def __init__(self, config_filepath):
    '''
    Remember the (normalised) config file path; no config or keystore
    has been loaded yet.
    '''
    # neither the config nor the key store exists at construction time
    self.config = None
    self.keystore = None
    self.config_filepath = normalise_path(config_filepath)
def __init__(self, config_filepath):
    '''
    Remember the (normalised) config file path; all other state
    starts out unset.
    '''
    self.config_filepath = normalise_path(config_filepath)
    # no config has been loaded yet
    self.config = None
    # no aggregator exists, and no deferred call to one is scheduled
    self.aggregator = None
    self.aggregator_defer_id = None
    # start with an empty context
    self.context = {}