def load_groups():
    """
    Load all stored group and broker records from the 'service_private_groups'
    service folder into the in-memory caches `known_groups()` and `known_brokers()`.

    Creates the 'groups' and 'brokers' sub-folders if they do not exist yet.
    Groups without a readable info file keep a default placeholder record.
    """
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    brokers_dir = os.path.join(service_dir, 'brokers')
    if not os.path.isdir(brokers_dir):
        bpio._dirs_make(brokers_dir)
    for group_key_id in os.listdir(groups_dir):
        if group_key_id not in known_groups():
            # default placeholder, replaced below if the stored file is readable
            known_groups()[group_key_id] = {
                'last_sequence_id': -1,
                'active': False,
                'archive_folder_path': None,
            }
        group_path = os.path.join(groups_dir, group_key_id)
        group_info = jsn.loads_text(local_fs.ReadTextFile(group_path))
        if group_info:
            known_groups()[group_key_id] = group_info
    for customer_id in os.listdir(brokers_dir):
        customer_path = os.path.join(brokers_dir, customer_id)
        for broker_id in os.listdir(customer_path):
            if customer_id not in known_brokers():
                known_brokers()[customer_id] = [None, ] * REQUIRED_BROKERS_COUNT
            if broker_id in known_brokers(customer_id):
                lg.warn('broker %r already exist' % broker_id)
                continue
            broker_path = os.path.join(customer_path, broker_id)
            broker_info = jsn.loads_text(local_fs.ReadTextFile(broker_path))
            # guard: an empty or corrupted broker file would otherwise raise on ['position']
            if not broker_info or 'position' not in broker_info:
                lg.warn('skipped broker file with invalid content: %r' % broker_path)
                continue
            known_brokers()[customer_id][int(broker_info['position'])] = broker_id
def _load_keeper_state(keeper_state_file_path):
    # Load one keeper state file; returns (ok, json_value).
    # ok=False means the file existed but reading/parsing raised an exception.
    try:
        return True, jsn.loads_text(local_fs.ReadTextFile(keeper_state_file_path))
    except:
        lg.exc()
        return False, None


def read_state(customer_id, broker_id):
    """
    Read locally stored keeper state for given customer from the
    'service_message_broker' service folder.

    First looks for a state file under the exact `broker_id` folder. If not found
    and the broker IDURL is cached, scans all keeper folders for one resolving to
    the same IDURL — this covers brokers whose identity was rotated.

    Returns the loaded JSON value (may be None) or None when nothing was found
    or loading failed.
    """
    service_dir = settings.ServiceDir('service_message_broker')
    keepers_dir = os.path.join(service_dir, 'keepers')
    broker_dir = os.path.join(keepers_dir, broker_id)
    keeper_state_file_path = os.path.join(broker_dir, customer_id)
    if os.path.isfile(keeper_state_file_path):
        ok, json_value = _load_keeper_state(keeper_state_file_path)
        if not ok:
            return None
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, broker_id=broker_id, json_value=json_value)
        return json_value
    broker_idurl = global_id.glob2idurl(broker_id)
    if id_url.is_cached(broker_idurl):
        # fall back to scanning all keeper folders for a rotated broker identity
        for one_broker_id in os.listdir(keepers_dir):
            one_broker_idurl = global_id.glob2idurl(one_broker_id)
            if id_url.is_cached(one_broker_idurl):
                if one_broker_idurl == broker_idurl:
                    broker_dir = os.path.join(keepers_dir, one_broker_id)
                    keeper_state_file_path = os.path.join(broker_dir, customer_id)
                    if os.path.isfile(keeper_state_file_path):
                        ok, json_value = _load_keeper_state(keeper_state_file_path)
                        if not ok:
                            return None
                        if _Debug:
                            lg.args(_DebugLevel, customer_id=customer_id, broker_id=one_broker_id, json_value=json_value)
                        return json_value
    return None
def clear_broker(customer_id, position):
    """
    Erase all stored broker files of `customer_id` registered at the given `position`.

    Broker files with empty/unreadable content are erased as well.
    Returns True when at least one file was removed, otherwise False.
    """
    customers_folder = os.path.join(settings.ServiceDir('service_private_groups'), 'brokers')
    customer_folder = os.path.join(customers_folder, customer_id)
    if not os.path.isdir(customer_folder):
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, position=position)
        return False
    targets = []
    for one_broker_id in os.listdir(customer_folder):
        one_broker_path = os.path.join(customer_folder, one_broker_id)
        one_broker_info = jsn.loads_text(local_fs.ReadTextFile(one_broker_path))
        if not one_broker_info:
            lg.warn('found empty broker info for customer %r : %r' % (customer_id, one_broker_id, ))
            targets.append(one_broker_id)
        elif one_broker_info.get('position') == position:
            targets.append(one_broker_id)
    if not targets:
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, position=position, to_be_erased=targets)
        return False
    erased_paths = []
    for one_broker_id in targets:
        full_path = os.path.join(customer_folder, one_broker_id)
        os.remove(full_path)
        erased_paths.append(full_path)
    if _Debug:
        lg.args(_DebugLevel, customer_id=customer_id, position=position, removed=erased_paths)
    return True
def read_settings():
    """
    Load persisted settings as a dictionary.

    Returns an empty dict when the settings file is missing or empty.
    """
    create_home_dir()
    raw = ReadTextFile(settings_filepath())
    return jsn.loads_text(raw) if raw else {}
def read_key_file(key_id, keys_folder=None):
    """
    Read a stored key from disk and return it as a dictionary.

    Looks first for "<key_id>.private" and then for "<key_id>.public" inside
    `keys_folder` (defaults to the key store folder). Legacy raw-body key files
    are wrapped into a dictionary with 'need_to_convert' set to True.
    Returns None when the file is empty or the JSON content can not be parsed.
    """
    folder = keys_folder or settings.KeyStoreDir()
    is_private = True
    key_filepath = os.path.join(folder, '%s.private' % key_id)
    if not os.path.exists(key_filepath):
        is_private = False
        key_filepath = os.path.join(folder, '%s.public' % key_id)
    key_raw = local_fs.ReadTextFile(key_filepath)
    if not key_raw:
        lg.warn('failed reading key from %r' % key_filepath)
        return None
    src = key_raw.strip()
    try:
        if not (src.startswith('{') and src.endswith('}')):
            # legacy plain-text key body stored on disk
            return {
                'label': key_id,
                'is_private': is_private,
                'body': src,
                'local_key_id': None,
                'need_to_convert': True,
            }
        return jsn.loads_text(src)
    except:
        lg.exc()
        return None
def read_transaction(transaction_id):
    """
    Load one stored transaction as a JSON object.

    Returns None when the transaction file is missing or empty.
    """
    create_home_dir()
    raw = ReadTextFile(transaction_filepath(transaction_id))
    if not raw:
        return None
    return jsn.loads_text(raw)
def BytesToDict(inp, encoding='latin1', errors='strict', keys_to_text=False, values_to_text=False, unpack_types=False):
    """
    Extract input bytes into a python dictionary.

    Input bytes are decoded to text and parsed via the project JSON helpers.
    With `values_to_text` keys and values stay as text; with `unpack_types` the
    real (mixed binary/text) types of keys and values are restored; with
    `keys_to_text` only the keys are converted to text.
    Returns an empty dict for empty input.
    """
    if not inp:
        return {}
    text = strng.to_text(inp, encoding=encoding)
    if values_to_text:
        return jsn.loads_text(text, encoding=encoding)
    if unpack_types:
        loaded = jsn.loads(text, encoding=encoding)
        return jsn.unpack_dict(loaded, encoding=encoding, errors=errors)
    loaded = jsn.loads(text, encoding=encoding)
    if keys_to_text:
        return jsn.dict_keys_to_text(loaded)
    return loaded
def read_transaction(transaction_id):
    """
    Load one stored transaction as a JSON object and normalize its 'btc_amount' field.

    Returns None when the transaction file is missing or empty.
    """
    create_home_dir()
    src = ReadTextFile(transaction_filepath(transaction_id))
    if not src:
        return None
    json_data = jsn.loads_text(src)
    # guard: a record without 'btc_amount' would otherwise raise KeyError here
    if 'btc_amount' in json_data:
        json_data['btc_amount'] = btc_util.clean_btc_amount(json_data['btc_amount'])
    return json_data
def read_group_info(group_key_id):
    """
    Load stored info for given group key from the 'service_private_groups' service folder.

    Returns None when no file exists for that group.
    """
    groups_dir = os.path.join(settings.ServiceDir('service_private_groups'), 'groups')
    group_info_path = os.path.join(groups_dir, group_key_id)
    if not os.path.isfile(group_info_path):
        return None
    return jsn.loads_text(local_fs.ReadTextFile(group_info_path))
def read_customer_info(customer_id):
    """
    Load stored info for one customer as a JSON object.

    Returns None when the customer folder does not exist or the info file is empty.
    """
    create_home_dir()
    if not os.path.exists(customer_dir(customer_id)):
        return None
    raw = ReadTextFile(customer_info_filepath(customer_id))
    if not raw:
        return None
    return jsn.loads_text(raw)
def load_key(key_id, keys_folder=None):
    """
    Read one key from disk, validate it and register it in the in-memory key store.

    Legacy plain-text key files are converted to the JSON format and written back.
    Returns True on success, otherwise False.
    """
    if not is_valid_key_id(key_id):
        lg.warn('key_id is not valid: %s' % key_id)
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    is_private = True
    key_filepath = os.path.join(keys_folder, '%s.private' % key_id)
    if not os.path.exists(key_filepath):
        is_private = False
        key_filepath = os.path.join(keys_folder, '%s.public' % key_id)
    key_raw = local_fs.ReadTextFile(key_filepath)
    if not key_raw:
        lg.warn('failed reading key from %r' % key_filepath)
        return False
    src = key_raw.strip()
    need_to_convert = False
    try:
        if src.startswith('{') and src.endswith('}'):
            key_dict = jsn.loads_text(src)
        else:
            # legacy plain-text key body, will be converted to JSON below
            need_to_convert = True
            key_dict = {
                'label': key_id,
                'body': src,
            }
    except:
        lg.exc()
        return False
    try:
        key_object = rsa_key.RSAKey()
        key_object.fromDict(key_dict)
    except:
        lg.exc()
        return False
    if not key_object.isPublic() and not validate_key(key_object):
        lg.warn('validation failed for %s' % key_filepath)
        return False
    known_keys()[key_id] = key_object
    if _Debug:
        lg.out(_DebugLevel, 'my_keys.load_key %r label=%r is_private=%r from %s' % (
            key_id, key_object.label, is_private, keys_folder, ))
    if need_to_convert:
        save_key(key_id, keys_folder=keys_folder)
        lg.info('key %r format converted to JSON' % key_id)
    return True
def load_customers_list(sort_by='customer_id'):
    """
    Load info records of all known customers.

    Customers without a readable info file get a minimal fallback record holding
    only their id. Result is sorted by customer id when `sort_by` is 'customer_id'.
    """
    create_home_dir()
    result = []
    for customer_id in os.listdir(customers_dir()):
        src = ReadTextFile(customer_info_filepath(customer_id))
        # fallback must be valid JSON: the id is a string and has to be quoted
        src = src or ('{"customer_id": "%s"}' % customer_id)
        json_data = jsn.loads_text(src)
        result.append(json_data)
    if sort_by == 'customer_id':
        result.sort(key=lambda i: str(i.get('customer_id', '')))
    return result
def load_transactions_list(sort_by='transaction_id'):
    """
    Load all stored transactions, most recent transaction id first.

    Transactions without a readable file get a minimal fallback record holding
    only their id.
    """
    create_home_dir()
    result = []
    for transaction_id in os.listdir(transactions_dir()):
        src = ReadTextFile(transaction_filepath(transaction_id))
        # fallback JSON was missing the closing brace and could never be parsed
        src = src or ('{"transaction_id": %s}' % transaction_id)
        json_data = jsn.loads_text(src)
        result.append(json_data)
    if sort_by == 'transaction_id':
        result.sort(key=lambda i: -int(i.get('transaction_id', '0')))
    return result
def dump_local_db(value_as_json=False):
    """
    Collect every item stored in the local DHT data store plus the node-level data.

    With `value_as_json` the stored values are round-tripped through the JSON
    helpers (falling back to plain text when a value is not valid JSON).
    Returns a list of item dicts, or None when the local node is not ready.
    """
    if not node():
        if _Debug:
            lg.out(_DebugLevel, 'dht_service.dump_local_db local node is not ready')
        return None
    results = []
    for itm in node()._dataStore.getAllItems():
        if value_as_json:
            if isinstance(itm['value'], dict):
                # round-trip dict values through the JSON serializer
                serialized = jsn.dumps(itm['value'], keys_to_text=True, errors='ignore')
                itm['value'] = jsn.loads_text(serialized, errors='ignore')
            else:
                try:
                    itm['value'] = jsn.loads_text(itm['value'], errors='ignore')
                except:
                    itm['value'] = strng.to_text(itm['value'])
        itm['scope'] = 'global'
        results.append(itm)
    results.extend({'key': k, 'value': v, 'scope': 'node', } for k, v in node().data.items())
    return results
def load_local_keys(keys_folder=None):
    """
    Scan the key store folder and load every stored key into the in-memory key store.

    The in-memory store is cleared first. Keys that fail to read, parse or
    validate are skipped with a warning. Returns the number of keys loaded.
    """
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    if _Debug:
        lg.out(_DebugLevel, 'my_keys.load_local_keys will read files from %s' % keys_folder)
    known_keys().clear()
    count = 0
    for key_filename in os.listdir(keys_folder):
        key_filepath = os.path.join(keys_folder, key_filename)
        key_id = key_filename.replace('.private', '').replace('.public', '')
        if not is_valid_key_id(key_id):
            lg.warn('key_id is not valid: %s' % key_id)
            continue
        key_raw = local_fs.ReadTextFile(key_filepath)
        if not key_raw:
            lg.warn('failed reading key from %r' % key_filepath)
            continue
        src = key_raw.strip()
        try:
            if not (src.startswith('{') and src.endswith('}')):
                # legacy plain-text key body
                key_dict = {
                    'label': key_id,
                    'body': src,
                }
            else:
                key_dict = jsn.loads_text(src)
        except:
            lg.exc()
            continue
        try:
            key_object = rsa_key.RSAKey()
            key_object.fromDict(key_dict)
        except:
            lg.exc()
            continue
        if not key_object.isPublic():
            if not validate_key(key_object):
                lg.warn('validation failed for %s' % key_filepath)
                continue
        known_keys()[key_id] = key_object
        count += 1
    if _Debug:
        lg.out(_DebugLevel, '    %d keys loaded' % count)
    return count
def read_json_response(response, key, result_defer=None, as_bytes=False):
    """
    Extract the most recent JSON value from a DHT lookup response.

    A list response means the value was not found (the list of contacts is passed
    to `result_defer` if given). A dict response with non-empty 'values' is
    scanned for the entry with the highest timestamp; that entry is parsed with
    `jsn.loads` when `as_bytes` is set, otherwise with `jsn.loads_text`.
    Returns the parsed value, or None when not found / parsing failed
    (errors are reported through `result_defer` when provided).
    """
    if _Debug:
        lg.out(_DebugLevel + 6, 'dht_service.read_json_response [%r] : %r' % (key, response))
    value = None
    if isinstance(response, list):
        if _Debug:
            lg.out(_DebugLevel, '    response is a list, value not found')
        if result_defer:
            result_defer.callback(response)
        return None
    if isinstance(response, dict):
        if response.get('values'):
            try:
                latest = 0
                if as_bytes:
                    value = jsn.loads(response['values'][0][0])
                else:
                    value = jsn.loads_text(response['values'][0][0])
                for v in response['values']:
                    if v[1] > latest:
                        latest = v[1]
                        # respect `as_bytes` here as well (previously always used jsn.loads,
                        # silently overriding the loads_text result above)
                        if as_bytes:
                            value = jsn.loads(v[0])
                        else:
                            value = jsn.loads_text(v[0])
            except:
                lg.exc()
                if _Debug:
                    lg.out(_DebugLevel, '    invalid json value found in DHT, return None')
                if result_defer:
                    result_defer.errback(Exception('invalid json value found in DHT'))
                return None
        else:
            if _Debug:
                lg.out(_DebugLevel, '    response is a dict, "values" field is empty, value not found')
            if result_defer:
                result_defer.callback(response.get('activeContacts', []))
            return None
    if _Debug:
        lg.out(_DebugLevel, '    response is a dict, value is OK')
    if result_defer:
        result_defer.callback(value)
    return value
def load_groups():
    """
    Load all stored group and broker records into the in-memory caches,
    migrating on-disk records whose key/identity was rotated.

    For every stored group the latest key id is resolved via
    `my_keys.latest_key_id()` and the file is renamed when it changed; the loaded
    info is placed into `active_groups()`. For every stored customer/broker the
    latest glob id is resolved via `global_id.latest_glob_id()`, folders and
    files are renamed/moved accordingly, and brokers are registered in
    `known_brokers()` by their stored 'position'. Duplicated broker files for an
    already-occupied position are erased from disk.
    """
    loaded_brokers = 0
    loaded_groups = 0
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    brokers_dir = os.path.join(service_dir, 'brokers')
    if not os.path.isdir(brokers_dir):
        bpio._dirs_make(brokers_dir)
    for group_key_id in os.listdir(groups_dir):
        latest_group_key_id = my_keys.latest_key_id(group_key_id)
        latest_group_path = os.path.join(groups_dir, latest_group_key_id)
        if latest_group_key_id != group_key_id:
            # the group key was rotated: move the stored file to the new name first
            lg.info('going to rename rotated group key: %r -> %r' % (group_key_id, latest_group_key_id, ))
            old_group_path = os.path.join(groups_dir, group_key_id)
            try:
                os.rename(old_group_path, latest_group_path)
            except:
                lg.exc()
                continue
        latest_group_info = jsn.loads_text(local_fs.ReadTextFile(latest_group_path))
        if not latest_group_info:
            lg.err('was not able to load group info from %r' % latest_group_path)
            continue
        active_groups()[latest_group_key_id] = latest_group_info
        loaded_groups += 1
    for customer_id in os.listdir(brokers_dir):
        latest_customer_id = global_id.latest_glob_id(customer_id)
        latest_customer_dir = os.path.join(brokers_dir, latest_customer_id)
        if latest_customer_id != customer_id:
            # the customer identity was rotated: migrate the whole folder
            lg.info('going to rename rotated customer id: %r -> %r' % (customer_id, latest_customer_id, ))
            old_customer_dir = os.path.join(brokers_dir, customer_id)
            try:
                bpio.move_dir_recursive(old_customer_dir, latest_customer_dir)
                bpio.rmdir_recursive(old_customer_dir)
            except:
                lg.exc()
                continue
        for broker_id in os.listdir(latest_customer_dir):
            if latest_customer_id not in known_brokers():
                # presumably initializes an empty brokers list for this customer — TODO confirm
                known_brokers(latest_customer_id)
            latest_broker_id = global_id.latest_glob_id(broker_id)
            latest_broker_path = os.path.join(latest_customer_dir, latest_broker_id)
            if latest_broker_id != broker_id:
                # the broker identity was rotated: rename its stored file
                lg.info('going to rename rotated broker id: %r -> %r' % (broker_id, latest_broker_id, ))
                old_broker_path = os.path.join(latest_customer_dir, broker_id)
                try:
                    os.rename(old_broker_path, latest_broker_path)
                except:
                    lg.exc()
                    continue
            latest_broker_info = jsn.loads_text(local_fs.ReadTextFile(latest_broker_path))
            if not latest_broker_info:
                lg.err('was not able to load broker info from %r' % latest_broker_path)
                continue
            existing_broker_id = known_brokers(latest_customer_id)[int(latest_broker_info['position'])]
            if existing_broker_id:
                # position already taken: treat this record as a duplicate and erase it
                if os.path.isfile(latest_broker_path):
                    lg.err('found duplicated broker for customer %r on position %d, erasing file %r' % (
                        latest_customer_id, int(latest_broker_info['position']), latest_broker_path, ))
                    try:
                        os.remove(latest_broker_path)
                    except:
                        lg.exc()
                continue
            known_brokers()[latest_customer_id][int(latest_broker_info['position'])] = latest_broker_id
            loaded_brokers += 1
    if _Debug:
        lg.args(_DebugLevel, loaded_groups=loaded_groups, loaded_brokers=loaded_brokers)