def load_groups():
    """
    Load all known groups and message brokers from local storage into memory.

    Reads JSON files from the "groups" and "brokers" sub-folders of the
    ``service_private_groups`` service directory, creating those folders
    first if they do not exist yet.
    """
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    brokers_dir = os.path.join(service_dir, 'brokers')
    if not os.path.isdir(brokers_dir):
        bpio._dirs_make(brokers_dir)
    for group_key_id in os.listdir(groups_dir):
        if group_key_id not in known_groups():
            # register the group with a default (inactive) state first,
            # the stored info (if readable) overwrites it below
            known_groups()[group_key_id] = {
                'last_sequence_id': -1,
                'active': False,
                'archive_folder_path': None,
            }
        group_path = os.path.join(groups_dir, group_key_id)
        group_info = jsn.loads_text(local_fs.ReadTextFile(group_path))
        if group_info:
            known_groups()[group_key_id] = group_info
    for customer_id in os.listdir(brokers_dir):
        customer_path = os.path.join(brokers_dir, customer_id)
        for broker_id in os.listdir(customer_path):
            if customer_id not in known_brokers():
                known_brokers()[customer_id] = [None, ] * REQUIRED_BROKERS_COUNT
            if broker_id in known_brokers(customer_id):
                lg.warn('broker %r already exist' % broker_id)
                continue
            broker_path = os.path.join(customer_path, broker_id)
            broker_info = jsn.loads_text(local_fs.ReadTextFile(broker_path))
            if not broker_info:
                # fix: an empty/corrupted broker file previously caused a
                # TypeError when subscripting None on the next line
                lg.warn('skipped empty broker info %r for customer %r' % (broker_id, customer_id, ))
                continue
            known_brokers()[customer_id][int(broker_info['position'])] = broker_id
def read_state(customer_id, broker_id):
    """
    Read the stored "keeper" state for the given customer from the given broker.

    First tries the keeper folder named exactly after ``broker_id``; if that
    file is absent, scans all keeper folders and matches them by cached IDURL,
    so a rotated/renamed broker identity can still be resolved.
    Returns the parsed JSON value, or None when nothing was found or reading failed.
    """
    keepers_dir = os.path.join(settings.ServiceDir('service_message_broker'), 'keepers')
    state_path = os.path.join(keepers_dir, broker_id, customer_id)
    if os.path.isfile(state_path):
        try:
            state_json = jsn.loads_text(local_fs.ReadTextFile(state_path))
        except:
            lg.exc()
            return None
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, broker_id=broker_id, json_value=state_json)
        return state_json
    # exact folder not found - try to match existing keeper folders by IDURL
    broker_idurl = global_id.glob2idurl(broker_id)
    if id_url.is_cached(broker_idurl):
        for one_broker_id in os.listdir(keepers_dir):
            one_broker_idurl = global_id.glob2idurl(one_broker_id)
            if not id_url.is_cached(one_broker_idurl):
                continue
            if one_broker_idurl != broker_idurl:
                continue
            one_state_path = os.path.join(keepers_dir, one_broker_id, customer_id)
            if not os.path.isfile(one_state_path):
                continue
            try:
                state_json = jsn.loads_text(local_fs.ReadTextFile(one_state_path))
            except:
                lg.exc()
                return None
            if _Debug:
                lg.args(_DebugLevel, customer_id=customer_id, broker_id=one_broker_id, json_value=state_json)
            return state_json
    return None
def clear_broker(customer_id, position):
    """
    Erase stored broker files of the given customer which occupy the given position.

    Broker files with empty/unreadable info are erased as well.
    Returns True when at least one file was removed, otherwise False.
    """
    service_dir = settings.ServiceDir('service_private_groups')
    brokers_dir = os.path.join(service_dir, 'brokers')
    customer_dir = os.path.join(brokers_dir, customer_id)
    if not os.path.isdir(customer_dir):
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, position=position)
        return False
    to_be_erased = []
    for broker_id in os.listdir(customer_dir):
        broker_path = os.path.join(customer_dir, broker_id)
        broker_info = jsn.loads_text(local_fs.ReadTextFile(broker_path))
        if not broker_info:
            to_be_erased.append(broker_id)
            lg.warn('found empty broker info for customer %r : %r' % (customer_id, broker_id, ))
            continue
        # fix: the stored position may come back from JSON as a string, while
        # callers pass an integer - compare both as integers, consistent with
        # the int(broker_info['position']) coercion used elsewhere in this file
        try:
            stored_position = int(broker_info.get('position'))
        except (TypeError, ValueError):
            lg.warn('invalid position in broker info for customer %r : %r' % (customer_id, broker_id, ))
            continue
        if stored_position != int(position):
            continue
        to_be_erased.append(broker_id)
    if not to_be_erased:
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, position=position, to_be_erased=to_be_erased)
        return False
    removed = []
    for broker_id in to_be_erased:
        broker_path = os.path.join(customer_dir, broker_id)
        os.remove(broker_path)
        removed.append(broker_path)
    if _Debug:
        lg.args(_DebugLevel, customer_id=customer_id, position=position, removed=removed)
    return True
def read_key_file(key_id, keys_folder=None):
    """
    Load a single key file from the key store and return it as a dictionary.

    Prefers the ".private" file and falls back to the ".public" one.
    Old-style files holding a bare key body (not JSON) are wrapped into a
    dictionary marked with ``need_to_convert`` so they can be re-saved later.
    Returns None when the file is unreadable or parsing failed.
    """
    folder = keys_folder or settings.KeyStoreDir()
    is_private = True
    key_filepath = os.path.join(folder, '%s.private' % key_id)
    if not os.path.exists(key_filepath):
        is_private = False
        key_filepath = os.path.join(folder, '%s.public' % key_id)
    key_raw = local_fs.ReadTextFile(key_filepath)
    if not key_raw:
        lg.warn('failed reading key from %r' % key_filepath)
        return None
    stripped = key_raw.strip()
    try:
        if stripped.startswith('{') and stripped.endswith('}'):
            # modern storage format: the whole key record is a JSON object
            key_dict = jsn.loads_text(stripped)
        else:
            # legacy format: bare key body - wrap and mark for conversion
            key_dict = {
                'label': key_id,
                'is_private': is_private,
                'body': stripped,
                'local_key_id': None,
                'need_to_convert': True,
            }
    except:
        lg.exc()
        return None
    return key_dict
def doInitInterfaces(self, *args, **kwargs):
    """
    Action method: bring up the enabled local API interfaces (auth secret,
    REST HTTP server, WebSocket server), then fire 'init-interfaces-done'.
    """
    if _Debug:
        lg.out(_DebugLevel, 'initializer.doInitInterfaces')
    # if settings.enableFTPServer():
    #     try:
    #         from interface import ftp_server
    #         ftp_server.init()
    #     except:
    #         lg.exc()
    if settings.enableAPIAuthSecret():
        current_secret = local_fs.ReadTextFile(settings.APISecretFile())
        if not current_secret:
            # no secret stored yet - generate one on first run
            new_secret = cipher.generate_secret_text(10)
            local_fs.WriteTextFile(settings.APISecretFile(), new_secret)
            lg.info('generated new API auth secret text and stored in %r' % settings.APISecretFile())
    if settings.enableRESTHTTPServer():
        try:
            from interface import api_rest_http_server
            api_rest_http_server.init(port=settings.getRESTHTTPServerPort())
        except:
            lg.exc()
    if settings.enableWebSocketServer():
        try:
            from interface import api_web_socket
            api_web_socket.init(port=settings.getWebSocketServerPort())
        except:
            lg.exc()
    reactor.callLater(0, self.automat, 'init-interfaces-done')  # @UndefinedVariable
def default_nodes():
    """
    A set of identity servers currently maintained, see file networks.json
    in the root folder.

    Returns a dictionary mapping binary host name to a
    (http_port, tcp_port) tuple for the currently selected network.
    """
    from system import bpio
    from system import local_fs
    from lib import serialization
    from lib import strng
    from main import settings
    # from logs import lg
    networks_json = serialization.BytesToDict(
        local_fs.ReadBinaryFile(os.path.join(bpio.getExecutableDir(), 'networks.json')),
        keys_to_text=True,
        values_to_text=True,
    )
    my_network = local_fs.ReadTextFile(settings.NetworkFileName()).strip()
    # fall back to the "main" network when nothing or an unknown name is stored
    if not my_network or my_network not in networks_json:
        my_network = 'main'
    network_info = networks_json[my_network]
    identity_servers = {}
    for one_server in network_info['identity-servers']:
        identity_servers[strng.to_bin(one_server['host'])] = (
            one_server['http_port'],
            one_server['tcp_port'],
        )
    # lg.info('Active network is [%s] identity_servers=%s' % (my_network, identity_servers, ))
    return identity_servers
def default_nodes():
    """
    List of DHT nodes currently maintained : (host, UDP port number)
    """
    from system import bpio
    from system import local_fs
    from lib import serialization
    from main import settings
    from logs import lg
    networks_json = serialization.BytesToDict(
        local_fs.ReadBinaryFile(os.path.join(bpio.getExecutableDir(), 'networks.json')))
    my_network = local_fs.ReadTextFile(settings.NetworkFileName()).strip()
    # fall back to the "main" network when nothing or an unknown name is stored
    if not my_network or my_network not in networks_json:
        my_network = 'main'
    network_info = networks_json[my_network]
    dht_seeds = [(one_seed['host'], one_seed['udp_port'], ) for one_seed in network_info['dht-seeds']]
    lg.info('Active network is [%s] dht_seeds=%s' % (my_network, dht_seeds, ))
    return dht_seeds
def scan_local_keys(keys_folder=None):
    """
    Scan the key store folder, rebuild the in-memory key indexes and register
    any keys which do not have a ``local_key_id`` assigned yet.

    Also restores the ``_LatestLocalKeyID`` counter from the
    'latest_local_key_id' file and advances it when new keys get registered.
    """
    global _LatestLocalKeyID
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    if _Debug:
        lg.out(_DebugLevel, 'my_keys.scan_local_keys will read files from %r' % keys_folder)
    latest_local_key_id_filepath = os.path.join(keys_folder, 'latest_local_key_id')
    latest_local_key_id_src = local_fs.ReadTextFile(latest_local_key_id_filepath)
    if latest_local_key_id_src:
        _LatestLocalKeyID = int(latest_local_key_id_src)
    else:
        _LatestLocalKeyID = 0
    known_keys().clear()
    local_keys().clear()
    local_keys_index().clear()
    count = 0
    unregistered_keys = []
    for key_filename in os.listdir(keys_folder):
        if key_filename == 'latest_local_key_id':
            continue
        key_id = key_filename.replace('.private', '').replace('.public', '')
        if not is_valid_key_id(key_id):
            lg.warn('key_id is not valid: %r' % key_id)
            continue
        key_dict = read_key_file(key_id, keys_folder=keys_folder)
        if not key_dict:
            # fix: read_key_file() returns None for unreadable/corrupted files,
            # previously that caused AttributeError on key_dict.get() below
            lg.warn('failed reading key file for %r' % key_id)
            continue
        local_key_id = key_dict.get('local_key_id')
        if local_key_id is None:
            # key exists on disk but was never registered - handle it below
            key_dict['key_id'] = key_id
            unregistered_keys.append(key_dict)
            continue
        if _LatestLocalKeyID < local_key_id:
            _LatestLocalKeyID = local_key_id
        local_keys()[local_key_id] = key_id
        known_keys()[key_id] = None
        count += 1
    registered_count = 0
    for key_dict in unregistered_keys:
        key_id = key_dict['key_id']
        if not load_key(key_id, keys_folder=keys_folder):
            continue
        _LatestLocalKeyID += 1
        new_local_key_id = _LatestLocalKeyID
        lg.warn('about to register key %r with local_key_id=%r' % (key_id, new_local_key_id, ))
        known_keys()[key_id].local_key_id = new_local_key_id
        save_key(key_id, keys_folder=keys_folder)
        registered_count += 1
    unregistered_keys = []
    save_latest_local_key_id(keys_folder=keys_folder)
    if _Debug:
        lg.out(_DebugLevel, '    %d keys found and %d registered' % (count, registered_count, ))
def read_group_info(group_key_id):
    """
    Load stored info of a single group from disk.

    Returns the parsed JSON dictionary, or None when no file exists for that group.
    """
    groups_dir = os.path.join(settings.ServiceDir('service_private_groups'), 'groups')
    group_info_path = os.path.join(groups_dir, group_key_id)
    if not os.path.isfile(group_info_path):
        return None
    return jsn.loads_text(local_fs.ReadTextFile(group_info_path))
def fromFile(self, keyfilename):
    """
    Populate this key object from a key file on disk.

    Raises ValueError when a key object was already loaded.
    Always returns True.
    """
    if self.keyObject:
        raise ValueError('key object already exist')
    key_src = local_fs.ReadTextFile(keyfilename)
    self.keyObject = RSA.import_key(key_src)
    # drop the raw key material from memory as soon as possible
    del key_src
    gc.collect()
    return True
def load_key(key_id, keys_folder=None):
    """
    Read a key file from the key store, validate it and put the resulting
    RSA key object into the known keys index.

    Legacy (non-JSON) key files are re-saved in JSON format after loading.
    Returns True on success, False otherwise.
    """
    if not is_valid_key_id(key_id):
        lg.warn('key_id is not valid: %s' % key_id)
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    is_private = True
    key_filepath = os.path.join(keys_folder, '%s.private' % key_id)
    if not os.path.exists(key_filepath):
        is_private = False
        key_filepath = os.path.join(keys_folder, '%s.public' % key_id)
    key_raw = local_fs.ReadTextFile(key_filepath)
    if not key_raw:
        lg.warn('failed reading key from %r' % key_filepath)
        return False
    stripped = key_raw.strip()
    need_to_convert = False
    try:
        if stripped.startswith('{') and stripped.endswith('}'):
            key_dict = jsn.loads_text(stripped)
        else:
            # legacy storage format: bare key body, convert to JSON on save
            key_dict = {
                'label': key_id,
                'body': stripped,
            }
            need_to_convert = True
    except:
        lg.exc()
        return False
    try:
        key_object = rsa_key.RSAKey()
        key_object.fromDict(key_dict)
    except:
        lg.exc()
        return False
    if not key_object.isPublic() and not validate_key(key_object):
        lg.warn('validation failed for %s' % key_filepath)
        return False
    known_keys()[key_id] = key_object
    if _Debug:
        lg.out(_DebugLevel, 'my_keys.load_key %r label=%r is_private=%r from %s' % (
            key_id, key_object.label, is_private, keys_folder, ))
    if need_to_convert:
        save_key(key_id, keys_folder=keys_folder)
        lg.info('key %r format converted to JSON' % key_id)
    return True
def fromFile(self, keyfilename):
    """
    Populate this key object from a key file on disk.

    Raises ValueError when a key object was already loaded.
    NOTE(review): on import failure the exception is only logged (in debug
    mode) and True is still returned with keyObject left as None - confirm
    callers expect that best-effort behavior.
    """
    if self.keyObject:
        raise ValueError('key object already exist')
    key_src = local_fs.ReadTextFile(keyfilename)
    key_src = strng.to_bin(key_src)
    try:
        self.keyObject = RSA.import_key(key_src)  # @UndefinedVariable
    except:
        if _Debug:
            lg.exc('key_src=%r' % key_src)
    # drop the raw key material from memory as soon as possible
    del key_src
    # gc.collect()
    return True
def load_customers(path=None):
    """
    Load customers list from disk.

    Also restores the customers meta info dictionary from its own file,
    defaulting to an empty dictionary when nothing is stored.
    """
    global _CustomersMetaInfo
    if path is None:
        path = settings.CustomerIDsFilename()
    customers_list = bpio._read_list(path)
    if customers_list is None:
        customers_list = list()
    customers_list = list(map(strng.to_bin, customers_list))
    set_customers(customers_list)
    raw_meta = local_fs.ReadTextFile(settings.CustomersMetaInfoFilename()) or '{}'
    _CustomersMetaInfo = jsn.loads(raw_meta)
    lg.out(4, 'contactsdb.load_customers %d items' % len(customers_list))
def load_local_keys(keys_folder=None):
    """
    Read every key file from the key store folder into the known keys index.

    Files which are unreadable, invalid or fail validation are skipped.
    Returns the number of keys successfully loaded.
    """
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    if _Debug:
        lg.out(_DebugLevel, 'my_keys.load_local_keys will read files from %s' % keys_folder)
    known_keys().clear()
    count = 0
    for key_filename in os.listdir(keys_folder):
        key_filepath = os.path.join(keys_folder, key_filename)
        key_id = key_filename.replace('.private', '').replace('.public', '')
        if not is_valid_key_id(key_id):
            lg.warn('key_id is not valid: %s' % key_id)
            continue
        key_raw = local_fs.ReadTextFile(key_filepath)
        if not key_raw:
            lg.warn('failed reading key from %r' % key_filepath)
            continue
        stripped = key_raw.strip()
        try:
            if stripped.startswith('{') and stripped.endswith('}'):
                key_dict = jsn.loads_text(stripped)
            else:
                # legacy storage format: bare key body
                key_dict = {
                    'label': key_id,
                    'body': stripped,
                }
        except:
            lg.exc()
            continue
        try:
            key_object = rsa_key.RSAKey()
            key_object.fromDict(key_dict)
        except:
            lg.exc()
            continue
        if not key_object.isPublic() and not validate_key(key_object):
            lg.warn('validation failed for %s' % key_filepath)
            continue
        known_keys()[key_id] = key_object
        count += 1
    if _Debug:
        lg.out(_DebugLevel, '    %d keys loaded' % count)
    return count
def load_customers(path=None):
    """
    Load customers list from disk.

    Only IDURLs already present in the identity cache are kept; the related
    customers meta info dictionary is restored and normalized as well.
    """
    global _CustomersMetaInfo
    if path is None:
        path = settings.CustomerIDsFilename()
    customers_list = bpio._read_list(path)
    if customers_list is None:
        customers_list = list()
    customers_list = list(filter(id_url.is_cached, customers_list))
    set_customers(customers_list)
    raw_meta = local_fs.ReadTextFile(settings.CustomersMetaInfoFilename()) or '{}'
    _CustomersMetaInfo = jsn.loads(raw_meta, keys_to_bin=True)
    _CustomersMetaInfo = id_url.to_bin_dict(_CustomersMetaInfo)
    _CustomersMetaInfo = jsn.dict_values_to_text(_CustomersMetaInfo)
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.load_customers %d items' % len(customers_list))
def websocket_thread():
    """
    Background thread loop: keep a WebSocket connection to the local API
    open, reconnecting every second, until is_started() turns False.
    """
    global _APISecretFilePath
    global _WebSocketApp
    global _WebSocketClosed
    websocket.enableTrace(False)
    while is_started():
        _WebSocketClosed = False
        ws_url = "ws://localhost:8280/"
        # attach the API auth secret as a query parameter when one is stored
        if _APISecretFilePath and os.path.isfile(_APISecretFilePath):
            api_secret = local_fs.ReadTextFile(_APISecretFilePath)
            if api_secret:
                ws_url += '?api_secret=' + api_secret
        if _Debug:
            print('websocket_thread() ws_url=%r' % ws_url)
        _WebSocketApp = websocket.WebSocketApp(
            ws_url,
            on_message=on_message,
            on_error=on_error,
            on_close=on_close,
            on_open=on_open,
        )
        try:
            ws().run_forever(ping_interval=10)
        except Exception as exc:
            _WebSocketApp = None
            if _Debug:
                print('\n WS Thread ERROR:', exc)
            time.sleep(1)
        if _WebSocketApp:
            del _WebSocketApp
            _WebSocketApp = None
        if not is_started():
            break
        time.sleep(1)
    _WebSocketApp = None
def read_api_secret():
    """
    Read the API auth secret from the settings file into the module-level cache.
    """
    global _APISecret
    _APISecret = local_fs.ReadTextFile(settings.APISecretFile())
def ReadTextFile(filename):
    """
    Thin pass-through to local_fs.ReadTextFile(), kept for backward compatibility.
    """
    return local_fs.ReadTextFile(filename=filename)
def load_groups():
    """
    Load active groups and known message brokers from local storage.

    Handles identity rotation: group key files, customer folders and broker
    files named after an outdated identity are renamed/moved to their latest
    global IDs before being loaded into memory.
    """
    loaded_brokers = 0
    loaded_groups = 0
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    brokers_dir = os.path.join(service_dir, 'brokers')
    if not os.path.isdir(brokers_dir):
        bpio._dirs_make(brokers_dir)
    for group_key_id in os.listdir(groups_dir):
        latest_group_key_id = my_keys.latest_key_id(group_key_id)
        latest_group_path = os.path.join(groups_dir, latest_group_key_id)
        if latest_group_key_id != group_key_id:
            # the key owner's identity was rotated - rename the stored file
            lg.info('going to rename rotated group key: %r -> %r' % (group_key_id, latest_group_key_id, ))
            old_group_path = os.path.join(groups_dir, group_key_id)
            try:
                os.rename(old_group_path, latest_group_path)
            except:
                lg.exc()
                continue
        latest_group_info = jsn.loads_text(local_fs.ReadTextFile(latest_group_path))
        if not latest_group_info:
            lg.err('was not able to load group info from %r' % latest_group_path)
            continue
        active_groups()[latest_group_key_id] = latest_group_info
        loaded_groups += 1
    for customer_id in os.listdir(brokers_dir):
        latest_customer_id = global_id.latest_glob_id(customer_id)
        latest_customer_dir = os.path.join(brokers_dir, latest_customer_id)
        if latest_customer_id != customer_id:
            # the customer's identity was rotated - move the whole folder
            lg.info('going to rename rotated customer id: %r -> %r' % (customer_id, latest_customer_id, ))
            old_customer_dir = os.path.join(brokers_dir, customer_id)
            try:
                bpio.move_dir_recursive(old_customer_dir, latest_customer_dir)
                bpio.rmdir_recursive(old_customer_dir)
            except:
                lg.exc()
                continue
        for broker_id in os.listdir(latest_customer_dir):
            if latest_customer_id not in known_brokers():
                # calling known_brokers() with an argument initializes the
                # brokers list for that customer
                known_brokers(latest_customer_id)
            latest_broker_id = global_id.latest_glob_id(broker_id)
            latest_broker_path = os.path.join(latest_customer_dir, latest_broker_id)
            if latest_broker_id != broker_id:
                lg.info('going to rename rotated broker id: %r -> %r' % (broker_id, latest_broker_id, ))
                old_broker_path = os.path.join(latest_customer_dir, broker_id)
                try:
                    os.rename(old_broker_path, latest_broker_path)
                except:
                    lg.exc()
                    continue
            latest_broker_info = jsn.loads_text(local_fs.ReadTextFile(latest_broker_path))
            if not latest_broker_info:
                lg.err('was not able to load broker info from %r' % latest_broker_path)
                continue
            existing_broker_id = known_brokers(latest_customer_id)[int(latest_broker_info['position'])]
            if existing_broker_id:
                # another broker already occupies that position - drop the duplicate
                if os.path.isfile(latest_broker_path):
                    lg.err('found duplicated broker for customer %r on position %d, erasing file %r' % (
                        latest_customer_id, int(latest_broker_info['position']), latest_broker_path, ))
                    try:
                        os.remove(latest_broker_path)
                    except:
                        lg.exc()
                continue
            known_brokers()[latest_customer_id][int(latest_broker_info['position'])] = latest_broker_id
            loaded_brokers += 1
    if _Debug:
        lg.args(_DebugLevel, loaded_groups=loaded_groups, loaded_brokers=loaded_brokers)
def init():
    """
    Load the identity history from disk and populate the in-memory indexes:
    _KnownUsers, _KnownIDURLs and _MergedIDURLs.

    Each sub-folder of the identity history directory holds numbered
    revisions of one user's identity file; broken or invalid records are
    logged and skipped.
    """
    global _IdentityHistoryDir
    global _KnownUsers
    global _KnownIDURLs
    global _MergedIDURLs
    global _Ready
    from userid import identity
    if _Debug:
        lg.out(_DebugLevel, "id_url.init")
    if not _IdentityHistoryDir:
        _IdentityHistoryDir = settings.IdentityHistoryDir()
    if not os.path.exists(_IdentityHistoryDir):
        bpio._dir_make(_IdentityHistoryDir)
        lg.info('created new folder %r' % _IdentityHistoryDir)
    else:
        lg.info('using existing folder %r' % _IdentityHistoryDir)
    for one_user_dir in os.listdir(_IdentityHistoryDir):
        one_user_dir_path = os.path.join(_IdentityHistoryDir, one_user_dir)
        one_user_identity_files = []
        for one_filename in os.listdir(one_user_dir_path):
            # history files are named by integer revision number
            try:
                one_ident_number = int(one_filename)
            except:
                lg.exc()
                continue
            one_user_identity_files.append(one_ident_number)
        if _Debug:
            lg.out(_DebugLevel, 'id_url.init found %d historical records in %r' % (
                len(one_user_identity_files), one_user_dir_path, ))
        for one_ident_file in one_user_identity_files:
            one_ident_path = os.path.join(one_user_dir_path, strng.to_text(one_ident_file))
            try:
                xmlsrc = local_fs.ReadTextFile(one_ident_path)
                known_id_obj = identity.identity(xmlsrc=xmlsrc)
                if not known_id_obj.isCorrect():
                    raise Exception('identity history in %r is broken, identity is not correct: %r' % (
                        one_user_dir, one_ident_path))
                if not known_id_obj.Valid():
                    raise Exception('identity history in %r is broken, identity is not valid: %r' % (
                        one_user_dir, one_ident_path))
            except:
                lg.exc()
                continue
            one_pub_key = known_id_obj.getPublicKey()
            one_revision = known_id_obj.getRevisionValue()
            if one_pub_key not in _KnownUsers:
                _KnownUsers[one_pub_key] = one_user_dir_path
            known_sources = known_id_obj.getSources(as_originals=True)
            for known_idurl in reversed(known_sources):
                if known_idurl not in _KnownIDURLs:
                    _KnownIDURLs[known_idurl] = known_id_obj.getPublicKey()
                    if _Debug:
                        lg.out(_DebugLevel, '    new IDURL added: %r' % known_idurl)
                else:
                    if _KnownIDURLs[known_idurl] != known_id_obj.getPublicKey():
                        _KnownIDURLs[known_idurl] = known_id_obj.getPublicKey()
                        lg.warn('another user had same identity source: %r' % known_idurl)
                if one_pub_key not in _MergedIDURLs:
                    _MergedIDURLs[one_pub_key] = {}
                    if _Debug:
                        lg.out(_DebugLevel, '    new Public Key added: %s...' % one_pub_key[-10:])
                if one_revision in _MergedIDURLs[one_pub_key]:
                    if _MergedIDURLs[one_pub_key][one_revision] != known_idurl:
                        if _MergedIDURLs[one_pub_key][one_revision] not in known_sources:
                            lg.warn('rewriting existing identity revision %d : %r -> %r' % (
                                one_revision, _MergedIDURLs[one_pub_key][one_revision], known_idurl))
                        _MergedIDURLs[one_pub_key][one_revision] = known_idurl
                else:
                    _MergedIDURLs[one_pub_key][one_revision] = known_idurl
                    if _Debug:
                        lg.out(_DebugLevel, '    revision %d merged with other %d known items' % (
                            one_revision, len(_MergedIDURLs[one_pub_key])))
    _Ready = True