def create_archive_folder(group_key_id, force_path_id=None):
    group_key_alias, group_creator_idurl = my_keys.split_key_id(group_key_id)
    catalog_path = os.path.join('.archive', group_key_alias)
    archive_folder_catalog_path = global_id.MakeGlobalID(
        key_alias=group_key_alias,
        customer=group_creator_idurl.to_id(),
        path=catalog_path,
    )
    res = api.file_exists(archive_folder_catalog_path)
    if res['status'] != 'OK':
        lg.err('failed to check archive folder in the catalog: %r' % res)
        return None
    if res['result']['exist']:
        # reuse the folder that is already present in the catalog
        ret = res['result']['path_id']
        if force_path_id is not None:
            if force_path_id != ret:
                lg.err('archive folder exists, but has a different path ID in the catalog: %r' % ret)
                return None
        return ret
    res = api.file_create(archive_folder_catalog_path, as_folder=True, exist_ok=True, force_path_id=force_path_id)
    if res['status'] != 'OK':
        lg.err('failed to create archive folder in the catalog: %r' % res)
        return None
    if res['result']['created']:
        lg.info('created new archive folder in the catalog: %r' % res)
    else:
        lg.info('archive folder already exists in the catalog: %r' % res)
    ret = res['result']['path_id']
    if force_path_id is not None:
        if force_path_id != ret:
            lg.err('archive folder exists, but has a different path ID in the catalog: %r' % ret)
            return None
    return ret
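# A minimal usage sketch of create_archive_folder() (not part of the original
# module); the group key id below is an invented example value, and a running
# node with a synchronized catalog is assumed.
def _example_create_archive_folder():
    group_key_id = 'group_abcd$alice@id-host.example.com'  # hypothetical key id
    path_id = create_archive_folder(group_key_id)
    if path_id is None:
        return None
    # passing force_path_id back in is idempotent: a mismatch with the catalog
    # is logged as an error and None is returned instead
    assert create_archive_folder(group_key_id, force_path_id=path_id) == path_id
    return path_id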
def _do_check_sync_keys(self, result):
    from logs import lg
    from interface import api
    from storage import keys_synchronizer
    from userid import global_id
    from userid import my_id
    self.sync_keys_requested = False
    global_keys_folder_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path='.keys')
    res = api.file_exists(global_keys_folder_path)
    if res['status'] != 'OK' or not res['result'] or not res['result'].get('exist'):
        res = api.file_create(global_keys_folder_path, as_folder=True)
        if res['status'] != 'OK':
            lg.err('failed to create ".keys" folder "%s" in the catalog: %r' % (global_keys_folder_path, res))
            result.errback(Exception('failed to create keys folder "%s" in the catalog: %r' % (global_keys_folder_path, res)))
            return
        lg.info('created new remote folder ".keys" in the catalog: %r' % global_keys_folder_path)
    keys_synchronizer.A('sync', result)
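# The functions in this module only ever read a few fields from the
# api.file_exists() / api.file_create() responses; the shape sketched here is
# inferred from how this code consumes them (all values are illustrative, not
# real output from a node).
def _example_catalog_response():
    res = {
        'status': 'OK',            # any other value is treated as a failure
        'result': {
            'exist': True,         # file_exists(): path is already in the catalog
            'path_id': '0/1/2/3',  # catalog path ID (invented example value)
            'created': False,      # file_create(): True only when a new item was added
        },
    }
    return res['status'] == 'OK' and bool(res['result'].get('exist'))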
def _do_synchronize_keys(self):
    """
    Make sure all my keys are stored on my suppliers' nodes (encrypted with my master key).

    If I do not have some key locally, but a remote copy is known to exist, download it.
    If some key was not stored yet, make a remote copy on the supplier's machine.
    When a key was renamed (after an identity rotation), make sure the latest copy is
    stored and remove the older one.
    """
    from logs import lg
    from userid import global_id
    from userid import my_id
    from interface import api
    from storage import backup_control
    from storage import index_synchronizer
    from storage import keys_synchronizer
    from twisted.internet.defer import Deferred
    result = Deferred()
    result.addCallback(self._on_keys_synchronized)
    result.addErrback(self._on_keys_synchronize_failed)
    is_in_sync = index_synchronizer.is_synchronized() and backup_control.revision() > 0
    if not is_in_sync:
        lg.warn('backup index database is not synchronized yet')
        result.errback(Exception('backup index database is not synchronized yet'))
        return None
    global_keys_folder_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path='.keys')
    res = api.file_exists(global_keys_folder_path)
    if res['status'] != 'OK' or not res['result'] or not res['result'].get('exist'):
        res = api.file_create(global_keys_folder_path, as_folder=True)
        if res['status'] != 'OK':
            lg.err('failed to create ".keys" folder "%s" in the catalog: %r' % (global_keys_folder_path, res))
            result.errback(Exception('failed to create keys folder "%s" in the catalog: %r' % (global_keys_folder_path, res)))
            return None
        lg.info('created new remote folder ".keys" in the catalog: %r' % global_keys_folder_path)
    keys_synchronizer.A('sync', result)
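# _do_check_sync_keys() and _do_synchronize_keys() repeat the same ".keys"
# folder bootstrap; one possible extraction is sketched below (a hypothetical
# helper, not part of the current code), using only calls already shown above.
def _ensure_keys_folder_exists():
    from logs import lg
    from interface import api
    from userid import global_id
    from userid import my_id
    global_keys_folder_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path='.keys')
    res = api.file_exists(global_keys_folder_path)
    if res['status'] == 'OK' and res['result'] and res['result'].get('exist'):
        return True
    res = api.file_create(global_keys_folder_path, as_folder=True)
    if res['status'] != 'OK':
        lg.err('failed to create ".keys" folder "%s" in the catalog: %r' % (global_keys_folder_path, res))
        return False
    lg.info('created new remote folder ".keys" in the catalog: %r' % global_keys_folder_path)
    return True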
def do_backup_key(key_id, keys_folder=None, wait_result=False):
    """
    Send the given key to my suppliers to store it remotely.

    This will make a regular backup copy of that key file - encrypted with my master key.
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.do_backup_key key_id=%r' % key_id)
    if key_id == my_id.getGlobalID(key_alias='master') or key_id == 'master':
        lg.err('master key must never leave local host')
        if wait_result:
            return fail(Exception('master key must never leave local host'))
        return False
    if not my_keys.is_key_registered(key_id):
        lg.err('unknown key: "%s"' % key_id)
        if wait_result:
            return fail(Exception('unknown key: "%s"' % key_id))
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    if my_keys.is_key_private(key_id):
        local_key_filepath = os.path.join(keys_folder, '%s.private' % key_id)
        remote_path_for_key = '.keys/%s.private' % key_id
    else:
        local_key_filepath = os.path.join(keys_folder, '%s.public' % key_id)
        remote_path_for_key = '.keys/%s.public' % key_id
    global_key_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path=remote_path_for_key)
    res = api.file_exists(global_key_path)
    if res['status'] == 'OK' and res['result'] and res['result'].get('exist'):
        # the key is already listed in the catalog, check for a running backup
        lg.warn('key %s already exists in the catalog' % global_key_path)
        global_key_path_id = res['result'].get('path_id')
        if global_key_path_id and backup_control.IsPathInProcess(global_key_path_id):
            lg.warn('skip, another backup for that key already started: %s' % global_key_path_id)
            if not wait_result:
                return True
            backup_id_list = backup_control.FindRunningBackup(global_key_path_id)
            if backup_id_list:
                backup_id = backup_id_list[0]
                backup_job = backup_control.GetRunningBackupObject(backup_id)
                if backup_job:
                    # attach to the already running backup job and wait for it
                    backup_result = Deferred()
                    backup_job.resultDefer.addCallback(
                        lambda resp: backup_result.callback(True) if resp == 'done' else backup_result.errback(
                            Exception('failed to upload key "%s", task was not started: %r' % (global_key_path, resp))))
                    if _Debug:
                        backup_job.resultDefer.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='key_ring.do_backup_key')
                    backup_job.resultDefer.addErrback(backup_result.errback)
                    if _Debug:
                        lg.args(_DebugLevel, backup_id=backup_id, global_key_path_id=global_key_path_id)
                    return backup_result
                else:
                    lg.warn('did not find a running backup job: %r' % backup_id)
            else:
                lg.warn('did not find a running backup id for path: %r' % global_key_path_id)
    else:
        res = api.file_create(global_key_path)
        if res['status'] != 'OK':
            lg.err('failed to create path "%s" in the catalog: %r' % (global_key_path, res))
            if wait_result:
                return fail(Exception('failed to create path "%s" in the catalog: %r' % (global_key_path, res)))
            return False
    res = api.file_upload_start(
        local_path=local_key_filepath,
        remote_path=global_key_path,
        wait_result=wait_result,
        open_share=False,
    )
    if not wait_result:
        if res['status'] != 'OK':
            lg.err('failed to upload key "%s": %r' % (global_key_path, res))
            return False
        if _Debug:
            lg.out(_DebugLevel, 'key_ring.do_backup_key key_id=%s : %r' % (key_id, res))
        return True

    backup_result = Deferred()

    # TODO: put the code below into the api.file_upload_start() method with an additional parameter

    def _job_done(result):
        if _Debug:
            lg.args(_DebugLevel, key_id=key_id, result=result)
        if result == 'done':
            backup_result.callback(True)
        else:
            backup_result.errback(Exception('failed to upload key "%s", backup is %r' % (key_id, result)))
        return None

    def _task_started(resp):
        if _Debug:
            lg.args(_DebugLevel, key_id=key_id, response=resp)
        if resp['status'] != 'OK':
            backup_result.errback(Exception('failed to upload key "%s", task was not started: %r' % (global_key_path, resp)))
            return None
        backupObj = backup_control.jobs().get(resp['version'])
        if not backupObj:
            backup_result.errback(Exception('failed to upload key "%s", task %r failed to start' % (global_key_path, resp['version'])))
            return None
        backupObj.resultDefer.addCallback(_job_done)
        backupObj.resultDefer.addErrback(backup_result.errback)
        return None

    if not isinstance(res, Deferred):
        # file_upload_start() may return a plain dict; wrap it so both cases
        # are handled uniformly through the Deferred chain
        res_defer = Deferred()
        res_defer.callback(res)
        res = res_defer
    res.addCallback(_task_started)
    res.addErrback(backup_result.errback)
    return backup_result
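# A hedged usage sketch of do_backup_key() (not part of the original module);
# the key id is an invented example. With wait_result=True the call always
# yields a Deferred, because even the early-failure paths above return
# twisted.internet.defer.fail(...).
def _example_backup_key():
    from twisted.internet.defer import Deferred
    d = do_backup_key('share_abcd$alice@id-host.example.com', wait_result=True)
    assert isinstance(d, Deferred)
    d.addCallback(lambda ok: lg.info('key uploaded: %r' % ok))
    d.addErrback(lambda err: lg.warn('key upload failed: %r' % err))
    return d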