def backup_outgoing_message(private_message_object, message_id):
    """
    Keep a copy of an outgoing private message.

    The serialized message is first written into the local chat-channels
    folder under ``<ChatChannelsDir>/<recipient>/out/<message_id>``, then
    registered in the catalog and uploaded to the suppliers.

    Returns True when the local write succeeded and the remote upload
    was started, otherwise False.
    """
    if not driver.is_on('service_backups'):
        lg.warn('service_backups is not started')
        return False
    if not my_keys.is_key_registered(messages_key_id()):
        lg.warn('key to store messages was not found')
        return False
    raw_message = private_message_object.serialize()
    # make sure the per-recipient "out" folder exists before writing
    outgoing_dir = os.path.join(settings.ChatChannelsDir(), private_message_object.recipient, 'out')
    if not bpio._dir_exist(outgoing_dir):
        bpio._dirs_make(outgoing_dir)
    outgoing_filename = os.path.join(outgoing_dir, message_id)
    if not bpio.WriteBinaryFile(outgoing_filename, raw_message):
        lg.warn('failed writing outgoing message locally')
        return False
    # register the remote path in the catalog, then kick off the upload
    remote_message_path = os.path.join('.messages', 'out', private_message_object.recipient, message_id)
    global_message_path = global_id.MakeGlobalID(customer=messages_key_id(), path=remote_message_path)
    res = api.file_create(global_message_path)
    if res['status'] != 'OK':
        lg.warn('failed to create path "%s" in the catalog: %r' % (global_message_path, res))
        return False
    res = api.file_upload_start(outgoing_filename, global_message_path, wait_result=False)
    if res['status'] != 'OK':
        lg.warn('failed to upload message "%s": %r' % (global_message_path, res))
        return False
    return True
def jsonrpc_file_upload_start(self, local_path, remote_path, wait_result=True):
    """
    JSON-RPC entry point: delegate directly to ``api.file_upload_start()``.
    """
    return api.file_upload_start(
        local_path=local_path,
        remote_path=remote_path,
        wait_result=wait_result,
    )
def _cbFileRecevied(self, consumer, local_path, newsegs):
    """
    Callback fired when an uploaded file was fully received by the FTP server.

    Registers the target remote path in the catalog if it is not known yet
    (failing the transfer when the path points at an existing directory),
    then starts uploading the received file to the suppliers.

    Returns the ``(TXFR_COMPLETE_OK,)`` status tuple on success, or a
    failed Deferred on error.
    """
    # NOTE(review): removed a large slab of commented-out legacy code
    # (old backup_fs/backup_control path) that obscured the live logic.
    remote_path = '/'.join(newsegs)
    lg.out(8, 'ftp_server._cbFileRecevied %s %s' % (local_path, remote_path))
    ret = api.file_info(remote_path)
    if ret['status'] != 'OK':
        # path not registered yet - create it in the catalog first
        ret = api.file_create(remote_path)
        if ret['status'] != 'OK':
            return defer.fail(FileNotFoundError(remote_path))
    else:
        if ret['result'][0]['type'] == 'dir':
            # refuse to overwrite an existing directory with a file
            return defer.fail(IsADirectoryError(remote_path))
    ret = api.file_upload_start(local_path, remote_path, wait_result=False)
    if ret['status'] != 'OK':
        lg.warn('file_upload_start() returned: %s' % ret)
        return defer.fail(FileNotFoundError(remote_path))
    return (TXFR_COMPLETE_OK,)
def file_upload_start_v1(self, request):
    """
    REST handler (v1): start uploading a file to the suppliers.

    Requires ``local_path`` and ``remote_path`` in the request data.
    The optional ``wait_result`` and ``open_share`` flags arrive as
    strings and are treated as True only for '1' or 'true'.
    """
    data = _request_data(request, mandatory_keys=['local_path', 'remote_path', ])
    return api.file_upload_start(
        local_path=data['local_path'],
        remote_path=data['remote_path'],
        # `in` already evaluates to a bool - the bool() wrapper was redundant
        wait_result=data.get('wait_result', '0') in ['1', 'true', ],
        open_share=data.get('open_share', '0') in ['1', 'true', ],
    )
def do_backup_key(key_id, keys_folder=None, wait_result=False):
    """
    Send given key to my suppliers to store it remotely.

    This will make a regular backup copy of that key file - encrypted with my master key.
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.do_backup_key key_id=%r' % key_id)
    # the master key must never be uploaded anywhere
    if key_id == my_id.getGlobalID(key_alias='master') or key_id == 'master':
        lg.err('master key must never leave local host')
        if wait_result:
            return fail(Exception('master key must never leave local host'))
        return False
    if not my_keys.is_key_registered(key_id):
        lg.err('unknown key: "%s"' % key_id)
        if wait_result:
            return fail(Exception('unknown key: "%s"' % key_id))
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    # pick the local file and remote path depending on whether the
    # private part of the key is stored locally
    if my_keys.is_key_private(key_id):
        local_key_filepath = os.path.join(keys_folder, '%s.private' % key_id)
        remote_path_for_key = '.keys/%s.private' % key_id
    else:
        local_key_filepath = os.path.join(keys_folder, '%s.public' % key_id)
        remote_path_for_key = '.keys/%s.public' % key_id
    global_key_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path=remote_path_for_key)
    res = api.file_exists(global_key_path)
    if res['status'] == 'OK' and res['result'] and res['result'].get('exist'):
        lg.warn('key %s already exists in catalog' % global_key_path)
        global_key_path_id = res['result'].get('path_id')
        # if a backup for that key is already running, either return
        # immediately (fire-and-forget) or attach to the running job
        if global_key_path_id and backup_control.IsPathInProcess(global_key_path_id):
            lg.warn('skip, another backup for key already started: %s' % global_key_path_id)
            if not wait_result:
                return True
            backup_id_list = backup_control.FindRunningBackup(global_key_path_id)
            if backup_id_list:
                backup_id = backup_id_list[0]
                backup_job = backup_control.GetRunningBackupObject(backup_id)
                if backup_job:
                    # resolve the caller's Deferred from the existing job's result
                    backup_result = Deferred()
                    backup_job.resultDefer.addCallback(
                        lambda resp: backup_result.callback(True) if resp == 'done' else backup_result.errback(
                            Exception('failed to upload key "%s", task was not started: %r' % (global_key_path, resp))))
                    if _Debug:
                        backup_job.resultDefer.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='key_ring.do_backup_key')
                    backup_job.resultDefer.addErrback(backup_result.errback)
                    if _Debug:
                        lg.args(_DebugLevel, backup_id=backup_id, global_key_path_id=global_key_path_id)
                    return backup_result
                else:
                    lg.warn('did not found running backup job: %r' % backup_id)
            else:
                lg.warn('did not found running backup id for path: %r' % global_key_path_id)
    else:
        # path is not known yet - register it in the catalog first
        res = api.file_create(global_key_path)
        if res['status'] != 'OK':
            lg.err('failed to create path "%s" in the catalog: %r' % (global_key_path, res))
            if wait_result:
                return fail(Exception('failed to create path "%s" in the catalog: %r' % (global_key_path, res)))
            return False
    res = api.file_upload_start(
        local_path=local_key_filepath,
        remote_path=global_key_path,
        wait_result=wait_result,
        open_share=False,
    )
    if not wait_result:
        # fire-and-forget mode: just report whether the upload was started
        if res['status'] != 'OK':
            lg.err('failed to upload key "%s": %r' % (global_key_path, res))
            return False
        if _Debug:
            lg.out(_DebugLevel, 'key_ring.do_backup_key key_id=%s : %r' % (key_id, res))
        return True
    # wait_result mode: chain the upload task into a Deferred that fires
    # only when the backup job actually finishes
    backup_result = Deferred()

    # TODO: put that code bellow into api.file_upload_start() method with additional parameter
    def _job_done(result):
        # fires when the backup job completes; 'done' means success
        if _Debug:
            lg.args(_DebugLevel, key_id=key_id, result=result)
        if result == 'done':
            backup_result.callback(True)
        else:
            backup_result.errback(Exception('failed to upload key "%s", backup is %r' % (key_id, result)))
        return None

    def _task_started(resp):
        # fires when the upload task has been accepted; attach to the
        # running backup job identified by resp['version']
        if _Debug:
            lg.args(_DebugLevel, key_id=key_id, response=resp)
        if resp['status'] != 'OK':
            backup_result.errback(Exception('failed to upload key "%s", task was not started: %r' % (global_key_path, resp)))
            return None
        backupObj = backup_control.jobs().get(resp['version'])
        if not backupObj:
            backup_result.errback(Exception('failed to upload key "%s", task %r failed to start' % (global_key_path, resp['version'])))
            return None
        backupObj.resultDefer.addCallback(_job_done)
        backupObj.resultDefer.addErrback(backup_result.errback)
        return None

    # api.file_upload_start() may return a plain dict instead of a
    # Deferred - normalize so the callback chain below always works
    if not isinstance(res, Deferred):
        res_defer = Deferred()
        res_defer.callback(res)
        res = res_defer
    res.addCallback(_task_started)
    res.addErrback(backup_result.errback)
    return backup_result
def file_upload_start_v1(self, request):
    """
    REST handler (v1): start uploading a file to the suppliers.

    Requires ``local_path`` and ``remote_path`` in the request data;
    ``wait_result`` defaults to True when absent.
    """
    data = _request_data(request, mandatory_keys=['local_path', 'remote_path', ])
    wait_flag = data.get('wait_result', True)
    return api.file_upload_start(
        local_path=data['local_path'],
        remote_path=data['remote_path'],
        wait_result=wait_flag,
    )