Example #1
    def __save_with_temp_file(self, file_path: Path, on_save_to_temp):
        temp_file = gene_temp_file_name()

        # Let the caller write the content to the temporary file first.
        on_save_to_temp(temp_file)

        # Then replace the target file with the fully written temporary one.
        if file_path.exists():
            file_path.unlink()
        shutil.move(temp_file.as_posix(), file_path.as_posix())
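Every example on this page relies on the same helper, gene_temp_file_name(). Its implementation is not shown here, but from the call sites it evidently returns a pathlib.Path pointing to a unique, not-yet-created file name in a temporary directory. A minimal sketch under that assumption (the directory location below is hypothetical, not taken from the source):

    import uuid
    from pathlib import Path

    # Hypothetical temp location; the real service presumably configures its own.
    TEMP_DIR = Path('/tmp/hive-temp')

    def gene_temp_file_name() -> Path:
        """Return a Path to a unique temporary file name.

        The file itself is not created here; callers write it and unlink
        it themselves, as the examples on this page do.
        """
        TEMP_DIR.mkdir(parents=True, exist_ok=True)
        return TEMP_DIR / uuid.uuid4().hex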
Example #2
    def ipfs_download_file_content(self, cid, is_proxy=False, sha256=None, size=None):
        temp_file = gene_temp_file_name()
        msg = fm.ipfs_download_file_to_path(cid, temp_file, is_proxy=is_proxy, sha256=sha256, size=size)
        if msg:
            temp_file.unlink()
            raise BadRequestException(msg=msg)
        # Parse the downloaded content as JSON; remove the temporary file even
        # if parsing fails, so no temp files are leaked.
        try:
            with temp_file.open() as f:
                metadata = json.load(f)
        finally:
            temp_file.unlink()
        return metadata
Example #3
    def ipfs_pin_cid(self, cid):
        # TODO: optimize this, as IPFS does not support pinning another node's
        # file to the local node directly; download the file here and
        # re-upload it so the local node holds the content.
        logging.info(f'[fm.ipfs_pin_cid] Try to pin {cid} in backup node.')
        temp_file = gene_temp_file_name()
        self.ipfs_download_file_to_path(cid, temp_file, is_proxy=True)
        logging.info('[fm.ipfs_pin_cid] Download file OK.')
        self.ipfs_upload_file_from_path(temp_file)
        logging.info('[fm.ipfs_pin_cid] Upload file OK.')
        size = temp_file.stat().st_size
        temp_file.unlink()
        return size
Example #4
    def _upload_file2temp(self):
        temp_file = gene_temp_file_name()
        try:
            # Stream the request body into the temporary file in fixed-size
            # chunks so large uploads are never held in memory at once.
            with open(temp_file, 'bw') as f:
                while True:
                    chunk = request.stream.read(CHUNK_SIZE)
                    if not chunk:
                        break
                    f.write(chunk)
        except Exception as e:
            raise BadRequestException(msg='Failed to write the uploaded file.') from e
        return temp_file
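Example #5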
    def backup_patch_file(self, file_name):
        if not file_name:
            raise InvalidParameterException()

        user_did, _, _ = self._check_auth_backup()

        temp_file = gene_temp_file_name()
        fm.write_file_by_request_stream(temp_file)
        # Read the rsync patch data from the uploaded file and apply it to the
        # target file under the user's vault backup folder.
        pickle_data = fm.read_rsync_data_from_file(temp_file)
        temp_file.unlink()
        fm.apply_rsync_data(
            (get_vault_backup_path(user_did) / file_name).resolve(),
            pickle_data)
Example #6
    def dump_database_data_to_backup_cids(self, user_did):
        names = cli.get_all_user_database_names(user_did)
        metadata_list = list()
        for name in names:
            d = {'path': gene_temp_file_name(), 'name': name}
            ## Dump the database data to a snapshot file.
            succeeded = export_mongo_db_to_full_path(d['name'], d['path'])
            if not succeeded:
                raise BadRequestException(
                    f'Failed to dump {d["name"]} for {user_did}')

            ## Upload the snapshot file to the IPFS node and record its metadata.
            d['cid'] = fm.ipfs_upload_file_from_path(d['path'])
            d['sha256'] = fm.get_file_content_sha256(d['path'])
            d['size'] = d['path'].stat().st_size
            d['path'].unlink()

            metadata_list.append(d)
        return metadata_list
Example #7
    def generate_root_backup_cid(self, database_cids, files_cids,
                                 total_file_size):
        """
        Create a json doc containing basic root informations:
        - database data DIDs;
        - files data DIDs;
        - total amount of vault data;
        - total amount of backup data to sync.
        - create timestamp.
        """

        data = {
            'databases': [{
                'name': d['name'],
                'sha256': d['sha256'],
                'cid': d['cid'],
                'size': d['size']
            } for d in database_cids],
            'files': [{
                'sha256': d['sha256'],
                'cid': d['cid'],
                'size': d['size'],
                'count': d['count']
            } for d in files_cids],
            USR_DID: self.user_did,
            'vault_size': fm.get_vault_storage_size(self.user_did),
            'backup_size': sum(d['size'] for d in database_cids) + total_file_size,
            'create_time': datetime.now().timestamp(),
        }

        temp_file = gene_temp_file_name()
        with temp_file.open('w') as f:
            json.dump(data, f)

        sha256 = fm.get_file_content_sha256(temp_file)
        size = temp_file.stat().st_size
        cid = fm.ipfs_upload_file_from_path(temp_file)
        temp_file.unlink()
        return cid, sha256, size
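For reference, the root backup document built above serializes to JSON with roughly the shape sketched below. All values are illustrative placeholders, and 'user_did' merely stands in for whatever key the USR_DID constant actually holds:

    # Illustrative shape only; none of these values are real.
    example_root_doc = {
        'databases': [
            {'name': '<db name>', 'sha256': '<hex digest>', 'cid': '<IPFS CID>', 'size': 10240},
        ],
        'files': [
            {'sha256': '<hex digest>', 'cid': '<IPFS CID>', 'size': 20480, 'count': 5},
        ],
        'user_did': '<user DID>',  # the real key comes from USR_DID
        'vault_size': 30720,
        'backup_size': 30720,
        'create_time': 1651234567.89,
    }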
Example #8
    def restore_database_by_dump_files(self, request_metadata):
        databases = request_metadata['databases']
        if not databases:
            logging.info('[IpfsBackupClient] No user database dump files, skip.')
            return
        for d in databases:
            temp_file = gene_temp_file_name()
            msg = fm.ipfs_download_file_to_path(d['cid'],
                                                temp_file,
                                                is_proxy=True,
                                                sha256=d['sha256'],
                                                size=d['size'])
            if msg:
                logging.error(f'[IpfsBackupClient] Failed to download the dump file for database {d["name"]}.')
                temp_file.unlink()
                raise BadRequestException(msg=msg)
            import_mongo_db_by_full_path(temp_file)
            temp_file.unlink()
            logging.info(f'[IpfsBackupClient] Successfully restored the dump file for database {d["name"]}.')