Code example #1
File: data_operations.py  Project: shvar/redfs
def upload_delete_files(
    base_dir, files, ds_name, group_uuid, sync, cryptographer, rdbw, chunk_storage, ospath=posixpath
):
    """Upload several files to the cloud.

    Note: these should be either the files being uploaded or the files
    being deleted; a "deleted" file is marked by C{size=None}.

    @note: all the paths should be in POSIX format.

    @param base_dir: the base directory path (in the dataset) where to upload
        the file.
    @type base_dir: basestring

    @param files: the iterable of files which should be uploaded.
        Contains instances of C{FileToUpload}.
    @type files: col.Iterable

    @param group_uuid: the UUID of the user group to which the file
        should be bound.
    @type group_uuid: UserGroupUUID

    @param sync: whether the created dataset should be considered a
        "sync dataset".
    @type sync: bool

    @param rdbw: RelDB access wrapper.
    @type rdbw: DatabaseWrapperSQLAlchemy

    @param chunk_storage: the chunk storage object.
    @type chunk_storage: IChunkStorage

    @return: the UUID of the newly created dataset.
    @rtype: DatasetUUID
    """
    upload_time = datetime.utcnow()

    # For each FileToUpload, create fake stat for the files,
    # either uploaded or deleted.
    _files_to_upload_with_stat = (
        (
            ftu,
            fake_stat(
                atime=upload_time,
                mtime=upload_time,
                ctime=upload_time,
                # Is deleted? In FTU, "deleted" file is marked
                # via .size=None, but in stat - via st_mode=None.
                st_mode=0777 if ftu.size is not None else None,
                size=ftu.size,
            ),
        )
        for ftu in files
    )  # isinstance(ftu, FileToUpload)
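
The size=None vs. st_mode=None convention above is the crux of this snippet:
a FileToUpload marks a deletion by carrying no size, while the fake stat
encodes the same fact in st_mode. Below is a minimal, hypothetical sketch of
that mapping; FileToUploadStub and fake_stat_stub are illustrative stand-ins,
not the real redfs types.

from collections import namedtuple
from datetime import datetime

# Hypothetical stand-in for redfs's FileToUpload: only the fields
# needed to show the deleted-vs-uploaded marker convention.
FileToUploadStub = namedtuple('FileToUploadStub', 'rel_path size')

def fake_stat_stub(atime, mtime, ctime, st_mode, size):
    # The real fake_stat presumably builds an os.stat_result-like object;
    # a plain dict is enough for the illustration.
    return dict(st_atime=atime, st_mtime=mtime, st_ctime=ctime,
                st_mode=st_mode, st_size=size)

now = datetime.utcnow()
for ftu in (FileToUploadStub(u'docs/report.txt', 2048),   # uploaded file
            FileToUploadStub(u'docs/old.txt', None)):     # deleted file
    st = fake_stat_stub(atime=now, mtime=now, ctime=now,
                        # .size=None in the FTU becomes st_mode=None here
                        st_mode=0o777 if ftu.size is not None else None,
                        size=ftu.size)
    print('%s: %s' % (ftu.rel_path,
                      'deleted' if st['st_mode'] is None else 'uploaded'))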
Code example #2
File: data_operations.py  Project: shvar/redfs
def upload_delete_files(base_dir,
                        files,
                        ds_name,
                        group_uuid,
                        sync,
                        cryptographer,
                        rdbw,
                        chunk_storage,
                        ospath=posixpath):
    """Upload several files to the cloud.

    Note: these should be either the files being uploaded or the files
    being deleted; a "deleted" file is marked by C{size=None}.

    @note: all the paths should be in POSIX format.

    @param base_dir: the base directory path (in the dataset) where to upload
        the file.
    @type base_dir: basestring

    @param files: the iterable of files which should be uploaded.
        Contains instances of C{FileToUpload}.
    @type files: col.Iterable

    @param group_uuid: the UUID of the user group to which the file
        should be bound.
    @type group_uuid: UserGroupUUID

    @param sync: whether the created dataset should be considered a
        "sync dataset".
    @type sync: bool

    @param rdbw: RelDB access wrapper.
    @type rdbw: DatabaseWrapperSQLAlchemy

    @param chunk_storage: the chunk storage object.
    @type chunk_storage: IChunkStorage

    @return: the UUID of the newly created dataset.
    @rtype: DatasetUUID
    """
    upload_time = datetime.utcnow()

    # For each FileToUpload, create fake stat for the files,
    # either uploaded or deleted.
    _files_to_upload_with_stat = \
        ((ftu, fake_stat(atime=upload_time,
                         mtime=upload_time,
                         ctime=upload_time,
                         # Is deleted? In FTU, "deleted" file is marked
                         # via .size=None, but in stat - via st_mode=None.
                         st_mode=0777 if ftu.size is not None else None,
                         size=ftu.size))
             for ftu in files)  # isinstance(ftu, FileToUpload)
Code example #3
File: uhost_process.py  Project: shvar/redfs
    def __backup_some_phys_files(self, base_dir, files, ugroup,
                                 __do_start_backup=True):
        r"""Given some files, create a new dataset and start to backup them.

        >>> # ugroup = UserGroup(
        >>> #     uuid=UserGroupUUID('00000000-bbbb-0000-0000-000000000001'),
        >>> #     name='AlphA',
        >>> #     private=True,
        >>> #     enc_key='\x01\xe6\x13\xdab)\xd2n\xd6\xafTH\x03h\x02\x12'
        >>> #             '\x17D\x1a\xeb\x8b6\xc0\x9b\xa6\x7f\xcc\x06N\xcf'
        >>> #             '\x8b\xcd'
        >>> # )

        >>> # __backup_some_phys_files(
        >>> #     base_dir=u'/home/john/FreeBrie',
        >>> #     files=[
        >>> #         LocalPhysicalFileStateRel(
        >>> #             rel_dir='',
        >>> #             rel_file=u'f1.mp3',
        >>> #             size=13829879,
        >>> #             time_changed=datetime(2012, 11, 5, 12,12,41,904430)),
        >>> #         LocalPhysicalFileStateRel(
        >>> #             rel_dir='',
        >>> #             rel_file=u'f2.avi',
        >>> #             size=3522710,
        >>> #             time_changed=datetime(2012, 11, 5, 12,12,41,988433)),
        >>> #         LocalPhysicalFileStateRel(
        >>> #             rel_dir=u'a/b',
        >>> #             rel_file=u'bbb',
        >>> #             size=4,
        >>> #             time_changed=datetime(2012, 10, 11, 15,33,42,19808)),
        >>> #         LocalPhysicalFileStateRel(
        >>> #             rel_dir=u'a/b/c',
        >>> #             rel_file=u'ccc',
        >>> #             size=4,
        >>> #             time_changed=datetime(2012, 10, 11, 15,33,41,979807))
        >>> #     ],
        >>> #     ugroup=ugroup)

        @todo: complete the unit test, which is half-done!

        @param base_dir: the directory being backed up.
        @type base_dir: basestring

        @param files: the iterable over the files which should be backed up.
            Contains C{LocalPhysicalFileStateRel} objects.
            The caller should ensure that C{files} is non-empty!
        @type files: col.Iterable

        @type ugroup: UserGroup

        @return: the created dataset (if succeeded).
        @rtype: DatasetOnPhysicalFiles, NoneType
        """
        logger.debug('__backup_some_phys_files(%r, %r)',
                     base_dir, ugroup)

        # Group files by rel_dir; then ignore base_dir,
        # keep only rel_dir, rel_file, size and time_changed
        files_grouped_by_rel_dir = \
            ((RelVirtualFile(rel_dir=f.rel_dir,
                             filename=f.rel_file,
                             # If we can read real stat, read it;
                             # otherwise we'll emulate it with fake_stat
                             stat=coalesce(os_ex.safe_stat(  # real stat
                                               os.path.join(base_dir,
                                                            f.rel_path)),
                                           os_ex.fake_stat(  # deleted file
                                               st_mode=None,
                                               atime=f.time_changed,
                                               mtime=f.time_changed,
                                               ctime=f.time_changed,
                                               size=None)),
                             stat_getter=lambda f=f:
                                             os_ex.safe_stat(
                                                 os.path.join(base_dir,
                                                              f.rel_path)),
                             file_getter=lambda f=f:
                                             open(os.path.join(base_dir,
                                                               f.rel_path),
                                                  'rb'))
                 for f in per_rel_dir)
                     for rel_dir, per_rel_dir
                         in sorted_groupby(files, attrgetter('rel_dir')))

        # Example:
        # files_grouped_by_rel_dir = [
        #     [
        #         RelVirtualFile(...),
        #         RelVirtualFile(...),
        #         RelVirtualFile(...)
        #     ],
        #     [
        #         RelVirtualFile(...)
        #     ],
        #     [
        #         RelVirtualFile(...)
        #     ]
        # ]
        _path_map = {base_dir: {'ifiles': files_grouped_by_rel_dir,
                                'stat': os_ex.safe_stat(base_dir)}}

        ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
        ds = self.select_paths_for_backup(ds_name='',
                                          ds_uuid=ds_uuid,
                                          ugroup_uuid=ugroup.uuid,
                                          sync=True,
                                          paths_map=_path_map)
        if ds is not None and __do_start_backup:
            self.start_backup(ds_uuid)

        return ds
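
sorted_groupby is not shown in these snippets; judging by its use, it is
presumably a sort by the key followed by itertools.groupby on the same key,
so that each rel_dir yields exactly one group. A minimal sketch under that
assumption (sorted_groupby_stub and FileState are hypothetical):

from collections import namedtuple
from itertools import groupby
from operator import attrgetter

def sorted_groupby_stub(iterable, key):
    # groupby() only merges adjacent items, so sort by the same key first
    return groupby(sorted(iterable, key=key), key=key)

FileState = namedtuple('FileState', 'rel_dir rel_file')
files = [FileState(u'a/b', u'bbb'), FileState(u'', u'f1.mp3'),
         FileState(u'a/b/c', u'ccc'), FileState(u'', u'f2.avi')]

for rel_dir, per_rel_dir in sorted_groupby_stub(files,
                                                attrgetter('rel_dir')):
    print('%r -> %r' % (rel_dir, [f.rel_file for f in per_rel_dir]))
# groups: u'' -> [f1.mp3, f2.avi]; u'a/b' -> [bbb]; u'a/b/c' -> [ccc]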
Code example #4
    def __backup_some_phys_files(self,
                                 base_dir,
                                 files,
                                 ugroup,
                                 __do_start_backup=True):
        r"""Given some files, create a new dataset and start to backup them.

        >>> # ugroup = UserGroup(
        >>> #     uuid=UserGroupUUID('00000000-bbbb-0000-0000-000000000001'),
        >>> #     name='AlphA',
        >>> #     private=True,
        >>> #     enc_key='\x01\xe6\x13\xdab)\xd2n\xd6\xafTH\x03h\x02\x12'
        >>> #             '\x17D\x1a\xeb\x8b6\xc0\x9b\xa6\x7f\xcc\x06N\xcf'
        >>> #             '\x8b\xcd'
        >>> # )

        >>> # __backup_some_phys_files(
        >>> #     base_dir=u'/home/john/FreeBrie',
        >>> #     files=[
        >>> #         LocalPhysicalFileStateRel(
        >>> #             rel_dir='',
        >>> #             rel_file=u'f1.mp3',
        >>> #             size=13829879,
        >>> #             time_changed=datetime(2012, 11, 5, 12,12,41,904430)),
        >>> #         LocalPhysicalFileStateRel(
        >>> #             rel_dir='',
        >>> #             rel_file=u'f2.avi',
        >>> #             size=3522710,
        >>> #             time_changed=datetime(2012, 11, 5, 12,12,41,988433)),
        >>> #         LocalPhysicalFileStateRel(
        >>> #             rel_dir=u'a/b',
        >>> #             rel_file=u'bbb',
        >>> #             size=4,
        >>> #             time_changed=datetime(2012, 10, 11, 15,33,42,19808)),
        >>> #         LocalPhysicalFileStateRel(
        >>> #             rel_dir=u'a/b/c',
        >>> #             rel_file=u'ccc',
        >>> #             size=4,
        >>> #             time_changed=datetime(2012, 10, 11, 15,33,41,979807))
        >>> #     ],
        >>> #     ugroup=ugroup)

        @todo: complete the unit test, which is half-done!

        @param base_dir: the directory being backed up.
        @type base_dir: basestring

        @param files: the iterable over the files which should be backed up.
            Contains C{LocalPhysicalFileStateRel} objects.
            The caller should ensure that C{files} is non-empty!
        @type files: col.Iterable

        @type ugroup: UserGroup

        @return: the created dataset (if succeeded).
        @rtype: DatasetOnPhysicalFiles, NoneType
        """
        logger.debug('__backup_some_phys_files(%r, %r)', base_dir, ugroup)

        # Group files by rel_dir; then ignore base_dir,
        # keep only rel_dir, rel_file, size and time_changed
        files_grouped_by_rel_dir = \
            ((RelVirtualFile(rel_dir=f.rel_dir,
                             filename=f.rel_file,
                             # If we can read real stat, read it;
                             # otherwise we'll emulate it with fake_stat
                             stat=coalesce(os_ex.safe_stat(  # real stat
                                               os.path.join(base_dir,
                                                            f.rel_path)),
                                           os_ex.fake_stat(  # deleted file
                                               st_mode=None,
                                               atime=f.time_changed,
                                               mtime=f.time_changed,
                                               ctime=f.time_changed,
                                               size=None)),
                             stat_getter=lambda f=f:
                                             os_ex.safe_stat(
                                                 os.path.join(base_dir,
                                                              f.rel_path)),
                             file_getter=lambda f=f:
                                             open(os.path.join(base_dir,
                                                               f.rel_path),
                                                  'rb'))
                 for f in per_rel_dir)
                     for rel_dir, per_rel_dir
                         in sorted_groupby(files, attrgetter('rel_dir')))

        # Example:
        # files_grouped_by_rel_dir = [
        #     [
        #         RelVirtualFile(...),
        #         RelVirtualFile(...),
        #         RelVirtualFile(...)
        #     ],
        #     [
        #         RelVirtualFile(...)
        #     ],
        #     [
        #         RelVirtualFile(...)
        #     ]
        # ]
        _path_map = {
            base_dir: {
                'ifiles': files_grouped_by_rel_dir,
                'stat': os_ex.safe_stat(base_dir)
            }
        }

        ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
        ds = self.select_paths_for_backup(ds_name='',
                                          ds_uuid=ds_uuid,
                                          ugroup_uuid=ugroup.uuid,
                                          sync=True,
                                          paths_map=_path_map)
        if ds is not None and __do_start_backup:
            self.start_backup(ds_uuid)

        return ds
Code example #5
File: data_operations.py  Project: shvar/redfs
def create_directory(base_dir, rel_path, ds_name, group_uuid, sync, cryptographer, rdbw, ospath=posixpath):
    """Create new directory in the cloud.

    @note: all the paths should be in POSIX format.

    @param base_dir: the base directory path (in the dataset) where to upload
        the file.
    @type base_dir: basestring

    @param rel_path: the name of the directory which should be created.
    @type rel_path: basestring

    @param group_uuid: the UUID of the user group to which the file
        should be bound.
    @type group_uuid: UserGroupUUID

    @param sync: whether the created dataset should be considered a
        "sync dataset".
    @type sync: bool

    @param rdbw: RelDB wrapper.
    @type rdbw: DatabaseWrapperSQLAlchemy

    @return: the UUID of the newly created dataset.
    @rtype: DatasetUUID
    """
    upload_time = datetime.utcnow()

    # Create a fake stat for the new directory
    dir_fake_stat = fake_stat(isdir=True, atime=upload_time, mtime=upload_time, ctime=upload_time, size=None)

    # Build a RelVirtualFile describing the new directory
    _vfile = RelVirtualFile(
        rel_dir=ospath.dirname(rel_path),
        filename=ospath.basename(rel_path),
        stat=dir_fake_stat,
        stat_getter=lambda dir_fake_stat=dir_fake_stat: dir_fake_stat,
    )

    # Group RelVirtualFile's by rel_dir
    # _files_grouped_by_rel_dir = \
    #     ((rvf for rvf in per_rel_dir)
    #          for rel_dir, per_rel_dir
    #          in sorted_groupby(_vfiles, attrgetter('rel_dir')))

    paths_map = {
        base_dir: {
            "ifiles": [[_vfile]],
            "stat": fake_stat(isdir=True, atime=upload_time, mtime=upload_time, ctime=upload_time),
        }
    }

    ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
    dataset = DatasetOnVirtualFiles.from_paths(
        ds_name, ds_uuid, group_uuid, sync, paths_map, upload_time, cryptographer
    )

    assert dataset is not None

    # No real host is involved here; use a fake (None) host UUID
    fake_host_uuid = None

    dummy_ds_uuid = Queries.Datasets.create_dataset_for_backup(fake_host_uuid, dataset, rdbw)

    dataset.time_completed = datetime.utcnow()

    # That's all, folks!
    Queries.Datasets.update_dataset(fake_host_uuid, dataset, rdbw)

    return ds_uuid
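
The rel_path handling above leans on posixpath: the parent of the new
directory becomes rel_dir and the leaf becomes the filename, regardless of
the host OS. A quick illustration with a made-up path:

import posixpath

rel_path = u'photos/2012/november'
print(posixpath.dirname(rel_path))    # photos/2012  (the rel_dir)
print(posixpath.basename(rel_path))   # november     (the filename)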
Code example #6
File: data_operations.py  Project: shvar/redfs
            # (fragment: apparently a later portion of upload_delete_files
            # from example #1; the opening `_vfiles = (RelVirtualFile(`
            # lines are elided by the excerpt)
            stat_getter=lambda fstat=fstat: fstat,
            file_getter=ftu.file_getter,
        )
        for ftu, fstat in _files_to_upload_with_stat
    )
    # consists_of(_vfiles, RelVirtualFile)

    # Group RelVirtualFile's by rel_dir
    _files_grouped_by_rel_dir = (
        (rvf for rvf in per_rel_dir) for rel_dir, per_rel_dir in sorted_groupby(_vfiles, attrgetter("rel_dir"))
    )

    paths_map = {
        base_dir: {
            "ifiles": _files_grouped_by_rel_dir,
            "stat": fake_stat(isdir=True, atime=upload_time, mtime=upload_time, ctime=upload_time),
        }
    }

    ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
    dataset = DatasetOnVirtualFiles.from_paths(
        ds_name, ds_uuid, group_uuid, sync, paths_map, upload_time, cryptographer
    )

    assert dataset is not None

    thosts = list(TrustedQueries.HostAtNode.get_all_trusted_hosts(for_storage=True, rdbw=rdbw))

    logger.debug("Uploading dataset %r,... like, to %r", dataset, thosts)

    # No real host is involved here; use a fake (None) host UUID
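
Note the `lambda f=f: ...` and `lambda fstat=fstat: ...` idiom running
through these snippets: Python closures bind names, not values, so without
the default argument every getter created in the loop would see the loop
variable's final value. A minimal demonstration:

# Late binding: all three lambdas share the name i, which ends up as 2.
naive = [lambda: i for i in range(3)]
# Default-argument pinning: each lambda captures i's value at creation.
pinned = [lambda i=i: i for i in range(3)]

print([g() for g in naive])     # [2, 2, 2]
print([g() for g in pinned])    # [0, 1, 2]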
Code example #7
File: data_operations.py  Project: shvar/redfs
def create_directory(base_dir,
                     rel_path,
                     ds_name,
                     group_uuid,
                     sync,
                     cryptographer,
                     rdbw,
                     ospath=posixpath):
    """Create new directory in the cloud.

    @note: all the paths should be in POSIX format.

    @param base_dir: the base directory path (in the dataset) where to upload
        the file.
    @type base_dir: basestring

    @param rel_path: the name of the directory which should be created.
    @type rel_path: basestring

    @param group_uuid: the UUID of the user group to which the file
        should be bound.
    @type group_uuid: UserGroupUUID

    @param sync: whether the created dataset should be considered a
        "sync dataset".
    @type sync: bool

    @param rdbw: RelDB wrapper.
    @type rdbw: DatabaseWrapperSQLAlchemy

    @return: the UUID of the newly created dataset.
    @rtype: DatasetUUID
    """
    upload_time = datetime.utcnow()

    # Create a fake stat for the new directory
    dir_fake_stat = fake_stat(isdir=True,
                              atime=upload_time,
                              mtime=upload_time,
                              ctime=upload_time,
                              size=None)

    # Build a RelVirtualFile describing the new directory
    _vfile = RelVirtualFile(
        rel_dir=ospath.dirname(rel_path),
        filename=ospath.basename(rel_path),
        stat=dir_fake_stat,
        stat_getter=lambda dir_fake_stat=dir_fake_stat: dir_fake_stat)

    # Group RelVirtualFile's by rel_dir
    # _files_grouped_by_rel_dir = \
    #     ((rvf for rvf in per_rel_dir)
    #          for rel_dir, per_rel_dir
    #          in sorted_groupby(_vfiles, attrgetter('rel_dir')))

    paths_map = {
        base_dir: {
            'ifiles': [[_vfile]],
            'stat':
            fake_stat(isdir=True,
                      atime=upload_time,
                      mtime=upload_time,
                      ctime=upload_time)
        }
    }

    ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
    dataset = DatasetOnVirtualFiles.from_paths(ds_name, ds_uuid, group_uuid,
                                               sync, paths_map, upload_time,
                                               cryptographer)

    assert dataset is not None

    # No real host is involved here; use a fake (None) host UUID
    fake_host_uuid = None

    dummy_ds_uuid = Queries.Datasets.create_dataset_for_backup(
        fake_host_uuid, dataset, rdbw)

    dataset.time_completed = datetime.utcnow()

    # That's all, folks!
    Queries.Datasets.update_dataset(fake_host_uuid, dataset, rdbw)

    return ds_uuid
Code example #8
File: data_operations.py  Project: shvar/redfs
               # (fragment: apparently a later portion of upload_delete_files
               # from example #2; the lines constructing each RelVirtualFile
               # are elided by the excerpt)
               for ftu, fstat in _files_to_upload_with_stat)
    # consists_of(_vfiles, RelVirtualFile)

    # Group RelVirtualFile's by rel_dir
    _files_grouped_by_rel_dir = \
        ((rvf for rvf in per_rel_dir)
             for rel_dir, per_rel_dir
                 in sorted_groupby(_vfiles, attrgetter('rel_dir')))

    paths_map = {
        base_dir: {
            'ifiles':
            _files_grouped_by_rel_dir,
            'stat':
            fake_stat(isdir=True,
                      atime=upload_time,
                      mtime=upload_time,
                      ctime=upload_time)
        }
    }

    ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
    dataset = DatasetOnVirtualFiles.from_paths(ds_name, ds_uuid, group_uuid,
                                               sync, paths_map, upload_time,
                                               cryptographer)

    assert dataset is not None

    thosts = list(
        TrustedQueries.HostAtNode.get_all_trusted_hosts(for_storage=True,
                                                        rdbw=rdbw))