Example #1
def create_directory(base_dir, rel_path, ds_name, group_uuid, sync, cryptographer, rdbw, ospath=posixpath):
    """Create new directory in the cloud.

    @note: all the paths should be in POSIX format.

    @param base_dir: the base directory path (in the dataset) under which
        the directory should be created.
    @type base_dir: basestring

    @param rel_path: the name of the directory which should be created.
    @type rel_path: basestring

    @param ds_name: the name of the dataset to be created.
    @type ds_name: basestring

    @param group_uuid: the UUID of the user group to which the created
        directory should be bound.
    @type group_uuid: UserGroupUUID

    @param sync: whether the created dataset should be considered a
        "sync dataset".
    @type sync: bool

    @param cryptographer: the cryptographer to use for the dataset contents.

    @param rdbw: RelDB wrapper.
    @type rdbw: DatabaseWrapperSQLAlchemy

    @return: the UUID of the newly created dataset.
    @rtype: DatasetUUID
    """
    upload_time = datetime.utcnow()

    # Create a fake stat for the directory being created
    dir_fake_stat = fake_stat(isdir=True, atime=upload_time, mtime=upload_time, ctime=upload_time, size=None)

    # Turn the original FileToUpload's into RelVirtualFile's
    _vfile = RelVirtualFile(
        rel_dir=ospath.dirname(rel_path),
        filename=ospath.basename(rel_path),
        stat=dir_fake_stat,
        stat_getter=lambda dir_fake_stat=dir_fake_stat: dir_fake_stat,
    )
    # isinstance(ftu, FileToUpload)

    # Group RelVirtualFile's by rel_dir
    # _files_grouped_by_rel_dir = \
    #     ((rvf for rvf in per_rel_dir)
    #          for rel_dir, per_rel_dir
    #          in sorted_groupby(_vfiles, attrgetter('rel_dir')))

    paths_map = {
        base_dir: {
            "ifiles": [[_vfile]],
            "stat": fake_stat(isdir=True, atime=upload_time, mtime=upload_time, ctime=upload_time),
        }
    }

    ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
    dataset = DatasetOnVirtualFiles.from_paths(
        ds_name, ds_uuid, group_uuid, sync, paths_map, upload_time, cryptographer
    )

    assert dataset is not None

    # Use group_uuid as host_uuid
    fake_host_uuid = None

    dummy_ds_uuid = Queries.Datasets.create_dataset_for_backup(fake_host_uuid, dataset, rdbw)

    dataset.time_completed = datetime.utcnow()

    # That's all, folks!
    Queries.Datasets.update_dataset(fake_host_uuid, dataset, rdbw)

    return ds_uuid
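
A side note on the stat_getter argument above: binding dir_fake_stat through a
lambda default argument freezes the value at definition time, which matters when
such getters are produced in a loop. The sketch below is plain Python with
illustrative names only, not part of the original module:

stats = ["stat-a", "stat-b", "stat-c"]

# Late binding: every getter ends up seeing the last value of the loop variable.
late_bound = [lambda: s for s in stats]
assert [get() for get in late_bound] == ["stat-c", "stat-c", "stat-c"]

# Default-argument capture (as in stat_getter above): each getter keeps
# the value it was created with.
early_bound = [lambda s=s: s for s in stats]
assert [get() for get in early_bound] == ["stat-a", "stat-b", "stat-c"]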
Example #2
def create_directory(base_dir,
                     rel_path,
                     ds_name,
                     group_uuid,
                     sync,
                     cryptographer,
                     rdbw,
                     ospath=posixpath):
    """Create new directory in the cloud.

    @note: all the paths should be in POSIX format.

    @param base_dir: the base directory path (in the dataset) under which
        the directory should be created.
    @type base_dir: basestring

    @param rel_path: the name of the directory which should be created.
    @type rel_path: basestring

    @param ds_name: the name of the dataset to be created.
    @type ds_name: basestring

    @param group_uuid: the UUID of the user group to which the created
        directory should be bound.
    @type group_uuid: UserGroupUUID

    @param sync: whether the created dataset should be considered a
        "sync dataset".
    @type sync: bool

    @param cryptographer: the cryptographer to use for the dataset contents.

    @param rdbw: RelDB wrapper.
    @type rdbw: DatabaseWrapperSQLAlchemy

    @return: the UUID of the newly created dataset.
    @rtype: DatasetUUID
    """
    upload_time = datetime.utcnow()

    # Create a fake stat for the directory being created
    dir_fake_stat = fake_stat(isdir=True,
                              atime=upload_time,
                              mtime=upload_time,
                              ctime=upload_time,
                              size=None)

    # Turn the original FileToUpload's into RelVirtualFile's
    _vfile = RelVirtualFile(
        rel_dir=ospath.dirname(rel_path),
        filename=ospath.basename(rel_path),
        stat=dir_fake_stat,
        stat_getter=lambda dir_fake_stat=dir_fake_stat: dir_fake_stat)
    # isinstance(ftu, FileToUpload)

    # Group RelVirtualFile's by rel_dir
    # _files_grouped_by_rel_dir = \
    #     ((rvf for rvf in per_rel_dir)
    #          for rel_dir, per_rel_dir
    #          in sorted_groupby(_vfiles, attrgetter('rel_dir')))

    paths_map = {
        base_dir: {
            'ifiles': [[_vfile]],
            'stat':
            fake_stat(isdir=True,
                      atime=upload_time,
                      mtime=upload_time,
                      ctime=upload_time)
        }
    }

    ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
    dataset = DatasetOnVirtualFiles.from_paths(ds_name, ds_uuid, group_uuid,
                                               sync, paths_map, upload_time,
                                               cryptographer)

    assert dataset is not None

    # Use group_uuid as host_uuid
    fake_host_uuid = None

    dummy_ds_uuid = Queries.Datasets.create_dataset_for_backup(
        fake_host_uuid, dataset, rdbw)

    dataset.time_completed = datetime.utcnow()

    # That's all, folks!
    Queries.Datasets.update_dataset(fake_host_uuid, dataset, rdbw)

    return ds_uuid
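
For reference, the paths_map passed to DatasetOnVirtualFiles.from_paths maps the
base directory to a per-directory descriptor with 'ifiles' (an iterable of
per-directory groups of virtual files) and 'stat' (a stat record for the base
directory itself). A minimal sketch of that shape, using plain placeholder values
instead of real RelVirtualFile objects and fake_stat records:

from datetime import datetime

upload_time = datetime.utcnow()
base_dir = '/photos'  # illustrative value only

paths_map = {
    base_dir: {
        'ifiles': [['IMG_0001.jpg', 'IMG_0002.jpg'], ['notes.txt']],
        'stat': {'isdir': True,
                 'atime': upload_time,
                 'mtime': upload_time,
                 'ctime': upload_time},
    }
}

assert list(paths_map) == [base_dir]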
Example #3
    # Group RelVirtualFile's by rel_dir
    _files_grouped_by_rel_dir = (
        (rvf for rvf in per_rel_dir) for rel_dir, per_rel_dir in sorted_groupby(_vfiles, attrgetter("rel_dir"))
    )

    paths_map = {
        base_dir: {
            "ifiles": _files_grouped_by_rel_dir,
            "stat": fake_stat(isdir=True, atime=upload_time, mtime=upload_time, ctime=upload_time),
        }
    }

    ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
    dataset = DatasetOnVirtualFiles.from_paths(
        ds_name, ds_uuid, group_uuid, sync, paths_map, upload_time, cryptographer
    )

    assert dataset is not None

    thosts = list(TrustedQueries.HostAtNode.get_all_trusted_hosts(for_storage=True, rdbw=rdbw))

    logger.debug("Uploading dataset %r,... like, to %r", dataset, thosts)

    # Use group_uuid as host_uuid
    fake_host_uuid = None

    dummy_ds_uuid = Queries.Datasets.create_dataset_for_backup(fake_host_uuid, dataset, rdbw)

    # Commit this session so that the web shows the latest files
    # with status "Processing" while they are being encrypted.
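
The grouping expression above relies on a sorted_groupby helper, which presumably
sorts by the key and then delegates to itertools.groupby (which only groups
adjacent items). A self-contained sketch under that assumption:

from collections import namedtuple
from itertools import groupby
from operator import attrgetter


def sorted_groupby(iterable, key):
    """Assumed behaviour: sort first so groupby sees each group as one run."""
    return groupby(sorted(iterable, key=key), key=key)


VFile = namedtuple("VFile", ["rel_dir", "filename"])
_vfiles = [VFile("a", "x.txt"), VFile("b", "y.txt"), VFile("a", "z.txt")]

_files_grouped_by_rel_dir = \
    ((rvf for rvf in per_rel_dir)
         for rel_dir, per_rel_dir
         in sorted_groupby(_vfiles, attrgetter("rel_dir")))

# Prints ['x.txt', 'z.txt'] and then ['y.txt']; each group must be consumed
# before advancing, since groupby shares the underlying iterator.
for group in _files_grouped_by_rel_dir:
    print([rvf.filename for rvf in group])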
Example #4
    paths_map = {
        base_dir: {
            'ifiles':
            _files_grouped_by_rel_dir,
            'stat':
            fake_stat(isdir=True,
                      atime=upload_time,
                      mtime=upload_time,
                      ctime=upload_time)
        }
    }

    ds_uuid = DatasetUUID.safe_cast_uuid(gen_uuid())
    dataset = DatasetOnVirtualFiles.from_paths(ds_name, ds_uuid, group_uuid,
                                               sync, paths_map, upload_time,
                                               cryptographer)

    assert dataset is not None

    thosts = list(
        TrustedQueries.HostAtNode.get_all_trusted_hosts(for_storage=True,
                                                        rdbw=rdbw))

    logger.debug('Uploading dataset %r,... like, to %r', dataset, thosts)

    # Use group_uuid as host_uuid
    fake_host_uuid = None

    dummy_ds_uuid = Queries.Datasets.create_dataset_for_backup(
        fake_host_uuid, dataset, rdbw)
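
fake_stat itself is not shown in these examples; judging by the keyword arguments
used (isdir, atime, mtime, ctime, size), a minimal stand-in could be a
namedtuple-based record like the sketch below (an assumption, not the project's
actual helper):

from collections import namedtuple
from datetime import datetime

FakeStat = namedtuple('FakeStat', ['isdir', 'atime', 'mtime', 'ctime', 'size'])


def fake_stat(isdir=False, atime=None, mtime=None, ctime=None, size=None):
    """Build an os.stat()-like record for an object that never touched disk."""
    return FakeStat(isdir=isdir, atime=atime, mtime=mtime, ctime=ctime, size=size)


upload_time = datetime.utcnow()
dir_fake_stat = fake_stat(isdir=True,
                          atime=upload_time,
                          mtime=upload_time,
                          ctime=upload_time,
                          size=None)
assert dir_fake_stat.isdir and dir_fake_stat.size is None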