Example #1
import itertools
import os
from typing import Optional

from django.core.files.storage import Storage


def get_name_by_incrementing(
    instance: Storage,
    name: str,
    max_length: Optional[int] = None,
) -> str:
    """Generate usable file name for storage iterating if needed.

    Returns a filename that is available in the storage mechanism,
    taking the provided filename into account.

    This maintains the old behavior of get_available_name that was available
    prior to Django 1.5.9. This behavior increments the file name by adding _1,
    _2, etc., but was removed because incrementing the file names in this
    manner created a security vector if users were able to upload (many) files.

    We are only able to use it in places where users are not uploading files,
    and we are instead creating them programmatically (for example, via a
    scraper).

    For more detail, see:

    https://docs.djangoproject.com/en/1.8/releases/1.5.9/#file-upload-denial-of-service

    :param instance: The instance of the storage class being used
    :param name: File name of the object being saved
    :param max_length: Accepted for signature compatibility with
        Storage.get_available_name; this implementation does not enforce it
    :return: The available file path
    """
    dir_name, file_name = os.path.split(name)
    file_root, file_ext = os.path.splitext(file_name)
    count = itertools.count(1)
    while instance.exists(name):
        # file_ext includes the dot.
        name = os.path.join(dir_name, f"{file_root}_{next(count)}{file_ext}")
    return name
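
A minimal usage sketch, assuming a FileSystemStorage instance and a hypothetical pre-existing file "scraped/report.pdf"; the function keeps appending _1, _2, ... until the storage reports the name as free:

from django.core.files.storage import FileSystemStorage

storage = FileSystemStorage(location="/tmp/scraper-output")  # hypothetical location

# If "scraped/report.pdf" already exists, this returns "scraped/report_1.pdf";
# if that also exists, "scraped/report_2.pdf", and so on.
available_name = get_name_by_incrementing(storage, "scraped/report.pdf")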
Example #2
def test_write_manifest_files(storage: Storage, version: Version,
                              asset_factory):
    # Pretend that AssetBlob was defined with the given storage
    # The task piggybacks off of the AssetBlob storage to write the manifest files
    AssetBlob.blob.field.storage = storage

    # Create a new asset in the version so there is information to write
    version.assets.add(asset_factory())

    # All of these files should be generated by the task
    assets_yaml_path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/assets.yaml'
    )
    dandiset_yaml_path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/dandiset.yaml'
    )
    assets_jsonld_path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/assets.jsonld'
    )
    dandiset_jsonld_path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/dandiset.jsonld'
    )
    collection_jsonld_path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/collection.jsonld'
    )

    tasks.write_manifest_files(version.id)

    assert storage.exists(assets_yaml_path)
    assert storage.exists(dandiset_yaml_path)
    assert storage.exists(assets_jsonld_path)
    assert storage.exists(dandiset_jsonld_path)
    assert storage.exists(collection_jsonld_path)
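
The five expected keys differ only in their final file name; a compact sketch of the same checks, using only names that already appear in this test, could build them in a loop:

manifest_names = (
    'assets.yaml', 'dandiset.yaml', 'assets.jsonld',
    'dandiset.jsonld', 'collection.jsonld',
)
for manifest_name in manifest_names:
    path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/{manifest_name}'
    )
    assert storage.exists(path)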
Example #3
    def _check_is_init_files_deleted(self, video_name: str, storage: Storage):
        count_of_stream_init = 5

        stream_init_name = DashFilesNames.dash_init_files_mask(video_name)
        stream_init_name_mask = stream_init_name.replace(
            r"\$RepresentationID\$", "{0}")

        stream_init_files_checked = 0
        for stream_id in range(count_of_stream_init):
            init_name = stream_init_name_mask.format(stream_id)
            if not storage.exists(init_name):
                stream_init_files_checked += 1

        self.assertEqual(
            stream_init_files_checked, count_of_stream_init,
            f"Not all DASH streams are deleted. "
            f"{count_of_stream_init - stream_init_files_checked} streams were not deleted."
        )
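
A small sketch of the mask substitution step above, assuming (hypothetically) that DashFilesNames.dash_init_files_mask returns a name containing the escaped placeholder \$RepresentationID\$:

# Hypothetical mask value; the real one comes from DashFilesNames.dash_init_files_mask
stream_init_name = r"video42_init-stream\$RepresentationID\$.m4s"

# str.replace swaps the literal escaped placeholder for a str.format() slot
stream_init_name_mask = stream_init_name.replace(r"\$RepresentationID\$", "{0}")

print(stream_init_name_mask.format(0))  # video42_init-stream0.m4s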