Example #1
    def stage_file(name, composite_file_path, is_binary=False):
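        """Stage one composite input: resolve its path (which may be a URL),
        unpack supported archives into files_path, or hand the file to
        sniff.handle_composite_file otherwise."""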
        dp = composite_file_path['path']
        path, is_url = to_path(dp)
        if is_url:
            dataset.path = path
            dp = path

        auto_decompress = composite_file_path.get('auto_decompress', True)
        if auto_decompress and not datatype.composite_type and CompressedFile.can_decompress(
                dp):
            # It isn't an explicitly composite datatype, so these are just extra files to attach
            # as composite data. It'd be better if Galaxy was communicating this to the tool
            # a little more explicitly so we didn't need to dispatch on the datatype and so we
            # could attach arbitrary extra composite data to an existing composite datatype
            # if need be? Perhaps that would be a mistake though.
            CompressedFile(dp).extract(files_path)
        else:
            tmpdir = output_adjacent_tmpdir(output_path)
            tmp_prefix = 'data_id_%s_convert_' % dataset.dataset_id
            sniff.handle_composite_file(
                datatype,
                dp,
                files_path,
                name,
                is_binary,
                tmpdir,
                tmp_prefix,
                composite_file_path,
            )
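
For context, stage_file only reads two keys of the descriptor it receives. The sketch below shows a plausible shape for that descriptor, inferred from the calls in the snippet; the example URL and the "Sequences" name in the usage comment are invented for illustration.

    # Hypothetical descriptor for one composite input, shaped after the keys
    # stage_file() actually reads ('path' and 'auto_decompress').
    composite_file_path = {
        "path": "https://example.org/uploads/reads.tar.gz",  # resolved by to_path(); may be a local path or URL
        "auto_decompress": True,  # treated as True when the key is absent
    }
    # A caller might then do: stage_file("Sequences", composite_file_path, is_binary=False)
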
Example #2
    def _resolve_item(item):
        # Might be a dataset or a composite upload.
        requested_ext = item.get("ext", None)
        registry = upload_config.registry
        datatype = registry.get_datatype_by_extension(requested_ext)
        composite = item.pop("composite", None)
        if datatype and datatype.composite_type:
            composite_type = datatype.composite_type
            writable_files = datatype.writable_files
            assert composite_type == "auto_primary_file", "basic composite uploads not yet implemented"

            # get_composite_dataset_name would derive the dataset name from the basename
            # of the contents, but that isn't implemented here yet (YAGNI?). A name is
            # still needed, though.
            dataset_bunch = Bunch()
            name = item.get("name") or 'Composite Dataset'
            dataset_bunch.name = name
            primary_file = sniff.stream_to_file(
                StringIO(datatype.generate_primary_file(dataset_bunch)),
                prefix='upload_auto_primary_file',
                dir=".")
            extra_files_path = primary_file + "_extra"
            os.mkdir(extra_files_path)
            rval = {
                "name": name,
                "filename": primary_file,
                "ext": requested_ext,
                "link_data_only": False,
                "sources": [],
                "hashes": [],
                "extra_files": extra_files_path,
            }
            _copy_and_validate_simple_attributes(item, rval)
            composite_items = composite.get("elements", [])
            keys = [value.name for value in writable_files.values()]
            composite_item_idx = 0
            for composite_item in composite_items:
                if composite_item_idx >= len(keys):
                    # More uploaded elements than the datatype defines writable files for.
                    raise Exception("Too many files provided for composite datatype [%s]" % requested_ext)
                key = keys[composite_item_idx]
                writable_file = writable_files[key]
                _, src_target = _has_src_to_path(upload_config, composite_item)
                # do the writing
                sniff.handle_composite_file(
                    datatype,
                    src_target,
                    extra_files_path,
                    key,
                    writable_file.is_binary,
                    ".",
                    os.path.basename(extra_files_path) + "_",
                    composite_item,
                )
                composite_item_idx += 1

            writable_files_idx = composite_item_idx
            while writable_files_idx < len(keys):
                key = keys[writable_files_idx]
                writable_file = writable_files[key]
                if not writable_file.optional:
                    # A required (non-optional) composite file was not supplied.
                    raise Exception("Missing required composite file [%s]" % key)
                writable_files_idx += 1
            return rval
        else:
            if composite:
                raise Exception(
                    "Non-composite datatype [%s] attempting to be created with composite data."
                    % datatype)
            return _resolve_item_with_primary(item)
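
For orientation, an upload item that takes the composite branch above might look roughly like the dict below. The top-level keys mirror what the code reads (ext, name, and composite with an elements list); the src/path shape of each element is an assumption based on the _has_src_to_path call, and the velvet datatype and file paths are only examples.

    # Hypothetical composite upload item (illustrative only).
    item = {
        "ext": "velvet",        # must resolve to a datatype with composite_type set
        "name": "My Assembly",  # optional; falls back to 'Composite Dataset'
        "composite": {
            "elements": [
                # Assumed element shape; entries are matched positionally to the
                # datatype's writable files.
                {"src": "path", "path": "/tmp/velvet/Sequences"},
                {"src": "path", "path": "/tmp/velvet/Roadmaps"},
                {"src": "path", "path": "/tmp/velvet/Log"},
            ],
        },
    }
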
Example #3
    def _resolve_item(item):
        # Might be a dataset or a composite upload.
        requested_ext = item.get("ext", None)
        registry = upload_config.registry
        datatype = registry.get_datatype_by_extension(requested_ext)
        composite = item.pop("composite", None)
        if datatype and datatype.composite_type:
            composite_type = datatype.composite_type
            assert composite_type == "auto_primary_file", "basic composite uploads not yet implemented"

            # get_composite_dataset_name would derive the dataset name from the basename
            # of the contents, but that isn't implemented here yet (YAGNI?). A name is
            # still needed, though.
            metadata = {
                composite_file.substitute_name_with_metadata: datatype.metadata_spec[
                    composite_file.substitute_name_with_metadata
                ].default
                for composite_file in datatype.composite_files.values()
                if composite_file.substitute_name_with_metadata
            }
            name = item.get("name") or "Composite Dataset"
            metadata["base_name"] = name
            dataset = Bunch(
                name=name,
                metadata=metadata,
            )
            writable_files = datatype.get_writable_files_for_dataset(dataset)
            primary_file = sniff.stream_to_file(
                StringIO(datatype.generate_primary_file(dataset)),
                prefix="upload_auto_primary_file",
                dir=".",
            )
            extra_files_path = f"{primary_file}_extra"
            os.mkdir(extra_files_path)
            rval: Dict[str, Any] = {
                "name": name,
                "filename": primary_file,
                "ext": requested_ext,
                "link_data_only": False,
                "sources": [],
                "hashes": [],
                "extra_files": extra_files_path,
            }
            _copy_and_validate_simple_attributes(item, rval)
            composite_items = composite.get("elements", [])
            keys = list(writable_files.keys())
            composite_item_idx = 0
            for composite_item in composite_items:
                if composite_item_idx >= len(keys):
                    # More uploaded elements than the datatype defines writable files for.
                    raise Exception(f"Too many files provided for composite datatype [{requested_ext}]")
                key = keys[composite_item_idx]
                writable_file = writable_files[key]
                _, src_target = _has_src_to_path(upload_config, composite_item)
                # do the writing
                sniff.handle_composite_file(
                    datatype,
                    src_target,
                    extra_files_path,
                    key,
                    writable_file.is_binary,
                    ".",
                    f"{os.path.basename(extra_files_path)}_",
                    composite_item,
                )
                composite_item_idx += 1

            writable_files_idx = composite_item_idx
            while writable_files_idx < len(keys):
                key = keys[writable_files_idx]
                writable_file = writable_files[key]
                if not writable_file.optional:
                    # A required (non-optional) composite file was not supplied.
                    raise Exception(f"Missing required composite file [{key}]")
                writable_files_idx += 1
            return rval
        else:
            if composite:
                raise Exception(f"Non-composite datatype [{datatype}] attempting to be created with composite data.")
            return _resolve_item_with_primary(item)
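
To make the metadata-defaults comprehension in Example #3 concrete, here is a standalone sketch that reproduces it with stand-in classes. The class names, the sequence.fasta/sequence.qual layout, and the base_name default are invented for illustration; only the shape of the comprehension comes from the example.

    from dataclasses import dataclass
    from typing import Dict, Optional


    @dataclass
    class FakeCompositeFile:
        # Stand-in for a Galaxy composite file description; only the attribute
        # the comprehension reads is modeled here.
        substitute_name_with_metadata: Optional[str] = None


    @dataclass
    class FakeMetadataElement:
        default: str = ""


    composite_files = {
        "sequence.fasta": FakeCompositeFile(substitute_name_with_metadata="base_name"),
        "sequence.qual": FakeCompositeFile(),
    }
    metadata_spec: Dict[str, FakeMetadataElement] = {
        "base_name": FakeMetadataElement(default="dataset"),
    }

    # Same shape as the comprehension in Example #3: collect a default metadata
    # value for every composite file whose name is substituted from metadata.
    metadata = {
        cf.substitute_name_with_metadata: metadata_spec[cf.substitute_name_with_metadata].default
        for cf in composite_files.values()
        if cf.substitute_name_with_metadata
    }
    metadata["base_name"] = "Composite Dataset"  # the requested name then overrides the default
    print(metadata)  # {'base_name': 'Composite Dataset'}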