Example #1
import json
import os

from galaxy.model import Dataset, HistoryDatasetAssociation


def write_job_metadata(tool_job_working_directory, job_metadata, set_meta, tool_provided_metadata):
    for i, file_dict in enumerate(tool_provided_metadata.get_new_datasets_for_metadata_collection(), start=1):
        filename = file_dict["filename"]
        new_dataset_filename = os.path.join(tool_job_working_directory, "working", filename)
        # Negative ids mark these as transient objects that are never persisted.
        new_dataset = Dataset(id=-i, external_filename=new_dataset_filename)
        extra_files = file_dict.get('extra_files', None)
        if extra_files is not None:
            new_dataset._extra_files_path = os.path.join(tool_job_working_directory, "working", extra_files)
        new_dataset.state = new_dataset.states.OK
        new_dataset_instance = HistoryDatasetAssociation(id=-i, dataset=new_dataset, extension=file_dict.get('ext', 'data'))
        set_meta(new_dataset_instance, file_dict)
        # Metadata is stored in external form: turn it back into a dict here so it can be jsonified later.
        file_dict['metadata'] = json.loads(new_dataset_instance.metadata.to_JSON_dict())

    tool_provided_metadata.rewrite()
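A minimal sketch of how write_job_metadata could be driven, assuming a stub in place of Galaxy's tool-provided metadata object. StubToolProvidedMetadata and noop_set_meta are hypothetical names invented for this illustration; only the two methods the function actually calls are stubbed, and running it still requires a real Galaxy model so that the HDA's metadata collection supports to_JSON_dict():

class StubToolProvidedMetadata:
    # Hypothetical stand-in for the tool-provided metadata interface.
    def __init__(self, file_dicts):
        self.file_dicts = file_dicts

    def get_new_datasets_for_metadata_collection(self):
        return self.file_dicts

    def rewrite(self):
        pass  # a real implementation would persist the updated file_dicts

def noop_set_meta(dataset_instance, file_dict):
    pass  # a real set_meta would compute the dataset's metadata

stub = StubToolProvidedMetadata([{"filename": "output.tabular", "ext": "tabular"}])
write_job_metadata("/tmp/job1", None, noop_set_meta, stub)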
Example #2
from galaxy.model import Dataset, HistoryDatasetAssociation, JobToInputDatasetAssociation


def _job_dataset(self, name, path):
    # Build a minimal HDA backed by an external file and wrap it in a
    # job-to-input-dataset association.
    metadata = dict()
    hda = HistoryDatasetAssociation(name=name, metadata=metadata)
    hda.dataset = Dataset(id=123, external_filename=path)
    hda.dataset.metadata = dict()
    hda.children = []
    jida = JobToInputDatasetAssociation(name=name, dataset=hda)
    return jida
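Since _job_dataset takes self, it is presumably a helper method on a test class. A hedged usage sketch from inside that class, with made-up name and path values:

jida = self._job_dataset("input1", "/tmp/inputs/input1.dat")
assert jida.name == "input1"
# jida.dataset is the HDA; its underlying Dataset points at the external file.
assert jida.dataset.dataset.external_filename == "/tmp/inputs/input1.dat"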
Example #3
from galaxy.model import Dataset, HistoryDatasetAssociation


def hda(id, name, path):
    # Build a stand-alone fake HDA whose data lives at an external path.
    hda = HistoryDatasetAssociation(name=name, metadata=dict())
    hda.dataset = Dataset(id=id, external_filename=path)
    hda.dataset.metadata = dict()
    hda.children = []
    return hda
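A possible call, with an invented id, name, and path:

fake = hda(1, "forward reads", "/tmp/reads_1.fastq")
assert fake.name == "forward reads"
assert fake.dataset.external_filename == "/tmp/reads_1.fastq"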