# NOTE: the import paths below are assumptions based on the Galaxy code base
# these snippets appear to come from; names not imported here (for example
# GalaxyDataTestApp, _check_metadata_file, FORWARD_IDENTIFIER and
# REVERSE_IDENTIFIER) are assumed to be provided by the surrounding modules.
import json
import os

from galaxy.model import (
    Dataset,
    DatasetCollectionElement,
    HistoryDatasetAssociation,
    JobToInputDatasetAssociation,
)
from galaxy.model.metadata import MetadataTempFile


def _job_dataset(self, name, path):
    # Build a stub HDA backed by an external file and wrap it in a
    # JobToInputDatasetAssociation so it can stand in for a job input.
    metadata = dict()
    hda = HistoryDatasetAssociation(name=name, metadata=metadata)
    hda.dataset = Dataset(id=123, external_filename=path)
    hda.dataset.metadata = dict()
    hda.children = []
    jida = JobToInputDatasetAssociation(name=name, dataset=hda)
    return jida
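# Usage sketch: _job_dataset never touches self, so it can be exercised
# standalone; the name and path below are hypothetical.
jida = _job_dataset(None, "input1", "/tmp/input1.dat")
assert jida.name == "input1"
assert jida.dataset.dataset.external_filename == "/tmp/input1.dat"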
def prototype_elements(self):
    # Yield a forward/reverse pair of collection elements, e.g. as the
    # prototype for a "paired" dataset collection. FORWARD_IDENTIFIER and
    # REVERSE_IDENTIFIER are module-level constants defined elsewhere.
    left_association = DatasetCollectionElement(
        element=HistoryDatasetAssociation(),
        element_identifier=FORWARD_IDENTIFIER,
    )
    right_association = DatasetCollectionElement(
        element=HistoryDatasetAssociation(),
        element_identifier=REVERSE_IDENTIFIER,
    )
    yield left_association
    yield right_association
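# Usage sketch, assuming prototype_elements is a method of a paired-collection
# type plugin; the stand-in class below is a hypothetical host for the method.
class _PairedTypeStandIn:
    prototype_elements = prototype_elements


elements = list(_PairedTypeStandIn().prototype_elements())
assert [e.element_identifier for e in elements] == [FORWARD_IDENTIFIER, REVERSE_IDENTIFIER]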
def test_check_required_metadata_inherited():
    # Subclasses should inherit check_required_metadata flags from the parent
    # datatype's metadata spec.
    app = GalaxyDataTestApp()
    app.datatypes_registry.datatypes_by_extension['inherited'] = CheckRequiredInherited
    hda = HistoryDatasetAssociation(sa_session=app.model.session, extension='inherited')
    assert hda.metadata.spec['columns'].check_required_metadata
    assert not hda.metadata.spec['something'].check_required_metadata
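# A sketch (an assumption, not the original definitions) of what the datatype
# classes used by these tests might look like; the MetadataElement import path
# and the Tabular base class are guesses consistent with the asserts above.
from galaxy.datatypes.metadata import MetadataElement
from galaxy.datatypes.tabular import Tabular


class CheckRequiredTrue(Tabular):
    MetadataElement(name="columns", check_required_metadata=True)
    MetadataElement(name="something")


class CheckRequiredInherited(CheckRequiredTrue):
    # Inherits the parent's metadata spec, including the
    # check_required_metadata flag on 'columns'.
    pass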
def create_bed_data(sa_session, string_size):
    # Create a BED HDA whose column_names metadata holds a single string of
    # the requested size, useful for exercising large metadata values.
    hda = HistoryDatasetAssociation(extension="bed")
    big_string = "0" * string_size
    sa_session.add(hda)
    hda.metadata.column_names = [big_string]
    assert hda.metadata.column_names
    sa_session.flush()
    return hda
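# Usage sketch (the session and size are hypothetical): build a BED HDA
# carrying 1 MiB of column_names metadata.
bed_hda = create_bed_data(sa_session, string_size=1024 * 1024)
assert len(bed_hda.metadata.column_names[0]) == 1024 * 1024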
def _create_hda(model, object_store, history, path, visible=True, include_metadata_file=False):
    # Create a BAM HDA persisted through the object store, optionally with an
    # external metadata file (a bam_index), then attach it to the given
    # history with an annotation.
    hda = HistoryDatasetAssociation(extension="bam", create_dataset=True, sa_session=model.context)
    hda.visible = visible
    model.context.add(hda)
    model.context.flush([hda])
    object_store.update_from_file(hda, file_name=path, create=True)
    if include_metadata_file:
        hda.metadata.from_JSON_dict(
            json_dict={"bam_index": MetadataTempFile.from_JSON({"kwds": {}, "filename": path})}
        )
        _check_metadata_file(hda)
    hda.set_size()
    history.add_dataset(hda)
    hda.add_item_annotation(model.context, history.user, hda, "annotation #%d" % hda.hid)
    return hda
def write_job_metadata(tool_job_working_directory, job_metadata, set_meta, tool_provided_metadata):
    # For each new dataset the tool declared, build a throwaway Dataset/HDA
    # pair around the output file, run the datatype's set_meta, and write the
    # collected metadata back into the tool-provided metadata file.
    for i, file_dict in enumerate(tool_provided_metadata.get_new_datasets_for_metadata_collection(), start=1):
        filename = file_dict["filename"]
        new_dataset_filename = os.path.join(tool_job_working_directory, "working", filename)
        new_dataset = Dataset(id=-i, external_filename=new_dataset_filename)
        extra_files = file_dict.get('extra_files', None)
        if extra_files is not None:
            new_dataset._extra_files_path = os.path.join(tool_job_working_directory, "working", extra_files)
        new_dataset.state = new_dataset.states.OK
        new_dataset_instance = HistoryDatasetAssociation(id=-i, dataset=new_dataset, extension=file_dict.get('ext', 'data'))
        set_meta(new_dataset_instance, file_dict)
        # to_JSON_dict() stores metadata in external (JSON string) form; turn
        # it back into a dict here so it can be re-serialized later.
        file_dict['metadata'] = json.loads(new_dataset_instance.metadata.to_JSON_dict())
    tool_provided_metadata.rewrite()
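# For reference, a hypothetical file_dict as consumed by the loop above: only
# "filename" is required; "ext" defaults to "data"; "extra_files" is optional;
# the "metadata" key is filled in by write_job_metadata itself.
example_file_dict = {
    "filename": "galaxy_dataset_1.dat",  # relative to <job_dir>/working
    "ext": "tabular",
    "extra_files": "dataset_1_files",
}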
def hda(id, name, path):
    # Factory for a minimal HDA stub whose dataset points at an external file.
    hda = HistoryDatasetAssociation(name=name, metadata=dict())
    hda.dataset = Dataset(id=id, external_filename=path)
    hda.dataset.metadata = dict()
    hda.children = []
    return hda
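# Usage sketch; the ids, names, and paths are hypothetical.
forward = hda(1, "forward reads", "/tmp/forward.fastq")
reverse = hda(2, "reverse reads", "/tmp/reverse.fastq")
assert forward.dataset.external_filename == "/tmp/forward.fastq"
assert reverse.dataset.id == 2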
def test_check_required_metadata_true():
    # A datatype that sets check_required_metadata on a metadata element
    # should expose the flag through the HDA's metadata spec.
    app = GalaxyDataTestApp()
    app.datatypes_registry.datatypes_by_extension['true'] = CheckRequiredTrue
    hda = HistoryDatasetAssociation(sa_session=app.model.session, extension='true')
    assert hda.metadata.spec['columns'].check_required_metadata