def test_model_create_context_persist_error_hda():
    """A target element carrying only an ``error_message`` should round-trip
    through export/import as an HDA in the "error" state, with the message
    surfaced as the dataset's ``info``.
    """
    work_dir = mkdtemp()
    # A file is written into the work directory even though the errored
    # element never references it — preserved from the original setup.
    with open(os.path.join(work_dir, "file1.txt"), "w") as fh:
        fh.write("hello world\nhello world line 2")

    target = {
        "destination": {"type": "hdas"},
        "elements": [
            {"error_message": "Failed to download some URL I guess"},
        ],
    }

    app = _mock_app(store_by="uuid")
    export_dir = mkdtemp()
    with store.DirectoryModelExportStore(export_dir, serialize_dataset_objects=True) as export_store:
        persist_target_to_export_store(target, export_store, app.object_store, work_dir)

    history = _import_directory_to_history(app, export_dir, work_dir)

    assert len(history.datasets) == 1
    errored_hda = history.datasets[0]
    assert errored_hda.state == "error"
    assert errored_hda.info == "Failed to download some URL I guess"
def test_persist_target_hdca():
    """Persisting an ``hdca`` destination target with two file elements should
    import a single list collection containing two datasets whose on-disk
    contents match the source files.
    """
    work_dir = mkdtemp()
    with open(os.path.join(work_dir, "file1.txt"), "w") as fh:
        fh.write("hello world\nhello world line 2")
    with open(os.path.join(work_dir, "file2.txt"), "w") as fh:
        fh.write("file 2 contents")

    target = {
        "destination": {"type": "hdca"},
        "name": "My HDCA",
        "collection_type": "list",
        "elements": [
            {
                "filename": "file1.txt",
                "ext": "txt",
                "dbkey": "hg19",
                "info": "dataset info",
                "name": "my file",
            },
            {
                "filename": "file2.txt",
                "ext": "txt",
                "dbkey": "hg18",
                "info": "dataset info 2",
                "name": "my file 2",
            },
        ],
    }

    app = _mock_app(store_by="uuid")
    export_dir = mkdtemp()
    with store.DirectoryModelExportStore(export_dir, serialize_dataset_objects=True) as export_store:
        persist_target_to_export_store(target, export_store, app.object_store, work_dir)

    history = _import_directory_to_history(app, export_dir, work_dir)

    assert len(history.dataset_collections) == 1
    assert len(history.datasets) == 2

    hdca = history.dataset_collections[0]
    instances = hdca.dataset_instances
    assert len(instances) == 2

    first, second = instances
    with open(first.file_name, "r") as fh:
        assert fh.read().startswith("hello world\n")
    with open(second.file_name, "r") as fh:
        assert fh.read().startswith("file 2 contents")
def main(argv=None):
    """CLI entry point: build an object store from the parsed arguments, load
    target descriptions from a YAML file, and persist each target into the
    requested export-store flavor (directory, tar, bag directory, or bag
    archive).

    :param argv: argument list to parse; defaults to ``sys.argv[1:]``.
    """
    if argv is None:
        argv = sys.argv[1:]
    args = _arg_parser().parse_args(argv)

    # NOTE(review): os.umask(0o77) *sets* the process umask to 0o077 and
    # returns the previous value — that side effect is intentional here
    # (preserved from the original); the Bunch records the prior umask.
    object_store_config = Bunch(
        object_store_store_by="uuid",
        object_store_config_file=args.object_store_config,
        object_store_check_old_style=False,
        jobs_directory=None,
        new_file_path=None,
        umask=os.umask(0o77),
        gid=os.getgid(),
    )
    object_store = build_object_store_from_config(object_store_config)
    galaxy.model.Dataset.object_store = object_store
    galaxy.model.set_datatypes_registry(example_datatype_registry_for_sample())

    # Deferred import so the mapping module is only initialized when running
    # as a script; uses an in-memory SQLite database.
    from galaxy.model import mapping
    mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=object_store)

    with open(args.objects) as fh:
        targets = yaml.safe_load(fh)
    if not isinstance(targets, list):
        targets = [targets]

    export_path = args.export
    export_type = args.export_type
    if export_type is None:
        # Infer the export flavor from the path: a .tgz suffix means a
        # bagit archive, anything else a plain directory store.
        export_type = "bag_archive" if export_path.endswith(".tgz") else "directory"

    export_types = {
        "directory": store.DirectoryModelExportStore,
        "tar": store.TarModelExportStore,
        "bag_directory": store.BagDirectoryModelExportStore,
        "bag_archive": store.BagArchiveModelExportStore,
    }
    store_class = export_types[export_type]
    with store_class(export_path, serialize_dataset_objects=True) as export_store:
        for target in targets:
            persist_target_to_export_store(target, export_store, object_store, ".")
def _import_library_target(target, work_directory):
    """Export *target* to a temporary directory store and immediately
    re-import it as a library for a throwaway user.

    :param target: target description dict to persist.
    :param work_directory: directory containing any files the target references.
    :returns: the app's SQLAlchemy session after the import completes.
    """
    app = _mock_app(store_by="uuid")
    export_dir = mkdtemp()
    with store.DirectoryModelExportStore(export_dir, app=app, serialize_dataset_objects=True) as export_store:
        persist_target_to_export_store(target, export_store, app.object_store, work_directory)

    user = model.User(email="*****@*****.**", password="******")
    # Library creation and dataset edits must be explicitly allowed for the
    # import to materialize library content.
    options = store.ImportOptions(allow_dataset_object_edit=True, allow_library_creation=True)
    importer = store.get_import_model_store_for_directory(
        export_dir, app=app, user=user, import_options=options)
    importer.perform_import()
    return app.model.context
def test_model_create_context_persist_hdas():
    """A single-file ``hdas`` target should import with its extension, name,
    computed metadata, declared MD5 hash, tag, and file contents intact.
    """
    work_dir = mkdtemp()
    with open(os.path.join(work_dir, "file1.txt"), "w") as fh:
        fh.write("hello world\nhello world line 2")

    target = {
        "destination": {"type": "hdas"},
        "elements": [
            {
                "filename": "file1.txt",
                "ext": "txt",
                "dbkey": "hg19",
                "name": "my file",
                "md5": "e5d21b1ea57fc9a31f8ea0110531bf3d",
                "tags": ["name:value"],
            },
        ],
    }

    app = _mock_app()
    export_dir = mkdtemp()
    with store.DirectoryModelExportStore(export_dir, serialize_dataset_objects=True) as export_store:
        persist_target_to_export_store(target, export_store, app.object_store, work_dir)

    history = _import_directory_to_history(app, export_dir, work_dir)

    assert len(history.datasets) == 1
    hda = history.datasets[0]
    assert hda.ext == "txt"
    assert hda.name == "my file"
    assert hda.metadata.data_lines == 2

    hashes = hda.dataset.hashes
    assert len(hashes) == 1
    assert hashes[0].hash_value == "e5d21b1ea57fc9a31f8ea0110531bf3d"

    tags = hda.tags
    assert len(tags) == 1
    assert tags[0].value == "value"

    with open(hda.file_name) as fh:
        assert fh.read().startswith("hello world\n")