def test_import_library_require_permissions():
    """Verify library creation (import) is off by default."""
    app = _mock_app()
    sa_session = app.model.context

    # Build a minimal library (library + root folder) to export.
    user = model.User(email="*****@*****.**", password="******")
    library = model.Library(
        name="my library 1",
        description="my library description",
        synopsis="my synopsis",
    )
    root_folder = model.LibraryFolder(name="my library 1", description="folder description")
    library.root_folder = root_folder
    sa_session.add_all((library, root_folder))
    sa_session.flush()

    # Export the library to a directory-backed model store.
    temp_directory = mkdtemp()
    with store.DirectoryModelExportStore(temp_directory, app=app) as export_store:
        export_store.export_library(library)

    # Importing without allow_library_creation must fail.
    error_caught = False
    try:
        import_model_store = store.get_import_model_store_for_directory(
            temp_directory, app=app, user=user)
        import_model_store.perform_import()
    except AssertionError:
        # TODO: throw and catch a better exception...
        error_caught = True
    assert error_caught
def _perform_import_from_directory(directory, app, user, import_history, import_options=None):
    """Import the model store rooted at ``directory`` into ``import_history``."""
    model_store = store.get_import_model_store_for_directory(
        directory,
        app=app,
        user=user,
        import_options=import_options,
    )
    with model_store.target_history(default_history=import_history):
        model_store.perform_import(import_history)
def import_archive(archive_path, app, user):
    """Extract ``archive_path`` and import its model store as a new history.

    :param archive_path: path to a compressed model-store archive
    :param app: application object providing model/object-store access
    :param user: user the imported history is created for
    :returns: the newly created history

    The temporary extraction directory is always removed — the original
    version leaked it whenever extraction or import raised.
    """
    dest_parent = mkdtemp()
    try:
        dest_dir = CompressedFile(archive_path).extract(dest_parent)
        model_store = store.get_import_model_store_for_directory(dest_dir, app=app, user=user)
        with model_store.target_history(default_history=None) as new_history:
            model_store.perform_import(new_history)
    finally:
        # Clean up the extracted archive even when the import fails.
        shutil.rmtree(dest_parent)
    return new_history
def cleanup_after_job(self): """ Set history, datasets, collections and jobs' attributes and clean up archive directory. """ # # Import history. # jiha = self.sa_session.query(model.JobImportHistoryArchive).filter_by( job_id=self.job_id).first() if not jiha: return None user = jiha.job.user new_history = None try: archive_dir = jiha.archive_dir if self.app.config.external_chown_script: external_chown( archive_dir, jiha.job.user.system_user_pwent(getpass.getuser()), self.app.config.external_chown_script, "history import archive directory") model_store = store.get_import_model_store_for_directory( archive_dir, app=self.app, user=user, tag_handler=self.app.tag_handler.create_tag_handler_session()) job = jiha.job with model_store.target_history( default_history=job.history) as new_history: jiha.history = new_history self.sa_session.flush() model_store.perform_import(new_history, job=job, new_history=True) # Cleanup. if os.path.exists(archive_dir): shutil.rmtree(archive_dir) except Exception as e: jiha.job.tool_stderr += f"Error cleaning up history import job: {e}" self.sa_session.flush() raise return new_history
def test_sessionless_import_edit_datasets():
    app, h, temp_directory, import_history = _setup_simple_export({"for_edit": True})

    # Build an import store WITHOUT passing an app (hence no real session)
    # and run the import in edit mode.
    options = store.ImportOptions(allow_dataset_object_edit=True, allow_edit=True)
    import_model_store = store.get_import_model_store_for_directory(
        temp_directory, import_options=options)
    import_model_store.perform_import()

    # Not using app.sa_session but a session mock that has a query/find pattern emulating usage
    # of real sa_session.
    mock_session = import_model_store.sa_session
    for dataset in h.datasets[:2]:
        found = mock_session.query(model.HistoryDatasetAssociation).find(dataset.id)
        assert found is not None
def _import_directory_to_history(app, target, work_directory):
    """Import the model store at ``target`` into a freshly created history.

    :param app: mock application with a model context and object store
    :param target: directory containing the model store to import
    :param work_directory: accepted for signature compatibility; unused here
    :returns: the new history populated by the import

    Fix: the original assigned ``sa_session = app.model.context`` twice;
    the redundant second assignment is removed.
    """
    sa_session = app.model.context
    u = model.User(email="*****@*****.**", password="******")
    import_history = model.History(name="Test History for Import", user=u)
    sa_session.add_all([u, import_history])
    sa_session.flush()

    # Sanity-check the starting state: the target history must be empty.
    assert len(import_history.datasets) == 0

    import_options = store.ImportOptions(allow_dataset_object_edit=True)
    import_model_store = store.get_import_model_store_for_directory(
        target, app=app, user=u, import_options=import_options)
    with import_model_store.target_history(default_history=import_history):
        import_model_store.perform_import(import_history)
    return import_history
def _import_library_target(target, work_directory):
    """Round-trip ``target`` through a directory model store and return the session.

    Exports the target (with serialized dataset objects) to a temporary
    directory, then imports it back with library creation enabled.
    """
    app = _mock_app(store_by="uuid")

    export_directory = mkdtemp()
    with store.DirectoryModelExportStore(
            export_directory, app=app, serialize_dataset_objects=True) as export_store:
        persist_target_to_export_store(target, export_store, app.object_store, work_directory)

    user = model.User(email="*****@*****.**", password="******")
    options = store.ImportOptions(allow_dataset_object_edit=True, allow_library_creation=True)
    importer = store.get_import_model_store_for_directory(
        export_directory, app=app, user=user, import_options=options)
    importer.perform_import()

    return app.model.context
def import_archive(archive_path, app, user):
    """Unpack a tar.gz history archive and import it as a new history."""
    dest_parent = mkdtemp()
    dest_dir = os.path.join(dest_parent, 'dest')

    # Drive unpack_tar_gz_archive exactly as its CLI entry point would:
    # a local (non-URL, non-base64) file extracted into dest_dir.
    options = Dummy()
    options.is_url = False
    options.is_file = True
    options.is_b64encoded = False
    unpack_tar_gz_archive.main(options, (archive_path, dest_dir))

    model_store = store.get_import_model_store_for_directory(dest_dir, app=app, user=user)
    with model_store.target_history(default_history=None) as new_history:
        model_store.perform_import(new_history)
    return new_history
def cleanup_after_job(self):
    """ Set history, datasets, collections and jobs' attributes and clean up archive directory. """
    #
    # Import history.
    #
    # Look up the archive record tied to this job; without one there is
    # nothing to import or clean up.
    jiha = self.sa_session.query(model.JobImportHistoryArchive).filter_by(
        job_id=self.job_id).first()
    if not jiha:
        return None
    user = jiha.job.user
    new_history = None
    try:
        archive_dir = jiha.archive_dir
        model_store = store.get_import_model_store_for_directory(
            archive_dir, app=self.app, user=user)
        job = jiha.job
        with model_store.target_history(
                default_history=job.history) as new_history:
            # Record the new history on the archive row and flush before
            # importing, so the association exists even if the import
            # below fails part-way.
            jiha.history = new_history
            self.sa_session.flush()
            model_store.perform_import(new_history, job=job, new_history=True)
        # Cleanup.
        if os.path.exists(archive_dir):
            shutil.rmtree(archive_dir)
    except Exception as e:
        # Surface the failure on the job's stderr, persist it, then re-raise.
        # f-string replaces the old %-formatting for consistency with the
        # codebase's other error reporting (identical output).
        jiha.job.tool_stderr += f"Error cleaning up history import job: {e}"
        self.sa_session.flush()
        raise
    return new_history
def test_import_export_library():
    """Test basics of library, library folder, and library dataset import/export."""
    app = _mock_app()
    sa_session = app.model.context
    u = model.User(email="*****@*****.**", password="******")

    # Library with a root folder...
    library = model.Library(name="my library 1", description="my library description", synopsis="my synopsis")
    root_folder = model.LibraryFolder(name="my library 1", description='folder description')
    library.root_folder = root_folder
    sa_session.add_all((library, root_folder))
    sa_session.flush()

    # ...containing one subfolder and one library dataset.
    subfolder = model.LibraryFolder(name="sub folder 1", description="sub folder")
    root_folder.add_folder(subfolder)
    sa_session.add(subfolder)

    ld = model.LibraryDataset(folder=root_folder, name="my name", info="my library dataset")
    ldda = model.LibraryDatasetDatasetAssociation(create_dataset=True, flush=False)
    ld.library_dataset_dataset_association = ldda
    root_folder.add_library_dataset(ld)
    sa_session.add(ld)
    sa_session.add(ldda)
    sa_session.flush()

    assert len(root_folder.datasets) == 1
    assert len(root_folder.folders) == 1

    # Export, then re-import with library creation allowed.
    temp_directory = mkdtemp()
    with store.DirectoryModelExportStore(temp_directory, app=app) as export_store:
        export_store.export_library(library)

    import_model_store = store.get_import_model_store_for_directory(
        temp_directory, app=app, user=u,
        import_options=store.ImportOptions(allow_library_creation=True))
    import_model_store.perform_import()

    # Original plus imported copy.
    all_libraries = sa_session.query(model.Library).all()
    assert len(all_libraries) == 2, len(all_libraries)
    all_lddas = sa_session.query(model.LibraryDatasetDatasetAssociation).all()
    assert len(all_lddas) == 2, len(all_lddas)

    # next(...) over a generator replaces `[l for l in ...][0]`, avoiding the
    # ambiguous single-letter name `l` (PEP 8 / E741) and the throwaway list.
    new_library = next(lib for lib in all_libraries if lib.id != library.id)
    assert new_library.name == "my library 1"
    assert new_library.description == "my library description"
    assert new_library.synopsis == "my synopsis"
    new_root = new_library.root_folder
    assert new_root
    assert new_root.name == "my library 1"
    assert len(new_root.folders) == 1
    assert len(new_root.datasets) == 1