def test_scaler_no_model(dbengine):
    driver = Driver(dbengine)
    stmt = insert(db.Optimisation).values(
        opt_dsl_code="test01",
        app_name="testapp",
        target="enable_opt_build:false",
    )
    driver.update_sql(stmt)

    scaler = Scaler(driver)
    app = Application.construct(app_tag="testapp", mpi_ranks=256)

    assert not scaler.scale(app)

async def create_container_mapping(
    container_mapping: ContainerMapping,
    session: AsyncSession = Depends(get_db_session),  # noqa: B008
):
    """Add a new container mapping"""
    # The following uses a different driver than the app itself, meaning this
    # query goes to the real DB and ignores the temporary test DB used by the
    # API when testing.
    driver = Driver()
    mapper = Mapper(driver)
    mapper.add_optcontainer(
        {
            "name": container_mapping.opt_dsl_code,
            "app_name": container_mapping.app_name,
            "build": {
                "enable_opt_build": container_mapping.enable_opt_build,
                **container_mapping.target.dict(),
            },
            "optimisation": container_mapping.selectors,
            "container_file": container_mapping.container_name,
            "image_type": container_mapping.container_type,
            "image_hub": container_mapping.container_registry,
        }
    )
    return container_mapping

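# Hedged usage sketch (not part of the original code): one way the endpoint
# above could be exercised with FastAPI's TestClient. The application import
# path, the "/container_mapping" route, and the payload field values are
# assumptions for illustration only, not the project's confirmed API surface.
from fastapi.testclient import TestClient

from MODAK.app import app  # hypothetical import path


def example_create_container_mapping():
    client = TestClient(app)
    payload = {
        "opt_dsl_code": "test01",
        "app_name": "testapp",
        "enable_opt_build": False,
        "target": {"cpu_type": "x86", "acc_type": "nvidia"},
        "selectors": {},
        "container_name": "modakopt/modak:test",
        "container_type": "docker",
        "container_registry": "docker.io",
    }
    response = client.post("/container_mapping", json=payload)  # assumed route
    assert response.status_code == 200
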
def test_enforce_infra_storage_pref(dbengine):
    """
    Check that Enforcer.enforce_opt returns the storage location from an infra
    """
    driver = Driver(dbengine)
    enforcer = Enforcer(driver)

    infra = InfrastructureIn(
        name="testinfra",
        configuration={
            "storage": {
                "file:///var/tmp": {"storage_class": "default-ssd"},
                "file:///data": {"storage_class": "default-common"},
            }
        },
    )
    stmt = insert(db.Infrastructure).values(**infra.dict())
    driver.update_sql(stmt)

    _, tenv = enforcer.enforce_opt(
        "fancy",
        Job.construct(
            target=Target(name="testinfra", job_scheduler_type="slurm"),
            application=Application.construct(storage_class_pref=None),
        ),
        ["myfeat:true"],
    )
    # without a preference, the "slowest" (cheapest) storage class is returned first
    assert tenv["preferred_storage_location"] == "file:///data"

    _, tenv = enforcer.enforce_opt(
        "fancy",
        Job.construct(
            target=Target(name="testinfra", job_scheduler_type="slurm"),
            application=Application.construct(storage_class_pref="default-ssd"),
        ),
        ["myfeat:true"],
    )
    assert tenv["preferred_storage_location"] == "file:///var/tmp"

class test_driver(unittest.TestCase):
    def setUp(self):
        self.driver = Driver()

    def tearDown(self):
        pass

    def test_driver(self):
        data = self.driver.select_sql(
            select(Optimisation.opt_dsl_code, Optimisation.app_name).where(
                Optimisation.app_name == "pytorch"
            )
        )
        self.assertEqual(data[0][1], "pytorch")

def test_scaler_max(dbengine):
    driver = Driver(dbengine)
    stmt = insert(db.Optimisation).values(
        opt_dsl_code="test01",
        app_name="testapp",
        target="enable_opt_build:false",
    )
    driver.update_sql(stmt)

    MAX_NRANKS = 16
    MAX_NTHREADS = 4
    stmt = insert(db.ScalingModel).values(
        opt_dsl_code="test01",
        model={
            "name": "max",
            "max_nranks": MAX_NRANKS,
            "max_nthreads": MAX_NTHREADS,
        },
    )
    driver.update_sql(stmt)

    scaler = Scaler(driver)
    app = Application.construct(app_tag="testapp", mpi_ranks=256, threads=8)

    assert scaler.scale(app)  # the scaling should run
    assert app.mpi_ranks == MAX_NRANKS
    assert app.threads == MAX_NTHREADS

def test_enforce_app_script(dbengine):
    """
    Check that Enforcer.enforce_opt returns an app-conditioned script
    """
    driver = Driver(dbengine)
    enforcer = Enforcer(driver)

    script = ScriptIn(
        conditions={"application": {"name": "fancy"}},
        data={"stage": "pre", "raw": "echo hello"},
    )
    stmt = insert(db.Script).values(**script.dict())
    driver.update_sql(stmt)

    scripts, _ = enforcer.enforce_opt(
        "inexistentapp",
        Job.construct(target=Target(name="testinfra", job_scheduler_type="slurm")),
        [],
    )
    assert not scripts, "scripts returned when none were expected"

    # despite the target and the myfeat selector, this should return the script
    scripts, _ = enforcer.enforce_opt(
        "fancy",
        Job.construct(target=Target(name="testinfra", job_scheduler_type="slurm")),
        ["myfeat:true"],
    )
    assert scripts, "scripts not found"

def test_enforce_infra_script(dbengine):
    """
    Check that Enforcer.enforce_opt returns an infra-conditioned script
    """
    driver = Driver(dbengine)
    enforcer = Enforcer(driver)

    stmt = insert(db.Script).values(
        conditions={"infrastructure": {"name": "testinfra"}},
        data={"stage": "pre", "raw": "echo hello"},
    )
    driver.update_sql(stmt)

    scripts, _ = enforcer.enforce_opt(
        "inexistentapp",
        Job.construct(target=Target(name="testinfra", job_scheduler_type="slurm")),
        [],
    )
    assert scripts, "empty list of scripts returned"
    assert scripts[0].conditions.infrastructure
    assert scripts[0].conditions.infrastructure.name == "testinfra"

    scripts, _ = enforcer.enforce_opt(
        "inexistentapp",
        Job.construct(target=Target(name="wrongtestinfra", job_scheduler_type="slurm")),
        [],
    )
    assert not scripts, "scripts returned when none were expected"

def test_enforce_opt():
    """
    Check that Enforcer.enforce_opt returns a non-empty set of scripts and a location for a DB entry
    """
    driver = Driver()
    enforcer = Enforcer(driver)

    scripts, _ = enforcer.enforce_opt(
        "tensorflow",
        Job.construct(target=Target(name="test", job_scheduler_type="slurm")),
        ["version:2.1", "xla:true"],
    )
    assert scripts, "empty set returned"

def modak_driver_fixture(dbengine):
    """Get a Driver instance tied to an in-memory DB engine"""
    yield Driver(dbengine)

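# Hedged sketch (assumption, not taken from the original suite): a conftest-style
# "dbengine" fixture like the one the tests above depend on could build an
# in-memory SQLite engine and install the schema before each test. The presence
# of a declarative db.Base with the MODAK tables is assumed here.
import pytest
from sqlalchemy import create_engine


@pytest.fixture(name="dbengine")
def dbengine_fixture():
    engine = create_engine("sqlite:///:memory:")
    db.Base.metadata.create_all(engine)  # assumed: declarative Base in the db module
    yield engine
    engine.dispose()
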
def setUp(self):
    self.driver = Driver()
    self.m = Mapper(self.driver)

class test_mapper(unittest.TestCase):
    def setUp(self):
        self.driver = Driver()
        self.m = Mapper(self.driver)

    def tearDown(self):
        pass

    def test_add_opt(self):
        target_string = '{"cpu_type":"x86","acc_type":"nvidia"}'
        opt_string = '{"xla":true,"version":"1.1"}'
        stmt = delete(Optimisation).where(
            Optimisation.opt_dsl_code == "TF_PIP_XLA"
        )  # no need to delete from Map, since the DB is set to CASCADE
        self.driver.update_sql(stmt)

        self.m.add_optimisation(
            "TF_PIP_XLA",
            "tensorflow",
            json.loads(target_string),
            json.loads(opt_string),
        )
        data = self.driver.select_sql(
            select(Optimisation.app_name).where(
                Optimisation.opt_dsl_code == "TF_PIP_XLA"
            )
        )
        self.assertEqual(len(data), 1)
        print(data[0][0])
        self.assertEqual(data[0][0], "tensorflow")

    def test_add_container(self):
        stmt = delete(Map).where(Map.opt_dsl_code == "TF_PIP_XLA")
        self.driver.update_sql(stmt)
        data = self.driver.select_sql(
            select(Map.container_file).where(Map.opt_dsl_code == "TF_PIP_XLA")
        )
        self.assertEqual(len(data), 0)

        self.m.add_container(
            "TF_PIP_XLA", "AI/containers/tensorflow/tensorflow_pip_xla"
        )
        data = self.driver.select_sql(
            select(Map.container_file).where(Map.opt_dsl_code == "TF_PIP_XLA")
        )
        self.assertEqual(len(data), 1)
        self.assertEqual(data[0][0], "AI/containers/tensorflow/tensorflow_pip_xla")

    def test_map_container_ai(self):
        dsl_file = SCRIPT_DIR / "input" / "tf_snow.json"
        model = JobModel.parse_raw(dsl_file.read_text())
        new_container = self.m.map_container(
            model.job.application, model.job.optimisation
        )
        self.assertEqual(
            new_container, "docker.io://modakopt/modak:tensorflow-2.1-gpu-src"
        )

    def test_map_container_hpc(self):
        model = JobModel.parse_raw(
            SCRIPT_DIR.joinpath("input/mpi_solver.json").read_text()
        )
        assert model.job.optimisation
        dsl_code = self.m._decode_hpc_opt(
            model.job.application.app_tag, model.job.optimisation
        )
        self.assertEqual(dsl_code, "mpich_ub1804_cuda101_mpi314_gnugprof")

    def test_map_container_aliased(self):
        dsl_file = SCRIPT_DIR / "input" / "tf_snow.json"
        with patch.object(Settings, "image_hub_aliases", {"docker": "docker.invalid"}):
            model = JobModel.parse_raw(dsl_file.read_text())
            new_container = self.m.map_container(
                model.job.application, model.job.optimisation
            )
        self.assertEqual(
            new_container,
            "docker.invalid://modakopt/modak:tensorflow-2.1-gpu-src",
        )

def test_enforce_infra_storage_script(dbengine):
    """
    Check that Enforcer.enforce_opt returns an infra- & storage-conditioned script
    """
    driver = Driver(dbengine)
    enforcer = Enforcer(driver)

    # insert a script which should be enabled if the chosen infra provides this storage_class
    script = ScriptIn(
        conditions={
            "infrastructure": {"name": "testinfra", "storage_class": "default-ssd"}
        },
        data={"stage": "pre", "raw": "echo 'hello any storage'"},
    )
    stmt = insert(db.Script).values(**script.dict())
    driver.update_sql(stmt)

    scripts, _ = enforcer.enforce_opt(
        "inexistentapp",
        Job.construct(target=Target(name="testinfra", job_scheduler_type="slurm")),
        [],
    )
    assert not scripts, "script returned despite no infrastructure entry"

    infra = InfrastructureIn(
        name="testinfra",
        configuration={
            "storage": {"file:///var/tmp": {"storage_class": "default-ssd"}}
        },
    )
    stmt = insert(db.Infrastructure).values(**infra.dict())
    driver.update_sql(stmt)

    scripts, _ = enforcer.enforce_opt(
        "fancy",
        Job.construct(
            target=Target(name="testinfra", job_scheduler_type="slurm"),
            application=Application.construct(storage_class_pref=None),
        ),
        ["myfeat:true"],
    )
    assert scripts, "scripts not found"

    # insert a second script which requires both the storage_class and a matching application
    script = ScriptIn(
        conditions={
            "infrastructure": {"storage_class": "default-ssd"},
            "application": {"name": "testapp"},
        },
        data={"stage": "pre", "raw": "echo 'hello ssd-only'"},
    )
    stmt = insert(db.Script).values(**script.dict())
    driver.update_sql(stmt)

    scripts, _ = enforcer.enforce_opt(
        "testapp",
        Job.construct(
            target=Target(name="testinfra", job_scheduler_type="slurm"),
            application=Application.construct(storage_class_pref=None),
        ),
        [],
    )
    assert len(scripts) == 2, "expected both scripts to be returned"

def setUp(self):
    self.driver = Driver()

def test_scaler_no_dsl_code(dbengine):
    driver = Driver(dbengine)
    scaler = Scaler(driver)
    app = Application.construct(app_tag="testapp", mpi_ranks=256)

    assert not scaler.scale(app)