def test_GIVEN_full_model_WHEN_delete_THEN_model_is_deleted(self):
    """
    Deleting a model run removes the run and its dependent rows
    (Dataset, LandCoverAction) while leaving other users' / other
    status runs untouched.

    NOTE(review): this block was recovered from a whitespace-mangled
    source line; the session_scope block boundaries below are inferred
    from which statements use `session` — confirm against VCS history.
    """
    user = self.login('')
    self.create_model_run_ready_for_submit()
    with session_scope(Session) as session:
        # Baseline counts taken BEFORE attaching extra rows, so the
        # assertions below check the delete removed exactly what we added.
        dataset_count = session.query(Dataset).count()
        model = self.model_run_service.get_models_for_user(user)[0]
        # Attach a dataset and a land-cover action to the run we will delete.
        dataset = Dataset()
        dataset.model_run_id = model.id
        session.add(dataset)
        region_count = session.query(LandCoverAction).count()
        session.add(LandCoverAction(model_run_id=model.id))
    # A published run that must survive the delete.
    model_not_to_delete = self.create_run_model(0, "test", user, constants.MODEL_RUN_STATUS_PUBLISHED)
    # Stub out the remote job-runner call so no HTTP request is made.
    self.job_runner_client.delete = Mock()
    self.model_run_service.delete_run_model(model.id, user)
    with session_scope(Session) as session:
        # The deleted run is gone ...
        count = session.query(ModelRun).filter(ModelRun.id == model.id).count()
        assert_that(count, is_(0), 'Count(Model)')
    with session_scope(Session) as session:
        # ... but the published run remains.
        count = session.query(ModelRun).filter(ModelRun.id == model_not_to_delete.id).count()
        assert_that(count, is_(1), 'Count(Model)')
    with session_scope(Session) as session:
        # Dependent Dataset row was cascaded away.
        count = session.query(Dataset).count()
        assert_that(count, is_(dataset_count), 'Count(Datasets)is same as before creating model')
    with session_scope(Session) as session:
        # Dependent LandCoverAction row was cascaded away.
        count = session.query(LandCoverAction).count()
        assert_that(count, is_(region_count), 'Count(LandCoverAction)is same as before creating model')
def _create_dataset(self, dataset_type, filename, is_input, model_run, session, frequency=None):
    """
    Build one Dataset record for a model run and add it to the session.

    :param dataset_type: the dataset type
    :param filename: filename of the dataset
    :param is_input: true if this is an input dataset
    :param model_run: the model run
    :param session: the session
    :param frequency: extra label to append to the name to indicate frequency
    :return:
    """
    # Placeholder so the except-handler below can still log something
    # useful if the very first factory call is the one that raises.
    netcdf_url = "Did not get url but filename is {}".format(filename)
    try:
        netcdf_url = self._dap_client_factory.get_full_url_for_file(filename, config=self._config)

        # Land-cover and soil-property datasets get a fixed name and a
        # fixed [0, 1] range; everything else is interrogated via DAP.
        kind = dataset_type.type
        if kind == constants.DATASET_TYPE_LAND_COVER_FRAC:
            name = "Land Cover Fractions"
            range_low, range_high = 0, 1
        elif kind == constants.DATASET_TYPE_SOIL_PROP:
            name = "Soil Properties"
            range_low, range_high = 0, 1
        else:
            dap_client = self._dap_client_factory.get_dap_client(netcdf_url)
            long_name = dap_client.get_longname()
            name = "{name} ({frequency})".format(name=long_name, frequency=frequency) if frequency else long_name
            range_low, range_high = dap_client.get_data_range()

        dataset = Dataset()
        dataset.model_run_id = model_run.id
        dataset.dataset_type = dataset_type
        dataset.is_categorical = False
        dataset.is_input = is_input
        # Only the owning user may view this dataset.
        dataset.viewable_by_user_id = model_run.user.id
        wms_base = self._dap_client_factory.get_full_url_for_file(filename, service='wms', config=self._config)
        dataset.wms_url = "{url}?{query}".format(
            url=wms_base,
            query="service=WMS&version=1.3.0&request=GetCapabilities")
        dataset.netcdf_url = netcdf_url
        dataset.data_range_from = range_low
        dataset.data_range_to = range_high
        dataset.name = name
        session.add(dataset)
    except DapClientInternalServerErrorException:
        # Best effort: a DAP 500 means we skip this dataset rather than
        # failing the whole run creation.
        log.exception("Trouble creating the dataset %s" % netcdf_url)