def get_observation(self, units, observation) -> dict:
    try:
        imported_units = json.loads(units)

        if imported_units.get('name', False):
            imported_units = self.from_destructured(imported_units)
    except Exception:
        # `units` is not JSON, so treat it as a dotted import path
        imported_units = import_class(units)

    if not isinstance(imported_units, dict):
        for key in observation.keys():
            if key != 'n':
                observation[key] = float(observation[key]) * imported_units
            else:
                observation[key] = int(observation[key])
    else:
        imported_units = {
            param: import_class(unit)
            for param, unit in imported_units.items()
        }

        for key in observation.keys():
            if key != 'n':
                observation[key] = self.process_obs(
                    observation[key]) * imported_units[key]
            else:
                observation[key] = int(observation[key])

    return observation

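# Illustrative sketch only (not part of the view). `units` is expected to be
# either a dotted import path or a JSON payload; the examples below are
# hypothetical and assume the `quantities` package is installed:
#
#   'quantities.mV'                                       # one unit for every field
#   '{"mean": "quantities.mV", "std": "quantities.mV"}'   # per-field unit paths
#   '{"name": "half_volt", "base": {...}, "symbol": "hV"}'  # custom unit,
#                                                            # see from_destructured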
def importable(unit):
    try:
        import_class(unit)
    except AttributeError:
        return False

    return True

def get_units(import_path):
    klass = import_class(import_path)

    if isinstance(klass.units, dict):
        str_units = {}

        for param in klass.units.keys():
            str_units.update(
                {
                    param: f"{inspect.getmodule(klass.units[param]).__package__}.{klass.units[param].symbol}"  # noqa: E501
                }
            )

        return json.dumps(str_units)

    units = \
        f"{inspect.getmodule(klass.units).__package__}.{klass.units.symbol}"

    def importable(unit):
        try:
            import_class(unit)
        except AttributeError:
            return False

        return True

    def destructure_custom_unit(unit):
        base_unit = list(unit.definition.dimensionality).pop()
        base_quantity = \
            f"{inspect.getmodule(unit).__package__}.{base_unit.symbol}"

        if not importable(base_quantity):
            base_quantity = "N/A"

        destructured = {
            'name': unit.name,
            'base': {
                'quantity': base_quantity,
                'coefficient': float(unit.definition.base)
            },
            'symbol': unit.symbol
        }

        return json.dumps(destructured)

    try:
        import_class(units)
    except AttributeError:
        units = destructure_custom_unit(klass.units)

    return units

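# Illustrative sketch only: for a custom unit such as
# pq.UnitQuantity('half_volt', 0.5 * pq.V, 'hV'), destructure_custom_unit
# above would emit JSON roughly of this shape (the values are hypothetical):
#
# {
#     "name": "half_volt",
#     "base": {"quantity": "quantities.V", "coefficient": 0.5},
#     "symbol": "hV"
# }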
def get_extra_capabilities(model_class_import_path):
    klass = import_class(model_class_import_path)

    if klass.extra_capability_checks is not None:
        return klass.extra_capability_checks
    else:
        return {}

def units_name(self):
    try:
        destructured = json.loads(self.units)
    except (json.JSONDecodeError, TypeError):
        if self.units is not None:
            return import_class(self.units).name
        else:
            return "N/A"

    if destructured.get('name', False):
        return build_destructured_unit(destructured).name
    else:
        return ' | '.join([
            import_class(value).name
            for key, value in destructured.items()
        ])

def post(self, request):
    tests = request.data.get('tests')
    models = request.data.get('models')

    test_models = [
        test for test in TestInstance.objects.filter(id__in=tests)
    ]
    model_models = [
        model for model in ModelInstance.objects.filter(id__in=models)
    ]

    test_instances = [
        import_class(test.test_class.import_path)(
            self.get_observation(test.test_class.units, test.observation),
            name=f'{test.name}#{test.pk}')
        for test in test_models
    ]
    model_instances = [
        self._create_model_instance(model) for model in model_models
    ]

    suite = TestSuite(test_instances)
    result = suite.check(model_instances)

    return Response(
        {'compatibility': result.to_csv(sep='|', line_terminator=";")})

def from_destructured(self, unit_dict):
    unit = pq.UnitQuantity(
        unit_dict.get('name'),
        import_class(unit_dict.get('base').get('quantity')) *
        unit_dict.get('base').get('coefficient'),
        unit_dict.get('symbol'))

    return unit

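# Illustrative usage sketch, assuming `pq` is the `quantities` package; the
# payload mirrors what destructure_custom_unit produces in get_units:
#
#   half_volt = self.from_destructured({
#       'name': 'half_volt',
#       'base': {'quantity': 'quantities.V', 'coefficient': 0.5},
#       'symbol': 'hV'
#   })
#   # 2 * half_volt then corresponds to 1.0 * pq.V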
def get_observation_schema(import_path):
    klass = import_class(import_path)
    observation_schema = klass.observation_schema

    if observation_schema is None:
        observation_schema = {}

    return observation_schema

def get_default_params(import_path):
    klass = import_class(import_path)
    default_params = klass.default_params

    if default_params is None:
        default_params = {}

    return default_params

def get_test_parameters_schema(import_path):
    klass = import_class(import_path)
    params_schema = klass.params_schema

    if params_schema is None:
        params_schema = {}

    return params_schema

def _create_model_instance(self, model_object):
    models_file_path = s.DOWNLOADED_MODEL_DIR
    file_name = os.path.basename(model_object.url)
    path = f"{models_file_path}/{file_name}"

    download_and_save_model(path, model_object.url)

    return import_class(model_object.model_class.import_path)(
        path, name=f'{model_object.name}#{model_object.pk}')

def get_model_parameters(url: t.Union[str, dict], model_id: str) -> dict:
    servlet_manager = pg.GeppettoServletManager.get_instance(
        'model_parameters')

    if isinstance(url, dict):
        extractor = pg.interpreters.helpers.NeuroMLDbExtractor(
            url, model_id, '/tmp/')
        file_name = extractor.root_file
    else:
        file_name = os.path.basename(url)

    if isinstance(url, dict):
        interpreter_string = pg.interpreters.helpers.interpreter_detector(
            file_name)
    else:
        interpreter_string = pg.interpreters.helpers.interpreter_detector(url)

    interpreter_class = import_class(interpreter_string)

    timestamp = int(time.time())

    builder = pg.GeppettoProjectBuilder(
        model_file_url=url,
        interpreter=interpreter_class,
        project_location=f"{s.PYGEPPETTO_BUILDER_PROJECT_BASE_URL}/{timestamp}/p.json",  # noqa: E501
        xmi_location=f"{s.PYGEPPETTO_BUILDER_PROJECT_BASE_URL}/{timestamp}/m.xmi",  # noqa: E501
        model_file_location=f"{s.PYGEPPETTO_BUILDER_PROJECT_BASE_URL}/{timestamp}/{file_name}"  # noqa: E501
    )

    project_url = builder.build_project()

    servlet_manager.handle('load_project_from_url', project_url)

    wrong_message = True
    result = {}

    # Keep reading from the servlet until the model is loaded or an error
    # message arrives.
    while wrong_message:
        result = servlet_manager.read()
        db_logger.info(result)

        parsed_result = json.loads(result)
        wrong_message = (
            parsed_result.get('type') != 'geppetto_model_loaded'
            and parsed_result.get('type') != 'generic_error'
        )

    if parsed_result.get('type') == 'generic_error':
        parsed_result = json.loads(parsed_result.get('data'))
        parsed_result = json.loads(parsed_result.get('message'))

        raise Exception(parsed_result)

    return parsed_result

def check_capabilities(model_file_path, model_class_import_path):
    if model_class_import_path == '':
        return False

    klass = import_class(model_class_import_path)

    try:
        failed = klass(model_file_path).failed_extra_capabilities
    except Exception as e:
        db_logger.exception(e)
        return False

    return len(failed) == 0

def send_score_to_geppetto(score):
    db_logger.info(f'Processing score with ID {score.pk}')

    model_name = os.path.basename(score.model_instance.url)
    interpreter = import_class(interpreter_detector(score.model_instance.url))

    project_builder = pg.GeppettoProjectBuilder(
        score=score,
        interpreter=interpreter,
        project_location=f"{s.PYGEPPETTO_BUILDER_PROJECT_BASE_URL}/{score.owner}/{score.pk}/project.json",  # noqa: E501
        xmi_location=f"{s.PYGEPPETTO_BUILDER_PROJECT_BASE_URL}/{score.owner}/{score.pk}/model.xmi",  # noqa: E501
        model_file_location=f"{s.PYGEPPETTO_BUILDER_PROJECT_BASE_URL}/{score.owner}/{score.pk}/{model_name}",  # noqa: E501
    )

    project_url = project_builder.build_project()

    servlet_manager = pg.GeppettoServletManager.get_instance('scheduler')
    servlet_manager.handle(S.LOAD_PROJECT_FROM_URL, project_url)

    project_loaded = False
    model_loaded = False
    project_id = None

    # Wait for the project and its Geppetto model to be loaded.
    while not project_loaded and not model_loaded:
        try:
            response = json.loads(servlet_manager.read())
        except Exception as e:
            return e

        response_type = response.get('type')
        db_logger.info(response_type)

        if response_type == SR.GENERIC_ERROR or response_type == SR.ERROR_LOADING_PROJECT:  # noqa: E501
            error = get_error(response.get('data'))
            db_logger.error(error)

            return error

        project_loaded = response_type == SR.PROJECT_LOADED
        model_loaded = response_type == SR.GEPPETTO_MODEL_LOADED

        if project_loaded:
            project_id = get_project_id(response.get('data'))
            db_logger.info(project_id)

    if project_id is None:
        return "Project not found"

    servlet_manager.handle(
        S.RUN_EXPERIMENT,
        json.dumps({
            'projectId': project_id,
            'experimentId': 1
        }))

    finished = False
    experiment_loaded = False

    # Wait until the experiment is dispatched; a websocket timeout here means
    # the experiment was started successfully.
    while not finished:
        try:
            response = json.loads(servlet_manager.read())
        except WebSocketTimeoutException:
            db_logger.info('Successfully started experiment')
            finished = True
        except Exception as e:
            db_logger.error(e)

            score.error = e
            score.status = score.FAILED
            score.save()

        response_type = response.get('type')
        db_logger.info(response_type)

        if response_type == SR.ERROR_RUNNING_EXPERIMENT:
            error = get_error(response.get('data'))
            db_logger.error(error)

            score.error = error
            score.status = score.FAILED
            score.test_instance.build_info = f' {platform.system()}-{platform.release()}/{platform.system()}'  # noqa: E501
            score.test_instance.hostname = 'Scidash Host'
            score.save()

            finished = True

        experiment_loaded = response_type == SR.EXPERIMENT_LOADED

        if experiment_loaded:
            db_logger.info(f'Score with ID {score.pk} successfully sent')

def validate(self, data):
    sciunit.settings['PREVALIDATE'] = True

    class_data = data.get('test_class')

    if not class_data.get('import_path', False):
        return data

    test_class = import_class(class_data.get('import_path'))

    try:
        destructured = json.loads(class_data.get('units'))
    except json.JSONDecodeError:
        quantity = import_class(class_data.get('units'))
    else:
        if destructured.get('name', False):
            quantity = build_destructured_unit(destructured)
        else:
            quantity = destructured

    observations = data.get('observation')
    without_units = []

    def filter_units(schema):
        result = []

        for key, rules in schema.items():
            if not rules.get('units', False):
                result.append(key)

        return result

    if isinstance(test_class.observation_schema, list):
        for schema in test_class.observation_schema:
            if isinstance(schema, tuple):
                without_units += filter_units(schema[1])
            else:
                without_units += filter_units(schema)
    else:
        without_units = filter_units(test_class.observation_schema)

    def process_obs(obs):
        try:
            obs = int(obs)
        except ValueError:
            obs = np.array(json.loads(obs))

        return obs

    if not isinstance(quantity, dict):
        obs_with_units = {
            x: (
                process_obs(y) * quantity
                if x not in without_units else process_obs(y)
            )
            for x, y in observations.items()
        }
    else:
        obs_with_units = {
            x: (
                process_obs(y) * import_class(quantity[x])
                if x not in without_units else process_obs(y)
            )
            for x, y in observations.items()
        }

    try:
        test_class(obs_with_units)
    except Exception as e:
        raise serializers.ValidationError(
            f"Can't instantiate class, reason candidates: {e}"
        )

    return data

def get_model_capabilities(model_class_import_path):
    klass = import_class(model_class_import_path)

    return klass.get_capabilities()

def calculate_score(self, simulation_result, score_instance):
    model_class = general_hlp.import_class(
        score_instance.model_instance.model_class.import_path)

    model_url = score_instance.model_instance.url
    model_name = os.path.basename(model_url)
    model_path = os.path.join(settings.DOWNLOADED_MODEL_DIR, model_name)

    model_hlp.download_and_save_model(model_path, model_url)

    model_instance = model_class(model_path,
                                 name=score_instance.model_instance.name,
                                 backend=ScidashCacheBackend.name)
    model_instance.set_memory_cache(simulation_result)

    test_class = general_hlp.import_class(
        score_instance.test_instance.test_class.import_path)

    observation = copy.deepcopy(score_instance.test_instance.observation)
    params = copy.deepcopy(score_instance.test_instance.params)

    try:
        destructured = json.loads(
            score_instance.test_instance.test_class.units)
    except json.JSONDecodeError:
        units = general_hlp.import_class(
            score_instance.test_instance.test_class.units)
    else:
        if destructured.get('name', False):
            base_unit = general_hlp.import_class(
                destructured.get('base').get('quantity'))
            units = pq.UnitQuantity(
                destructured.get('name'),
                base_unit * destructured.get('base').get('coefficient'),
                destructured.get('symbol'))
        else:
            units = destructured

    # The 'n' key is a plain sample count and never carries units.
    for key in observation:
        if not isinstance(units, dict):
            observation[key] = int(
                observation[key]) * units if key != 'n' else int(
                    observation[key])
        else:
            observation[key] = int(
                observation[key]) * units[key] if key != 'n' else int(
                    observation[key])

    params_units = score_instance.test_instance.test_class.params_units

    for key in params_units:
        params_units[key] = general_hlp.import_class(params_units[key])

    processed_params = {}

    for key in params:
        if params[key] is not None:
            processed_params[key] = float(params[key]) * params_units[key]

    test_instance = test_class(observation=observation, **processed_params)

    score = test_instance.judge(model_instance).json(add_props=True,
                                                     string=False)

    self.update_score(score_instance, score)

    return score