def _populate_algorithms(self, algo_category_class, algo_category_id):
    for adapter_class in self.introspection_registry.ADAPTERS[algo_category_class]:
        try:
            if not adapter_class.can_be_active():
                self.logger.warning("Skipped Adapter (probably because MATLAB not found): " + str(adapter_class))
                continue

            stored_adapter = Algorithm(adapter_class.__module__, adapter_class.__name__, algo_category_id,
                                       adapter_class.get_group_name(), adapter_class.get_group_description(),
                                       adapter_class.get_ui_name(), adapter_class.get_ui_description(),
                                       adapter_class.get_ui_subsection(), datetime.datetime.now())
            adapter_inst = adapter_class()
            adapter_form = adapter_inst.get_form()

            required_datatype = adapter_form.get_required_datatype()
            if required_datatype is not None:
                required_datatype = required_datatype.__name__
            filters = adapter_form.get_filters()
            if filters is not None:
                filters = filters.to_json()

            stored_adapter.required_datatype = required_datatype
            stored_adapter.datatype_filter = filters
            stored_adapter.parameter_name = adapter_form.get_input_name()
            stored_adapter.outputlist = str(adapter_inst.get_output())

            inst_from_db = dao.get_algorithm_by_module(adapter_class.__module__, adapter_class.__name__)
            if inst_from_db is not None:
                stored_adapter.id = inst_from_db.id

            stored_adapter = dao.store_entity(stored_adapter, inst_from_db is not None)
            adapter_class.stored_adapter = stored_adapter
        except Exception:
            self.logger.exception("Could not introspect Adapters file: " + adapter_class.__module__)
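For reference, a minimal sketch of the contract that _populate_algorithms relies on. The class names below are hypothetical; only the methods are the ones the loop above actually calls:

# Hypothetical adapter sketch (not a real TVB adapter): it stubs exactly the
# methods that _populate_algorithms invokes, to make the introspection
# contract explicit.
class SketchForm(object):
    def get_required_datatype(self):
        return None            # or a DataType class; the loop stores its __name__
    def get_filters(self):
        return None            # or a filter object; the loop stores its to_json()
    def get_input_name(self):
        return "data_input"    # becomes stored_adapter.parameter_name

class SketchAdapter(object):
    @classmethod
    def can_be_active(cls):
        return True            # False makes the loop skip this adapter
    @classmethod
    def get_group_name(cls):
        return None
    @classmethod
    def get_group_description(cls):
        return None
    @classmethod
    def get_ui_name(cls):
        return "Sketch adapter"
    @classmethod
    def get_ui_description(cls):
        return "Shows the methods introspection expects"
    @classmethod
    def get_ui_subsection(cls):
        return "sketch"
    def get_form(self):
        return SketchForm()
    def get_output(self):
        return []              # list of produced DataType classes, stored as str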
def build(adapter_class=DummyAdapter1):
    all_categories = dao.get_algorithm_categories()
    algo_category_id = all_categories[0].id

    stored_adapter = Algorithm(adapter_class.__module__, adapter_class.__name__, algo_category_id,
                               adapter_class.get_group_name(), adapter_class.get_group_description(),
                               adapter_class.get_ui_name(), adapter_class.get_ui_description(),
                               adapter_class.get_ui_subsection(), datetime.now())
    adapter_inst = adapter_class()
    adapter_form = adapter_inst.get_form()

    required_datatype = adapter_form.get_required_datatype()
    if required_datatype is not None:
        required_datatype = required_datatype.__name__
    filters = adapter_form.get_filters()
    if filters is not None:
        filters = filters.to_json()

    stored_adapter.required_datatype = required_datatype
    stored_adapter.datatype_filter = filters
    stored_adapter.parameter_name = adapter_form.get_input_name()
    stored_adapter.outputlist = str(adapter_inst.get_output())

    inst_from_db = dao.get_algorithm_by_module(adapter_class.__module__, adapter_class.__name__)
    if inst_from_db is not None:
        stored_adapter.id = inst_from_db.id
    return dao.store_entity(stored_adapter, inst_from_db is not None)
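Because the final store_entity call passes `inst_from_db is not None`, building twice for the same adapter class updates the existing row rather than creating a duplicate. A usage sketch, assuming this build function is exposed as a pytest factory fixture (the fixture name algorithm_factory and the test are hypothetical):

# Hypothetical pytest usage of the algorithm factory above.
def test_adapter_is_stored_once(algorithm_factory):
    stored = algorithm_factory()        # defaults to DummyAdapter1
    stored_again = algorithm_factory()  # same module/classname: row is updated
    assert stored_again.id == stored.id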
def build(subject="Datatype Factory User", state="RAW_DATA", project=None):
    range_1 = ["row1", [1, 2, 3]]
    range_2 = ["row2", [0.1, 0.3, 0.5]]

    user = user_factory()
    if project is None:
        project = project_factory(user)

    # Create an algorithm
    alg_category = AlgorithmCategory('one', True)
    dao.store_entity(alg_category)
    ad = Algorithm(IntrospectionRegistry.SIMULATOR_MODULE, IntrospectionRegistry.SIMULATOR_CLASS,
                   alg_category.id)
    algorithm = dao.get_algorithm_by_module(IntrospectionRegistry.SIMULATOR_MODULE,
                                            IntrospectionRegistry.SIMULATOR_CLASS)
    if algorithm is None:
        algorithm = dao.store_entity(ad)

    # Create meta, reusing the factory arguments instead of hardcoded values
    meta = {DataTypeMetaData.KEY_SUBJECT: subject,
            DataTypeMetaData.KEY_STATE: state}

    # Create operation
    operation = operation_factory(algorithm=algorithm, test_user=user, test_project=project, meta=meta)

    # One operation group for the datatypes, one for their measures
    group = OperationGroup(project.id, ranges=[json.dumps(range_1), json.dumps(range_2)])
    group = dao.store_entity(group)
    group_ms = OperationGroup(project.id, ranges=[json.dumps(range_1), json.dumps(range_2)])
    group_ms = dao.store_entity(group_ms)

    datatype_group = DataTypeGroup(group, subject=subject, state=state, operation_id=operation.id)
    datatype_group = dao.store_entity(datatype_group)
    dt_group_ms = DataTypeGroup(group_ms, subject=subject, state=state, operation_id=operation.id)
    dao.store_entity(dt_group_ms)

    # Now create some data types and add them to the group
    for range_val1 in range_1[1]:
        for range_val2 in range_2[1]:
            op = Operation(user.id, project.id, algorithm.id, 'test parameters',
                           meta=json.dumps(meta), status=STATUS_FINISHED,
                           range_values=json.dumps({range_1[0]: range_val1,
                                                    range_2[0]: range_val2}))
            op.fk_operation_group = group.id
            op = dao.store_entity(op)

            datatype = time_series_index_factory(op=op)
            datatype.number1 = range_val1
            datatype.number2 = range_val2
            datatype.fk_datatype_group = datatype_group.id
            datatype.operation_id = op.id
            dao.store_entity(datatype)

            op_ms = Operation(user.id, project.id, algorithm.id, 'test parameters',
                              meta=json.dumps(meta), status=STATUS_FINISHED,
                              range_values=json.dumps({range_1[0]: range_val1,
                                                       range_2[0]: range_val2}))
            op_ms.fk_operation_group = group_ms.id
            op_ms = dao.store_entity(op_ms)
            datatype_measure_factory(datatype)

    return datatype_group
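The nested loops above create one datatype per range combination, 3 x 3 = 9 in total. A usage sketch, assuming the factory is exposed as a pytest fixture named datatype_group_factory (the fixture name is an assumption, and treat the dao lookup as one too, though TVB uses a call of this name for this query elsewhere):

# Hypothetical pytest usage of the datatype group factory above.
def test_group_is_fully_populated(datatype_group_factory):
    group = datatype_group_factory()
    # one datatype for each (range_1, range_2) combination: 3 x 3 = 9
    datatypes = dao.get_datatypes_from_datatype_group(group.id)
    assert len(datatypes) == 9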
if __name__ == "__main__":
    operation_service = OperationService()

    # This project ID needs to exist in the DB; it can be taken from the web interface:
    project = dao.get_project_by_id(1)

    # This is our newly added importer:
    adapter_instance = FooDataImporter()

    # We need to store a reference to the new algorithm in the DB as well.
    # First select the category of uploaders:
    upload_category = dao.get_uploader_categories()[0]

    # Check whether the algorithm has already been added to the DB:
    algorithm = dao.get_algorithm_by_module(FooDataImporter.__module__, FooDataImporter.__name__)
    if algorithm is None:
        # Not stored previously, so store it now:
        algorithm = Algorithm(FooDataImporter.__module__, FooDataImporter.__name__, upload_category.id)
        algorithm = dao.store_entity(algorithm)

    adapter_instance.stored_adapter = algorithm

    # Prepare the input arguments as if they were coming from a web UI submit:
    # launch_args = {"array_data": "[1, 2, 3, 4, 5]"}
    launch_args = {"array_data": "demo_array.txt"}

    # Launch an operation and have the results stored both in the DB and on disk:
    launched_operations = operation_service.fire_operation(adapter_instance,
                                                           project.administrator,
                                                           project.id,
                                                           **launch_args)
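For context, a hypothetical sketch of the importer this script drives. Only the name FooDataImporter, its array_data launch argument, and get_output are grounded in the snippets above; the uploader base class is omitted to keep the sketch self-contained, and the numpy parsing is an assumption:

import numpy

# Hypothetical sketch, not the actual TVB uploader API: a real importer
# would subclass TVB's uploader base class and return stored DataTypes.
class FooDataImporter(object):

    def get_output(self):
        return []    # DataType classes this importer can produce

    def launch(self, array_data):
        # array_data is the value passed through launch_args,
        # here the path "demo_array.txt"
        data = numpy.loadtxt(array_data)
        return data  # a real importer would wrap this in a DataType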