Code example #1
 def __store_algorithms_for_group(self, group, adapter, has_sub_algorithms):
     """
     For the group passed as parameter do the following:
     If it has sub-algorithms, get the list of them, add sub-algorithm 
     references into the DB with all the required fields.
     If it is not a GroupAdapter add a single algorithm into the DB with an
     empty identifier.
     """
     if has_sub_algorithms:
         algos = adapter.get_algorithms_dictionary()
         for algo_ident in algos:
             in_params = adapter.get_input_for_algorithm(algo_ident)
             req_type, param_name, flt = self.__get_required_input(in_params)
             outputs = adapter.get_output_for_algorithm(algo_ident)
             algo_description = ""
             if self.__is_matlab_parent(inspect.getclasstree([adapter.__class__])):
                 root_folder = adapter.get_matlab_file_root()
                 file_name = adapter.get_matlab_file(algo_ident)
                 if file_name:
                     algo_description = self.extract_matlab_doc_string(os.path.join(root_folder, file_name))
             algorithm = dao.get_algorithm_by_group(group.id, algo_ident)
             if algorithm is None:
                 #Create new
                 algorithm = model.Algorithm(group.id, algo_ident, algos[algo_ident][ATT_NAME],
                                             req_type, param_name, str(outputs), flt, description=algo_description)
             else:
                 #Edit previous
                 algorithm.name = algos[algo_ident][ATT_NAME]
                 algorithm.required_datatype = req_type
                 algorithm.parameter_name = param_name
                 algorithm.outputlist = str(outputs)
                 algorithm.datatype_filter = flt
                 algorithm.description = algo_description
             dao.store_entity(algorithm)
     else:
         input_tree = adapter.get_input_tree()
         req_type, param_name, flt = self.__get_required_input(input_tree)
         outputs = str(adapter.get_output())
         algorithm = dao.get_algorithm_by_group(group.id, None)
         if hasattr(adapter, '_ui_name'):
             algo_name = getattr(adapter, '_ui_name')
         else:
             algo_name = adapter.__class__.__name__
         if algorithm is None:
             #Create new
             algorithm = model.Algorithm(group.id, None, algo_name, req_type, param_name, outputs, flt)
         else:
             #Edit previous
             algorithm.name = algo_name
             algorithm.required_datatype = req_type
             algorithm.parameter_name = param_name
             algorithm.outputlist = str(outputs)
             algorithm.datatype_filter = flt
         dao.store_entity(algorithm)
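Nearly every snippet on this page revolves around dao.get_algorithm_by_group(group_id, identifier=None). As a minimal sketch only, assuming a SQLAlchemy-style session and an invented foreign-key column name (the real TVB DAO may differ):

 def get_algorithm_by_group(session, group_id, identifier=None):
     # Hedged sketch, not the actual TVB DAO code: fetch the Algorithm row
     # matching (group_id, identifier), or None when no such row exists.
     # 'fk_algo_group' is an assumed column name on model.Algorithm.
     return session.query(model.Algorithm).filter_by(fk_algo_group=group_id,
                                                     identifier=identifier).first()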
Code example #2
    def test_bct_all(self):
        """
        Iterate all BCT algorithms and execute them.
        """
        for i in xrange(len(self.bct_adapters)):
            for bct_identifier in self.bct_adapters[i].get_algorithms_dictionary():
                ### Prepare Operation and parameters
                algorithm = dao.get_algorithm_by_group(self.algo_groups[i].id, bct_identifier)
                operation = TestFactory.create_operation(algorithm=algorithm, test_user=self.test_user,
                                                         test_project=self.test_project,
                                                         operation_status=model.STATUS_STARTED)
                self.assertEqual(model.STATUS_STARTED, operation.status)
                ### Launch BCT algorithm
                submit_data = {self.algo_groups[i].algorithm_param_name: bct_identifier,
                               algorithm.parameter_name: self.connectivity.gid}
                try:
                    OperationService().initiate_prelaunch(operation, self.bct_adapters[i], {}, **submit_data)
                    if bct_identifier in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                        raise Exception("Algorithm %s was expected to throw input validation "
                                        "exception, but did not!" % (bct_identifier,))

                    operation = dao.get_operation_by_id(operation.id)
                    ### Check that operation status after execution is success.
                    self.assertEqual(model.STATUS_FINISHED, operation.status)
                    ### Make sure at least one result exists for each BCT algorithm
                    results = dao.get_generic_entity(model.DataType, operation.id, 'fk_from_operation')
                    self.assertTrue(len(results) > 0)

                except InvalidParameterException as excep:
                    ## Some algorithms are expected to throw validation exception.
                    if bct_identifier not in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                        raise excep
Code example #3
 def create_group(test_user=None, test_project=None, subject="John Doe"):
     """
     Create a group of 2 operations, each with at least one resultant DataType.
     """
     if test_user is None:
         test_user = TestFactory.create_user()  
     if test_project is None:
         test_project = TestFactory.create_project(test_user)
        
     ### Retrieve Adapter instance 
     algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id) 
     
     adapter_inst = TestFactory.create_adapter(algo_group=algo_group, test_project=test_project)
     adapter_inst.meta_data = {DataTypeMetaData.KEY_SUBJECT: subject}
     args = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
     
     ### Prepare Operations group. Execute them synchronously
     service = OperationService()
     operations = service.prepare_operations(test_user.id, test_project.id, algo, algo_category, {}, **args)[0]
     service.launch_operation(operations[0].id, False, adapter_inst)
     service.launch_operation(operations[1].id, False, adapter_inst)
     
     resulted_dts = dao.get_datatype_in_group(operation_group_id=operations[0].fk_operation_group)
     return resulted_dts, operations[0].fk_operation_group
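The args dict above drives TVB's parameter ranging: model.RANGE_PARAMETER_1 names the parameter to sweep and 'param_5': [1, 2] supplies its values, which is why prepare_operations yields exactly the two operations launched above. A hedged sketch of consuming the factory's result (the printed attribute is an assumption about the returned DataTypes):

 datatypes, op_group_id = create_group(subject="Jane Doe")
 assert len(datatypes) == 2    # assumes exactly one DataType per ranged operation
 for dt in datatypes:
     print dt.gid              # Python 2 print, matching the snippets on this page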
Code example #4
def remove_visualizer_references():
    """
    As we removed an algorithm, remove left-overs.
    """

    LOGGER.info("Starting to remove references towards old viewer ....")

    pearson_group = dao.find_group(
        'tvb.adapters.visualizers.cross_correlation',
        'PearsonCorrelationCoefficientVisualizer')
    pearson_algorithm = dao.get_algorithm_by_group(pearson_group.id)

    pearson_operations = dao.get_generic_entity(model.Operation,
                                                pearson_algorithm.id,
                                                "fk_from_algo")
    for op in pearson_operations:
        dao.remove_entity(model.Operation, op.id)

    pearson_workflows = dao.get_generic_entity(model.WorkflowStepView,
                                               pearson_algorithm.id,
                                               "fk_algorithm")
    for ws in pearson_workflows:
        dao.remove_entity(model.WorkflowStepView, ws.id)

    LOGGER.info("References removed.")
Code example #5
 def create_operation(algorithm=None, test_user=None, test_project=None, 
                      operation_status=model.STATUS_FINISHED, parameters="test params"):
     """
     Create persisted operation.
     
     :param algorithm: When not None, introspect TVB and TVB_TEST for adapters.
     :return: Operation entity after persistence. 
     """
     if algorithm is None:
         algo_group = dao.find_group('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
         algorithm = dao.get_algorithm_by_group(algo_group.id)
         
     if test_user is None:
         test_user = TestFactory.create_user()
         
     if test_project is None:
         test_project = TestFactory.create_project(test_user)
         
     meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
             DataTypeMetaData.KEY_STATE: "RAW_DATA"}
     operation = model.Operation(test_user.id, test_project.id, algorithm.id, parameters, meta=json.dumps(meta),
                                 status=operation_status)
     dao.store_entity(operation)
     ### Make sure lazy attributes are correctly loaded.
     return dao.get_operation_by_id(operation.id)
Code example #6
    def _export_linked_datatypes(self, project, zip_file):
        files_helper = FilesHelper()
        linked_paths = self._get_linked_datatypes_storage_path(project)

        if not linked_paths:
            # do not export an empty operation
            return

        # Make an import operation which will contain links to other projects
        alg_group = dao.find_group(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        algo = dao.get_algorithm_by_group(alg_group.id)
        op = model.Operation(None, project.id, algo.id, '')
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model.STATUS_FINISHED)

        # write operation.xml to disk
        files_helper.write_operation_metadata(op)
        op_folder = files_helper.get_operation_folder(op.project.name, op.id)
        operation_xml = files_helper.get_operation_meta_file_path(op.project.name, op.id)
        op_folder_name = os.path.basename(op_folder)

        # add operation.xml
        zip_file.write(operation_xml, op_folder_name + '/' + os.path.basename(operation_xml))

        # add linked datatypes to archive in the import operation
        for pth in linked_paths:
            zip_pth = op_folder_name + '/' + os.path.basename(pth)
            zip_file.write(pth, zip_pth)

        # remove these files, since we only want them in export archive
        files_helper.remove_folder(op_folder)
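A hedged usage sketch for the exporter method above; 'exporter' and 'project' are assumed to already exist in scope, and the caller owns the zip archive exactly as _export_linked_datatypes expects:

 import zipfile

 with zipfile.ZipFile('/tmp/project_export.zip', 'w', zipfile.ZIP_DEFLATED) as zip_file:
     # Writes operation.xml plus every linked DataType file into the archive,
     # then deletes the temporary operation folder from disk.
     exporter._export_linked_datatypes(project, zip_file)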
Code example #7
 def get_algorithm_by_module_and_class(module, classname):
     """
     Get the db entry from the algorithm table for the given module and 
     class.
     """
     group = dao.find_group(module, classname)
     algo = dao.get_algorithm_by_group(group.id)
     return algo, group
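A hedged usage sketch, reusing a module/class pair that appears in the tests above (shown as a plain call; in TVB this helper likely lives on a service class):

 algo, group = get_algorithm_by_module_and_class('tvb.tests.framework.adapters.testadapter3',
                                                 'TestAdapter3')
 print algo.id, group.id       # both entities come straight from the DB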
Code example #8
 def group_operation_launch(self, user_id, project_id, algo_group_id, category_id, **kwargs):
     """
     Create and prepare the launch of a group of operations.
     """
     category = dao.get_category_by_id(category_id)
     algorithm = dao.get_algorithm_by_group(algo_group_id)
     operations, _ = self.prepare_operations(user_id, project_id, algorithm, category, {}, **kwargs)
     for operation in operations:
         self.launch_operation(operation.id, True)
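A hedged launch sketch for the method above, reusing the ranging convention from the create_group example; 'user', 'project', 'algo_group' and 'category' are assumed to be entities fetched beforehand:

 OperationService().group_operation_launch(user.id, project.id, algo_group.id, category.id,
                                           **{model.RANGE_PARAMETER_1: 'param_5',
                                              'param_5': [1, 2]})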
Code example #9
 def test_bct_descriptions(self):
     """
     Iterate all BCT algorithms and check description not empty.
     """
     for i in xrange(len(self.bct_adapters)):
         for bct_identifier in self.bct_adapters[i].get_algorithms_dictionary():
             ### Prepare Operation and parameters
             algorithm = dao.get_algorithm_by_group(self.algo_groups[i].id, bct_identifier)
             self.assertTrue(len(algorithm.description) > 0,
                             "Description was not loaded properly for algorithm %s -- %s"
                             % (str(algorithm), bct_identifier))
Code example #10
 def _launch_test_algo_on_cluster(self, **data):
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
Code example #11
    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = model.Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = model.Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
        self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")
Code example #12
 def _compute_options_for_group(self):
     """Sub-Algorithms"""
     result = []
     algorithms = self.xml_reader.get_algorithms_dictionary()
     for identifier in algorithms.keys():
         option = dict()
         option[self.KEY_VALUE] = identifier
         option[self.KEY_NAME] = algorithms[identifier][self.KEY_NAME]
         algorithm = dao.get_algorithm_by_group(self.algorithm_group.id, identifier)
         option[self.KEY_DESCRIPTION] = algorithm.description
         inputs = algorithms[identifier][INPUTS_KEY]
         option[self.KEY_ATTRIBUTES] = [inputs[key] for key in inputs.keys()]
         option[ELEM_OUTPUTS] = self.xml_reader.get_outputs(identifier)
         result.append(option)
     return result
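For orientation, a hedged sketch of the shape of one entry in the returned list; the concrete example values are invented, and the keys are the class constants used above:

 # option = {self.KEY_VALUE:       'degrees_und',           # sub-algorithm identifier (invented example)
 #           self.KEY_NAME:        'Degrees (undirected)',  # display name from the XML reader
 #           self.KEY_DESCRIPTION: algorithm.description,   # description loaded from the DB row
 #           self.KEY_ATTRIBUTES:  [...],                   # input declarations for the identifier
 #           ELEM_OUTPUTS:         [...]}                   # outputs declared for the identifier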
Code example #13
 def get_algorithm(self):
     """
     Return the algorithm saved in this entity.
     Should be used only if previously an algorithm was saved on this entity by
     calling 'set_algorithm' method.
     """
     if self.ALGO_INFO not in self._data_dict:
         return None
     algo_group = dao.find_group(self._data_dict[self.ALGO_INFO]['module'],
                                 self._data_dict[self.ALGO_INFO]['class'],
                                 self._data_dict[self.ALGO_INFO]['init_param'])
     if algo_group:
         algorithm = dao.get_algorithm_by_group(algo_group.id, self._data_dict[self.ALGO_INFO]['identifier'])
         return algorithm
     return None
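The docstring references a 'set_algorithm' counterpart that is not shown on this page. A hedged sketch of what it would have to store for get_algorithm to succeed; the attribute paths on the group entity ('algo_group', 'classname', 'init_parameter') are assumptions, not confirmed TVB API:

 def set_algorithm(self, algorithm):
     # Hedged sketch: persist just enough identifying data for get_algorithm
     # to re-fetch the entity later.
     group = algorithm.algo_group                      # assumed relationship name
     self._data_dict[self.ALGO_INFO] = {'module': group.module,
                                        'class': group.classname,
                                        'init_param': group.init_parameter,
                                        'identifier': algorithm.identifier}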
Code example #14
    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")
Code example #15
 def test_stop_operations(self):
     """
     Test that a started operation can be stopped via the flow controller.
     """
     module = "tvb_test.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test1_val1": 5, 'test1_val2': 5}
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_STARTED)
     self.flow_c.stop_operation(operation.id, 0, False)
     operation = dao.get_operation_by_id(operation.id)
     self.assertEqual(operation.status, model.STATUS_CANCELED)
Code example #16
    def setUp(self):
        """
        Reset the database before each test.
        """
        initialize_storage()
        self.datatypes_factory = DatatypesFactory()
        self.test_user = self.datatypes_factory.get_user()
        self.test_project = self.datatypes_factory.get_project()
        self.connectivity = self.datatypes_factory.create_connectivity(self.CONNECTIVITY_NODES)[1]

        algo_group = dao.find_group(SIMULATOR_MODULE, SIMULATOR_CLASS)
        algorithm = dao.get_algorithm_by_group(algo_group.id)
        self.simulator_adapter = FlowService().build_adapter_instance(algo_group)
        self.operation = TestFactory.create_operation(algorithm, self.test_user, self.test_project,
                                                      model.STATUS_STARTED, json.dumps(SIMULATOR_PARAMETERS))

        SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid
Code example #17
 def test_stop_operation(self):
     """
     Test that an operation is successfully stopped.
     """
     module = "tvb.tests.framework.adapters.testadapter2"
     class_name = "TestAdapter2"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 5}
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!")
Code example #18
 def test_stop_operation_finished(self):
     """
     Test that an operation that is already finished is not changed by the stop operation.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test1_val1": 5, 'test1_val2': 5}
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     operation.status = model.STATUS_FINISHED
     dao.store_entity(operation)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!")
Code example #19
    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        # it's a list of 3 elements.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = model.OperationGroup(self.test_project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                               "param_2": array_wrappers[1][2], "param_6": "7"})
        params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                               "param_2": array_wrappers[1][2], "param_6": "6"})

        algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])
Code example #20
    def create_workflow_step(module,
                             classname,
                             static_kwargs=None,
                             dynamic_kwargs=None,
                             step_index=0,
                             base_step=0,
                             tab_index=0,
                             index_in_tab=0,
                             is_view_step=False):
        """
        Build non-persisted WorkflowStep entity.
        """
        if static_kwargs is None:
            static_kwargs = {}
        if dynamic_kwargs is None:
            dynamic_kwargs = {}
        step_group = dao.find_group(module, classname)
        algorithm = dao.get_algorithm_by_group(step_group.id)
        second_step_configuration = wf_cfg(algorithm.id, static_kwargs,
                                           dynamic_kwargs)

        static_params = second_step_configuration.static_params
        dynamic_params = second_step_configuration.dynamic_params
        for entry in dynamic_params:
            dynamic_params[entry][wf_cfg.STEP_INDEX_KEY] += base_step

        if is_view_step:
            return model.WorkflowStepView(algorithm_id=algorithm.id,
                                          tab_index=tab_index,
                                          index_in_tab=index_in_tab,
                                          static_param=static_params,
                                          dynamic_param=dynamic_params)
        return model.WorkflowStep(algorithm_id=algorithm.id,
                                  step_index=step_index,
                                  tab_index=tab_index,
                                  index_in_tab=index_in_tab,
                                  static_param=static_params,
                                  dynamic_param=dynamic_params)
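A hedged usage sketch, pairing the factory above with the test adapter referenced elsewhere on this page; the static_kwargs value is invented, and the call is shown bare although the helper likely lives on a test-factory class:

 view_step = create_workflow_step('tvb.tests.framework.adapters.testadapter3',
                                  'TestAdapter3',
                                  static_kwargs={'param_5': '1'},   # invented example value
                                  is_view_step=True)
 # The step is not yet persisted; store it with dao.store_entity(view_step) when needed.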
Code example #21
    def create_new_portlet_configuration(self, name=""):
        """
        Create a PortletConfiguration entity with the default values from the portlet
        XML declaration and the adapter input trees.
        """
        chain_adapters = self.reader.get_adapters_chain(self.algo_identifier)
        analyze_steps = []
        view_step = None

        idx = 0
        for adapter_declaration in chain_adapters:
            adapter_instance, algorithm_group = self.build_adapter_from_declaration(adapter_declaration)
            ### Get the flattened interface for the adapter, and in case of ##
            ### sub-algorithms also get the pair {algorithm : value}      #####
            algorithm_field = adapter_declaration[KEY_FIELD]
            if algorithm_field:
                default_algorithm = adapter_declaration[ABCAdapter.KEY_DEFAULT]
            else:
                default_algorithm = ""
            if default_algorithm:
                prefix = InputTreeManager.form_prefix(algorithm_field, None, default_algorithm)
                alg_inputs = adapter_instance.tree_manager.flatten(
                    adapter_instance.xml_reader.get_inputs(default_algorithm), prefix
                )
            else:
                alg_inputs = adapter_instance.flaten_input_interface()
            ###################################################################

            ### Get the overwrites defined in the portlet configuration #######
            ### for this specific adapter in the adapter chain          #######
            ### split in static and dynamic ones                        #######
            prepared_params = {KEY_STATIC: {}, KEY_DYNAMIC: {}}
            all_portlet_defined_params = self.reader.get_inputs(self.algo_identifier)
            specific_adapter_overwrites = [
                entry
                for entry in all_portlet_defined_params
                if ATT_OVERWRITE in entry and entry[ATT_OVERWRITE] == adapter_declaration[ABCAdapter.KEY_NAME]
            ]

            for entry in specific_adapter_overwrites:
                if ABCAdapter.KEY_DEFAULT in entry:
                    declared_value = entry[ABCAdapter.KEY_DEFAULT]
                elif ABCAdapter.KEY_VALUE in entry:
                    declared_value = entry[ABCAdapter.KEY_VALUE]
                else:
                    declared_value = ""
                if entry[ABCAdapter.KEY_TYPE] == KEY_DYNAMIC:
                    prepared_params[KEY_DYNAMIC][entry[ABCAdapter.KEY_NAME]] = declared_value
                else:
                    prepared_params[KEY_STATIC][entry[ABCAdapter.KEY_NAME]] = declared_value
            ###################################################################

            ### Now fill the remaining adapter inputs that are present in   ##
            ### neither the dynamic nor the static overwrites. In case of   ##
            ### sub-algorithms also add as static the algorithm : value pair ##
            for input_dict in alg_inputs:
                input_name = input_dict[ABCAdapter.KEY_NAME]
                if input_name not in prepared_params[KEY_STATIC] and input_name not in prepared_params[KEY_DYNAMIC]:
                    if ABCAdapter.KEY_DEFAULT in input_dict:
                        input_value = input_dict[ABCAdapter.KEY_DEFAULT]
                    else:
                        input_value = ""
                    prepared_params[KEY_STATIC][input_name] = input_value
            if default_algorithm:
                prepared_params[KEY_STATIC][algorithm_field] = default_algorithm
            ###################################################################

            ### Now parse the dynamic inputs declared in the portlets XML ######
            ### into workflow_step specific format.                        ####
            for param_name in prepared_params[KEY_DYNAMIC]:
                new_value = self._portlet_dynamic2workflow_step(prepared_params[KEY_DYNAMIC][param_name])
                prepared_params[KEY_DYNAMIC][param_name] = new_value
            ###################################################################

            ### Finally, fetch the actual algorithm from the DB, since we   ##
            ### need its id; build the workflow step from the computed      ##
            ### parameter set, then build and return the portlet config.    ##
            algorithm = dao.get_algorithm_by_group(algorithm_group.id, default_algorithm)

            if idx == len(chain_adapters) - 1:
                view_step = WorkflowStepView(
                    algorithm_id=algorithm.id,
                    portlet_id=self.portlet_id,
                    ui_name=name,
                    static_param=prepared_params[KEY_STATIC],
                    dynamic_param=prepared_params[KEY_DYNAMIC],
                )
            else:
                workflow_step = WorkflowStep(
                    algorithm_id=algorithm.id,
                    static_param=prepared_params[KEY_STATIC],
                    dynamic_param=prepared_params[KEY_DYNAMIC],
                )
                analyze_steps.append(workflow_step)
            idx += 1
        portlet_configuration = PortletConfiguration(self.portlet_id)
        portlet_configuration.set_analyzers(analyze_steps)
        portlet_configuration.set_visualizer(view_step)
        return portlet_configuration
Code example #39
    def create_new_portlet_configuration(self, name=''):
        """
        Create a PortletConfiguration entity with the default values from the portlet
        XML declaration and the adapter input trees.
        """
        chain_adapters = self.reader.get_adapters_chain(self.algo_identifier)
        analyze_steps = []
        view_step = None

        idx = 0
        for adapter_declaration in chain_adapters:
            adapter_instance, algorithm_group = self.build_adapter_from_declaration(
                adapter_declaration)
            ### Get the flattened interface for the adapter and, in case  #####
            ### of sub-algorithms, also the {algorithm : value} pair      #####
            algorithm_field = adapter_declaration[KEY_FIELD]
            if algorithm_field:
                default_algorithm = adapter_declaration[ABCAdapter.KEY_DEFAULT]
            else:
                default_algorithm = ''
            if default_algorithm:
                prefix = ABCAdapter.form_prefix(algorithm_field, None,
                                                default_algorithm)
                alg_inputs = adapter_instance._flaten(
                    adapter_instance.xml_reader.get_inputs(default_algorithm),
                    prefix)
            else:
                alg_inputs = adapter_instance.flaten_input_interface()
            ###################################################################

            ### Get the overwrites defined in the portlet configuration #######
            ### for this specific adapter in the adapter chain          #######
            ### split in static and dynamic ones                        #######
            prepared_params = {KEY_STATIC: {}, KEY_DYNAMIC: {}}
            all_portlet_defined_params = self.reader.get_inputs(
                self.algo_identifier)
            specific_adapter_overwrites = [
                entry for entry in all_portlet_defined_params
                if ATT_OVERWRITE in entry
                and entry[ATT_OVERWRITE] == adapter_declaration[ABCAdapter.KEY_NAME]
            ]

            for entry in specific_adapter_overwrites:
                if ABCAdapter.KEY_DEFAULT in entry:
                    declared_value = entry[ABCAdapter.KEY_DEFAULT]
                elif ABCAdapter.KEY_VALUE in entry:
                    declared_value = entry[ABCAdapter.KEY_VALUE]
                else:
                    declared_value = ''
                if entry[ABCAdapter.KEY_TYPE] == KEY_DYNAMIC:
                    prepared_params[KEY_DYNAMIC][entry[ABCAdapter.KEY_NAME]] = declared_value
                else:
                    prepared_params[KEY_STATIC][entry[ABCAdapter.KEY_NAME]] = declared_value
            ###################################################################

            ### Now fill in the remaining adapter inputs that are present in ##
            ### neither the dynamic nor the static overwrites. In the case   ##
            ### of sub-algorithms, also add the algorithm : value pair as a  ##
            ### static parameter                                             ##
            for input_dict in alg_inputs:
                input_name = input_dict[ABCAdapter.KEY_NAME]
                if (input_name not in prepared_params[KEY_STATIC]
                        and input_name not in prepared_params[KEY_DYNAMIC]):
                    if ABCAdapter.KEY_DEFAULT in input_dict:
                        input_value = input_dict[ABCAdapter.KEY_DEFAULT]
                    else:
                        input_value = ''
                    prepared_params[KEY_STATIC][input_name] = input_value
            if default_algorithm:
                prepared_params[KEY_STATIC][algorithm_field] = default_algorithm
            ###################################################################

            ### Now parse the dynamic inputs declared in the portlet's XML ####
            ### into the workflow_step-specific format                     ####
            for param_name in prepared_params[KEY_DYNAMIC]:
                new_value = self._portlet_dynamic2workflow_step(
                    prepared_params[KEY_DYNAMIC][param_name])
                prepared_params[KEY_DYNAMIC][param_name] = new_value
            ###################################################################

            ### Finally, fetch the actual algorithm id from the DB, build the #
            ### workflow step from the computed parameter set, and assemble   #
            ### the portlet configuration to return                          ##
            algorithm = dao.get_algorithm_by_group(algorithm_group.id,
                                                   default_algorithm)

            if idx == len(chain_adapters) - 1:
                view_step = WorkflowStepView(
                    algorithm_id=algorithm.id,
                    portlet_id=self.portlet_id,
                    ui_name=name,
                    static_param=prepared_params[KEY_STATIC],
                    dynamic_param=prepared_params[KEY_DYNAMIC])
            else:
                workflow_step = WorkflowStep(
                    algorithm_id=algorithm.id,
                    static_param=prepared_params[KEY_STATIC],
                    dynamic_param=prepared_params[KEY_DYNAMIC])
                analyze_steps.append(workflow_step)
            idx += 1
        portlet_configuration = PortletConfiguration(self.portlet_id)
        portlet_configuration.set_analyzers(analyze_steps)
        portlet_configuration.set_visualizer(view_step)
        return portlet_configuration
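
The static/dynamic overwrite split used above can be shown in isolation. A minimal sketch, assuming plain string keys ('name', 'type', 'default', 'value') in place of the ABCAdapter constants; the entry layout here is a simplification, not the exact portlet XML structure:

KEY_STATIC, KEY_DYNAMIC = 'static', 'dynamic'

def split_overwrites(overwrites):
    # Mirrors the loop above: each declared overwrite lands in either the
    # static or the dynamic bucket, preferring 'default' over 'value'.
    prepared = {KEY_STATIC: {}, KEY_DYNAMIC: {}}
    for entry in overwrites:
        declared_value = entry.get('default', entry.get('value', ''))
        if entry.get('type') == KEY_DYNAMIC:
            prepared[KEY_DYNAMIC][entry['name']] = declared_value
        else:
            prepared[KEY_STATIC][entry['name']] = declared_value
    return prepared

print(split_overwrites([
    {'name': 'data', 'type': 'dynamic', 'value': 'step_0'},
    {'name': 'threshold', 'default': '0.5'},
]))
# -> threshold ends up static, data ends up dynamic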
Code example #40
class OperationService:
    """
    Class responsible for preparing an operation launch.
    It will prepare the parameters and decide whether the operation is to be
    executed immediately or sent to the cluster.
    """
    ATT_UID = "uid"

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.workflow_service = WorkflowService()
        self.file_helper = FilesHelper()

    ##########################################################################################
    ######## Methods related to launching operations start here ##############################
    ##########################################################################################

    def initiate_operation(self,
                           current_user,
                           project_id,
                           adapter_instance,
                           temporary_storage,
                           visible=True,
                           **kwargs):
        """
        Get the computation parameters from the previous input form and launch
        the computation (on the cluster or locally).

        Invoke the custom method on an adapter instance and make sure that,
        once the operation has finished, the correct results are stored in the DB.
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" +
                                str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        # Prepare Files parameters
        files = {}
        kw2 = copy(kwargs)
        for i, j in kwargs.iteritems():
            if isinstance(j, FieldStorage) or isinstance(j, Part):
                files[i] = j
                del kw2[i]

        temp_files = {}
        try:
            for i, j in files.iteritems():
                if j.file is None:
                    kw2[i] = None
                    continue
                uq_name = utils.date2string(datetime.now(),
                                            True) + '_' + str(i)
                # We have to append the original file name at the end, in case
                # file processing involves reading the file extension
                file_name = TEMPORARY_PREFIX + uq_name + '_' + j.filename
                file_name = os.path.join(temporary_storage, file_name)
                kw2[i] = file_name
                temp_files[i] = file_name
                with open(file_name, 'wb') as file_obj:
                    file_obj.write(j.file.read())
                self.logger.debug("Will store file:" + file_name)
            kwargs = kw2
        except Exception, excep:
            self._handle_exception(
                excep, temp_files,
                "Could not launch operation: invalid input files!")

        ### Store Operation entity.
        algo_group = adapter_instance.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        if algo_group.algorithm_param_name in kwargs:
            algo = dao.get_algorithm_by_group(
                algo_group.id, kwargs[algo_group.algorithm_param_name])
        else:
            algo = dao.get_algorithm_by_group(algo_group.id)

        operations = self.prepare_operations(current_user.id, project_id, algo,
                                             algo_category, {}, visible,
                                             **kwargs)[0]

        if isinstance(adapter_instance, ABCSynchronous):
            if len(operations) > 1:
                raise LaunchException(
                    "Synchronous operations do not support ranges!")
            if len(operations) < 1:
                self.logger.warning("No operation was defined")
                raise LaunchException("Invalid empty Operation!!!")
            return self.initiate_prelaunch(operations[0], adapter_instance,
                                           temp_files, **kwargs)
        else:
            return self._send_to_cluster(operations, adapter_instance,
                                         current_user.username)