Code Example #1
    def _populate_values(data_list, type_, category_key):
        """
        Populate meta-data fields for data_list (list of DataTypes).

        Private method, to be called recursively.
        It will receive a list of Attributes, and it will populate 'options'
        entry with data references from DB.
        """
        values = []
        all_field_values = ''
        for value in data_list:
            # Here we only populate with DB data, actual
            # XML check will be done after select and submit.
            entity_gid = value[2]
            actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
            display_name = ''
            if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
                display_name = actual_entity[0].display_name
            display_name += ' - ' + (value[3] or "None ")
            if value[5]:
                display_name += ' - From: ' + str(value[5])
            else:
                display_name += utils.date2string(value[4])
            if value[6]:
                display_name += ' - ' + str(value[6])
            display_name += ' - ID:' + str(value[0])
            all_field_values += str(entity_gid) + ','
            values.append({KEY_NAME: display_name, KEY_VALUE: entity_gid})
        if category_key is not None:
            category = dao.get_category_by_id(category_key)
            if not category.display and not category.rawinput and len(data_list) > 1:
                values.insert(0, {KEY_NAME: "All", KEY_VALUE: all_field_values[:-1]})
        return values
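A note on the positional indexing above: a later variant of this helper (Code Example #4) unpacks each row as (id, _, entity_gid, subject, completion_date, group, group_name, tag1). The self-contained sketch below replays the display-name assembly for one hypothetical row; the entity name and the date formatting are stand-ins for the DB lookup and utils.date2string, not actual TVB calls.

    # Hypothetical row, shaped like the tuples the loop above iterates over.
    from datetime import datetime

    row = (42, None, "a1b2c3", "John Doe", datetime(2013, 5, 17), None, None, None)

    display_name = "TimeSeriesRegion"              # stand-in for actual_entity[0].display_name
    display_name += ' - ' + (row[3] or "None ")    # subject
    if row[5]:                                     # launched from an operation group?
        display_name += ' - From: ' + str(row[5])
    else:
        display_name += row[4].strftime('%Y-%m-%d')    # stand-in for utils.date2string
    if row[6]:
        display_name += ' - ' + str(row[6])
    display_name += ' - ID:' + str(row[0])

    print(display_name)    # TimeSeriesRegion - John Doe - 2013-05-17 - ID:42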
Code Example #2
 def create_group(test_user=None, test_project=None, subject="John Doe"):
     """
     Create a group of 2 operations, each with at least one resultant DataType.
     """
     if test_user is None:
         test_user = TestFactory.create_user()  
     if test_project is None:
         test_project = TestFactory.create_project(test_user)
        
     ### Retrieve Adapter instance 
     algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id) 
     
     adapter_inst = TestFactory.create_adapter(algo_group=algo_group, test_project=test_project)
     adapter_inst.meta_data = {DataTypeMetaData.KEY_SUBJECT: subject}
     args = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
     
     ### Prepare Operations group. Execute them synchronously
     service = OperationService()
     operations = service.prepare_operations(test_user.id, test_project.id, algo, algo_category, {}, **args)[0]
     service.launch_operation(operations[0].id, False, adapter_inst)
     service.launch_operation(operations[1].id, False, adapter_inst)
     
     resulted_dts = dao.get_datatype_in_group(operation_group_id=operations[0].fk_operation_group)
     return resulted_dts, operations[0].fk_operation_group
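A hypothetical call site for this helper, grounded only in its return statement (the resultant DataTypes plus the operation group id); the subject value is illustrative.

    # Assuming create_group is exposed on TestFactory like the helpers it calls:
    datatypes, group_id = TestFactory.create_group(subject="Jane Roe")
    assert len(datatypes) >= 2    # each of the 2 operations produced at least one DataType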
Code Example #3
 def test_stop_operation_finished(self, test_adapter_factory):
     """
     Test that an operation that is already finished is not changed by the stop operation.
     """
     test_adapter_factory()
     adapter = TestFactory.create_adapter(
         "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
     view_model = adapter.get_view_model()()
     view_model.test1_val1 = 5
     view_model.test1_val2 = 5
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(
         self.test_user.id,
         self.test_project,
         algo,
         algo_category,
         view_model=view_model)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     operation.status = model_operation.STATUS_FINISHED
     dao.store_entity(operation)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"
Code Example #4
    def _populate_values(data_list, type_, category_key, complex_dt_attributes=None):
        """
        Populate meta-data fields for data_list (list of DataTypes).

        Private method, to be called recursively.
        It will receive a list of Attributes, and it will populate 'options'
        entry with data references from DB.
        """
        values = []
        all_field_values = []
        for id_, _, entity_gid, subject, completion_date, group, gr_name, tag1 in data_list:
            # Here we only populate with DB data, actual
            # XML check will be done after select and submit.
            actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
            display_name = ''
            if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
                display_name = actual_entity[0].display_name
            display_name += ' - ' + (subject or "None ")
            if group:
                display_name += ' - From: ' + str(group)
            else:
                display_name += utils.date2string(completion_date)
            if gr_name:
                display_name += ' - ' + str(gr_name)
            display_name += ' - ID:' + str(id_)
            all_field_values.append(str(entity_gid))
            values.append({KEY_NAME: display_name, KEY_VALUE: entity_gid})
            if complex_dt_attributes is not None:
                ### TODO apply filter on sub-attributes
                values[-1][KEY_ATTRIBUTES] = complex_dt_attributes  # attach the shared copy of the complex DataType attributes to every DB option
        if category_key is not None:
            category = dao.get_category_by_id(category_key)
            if not category.display and not category.rawinput and len(data_list) > 1:
                values.insert(0, {KEY_NAME: "All", KEY_VALUE: ','.join(all_field_values)})
        return values
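The "All" entry aggregates every GID into one comma-separated value; note how ','.join avoids the trailing-comma slice (all_field_values[:-1]) that Code Example #1 needs. A self-contained sketch, with plain strings standing in for the KEY_NAME/KEY_VALUE constants:

    all_field_values = ["a1b2c3", "d4e5f6", "0718ab"]
    all_option = {"name": "All", "value": ','.join(all_field_values)}
    print(all_option)    # {'name': 'All', 'value': 'a1b2c3,d4e5f6,0718ab'}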
Code Example #5
 def populate_values(data_list, type_, category_key, complex_dt_attributes=None):
     """
     Populate meta-data fields for data_list (list of DataTypes).
     """
     values = []
     all_field_values = ''
     for value in data_list:
         # Here we only populate with DB data, actual
         # XML check will be done after select and submit.
         entity_gid = value[2]
         actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
         display_name = ''
         if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
             display_name = actual_entity[0].display_name
         display_name = display_name + ' - ' + (value[3] or "None ")
         if value[5]:
             display_name = display_name + ' - From: ' + str(value[5])
         else:
             display_name = display_name + utils.date2string(value[4])
         if value[6]:
             display_name = display_name + ' - ' + str(value[6])
         display_name = display_name + ' - ID:' + str(value[0])
         all_field_values = all_field_values + str(entity_gid) + ','
         values.append({ABCAdapter.KEY_NAME: display_name, ABCAdapter.KEY_VALUE: entity_gid})
         if complex_dt_attributes is not None:
             ### TODO apply filter on sub-attributes
             values[-1][ABCAdapter.KEY_ATTRIBUTES] = complex_dt_attributes
     if category_key is not None:
         category = dao.get_category_by_id(category_key)
         if (not category.display) and (not category.rawinput) and len(data_list) > 1:
             values.insert(0, {ABCAdapter.KEY_NAME: "All", ABCAdapter.KEY_VALUE: all_field_values[:-1]})
     return values
Code Example #6
File: flowservice.py, Project: wvangeit/framework_tvb
 def populate_values(data_list, type_, category_key, complex_dt_attributes=None):
     """
     Populate meta-data fields for data_list (list of DataTypes).
     """
     values = []
     all_field_values = ''
     for value in data_list:
         # Here we only populate with DB data, actual
         # XML check will be done after select and submit.
         entity_gid = value[2]
         actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
         display_name = ''
         if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
             display_name = actual_entity[0].display_name
         display_name = display_name + ' - ' + value[3]
         if value[5]:
             display_name = display_name + ' - From: ' + str(value[5])
         else:
             display_name = display_name + utils.date2string(value[4])
         if value[6]:
             display_name = display_name + ' - ' + str(value[6])
         display_name = display_name + ' - ID:' + str(value[0])
         all_field_values = all_field_values + str(entity_gid) + ','
         values.append({ABCAdapter.KEY_NAME: display_name, ABCAdapter.KEY_VALUE: entity_gid})
         if complex_dt_attributes is not None:
             ### TODO apply filter on sub-attributes
             values[-1][ABCAdapter.KEY_ATTRIBUTES] = complex_dt_attributes
     if category_key is not None:
         category = dao.get_category_by_id(category_key)
         if (not category.display) and (not category.rawinput) and len(data_list) > 1:
             values.insert(0, {ABCAdapter.KEY_NAME: "All", ABCAdapter.KEY_VALUE: all_field_values[:-1]})
     return values
Code Example #7
    def initiate_operation(self, current_user, project, adapter_instance, visible=True, model_view=None, **kwargs):
        """
        Gets the parameters of the computation from the previous inputs form,
        and launches a computation (on the cluster or locally).
        
        Invoke custom method on an Adapter Instance. Make sure when the  
        operation has finished that the correct results are stored into DB. 
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        # Store Operation entity.
        algo = adapter_instance.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)

        operations = self.prepare_operations(current_user.id, project, algo, algo_category,
                                             visible, view_model=model_view, **kwargs)[0]

        if adapter_instance.launch_mode == AdapterLaunchModeEnum.SYNC_SAME_MEM:
            if len(operations) > 1:
                raise LaunchException("Synchronous operations are not supporting ranges!")
            if len(operations) < 1:
                self.logger.warning("No operation was defined")
                raise LaunchException("Invalid empty Operation!!!")
            return self.initiate_prelaunch(operations[0], adapter_instance, **kwargs)
        else:
            return self._send_to_cluster(operations, adapter_instance, current_user.username)
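A hedged sketch of driving this entry point. Only the initiate_operation signature comes from the code above; the adapter construction is an assumption for illustration (SomeAdapter is a placeholder ABCAdapter subclass, and current_user/project are presumed session objects).

    adapter = ABCAdapter.build_adapter_from_class(SomeAdapter)    # hypothetical adapter class
    view_model = adapter.get_view_model()()
    result = OperationService().initiate_operation(current_user, project, adapter,
                                                   visible=True, model_view=view_model)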
Code Example #8
File: factory.py, Project: swatibanerjee29/tvb-root
    def create_group(test_user=None, test_project=None, subject="John Doe"):
        """
        Create a group of 2 operations, each with at least one resultant DataType.
        """
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        adapter_inst = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        adapter_inst.generic_attributes.subject = subject

        view_model = adapter_inst.get_view_model()()
        args = {RANGE_PARAMETER_1: 'param_5', 'param_5': json.dumps({constants.ATT_MINVALUE: 1,
                                                                     constants.ATT_MAXVALUE: 2.1,
                                                                     constants.ATT_STEP: 1})}
        algo = adapter_inst.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)

        # Prepare Operations group. Execute them synchronously
        service = OperationService()
        operations = service.prepare_operations(test_user.id, test_project, algo, algo_category,
                                                view_model=view_model, **args)[0]
        service.launch_operation(operations[0].id, False, adapter_inst)
        service.launch_operation(operations[1].id, False, adapter_inst)

        resulted_dts = dao.get_datatype_in_group(operation_group_id=operations[0].fk_operation_group)
        return resulted_dts, operations[0].fk_operation_group
Code Example #9
 def _launch_test_algo_on_cluster(self, **data):
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
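A hypothetical test body built on this fixture; the keyword inputs mirror the TestAdapter1 values used in Code Example #20, and the started-status check mirrors Code Example #23.

    operations = self._launch_test_algo_on_cluster(test1_val1=5, test1_val2=5)
    operation = dao.get_operation_by_id(operations[0].id)
    assert operation.status == model.STATUS_STARTED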
Code Example #10
File: flowservice.py, Project: wvangeit/framework_tvb
 def get_category_by_id(self, identifier):
     """ Pass to DAO the retrieve of category by ID operation."""
     try:
         return dao.get_category_by_id(identifier)
     except Exception as excep:
         self.logger.warning("Wrong step!")
         self.logger.exception(excep)
         raise OperationException(str(excep))
Code Example #11
 def group_operation_launch(self, user_id, project_id, adapter_id, category_id, **kwargs):
     """
     Create and prepare the launch of a group of operations.
     """
     category = dao.get_category_by_id(category_id)
     algorithm = dao.get_algorithm_by_id(adapter_id)
     operations, _ = self.prepare_operations(user_id, project_id, algorithm, category, {}, **kwargs)
     for operation in operations:
         self.launch_operation(operation.id, True)
Code Example #12
 def group_operation_launch(self, user_id, project_id, algo_group_id, category_id, **kwargs):
     """
     Create and prepare the launch of a group of operations.
     """
     category = dao.get_category_by_id(category_id)
     algorithm = dao.get_algorithm_by_group(algo_group_id)
     operations, _ = self.prepare_operations(user_id, project_id, algorithm, category, {}, **kwargs)
     for operation in operations:
         self.launch_operation(operation.id, True)
Code Example #13
    def initiate_operation(self, current_user, project_id, adapter_instance,
                           temporary_storage, visible=True, **kwargs):
        """
        Gets the parameters of the computation from the previous inputs form,
        and launches a computation (on the cluster or locally).
        
        Invoke custom method on an Adapter Instance. Make sure when the  
        operation has finished that the correct results are stored into DB. 
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        # Prepare Files parameters
        files = {}
        kw2 = copy(kwargs)
        for i, j in six.iteritems(kwargs):
            if isinstance(j, FieldStorage) or isinstance(j, Part):
                files[i] = j
                del kw2[i]

        temp_files = {}
        try:
            for i, j in six.iteritems(files):
                if j.file is None:
                    kw2[i] = None
                    continue
                uq_name = utils.date2string(datetime.now(), True) + '_' + str(i)
                # We have to add original file name to end, in case file processing
                # involves file extension reading
                file_name = TEMPORARY_PREFIX + uq_name + '_' + j.filename
                file_name = os.path.join(temporary_storage, file_name)
                kw2[i] = file_name
                temp_files[i] = file_name
                with open(file_name, 'wb') as file_obj:
                    file_obj.write(j.file.read())
                self.logger.debug("Will store file:" + file_name)
            kwargs = kw2
        except Exception as excep:
            self._handle_exception(excep, temp_files, "Could not launch operation: invalid input files!")

        ### Store Operation entity. 
        algo = adapter_instance.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)

        operations = self.prepare_operations(current_user.id, project_id, algo, algo_category,
                                             {}, visible, **kwargs)[0]

        if isinstance(adapter_instance, ABCSynchronous):
            if len(operations) > 1:
                raise LaunchException("Synchronous operations are not supporting ranges!")
            if len(operations) < 1:
                self.logger.warning("No operation was defined")
                raise LaunchException("Invalid empty Operation!!!")
            return self.initiate_prelaunch(operations[0], adapter_instance, temp_files, **kwargs)
        else:
            return self._send_to_cluster(operations, adapter_instance, current_user.username)
Code Example #14
 def group_operation_launch(self, user_id, project, algorithm_id, category_id, existing_dt_group=None, **kwargs):
     """
     Create and prepare the launch of a group of operations.
     """
     category = dao.get_category_by_id(category_id)
     algorithm = dao.get_algorithm_by_id(algorithm_id)
     ops, _ = self.prepare_operations(user_id, project, algorithm, category,
                                      existing_dt_group=existing_dt_group, **kwargs)
     for operation in ops:
         self.launch_operation(operation.id, True)
Code Example #15
 def _launch_test_algo_on_cluster(self, **data):
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
Code Example #16
 def _launch_test_algo_on_cluster(self, **data):
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(
         self.test_user.id, self.test_project.id, algo, algo_category, {},
         **data)
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
Code Example #17
 def test_stop_operation(self):
     """
     Test that an operation is successfully stopped.
     """
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
     data = {"test": 5}
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     assert operation.status == model.STATUS_CANCELED, "Operation should have been canceled!"
Code Example #18
File: factory.py, Project: swatibanerjee29/tvb-root
    def launch_synchronously(test_user, test_project, adapter_instance, view_model, algo_category=None):
        # Avoid the scheduled execution, as this is asynch, thus launch it immediately
        service = OperationService()
        algorithm = adapter_instance.stored_adapter
        if algo_category is None:
            algo_category = dao.get_category_by_id(algorithm.fk_category)
        operation = service.prepare_operations(test_user.id, test_project, algorithm, algo_category,
                                               True, view_model=view_model)[0][0]
        service.initiate_prelaunch(operation, adapter_instance)

        operation = dao.get_operation_by_id(operation.id)
        # Check that operation status after execution is success.
        assert STATUS_FINISHED == operation.status
        # Make sure at least one result exists for each BCT algorithm
        return dao.get_generic_entity(DataType, operation.id, 'fk_from_operation')
Code Example #19
    def test_bct_all(self):
        """
        Iterate all BCT algorithms and execute them.
        """

        view_model = BaseBCTModel()
        view_model.connectivity = self.connectivity.gid
        algo_category = dao.get_category_by_id(
            self.bct_adapters[0].stored_adapter.fk_category)

        for adapter_instance in self.bct_adapters:
            results = TestFactory.launch_synchronously(self.test_user.id,
                                                       self.test_project,
                                                       adapter_instance,
                                                       view_model)
            assert len(results) > 0
Code Example #20
 def test_stop_operation_finished(self):
     """
     Test that an operation that is already finished is not changed by the stop operation.
     """
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
     data = {"test1_val1": 5, 'test1_val2': 5}
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     operation.status = model.STATUS_FINISHED
     dao.store_entity(operation)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     assert operation.status == model.STATUS_FINISHED, "Operation shouldn't have been canceled!"
Code Example #21
 def _asynch_launch_simple_op(self, **data):
     adapter = TestFactory.create_adapter(
         'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
     view_model = TestModel()
     view_model.test1_val1 = 5
     view_model.test1_val2 = 6
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(
         self.test_user.id,
         self.test_project,
         algo,
         algo_category,
         view_model=view_model,
         **data)
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
Code Example #22
 def test_stop_operation(self, test_adapter_factory):
     """
     Test that an operation is successfully stopped.
     """
     test_adapter_factory(adapter_class=TestAdapter2)
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
     view_model = adapter.get_view_model()()
     view_model.test = 5
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project, algo,
                                                               algo_category, {},
                                                               view_model=view_model)
     self.operation_service._send_to_cluster(operations, adapter)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"
Code Example #23
 def test_stop_operations(self):
     module = "tvb_test.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test1_val1": 5, 'test1_val2': 5}
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_STARTED)
     self.flow_c.stop_operation(operation.id, 0, False)
     operation = dao.get_operation_by_id(operation.id)
     self.assertEqual(operation.status, model.STATUS_CANCELED)
Code Example #24
 def test_stop_operation(self):
     """
     Test that an operation is successfully stopped.
     """
     module = "tvb.tests.framework.adapters.testadapter2"
     class_name = "TestAdapter2"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 5}
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!")
Code Example #25
 def test_stop_operation_finished(self):
     """
     Test that an operation that is already finished is not changed by the stop operation.
     """
     adapter = TestFactory.create_adapter(
         "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
     data = {"test1_val1": 5, 'test1_val2': 5}
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(
         self.test_user.id, self.test_project.id, algo, algo_category, {},
         **data)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     operation.status = model.STATUS_FINISHED
     dao.store_entity(operation)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_FINISHED,
                      "Operation shouldn't have been canceled!")
Code Example #26
 def test_stop_operation_finished(self):
     """
     Test that an operation that is already finished is not changed by the stop operation.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test1_val1": 5, 'test1_val2': 5}
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     operation.status = model.STATUS_FINISHED
     dao.store_entity(operation)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!")
Code Example #27
    def prepare_operation(self, user_id, project, algorithm, visible=True, view_model=None, ranges=None,
                          burst_gid=None, op_group_id=None):
        """
        Do all the necessary preparations for storing an operation. If it's the case of a
        range of values create an operation group and multiple operations for each possible
        instance from the range.
        """
        algo_category = dao.get_category_by_id(algorithm.fk_category)
        ga = self.prepare_metadata(algo_category, current_ga=view_model.generic_attributes, burst=burst_gid)
        ga.visible = visible
        view_model.generic_attributes = ga

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project.id) +
                          ",algorithmId=" + str(algorithm.id) + ")")

        operation = Operation(view_model.gid.hex, user_id, project.id, algorithm.id, user_group=ga.operation_tag,
                              op_group_id=op_group_id, range_values=ranges)
        operation = dao.store_entity(operation)

        self.store_view_model(operation, project, view_model)

        return operation
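A hedged sketch of a single (non-ranged) call, using only names visible in the surrounding examples; test_user, test_project and adapter are assumed to exist as in the factory helpers above.

    view_model = adapter.get_view_model()()    # fresh view model, default GID
    operation = OperationService().prepare_operation(test_user.id, test_project,
                                                     adapter.stored_adapter,
                                                     view_model=view_model)
    # prepare_operation persists the view model next to the operation via
    # store_view_model(), so the launch step can rebuild it from disk.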
Code Example #28
File: bct_test.py, Project: ganiyuolalekan/tvb-root
    def test_bct_all(self):
        """
        Iterate all BCT algorithms and execute them.
        """
        service = OperationService()
        algo_category = dao.get_category_by_id(self.bct_adapters[0].stored_adapter.fk_category)
        for adapter_instance in self.bct_adapters:
            algorithm = adapter_instance.stored_adapter
            view_model = BaseBCTModel()
            view_model.connectivity = self.connectivity.gid

            # Avoid the scheduled execution, as this is asynch, thus launch it immediately
            operation = service.prepare_operations(self.test_user.id, self.test_project, algorithm, algo_category,
                                                   {}, True, view_model=view_model)[0][0]
            service.initiate_prelaunch(operation, adapter_instance)

            operation = dao.get_operation_by_id(operation.id)
            # Check that operation status after execution is success.
            assert STATUS_FINISHED == operation.status
            # Make sure at least one result exists for each BCT algorithm
            results = dao.get_generic_entity(DataType, operation.id, 'fk_from_operation')
            assert len(results) > 0
Code Example #29
 def get_category_by_id(self, identifier):
     """ Pass to DAO the retrieve of category by ID operation."""
     return dao.get_category_by_id(identifier)
Code Example #30
class OperationService:
    """
    Class responsible for preparing an operation launch. 
    It will prepare parameters, and decide if the operation is to be executed
    immediately, or to be sent on the cluster.
    """
    ATT_UID = "uid"

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.workflow_service = WorkflowService()
        self.file_helper = FilesHelper()

    ##########################################################################################
    ######## Methods related to launching operations start here ##############################
    ##########################################################################################

    def initiate_operation(self, current_user, project_id, adapter_instance,
                           temporary_storage, visible=True, **kwargs):
        """
        Gets the parameters of the computation from the previous inputs form,
        and launches a computation (on the cluster or locally).

        Invoke custom method on an Adapter Instance. Make sure when the
        operation has finished that the correct results are stored into DB.
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        # Prepare Files parameters
        files = {}
        kw2 = copy(kwargs)
        for i, j in kwargs.items():
            if isinstance(j, FieldStorage) or isinstance(j, Part):
                files[i] = j
                del kw2[i]

        temp_files = {}
        try:
            for i, j in files.items():
                if j.file is None:
                    kw2[i] = None
                    continue
                uq_name = utils.date2string(datetime.now(), True) + '_' + str(i)
                # We have to add original file name to end, in case file processing
                # involves file extension reading
                file_name = TEMPORARY_PREFIX + uq_name + '_' + j.filename
                file_name = os.path.join(temporary_storage, file_name)
                kw2[i] = file_name
                temp_files[i] = file_name
                with open(file_name, 'wb') as file_obj:
                    file_obj.write(j.file.read())
                self.logger.debug("Will store file:" + file_name)
            kwargs = kw2
        except Exception as excep:
            self._handle_exception(excep, temp_files, "Could not launch operation: invalid input files!")

        ### Store Operation entity.
        algo_group = adapter_instance.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        if algo_group.algorithm_param_name in kwargs:
            algo = dao.get_algorithm_by_group(algo_group.id, kwargs[algo_group.algorithm_param_name])
        else:
            algo = dao.get_algorithm_by_group(algo_group.id)

        operations = self.prepare_operations(current_user.id, project_id, algo,
                                             algo_category, {}, visible, **kwargs)[0]

        if isinstance(adapter_instance, ABCSynchronous):
            if len(operations) > 1:
                raise LaunchException("Synchronous operations are not supporting ranges!")
            if len(operations) < 1:
                self.logger.warning("No operation was defined")
                raise LaunchException("Invalid empty Operation!!!")
            return self.initiate_prelaunch(operations[0], adapter_instance, temp_files, **kwargs)
        else:
            return self._send_to_cluster(operations, adapter_instance, current_user.username)