Example #1
0
    def test_new_portlet_configuration(self):
        """
        Verify that new_portlet_configuration rejects invalid portlet ids and,
        for the stored test portlet, builds the expected analyzer/visualizer setup.
        """
        # An invalid portlet ID must raise InvalidPortletConfiguration.
        self.assertRaises(InvalidPortletConfiguration,
                          self.burst_service.new_portlet_configuration, -1)

        # Happy flow: build a configuration from the stored test portlet.
        stored_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        config = self.burst_service.new_portlet_configuration(stored_portlet.id)
        analyzer_list = config.analyzers
        self.assertEqual(
            1, len(analyzer_list),
            "Portlet configuration not build properly."
            " Portlet's analyzers list has unexpected number of elements.")
        expected_dynamic = {
            u'test_dt_input': {
                wf_cfg.DATATYPE_INDEX_KEY: 0,
                wf_cfg.STEP_INDEX_KEY: 0
            }
        }
        self.assertEqual(analyzer_list[0].dynamic_param, expected_dynamic,
                         "Dynamic parameters not loaded properly")
        viz = config.visualizer
        self.assertEqual(viz.dynamic_param, {},
                         "Dynamic parameters not loaded properly")
        self.assertEqual(viz.static_param, {u'test2': u'0'},
                         'Static parameters not loaded properly')
    def test_launch_group_burst_no_metric(self):
        """
        Launch a group burst with a test adapter that has no metrics attached;
        the burst itself must error out, yet the group entities are still created.
        """
        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        algorithm_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        # test1_val1 ranges over three values, so three workflows are expected.
        simulator_params = {'test1_val1': '[0, 1, 2]',
                            'test1_val2': '0',
                            RANGE_PARAMETER_1: 'test1_val1'}
        portlet_entity = dao.get_portlet_by_identifier(self.PORTLET_ID)
        self._add_portlets_to_burst(burst_config, {portlet_entity.id: [(0, 0), (0, 1), (1, 0)]})
        burst_config.update_simulator_configuration(simulator_params)
        launched_id, _ = self.burst_service.launch_burst(burst_config, 0, algorithm_id,
                                                         self.test_user.id)
        burst_config = dao.get_burst_by_id(launched_id)
        # Wait maximum x seconds for burst to finish
        self._wait_for_burst(burst_config, error_expected=True)

        workflow_count = dao.get_workflows_for_burst(launched_id, is_count=True)
        assert 3 == workflow_count, "3 workflows should have been launched due to group parameter."

        operation_groups = self.count_all_entities(OperationGroup)
        datatype_groups = self.count_all_entities(DataTypeGroup)
        assert 5 == operation_groups, "An operation group should have been created for each step."
        assert 5 == datatype_groups, "An dataType group should have been created for each step."
Example #3
0
 def test_load_tab_configuration(self):
     """
     Create a burst with some predefined portlets in some known positions. Check that the
     load_tab_configuration method does what it is expected, and we get the portlets in the
     corresponding tab positions.
     """
     burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
     SIMULATOR_MODULE = 'tvb.tests.framework.adapters.testadapter1'
     SIMULATOR_CLASS = 'TestAdapter1'
     algo_id = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS).id
     kwargs_replica = {'test1_val1': '0', 'test1_val2': '0'}
     test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
     # Add test_portlet to positions (0,0), (0,1) and (1,0)
     tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
     self._add_portlets_to_burst(burst_config, tab_config)
     burst_config.update_simulator_configuration(kwargs_replica)
     burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id, self.test_user.id)
     burst_config = dao.get_burst_by_id(burst_id)
     burst_config = self._wait_for_burst(burst_config)
     burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
     wf_step = dao.get_workflow_steps(burst_wf.id)[0]
     # prepare_after_load resets the in-memory tab state, so every slot is None again.
     burst_config.prepare_after_load()
     for tab in burst_config.tabs:
         for portlet in tab.portlets:
             self.assertTrue(portlet is None, "Before loading the tab configuration all portlets should be none.")
     burst_config = self.burst_service.load_tab_configuration(burst_config, wf_step.fk_operation)
     for tab_idx, tab in enumerate(burst_config.tabs):
         for portlet_idx, portlet in enumerate(tab.portlets):
             if (tab_idx == 0 and portlet_idx in [0, 1]) or (tab_idx == 1 and portlet_idx == 0):
                 # Fixed typo in failure message ("gonfiguration" -> "configuration").
                 self.assertTrue(portlet is not None, "portlet configuration not set")
                 self.assertEqual(test_portlet.id, portlet.portlet_id, "Unexpected portlet entity loaded.")
             else:
                 # Fixed copy-pasted message: this branch runs AFTER loading the tab configuration.
                 self.assertTrue(portlet is None, "Positions not configured should remain empty after loading.")
Example #4
0
    def test_update_portlet_config(self):
        """
        Check that updating portlet configuration parameters reports the need
        for a restart only when an analyzer (not visualizer) parameter changes.
        """
        def _check_update(overwrites, restart_expected):
            """
            Apply the overwrites and assert the returned 'need_restart' flag.
            Overwrite keys carry the portlet-parameter prefix:
            ADAPTER_PREFIX_ROOT + step_index + actual_name.
            """
            need_restart = self.burst_service.update_portlet_configuration(
                portlet_configuration, overwrites)
            self.assertEqual(
                restart_expected, need_restart,
                "After update expected %s as 'need_restart' but got %s." %
                (restart_expected, need_restart))

        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        unchanged_value = portlet_configuration.analyzers[0].static_param[
            'test_non_dt_input']
        # Re-submitting the same analyzer value: no restart needed.
        _check_update({ADAPTER_PREFIX_ROOT + '0test_non_dt_input': unchanged_value},
                      False)
        # Changing only a visualizer parameter: no restart needed.
        _check_update({ADAPTER_PREFIX_ROOT + '1test2': 'new_value'}, False)
        # Actually changing an analyzer parameter: restart required.
        _check_update({ADAPTER_PREFIX_ROOT + '0test_non_dt_input': '1'}, True)
 def test_index(self):
     """
     Check the dictionary returned by index(): all required keys are present and
     the default portlet layout has TimeSeries first with every other slot empty.
     """
     result_dict = self.burst_c.index()
     self.assertTrue('burst_list' in result_dict and result_dict['burst_list'] == [])
     self.assertTrue('available_metrics' in result_dict
                     and isinstance(result_dict['available_metrics'], list))
     self.assertTrue('portletList' in result_dict
                     and isinstance(result_dict['portletList'], list))
     self.assertEqual(result_dict[common.KEY_SECTION], "burst")
     self.assertTrue('burstConfig' in result_dict
                     and isinstance(result_dict['burstConfig'], BurstConfiguration))
     selected = json.loads(result_dict['selectedPortlets'])
     timeseries_id = dao.get_portlet_by_identifier("TimeSeries").id
     for tab_idx, tab in enumerate(selected):
         for slot_idx, slot_value in enumerate(tab):
             if tab_idx == 0 and slot_idx == 0:
                 expected = [timeseries_id, "TimeSeries"]
             else:
                 expected = [-1, "None"]
             self.assertEqual(slot_value, expected)
     self.assertTrue(result_dict['draw_hidden_ranges'])
Example #6
0
    def test_update_portlet_config_invalid_data(self):
        """
        An update with overwrites that match no known parameter key must leave
        the portlet configuration unchanged.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)

        snapshot = copy.deepcopy(portlet_configuration)
        self.burst_service.update_portlet_configuration(
            portlet_configuration,
            {'this_is_not_a_valid_key': 'for_test_portlet_update'})
        self.assertEqual(set(dir(snapshot)),
                         set(dir(portlet_configuration)))
        # NOTE(review): instance __dict__ entries are data attributes, not callables,
        # so this condition filters out everything and the assertions below never run.
        # The check looks inverted -- confirm the original intent before changing it.
        for attr_name in portlet_configuration.__dict__.keys():
            if hasattr(getattr(portlet_configuration, attr_name), '__call__'):
                self.assertEqual(
                    getattr(snapshot, attr_name),
                    getattr(portlet_configuration, attr_name),
                    "The value of attribute %s changed by a update with invalid data "
                    "when it shouldn't have." % attr_name)
 def test_launch_burst(self):
     """
     Launch a burst through the burst service and check it reaches a running
     or finished state.
     """
     first_step_algo = self.flow_service.get_algorithm_by_module_and_class(
         'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
     adapter_interface = self.flow_service.prepare_adapter(self.test_project.id, first_step_algo)
     ui_submited_simulator_iface_replica = {}
     kwargs_replica = {}
     # Mirror what the UI would submit: every parameter checked, default value kept.
     for entry in adapter_interface:
         name = entry[ABCAdapter.KEY_NAME]
         default = entry[ABCAdapter.KEY_DEFAULT]
         ui_submited_simulator_iface_replica[name] = {model.KEY_PARAMETER_CHECKED: True,
                                                      model.KEY_SAVED_VALUE: default}
         kwargs_replica[name] = default
     burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
     burst_config.simulator_configuration = ui_submited_simulator_iface_replica
     test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
     self._add_portlets_to_burst(burst_config, {test_portlet.id: [(0, 0), (0, 1), (1, 0)]})
     burst_config.update_simulator_configuration(kwargs_replica)
     burst_id, _ = self.burst_service.launch_burst(burst_config, 0, first_step_algo.id, self.test_user.id)
     burst_config = dao.get_burst_by_id(burst_id)
     self.assertTrue(burst_config.status in (BurstConfiguration.BURST_FINISHED, BurstConfiguration.BURST_RUNNING),
                     "Burst not launched successfully!")
     # Wait maximum x seconds for burst to finish
     self._wait_for_burst(burst_config)
Example #8
0
 def test_launch_burst(self):
     """
     Exercise BurstService.launch_burst end to end and verify the burst ends up
     either running or already finished.
     """
     algo = self.flow_service.get_algorithm_by_module_and_class(
         'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
     interface = self.flow_service.prepare_adapter(self.test_project.id, algo)
     # Replicate a UI submission: each parameter checked with its default value.
     simulator_iface = {entry[ABCAdapter.KEY_NAME]: {model.KEY_PARAMETER_CHECKED: True,
                                                     model.KEY_SAVED_VALUE: entry[ABCAdapter.KEY_DEFAULT]}
                        for entry in interface}
     launch_kwargs = {entry[ABCAdapter.KEY_NAME]: entry[ABCAdapter.KEY_DEFAULT]
                      for entry in interface}
     burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
     burst_config.simulator_configuration = simulator_iface
     portlet_entity = dao.get_portlet_by_identifier(self.PORTLET_ID)
     self._add_portlets_to_burst(burst_config,
                                 {portlet_entity.id: [(0, 0), (0, 1), (1, 0)]})
     burst_config.update_simulator_configuration(launch_kwargs)
     burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo.id,
                                                   self.test_user.id)
     burst_config = dao.get_burst_by_id(burst_id)
     self.assertTrue(
         burst_config.status in (BurstConfiguration.BURST_FINISHED,
                                 BurstConfiguration.BURST_RUNNING),
         "Burst not launched successfully!")
     # Wait maximum x seconds for burst to finish
     self._wait_for_burst(burst_config)
Example #9
0
 def _add_new_valid_portlets(self, portlets_list):
     # Persist portlets that are valid now but were absent from the DB at the
     # previous run (identified by their algorithm identifier).
     for candidate in portlets_list:
         existing = dao.get_portlet_by_identifier(candidate.algorithm_identifier)
         if existing is None:
             self.logger.debug("Will now store portlet %s" % (str(candidate),))
             dao.store_entity(candidate)
Example #10
0
    def test_launch_burst_invalid_portlet_analyzer_data(self):
        """
        A burst whose portlet analyzer receives invalid input must end in error:
        the simulator step finishes, the portlet analyze step fails.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        # The adapter does int(test1_val1) and int(test1_val2), so these values are valid.
        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        burst_config.update_simulator_configuration({'test1_val1': '1', 'test1_val2': '0'})

        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(test_portlet.id)
        # The portlet analyzer does int(input), which fails for 'asa'.
        self.burst_service.update_portlet_configuration(
            portlet_configuration, {ADAPTER_PREFIX_ROOT + '0test_non_dt_input': 'asa'})
        burst_config.tabs[0].portlets[0] = portlet_configuration

        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id, self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait maximum x seconds for burst to finish
        burst_config = self._wait_for_burst(burst_config, error_expected=True)

        burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
        wf_steps = dao.get_workflow_steps(burst_wf.id)
        self.assertTrue(len(wf_steps) == 2,
                        "Should have exactly 2 wf steps. One for 'simulation' one for portlet analyze operation.")
        simulator_op = dao.get_operation_by_id(wf_steps[0].fk_operation)
        self.assertEqual(model.STATUS_FINISHED, simulator_op.status,
                         "First operation should be simulator which should have 'finished' status.")
        portlet_analyze_op = dao.get_operation_by_id(wf_steps[1].fk_operation)
        self.assertEqual(portlet_analyze_op.status, model.STATUS_ERROR,
                         "Second operation should be portlet analyze step which should have 'error' status.")
    def test_update_portlet_config(self):
        """
        Verify update_portlet_configuration: the returned 'need_restart' flag is
        True only when an analyzer parameter actually changes, never for a
        visualizer parameter or an unchanged value.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(test_portlet.id)
        previous_entry = portlet_configuration.analyzers[0].static_param['test_non_dt_input']
        # (overwrites, expected need_restart) scenarios, applied in order.
        # Keys carry the ADAPTER_PREFIX_ROOT + step_index + name prefix.
        scenarios = [
            ({ADAPTER_PREFIX_ROOT + '0test_non_dt_input': previous_entry}, False),
            ({ADAPTER_PREFIX_ROOT + '1test2': 'new_value'}, False),
            ({ADAPTER_PREFIX_ROOT + '0test_non_dt_input': '1'}, True)]
        for declared_overwrites, expected_result in scenarios:
            result = self.burst_service.update_portlet_configuration(portlet_configuration,
                                                                     declared_overwrites)
            self.assertEqual(expected_result, result,
                             "After update expected %s as 'need_restart' but got %s." % (expected_result, result))
    def test_launch_group_burst_no_metric(self):
        """
        Launch a group burst using a test adapter with no metrics attached: the
        burst must end in error, but the group entities are still created.
        """
        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        # test1_val1 ranges over three values => three workflows in the group.
        kwargs_replica = {'test1_val1': '[0, 1, 2]',
                          'test1_val2': '0',
                          model.RANGE_PARAMETER_1: 'test1_val1'}
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        self._add_portlets_to_burst(burst_config, {test_portlet.id: [(0, 0), (0, 1), (1, 0)]})
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id, self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait maximum x seconds for burst to finish
        self._wait_for_burst(burst_config, error_expected=True)

        launched_workflows = dao.get_workflows_for_burst(burst_id, is_count=True)
        self.assertEqual(3, launched_workflows, "3 workflows should have been launched due to group parameter.")

        self.assertEqual(5, self.count_all_entities(model.OperationGroup),
                         "An operation group should have been created for each step.")
        self.assertEqual(5, self.count_all_entities(model.DataTypeGroup),
                         "An dataType group should have been created for each step.")
 def get_portlet(self):
     """
     Fetch the portlet entity whose identifier was previously stored on this
     entity via 'set_portlet'; return None when no identifier was saved.
     """
     try:
         portlet_ident = self._data_dict[self.PORTLET_IDENT]
     except KeyError:
         return None
     return dao.get_portlet_by_identifier(portlet_ident)
 def get_portlet(self):
     """
     Return the portlet saved on this entity via 'set_portlet', or None when no
     portlet identifier has been stored yet.
     """
     has_ident = self.PORTLET_IDENT in self._data_dict
     if not has_ident:
         return None
     return dao.get_portlet_by_identifier(self._data_dict[self.PORTLET_IDENT])
Example #15
0
 def test_build_portlet_interface_invalid(self):
     """
     build_portlet_interface must raise InvalidPortletConfiguration when the
     configuration references a bogus portlet id.
     """
     portlet_entity = dao.get_portlet_by_identifier(self.PORTLET_ID)
     config = self.burst_service.new_portlet_configuration(portlet_entity.id)
     # Corrupt the configuration with a portlet id that cannot exist.
     config.portlet_id = "this-is-invalid"
     self.assertRaises(InvalidPortletConfiguration, self.burst_service.build_portlet_interface,
                       config, self.test_project.id)
Example #16
0
 def test_portlet_tab_display(self):
     """
     Store a TimeSeries portlet in every position of every tab, then check the
     controller reports exactly that configuration back.
     """
     self.burst_c.index()
     timeseries_id = dao.get_portlet_by_identifier("TimeSeries").id
     single_tab = [[timeseries_id, "TimeSeries"] for _ in range(NUMBER_OF_PORTLETS_PER_TAB)]
     all_tabs = [single_tab for _ in range(BurstConfiguration.nr_of_tabs)]
     result = self.burst_c.portlet_tab_display(tab_portlets_list=json.dumps(all_tabs))
     for entry in result['portlet_tab_list']:
         self.assertEqual(entry.id, timeseries_id)
 def test_portlet_configurable_interface(self):
     """
     Basic check for PortletConfigurer.get_configurable_interface: two steps,
     with the overwritten default applied and untouched defaults preserved.
     """
     test_portlet = dao.get_portlet_by_identifier("TA1TA2")
     result = PortletConfigurer(test_portlet).get_configurable_interface()
     self.assertEqual(len(result), 2, "Length of the resulting interface not as expected")
     for one_entry in result:
         for entry in one_entry.interface:
             name = entry['name']
             if name == 'test1':
                 self.assertTrue(entry['default'] == 'step_0[0]', "Overwritten default not in effect.")
             if name == 'test2':
                 self.assertTrue(entry['default'] == '0', "Value that was not overwritten changed.")
 def test_get_portlet_session_configuration(self):
     """
     The default portlet session configuration should hold a TimeSeries portlet
     in the first slot of the first tab and empty entries everywhere else.
     """
     self.burst_c.index()
     session_config = json.loads(self.burst_c.get_portlet_session_configuration())
     timeseries_id = dao.get_portlet_by_identifier("TimeSeries").id
     for tab_idx, tab in enumerate(session_config):
         for slot_idx, value in enumerate(tab):
             if tab_idx == 0 and slot_idx == 0:
                 self.assertEqual(value, [timeseries_id, "TimeSeries"])
             else:
                 self.assertEqual(value, [-1, "None"])
Example #19
0
 def test_portlet_configurable_interface(self):
     """
     get_configurable_interface should expose two adapter steps; overwritten
     defaults take effect while the other declared values stay intact.
     """
     portlet_entity = dao.get_portlet_by_identifier("TA1TA2")
     interface_steps = PortletConfigurer(portlet_entity).get_configurable_interface()
     self.assertEqual(len(interface_steps), 2, "Length of the resulting interface not as expected")
     for step in interface_steps:
         for field in step.interface:
             if field['name'] == 'test1':
                 self.assertTrue(field['default'] == 'step_0[0]', "Overwritten default not in effect.")
             if field['name'] == 'test2':
                 self.assertTrue(field['default'] == '0', "Value that was not overwritten changed.")
 def test_get_portlet_session_configuration(self):
     """
     get_portlet_session_configuration should return the default layout:
     TimeSeries at position (0, 0) and every other slot empty.
     """
     self.burst_c.index()
     layout = json.loads(self.burst_c.get_portlet_session_configuration())
     default_portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
     for tab_no, tab in enumerate(layout):
         for pos, entry in enumerate(tab):
             if tab_no == 0 and pos == 0:
                 expected = [default_portlet_id, "TimeSeries"]
             else:
                 expected = [-1, "None"]
             self.assertEqual(entry, expected)
 def test_portlet_tab_display(self):
     """
     After storing a TimeSeries portlet in every available position, the
     controller must echo back the same selection.
     """
     self.burst_c.index()
     ts_id = dao.get_portlet_by_identifier("TimeSeries").id
     tab_entries = [[ts_id, "TimeSeries"] for _ in range(NUMBER_OF_PORTLETS_PER_TAB)]
     payload = {'tab_portlets_list': json.dumps([tab_entries for _ in range(BurstConfiguration.nr_of_tabs)])}
     displayed = self.burst_c.portlet_tab_display(**payload)
     for portlet in displayed['portlet_tab_list']:
         self.assertEqual(portlet.id, ts_id)
Example #22
0
    def _prepare_and_launch_sync_burst(self):
        """
        Private method to launch a dummy burst. Return the burst loaded after the launch finished
        as well as the workflow steps that initially formed the burst.
        NOTE: the burst launched by this method is a `dummy` one, meaning we do not use an actual
        simulation, but instead test adapters.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        workflow_step_list = []
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)

        # Persist a datatype to serve as input for the first workflow step.
        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        # Tag the resulting datatypes with the burst they belong to.
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        # Build the operations (and optional operation group) for the first step.
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)
        # One view step bound to the test portlet; the four 0s are positional
        # indices (step/tab/index values -- see TestFactory.create_workflow_step
        # for their exact meaning; TODO confirm the order).
        view_step = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter2",
            "TestAdapter2", {"test2": 2}, {},
            0,
            0,
            0,
            0,
            is_view_step=True)
        view_step.fk_portlet = test_portlet.id
        workflow_step_list.append(view_step)

        workflows = self.workflow_service.create_and_store_workflow(
            self.test_project.id, burst_config.id, 0, first_step_algorithm.id,
            operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        ### Now fire the workflow and also update and store the burst configuration ##
        self.operation_service.launch_operation(operations[0].id, False)
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        # Clean up the helper import operation and datatype so they do not skew
        # any entity counts checked by callers.
        import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
        dao.remove_entity(import_operation.__class__, import_operation.id)
        dao.remove_datatype(stored_dt.gid)
        return loaded_burst, workflow_step_list
    def test_update_portlet_config_invalid_data(self):
        """
        Applying overwrites with an unknown key must leave the portlet
        configuration untouched.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(test_portlet.id)

        original_state = copy.deepcopy(portlet_configuration)
        bogus_overwrites = {'this_is_not_a_valid_key': 'for_test_portlet_update'}
        self.burst_service.update_portlet_configuration(portlet_configuration, bogus_overwrites)
        self.assertEqual(set(dir(original_state)), set(dir(portlet_configuration)))
        # NOTE(review): instance __dict__ entries are data, not callables, so this
        # condition filters out every attribute and the assertions below never run.
        # The check looks inverted -- confirm original intent before changing it.
        for key in portlet_configuration.__dict__.keys():
            if hasattr(getattr(portlet_configuration, key), '__call__'):
                self.assertEqual(getattr(original_state, key), getattr(portlet_configuration, key),
                                 "The value of attribute %s changed by a update with invalid data "
                                 "when it shouldn't have." % key)
    def test_new_portlet_configuration(self):
        """
        new_portlet_configuration must reject unknown portlet ids and, for the
        test portlet, produce one analyzer plus a visualizer with the expected
        static and dynamic parameters.
        """
        # Passing an invalid portlet ID should fail and raise an InvalidPortletConfiguration exception.
        self.assertRaises(InvalidPortletConfiguration, self.burst_service.new_portlet_configuration, -1)

        # Now the happy flow
        portlet_entity = dao.get_portlet_by_identifier(self.PORTLET_ID)
        configuration = self.burst_service.new_portlet_configuration(portlet_entity.id)
        self.assertEqual(len(configuration.analyzers), 1,
                         "Portlet configuration not build properly."
                         " Portlet's analyzers list has unexpected number of elements.")
        expected_dynamic_params = {u'test_dt_input': {wf_cfg.DATATYPE_INDEX_KEY: 0,
                                                      wf_cfg.STEP_INDEX_KEY: 0}}
        self.assertEqual(configuration.analyzers[0].dynamic_param, expected_dynamic_params,
                         "Dynamic parameters not loaded properly")
        self.assertEqual(configuration.visualizer.dynamic_param, {},
                         "Dynamic parameters not loaded properly")
        self.assertEqual(configuration.visualizer.static_param, {u'test2': u'0'},
                         'Static parameters not loaded properly')
Example #25
0
 def test_build_portlet_interface(self):
     """
     The portlet interface must come back split per step, with parameter names
     prefixed by ADAPTER_PREFIX_ROOT + step index.
     """
     test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
     portlet_configuration = self.burst_service.new_portlet_configuration(test_portlet.id)
     actual_interface = self.burst_service.build_portlet_interface(portlet_configuration,
                                                                   self.test_project.id)
     # Expected steps/interface mirror the XML declaration in
     # tvb.tests.framework/core/portlets/test_portlet.xml
     expected_steps = [{'ui_name': 'TestAdapterDatatypeInput'},
                       {'ui_name': 'TestAdapter2'}]
     expected_interface = [
         {ABCAdapter.KEY_DEFAULT: 'step_0[0]', ABCAdapter.KEY_DISABLED: True,
          KEY_DYNAMIC: True, ABCAdapter.KEY_NAME: ADAPTER_PREFIX_ROOT + '0test_dt_input'},
         {ABCAdapter.KEY_DEFAULT: '0', ABCAdapter.KEY_DISABLED: False,
          KEY_DYNAMIC: False, ABCAdapter.KEY_NAME: ADAPTER_PREFIX_ROOT + '1test2'}]
     for idx, expected_step in enumerate(expected_steps):
         step = actual_interface[idx]
         for key in expected_step:
             self.assertEqual(expected_step.get(key), getattr(step, key))
         for key in expected_interface[idx]:
             self.assertEqual(expected_interface[idx].get(key, False),
                              step.interface[0].get(key, False))
 def test_index(self):
     """
     index() must expose all keys the burst page template needs, with the
     default portlet selection holding TimeSeries only at position (0, 0).
     """
     page_model = self.burst_c.index()
     self.assertTrue('burst_list' in page_model and page_model['burst_list'] == [])
     self.assertTrue('available_metrics' in page_model and isinstance(page_model['available_metrics'], list))
     self.assertTrue('portletList' in page_model and isinstance(page_model['portletList'], list))
     self.assertEqual(page_model[common.KEY_SECTION], "burst")
     self.assertTrue('burstConfig' in page_model and isinstance(page_model['burstConfig'], BurstConfiguration))
     selection = json.loads(page_model['selectedPortlets'])
     ts_portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
     for tab_no, tab in enumerate(selection):
         for pos, entry in enumerate(tab):
             if tab_no == 0 and pos == 0:
                 self.assertEqual(entry, [ts_portlet_id, "TimeSeries"])
             else:
                 self.assertEqual(entry, [-1, "None"])
     self.assertTrue(page_model['draw_hidden_ranges'])
 def test_build_portlet_interface(self):
     """
     The portlet interface must come back split per workflow step, with every
     input name carrying the ADAPTER_PREFIX_ROOT + step-index prefix.
     """
     portlet_entity = dao.get_portlet_by_identifier(self.PORTLET_ID)
     configuration = self.burst_service.new_portlet_configuration(portlet_entity.id)
     built_interface = self.burst_service.build_portlet_interface(configuration, self.test_project.id)
     # Expected values mirror the XML declaration in
     # tvb.tests.framework/core/portlets/test_portlet.xml
     expected = [
         ({'ui_name': 'TestAdapterDatatypeInput'},
          {ABCAdapter.KEY_DEFAULT: 'step_0[0]', ABCAdapter.KEY_DISABLED: True,
           KEY_DYNAMIC: True, ABCAdapter.KEY_NAME: ADAPTER_PREFIX_ROOT + '0test_dt_input'}),
         ({'ui_name': 'TestAdapter2'},
          {ABCAdapter.KEY_DEFAULT: '0', ABCAdapter.KEY_DISABLED: False,
           KEY_DYNAMIC: False, ABCAdapter.KEY_NAME: ADAPTER_PREFIX_ROOT + '1test2'})]
     for step_no, (step_attrs, input_attrs) in enumerate(expected):
         actual_step = built_interface[step_no]
         for attr_name in step_attrs:
             self.assertEqual(step_attrs.get(attr_name), getattr(actual_step, attr_name))
         for attr_name in input_attrs:
             self.assertEqual(input_attrs.get(attr_name, False),
                              actual_step.interface[0].get(attr_name, False))
    def _prepare_and_launch_sync_burst(self):
        """
        Private method to launch a dummy burst. Return the burst loaded after the launch finished
        as well as the workflow steps that initially formed the burst.
        NOTE: the burst launched by this method is a `dummy` one, meaning we do not use an actual
        simulation, but instead test adapters.

        :returns: tuple (burst entity re-loaded after the launch, list of workflow steps built here)
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        workflow_step_list = []
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)

        # Persist a datatype up-front; its gid is fed below as 'test_dt_input' to the first adapter.
        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                      first_step_algorithm,
                                                                      first_step_algorithm.algorithm_category,
                                                                      metadata, **kwargs)
        # Mark this step as the burst's view step and link it to the test portlet.
        view_step = TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2", "TestAdapter2",
                                                     {"test2": 2}, {}, 0, 0, 0, 0, is_view_step=True)
        view_step.fk_portlet = test_portlet.id
        workflow_step_list.append(view_step)

        workflows = self.workflow_service.create_and_store_workflow(self.test_project.id, burst_config.id, 0,
                                                                    first_step_algorithm.id, operations)
        self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, self.test_user.id,
                                                                    burst_config.id, self.test_project.id, group,
                                                                    operations)
        ### Now fire the workflow and also update and store the burst configuration ##
        self.operation_service.launch_operation(operations[0].id, False)
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        # Clean up the helper import operation and datatype so they do not leak into other tests.
        import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
        dao.remove_entity(import_operation.__class__, import_operation.id)
        dao.remove_datatype(stored_dt.gid)
        return loaded_burst, workflow_step_list
Example #29
0
    def __get_portlets(self, path_portlets):
        """
        Given a path in the form of a python package e.g.: "tvb.portlets", import
        the package, get its folder and look for all the XML files defined
        there, then read all the portlets defined there and store them in DB.

        :param path_portlets: python package path whose folder holds the portlet XML declarations
        """
        portlet_package = __import__(path_portlets, globals(), locals(), ["__init__"])
        portlet_folder = os.path.dirname(portlet_package.__file__)
        portlets_list = []
        for file_n in os.listdir(portlet_folder):
            try:
                if file_n.endswith('.xml'):
                    complete_file_path = os.path.join(portlet_folder, file_n)
                    portlet_reader = XMLPortletReader(complete_file_path)
                    portlet_list = portlet_reader.get_algorithms_dictionary()
                    self.logger.debug("Starting to verify currently declared portlets in %s." % (file_n,))
                    for algo_identifier in portlet_list:
                        adapters_chain = portlet_reader.get_adapters_chain(algo_identifier)
                        is_valid = True
                        for adapter in adapters_chain:
                            class_name = adapter[ABCAdapter.KEY_TYPE].split('.')[-1]
                            module_name = adapter[ABCAdapter.KEY_TYPE].replace('.' + class_name, '')
                            try:
                                # Check that the module is properly declared
                                # (isinstance instead of the type(...) != ... anti-pattern)
                                module = __import__(module_name, globals(), fromlist=[class_name])
                                if not isinstance(module, ModuleType):
                                    is_valid = False
                                    self.logger.error("Wrong module %s in portlet %s" % (module_name, algo_identifier))
                                    continue
                                # Check that the class is properly declared
                                if not hasattr(module, class_name):
                                    is_valid = False
                                    self.logger.error("Wrong class %s in portlet %s." % (class_name, algo_identifier))
                                    continue
                                # Check inputs that refer to this adapter
                                portlet_inputs = portlet_list[algo_identifier][ELEM_INPUTS]
                                adapter_instance = PortletConfigurer.build_adapter_from_declaration(adapter)
                                if adapter_instance is None:
                                    is_valid = False
                                    self.logger.warning("No group having class=%s stored for "
                                                        "portlet %s." % (class_name, algo_identifier))
                                    continue
                                adapter_input_names = [entry[ABCAdapter.KEY_NAME] for entry
                                                       in adapter_instance.flaten_input_interface()]
                                for input_entry in portlet_inputs.values():
                                    if input_entry[ATT_OVERWRITE] == adapter[ABCAdapter.KEY_NAME]:
                                        if input_entry[ABCAdapter.KEY_NAME] not in adapter_input_names:
                                            self.logger.error("Invalid input %s for adapter %s" % (
                                                input_entry[ABCAdapter.KEY_NAME], adapter_instance))
                                            is_valid = False
                            except ImportError:
                                self.logger.error("Invalid adapter declaration %s in portlet %s" % (
                                                  adapter[ABCAdapter.KEY_TYPE], algo_identifier))
                                is_valid = False
                        if is_valid:
                            portlets_list.append(model.Portlet(algo_identifier, complete_file_path,
                                                               portlet_list[algo_identifier]['name']))
            except XmlParserException as excep:
                self.logger.exception(excep)
                self.logger.error("Invalid Portlet description File " + file_n + " will continue without it!!")

        self.logger.debug("Refreshing portlets from xml declarations.")
        stored_portlets = dao.get_available_portlets()
        # First update portlets already present in DB with the freshly verified data
        for stored_portlet in stored_portlets:
            for verified_portlet in portlets_list:
                if stored_portlet.algorithm_identifier == verified_portlet.algorithm_identifier:
                    stored_portlet.xml_path = verified_portlet.xml_path
                    stored_portlet.last_introspection_check = datetime.datetime.now()
                    stored_portlet.name = verified_portlet.name
                    dao.store_entity(stored_portlet)
                    break

        # Now add portlets that were not in DB at previous run but are valid now
        for portlet in portlets_list:
            db_entity = dao.get_portlet_by_identifier(portlet.algorithm_identifier)
            if db_entity is None:
                self.logger.debug("Will now store portlet %s" % (str(portlet),))
                dao.store_entity(portlet)
Example #30
0
        self.logger.debug("Refreshing portlets from xml declarations.")
        stored_portlets = dao.get_available_portlets()
        #First update old portlets from DB
        for stored_portlet in stored_portlets:
            for verified_portlet in portlets_list:
                if stored_portlet.algorithm_identifier == verified_portlet.algorithm_identifier:
                    stored_portlet.xml_path = verified_portlet.xml_path
                    stored_portlet.last_introspection_check = datetime.datetime.now(
                    )
                    stored_portlet.name = verified_portlet.name
                    dao.store_entity(stored_portlet)
                    break

        #Now add portlets that were not in DB at previous run but are valid now
        for portlet in portlets_list:
            db_entity = dao.get_portlet_by_identifier(
                portlet.algorithm_identifier)
            if db_entity is None:
                self.logger.debug("Will now store portlet %s" %
                                  (str(portlet), ))
                dao.store_entity(portlet)

    def __get_datatypes(self, path_types):
        """
        Imports each DataType to update the DB model, by creating a new table for each DataType.
        """
        for my_type in Introspector.__get_variable(path_types):
            try:
                module_ref = __import__(path_types, globals(), locals(),
                                        [my_type])
                module_ref = eval("module_ref." + my_type)
                tree = [
 def get_portlet_by_identifier(portlet_identifier):
     """
     Fetch the portlet entity whose algorithm identifier equals
     `portlet_identifier` (simple delegation to the DAO layer).
     """
     portlet_entity = dao.get_portlet_by_identifier(portlet_identifier)
     return portlet_entity
Example #32
0
 def get_portlet_by_identifier(portlet_identifier):
     """
     Thin DAO wrapper.

     :param portlet_identifier: algorithm identifier of the wanted portlet
     :returns: the portlet entity with the algorithm identifier =@portlet_identifier
     """
     return dao.get_portlet_by_identifier(portlet_identifier)
    def __get_portlets(self, path_portlets):
        """
        Given a path in the form of a python package e.g.: "tvb.portlets", import
        the package, get its folder and look for all the XML files defined
        there, then read all the portlets defined there and store them in DB.

        :param path_portlets: python package path whose folder holds the portlet XML declarations
        """
        # NOTE(review): the previous version carried a copy-pasted duplicate of the
        # DB-refresh section at the end of the method; the duplicate re-ran the same
        # updates a second time and has been removed.
        portlet_package = __import__(path_portlets, globals(), locals(), ["__init__"])
        portlet_folder = os.path.dirname(portlet_package.__file__)
        portlets_list = []
        for file_n in os.listdir(portlet_folder):
            try:
                if file_n.endswith('.xml'):
                    complete_file_path = os.path.join(portlet_folder, file_n)
                    portlet_reader = XMLPortletReader(complete_file_path)
                    portlet_list = portlet_reader.get_algorithms_dictionary()
                    self.logger.debug("Starting to verify currently declared portlets in %s." % (file_n,))
                    for algo_identifier in portlet_list:
                        adapters_chain = portlet_reader.get_adapters_chain(algo_identifier)
                        is_valid = True
                        for adapter in adapters_chain:
                            class_name = adapter[ABCAdapter.KEY_TYPE].split('.')[-1]
                            module_name = adapter[ABCAdapter.KEY_TYPE].replace('.' + class_name, '')
                            try:
                                # Check that the module is properly declared
                                # (isinstance instead of the type(...) != ... anti-pattern)
                                module = __import__(module_name, globals(), fromlist=[class_name])
                                if not isinstance(module, ModuleType):
                                    is_valid = False
                                    self.logger.error("Wrong module %s in portlet %s" % (module_name, algo_identifier))
                                    continue
                                # Check that the class is properly declared
                                if not hasattr(module, class_name):
                                    is_valid = False
                                    self.logger.error("Wrong class %s in portlet %s." % (class_name, algo_identifier))
                                    continue
                                # Check inputs that refer to this adapter
                                portlet_inputs = portlet_list[algo_identifier][ELEM_INPUTS]
                                adapter_instance = PortletConfigurer.build_adapter_from_declaration(adapter)
                                if adapter_instance is None:
                                    is_valid = False
                                    self.logger.warning("No group having class=%s stored for "
                                                        "portlet %s." % (class_name, algo_identifier))
                                    continue
                                adapter_input_names = [entry[ABCAdapter.KEY_NAME] for entry
                                                       in adapter_instance.flaten_input_interface()]
                                for input_entry in portlet_inputs.values():
                                    if input_entry[ATT_OVERWRITE] == adapter[ABCAdapter.KEY_NAME]:
                                        if input_entry[ABCAdapter.KEY_NAME] not in adapter_input_names:
                                            self.logger.error("Invalid input %s for adapter %s" % (
                                                input_entry[ABCAdapter.KEY_NAME], adapter_instance))
                                            is_valid = False
                            except ImportError:
                                self.logger.error("Invalid adapter declaration %s in portlet %s" % (
                                                  adapter[ABCAdapter.KEY_TYPE], algo_identifier))
                                is_valid = False
                        if is_valid:
                            portlets_list.append(model.Portlet(algo_identifier, complete_file_path,
                                                               portlet_list[algo_identifier]['name']))
            except XmlParserException as excep:
                self.logger.exception(excep)
                self.logger.error("Invalid Portlet description File " + file_n + " will continue without it!!")

        self.logger.debug("Refreshing portlets from xml declarations.")
        stored_portlets = dao.get_available_portlets()
        # First update portlets already present in DB with the freshly verified data
        for stored_portlet in stored_portlets:
            for verified_portlet in portlets_list:
                if stored_portlet.algorithm_identifier == verified_portlet.algorithm_identifier:
                    stored_portlet.xml_path = verified_portlet.xml_path
                    stored_portlet.last_introspection_check = datetime.datetime.now()
                    stored_portlet.name = verified_portlet.name
                    dao.store_entity(stored_portlet)
                    break

        # Now add portlets that were not in DB at previous run but are valid now
        for portlet in portlets_list:
            db_entity = dao.get_portlet_by_identifier(portlet.algorithm_identifier)
            if db_entity is None:
                self.logger.debug("Will now store portlet %s" % (str(portlet),))
                dao.store_entity(portlet)

    @staticmethod
    def _is_concrete_subclass(clz, super_cls):
        """
        Return True only when `clz` is a class, is not abstract, and derives
        from `super_cls`; False otherwise.
        """
        if not inspect.isclass(clz):
            return False
        if inspect.isabstract(clz):
            return False
        return issubclass(clz, super_cls)


    def __get_datatypes(self, path_types):
        """
        Imports each DataType to update the DB model, by creating a new table for each DataType.
        """
        for my_type in Introspector.__read_module_variable(path_types):
            try: