Exemplo n.º 1
0
    def get_data_uploader_overlay(self, project_id):
        """
        Build the overlay dialog through which the user can upload
        data into the given project.

        :param project_id: id of the project receiving the uploaded data
        :returns: the filled template specification for the upload overlay
        """
        categories = self.flow_service.get_uploader_categories()
        uploader_groups = self.flow_service.get_groups_for_categories(categories)

        flow_controller = FlowController()
        templates_by_algo = {}
        tab_definitions = []
        # One overlay tab (and one adapter template) per uploader algorithm group.
        for group in uploader_groups:
            template_key = 'template_for_algo_' + str(group.id)
            templates_by_algo[template_key] = flow_controller.get_adapter_template(
                project_id, group.id, True, None)
            tab_definitions.append(
                OverlayTabDefinition(group.displayname, group.subsection_name,
                                     description=group.description))

        template_specification = self.fill_overlay_attributes(
            None, "Upload", "Upload data for this project",
            "project/upload_data_overlay", "dialog-upload", tab_definitions)
        template_specification['uploadAlgorithms'] = uploader_groups
        template_specification['projectId'] = project_id
        template_specification['algorithmsInterface'] = templates_by_algo
        return flow_controller.fill_default_attributes(template_specification)
Exemplo n.º 2
0
 def setUp(self):
     """
     Prepare the environment for a test: initialize the base test
     class, then instantiate the controllers and service under test.
     """
     BaseControllersTest.init(self)
     # The three instances below are independent of each other.
     self.operation_service = OperationService()
     self.burst_c = BurstController()
     self.flow_c = FlowController()
Exemplo n.º 3
0
    def get_operation_details(self,
                              entity_gid,
                              is_group=False,
                              back_page='burst'):
        """
        Build the HTML overlay holding the details for one Operation,
        or for a whole OperationGroup when *is_group* is set.

        :param entity_gid: GID of the Operation / OperationGroup entity
        :param is_group: True (or the string "1") for an OperationGroup
        :param back_page: identifier of the page to return to on close
        """
        wants_group = is_group is True or is_group == "1"
        if wants_group:
            # OperationGroup entity: visibility is assumed uniform across
            # all operations belonging to the group.
            template_specification = self._compute_operation_details(entity_gid, True)
            template_specification["nodeType"] = graph_structures.NODE_OPERATION_GROUP_TYPE
        else:
            # Plain Operation entity.
            template_specification = self._compute_operation_details(entity_gid)
            template_specification["displayRelevantButton"] = True
            template_specification["nodeType"] = graph_structures.NODE_OPERATION_TYPE

        template_specification["backPageIdentifier"] = back_page
        relevance_marker = (" node-relevant" if template_specification["isRelevant"]
                            else " node_irrelevant")
        overlay_class = ("can-browse editor-node node-type-"
                         + template_specification["nodeType"] + relevance_marker)

        template_specification = self.fill_overlay_attributes(
            template_specification, "Details", "Operation",
            "project/details_operation_overlay", overlay_class)
        return FlowController().fill_default_attributes(template_specification)
Exemplo n.º 4
0
    def get_project_uploader_overlay(self):
        """
        Build the overlay dialog through which the user can upload
        an entire project structure.
        """
        specification = self.fill_overlay_attributes(
            None, "Upload", "Project structure",
            "project/upload_project_overlay", "dialog-upload")
        flow_controller = FlowController()
        return flow_controller.fill_default_attributes(specification)
Exemplo n.º 5
0
    def launchloader(self, project_id, algo_group_id, cancel=False, **data):
        """
        Start the Upload mechanism: validate the request, execute the
        uploader adapter and render the project-structure page.

        :param project_id: id of the project the data is uploaded into
        :param algo_group_id: id of the uploader algorithm group to run
        :param cancel: when True on a POST, abort and redirect back
        :param data: form parameters forwarded to the adapter
        :raises cherrypy.HTTPRedirect: on cancel, invalid ids, or when the
            upload did not produce a result page (error is in the session)
        """
        success_link = "/project/editstructure/" + str(project_id)
        if ((cherrypy.request.method == 'POST' and cancel)
                or not (project_id and int(project_id) and
                        (algo_group_id is not None) and int(algo_group_id))):
            raise cherrypy.HTTPRedirect(success_link)

        project = self.project_service.find_project(project_id)
        group = self.flow_service.get_algo_group_by_identifier(algo_group_id)
        template_specification = FlowController().execute_post(
            project.id, success_link, success_link, group.fk_category, group,
            **data)
        # In case no redirect was done until now, it means there was a problem.
        if template_specification is None or cherrypy.request.method == 'POST':
            # It is a non-recoverable problem, error message is in session.
            raise cherrypy.HTTPRedirect(success_link)
        # BUG FIX: the original assignments ended with stray trailing commas,
        # which stored 1-element tuples ("project/structure",) instead of the
        # intended plain string values.
        template_specification[KEY_CONTENT] = "project/structure"
        template_specification["baseUrl"] = cfg.BASE_URL
        template_specification[bc.KEY_TITLE] = ""
        template_specification["project"] = project
        return self.fill_default_attributes(template_specification, 'data')
Exemplo n.º 6
0
    def get_datatype_details(self,
                             entity_gid,
                             back_page='burst',
                             exclude_tabs=None):
        """
        Returns the HTML which contains the details for the given dataType.

        :param entity_gid: GID of the DataType whose details are displayed
        :param back_page: identifier of the page to return to when the overlay closes
        :param exclude_tabs: optional list of tab names ("Metadata", "Analyzers",
            "Visualizers", "Links", "Export", "Resulted Datatypes") to omit
        """
        if exclude_tabs is None:
            exclude_tabs = []
        selected_project = bc.get_current_project()
        datatype_details, states, entity = self.project_service.get_datatype_details(
            entity_gid)

        ### Load DataType categories (only computed for valid entities)
        current_type = datatype_details.data_type
        datatype_gid = datatype_details.gid
        categories = {}
        if not entity.invalid:
            # getalgorithmsfordatatype returns a JSON string; decode it here.
            categories = self.getalgorithmsfordatatype(str(current_type),
                                                       str(datatype_gid))
            categories = json.loads(categories)

        datatype_id = datatype_details.data_type_id
        is_group = False
        if datatype_details.operation_group_id is not None:
            ## Is a DataTypeGroup: link operations against the group id instead.
            datatype_id = datatype_details.operation_group_id
            is_group = True

        ### Retrieve projects this datatype can be linked to / is linked in
        linkable_projects_dict = self._get_linkable_projects_dict(datatype_id)
        ### Load all exporters (only for valid entities)
        exporters = {}
        if not entity.invalid:
            exporters = ExportManager().get_exporters_for_data(entity)
        is_relevant = entity.visible

        # Base attributes consumed by the overlay template.
        template_specification = dict()
        template_specification["entity_gid"] = entity_gid
        template_specification["nodeFields"] = datatype_details.get_ui_fields()
        template_specification["allStates"] = states
        template_specification["project"] = selected_project
        template_specification["categories"] = categories
        template_specification["exporters"] = exporters
        template_specification["datatype_id"] = datatype_id
        template_specification["isGroup"] = is_group
        template_specification["isRelevant"] = is_relevant
        template_specification["nodeType"] = 'datatype'
        template_specification["backPageIdentifier"] = back_page
        template_specification.update(linkable_projects_dict)

        # CSS class and title for the overlay header.
        overlay_class = "can-browse editor-node node-type-" + str(
            current_type).lower()
        if is_relevant:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"
        overlay_title = current_type
        if datatype_details.datatype_tag_1:
            overlay_title += " " + datatype_details.datatype_tag_1

        # Build the overlay tabs, skipping any listed in exclude_tabs.
        # overlay_indexes keeps the fixed position of each tab in the template.
        tabs = []
        overlay_indexes = []
        if "Metadata" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Metadata", "metadata"))
            overlay_indexes.append(0)
        if "Analyzers" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Analyzers",
                                     "analyzers",
                                     enabled=categories
                                     and 'Analyze' in categories))
            overlay_indexes.append(1)
        if "Visualizers" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Visualizers",
                                     "visualizers",
                                     enabled=categories
                                     and 'View' in categories))
            overlay_indexes.append(2)

        # The Links tab is enabled when there is at least one project this
        # datatype can be linked to, or is already linked in.
        enable_link_tab = False
        if (not entity.invalid) and (linkable_projects_dict is not None):
            if self.PRROJECTS_FOR_LINK_KEY in linkable_projects_dict:
                projects_for_link = linkable_projects_dict[
                    self.PRROJECTS_FOR_LINK_KEY]
                if projects_for_link is not None and len(
                        projects_for_link) > 0:
                    enable_link_tab = True
            if self.PRROJECTS_LINKED_KEY in linkable_projects_dict:
                projects_linked = linkable_projects_dict[
                    self.PRROJECTS_LINKED_KEY]
                if projects_linked is not None and len(projects_linked) > 0:
                    enable_link_tab = True
        if "Links" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Links",
                                     "link_to",
                                     enabled=enable_link_tab))
            overlay_indexes.append(3)
        if "Export" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Export",
                                     "export",
                                     enabled=(exporters
                                              and len(exporters) > 0)))
            overlay_indexes.append(4)
        if "Resulted Datatypes" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition(
                    "Resulted Datatypes",
                    "result_dts",
                    enabled=self.project_service.
                    count_datatypes_generated_from(entity_gid)))
            overlay_indexes.append(5)
        template_specification = self.fill_overlay_attributes(
            template_specification, "DataType Details", overlay_title,
            "project/details_datatype_overlay", overlay_class, tabs,
            overlay_indexes)
        template_specification['baseUrl'] = cfg.BASE_URL
        #template_specification[bc.KEY_OVERLAY_PAGINATION] = True
        #template_specification[bc.KEY_OVERLAY_PREVIOUS] = "alert(1);"
        #template_specification[bc.KEY_OVERLAY_NEXT] = "alert(2);"
        return FlowController().fill_default_attributes(template_specification)
Exemplo n.º 7
0
def init_cherrypy(arguments=None):
    """
    Configure the CherryPy tree and fire up the HTTP server.

    :param arguments: optional list of module names whose static folders
        should be served under ``/static_<module>``.
    """
    #### Mount static folders from modules marked for introspection
    arguments = arguments or []
    config = TVBSettings.CHERRYPY_CONFIGURATION
    for module_name in arguments:
        imported_module = __import__(str(module_name), globals(), locals(),
                                     ["__init__"])
        static_root = os.path.dirname(os.path.abspath(imported_module.__file__))
        config["/static_" + str(module_name)] = {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': '.',
            'tools.staticdir.root': static_root
        }

    #### Mount controllers, and specify the root URL for them.
    # Table-driven mounting: each controller class is instantiated and
    # mounted at its URL, in the same order as before.
    controller_mounts = [
        (BaseController, "/"),
        (UserController, "/user/"),
        (ProjectController, "/project/"),
        (FigureController, "/project/figure/"),
        (FlowController, "/flow/"),
        (SettingsController, "/settings/"),
        (DTIPipelineController, "/pipeline/"),
        (HelpController, "/help/"),
        (BurstController, "/burst/"),
        (ParameterExplorationController, "/burst/explore/"),
        (SpatioTemporalController, "/spatial/"),
        (RegionsModelParametersController, "/spatial/modelparameters/regions/"),
        (SurfaceModelParametersController, "/spatial/modelparameters/surface/"),
        (RegionStimulusController, "/spatial/stimulus/region/"),
        (SurfaceStimulusController, "/spatial/stimulus/surface/"),
        (LocalConnectivityController, "/spatial/localconnectivity/"),
    ]
    for controller_class, url in controller_mounts:
        cherrypy.tree.mount(controller_class(), url, config=config)
    cherrypy.config.update(config)

    #----------------- Register additional request handlers -----------------
    # This tool checks for MAX upload size
    cherrypy.tools.upload = Tool('on_start_resource',
                                 RequestHandler.check_upload_size)
    # This tools clean up files on disk (mainly after export)
    cherrypy.tools.cleanup = Tool('on_end_request',
                                  RequestHandler.clean_files_on_disk)
    #----------------- End register additional request handlers ----------------

    #### HTTP Server is fired now ######
    cherrypy.engine.start()
Exemplo n.º 8
0
    def test_reduce_dimension_component(self):
        """
        Tests the generation of the component which allows the user
        to select one dimension from a multi dimension array.

        Launches an NDimensionArrayAdapter to create a MappedArray, renders
        the adapter input template, then checks the generated dimension
        selects, aggregation-function selects, per-dimension options and
        hidden fields in the resulting HTML.
        """
        flow_service = FlowService()
        # Precondition: project holds no MappedArray yet.
        inserted_data = flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted_data), 0, "Expected to find no data")
        # Launch the adapter to produce exactly one MappedArray.
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {}, **PARAMS)
        inserted_data = flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted_data), 1, "Problems when inserting data")

        # Render the adapter's input interface to HTML and parse it.
        algogroup = dao.find_group('tvb_test.adapters.ndimensionarrayadapter',
                                   'NDimensionArrayAdapter')
        _, interface = flow_service.prepare_adapter(self.test_project.id,
                                                    algogroup)
        self.template_specification['inputList'] = interface
        resulted_html = _template2string(self.template_specification)
        self.soup = BeautifulSoup(resulted_html)

        found_divs = self.soup.findAll(
            'p', attrs=dict(id="dimensionsDiv_input_data"))
        self.assertEqual(len(found_divs), 1, "Data generated incorrect")

        # Render the dimension-select component for the stored datatype.
        gid = inserted_data[0][2]
        cherrypy.session = {'user': self.test_user}
        entity = dao.get_datatype_by_gid(gid)
        component_content = FlowController().gettemplatefordimensionselect(
            gid, "input_data")
        self.soup = BeautifulSoup(component_content)

        #check dimensions: one select element per array dimension
        found_selects_0 = self.soup.findAll(
            'select', attrs=dict(id="dimId_input_data_dimensions_0"))
        found_selects_1 = self.soup.findAll(
            'select', attrs=dict(id="dimId_input_data_dimensions_1"))
        found_selects_2 = self.soup.findAll(
            'select', attrs=dict(id="dimId_input_data_dimensions_2"))
        self.assertEqual(len(found_selects_0), 1, "select not found")
        self.assertEqual(len(found_selects_1), 1, "select not found")
        self.assertEqual(len(found_selects_2), 1, "select not found")

        #check the aggregation functions selects (one per dimension)
        agg_selects_0 = self.soup.findAll(
            'select', attrs=dict(id="funcId_input_data_dimensions_0"))
        agg_selects_1 = self.soup.findAll(
            'select', attrs=dict(id="funcId_input_data_dimensions_1"))
        agg_selects_2 = self.soup.findAll(
            'select', attrs=dict(id="funcId_input_data_dimensions_2"))
        self.assertEqual(len(agg_selects_0), 1, "incorrect first dim")
        self.assertEqual(len(agg_selects_1), 1, "incorrect second dim")
        self.assertEqual(len(agg_selects_2), 1, "incorrect third dim.")

        # Every entry of each dimension must appear as an option labelled
        # "Time"/"Channel"/"Line" respectively, under the correct select.
        data_shape = entity.shape
        self.assertEqual(len(data_shape), 3, "Shape of the array is incorrect")
        for i in range(data_shape[0]):
            options = self.soup.findAll('option',
                                        attrs=dict(value=gid + "_0_" + str(i)))
            self.assertEqual(len(options), 1, "Generated option is incorrect")
            self.assertEqual(options[0].text, "Time " + str(i),
                             "The label of the option is not correct")
            self.assertEqual(options[0].parent.attrMap["name"],
                             "input_data_dimensions_0")
        for i in range(data_shape[1]):
            options = self.soup.findAll('option',
                                        attrs=dict(value=gid + "_1_" + str(i)))
            self.assertEqual(len(options), 1, "Generated option is incorrect")
            self.assertEqual(options[0].text, "Channel " + str(i),
                             "Option's label incorrect")
            self.assertEqual(options[0].parent.attrMap["name"],
                             "input_data_dimensions_1", "incorrect parent")
        for i in range(data_shape[2]):
            options = self.soup.findAll('option',
                                        attrs=dict(value=gid + "_2_" + str(i)))
            self.assertEqual(len(options), 1, "Generated option is incorrect")
            self.assertEqual(options[0].text, "Line " + str(i),
                             "The label of the option is not correct")
            self.assertEqual(options[0].parent.attrMap["name"],
                             "input_data_dimensions_2")

        #check the expected hidden fields carried along with the component
        expected_shape = self.soup.findAll(
            'input', attrs=dict(id="input_data_expected_shape"))
        self.assertEqual(len(expected_shape), 1,
                         "The generated option is not correct")
        self.assertEqual(expected_shape[0]["value"], "expected_shape_",
                         "The generated option is not correct")
        input_hidden_op = self.soup.findAll(
            'input', attrs=dict(id="input_data_operations"))
        self.assertEqual(len(input_hidden_op), 1,
                         "The generated option is not correct")
        self.assertEqual(input_hidden_op[0]["value"], "operations_",
                         "The generated option is not correct")
        input_hidden_dim = self.soup.findAll(
            'input', attrs=dict(id="input_data_expected_dim"))
        self.assertEqual(len(input_hidden_dim), 1,
                         "The generated option is not correct")
        self.assertEqual(input_hidden_dim[0]["value"], "requiredDim_1",
                         "The generated option is not correct")
        input_hidden_shape = self.soup.findAll(
            'input', attrs=dict(id="input_data_array_shape"))
        self.assertEqual(len(input_hidden_shape), 1,
                         "The generated option is not correct")
        self.assertEqual(input_hidden_shape[0]["value"], "[5, 1, 3]",
                         "The generated option is not correct")

        #check only the first option from the aggregations functions selects
        options = self.soup.findAll('option', attrs=dict(value="func_none"))
        self.assertEqual(len(options), 3,
                         "The generated option is not correct")
Exemplo n.º 9
0
class FlowContollerTest(BaseControllersTest):
    """
    Unit tests for FlowController.

    NOTE(review): the class name carries a typo ("Contoller"); it is kept
    because external code may reference this name.
    """

    def setUp(self):
        """
        Sets up the environment for testing;
        creates a `FlowController`, a `BurstController` and an `OperationService`.
        """
        BaseControllersTest.init(self)
        self.flow_c =  FlowController()
        self.burst_c = BurstController()
        self.operation_service = OperationService()


    def tearDown(self):
        """ Cleans up the testing environment """
        BaseControllersTest.cleanup(self)
        self.reset_database()


    def test_context_selected(self):
        """
        Remove the project from cherrypy session and check that you are
        redirected to projects page.
        """
        del cherrypy.session[b_c.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step)


    def test_invalid_step(self):
        """
        Pass an invalid step and make sure we are redirected to tvb start page.
        """
        self._expect_redirect('/tvb', self.flow_c.step)


    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and the result
        page has it's title given by category name.
        """
        categories = dao.get_algorithm_categories()
        for categ in categories:
            result_dict = self.flow_c.step(categ.id)
            self.assertTrue(b_c.KEY_SUBMENU_LIST in result_dict, 
                            "Expect to have a submenu with available algorithms for category.")
            self.assertEqual(result_dict["section_name"], categ.displayname.lower())


    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity submenu are returned for the 
        connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        self.assertEqual(result_dict['section_name'], 'connectivity')
        self.assertEqual(result_dict['submenu_list'], self.flow_c.connectivity_submenu)


    def test_default(self):
        """
        Test default method from step controllers. Check that the submit link is ok, that a mainContent
        is present in result dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_groups_by_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                self.assertEqual(result_dict[b_c.KEY_SUBMIT_LINK], '/flow/%i/%i'%(categ.id, algo.id))
                self.assertTrue('mainContent' in result_dict)
                self.assertTrue(result_dict['isAdapter'])


    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_groups_by_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i'%(self.test_project.id), 
                              self.flow_c.default, categories[0].id, algo_groups[0].id, 
                              cancel=True, back_page='operations')


    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check you get redirect to tvb entry
        page with error set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')


    def test_read_datatype_attribute(self):
        """
        Read an attribute from a datatype.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE", 'this is the stored data'.split())
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
        self.assertEqual(returned_data, '["this", "is", "the", "stored", "data"]')


    def test_read_datatype_attribute_method_call(self):
        """
        Call method on given datatype.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE", 'this is the stored data'.split())
        args = {'length' : 101}
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
        self.assertTrue(returned_data == str(range(101)))


    def test_get_simple_adapter_interface(self):
        """
        The simple adapter interface returned by the controller must match
        the input tree declared by the adapter itself.
        """
        adapter = dao.find_group('tvb_test.adapters.testadapter1', 'TestAdapter1')
        result = self.flow_c.get_simple_adapter_interface(adapter.id)
        expected_interface = TestAdapter1().get_input_tree()
        self.assertEqual(result['inputList'], expected_interface)


    def _long_burst_launch(self, is_range=False):
        """
        Helper: launch a long-running burst (optionally as a range over
        simulation_length) and return the persisted burst configuration.
        """
        self.burst_c.index()
        connectivity = DatatypesFactory().create_connectivity()[1]
        launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
        launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
        if not is_range:
            launch_params['simulation_length'] = '10000'
        else:
            launch_params['simulation_length'] = '[10000,10001,10002]'
            launch_params['first_range'] = 'simulation_length'
        burst_id, _ = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))
        return dao.get_burst_by_id(burst_id)


    def test_stop_burst_operation(self):
        """
        Stopping (without removing) a started burst operation must leave it
        in CANCELED state.
        """
        burst_config = self._long_burst_launch()
        # Wait (up to `timeout` seconds) for the burst operations to appear.
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        operation = dao.get_operations_in_burst(burst_config.id)[0]
        self.assertEqual(operation.status, model.STATUS_STARTED)
        self.flow_c.stop_burst_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        self.assertEqual(operation.status, model.STATUS_CANCELED)


    def test_stop_burst_operation_group(self):
        """
        Stopping a burst operation group must cancel every operation in it.
        """
        burst_config = self._long_burst_launch(True)
        # Wait (up to `timeout` seconds) for the burst operations to appear.
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        operations = dao.get_operations_in_burst(burst_config.id)
        for operation in operations:
            self.assertEqual(operation.status, model.STATUS_STARTED)
        self.flow_c.stop_burst_operation(operation.fk_operation_group, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_CANCELED)


    def test_remove_burst_operation(self):
        """
        Stopping with remove=True must delete the burst operation entirely.
        """
        burst_config = self._long_burst_launch()
        # Wait (up to `timeout` seconds) for the burst operations to appear.
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        operation = dao.get_operations_in_burst(burst_config.id)[0]
        self.assertEqual(operation.status, model.STATUS_STARTED)
        self.flow_c.stop_burst_operation(operation.id, 0, True)
        operation = dao.get_operation_by_id(operation.id)
        self.assertTrue(operation is None)


    def test_remove_burst_operation_group(self):
        """
        Stopping a group with remove=True must delete every operation in it.
        """
        burst_config = self._long_burst_launch(True)
        # Wait (up to `timeout` seconds) for the burst operations to appear.
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        operations = dao.get_operations_in_burst(burst_config.id)
        for operation in operations:
            self.assertEqual(operation.status, model.STATUS_STARTED)
        self.flow_c.stop_burst_operation(operation.fk_operation_group, 1, True)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertTrue(operation is None)


    def test_stop_operations(self):
        """
        A plain (non-burst) started operation must move to CANCELED when stopped.
        """
        module = "tvb_test.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_STARTED)
        self.flow_c.stop_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        self.assertEqual(operation.status, model.STATUS_CANCELED)


    def test_stop_operations_group(self):
        """
        Stopping an operation group (launched over a range) must cancel all
        of its operations.
        """
        module = "tvb_test.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {'first_range' : "test1_val1", "test1_val1": '5,6,7', 'test1_val2': 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_STARTED)
        self.flow_c.stop_operation(operation.fk_operation_group, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_CANCELED)
Exemplo n.º 10
0
 def setUp(self):
     """
     Initialize the base controller test environment and create the
     `FlowController` instance under test.
     """
     BaseControllersTest.init(self)
     self.flow_c = FlowController()
Exemplo n.º 11
0
class FlowContollerTest(TransactionalTestCase, BaseControllersTest):
    """ Unit tests for flowcontoller """
    def setUp(self):
        """
        Initialize the base controllers test environment and create the
        FlowController instance under test.
        """
        BaseControllersTest.init(self)
        self.flow_c = FlowController()

    def tearDown(self):
        """Clean up every resource created by BaseControllersTest.init."""
        BaseControllersTest.cleanup(self)

    def test_context_selected(self):
        """
        Dropping the selected project from the cherrypy session must cause a
        redirect to the projects listing page.
        """
        cherrypy.session.pop(b_c.KEY_PROJECT)
        self._expect_redirect('/project/viewall', self.flow_c.step)

    def test_invalid_step(self):
        """
        Calling `step` without a valid category id must redirect to the TVB
        start page.
        """
        self._expect_redirect('/tvb', self.flow_c.step)

    def test_valid_step(self):
        """
        Every algorithm category must produce a result page that carries a
        submenu of available algorithms and whose section name is the
        category's display name (lower-cased).
        """
        for category in dao.get_algorithm_categories():
            result = self.flow_c.step(category.id)
            self.assertTrue(
                b_c.KEY_SUBMENU_LIST in result,
                "Expect to have a submenu with available algorithms for category."
            )
            self.assertEqual(result["section_name"], category.displayname.lower())

    def test_step_connectivity(self):
        """
        The connectivity step must report the 'connectivity' section name and
        expose the controller's connectivity submenu.
        """
        result = self.flow_c.step_connectivity()
        self.assertEqual(result['section_name'], 'connectivity')
        self.assertEqual(result['submenu_list'], self.flow_c.connectivity_submenu)

    def test_default(self):
        """
        For every adapter group of each category, the default step page must
        carry the proper submit link, include a 'mainContent' entry, and have
        the isAdapter flag set.
        """
        cherrypy.request.method = "GET"
        for category in dao.get_algorithm_categories():
            for group in dao.get_groups_by_categories([category.id]):
                result = self.flow_c.default(category.id, group.id)
                self.assertEqual('/flow/%i/%i' % (category.id, group.id),
                                 result[b_c.KEY_SUBMIT_LINK])
                self.assertTrue('mainContent' in result)
                self.assertTrue(result['isAdapter'])

    def test_default_cancel(self):
        """
        Cancelling the default page submission must redirect back to the
        operations page of the current project.
        """
        cherrypy.request.method = "POST"
        category = dao.get_algorithm_categories()[0]
        group = dao.get_groups_by_categories([category.id])[0]
        expected_url = '/project/viewoperations/%i' % (self.test_project.id)
        self._expect_redirect(expected_url, self.flow_c.default,
                              category.id, group.id,
                              cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Invalid category/adapter identifiers must redirect to the TVB entry
        page with the error flag set.
        """
        self._expect_redirect('/tvb?error=True',
                              self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self):
        """
        Reading a stored attribute from a datatype must return its
        JSON-encoded value.
        """
        stored_words = 'this is the stored data'.split()
        datatype = DatatypesFactory().create_datatype_with_storage(
            "test_subject", "RAW_STATE", stored_words)
        result = self.flow_c.read_datatype_attribute(datatype.gid, "string_data")
        self.assertEqual('["this", "is", "the", "stored", "data"]', result)

    def test_read_datatype_attribute_method_call(self):
        """
        read_datatype_attribute must also support invoking a method on the
        datatype, forwarding the supplied keyword arguments to it.
        """
        datatype = DatatypesFactory().create_datatype_with_storage(
            "test_subject", "RAW_STATE", 'this is the stored data'.split())
        result = self.flow_c.read_datatype_attribute(
            datatype.gid, 'return_test_data', length=101)
        self.assertTrue(result == str(range(101)))

    def test_get_simple_adapter_interface(self):
        """
        The simple adapter interface for TestAdapter1 must expose exactly the
        adapter's declared input tree under 'inputList'.
        """
        adapter = dao.find_group('tvb_test.adapters.testadapter1',
                                 'TestAdapter1')
        result = self.flow_c.get_simple_adapter_interface(adapter.id)
        expected_interface = TestAdapter1().get_input_tree()
        self.assertEqual(result['inputList'], expected_interface)