def __init__(self):
        self.logger = get_logger(self.__class__.__module__)

        self.user_service = UserService()
        self.flow_service = FlowService()

        analyze_category = self.flow_service.get_launchable_non_viewers()
        self.analyze_category_link = '/flow/step/' + str(analyze_category.id)
        self.analyze_adapters = None

        self.connectivity_tab_link = '/flow/step_connectivity'
        view_category = self.flow_service.get_visualisers_category()
        conn_id = self.flow_service.get_algorithm_by_module_and_class(CONNECTIVITY_MODULE, CONNECTIVITY_CLASS)[1].id
        connectivity_link = self.get_url_adapter(view_category.id, conn_id)

        self.connectivity_submenu = [dict(title="Large Scale Connectivity", subsection="connectivity",
                                          description="View Connectivity Regions. Perform Connectivity lesions",
                                          link=connectivity_link),
                                     dict(title="Local Connectivity", subsection="local",
                                          link='/spatial/localconnectivity/step_1/1',
                                          description="Create or view existent Local Connectivity entities.")]
        self.burst_submenu = [dict(link='/burst', subsection='burst',
                                   title='Simulation Cockpit', description='Manage simulations'),
                              dict(link='/burst/dynamic', subsection='dynamic',
                                   title='Phase plane', description='Configure model dynamics')]
Example #2
def fire_simulation(project_id=1, **kwargs):
    project = dao.get_project_by_id(project_id)
    flow_service = FlowService()

    # below the holy procedure to launch with the correct parameters taken from the defaults
    stored_adapter = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    simulator_adapter = ABCAdapter.build_adapter(stored_adapter)
    flatten_interface = simulator_adapter.flaten_input_interface()
    itree_mngr = flow_service.input_tree_manager
    prepared_flatten_interface = itree_mngr.fill_input_tree_with_options(flatten_interface, project.id,
                                                                         stored_adapter.fk_category)
    launch_args = {}
    for entry in prepared_flatten_interface:
        value = entry['default']
        if isinstance(value, dict):
            value = str(value)
        if hasattr(value, 'tolist'):
            value = value.tolist()
        launch_args[entry['name']] = value
    launch_args.update(**kwargs)
    # end of magic

    launched_operation = flow_service.fire_operation(simulator_adapter, project.administrator,
                                                     project.id, **launch_args)[0]
    return launched_operation
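A minimal usage sketch for fire_simulation above (hedged: it assumes the TVB profile and database are already initialized, that project 1 exists, and that 'simulation_length' is a valid flat parameter name in this simulator version):

# Hypothetical call: override one default parameter, keep the rest as prepared above.
operation = fire_simulation(project_id=1, simulation_length=32.0)
print(operation.id, operation.status)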
Example #4
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations, giving the user just enough disk space that the first finishes;
        after the user's HDD quota is updated, the second should not.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)

        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 1 + float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now update the maximum disk size to be less than the size of the previously stored datatypes (transform kB to MB)
        # plus what is estimated to be required by the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float(
            adapter.get_required_disk_size(**data) - 1
        )

        self.assertRaises(
            NoMemoryAvailableException,
            self.operation_service.initiate_operation,
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            **data
        )
        self._assert_stored_dt2()
Example #5
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         estimated_disk_size=space_taken_by_started,
     )
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.assertRaises(
         NoMemoryAvailableException,
         self.operation_service.initiate_operation,
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         **data
     )
     self._assert_no_dt2()
Example #6
    def _store_imported_datatypes_in_db(self, project, all_datatypes,
                                        dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_allready_in_tvb:
                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_allready_in_tvb.id],
                                        project.id)
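Note on the branch above: when a datatype with the same GID is already in TVB, the importer calls FlowService.create_link to link the existing entity into the target project instead of storing a duplicate row, so repeated imports stay idempotent.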
Example #7
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user, self.test_project.id, adapter, tmp_folder, **data
     )
     self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.find_group(module, class_name)
     self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.")
     self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.")
     dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     self.assertEqual(count, 1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #8
    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id,
            expected_data.module + "." + expected_data.type)[0]
        assert 1 == len(
            data_types), "Project should contain only one data type = Sensors."

        sensors = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert sensors is not None, "Sensors instance should not be None"

        return sensors
Example #9
    def import_surface_zip(user,
                           project,
                           zip_path,
                           surface_type,
                           zero_based='True'):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_surface_importer',
            'ZIPSurfaceImporter')

        form = ZIPSurfaceImporterForm()
        form.fill_from_post({
            '_uploaded': Part(zip_path, HeaderMap({}), ''),
            '_zero_based_triangles': zero_based,
            '_should_center': 'True',
            '_surface_type': surface_type,
            '_Data_Subject': 'John Doe'
        })
        form.uploaded.data = zip_path
        importer.submit_form(form)

        ### Launch import Operation
        FlowService().fire_operation(importer, user, project.id,
                                     **form.get_form_values())

        data_types = FlowService().get_available_datatypes(
            project.id, SurfaceIndex)[0]
        assert 1 == len(
            data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        surface.user_tag_3 = ''
        return surface
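A hedged invocation sketch for import_surface_zip (the user/project factory calls follow the pattern of the other examples on this page; the zip file name and the surface-type string are placeholders):

# Hypothetical usage; 'Cortical Surface' stands in for whatever surface-type constant applies.
user = TestFactory.create_user('surface_user')
project = TestFactory.create_project(user, 'surface_project')
surface = import_surface_zip(user, project, 'cortex_68k.zip', 'Cortical Surface')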
Example #10
    def _store_imported_datatypes_in_db(self, project, all_datatypes,
                                        dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_allready_in_tvb:
                # Compute disk size. Similar to ABCAdapter._capture_operation_results.
                # No need to close the h5 as we have not written to it.
                associated_file = os.path.join(
                    datatype.storage_path, datatype.get_storage_file_name())
                datatype.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)

                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_allready_in_tvb.id],
                                        project.id)
Example #11
def launch_simulation_workflow(json_path, prj_id):
    """

    :param json_path: Path towards a local JSON file exported from GUI
    :param prj_id: ID of a project that needs to exist in the DB; it can be taken from the web interface
    """
    project = dao.get_project_by_id(prj_id)

    with open(json_path, 'rb') as input_file:
        simulation_json = input_file.read()
        simulation_json = json.loads(simulation_json)
        LOG.info("Simulation JSON loaded from file '%s': \n  %s", json_path,
                 simulation_json)

        importer = ImportService()
        simulation_config = importer.load_burst_entity(simulation_json, prj_id)
        LOG.info("Simulation Workflow configuration object loaded: \n  %s",
                 simulation_config)

        flow_service = FlowService()
        stored_adapter = flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        LOG.info("Found Simulation algorithm in local DB: \n   %s",
                 stored_adapter)

        burst_service = BurstService()
        burst_service.launch_burst(simulation_config, 0, stored_adapter.id,
                                   project.administrator.id, LAUNCH_NEW)
        LOG.info(
            "Check in the web GUI for your operation. It should be starting now ..."
        )
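If run as a script, the launcher above could be driven like this (both arguments are placeholders; the JSON must have been exported from the web GUI and the project ID must exist):

if __name__ == "__main__":
    # Hypothetical inputs for launch_simulation_workflow.
    launch_simulation_workflow('/home/tvb_user/burst_export.json', 1)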
Example #12
    def import_surface_gifti(user, project, path):
        """
        This method is used for importing data in GIFTI format
        :param path: absolute path of the file to be imported
        """

        # Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.gifti_surface_importer', 'GIFTISurfaceImporter')

        form = GIFTISurfaceImporterForm()
        form.fill_from_post({'_file_type': form.get_view_model().KEY_OPTION_READ_METADATA,
                             '_data_file': Part(path, HeaderMap({}), ''),
                             '_data_file_part2': Part('', HeaderMap({}), ''),
                             '_should_center': 'False',
                             '_Data_Subject': 'John Doe',
                            })
        form.data_file.data = path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer, user, project.id, view_model=view_model)

        surface = CorticalSurface
        data_types = FlowService().get_available_datatypes(project.id,
                                                           surface.__module__ + "." + surface.__name__)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"

        return surface
Example #13
class _BaseLinksTest(TransactionalTestCase):
    GEORGE1st = "george the grey"
    GEORGE2nd = "george"

    def setUpTVB(self):
        """
        Creates a user, an algorithm and 2 projects
        Project src_project will have an operation and 2 datatypes
        Project dest_project will be empty.
        Initializes a flow and a project service
        """
        datatype_factory = DatatypesFactory()
        self.user = datatype_factory.user
        self.src_project = datatype_factory.project

        self.red_datatype = datatype_factory.create_simple_datatype(subject=self.GEORGE1st)
        self.blue_datatype = datatype_factory.create_datatype_with_storage(subject=self.GEORGE2nd)

        # create the destination project
        self.dest_project = TestFactory.create_project(admin=datatype_factory.user, name="destination")

        self.flow_service = FlowService()
        self.project_service = ProjectService()

    def tearDown(self):
        self.clean_database(delete_folders=True)

    def red_datatypes_in(self, project_id):
        return self.flow_service.get_available_datatypes(project_id, Datatype1)[1]

    def blue_datatypes_in(self, project_id):
        return self.flow_service.get_available_datatypes(project_id, Datatype2)[1]
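The [1] indexing in the two helpers above assumes get_available_datatypes returns a (rows, count) pair, matching the other examples on this page, which take [0] for the rows; a sketch of that assumed convention:

# Assumed return shape: rows at index 0, total count at index 1 (project_id is a placeholder).
rows, count = FlowService().get_available_datatypes(project_id, Datatype1)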
Example #14
    def initialize_two_projects(self, simple_datatype_factory,
                                datatype_with_storage_factory):
        """
        Creates a user, an algorithm and 2 projects
        Project src_project will have an operation and 2 datatypes
        Project dest_project will be empty.
        Initializes a flow and a project service
        """
        self.clean_database(delete_folders=True)

        src_user = TestFactory.create_user('Source_User')
        self.src_usr_id = src_user.id
        self.src_project = TestFactory.create_project(src_user,
                                                      "Source_Project")

        self.red_datatype = simple_datatype_factory(subject=self.GEORGE1st)
        self.blue_datatype = datatype_with_storage_factory(
            subject=self.GEORGE2nd)

        # create the destination project
        dst_user = TestFactory.create_user('Destination_User')
        self.dst_usr_id = dst_user.id
        self.dest_project = TestFactory.create_project(dst_user,
                                                       "Destination_Project")

        self.flow_service = FlowService()
        self.project_service = ProjectService()
Example #15
    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
Example #16
    def import_surface_obj(user, project, obj_path, surface_type):
        # Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        form = ObjSurfaceImporterForm()
        form.fill_from_post({
            'data_file': Part(obj_path, HeaderMap({}), ''),
            'surface_type': surface_type,
            'Data_Subject': 'John Doe'
        })
        form.data_file.data = obj_path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer,
                                     user,
                                     project.id,
                                     view_model=view_model)

        data_types = FlowService().get_available_datatypes(
            project.id, SurfaceIndex)[0]
        assert 1 == len(
            data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface
Example #18
    def import_sensors(user, project, zip_path, sensors_type):
        """
        This method is used for importing sensors
        :param zip_path: absolute path of the file to be imported
        """

        # Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.sensors_importer', 'SensorsImporter')

        form = SensorsImporterForm()
        form.fill_from_post({
            'sensors_file': Part(zip_path, HeaderMap({}), ''),
            'sensors_type': sensors_type,
            'Data_Subject': 'John Doe'
        })
        form.sensors_file.data = zip_path
        form.sensors_type.data = sensors_type
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer,
                                     user,
                                     project.id,
                                     view_model=view_model)

        data_types = FlowService().get_available_datatypes(
            project.id, SensorsIndex)[0]
        assert 1 == len(
            data_types), "Project should contain only one data type = Sensors."
        sensors = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert sensors is not None, "Sensors instance should not be none"
        return sensors
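A hedged usage sketch for import_sensors (the zip name and the sensors-type label are placeholders; the label must match one of the importer's options):

# Hypothetical invocation of the helper above.
sensors = import_sensors(user, project, 'eeg_sensors.zip', 'EEG Sensors')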
Example #19
def update():
    """
    Update TVB code to SVN revision 4455.
    This update was done for release 1.0.2.
    """
    projects_count = dao.get_all_projects(is_count=True)

    for page_start in range(0, projects_count, PAGE_SIZE):
        projects_page = dao.get_all_projects(page_start=page_start,
                                             page_size=PAGE_SIZE)

        for project in projects_page:
            try:
                user = dao.get_system_user()
                adapter = ObjSurfaceImporter()
                FlowService().fire_operation(adapter,
                                             user,
                                             project.id,
                                             visible=False,
                                             surface_type=EEG_CAP,
                                             data_file=DATA_FILE_EEG_CAP)
                adapter = ObjSurfaceImporter()
                FlowService().fire_operation(adapter,
                                             user,
                                             project.id,
                                             visible=False,
                                             surface_type=FACE,
                                             data_file=DATA_FILE_FACE)
            except Exception as excep:
                LOGGER.exception(excep)

    TvbProfile.current.manager.add_entries_to_config_file(
        {stored.KEY_MATLAB_EXECUTABLE: get_matlab_executable()})
Example #20
 def __init__(self, conf):
     """
     :param conf: burst configuration entity
     """
     self.logger = get_logger(__name__)
     self.flow_service = FlowService()
     self.conf = conf
Example #22
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)

        self.user_service = UserService()
        self.flow_service = FlowService()

        self.analyze_category_link = '/flow/step_analyzers'
        self.analyze_adapters = None

        self.connectivity_tab_link = '/flow/step_connectivity'
        view_category = self.flow_service.get_visualisers_category()
        conn_id = self.flow_service.get_algorithm_by_module_and_class(CONNECTIVITY_MODULE, CONNECTIVITY_CLASS).id
        connectivity_link = self.get_url_adapter(view_category.id, conn_id)

        self.connectivity_submenu = [dict(title="Large Scale Connectivity", link=connectivity_link,
                                          subsection=WebStructure.SUB_SECTION_CONNECTIVITY,
                                          description="View Connectivity Regions. Perform Connectivity lesions"),
                                     dict(title="Local Connectivity", link='/spatial/localconnectivity/step_1/1',
                                          subsection=WebStructure.SUB_SECTION_LOCAL_CONNECTIVITY,
                                          description="Create or view existent Local Connectivity entities.")]

        allen_algo = self.flow_service.get_algorithm_by_module_and_class(ALLEN_CREATOR_MODULE, ALLEN_CREATOR_CLASS)
        if allen_algo:
            # Only add the Allen Creator if AllenSDK is installed
            allen_link = self.get_url_adapter(allen_algo.fk_category, allen_algo.id)
            self.connectivity_submenu.append(dict(title="Allen Connectome Downloader", link=allen_link,
                                                  subsection=WebStructure.SUB_SECTION_ALLEN,
                                                  description="Download a mouse connectivity from Allen dataset"))

        self.burst_submenu = [dict(link='/burst', subsection=WebStructure.SUB_SECTION_BURST,
                                   title='Simulation Cockpit', description='Manage simulations'),
                              dict(link='/burst/dynamic', subsection='dynamic',
                                   title='Phase plane', description='Configure model dynamics')]
Example #23
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations, giving the user just enough disk space that the first finishes;
        after the user's HDD quota is updated, the second should not.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to be less than the size of the previously stored datatypes (transform kB to MB)
        # plus what is estimated to be required by the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                                float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter,
                          tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
Example #25
    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertIsNotNone(operation_group_id, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
Example #26
    def _importSurface(self, import_file_path=None):
        """
        This method is used for importing data in GIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.gifti_surface_importer',
                               'GIFTISurfaceImporter')
        importer = ABCAdapter.build_adapter(group)

        args = {
            'data_file': import_file_path,
            DataTypeMetaData.KEY_SUBJECT: ""
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, surface.module + "." + surface.type)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "TimeSeries should not be none")

        return surface
Example #27
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         estimated_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data) + space_taken_by_started -
         1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.assertRaises(NoMemoryAvailableException,
                       self.operation_service.initiate_operation,
                       self.test_user, self.test_project.id, adapter,
                       tmp_folder, **data)
     self._assert_no_dt2()
Example #28
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user, self.test_project.id, adapter, tmp_folder, **data)
     self.assertTrue(
         res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.find_group(module, class_name)
     self.assertEqual(group.module,
                      'tvb.tests.framework.adapters.testadapter1',
                      "Wrong data stored.")
     self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
     dts, count = dao.get_values_of_datatype(self.test_project.id,
                                             Datatype1)
     self.assertEqual(count, 1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #29
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations, giving the user just enough disk space that the first finishes;
        after the user's HDD quota is updated, the second should not.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)

        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (
            1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now update the maximum disk size to be less than the size of the previously stored datatypes (transform kB to MB)
        # plus what is estimated to be required by the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException,
                          self.operation_service.initiate_operation,
                          self.test_user, self.test_project.id, adapter,
                          tmp_folder, **data)
        self._assert_stored_dt2()
Example #30
    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough available space so that both finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now update the maximum disk size to be the size of the previously stored datatypes (transform from kB to MB)
        # plus what is estimated to be required by the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        self._assert_stored_dt2(2)
Example #31
    def test_delete_dt_free_HDD_space(self):
        """
        Launch two operations and give the user enough available space so that both finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        self._assert_stored_dt2()
Example #32
    def setUp(self):
        """
        Reset the database before each test.
        """
        initialize_storage()
        user = model.User("test_user", "test_pass", "*****@*****.**", True,
                          "user")
        self.test_user = dao.store_entity(user)
        data = dict(name='test_proj', description='desc', users=[])
        self.test_project = ProjectService().store_project(
            self.test_user, True, None, **data)
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "INTERMEDIATE"
        }
        algo_group = dao.find_group(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.simulator_adapter = FlowService().build_adapter_instance(
            algo_group)

        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(SIMULATOR_PARAMETERS),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED,
                                         method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(self.operation)

        SIMULATOR_PARAMETERS['connectivity'] = self._create_connectivity(
            self.CONNECTIVITY_NODES)
Example #33
 def test_adapter_memory(self):
     """
     Test that a method not implemented exception is raised in case the
     get_required_memory_size method is not implemented.
     """
     algo_group = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
     adapter = FlowService().build_adapter_instance(algo_group)
     self.assertEqual(42, adapter.get_required_memory_size())
Example #34
    def transactional_setup_method(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()

        category = dao.get_uploader_categories()[0]
        self.algorithm = dao.store_entity(model_operation.Algorithm(TEST_ADAPTER_VALID_MODULE,
                                                                    TEST_ADAPTER_VALID_CLASS, category.id))
Example #35
    def transactional_setup_method(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)

        category = dao.get_uploader_categories()[0]
        self.algorithm = dao.store_entity(model.Algorithm(TEST_ADAPTER_VALID_MODULE,
                                                          TEST_ADAPTER_VALID_CLASS, category.id))
Example #36
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.clean_database()
     self.flow_service = FlowService()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(admin=self.test_user)
     self.operation = TestFactory.create_operation(
         test_user=self.test_user, test_project=self.test_project)
Example #37
    def test_adapter_launch(self):
        """
        Test that the adapter launches and successfully generates a datatype measure entry.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        algo_group = FlowService().get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED,
                                         method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
        dummy_time = numpy.arange(1, 11)

        # Get connectivity
        connectivities = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.connectivity.Connectivity")[0]
        self.assertEqual(2, len(connectivities))
        connectivity_gid = connectivities[0][2]

        dummy_time_series = TimeSeriesRegion()
        dummy_time_series.storage_path = storage_path
        dummy_time_series.write_data_slice(dummy_input)
        dummy_time_series.write_time_slice(dummy_time)
        dummy_time_series.close_file()
        dummy_time_series.start_time = 0.0
        dummy_time_series.sample_period = 1.0
        dummy_time_series.connectivity = connectivity_gid

        adapter_instance = StoreAdapter([dummy_time_series])
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {})

        dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                                   dummy_time_series.gid,
                                                   'gid')[0]
        ts_metric_adapter = TimeseriesMetricsAdapter()
        resulted_metric = ts_metric_adapter.launch(dummy_time_series)
        self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                        "Result should be a datatype measure.")
        self.assertTrue(
            len(resulted_metric.metrics) >= len(
                ts_metric_adapter.available_algorithms.keys()),
            "At least a result should have been generated for every metric.")
        for metric_value in resulted_metric.metrics.values():
            self.assertTrue(isinstance(metric_value, (float, int)))
Example #38
 def setUp(self):
     """
     Sets up the testing environment;
     saves config file;
     creates a test user, a test project;
     creates burst, operation, flow and workflow services
     """
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
Example #39
 def create_adapter(algo_group=None, test_project=None):
     """
     :returns: Adapter Class after initialization.
     """
     if algo_group is None:
         algo_group = dao.find_group('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
         
     if test_project is None:
         test_user = TestFactory.create_user()
         test_project = TestFactory.create_project(test_user)
         
     group, _ = FlowService().prepare_adapter(test_project.id, algo_group)
     return FlowService().build_adapter_instance(group)
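Since both arguments default to None, the factory above can be called bare in tests; a sketch (assuming a clean test database):

# Builds a throw-away user/project and the NDimensionArrayAdapter group by default.
adapter = create_adapter()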
Example #40
    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}

        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()
Example #41
 def test_launch_operation_HDD_full_space(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                       self.test_project.id, adapter,
                       tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
     self._assert_no_dt2()
Example #43
    def setUp(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)
        ### Insert some starting data in the database.
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        categ2 = model.AlgorithmCategory('two', rawinput=True)
        self.categ2 = dao.store_entity(categ2)

        group1 = model.AlgorithmGroup("test_module1", "classname1", categ1.id)
        self.algo_group1 = dao.store_entity(group1)
        group2 = model.AlgorithmGroup("test_module2", "classname2", categ2.id)
        self.algo_group2 = dao.store_entity(group2)
        group3 = model.AlgorithmGroup("test_module3", "classname3", categ1.id)
        self.algo_group3 = dao.store_entity(group3)

        group_v = model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ2.id)
        self.algo_group_v = dao.store_entity(group_v)

        algo_v = model.Algorithm(self.algo_group_v.id, 'ident', name='', req_data='', param_name='', output='')
        self.algorithm_v = dao.store_entity(algo_v)

        algo1 = model.Algorithm(self.algo_group1.id, 'id', name='', req_data='', param_name='', output='')
        self.algorithm1 = dao.store_entity(algo1)
Example #44
 def __init__(self):
     BaseController.__init__(self)
     self.flow_service = FlowService()
     self.logger = get_logger(__name__)
     editable_entities = [dict(link='/spatial/stimulus/region/step_1_submit/1/1', title='Region Stimulus',
                               subsection='regionstim', description='Create a new Stimulus on Region level'),
                          dict(link='/spatial/stimulus/surface/step_1_submit/1/1', title='Surface Stimulus',
                               subsection='surfacestim', description='Create a new Stimulus on Surface level')]
     self.submenu_list = editable_entities
Example #45
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.clean_database()
     self.flow_service = FlowService()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(admin=self.test_user)
     self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
Example #46
class _BaseLinksTest(TransactionalTestCase):

    GEORGE1st = "george the grey"
    GEORGE2nd = "george"


    def _initialize_two_projects(self):
        """
        Creates a user, an algorithm and 2 projects
        Project src_project will have an operation and 2 datatypes
        Project dest_project will be empty.
        Initializes a flow and a project service
        """
        self.datatype_factory_src = DatatypesFactory()
        self.src_project = self.datatype_factory_src.project
        self.src_usr_id = self.datatype_factory_src.user.id

        self.red_datatype = self.datatype_factory_src.create_simple_datatype(subject=self.GEORGE1st)
        self.blue_datatype = self.datatype_factory_src.create_datatype_with_storage(subject=self.GEORGE2nd)

        # create the destination project
        self.datatype_factory_dest = DatatypesFactory()
        self.dest_project = self.datatype_factory_dest.project
        self.dest_usr_id = self.datatype_factory_dest.user.id

        self.flow_service = FlowService()
        self.project_service = ProjectService()


    def transactional_setup_method(self):
        self.clean_database(delete_folders=True)
        self._initialize_two_projects()


    def transactional_teardown_method(self):
        self.clean_database(delete_folders=True)


    def red_datatypes_in(self, project_id):
        return self.flow_service.get_available_datatypes(project_id, Datatype1)[1]


    def blue_datatypes_in(self, project_id):
        return self.flow_service.get_available_datatypes(project_id, Datatype2)[1]
Example #47
def import_h5(file_path, project_id):

    flow_service = FlowService()

    ## The project ID must exist in the DB; it can be taken from the web interface:
    project = dao.get_project_by_id(project_id)

    adapter_instance = ABCAdapter.build_adapter_from_class(TVBImporter)

    ## Prepare the input algorithms as if they were coming from web UI submit:
    launch_args = {"data_file": file_path}

    print "We will try to import file at path " + file_path
    ## launch an operation and have the results stored both in DB and on disk
    launched_operations = flow_service.fire_operation(
        adapter_instance, project.administrator, project.id, **launch_args
    )

    print "Operation launched. Check the web UI"
    def setUp(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)

        category = dao.get_uploader_categories()[0]
        self.algorithm = dao.store_entity(model.Algorithm(TEST_ADAPTER_VALID_MODULE,
                                                          TEST_ADAPTER_VALID_CLASS, category.id))
    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing an adapter that requires
        disk space, with exactly enough space available.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
    def test_delete_dt_free_HDD_space(self):
        """
        Launch one operation, then delete its resulting datatype to free disk
        space, so that a second launch also fits within the same space limit.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()
    def transactional_setup_method(self):
        """
        Sets up the testing environment:
        saves the config file,
        creates a test user and a test project,
        creates burst, operation, flow and workflow services.
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()
    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)
    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give enough available space for user so that both should finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        datatype = self._assert_stored_dt2()

        # Now update the maximum disk size to be the size of the previously produced datatype
        # plus what the next operation is estimated to require
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        self._assert_stored_dt2(2)
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject", description="test_desc")
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.zip_path = None
    def __init__(self, overwrites=None, settings_file=None):
        """ Parameters can be overwritten either from a settings file or from a dictionary. """
        if overwrites is not None:
            self.overwrites.update(overwrites)
        if settings_file is not None:
            settings = open(settings_file).read()
            for line in settings.split('\n'):
                if not line.strip():
                    continue
                key, value = line.split('=')
                self.overwrites[key.strip()] = value.strip()
        if KEY_PROJECT not in self.overwrites:
            raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
        self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
        self.flow_service = FlowService()
        self.operation_service = OperationService()
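
# A usage sketch for the constructor above, assuming it belongs to a launcher
# class that defines `overwrites` as a class-level dict; the class name,
# settings file name, and keys below are hypothetical, and KEY_PROJECT's
# actual value comes from the defining module:
#
# settings.cfg (hypothetical contents, one KEY=VALUE pair per line):
#   project=1
#   simulation_length=1000
launcher = BatchLauncher(settings_file="settings.cfg")   # hypothetical class hosting the __init__ above
# or, equivalently, overriding from a dictionary:
launcher = BatchLauncher(overwrites={KEY_PROJECT: 1})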
def launch_simulation_workflow(json_path, prj_id):
    """

    :param json_path: Path towards a local JSON file exported from GUI
    :param prj_id: This ID of a project needs to exists in DB, and it can be taken from the WebInterface
    """
    project = dao.get_project_by_id(prj_id)

    with open(json_path, 'rb') as input_file:
        simulation_json = input_file.read()
        simulation_json = json.loads(simulation_json)
        LOG.info("Simulation JSON loaded from file '%s': \n  %s", json_path, simulation_json)

        importer = ImportService()
        simulation_config = importer.load_burst_entity(simulation_json, prj_id)
        LOG.info("Simulation Workflow configuration object loaded: \n  %s", simulation_config)

        flow_service = FlowService()
        simulator_algorithm, _ = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        LOG.info("Found Simulation algorithm in local DB: \n   %s", simulator_algorithm)

        burst_service = BurstService()
        burst_service.launch_burst(simulation_config, 0, simulator_algorithm.id, project.administrator.id, LAUNCH_NEW)
        LOG.info("Check in the web GUI for your operation. It should be starting now ...")
    def setUp(self):
        """
        Reset the database before each test.
        """
        initialize_storage()
        self.datatypes_factory = DatatypesFactory()
        self.test_user = self.datatypes_factory.get_user()
        self.test_project = self.datatypes_factory.get_project()
        self.connectivity = self.datatypes_factory.create_connectivity(self.CONNECTIVITY_NODES)[1]

        algo_group = dao.find_group(SIMULATOR_MODULE, SIMULATOR_CLASS)
        algorithm = dao.get_algorithm_by_group(algo_group.id)
        self.simulator_adapter = FlowService().build_adapter_instance(algo_group)
        self.operation = TestFactory.create_operation(algorithm, self.test_user, self.test_project,
                                                      model.STATUS_STARTED, json.dumps(SIMULATOR_PARAMETERS))

        SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid