Example #1
0
 def __init__(self,
              fk_launched_by,
              fk_launched_in,
              fk_from_algo,
              parameters,
              meta='',
              status=STATUS_PENDING,
              start_date=None,
              completion_date=None,
              op_group_id=None,
              additional_info='',
              user_group=None,
              range_values=None,
              estimated_disk_size=0):
     """
     Record a new operation entity.

     Foreign keys bind the operation to its launching user, project and
     source algorithm; a fresh GUID and the current timestamp are taken
     at construction, and the operation starts out visible.
     """
     # Identity and provenance
     self.gid = generate_guid()
     self.fk_launched_by = fk_launched_by
     self.fk_launched_in = fk_launched_in
     self.fk_from_algo = fk_from_algo
     self.fk_operation_group = op_group_id
     # Launch configuration
     self.parameters = parameters
     self.meta_data = meta
     self.range_values = range_values
     self.user_group = user_group
     self.additional_info = additional_info
     self.estimated_disk_size = estimated_disk_size
     # Lifecycle tracking
     self.create_date = datetime.datetime.now()
     self.start_date = start_date
     self.completion_date = completion_date
     self.status = status
     self.visible = True
 def __init__(self, project_id, name='incomplete', ranges=None):
     """
     Group of operations launched together over a parameter space.

     :param project_id: id of the project this group was launched in
     :param name: display name for the group
     :param ranges: optional sequence of up to three range definitions;
         only the supplied ones are stored (``range1``..``range3`` stay
         unset otherwise, matching the previous behavior)
     """
     # Former default was ``ranges=[]`` — a shared mutable default object;
     # ``None`` avoids that pitfall while keeping callers unchanged.
     if ranges is None:
         ranges = []
     self.name = name
     # Store at most three range definitions, as range1..range3 attributes.
     for position, range_def in enumerate(ranges[:3], start=1):
         setattr(self, 'range%d' % position, range_def)
     self.gid = generate_guid()
     self.fk_launched_in = project_id
 def __init__(self, project_id, name='incomplete', ranges=None):
     """
     Group of operations launched together over a parameter space.

     :param project_id: id of the project this group was launched in
     :param name: display name for the group
     :param ranges: optional sequence of up to three range definitions;
         only the supplied ones are stored (``range1``..``range3`` stay
         unset otherwise, matching the previous behavior)
     """
     # Former default was ``ranges=[]`` — a shared mutable default object;
     # ``None`` avoids that pitfall while keeping callers unchanged.
     if ranges is None:
         ranges = []
     self.name = name
     # Store at most three range definitions, as range1..range3 attributes.
     for position, range_def in enumerate(ranges[:3], start=1):
         setattr(self, 'range%d' % position, range_def)
     self.gid = generate_guid()
     self.fk_launched_in = project_id
Example #4
0
 def __portlet_config2portlet_entity(self, portlet_cfg):
     """
     Build a display-ready portlet entity from a portlet configuration as
     stored in session: copy over the computed status/error information,
     the name and tab index, and a fresh GUID for rendering in the burst page.
     """
     entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
     status, message = self.burst_service.get_portlet_status(portlet_cfg)
     entity.status = status
     entity.error_msg = message
     entity.name = portlet_cfg.name
     entity.index_in_tab = portlet_cfg.index_in_tab
     entity.td_gid = generate_guid()
     return entity
 def __portlet_config2portlet_entity(self, portlet_cfg):
     """
     Build a display-ready portlet entity from a portlet configuration as
     stored in session: copy over the computed status/error information,
     the name and tab index, and a fresh GUID for rendering in the burst page.
     """
     entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
     status, message = self.burst_service.get_portlet_status(portlet_cfg)
     entity.status = status
     entity.error_msg = message
     entity.name = portlet_cfg.name
     entity.index_in_tab = portlet_cfg.index_in_tab
     entity.td_gid = generate_guid()
     return entity
Example #6
0
    def __init__(self, gid=None, **kwargs):
        """
        Create a DataType entity: take the given GUID or generate a fresh
        one, record the concrete class/module for polymorphic reloading,
        then attempt the DB-field initialization with the given kwargs.
        """
        self.gid = gid if gid is not None else generate_guid()
        # Concrete type info, so the entity can be re-instantiated later.
        self.type = self.__class__.__name__
        self.module = self.__class__.__module__

        # Best-effort DB init: a failure is logged, not propagated.
        try:
            self.__initdb__(**kwargs)
        except Exception as exc:
            LOG.warning('Could not perform __initdb__: %r', exc)
        super(DataType, self).__init__()
    def __init__(self, gid=None, **kwargs):
        """
        Create a DataType entity: take the given GUID or generate a fresh
        one, record the concrete class/module for polymorphic reloading,
        then attempt the DB-field initialization with the given kwargs.
        """
        self.gid = gid if gid is not None else generate_guid()
        # Concrete type info, so the entity can be re-instantiated later.
        self.type = self.__class__.__name__
        self.module = self.__class__.__module__

        # Best-effort DB init: a failure is logged, not propagated.
        try:
            self.__initdb__(**kwargs)
        except Exception as exc:
            LOG.warning('Could not perform __initdb__: %r', exc)
        super(DataType, self).__init__()
    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects for a given user.

        Creates 4 projects (3 owned by the test user, 1 by another user),
        stores a datatype in the first project, then checks that exactly the
        test user's two remaining projects are offered as link targets: the
        datatype's own project and the other user's project are excluded.
        """
        # Precondition: the test user starts with no projects.
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reseted!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        # Projects 0-2 belong to the test user, project 3 to the other user.
        for i in range(4):
            test_proj.append(
                TestFactory.create_project(self.test_user if i < 3 else user1,
                                           'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(
            test_proj[0])
        result_meta = {
            DataTypeMetaData.KEY_OPERATION_TYPE: "Upload",
            DataTypeMetaData.KEY_AUTHOR: "John Doe",
            DataTypeMetaData.KEY_SUBJECT: "subj1",
            DataTypeMetaData.KEY_STATE: "test_state",
            DataTypeMetaData.KEY_NODE_TYPE: "test_data",
            DataTypeMetaData.KEY_DATE: "test_date",
            DataTypeMetaData.KEY_GID: generate_guid()
        }

        # Build the minimal algorithm chain (category -> group -> algorithm)
        # required to store a valid operation in project 0.
        entity = dao.store_entity(model.AlgorithmCategory("category"))
        entity = dao.store_entity(
            model.AlgorithmGroup("module", "classname", entity.id))
        entity = dao.store_entity(model.Algorithm(entity.id, "algo"))
        operation = model.Operation(self.test_user.id, test_proj[0].id,
                                    entity.id, "")
        operation = dao.store_entity(operation)
        # The datatype's operation folder must exist on disk.
        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        # NOTE(review): this DataTypeMetaData instance is never used below —
        # looks like a leftover; confirm before removing.
        entity = DataTypeMetaData(result_meta)
        datatype = dao.store_entity(
            model.DataType(module="test_data",
                           subject="subj1",
                           state="test_state",
                           operation_id=operation.id))
        # Link targets must be the user's OTHER projects only (1 and 2).
        linkable = self.project_service.get_linkable_projects_for_user(
            self.test_user.id, str(datatype.id))[0]
        self.assertEqual(len(linkable), 2,
                         "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)
Example #9
0
 def _internal_pipeline_thread(self, dti_scans, current_project, current_user, number_of_threads=1):
     """
     Actual Fire Pipeline execution remotely.

     Full cycle: stage input files extracted from the ``dti_scans`` ZIP onto
     the remote DTI machine, execute the pipeline command there, gather the
     resulting connectivity matrices into ``current_project`` on behalf of
     ``current_user``, then remove all local and remote temporary folders.

     :param dti_scans: path to a ZIP archive holding the DTI inputs
         (Scans, EigenVectors, FA, MD, Seg2DTI entries)
     :param current_project: project receiving the resulting connectivity
     :param current_user: user on whose behalf results are imported
     :param number_of_threads: parallelism passed to the remote command
     """
     ### Prepare file-names
     # Unique run token keeps concurrent pipeline launches isolated.
     uq_identifier = "TVB_" + str(utils.generate_guid())
     temp_input_folder = os.path.join(TVBSettings.TVB_TEMP_FOLDER, "IN_PIPELINE_" + uq_identifier)
     temp_output_folder = os.path.join(TVBSettings.TVB_TEMP_FOLDER, "OUT_PIPELINE_" + uq_identifier)
     zip_output = os.path.join(TVBSettings.TVB_TEMP_FOLDER, "Connectivity" + uq_identifier + ".zip")

     remote_input_folder = "/home/" + self.remote_user + "/processing/INPUT_" + uq_identifier + os.path.sep
     remote_output_folder = "/home/" + self.remote_user + "/processing/" + uq_identifier
     result_matrix1 = os.path.join(remote_output_folder, uq_identifier + self.REMOTE_WEIGHTS_FILE)
     result_matrix2 = os.path.join(remote_output_folder, uq_identifier + self.REMOTE_TRACT_FILE)

     # Start from a clean remote workspace for this run.
     # NOTE(review): remote commands are built by string concatenation; the
     # paths are internally generated, but confirm ``remote_user`` can never
     # contain shell metacharacters.
     self._execute_remote("rm -R -f " + remote_input_folder)
     self._execute_remote("mkdir " + remote_input_folder)
     self._execute_remote("rm -R -f " + remote_output_folder)

     ### Prepare and Copy required Input Files on the DTI remote machine.
     os.mkdir(temp_input_folder)
     prefix_files = os.path.split(os.path.dirname(remote_input_folder))[1]

     # Extract each expected input group from the uploaded ZIP; ``dti_scans``
     # is rebound from the ZIP path to the extracted scans file name(s).
     zip_arch = zipfile.ZipFile(dti_scans)
     dti_scans = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "Scans", 1, False)
     dti_ev = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "EigenVectors", 3)
     dti_fa = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "FA", 1)
     dti_md = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "MD", 1)
     dti_seg = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "Seg2DTI", 1)
     for local_name in os.listdir(temp_input_folder):
         self._copy_file_remote(os.path.join(temp_input_folder, local_name), 
                                os.path.join(remote_input_folder, local_name))

     ### Execute remote DTI Pipeline command.
     # The EigenVectors step produced 3 space-separated file names.
     dti_ev = dti_ev.split(' ')
     remote_command = self.DTI_PIPELINE_COMMAND % (os.path.split(remote_output_folder)[1], dti_seg, dti_ev[0], 
                                                   dti_ev[1], dti_ev[2], dti_scans, dti_fa, dti_md, number_of_threads)
     self._execute_remote(remote_command)

     # Download result matrices and import them into the project.
     self._gather_results(current_user, current_project, result_matrix1, result_matrix2, 
                          temp_output_folder, zip_output)
     ### Clean left-over files
     self.file_handler.remove_folder(temp_output_folder)
     self.file_handler.remove_folder(temp_input_folder)
     os.remove(zip_output)
     self._execute_remote("rm -R -f " + remote_input_folder)
     self._execute_remote("rm -R -f " + remote_output_folder)
 def _internal_pipeline_thread(self, dti_scans, current_project, current_user, number_of_th=1):
     """
     Actual Fire Pipeline execution remotely.

     Full cycle: stage input files extracted from the ``dti_scans`` ZIP onto
     the remote DTI machine, execute the pipeline command there, gather the
     resulting connectivity matrices into ``current_project`` on behalf of
     ``current_user``, then remove all local and remote temporary folders.

     :param dti_scans: path to a ZIP archive holding the DTI inputs
         (Scans, EigenVectors, FA, MD, Seg2DTI entries)
     :param current_project: project receiving the resulting connectivity
     :param current_user: user on whose behalf results are imported
     :param number_of_th: parallelism passed to the remote command
     """
     ### Prepare file-names
     # Unique run token keeps concurrent pipeline launches isolated.
     uq_identifier = "TVB_" + str(utils.generate_guid())
     temp_input_folder = os.path.join(TVBSettings.TVB_TEMP_FOLDER, "IN_PIPELINE_" + uq_identifier)
     temp_output_folder = os.path.join(TVBSettings.TVB_TEMP_FOLDER, "OUT_PIPELINE_" + uq_identifier)
     zip_output = os.path.join(TVBSettings.TVB_TEMP_FOLDER, "Connectivity" + uq_identifier + ".zip")

     remote_input_folder = "/home/" + self.remote_user + "/processing/INPUT_" + uq_identifier + os.path.sep
     remote_output_folder = "/home/" + self.remote_user + "/processing/" + uq_identifier
     result_matrix1 = os.path.join(remote_output_folder, uq_identifier + self.REMOTE_WEIGHTS_FILE)
     result_matrix2 = os.path.join(remote_output_folder, uq_identifier + self.REMOTE_TRACT_FILE)

     # Start from a clean remote workspace for this run.
     # NOTE(review): remote commands are built by string concatenation; the
     # paths are internally generated, but confirm ``remote_user`` can never
     # contain shell metacharacters.
     self._execute_remote("rm -R -f " + remote_input_folder)
     self._execute_remote("mkdir " + remote_input_folder)
     self._execute_remote("rm -R -f " + remote_output_folder)

     ### Prepare and Copy required Input Files on the DTI remote machine.
     os.mkdir(temp_input_folder)
     prefix_files = os.path.split(os.path.dirname(remote_input_folder))[1]

     # Extract each expected input group from the uploaded ZIP; ``dti_scans``
     # is rebound from the ZIP path to the extracted scans file name(s).
     zip_arch = zipfile.ZipFile(dti_scans)
     dti_scans = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "Scans", 1, False)
     dti_ev = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "EigenVectors", 3)
     dti_fa = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "FA", 1)
     dti_md = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "MD", 1)
     dti_seg = self._process_input_zip(zip_arch, temp_input_folder, prefix_files, "Seg2DTI", 1)
     for local_name in os.listdir(temp_input_folder):
         self._copy_file_remote(os.path.join(temp_input_folder, local_name), 
                                os.path.join(remote_input_folder, local_name))

     ### Execute remote DTI Pipeline command.
     # The EigenVectors step produced 3 space-separated file names.
     dti_ev = dti_ev.split(' ')
     remote_command = self.DTI_PIPELINE_COMMAND % (os.path.split(remote_output_folder)[1], dti_seg, dti_ev[0], 
                                                   dti_ev[1], dti_ev[2], dti_scans, dti_fa, dti_md, number_of_th)
     self._execute_remote(remote_command)

     # Download result matrices and import them into the project.
     self._gather_results(current_user, current_project, result_matrix1, result_matrix2, 
                          temp_output_folder, zip_output)
     ### Clean left-over files
     self.file_handler.remove_folder(temp_output_folder)
     self.file_handler.remove_folder(temp_input_folder)
     os.remove(zip_output)
     self._execute_remote("rm -R -f " + remote_input_folder)
     self._execute_remote("rm -R -f " + remote_output_folder)
Example #11
0
    def index(self):
        """Render the landing page of the dynamic-model editor."""
        gid = utils.generate_guid()
        adapter = _LeftFragmentAdapter(self.available_models)
        # WARN: if this input tree ever contains datatype references, correct
        # rendering would require flow_service.prepare_parameters instead.
        tree = adapter.prepare_param_names(adapter.get_input_tree())

        template_params = {'title': "Dynamic model",
                           'mainContent': 'burst/dynamic',
                           'input_tree': tree,
                           'dynamic_gid': gid}
        self.fill_default_attributes(template_params)

        # Prime the session-cached dynamic and sync its integrator noise
        # with the selected model before first display.
        cached = self.get_cached_dynamic(gid)
        self._configure_integrator_noise(cached.integrator, cached.model)
        return template_params
    def index(self):
        """Render the landing page of the dynamic-model editor."""
        gid = utils.generate_guid()
        adapter = _LeftFragmentAdapter(self.available_models)
        # WARN: if this input tree ever contains datatype references, correct
        # rendering would require flow_service.prepare_parameters instead.
        tree = adapter.prepare_param_names(adapter.get_input_tree())

        template_params = {'title': "Dynamic model",
                           'mainContent': 'burst/dynamic',
                           'input_tree': tree,
                           'dynamic_gid': gid}
        self.fill_default_attributes(template_params)

        # Prime the session-cached dynamic and sync its integrator noise
        # with the selected model before first display.
        cached = self.get_cached_dynamic(gid)
        self._configure_integrator_noise(cached.integrator, cached.model)
        return template_params
Example #13
0
    def index(self):
        """Render the landing page of the dynamic-model editor."""
        gid = utils.generate_guid()

        # Forms composing the configuration column of the page.
        template_params = {'title': "Dynamic model",
                           'mainContent': 'burst/dynamic',
                           'model_name_fragment': _InputTreeFragment(),
                           'model_form': SimulatorModelFragment(),
                           'integrator_form': SimulatorIntegratorFragment(),
                           'dynamic_gid': gid}
        self.fill_default_attributes(template_params)

        # Prime the session-cached dynamic and sync its integrator noise
        # with the selected model before first display.
        cached = self.get_cached_dynamic(gid)
        self._configure_integrator_noise(cached.integrator, cached.model)
        return template_params
 def __init__(self, fk_launched_by, fk_launched_in, fk_from_algo, parameters, meta='',
              status=STATUS_PENDING, start_date=None, completion_date=None, op_group_id=None, additional_info='',
              user_group=None, range_values=None, estimated_disk_size=0):
     """
     Persisted record of an algorithm launch.

     Binds the operation to its user, project and algorithm, snapshots the
     creation time, assigns a fresh GUID and marks the operation visible;
     ``start_date``/``completion_date`` stay as given (``None`` until the
     run advances).
     """
     self.gid = generate_guid()
     self.visible = True
     self.create_date = datetime.datetime.now()
     self.status = status
     self.start_date = start_date
     self.completion_date = completion_date
     self.fk_launched_by = fk_launched_by
     self.fk_launched_in = fk_launched_in
     self.fk_from_algo = fk_from_algo
     self.fk_operation_group = op_group_id
     self.parameters = parameters
     self.meta_data = meta
     self.range_values = range_values
     self.user_group = user_group
     self.additional_info = additional_info
     self.estimated_disk_size = estimated_disk_size
    def index(self):
        """Render the landing page of the dynamic-model editor."""
        gid = utils.generate_guid()
        name_form = _InputTreeFragment()
        prepare = self.algorithm_service.prepare_adapter_form
        model_form = prepare(form_instance=SimulatorModelFragment())
        integrator_form = prepare(form_instance=SimulatorIntegratorFragment())
        descriptions = configure_matjax_doc(self.available_models)

        template_params = {'title': "Dynamic model",
                           'mainContent': 'burst/dynamic',
                           'model_name_fragment': self.render_adapter_form(name_form),
                           'model_form': self.render_adapter_form(model_form),
                           'integrator_form': self.render_adapter_form(integrator_form),
                           'dynamic_gid': gid,
                           'model_description': descriptions}
        self.fill_default_attributes(template_params)

        # Prime the session-cached dynamic and sync its integrator noise
        # with the selected model before first display.
        cached = self.get_cached_dynamic(gid)
        self._configure_integrator_noise(cached.integrator, cached.model)
        return template_params
Example #16
0
 def __init__(self, selected_nodes, labels, project_id, ui_name='Default'):
     """Store a named selection of nodes (indices plus labels) in a project,
     tagged with a freshly generated GUID."""
     self.gid = generate_guid()
     self.fk_in_project = project_id
     self.ui_name = ui_name
     self.selected_nodes = selected_nodes
     self.labels = labels
Example #17
0
 def __init__(self, name, fk_admin, description=''):
     """Create a project entity owned by ``fk_admin``, with a fresh GUID."""
     self.gid = generate_guid()
     self.name = name
     self.description = description
     self.fk_admin = fk_admin
 def __init__(self, name, fk_admin, description=''):
     """Create a project entity owned by ``fk_admin``: fresh GUID plus the
     current code version recorded at creation time."""
     self.gid = utils.generate_guid()
     self.version = TvbProfile.current.version.PROJECT_VERSION
     self.name = name
     self.description = description
     self.fk_admin = fk_admin
 def __init__(self, name, fk_admin, description=''):
     """Create a project entity owned by ``fk_admin``, with a fresh GUID."""
     self.gid = generate_guid()
     self.name = name
     self.description = description
     self.fk_admin = fk_admin
 def __init__(self, selected_nodes, labels, project_id, ui_name='Default'):
     """Store a named selection of nodes (indices plus labels) in a project,
     tagged with a freshly generated GUID."""
     self.gid = generate_guid()
     self.fk_in_project = project_id
     self.ui_name = ui_name
     self.selected_nodes = selected_nodes
     self.labels = labels