Example #1
    def test_get_inputs_for_group(self, datatype_group_factory,
                                  test_adapter_factory):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        """
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        conn = TestFactory.import_zip_connectivity(self.test_user,
                                                   self.test_project, zip_path)
        conn.visible = False
        dao.store_entity(conn)

        group = OperationGroup(self.test_project.id, "group", "range1[1..2]")
        group = dao.store_entity(group)

        view_model = BaseBCTModel()
        view_model.connectivity = conn.gid
        adapter = ABCAdapter.build_adapter_from_class(
            TransitivityBinaryDirected)
        algorithm = adapter.stored_adapter

        operation1 = Operation(self.test_user.id,
                               self.test_project.id,
                               algorithm.id,
                               json.dumps({'gid': view_model.gid.hex}),
                               op_group_id=group.id)
        operation2 = Operation(self.test_user.id,
                               self.test_project.id,
                               algorithm.id,
                               json.dumps({'gid': view_model.gid.hex}),
                               op_group_id=group.id)
        dao.store_entities([operation1, operation2])

        OperationService()._store_view_model(
            operation1, dao.get_project_by_id(self.test_project.id),
            view_model)
        OperationService()._store_view_model(
            operation2, dao.get_project_by_id(self.test_project.id),
            view_model)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            group.id, self.relevant_filter)
        assert len(inputs) == 0

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert group.id == inputs[0].id, "Retrieved wrong dataType."

        conn.visible = True
        dao.store_entity(conn)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            group.id, self.relevant_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert group.id == inputs[0].id, "Retrieved wrong dataType."
Example #2
 def test_editone_remove(self):
     """
     Test that a project is indeed deleted.
     """
     cherrypy.request.method = "POST"
     self._expect_redirect('/project/viewall', self.project_c.editone,
                           self.test_project.id, delete=True)
     with pytest.raises(NoResultFound):
         dao.get_project_by_id(self.test_project.id)
Example #3
    def store_project(self, current_user, is_create, selected_id, **data):
        """
        We want to create/update a project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {},
                                     None,
                                     error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException(
                "A project can not be renamed while operations are still running!"
            )
        if is_create:
            current_proj = Project(new_name, current_user.id,
                                   data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(
                    current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_all_users(prj_admin, int(page))[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)
        selected_user_ids = data["users"]
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) +
                          ' by user:' + current_user.username)
        return current_proj
Example #4
    def store_project(self, current_user, is_create, selected_id, **data):
        """
        We want to create/update a project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = model.Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_all_users(prj_admin, int(page))[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)
        selected_user_ids = data["users"]
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:' + current_user.username)
        return current_proj
Example #5
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(model.Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
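A hedged usage sketch for the service method above; the project ID is hypothetical, and ProjectService / ProjectServiceException are the names used throughout these examples:

service = ProjectService()
try:
    service.remove_project(42)  # hypothetical project ID
except ProjectServiceException as exc:
    print("Could not remove project:", exc)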
Example #6
    def fire_operation(self, adapter_instance, current_user, project_id,  
                       method_name=ABCAdapter.LAUNCH_METHOD, visible=True, **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser, 
        Current Project and a given set of UI Input Data.
        """
        operation_name = str(adapter_instance.__class__.__name__) + "." + method_name
        try:
#            if OperationService.ATT_UID in data:
#                existent = dao.get_last_data_with_uid(data[OperationService.ATT_UID])
#                if existent is not None:
#                    self.create_link(existent, project_id)
#                    return "Created required links."
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)
            
            result = OperationService().initiate_operation(current_user, project.id, adapter_instance, 
                                                           tmp_folder, method_name, visible, **data)
            self.logger.info("Finished operation:" + operation_name)
            return result

        except TVBException as excep:
            self.logger.error("Could not launch operation " + operation_name + " with the given set of input data!")
            self.logger.exception(excep)
            raise OperationException(excep.message, excep)
Example #7
    def stage_out_to_operation_folder(working_dir, operation, simulator_gid):
        # type: (Storage, Operation, typing.Union[uuid.UUID, str]) -> (list, Operation, str)
        encrypted_files = HPCSchedulerClient._stage_out_results(
            working_dir, simulator_gid)
        encryption_handler = EncryptionHandler(simulator_gid)

        simulation_results = list()
        metric_op = None
        metric_file = None
        for encrypted_file in encrypted_files:
            if os.path.basename(encrypted_file).startswith(
                    DatatypeMeasureH5.file_name_base()):
                metric_op_dir, metric_op = BurstService.prepare_metrics_operation(
                    operation)
                metric_files = encryption_handler.decrypt_files_to_dir(
                    [encrypted_file], metric_op_dir)
                metric_file = metric_files[0]
            else:
                simulation_results.append(encrypted_file)

        project = dao.get_project_by_id(operation.fk_launched_in)
        operation_dir = HPCSchedulerClient.file_handler.get_project_folder(
            project, str(operation.id))
        h5_filenames = EncryptionHandler(simulator_gid).decrypt_files_to_dir(
            simulation_results, operation_dir)
        return h5_filenames, metric_op, metric_file
Example #8
    def stage_out_to_operation_folder(working_dir, operation, simulator_gid):
        # type: (Storage, Operation, typing.Union[uuid.UUID, str]) -> (list, Operation, str)
        encrypted_files = HPCSchedulerClient._stage_out_results(
            working_dir, simulator_gid)
        encryption_handler = EncryptionHandler(simulator_gid)

        simulation_results = list()
        metric_encrypted_file = None
        metric_vm_encrypted_file = None
        for encrypted_file in encrypted_files:
            if os.path.basename(encrypted_file).startswith(
                    DatatypeMeasureH5.file_name_base()):
                metric_encrypted_file = encrypted_file
            elif os.path.basename(encrypted_file).startswith(
                    TimeseriesMetricsAdapterModel.__name__):
                metric_vm_encrypted_file = encrypted_file
            else:
                simulation_results.append(encrypted_file)

        metric_op, metric_file = HPCSchedulerClient._handle_metric_results(
            metric_encrypted_file, metric_vm_encrypted_file, operation,
            encryption_handler)
        project = dao.get_project_by_id(operation.fk_launched_in)
        operation_dir = HPCSchedulerClient.file_handler.get_project_folder(
            project, str(operation.id))
        h5_filenames = EncryptionHandler(simulator_gid).decrypt_files_to_dir(
            simulation_results, operation_dir)
        return h5_filenames, metric_op, metric_file
Example #9
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_datatypes = dao.get_datatypes_in_project(project_id)

            # Delete datatypes one by one in the reversed order of their creation date
            project_datatypes.sort(key=lambda dt: dt.create_date, reverse=True)
            links = []
            for one_data in project_datatypes:
                new_links = self.remove_datatype(project_id, one_data.gid, True, links)
                if new_links is not None:
                    # Keep track of links so we don't create the same link more than once
                    links.extend(new_links)

            self.storage_interface.remove_project(project2delete)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))
Example #10
    def fire_operation(self,
                       adapter_instance,
                       current_user,
                       project_id,
                       visible=True,
                       view_model=None):
        """
        Launch an operation, specified by AdapterInstance, for current_user and project with project_id.
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)

            result = self.initiate_operation(current_user,
                                             project,
                                             adapter_instance,
                                             visible,
                                             model_view=view_model)
            self.logger.info("Finished operation launch:" + operation_name)
            return result

        except TVBException as excep:
            self.logger.exception(
                "Could not launch operation " + operation_name +
                " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
        except Exception as excep:
            self.logger.exception("Could not launch operation " +
                                  operation_name +
                                  " with the given set of input data!")
            raise OperationException(str(excep))
Example #12
    def stage_out_to_operation_folder(working_dir, operation, simulator_gid):
        # type: (Storage, Operation, typing.Union[uuid.UUID, str]) -> (list, Operation, str)
        encrypted_files = HPCSchedulerClient._stage_out_results(
            working_dir, simulator_gid)

        simulation_results = list()
        metric_encrypted_file = None
        metric_vm_encrypted_file = None
        for encrypted_file in encrypted_files:
            if os.path.basename(encrypted_file).startswith(
                    DatatypeMeasureH5.file_name_base()):
                metric_encrypted_file = encrypted_file
            elif os.path.basename(encrypted_file).startswith(
                    MEASURE_METRICS_MODEL_CLASS):
                metric_vm_encrypted_file = encrypted_file
            else:
                simulation_results.append(encrypted_file)

        encryption_handler = StorageInterface.get_encryption_handler(
            simulator_gid)
        metric_op, metric_file = HPCSchedulerClient._handle_metric_results(
            metric_encrypted_file, metric_vm_encrypted_file, operation,
            encryption_handler)
        project = dao.get_project_by_id(operation.fk_launched_in)
        operation_dir = HPCSchedulerClient.storage_interface.get_project_folder(
            project.name, str(operation.id))
        h5_filenames = encryption_handler.decrypt_files_to_dir(
            simulation_results, operation_dir)
        encryption_handler.cleanup_encryption_handler()
        LOGGER.info("Decrypted h5: {}".format(h5_filenames))
        LOGGER.info("Metric op: {}".format(metric_op))
        LOGGER.info("Metric file: {}".format(metric_file))

        return h5_filenames, metric_op, metric_file
Example #13
File: forms.py Project: yop0/tvb-root
    def fill_from_post(self, post_data):
        super(TraitUploadField, self).fill_from_post(post_data)

        if self.data.file is None:
            self.data = None
            return

        project = dao.get_project_by_id(self.owner.project_id)
        temporary_storage = self.files_helper.get_project_folder(
            project, self.files_helper.TEMP_FOLDER)

        file_name = None
        try:
            uq_name = utils.date2string(datetime.now(), True) + '_' + str(0)
            file_name = TEMPORARY_PREFIX + uq_name + '_' + self.data.filename
            file_name = os.path.join(temporary_storage, file_name)

            with open(file_name, 'wb') as file_obj:
                file_obj.write(self.data.file.read())
        except Exception as excep:
            # TODO: is this handled properly?
            self.files_helper.remove_files([file_name])
            excep.message = 'Could not continue: Invalid input files'
            raise excep

        if file_name:
            self.data = file_name
            self.owner.temporary_files.append(file_name)
Example #14
def launch_simulation_workflow(json_path, prj_id):
    """

    :param json_path: Path to a local JSON file exported from the GUI
    :param prj_id: ID of a project that must already exist in the DB; it can be taken from the web interface
    """
    project = dao.get_project_by_id(prj_id)

    with open(json_path, 'rb') as input_file:
        simulation_json = input_file.read()
        simulation_json = json.loads(simulation_json)
        LOG.info("Simulation JSON loaded from file '%s': \n  %s", json_path,
                 simulation_json)

        importer = ImportService()
        simulation_config = importer.load_burst_entity(simulation_json, prj_id)
        LOG.info("Simulation Workflow configuration object loaded: \n  %s",
                 simulation_config)

        flow_service = FlowService()
        stored_adapter = flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        LOG.info("Found Simulation algorithm in local DB: \n   %s",
                 stored_adapter)

        burst_service = BurstService()
        burst_service.launch_burst(simulation_config, 0, stored_adapter.id,
                                   project.administrator.id, LAUNCH_NEW)
        LOG.info(
            "Check in the web GUI for your operation. It should be starting now ..."
        )
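A minimal invocation sketch, assuming a command-profile script and a locally available GUI export; both the path and the project ID below are hypothetical:

if __name__ == "__main__":
    # The project with this ID must already exist in the DB.
    launch_simulation_workflow("/tmp/exported_simulation.json", prj_id=1)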
Example #15
def fire_simulation(project_id=1, **kwargs):
    project = dao.get_project_by_id(project_id)
    flow_service = FlowService()

    # below the holy procedure to launch with the correct parameters taken from the defaults
    stored_adapter = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    simulator_adapter = ABCAdapter.build_adapter(stored_adapter)
    flatten_interface = simulator_adapter.flaten_input_interface()
    itree_mngr = flow_service.input_tree_manager
    prepared_flatten_interface = itree_mngr.fill_input_tree_with_options(flatten_interface, project.id,
                                                                         stored_adapter.fk_category)
    launch_args = {}
    for entry in prepared_flatten_interface:
        value = entry['default']
        if isinstance(value, dict):
            value = str(value)
        if hasattr(value, 'tolist'):
            value = value.tolist()
        launch_args[entry['name']] = value
    launch_args.update(**kwargs)
    # end of magic

    launched_operation = flow_service.fire_operation(simulator_adapter, project.administrator,
                                                     project.id, **launch_args)[0]
    return launched_operation
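A usage sketch: launch with the prepared defaults and override one of them through kwargs. The parameter name below is hypothetical; valid names are whatever the flattened input tree exposes in the installed TVB version.

# Override a single default; any key from the flattened interface works here.
operation = fire_simulation(project_id=1, simulation_length=100)
print("Launched operation:", operation.id)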
Example #17
 def store_project(self, current_user, is_create, selected_id, **data):
     """
     We want to create/update a project entity.
     """
     #Validate Unique Name
     new_name = data["name"]
     if len(new_name) < 1:
         raise ProjectServiceException("Invalid project name!")
     projects_no = dao.count_projects_for_name(new_name, selected_id)
     if projects_no > 0:
         err = {'name': 'Please choose another name, this one is used!'}
         raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
     started_operations = dao.get_operation_numbers(selected_id)[1]
     if started_operations > 0:
         raise ProjectServiceException("A project can not be renamed while operations are still running!")
     if is_create:
         current_proj = model.Project(new_name, current_user.id, data["description"])
         self.structure_helper.get_project_folder(current_proj)
     else:
         try:
             current_proj = dao.get_project_by_id(selected_id)
         except Exception as excep:
             self.logger.exception("An error has occurred!")
             raise ProjectServiceException(str(excep))
         if current_proj.name != new_name:
             self.structure_helper.rename_project_structure(current_proj.name, new_name)
         current_proj.name = new_name
         current_proj.description = data["description"]
Example #18
 def extract_operation_data(self, operation):
     operation = dao.get_operation_by_id(operation.id)
     project = dao.get_project_by_id(operation.fk_launched_in)
     self.storage_path = self.file_handler.get_project_folder(project, str(operation.id))
     self.operation_id = operation.id
     self.current_project_id = operation.project.id
     self.user_id = operation.fk_launched_by
Example #19
    def fire_operation(self,
                       adapter_instance,
                       current_user,
                       project_id,
                       visible=True,
                       **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser, 
        Current Project and a given set of UI Input Data.
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(
                project, self.file_helper.TEMP_FOLDER)

            result = OperationService().initiate_operation(
                current_user, project.id, adapter_instance, tmp_folder,
                visible, **data)
            self.logger.info("Finished operation:" + operation_name)
            return result

        except TVBException as excep:
            self.logger.exception(
                "Could not launch operation " + operation_name +
                " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
Example #20
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))
Example #21
 def test_find_project_happy_flow(self):
     """
     Standard flow for finding a project by its id.
     """
     initial_projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(initial_projects), 0,
                      "Database reset probably failed!")
     inserted_project = TestFactory.create_project(self.test_user,
                                                   'test_project')
     self.assertTrue(
         self.project_service.find_project(inserted_project.id) is not None,
         "Project not found !")
     dao_returned_project = dao.get_project_by_id(inserted_project.id)
     service_returned_project = self.project_service.find_project(
         inserted_project.id)
     self.assertEqual(
         dao_returned_project.id, service_returned_project.id,
         "Data returned from service is different from data returned by DAO."
     )
     self.assertEqual(
         dao_returned_project.name, service_returned_project.name,
         "Data returned from service is different than  data returned by DAO."
     )
     self.assertEqual(
         dao_returned_project.description,
         service_returned_project.description,
         "Data returned from service is different from data returned by DAO."
     )
     self.assertEqual(
         dao_returned_project.members, service_returned_project.members,
         "Data returned from service is different from data returned by DAO."
     )
Example #22
 def store_project(self, current_user, is_create, selected_id, **data):
     """
     We want to create/update a project entity.
     """
     #Validate Unique Name
     new_name = data["name"]
     if len(new_name) < 1:
         raise ProjectServiceException("Invalid project name!")
     projects_no = dao.count_projects_for_name(new_name, selected_id)
     if projects_no > 0:
         err = {'name': 'Please choose another name, this one is used!'}
         raise formencode.Invalid("Duplicate Name Error", {},
                                  None,
                                  error_dict=err)
     started_operations = dao.get_operation_numbers(selected_id)[1]
     if started_operations > 0:
         raise ProjectServiceException(
             "A project can not be renamed while operations are still running!"
         )
     if is_create:
         current_proj = model.Project(new_name, current_user.id,
                                      data["description"])
         self.structure_helper.get_project_folder(current_proj)
     else:
         try:
             current_proj = dao.get_project_by_id(selected_id)
         except Exception as excep:
             self.logger.exception("An error has occurred!")
             raise ProjectServiceException(str(excep))
         if current_proj.name != new_name:
             self.structure_helper.rename_project_structure(
                 current_proj.name, new_name)
         current_proj.name = new_name
         current_proj.description = data["description"]
Example #23
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning(
                "Attempt to delete DT[%s] which no longer exists." %
                datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid,
                                                skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(
                model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid,
                                            skip_validation)

        ## Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id,
                                                  "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(model.Operation,
                                                    operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(
                project.name, datatype.fk_from_operation)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " +
                                          str(datatype_gid))
Example #24
 def _update_vm_generic_operation_tag(view_model, operation):
     project = dao.get_project_by_id(operation.fk_launched_in)
     storage_path = FilesHelper().get_project_folder(
         project, str(operation.id))
     h5_path = h5.path_for(storage_path, ViewModelH5, view_model.gid,
                           type(view_model).__name__)
     with ViewModelH5(h5_path, view_model) as vm_h5:
         vm_h5.operation_tag.store(operation.user_group)
Example #25
    def __import_time_series_csv_datatype(self, hrf_folder, connectivity_gid,
                                          patient, user_tag):
        path = os.path.join(hrf_folder, self.TIME_SERIES_CSV_FILE)
        with open(path) as csv_file:
            csv_reader = csv.reader(
                csv_file, delimiter=CSVDelimiterOptionsEnum.COMMA.value)
            ts = list(csv_reader)

        ts_data = np.array(ts, dtype=np.float64).reshape(
            (len(ts), 1, len(ts[0]), 1))
        ts_time = np.random.rand(ts_data.shape[0], )

        project = dao.get_project_by_id(self.current_project_id)

        ts_gid = uuid.uuid4()
        h5_path = "TimeSeries_{}.h5".format(ts_gid.hex)
        operation_folder = self.storage_interface.get_project_folder(
            project.name, str(self.operation_id))
        h5_path = os.path.join(operation_folder, h5_path)

        conn = h5.load_from_gid(connectivity_gid)
        ts = TimeSeriesRegion()
        ts.data = ts_data
        ts.time = ts_time
        ts.gid = ts_gid
        ts.connectivity = conn
        generic_attributes = GenericAttributes()
        generic_attributes.user_tag_1 = user_tag
        generic_attributes.state = DEFAULTDATASTATE_RAW_DATA

        with TimeSeriesRegionH5(h5_path) as ts_h5:
            ts_h5.store(ts)
            ts_h5.nr_dimensions.store(4)
            ts_h5.subject.store(patient)
            ts_h5.store_generic_attributes(generic_attributes)

        ts_index = TimeSeriesIndex()
        ts_index.gid = ts_gid.hex
        ts_index.fk_from_operation = self.operation_id
        ts_index.time_series_type = "TimeSeriesRegion"
        ts_index.data_length_1d = ts_data.shape[0]
        ts_index.data_length_2d = ts_data.shape[1]
        ts_index.data_length_3d = ts_data.shape[2]
        ts_index.data_length_4d = ts_data.shape[3]
        ts_index.data_ndim = len(ts_data.shape)
        ts_index.sample_period_unit = 'ms'
        ts_index.sample_period = TimeSeries.sample_period.default
        ts_index.sample_rate = 1024.0
        ts_index.subject = patient
        ts_index.state = DEFAULTDATASTATE_RAW_DATA
        ts_index.labels_ordering = json.dumps(
            list(TimeSeries.labels_ordering.default))
        ts_index.labels_dimensions = json.dumps(
            TimeSeries.labels_dimensions.default)
        ts_index.visible = False  # we don't want to show these TimeSeries because they are dummy
        dao.store_entity(ts_index)

        return ts_gid
Example #26
File: lab.py Project: liadomide/tvb-root
def fire_operation(project_id, adapter_instance, view_model):
    TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)
    project = dao.get_project_by_id(project_id)

    # launch an operation and have the results stored both in DB and on disk
    launched_operation = OperationService().fire_operation(adapter_instance, project.administrator,
                                                           project.id, view_model=view_model)
    LOG.info("Operation launched....")
    return launched_operation
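A sketch of driving this helper with an uploader, reusing the adapter and view-model classes from examples #32 and #35; the zip path is hypothetical:

importer = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
view_model = ZIPConnectivityImporterModel()
view_model.uploaded = "/tmp/connectivity_66.zip"  # hypothetical local path
launched = fire_operation(1, importer, view_model)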
Example #27
 def find_project(self, project_id):
     """
     Simply retrieve Project entity from Database.
     """
     try:
         return dao.get_project_by_id(project_id)
     except Exception as excep:
         self.logger.exception("Given Project ID was not found in DB!")
         raise ProjectServiceException(str(excep))
Example #28
 def find_project(self, project_id):
     """
     Simply retrieve Project entity from Database.
     """
     try:
         return dao.get_project_by_id(project_id)
     except Exception as excep:
         self.logger.exception("Given Project ID was not found in DB!")
         raise ProjectServiceException(str(excep))
Example #29
 def prepare_next_step(self, last_executed_op_id):
     """
     If the operation with id 'last_executed_op_id' was produced by a workflow step,
     this method launches the operation corresponding to the next step in the workflow.
     """
     try:
         current_step, next_workflow_step = self._get_data(last_executed_op_id)
         if next_workflow_step is not None:
             operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
             dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
             if len(dynamic_param_names) > 0:
                 op_params = json.loads(operation.parameters)
                 for param_name in dynamic_param_names:
                     dynamic_param = op_params[param_name]
                     former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
                                                                       dynamic_param[wf_cfg.STEP_INDEX_KEY])
                     if type(dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]) is IntType: 
                         datatypes = dao.get_results_for_operation(former_step.fk_operation)
                         op_params[param_name] = datatypes[dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]].gid
                     else:
                         previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                         op_params[param_name] = json.loads(previous_operation.parameters)[
                             dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]]
                 operation.parameters = json.dumps(op_params)
                 operation = dao.store_entity(operation)
             return operation.id
         else:
             if current_step is not None:
                 current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
                 current_workflow.status = current_workflow.STATUS_FINISHED
                 dao.store_entity(current_workflow)
                 burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                 parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
                 all_finished = True
                 for workflow in parallel_workflows:
                     if workflow.status == workflow.STATUS_STARTED:
                         all_finished = False
                 if all_finished:
                     self.mark_burst_finished(burst_entity, success=True)
                     disk_size = dao.get_burst_disk_size(burst_entity.id)  # Transform from kB to MB
                     if disk_size > 0:
                         user = dao.get_project_by_id(burst_entity.fk_project).administrator
                         user.used_disk_space = user.used_disk_space + disk_size
                         dao.store_entity(user)
             else:
                 operation = dao.get_operation_by_id(last_executed_op_id)
                 disk_size = dao.get_disk_size_for_operation(operation.id)  # Transform from kB to MB
                 if disk_size > 0:
                     user = dao.get_user_by_id(operation.fk_launched_by)
                     user.used_disk_space = user.used_disk_space + disk_size
                     dao.store_entity(user)
         return None
     except Exception as excep:
         self.logger.error(excep)
         self.logger.exception(excep)
         raise WorkflowInterStepsException(excep)
Example #30
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return
        user = dao.get_user_for_datatype(datatype.id)
        freed_space = datatype.disk_size or 0
        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            freed_space = dao.get_datatype_group_disk_size(datatype.id)
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)
            freed_space = dao.get_datatype_group_disk_size(datatype.id)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(model.Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

        user.used_disk_space = user.used_disk_space - freed_space
        dao.store_entity(user)
Example #31
def import_conn_zip(project_id, zip_path):
    project = dao.get_project_by_id(project_id)

    importer = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
    params = {"_uploaded": zip_path}
    form = ZIPConnectivityImporterForm()
    form.uploaded.data = zip_path
    importer.submit_form(form)

    FlowService().fire_operation(importer, project.administrator, project_id, **params)
Example #32
File: lab.py Project: liadomide/tvb-root
def import_conn_zip(project_id, zip_path):

    TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)
    project = dao.get_project_by_id(project_id)

    importer = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
    view_model = ZIPConnectivityImporterModel()
    view_model.uploaded = zip_path

    return OperationService().fire_operation(importer, project.administrator, project_id, view_model=view_model)
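Usage then reduces to a couple of lines once a project exists; the zip path below mirrors the tvb_data construction from example #1:

import os
import tvb_data

zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                        'connectivity', 'connectivity_66.zip')
operation = import_conn_zip(1, zip_path)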
Example #33
    def initiate_prelaunch(self, operation, adapter_instance):
        """
        Public method.
        This should be the common point for calling an adapter method.
        """
        result_msg = ""
        temp_files = []
        try:
            operation = dao.get_operation_by_id(operation.id)  # Load Lazy fields

            disk_space_per_user = TvbProfile.current.MAX_DISK_SPACE
            pending_op_disk_space = dao.compute_disk_size_for_started_ops(operation.fk_launched_by)
            user_disk_space = dao.compute_user_generated_disk_size(operation.fk_launched_by)  # From kB to Bytes
            available_space = disk_space_per_user - pending_op_disk_space - user_disk_space

            view_model = adapter_instance.load_view_model(operation)
            try:
                form = adapter_instance.get_form()
                form = form() if isclass(form) else form
                fields = form.get_upload_field_names()
                project = dao.get_project_by_id(operation.fk_launched_in)
                tmp_folder = self.storage_interface.get_temp_folder(project.name)
                for upload_field in fields:
                    if hasattr(view_model, upload_field):
                        file = getattr(view_model, upload_field)
                        if file.startswith(tmp_folder) or file.startswith(TvbProfile.current.TVB_TEMP_FOLDER):
                            temp_files.append(file)
            except AttributeError:
                # Skip if we don't have upload fields on current form
                pass
            result_msg, nr_datatypes = adapter_instance._prelaunch(operation, view_model, available_space)
            operation = dao.get_operation_by_id(operation.id)
            operation.mark_complete(STATUS_FINISHED)
            dao.store_entity(operation)

            self._update_vm_generic_operation_tag(view_model, operation)
            self._remove_files(temp_files)

        except zipfile.BadZipfile as excep:
            msg = "The uploaded file is not a valid ZIP!"
            self._handle_exception(excep, temp_files, msg, operation)
        except TVBException as excep:
            self._handle_exception(excep, temp_files, excep.message, operation)
        except MemoryError:
            msg = ("Could not execute operation because there is not enough free memory." +
                   " Please adjust operation parameters and re-launch it.")
            self._handle_exception(Exception(msg), temp_files, msg, operation)
        except Exception as excep1:
            msg = "Could not launch Operation with the given input data!"
            self._handle_exception(excep1, temp_files, msg, operation)

        if operation.fk_operation_group and 'SimulatorAdapter' in operation.algorithm.classname:
            next_op = self._prepare_metric_operation(operation)
            self.launch_operation(next_op.id)
        return result_msg
Example #34
def import_conn_zip(project_id, zip_path):
    project = dao.get_project_by_id(project_id)
    group = dao.get_algorithm_by_module(
        'tvb.adapters.uploaders.zip_connectivity_importer',
        'ZIPConnectivityImporter')
    importer = ABCAdapter.build_adapter(group)
    ### Launch Operation
    FlowService().fire_operation(importer,
                                 project.administrator,
                                 project_id,
                                 uploaded=zip_path)
Example #35
def import_conn_zip(project_id, zip_path):
    project = dao.get_project_by_id(project_id)

    importer = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
    view_model = ZIPConnectivityImporterModel()
    view_model.uploaded = zip_path

    FlowService().fire_operation(importer,
                                 project.administrator,
                                 project_id,
                                 view_model=view_model)
Example #36
    def _update_vm_generic_operation_tag(view_model, operation):
        project = dao.get_project_by_id(operation.fk_launched_in)
        h5_path = h5.path_for(operation.id, ViewModelH5, view_model.gid,
                              project.name,
                              type(view_model).__name__)

        if not os.path.exists(h5_path):
            return

        with ViewModelH5(h5_path, view_model) as vm_h5:
            vm_h5.operation_tag.store(operation.user_group)
Example #37
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if burst is still running) or Remove the burst given by burst_id.
        :returns: True when the burst was removed, False when it was only cancelled.
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        ## Remove each DataType in current burst.
        ## We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        ## Get operations linked to current burst before removing the burst or else
        ##    the burst won't be there to identify operations any more.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        #Remove burst first to delete work-flow steps which still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid,
                                    False)

        ## Remove all Operations remained.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)

        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                ### Operation removed cascaded.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup,
                                                      oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(
                        model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(
                project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException(
                "Could not remove Burst because a linked operation could not be dropped!!"
            )
        return True
Example #38
def get_gifty_file_name(project_id, desired_name):
    """
    Compute a non-existent file name in the TEMP folder of the given project.
    Try desired_name first; if it already exists, append a number.
    """
    if project_id:
        project = dao.get_project_by_id(project_id)
        file_helper = FilesHelper()
        temp_path = file_helper.get_project_folder(project, FilesHelper.TEMP_FOLDER)
        return get_unique_file_name(temp_path, desired_name)[0]
    return get_unique_file_name(cfg.TVB_STORAGE, desired_name)[0]
Example #39
def get_gifty_file_name(project_id, desired_name):
    """
    Compute a non-existent file name in the TEMP folder of the given project.
    Try desired_name first; if it already exists, append a number.
    """
    if project_id:
        project = dao.get_project_by_id(project_id)
        file_helper = FilesHelper()
        temp_path = file_helper.get_project_folder(project,
                                                   FilesHelper.TEMP_FOLDER)
        return get_unique_file_name(temp_path, desired_name)[0]
    return get_unique_file_name(cfg.TVB_STORAGE, desired_name)[0]
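A sketch of the intended collision handling, assuming a project with ID 1 exists: the helper returns a path that does not exist yet, so a second request for the same name differs only after the first file has actually been created.

path_1 = get_gifty_file_name(1, "surface.gii")
open(path_1, 'w').close()               # create it, so the name is now taken
path_2 = get_gifty_file_name(1, "surface.gii")
assert path_1 != path_2                 # a numeric suffix was appended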
Example #40
 def __init__(self, overwrites=None, settings_file=None):
     """ Parameters can be overwritten either from a settigns file or from a dictionary. """
     if overwrites is not None:
         self.overwrites.update(overwrites)
     if settings_file is not None:
         settings = open(settings_file).read()
         for line in settings.split('\n'):
             key, value = line.split('=')
             self.overwrites[key.strip()] = value.strip()
     if KEY_PROJECT not in self.overwrites:
         raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
     self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
     self.flow_service = FlowService()
     self.operation_service = OperationService()
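A construction sketch; the listing omits the class name, so Launcher below is hypothetical, and KEY_PROJECT is the module-level constant referenced above:

# Overwrite parameters directly from a dictionary instead of a settings file.
launcher = Launcher(overwrites={KEY_PROJECT: 1})  # hypothetical class name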
Example #41
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if burst is still running) or Remove the burst given by burst_id.
        :returns: True when the burst was removed, False when it was only cancelled.
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        ## Remove each DataType in current burst.
        ## We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        ## Get operations linked to current burst before removing the burst or else
        ##    the burst won't be there to identify operations any more.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        # Remove burst first to delete work-flow steps which still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid, False)

        ## Remove all Operations remained.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)

        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                ### Operation was already removed by the cascade.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!!")
        return True
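A hedged usage sketch for the method above, assuming it lives on BurstService (consistent with the burst examples elsewhere in this collection) and that a burst with id 42 exists; both details are illustrative:

burst_service = BurstService()
# False means the burst was still running and was only cancelled; True means it was removed.
removed = burst_service.cancel_or_remove_burst(42)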
Example #42
 def test_find_project_happy_flow(self):
     """
     Standard flow for finding a project by its id.
     """
     initial_projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
     inserted_project = TestFactory.create_project(self.test_user, 'test_project')
     self.assertTrue(self.project_service.find_project(inserted_project.id) is not None, "Project not found!")
     dao_returned_project = dao.get_project_by_id(inserted_project.id)
     service_returned_project = self.project_service.find_project(inserted_project.id)
     self.assertEqual(dao_returned_project.id, service_returned_project.id,
                      "Data returned from service is different from data returned by DAO.")
     self.assertEqual(dao_returned_project.name, service_returned_project.name,
                      "Data returned from service is different from data returned by DAO.")
     self.assertEqual(dao_returned_project.description, service_returned_project.description,
                      "Data returned from service is different from data returned by DAO.")
     self.assertEqual(dao_returned_project.members, service_returned_project.members,
                      "Data returned from service is different from data returned by DAO.")
Example #43
 def get_users_for_project(self, user_name, project_id, page=1):
     """
     Return tuple: (All Users except the project administrator, Project Members).
     Parameter "user_name" is the current user. 
     Parameter "user_name" is used for new projects (project_id is None). 
     When "project_id" not None, parameter "user_name" is ignored.       
     """
     try:
         admin_name = user_name
         if project_id is not None:
             project = dao.get_project_by_id(project_id)
             if project is not None:
                 admin_name = project.administrator.username
         all_users, total_pages = self.retrieve_all_users(admin_name, page)
         members = dao.get_members_of_project(project_id)
         return all_users, members, total_pages
     except Exception as excep:
         self.logger.exception("Invalid userName or project identifier")
         raise UsernameException(str(excep))
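A hedged call sketch, assuming `user_service` is an instance of the service defining the method above; the user name and project id are illustrative:

# First page of candidate users plus the current members for project 1.
all_users, members, total_pages = user_service.get_users_for_project("admin", 1, page=1)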
Example #44
def import_h5(file_path, project_id):

    flow_service = FlowService()

    ## The project ID must exist in the DB; it can be taken from the web interface:
    project = dao.get_project_by_id(project_id)

    adapter_instance = ABCAdapter.build_adapter_from_class(TVBImporter)

    ## Prepare the input arguments as if they were coming from a web UI submit:
    launch_args = {"data_file": file_path}

    print "We will try to import file at path " + file_path
    ## launch an operation and have the results stored both in DB and on disk
    launched_operations = flow_service.fire_operation(
        adapter_instance, project.administrator, project.id, **launch_args
    )

    print "Operation launched. Check the web UI"
Example #45
    def fire_operation(self, adapter_instance, current_user, project_id, visible=True, **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser, 
        Current Project and a given set of UI Input Data.
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)
            
            result = OperationService().initiate_operation(current_user, project.id, adapter_instance, 
                                                           tmp_folder, visible, **data)
            self.logger.info("Finished operation:" + operation_name)
            return result

        except TVBException as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data, because: " + str(excep))
            raise OperationException(str(excep), excep)
Example #46
 def remove_project(self, project_id):
     """
     Remove Project from DB and File Storage.
     """
     try:
         project2delete = dao.get_project_by_id(project_id)
         project_bursts = dao.get_bursts_for_project(project_id)
         for burst in project_bursts:
             dao.remove_entity(burst.__class__, burst.id)
         project_datatypes = dao.get_datatypes_info_for_project(project_id)
         for one_data in project_datatypes:
             self.remove_datatype(project_id, one_data[9], True)
         self.structure_helper.remove_project_structure(project2delete.name)
         name = project2delete.name
         dao.delete_project(project_id)
         self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + name)
     except RemoveDataTypeError as excep:
         self.logger.error("Invalid DataType to remove!")
         self.logger.exception(excep)
         raise ProjectServiceException(str(excep))
Example #47
def launch_simulation_workflow(json_path, prj_id):
    """

    :param json_path: Path towards a local JSON file exported from GUI
    :param prj_id: This ID of a project needs to exists in DB, and it can be taken from the WebInterface
    """
    project = dao.get_project_by_id(prj_id)

    with open(json_path, 'rb') as input_file:
        simulation_json = input_file.read()
        simulation_json = json.loads(simulation_json)
        LOG.info("Simulation JSON loaded from file '%s': \n  %s", json_path, simulation_json)

        importer = ImportService()
        simulation_config = importer.load_burst_entity(simulation_json, prj_id)
        LOG.info("Simulation Workflow configuration object loaded: \n  %s", simulation_config)

        flow_service = FlowService()
        simulator_algorithm, _ = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        LOG.info("Found Simulation algorithm in local DB: \n   %s", simulator_algorithm)

        burst_service = BurstService()
        burst_service.launch_burst(simulation_config, 0, simulator_algorithm.id, project.administrator.id, LAUNCH_NEW)
        LOG.info("Check in the web GUI for your operation. It should be starting now ...")
Example #48
from tvb.adapters.simulator.simulator_adapter import SimulatorAdapter
from tvb.datatypes.time_series import TimeSeriesRegion
from tvb.datatypes.connectivity import Connectivity

## NOTE: the script also relies on the following imports, omitted in the original;
## the module paths below are assumptions based on the TVB version these examples target.
import sys
from tvb.basic.logger.builder import get_logger
from tvb.core.entities.storage import dao
from tvb.core.entities.transient.structure_entities import DataTypeMetaData
from tvb.core.adapters.abcadapter import ABCAdapter
from tvb.core.services.flow_service import FlowService
from tvb.core.services.operation_service import OperationService


LOG = get_logger(__name__)


## Before starting this, the TVB web interface needs to have been launched at least once (to have a default project, user, etc.)
if __name__ == "__main__":

    flow_service = FlowService()
    operation_service = OperationService()

    ## The project ID must exist in the DB; it can be taken from the web interface:
    project = dao.get_project_by_id(1)

    ## Prepare the Adapter
    adapter_instance = ABCAdapter.build_adapter_from_class(SimulatorAdapter)

    ## Prepare the input arguments as if they were coming from a web UI submit:
    ## TODO create helper methods for working with objects instead of strings.
    connectivity = dao.get_generic_entity(Connectivity, DataTypeMetaData.DEFAULT_SUBJECT, "subject")[0]
    launch_args = dict()
    for f in adapter_instance.flaten_input_interface():
        launch_args[f["name"]] = str(f["default"]) if 'default' in f else None
    launch_args["connectivity"] = connectivity.gid
    launch_args["model_parameters_option_Generic2dOscillator_variables_of_interest"] = 'V'

    if len(sys.argv) > 1:
        launch_args["model_parameters_option_Generic2dOscillator_tau"] = sys.argv[1]
Example #49
    def launch(self, resolution, weighting, inj_f_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inj_f_thresh = float(inj_f_thresh)/100.
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(project.name)
        manifest_file = os.path.join(manifest_file, 'mouse_connectivity_manifest.json')
        cache = MouseConnectivityCache(resolution=resolution, manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = dictionary_builder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = download_an_construct_matrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pms_cleaner(projmaps)

        # download from the AllenSDK the annotation volume, the template volume
        vol, annot_info = cache.get_annotation_volume()
        template, template_info = cache.get_template_volume()

        # rotate template in the TVB 3D reference:
        template = rotate_reference(template)

        # grab the StructureTree instance
        structure_tree = cache.get_structure_tree()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = areas_volume_threshold(cache, projmaps, vol_thresh, resolution)
        
        # the method excludes from the experimental dataset those experiments where the injected
        # fraction of pixels in the injection site is lower than inj_f_thresh
        projmaps = infected_threshold(cache, projmaps, inj_f_thresh)

        # the method creates file order and keyword that will be the link between the SC order and the
        # id key in the Allen database
        [order, key_ord] = create_file_order(projmaps, structure_tree)

        # the method builds the Structural Connectivity (SC) matrix
        structural_conn = construct_structural_conn(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = construct_centres(cache, order, key_ord)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = construct_tract_lengths(centres)

        # the method associates the parent and the grandparents with the biggest volume
        # to each child in the selected parcellation
        [unique_parents, unique_grandparents] = parents_and_grandparents_finder(cache, order, key_ord, structure_tree)

        # the method returns a volume indexed between 0 and N-1, with N = total brain areas in the
        # parcellation; -1 = background and areas that are not in the parcellation
        vol_parcel = mouse_brain_visualizer(vol, order, key_ord, unique_parents, unique_grandparents,
                                            structure_tree, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = structural_conn
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        # Volume template
        result_template = StructuralMRI(storage_path=self.storage_path)
        result_template.array_data = template
        result_template.weighting = 'T1'
        result_template.volume = result_volume
        return [result_connectivity, result_volume, result_rvm, result_template]
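A hedged invocation sketch for the launch method above, assuming `adapter` is an instance of the Allen creator adapter that defines it; all numeric values are illustrative only:

# Illustrative arguments: resolution in microns, weighting mode, injection-fraction
# threshold in percent, and volume threshold in the units the adapter expects.
results = adapter.launch(resolution=100, weighting=1, inj_f_thresh=80, vol_thresh=1000000000)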
Example #50
def import_conn_zip(project_id, zip_path):
    project = dao.get_project_by_id(project_id)
    group = dao.get_algorithm_by_module('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
    importer = ABCAdapter.build_adapter(group)
    ### Launch Operation
    FlowService().fire_operation(importer, project.administrator, project_id, uploaded=zip_path)
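A hypothetical call; the project id and ZIP path are illustrative:

import_conn_zip(1, "/tmp/connectivity_76.zip")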
Example #51
    def launch(self, resolution, weighting, inf_vox_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inf_vox_thresh = float(inf_vox_thresh)
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(project.name)
        manifest_file = os.path.join(manifest_file, "mouse_connectivity_manifest.json")
        cache = MouseConnectivityCache(resolution=resolution, manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = DictionaireBuilder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = DownloadAndConstructMatrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pmsCleaner(projmaps)

        Vol, annot_info = cache.get_annotation_volume()
        ontology = cache.get_ontology()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = AreasVolumeTreshold(projmaps, vol_thresh, resolution, Vol, ontology)

        # the method includes in the parcellation only brain regions where at least one injection
        # experiment had infected more than N voxels (where N is inf_vox_thresh)
        projmaps = AreasVoxelTreshold(cache, projmaps, inf_vox_thresh, Vol, ontology)

        # the method creates `order` and `key_ord`, the link between the SC order
        # and the id key in the Allen database
        [order, key_ord] = CreateFileOrder(projmaps, ontology)

        # the method builds the Structural Connectivity (SC) matrix
        SC = ConstructingSC(projmaps, order, key_ord)

        # the method returns the coordinate of the centres and the name of the brain areas in the selected parcellation
        [centres, names] = Construct_centres(ontology, order, key_ord, Vol)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = ConstructTractLengths(centres)

        # the method associates the parent and the grandparents with the biggest volume
        # to each child in the selected parcellation
        [unique_parents, unique_grandparents] = ParentsAndGrandParentsFinder(order, key_ord, Vol, ontology)

        # the method returns a volume indexed between 0 and N-1, with N = total brain areas in the
        # parcellation; -1 = background and areas that are not in the parcellation
        Vol_parcel = MouseBrainVisualizer(Vol, order, key_ord, unique_parents, unique_grandparents, ontology, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = SC
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = Vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        return [result_connectivity, result_rvm, result_volume]