Code example #1
    def load_burst_from_json(self, **data):
        """Upload Burst from previously exported JSON file"""
        self.logger.debug("Uploading ..." + str(data))

        try:
            upload_param = "uploadedfile"
            if upload_param in data and data[upload_param]:

                upload_param = data[upload_param]
                if isinstance(upload_param, FieldStorage) or isinstance(upload_param, Part):
                    if not upload_param.file:
                        raise BurstServiceException("Please select a valid JSON file.")
                    upload_param = upload_param.file.read()

                upload_param = json.loads(upload_param)
                prj_id = common.get_current_project().id
                importer = ImportService()
                burst_entity = importer.load_burst_entity(upload_param, prj_id)
                common.add2session(common.KEY_BURST_CONFIG, burst_entity)

        except Exception as excep:
            self.logger.warning(excep.message)
            common.set_error_message(excep.message)

        raise cherrypy.HTTPRedirect('/burst/')
Code example #2
def _update_localconnectivity_metadata(folder, file_name):

    service = ImportService()
    operation_id = int(os.path.split(folder)[1])

    dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
    info_dict = SparseMatrix.extract_sparse_matrix_metadata(dt.matrix)
    dt.set_metadata(info_dict, "", True, SparseMatrix.ROOT_PATH + "matrix")
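A minimal driver sketch for the helper above, assuming the conventional TVB storage layout in which each operation folder is named by its numeric operation id and holds the DataType H5 files; the PROJECT_FOLDER constant and the directory walk are illustrative assumptions, not part of the original code.

import os

PROJECT_FOLDER = "/path/to/TVB/PROJECTS/Default_Project"  # hypothetical project path

for entry in os.listdir(PROJECT_FOLDER):
    folder = os.path.join(PROJECT_FOLDER, entry)
    # only operation folders named by their numeric id match the helper's contract
    if not (os.path.isdir(folder) and entry.isdigit()):
        continue
    for file_name in os.listdir(folder):
        if "LocalConnectivity" in file_name and file_name.endswith(".h5"):
            _update_localconnectivity_metadata(folder, file_name)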
Code example #3
File: factory.py Project: maedoc/tvb-framework
    def import_default_project(admin_user=None):

        if not admin_user:
            admin_user = TestFactory.create_user()

        project_path = os.path.join(os.path.dirname(os.path.dirname(cff_dataset.__file__)), 'Default_Project.zip')
        import_service = ImportService()
        import_service.import_project_structure(project_path, admin_user.id)
        return import_service.created_projects[0]
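A hedged usage sketch for the factory method above: with no argument it creates its own admin user before importing; the print line is illustrative.

# illustrative usage: let the factory create its own admin user
default_project = TestFactory.import_default_project()
print("Imported default project with id:", default_project.id)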
Code example #4
def update(input_file):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException("The input path %s received for upgrading from 3 -> 4 is not a "
                                               "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException("File %s received for upgrading 3 -> 4 is not valid, due to missing "
                                               "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = projections.EEG_POLYMORPHIC_IDENTITY
        if "SEEG" in class_name:
            projection_type = projections.SEEG_POLYMORPHIC_IDENTITY
        elif "MEG" in class_name:
            projection_type = projections.MEG_POLYMORPHIC_IDENTITY

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        operation_id = int(os.path.split(folder)[1])
        dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
        dt_db = dao.get_datatype_by_gid(dt.gid)

        if dt_db is not None:
            # DT already in DB: update its own storage, making sure all fields are correctly populated
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # restore in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception("Could not update flags in DB, but we continue with the update!")

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Set default values, so that the rest of the project can still be imported
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
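A sketch of a bulk 3 -> 4 upgrade pass built on the update() function above, assuming every TVB H5 file lives somewhere under a single storage root; the STORAGE_ROOT constant and the choice to skip incompatible files are assumptions, not part of the original migration code.

import os

STORAGE_ROOT = "/path/to/TVB/PROJECTS"  # hypothetical storage root

for root, _, files in os.walk(STORAGE_ROOT):
    for name in files:
        if not name.endswith(".h5"):
            continue
        try:
            update(os.path.join(root, name))
        except IncompatibleFileManagerException:
            # files lacking the expected metadata are left untouched
            pass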
Code example #5
def update():
    """
    Try to import Default_Project, so that new users created with the latest code can share this project.
    """

    try:
        admins = dao.get_administrators()
        service = ImportService()
        service.import_project_structure(DATA_FILE, admins[0].id)
    except Exception:
        LOGGER.exception("Could import DefaultProject!")
Code example #6
 def projectupload(self, **data):
     """Upload Project from TVB ZIP."""
     self.logger.debug("Uploading ..." + str(data))
     try:
         upload_param = "uploadedfile"
         if upload_param in data and data[upload_param]:
             import_service = ImportService()
             import_service.import_project_structure(data[upload_param], common.get_logged_user().id)
     except ServicesBaseException as excep:
         self.logger.warning(excep.message)
         common.set_error_message(excep.message)
Code example #7
    def launch(self, data_file):
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.

        :param data_file: an archive (ZIP / HDF5) containing the `DataType`

        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if data_file is None:
            raise LaunchException("Please select file which contains data to import")

        if os.path.exists(data_file):
            if zipfile.is_zipfile(data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(data_file, tmp_folder)
                operations = ImportService().import_project_operations(current_op.project, self.storage_path)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(data_file)

                folder, h5file = os.path.split(data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        service = ImportService()
                        datatype = service.load_datatype_from_file(folder, h5file, self.operation_id)
                        service.store_datatype(datatype)
                        self.nr_of_datatypes += 1
                    except Exception as excep:
                        # If import operation failed delete file from disk.
                        if datatype is not None and os.path.exists(datatype.get_storage_file_path()):
                            os.remove(datatype.get_storage_file_path())
                        self.log.exception(excep)
                        raise LaunchException("Invalid file received as input. Most probably incomplete "
                                              "meta-data ...  " + str(excep))
                else:
                    raise LaunchException("Uploaded file: %s is neither in ZIP or HDF5 format" % data_file)

        else:
            raise LaunchException("File: %s to import does not exists." % data_file)
Code example #8
def update_localconnectivity_metadata(folder, file_name):
    service = ImportService()
    operation_id = int(os.path.split(folder)[1])

    dt = service.load_datatype_from_file(os.path.join(folder, file_name),
                                         operation_id)
    info_dict = {
        "dtype": dt.matrix.dtype.str,
        "format": dt.matrix.format,
        "Shape": str(dt.matrix.shape),
        "Maximum": dt.matrix.data.max(),
        "Minimum": dt.matrix.data.min(),
        "Mean": dt.matrix.mean()
    }
    dt.set_metadata(info_dict, '', True, '/matrix')
Code example #9
    def load_from_zip(self, zip_file, project):
        import_service = ImportService()
        simulator_folder = import_service.import_simulator_configuration_zip(
            zip_file)

        simulator_h5_filename = DirLoader(
            simulator_folder,
            None).find_file_for_has_traits_type(SimulatorAdapterModel)
        simulator_h5_filepath = os.path.join(simulator_folder,
                                             simulator_h5_filename)
        simulator = h5.load_view_model_from_file(simulator_h5_filepath)

        burst_config = self.burst_service.load_burst_configuration_from_folder(
            simulator_folder, project)
        return simulator, burst_config
Code example #10
    def load_simulator_configuration_from_zip(self, **data):
        """Upload Simulator from previously exported ZIP file"""
        self.logger.debug("Uploading ..." + str(data))
        last_loaded_form_url = SimulatorWizzardURLs.SETUP_PSE_URL

        try:
            upload_param = "uploadedfile"
            if upload_param in data and data[upload_param]:
                simulator, burst_config, sim_folder = self.burst_service.load_simulation_from_zip(
                    data[upload_param], self.context.project)

                dts_folder = os.path.join(
                    sim_folder, StorageInterface.EXPORTED_SIMULATION_DTS_DIR)
                ImportService().import_project_operations(
                    self.context.project, dts_folder, False, None)

                self.monitors_handler.build_list_of_monitors_from_view_models(
                    simulator)
                if burst_config.is_pse_burst():
                    last_loaded_form_url = SimulatorWizzardURLs.LAUNCH_PSE_URL
                self.context.init_session_at_sim_config_from_zip(
                    burst_config, simulator, last_loaded_form_url)
        except IOError as ioexcep:
            self.logger.exception(ioexcep)
            self.context.set_warning_message(
                "This ZIP does not contain a complete simulator configuration")
        except ServicesBaseException as excep:
            self.logger.warning(excep.message)
            self.context.set_warning_message(excep.message)

        raise cherrypy.HTTPRedirect('/burst/')
Code example #11
    def load_from_zip(self, zip_file, project):
        import_service = ImportService()
        simulator_folder = import_service.import_simulator_configuration_zip(
            zip_file)

        simulator_h5_filename = DirLoader(
            simulator_folder, None).find_file_for_has_traits_type(Simulator)
        with SimulatorH5(os.path.join(simulator_folder,
                                      simulator_h5_filename)) as sim_h5:
            simulator_gid = sim_h5.gid.load()
        simulator = SimulatorSerializer.deserialize_simulator(
            simulator_gid, simulator_folder)

        burst_config = self.burst_service.load_burst_configuration_from_folder(
            simulator_folder, project)
        return simulator, burst_config
Code example #12
File: user_service.py Project: rajul/tvb-framework
    def create_user(self,
                    username=None,
                    password=None,
                    password2=None,
                    role=None,
                    email=None,
                    comment=None,
                    email_msg=None,
                    validated=False):
        """
        Service Layer for creating a new user.
        """
        #Basic fields validation.
        if (username is None) or len(username) < 1:
            raise UsernameException("Empty UserName!")
        if (password is None) or len(password) < 1:
            raise UsernameException("Empty password!")
        if password2 is None:
            password2 = password
        if password != password2:
            raise UsernameException("Passwords do not match!")
        try:
            user_validated = (role == 'ADMINISTRATOR') or validated
            user = model.User(username, password, email, user_validated, role)
            if email_msg is None:
                email_msg = 'Hello ' + username + TEXT_CREATE
            admin_msg = (TEXT_CREATE_TO_ADMIN + username + ' :\n ' +
                         TvbProfile.current.web.BASE_URL + 'user/validate/' +
                         username + '\n\n"' + str(comment) + '"')
            self.logger.info("Registering user " + username + " !")
            if role != 'ADMINISTRATOR' and email is not None:
                admins = UserService.get_administrators()
                admin = admins[randint(0, len(admins) - 1)]
                if admin.email is not None and (
                        admin.email !=
                        TvbProfile.current.web.admin.DEFAULT_ADMIN_EMAIL):
                    # Do not send validation email in case default admin email remained unchanged
                    email_sender.send(FROM_ADDRESS, admin.email,
                                      SUBJECT_REGISTER, admin_msg)
                    self.logger.debug("Email sent to:" + admin.email +
                                      " for validating user:"******" !")
                email_sender.send(FROM_ADDRESS, email, SUBJECT_REGISTER,
                                  email_msg)
                self.logger.debug("Email sent to:" + email +
                                  " for notifying new user:"******" !")
            user = dao.store_entity(user)

            if role == model.ROLE_ADMINISTRATOR:
                uploaded = os.path.join(os.path.dirname(tvb_data.__file__),
                                        "Default_Project.zip")
                ImportService().import_project_structure(uploaded, user.id)
            else:
                handle_event(
                    ".".join([self.__class__.__name__,
                              stack()[0][3]]), user)
            return TEXT_DISPLAY
        except Exception as excep:
            self.logger.exception("Could not create user!")
            raise UsernameException(str(excep))
Code example #13
    def launch(self, view_model):
        # type: (TVBImporterModel) -> []
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            if zipfile.is_zipfile(view_model.data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(view_model.data_file, tmp_folder)
                operations = service.import_project_operations(
                    current_op.project, tmp_folder)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                folder, h5file = os.path.split(view_model.data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        service.check_import_references(
                            view_model.data_file, datatype)
                        service.store_datatype(datatype, view_model.data_file)
                        self.nr_of_datatypes += 1
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
Code example #14
def read_h5(full_paths):

    # We need to load the DataType in the context of an operation, and a project
    all_projects = dao.get_all_projects()
    all_operations = dao.get_generic_entity(model.Operation, all_projects[0].id, "fk_launched_in")

    service = ImportService()
    results = []

    for full_path in full_paths:
        folder, h5file = os.path.split(full_path)
        # The actual read of H5:
        datatype = service.load_datatype_from_file(folder, h5file, all_operations[0].id, move=False)

        print "We've build DataType: [%s]" % datatype.__class__.__name__, datatype
        results.append(datatype)

    return results
Code example #15
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       name="GeneratedProject",
                                                       description="test_desc")
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter()
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.zip_path = None
Code example #16
    def create_user(self, username=None, display_name=None, password=None, password2=None,
                    role=None, email=None, comment=None, email_msg=None, validated=False, skip_import=False,
                    gid=None, skip_sending_email=False):
        """
        Service Layer for creating a new user.
        """
        if (username is None) or len(username) < 1:
            raise UsernameException("Empty UserName!")
        if (display_name is None) or len(display_name) < 1:
            raise UsernameException("Empty display name!")
        if (password is None) or len(password) < 1:
            raise UsernameException("Empty password!")
        if password2 is None:
            password2 = password
        if password != password2:
            raise UsernameException("Passwords do not match!")

        try:
            user_validated = (role == ROLE_ADMINISTRATOR) or validated
            user = User(username, display_name, password, email, user_validated, role, gid)
            if email_msg is None:
                email_msg = 'Hello ' + username + TEXT_CREATE
            admin_msg = (TEXT_CREATE_TO_ADMIN + username + ' :\n ' + TvbProfile.current.web.BASE_URL +
                         '/user/validate/' + username + '\n\n"' + str(comment) + '"')
            self.logger.info("Registering user " + username + " !")

            if role != ROLE_ADMINISTRATOR and email is not None and not skip_sending_email:
                admins = UserService.get_administrators()
                admin = admins[random.randint(0, len(admins) - 1)]
                if admin.email is not None and (admin.email != TvbProfile.current.web.admin.DEFAULT_ADMIN_EMAIL):
                    # Do not send validation email in case default admin email remained unchanged
                    email_sender.send(FROM_ADDRESS, admin.email, SUBJECT_REGISTER, admin_msg)
                    self.logger.debug("Email sent to:" + admin.email + " for validating user:"******" !")
                email_sender.send(FROM_ADDRESS, email, SUBJECT_REGISTER, email_msg)
                self.logger.debug("Email sent to:" + email + " for notifying new user:"******" !")

            user = dao.store_entity(user)

            if role == ROLE_ADMINISTRATOR and not skip_import:
                to_upload = os.path.join(os.path.dirname(tvb_data.__file__), "Default_Project.zip")
                if not os.path.exists(to_upload):
                    self.logger.warning("Could not find DEFAULT PROJECT at path %s. You might want to import it "
                                        "yourself. See TVB documentation about where to find it!" % to_upload)
                    return TEXT_DISPLAY
                ImportService().import_project_structure(to_upload, user.id)
            else:
                try:
                    default_prj_id = dao.get_project_by_gid(DEFAULT_PROJECT_GID).id
                    dao.add_members_to_project(default_prj_id, [user.id])
                except Exception:
                    self.logger.warning(
                        "Could not link user_id: %d with project_gid: %s " % (user.id, DEFAULT_PROJECT_GID))

            return TEXT_DISPLAY
        except Exception as excep:
            self.logger.exception("Could not create user!")
            raise UsernameException(str(excep))
Code example #17
File: read_from_h5.py Project: liadomide/tvb-root
def read_h5(full_paths):
    # We need to load the DataType in the context of an operation, and a project
    all_projects = dao.get_all_projects()
    all_operations = dao.get_generic_entity(Operation, all_projects[0].id,
                                            "fk_launched_in")

    service = ImportService()
    results = []

    for full_path in full_paths:
        # The actual read of H5:
        datatype = service.load_datatype_from_file(full_path,
                                                   all_operations[0].id)

        print("We've build DataType: [%s]" % datatype.__class__.__name__,
              datatype)
        results.append(datatype)

    return results
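A possible command-line wrapper for read_h5 above; it assumes the TVB profile and database were initialised elsewhere in the script, and the argument handling is illustrative.

import sys

if __name__ == "__main__":
    # pass one or more H5 file paths as command-line arguments
    loaded = read_h5(sys.argv[1:])
    print("Loaded %d DataTypes" % len(loaded))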
Code example #18
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       name="GeneratedProject",
                                                       description="test_desc")
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter()
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            zip_path)
        self.zip_path = None
Code example #19
File: project_import.py Project: rajul/tvb-framework
def run_import(project_path):

    ## If we knew a UserID to use as admin, the next step would not be necessary.
    ## Make sure at least one user exists in TVB DB:
    user_service = UserService()
    admins = user_service.get_administrators()

    if admins:
        admin = admins[0]
    else:
        ## No Admin user was found, we will create one
        user_service.create_user("admin", "pass", role=model.ROLE_ADMINISTRATOR,
                                 email="*****@*****.**", validated=True)
        admin = user_service.get_administrators()[0]

    ## Do the actual import of a project from ZIP:
    import_service = ImportService()
    import_service.import_project_structure(project_path, admin.id)

    print "Project imported successfully. Check the Web UI!"
Code example #20
def run_import(project_path):

    ## If we knew a UserID to use as admin, the next step would not be necessary.
    ## Make sure at least one user exists in TVB DB:
    user_service = UserService()
    admins = user_service.get_administrators()

    if admins:
        admin = admins[0]
    else:
        ## No Admin user was found, we will create one
        user_service.create_user("admin", "pass", role=model.ROLE_ADMINISTRATOR,
                                 email="*****@*****.**", validated=True, skip_import=True)
        admin = user_service.get_administrators()[0]

    ## Do the actual import of a project from ZIP:
    import_service = ImportService()
    import_service.import_project_structure(project_path, admin.id)

    print("Project imported successfully. Check the Web UI!")
Code example #21
File: links_test.py Project: unimauro/tvb-framework
 def setUpTVB(self):
     """
     Adds to the _BaseLinksTest setup the following:
     two links from the src to the dest project, and
     the import/export services.
     """
     _BaseLinksTest.setUpTVB(self)
     dest_id = self.dest_project.id
     self.flow_service.create_link([self.red_datatype.id], dest_id)
     self.flow_service.create_link([self.blue_datatype.id], dest_id)
     self.export_mng = ExportManager()
     self.import_service = ImportService()
Code example #22
File: tvb_importer.py Project: nedkab/tvb-framework
    def launch(self, data_file):
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.

        :param data_file: an archive (ZIP / HDF5) containing the `DataType`

        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        if os.path.exists(data_file):
            if zipfile.is_zipfile(data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(data_file, tmp_folder)
                operations = ImportService().import_project_operations(
                    current_op.project, self.storage_path)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(data_file)

                folder, h5file = os.path.split(data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        service = ImportService()
                        datatype = service.load_datatype_from_file(
                            folder,
                            h5file,
                            self.operation_id,
                            final_storage=self.storage_path)
                        service.store_datatype(datatype)
                        self.nr_of_datatypes += 1
                    except Exception as excep:
                        # If import operation failed delete file from disk.
                        if datatype is not None and os.path.exists(
                                datatype.get_storage_file_path()):
                            os.remove(datatype.get_storage_file_path())
                        self.log.exception(excep)
                        raise LaunchException(
                            "Invalid file received as input. Most probably incomplete "
                            "meta-data ...  " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  data_file)
Code example #23
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.import_service = ImportService()
     self.flow_service = FlowService()
     self.project_service = ProjectService()
     
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject", description="test_desc")
     self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
     self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
     TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
     self.zip_path = None 
Code example #24
    def load_burst_from_json(self, **data):
        """Upload Burst from previously exported JSON file"""
        self.logger.debug("Uploading ..." + str(data))

        try:
            upload_param = "uploadedfile"
            if upload_param in data and data[upload_param]:

                upload_param = data[upload_param]
                if isinstance(upload_param, FieldStorage) or isinstance(upload_param, Part):
                    if not upload_param.file:
                        raise BurstServiceException("Please select a valid JSON file.")
                    upload_param = upload_param.file.read()

                upload_param = json.loads(upload_param)
                prj_id = common.get_current_project().id
                importer = ImportService()
                burst_entity = importer.load_burst_entity(upload_param, prj_id)
                common.add2session(common.KEY_BURST_CONFIG, burst_entity)

        except Exception as excep:
            self.logger.warning(excep.message)
            common.set_error_message(excep.message)
Code example #25
def launch_simulation_workflow(json_path, prj_id):
    """

    :param json_path: Path towards a local JSON file exported from GUI
    :param prj_id: The ID of a project that needs to exist in the DB; it can be taken from the web interface
    """
    project = dao.get_project_by_id(prj_id)

    with open(json_path, 'rb') as input_file:
        simulation_json = input_file.read()
        simulation_json = json.loads(simulation_json)
        LOG.info("Simulation JSON loaded from file '%s': \n  %s", json_path, simulation_json)

        importer = ImportService()
        simulation_config = importer.load_burst_entity(simulation_json, prj_id)
        LOG.info("Simulation Workflow configuration object loaded: \n  %s", simulation_config)

        flow_service = FlowService()
        simulator_algorithm, _ = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        LOG.info("Found Simulation algorithm in local DB: \n   %s", simulator_algorithm)

        burst_service = BurstService()
        burst_service.launch_burst(simulation_config, 0, simulator_algorithm.id, project.administrator.id, LAUNCH_NEW)
        LOG.info("Check in the web GUI for your operation. It should be starting now ...")
Code example #26
    def transactional_setup_method(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        assert 0 == result, "There should be no data type in DB"
        result = self.count_all_entities(Project)
        assert 0 == result

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
Code example #27
    def setUp(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        self.assertEqual(0, result, "There should be no data type in DB")
        result = self.count_all_entities(Project)
        self.assertEqual(0, result)

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
Code example #28
    def launch(self, view_model):
        # type: (TVBImporterModel) -> []
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            current_op = dao.get_operation_by_id(self.operation_id)
            if zipfile.is_zipfile(view_model.data_file):
                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                self.storage_interface.unpack_zip(view_model.data_file,
                                                  tmp_folder)
                is_group = False
                current_op_id = current_op.id
                for file in os.listdir(tmp_folder):
                    # In case we import a DatatypeGroup, we want the default import flow
                    if os.path.isdir(os.path.join(tmp_folder, file)):
                        current_op_id = None
                        is_group = True
                        break
                try:
                    operations, all_dts, stored_dts_count = service.import_project_operations(
                        current_op.project, tmp_folder, is_group,
                        current_op_id)
                    self.nr_of_datatypes += stored_dts_count
                    if stored_dts_count == 0:
                        current_op.additional_info = 'All chosen datatypes already exist!'
                        dao.store_entity(current_op)
                    elif stored_dts_count < all_dts:
                        current_op.additional_info = 'Part of the chosen datatypes already exist!'
                        dao.store_entity(current_op)
                except ImportException as excep:
                    self.log.exception(excep)
                    current_op.additional_info = excep.message
                    current_op.status = STATUS_ERROR
                    raise LaunchException("Invalid file received as input. " +
                                          str(excep))
                finally:
                    shutil.rmtree(tmp_folder)
            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                if self.storage_interface.get_storage_manager(
                        view_model.data_file).is_valid_tvb_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        stored_new_dt = service.store_or_link_datatype(
                            datatype, view_model.data_file,
                            current_op.project.id)
                        if stored_new_dt == 0:
                            current_op.additional_info = 'The chosen datatype already exists!'
                            dao.store_entity(current_op)
                        self.nr_of_datatypes += stored_new_dt
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
Code example #29
File: links_test.py Project: ganiyuolalekan/tvb-root
 def _import(self, export_file, user_id):
     """ import a project zip for a user """
     # instantiated for every use because it is stateful
     import_service = ImportService()
     import_service.import_project_structure(export_file, user_id)
     return import_service.created_projects[0].id
Code example #30
class TestImportService(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.import_service module.
    """
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       name="GeneratedProject",
                                                       description="test_desc")
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter()
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.zip_path = None

    def transactional_teardown_method(self):
        """
        Reset the database when test is done.
        """
        ### Delete TEMP folder
        if os.path.exists(TvbProfile.current.TVB_TEMP_FOLDER):
            shutil.rmtree(TvbProfile.current.TVB_TEMP_FOLDER)

        ### Delete folder where data was exported
        if self.zip_path and os.path.exists(self.zip_path):
            shutil.rmtree(os.path.split(self.zip_path)[0])

        self.delete_project_folders()

    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        #create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation,
                                              self.adapter_instance, {},
                                              **data)
        inserted = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        assert 1 == inserted, "Problems when inserting data"

        #create a value wrapper
        value_wrapper = self._create_value_wrapper()
        count_operations = dao.get_filtered_operations(self.test_project.id,
                                                       None,
                                                       is_count=True)
        assert 2 == count_operations, "Invalid ops number before export!"

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(self.test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Remove the original project
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            self.test_user.id)
        assert 0 == len(result), "Project Not removed!"
        assert 0 == lng_, "Project Not removed!"

        # Now try to import the project again
        self.import_service.import_project_structure(self.zip_path,
                                                     self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(result) == 1, "There should be only one project."
        assert result[
            0].name == "GeneratedProject", "The project name is not correct."
        assert result[
            0].description == "test_desc", "The project description is not correct."
        self.test_project = result[0]

        count_operations = dao.get_filtered_operations(self.test_project.id,
                                                       None,
                                                       is_count=True)

        #1 op. - import cff; 2 op. - save the array wrapper;
        assert 2 == count_operations, "Invalid ops number after export and import !"
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            assert datatype.module == expected_results[gid][
                0], 'DataTypes not imported correctly'
            assert datatype.type == expected_results[gid][
                1], 'DataTypes not imported correctly'
        #check the value wrapper
        new_val = self.flow_service.get_available_datatypes(
            self.test_project.id,
            "tvb.datatypes.mapped_values.ValueWrapper")[0]
        assert 1 == len(new_val), "One !=" + str(len(new_val))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
        assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
        assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"

    def test_import_export_existing(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        count_operations = dao.get_filtered_operations(self.test_project.id,
                                                       None,
                                                       is_count=True)
        assert 2 == count_operations, "Invalid ops before export!"

        self.zip_path = ExportManager().export_project(self.test_project)
        assert self.zip_path is not None, "Exported file is none"

        with pytest.raises(ProjectImportException):
            self.import_service.import_project_structure(
                self.zip_path, self.test_user.id)

    def _create_timeseries(self):
        """Launch adapter to persist a TimeSeries entity"""
        activity_data = numpy.array([[1, 2, 3], [4, 5, 6], [7, 8, 9],
                                     [10, 11, 12]])
        time_data = numpy.array([1, 2, 3])
        storage_path = FilesHelper().get_project_folder(self.test_project)
        time_series = TimeSeries(time_files=None,
                                 activity_files=None,
                                 max_chunk=10,
                                 maxes=None,
                                 mins=None,
                                 data_shape=numpy.shape(activity_data),
                                 storage_path=storage_path,
                                 label_y="Time",
                                 time_data=time_data,
                                 data_name='TestSeries',
                                 activity_data=activity_data,
                                 sample_period=10.0)
        self._store_entity(time_series, "TimeSeries",
                           "tvb.datatypes.time_series")
        timeseries_count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.time_series.TimeSeries")[1]
        assert timeseries_count == 1, "Should be only one TimeSeries"

    def _create_value_wrapper(self):
        """Persist ValueWrapper"""
        value_ = ValueWrapper(data_value=5.0, data_name="my_value")
        self._store_entity(value_, "ValueWrapper",
                           "tvb.datatypes.mapped_values")
        valuew = self.flow_service.get_available_datatypes(
            self.test_project.id,
            "tvb.datatypes.mapped_values.ValueWrapper")[0]
        assert len(valuew) == 1, "Should be only one value wrapper"
        return ABCAdapter.load_entity_by_gid(valuew[0][2])

    def _store_entity(self, entity, type_, module):
        """Launch adapter to store a create a persistent DataType."""
        entity.type = type_
        entity.module = module
        entity.subject = "John Doe"
        entity.state = "RAW_STATE"
        entity.set_operation_id(self.operation.id)
        adapter_instance = StoreAdapter([entity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {})
Code example #31
 def _import(self, export_file, user_id):
     """ import a project zip for a user """
     # instantiated for every use because it is stateful
     import_service = ImportService()
     import_service.import_project_structure(export_file, user_id)
     return import_service.created_projects[0].id
Code example #32
class TestImportService(BaseTestCase):
    """
    This class contains tests for the tvb.core.services.import_service module.
    """
    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.project_service = ProjectService()
        self.zip_path = None

    def teardown_method(self):
        """
        Reset the database when test is done.
        """
        # Delete TEMP folder
        if os.path.exists(TvbProfile.current.TVB_TEMP_FOLDER):
            shutil.rmtree(TvbProfile.current.TVB_TEMP_FOLDER)

        # Delete folder where data was exported
        if self.zip_path and os.path.exists(self.zip_path):
            shutil.rmtree(os.path.split(self.zip_path)[0])

        self.delete_project_folders()

    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def test_import_export(self, user_factory, project_factory,
                           value_wrapper_factory):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestImportExport",
                                       "test_desc")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(test_user, test_project, zip_path)
        value_wrapper = value_wrapper_factory(test_user, test_project)

        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Remove the original project
        self.project_service.remove_project(test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            test_user.id)
        assert 0 == len(result), "Project Not removed!"
        assert 0 == lng_, "Project Not removed!"

        # Now try to import the project again
        self.import_service.import_project_structure(self.zip_path,
                                                     test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            test_user.id)[0]
        assert len(result) == 1, "There should be only one project."
        assert result[
            0].name == "TestImportExport", "The project name is not correct."
        assert result[
            0].description == "test_desc", "The project description is not correct."
        test_project = result[0]

        count_operations = dao.get_filtered_operations(test_project.id,
                                                       None,
                                                       is_count=True)

        # 1 op. - import conn; 2 op. - BCT Analyzer
        assert 2 == count_operations, "Invalid ops number after export and import !"
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            assert datatype.module == expected_results[gid][
                0], 'DataTypes not imported correctly'
            assert datatype.type == expected_results[gid][
                1], 'DataTypes not imported correctly'
        # check the value wrapper
        new_val = try_get_last_datatype(test_project.id, ValueWrapperIndex)
        assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
        assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
        assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"

    def test_import_export_existing(self, user_factory, project_factory):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestImportExport2")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(test_user, test_project, zip_path)

        count_operations = dao.get_filtered_operations(test_project.id,
                                                       None,
                                                       is_count=True)
        assert 1 == count_operations, "Invalid ops before export!"

        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"

        with pytest.raises(ImportException):
            self.import_service.import_project_structure(
                self.zip_path, test_user.id)
Code example #33
File: links_test.py Project: unimauro/tvb-framework
class ImportExportProjectWithLinksTest(_BaseLinksTest):
    def setUpTVB(self):
        """
        Adds to the _BaseLinksTest setup the following:
        two links from the src to the dest project, and
        the import/export services.
        """
        _BaseLinksTest.setUpTVB(self)
        dest_id = self.dest_project.id
        self.flow_service.create_link([self.red_datatype.id], dest_id)
        self.flow_service.create_link([self.blue_datatype.id], dest_id)
        self.export_mng = ExportManager()
        self.import_service = ImportService()

    def test_export(self):
        export_file = self.export_mng.export_project(self.dest_project)
        with TvbZip(export_file) as z:
            self.assertTrue('links-to-external-projects/Operation.xml' in z.namelist())

    def _export_and_remove_dest(self):
        """export the destination project and remove it"""
        dest_id = self.dest_project.id
        export_file = self.export_mng.export_project(self.dest_project)
        self.project_service.remove_project(dest_id)
        return export_file

    def _import_dest(self, export_file):
        self.import_service.import_project_structure(export_file, self.user.id)
        return self.import_service.created_projects[0].id

    def test_links_recreated_on_import(self):
        export_file = self._export_and_remove_dest()
        imported_proj_id = self._import_dest(export_file)
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        links = dao.get_linked_datatypes_for_project(imported_proj_id)
        self.assertEqual(2, len(links))

    def test_datatypes_recreated_on_import(self):
        export_file = self._export_and_remove_dest()
        self.project_service.remove_project(self.src_project.id)
        # both projects have been deleted
        # import should recreate links as datatypes
        imported_proj_id = self._import_dest(export_file)
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        links = dao.get_linked_datatypes_for_project(imported_proj_id)
        self.assertEqual(0, len(links))

    def test_datatypes_and_links_recreated_on_import(self):
        export_file = self._export_and_remove_dest()
        # remove datatype 2 from source project
        self.project_service.remove_datatype(self.src_project.id, self.blue_datatype.gid)
        imported_proj_id = self._import_dest(export_file)
        # both datatypes should be recreated
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        # only datatype 1 should be a link
        links = dao.get_linked_datatypes_for_project(imported_proj_id)
        self.assertEqual(1, len(links))
        self.assertEquals(self.red_datatype.gid, links[0].gid)
Code example #34
def update(input_file):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException(
            "The input path %s received for upgrading from 3 -> 4 is not a "
            "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException(
            "File %s received for upgrading 3 -> 4 is not valid, due to missing "
            "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" %
                    (file_name, folder))

        projection_type = projections.EEG_POLYMORPHIC_IDENTITY
        if "SEEG" in class_name:
            projection_type = projections.SEEG_POLYMORPHIC_IDENTITY
        elif "MEG" in class_name:
            projection_type = projections.MEG_POLYMORPHIC_IDENTITY

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" %
                     (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        operation_id = int(os.path.split(folder)[1])
        dt = service.load_datatype_from_file(folder,
                                             file_name,
                                             operation_id,
                                             move=False)
        dt_db = dao.get_datatype_by_gid(dt.gid)

        if dt_db is not None:
            # DT already in DB: refresh its own storage, making sure all fields are correctly populated
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # restore in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception(
                    "Could not update flags in DB, but we continue with the update!"
                )

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Provide default values, so the import of the full project does not fail
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = \
        TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
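
For orientation, a hedged sketch of how an updater like this could be driven over a whole project tree. The directory walk and the .h5 filter are illustrative assumptions; only update() itself comes from the snippet above:

import os

def upgrade_project_storage(project_root):
    """Apply the 3 -> 4 storage update to every HDF5 file found under project_root."""
    for folder, _, files in os.walk(project_root):
        for name in files:
            if name.endswith(".h5"):  # assumed extension for TVB storage files
                update(os.path.join(folder, name))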
Code Example #35
0
class TestImportService(BaseTestCase):
    """
    This class contains tests for the tvb.core.services.import_service module.
    """
    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.project_service = ProjectService()
        self.zip_path = None

    def teardown_method(self):
        """
        Reset the database when test is done.
        """
        # Delete TEMP folder
        if os.path.exists(TvbProfile.current.TVB_TEMP_FOLDER):
            shutil.rmtree(TvbProfile.current.TVB_TEMP_FOLDER)

        # Delete folder where data was exported
        if self.zip_path and os.path.exists(self.zip_path):
            shutil.rmtree(os.path.split(self.zip_path)[0])

        self.delete_project_folders()

    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def test_import_export(self, user_factory, project_factory,
                           value_wrapper_factory):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestImportExport",
                                       "test_desc")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(test_user, test_project, zip_path)
        value_wrapper = value_wrapper_factory(test_user, test_project)
        ProjectService.set_datatype_visibility(value_wrapper.gid, False)

        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Remove the original project
        self.project_service.remove_project(test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            test_user.id)
        assert 0 == len(result), "Project Not removed!"
        assert 0 == lng_, "Project Not removed!"

        # Now try to import the project again
        self.import_service.import_project_structure(self.zip_path,
                                                     test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            test_user.id)[0]
        assert len(result) == 1, "There should be only one project."
        assert result[0].name == "TestImportExport", "The project name is not correct."
        assert result[0].description == "test_desc", "The project description is not correct."
        test_project = result[0]

        count_operations = dao.get_filtered_operations(test_project.id,
                                                       None,
                                                       is_count=True)

        # 1 op. - import conn; 2 op. - BCT Analyzer
        assert 2 == count_operations, "Invalid ops number after export and import !"
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            assert datatype.module == expected_results[gid][0], 'DataTypes not imported correctly'
            assert datatype.type == expected_results[gid][1], 'DataTypes not imported correctly'
        # check the value wrapper
        new_val = try_get_last_datatype(test_project.id, ValueWrapperIndex)
        assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
        assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
        assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"
        assert not new_val.visible, "Visibility incorrectly restored"

    def test_import_export_existing(self, user_factory, project_factory):
        """
        Test that importing a project that still exists in the workspace (same name) raises ImportException.
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestImportExport2")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(test_user, test_project, zip_path)

        count_operations = dao.get_filtered_operations(test_project.id,
                                                       None,
                                                       is_count=True)
        assert 1 == count_operations, "Invalid ops before export!"

        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"

        with pytest.raises(ImportException):
            self.import_service.import_project_structure(
                self.zip_path, test_user.id)

    def test_export_import_burst(self, user_factory, project_factory,
                                 simulation_launch):
        """
        Test that fk_parent_burst is correctly preserved after export/import
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestIESim")
        sim_op = simulation_launch(test_user,
                                   test_project,
                                   simulation_length=10)
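        # Poll the DB-backed status a few times; the simulation runs asynchronously.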
        tries = 5
        while not sim_op.has_finished and tries > 0:
            sleep(5)
            tries = tries - 1
            sim_op = dao.get_operation_by_id(sim_op.id)
        assert sim_op.has_finished, "Simulation did not finish in the given time"

        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"
        self.project_service.remove_project(test_project.id)

        self.import_service.import_project_structure(self.zip_path,
                                                     test_user.id)
        retrieved_project = self.project_service.retrieve_projects_for_user(
            test_user.id)[0][0]
        ts = try_get_last_datatype(retrieved_project.id, TimeSeriesRegionIndex)
        bursts = dao.get_bursts_for_project(retrieved_project.id)
        assert 1 == len(bursts)
        assert ts.fk_parent_burst == bursts[0].gid

    def test_export_import_figures(self, user_factory, project_factory):
        """
        Test that ResultFigure instances are correctly restored after a project export + import.
        """
        # Prepare data
        user = user_factory()
        project = project_factory(user, "TestImportExportFigures")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'paupau.zip')
        TestFactory.import_zip_connectivity(user, project, zip_path)

        figure_service = FigureService()
        figure_service.store_result_figure(project, user, "png", IMG_DATA,
                                           "bla")
        figure_service.store_result_figure(project, user, "png", IMG_DATA,
                                           "bla")
        figures = list(
            figure_service.retrieve_result_figures(project,
                                                   user)[0].values())[0]
        assert 2 == len(figures)

        # export, delete, and then re-import the project
        self.zip_path = ExportManager().export_project(project)
        assert self.zip_path is not None, "Exported file is none"
        self.project_service.remove_project(project.id)

        self.import_service.import_project_structure(self.zip_path, user.id)

        # Check that state is as before export: one operation, one DT, 2 figures
        retrieved_project = self.project_service.retrieve_projects_for_user(
            user.id)[0][0]
        count_operations = dao.get_filtered_operations(retrieved_project.id,
                                                       None,
                                                       is_count=True)
        assert 1 == count_operations
        count_datatypes = dao.count_datatypes(retrieved_project.id, DataType)
        assert 1 == count_datatypes

        figures = list(
            figure_service.retrieve_result_figures(retrieved_project,
                                                   user)[0].values())[0]
        assert 2 == len(figures)
        assert "bla" in figures[0].name
        assert "bla" in figures[1].name
        image_path = utils.url2path(figures[0].file_path)
        img_data = Image.open(image_path).load()
        assert img_data is not None
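
The burst test above waits for the simulation with a hand-rolled polling loop; the same pattern can be factored into a small helper. A sketch that assumes only dao.get_operation_by_id and the has_finished flag already used in the test:

from time import sleep

def wait_for_operation(operation, retries=5, delay=5):
    """Re-read the operation from the DB until it finishes or retries run out."""
    while not operation.has_finished and retries > 0:
        sleep(delay)
        retries -= 1
        operation = dao.get_operation_by_id(operation.id)
    return operation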
Code Example #36
0
class ImportServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.import_service module.
    """  
    
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject", description="test_desc")
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.zip_path = None 
        

    def tearDown(self):
        """
        Reset the database when test is done.
        """
        ### Delete TEMP folder
        if os.path.exists(TvbProfile.current.TVB_TEMP_FOLDER):
            shutil.rmtree(TvbProfile.current.TVB_TEMP_FOLDER)
        
        ### Delete folder where data was exported
        if self.zip_path and os.path.exists(self.zip_path):
            shutil.rmtree(os.path.split(self.zip_path)[0])
            
        self.delete_project_folders()

            
    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)
        
        # create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
        inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                             "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(1, inserted, "Problems when inserting data")
        
        # create a value wrapper
        value_wrapper = self._create_value_wrapper()
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        self.assertEqual(2, count_operations, "Invalid ops number before export!")

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")
        
        # Remove the original project
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, len(result), "Project Not removed!")
        self.assertEqual(0, lng_, "Project Not removed!")
        
        # Now try to import the project again
        self.import_service.import_project_structure(self.zip_path, self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(result), 1, "There should be only one project.")
        self.assertEqual(result[0].name, "GeneratedProject", "The project name is not correct.")
        self.assertEqual(result[0].description, "test_desc", "The project description is not correct.")
        self.test_project = result[0]
        
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        
        # 1st op. - import cff; 2nd op. - save the array wrapper
        self.assertEqual(2, count_operations, "Invalid ops number after export and import !")
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            self.assertEqual(datatype.module, expected_results[gid][0], 'DataTypes not imported correctly')
            self.assertEqual(datatype.type, expected_results[gid][1], 'DataTypes not imported correctly')
        # check the value wrapper
        new_val = self.flow_service.get_available_datatypes(self.test_project.id, 
                                                            "tvb.datatypes.mapped_values.ValueWrapper")[0]
        self.assertEqual(1, len(new_val), "Expected exactly one ValueWrapper, got " + str(len(new_val)))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        self.assertEqual(value_wrapper.data_value, new_val.data_value, "Data value incorrect")
        self.assertEqual(value_wrapper.data_type, new_val.data_type, "Data type incorrect")
        self.assertEqual(value_wrapper.data_name, new_val.data_name, "Data name incorrect")
        

    def test_import_export_existing(self):
        """
        Test that importing a project that still exists in the workspace (same name) raises ProjectImportException.
        """
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        self.assertEqual(2, count_operations, "Invalid ops before export!")

        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")

        self.assertRaises(ProjectImportException, self.import_service.import_project_structure,
                          self.zip_path, self.test_user.id)


    def _create_timeseries(self):
        """Launch adapter to persist a TimeSeries entity"""
        activity_data = numpy.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]])
        time_data = numpy.array([1, 2, 3])
        storage_path = FilesHelper().get_project_folder(self.test_project)
        time_series = TimeSeries(time_files=None, activity_files=None, 
                                 max_chunk=10, maxes=None, mins=None, data_shape=numpy.shape(activity_data), 
                                 storage_path=storage_path, label_y="Time", time_data=time_data, data_name='TestSeries',
                                 activity_data=activity_data, sample_period=10.0)
        self._store_entity(time_series, "TimeSeries", "tvb.datatypes.time_series")
        timeseries_count = self.flow_service.get_available_datatypes(self.test_project.id,
                                                                     "tvb.datatypes.time_series.TimeSeries")[1]
        self.assertEqual(timeseries_count, 1, "Should be only one TimeSeries")


    def _create_value_wrapper(self):
        """Persist ValueWrapper"""
        value_ = ValueWrapper(data_value=5.0, data_name="my_value")
        self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
        valuew = self.flow_service.get_available_datatypes(self.test_project.id,
                                                           "tvb.datatypes.mapped_values.ValueWrapper")[0]
        self.assertEqual(len(valuew), 1, "Should be only one value wrapper")
        return ABCAdapter.load_entity_by_gid(valuew[0][2])


    def _store_entity(self, entity, type_, module):
        """Launch adapter to store a create a persistent DataType."""
        entity.type = type_
        entity.module = module
        entity.subject = "John Doe"
        entity.state = "RAW_STATE"
        entity.set_operation_id(self.operation.id)
        adapter_instance = StoreAdapter([entity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
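
A closing note on StoreAdapter: it is a small test helper whose only job is to hand pre-built entities to the operation machinery for persistence. A hedged sketch of the idea, not the real class (which implements the full adapter interface; everything beyond the constructor and launch is elided here):

class StoreAdapter(ABCAdapter):
    """Minimal test adapter: returns pre-built entities so the framework stores them."""

    def __init__(self, entities):
        super(StoreAdapter, self).__init__()
        self.entities = entities

    def launch(self):
        # Whatever launch() returns is persisted as the operation's results.
        return self.entities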