class ProjectService:
    """
    Service layer for Project entities.
    """


    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()


    def store_project(self, current_user, is_create, selected_id, **data):
        """
        Create or update a Project entity.
        """
        # Validate unique name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = model.Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on the current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_all_users(prj_admin, int(page))[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)
        selected_user_ids = data["users"]
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:' + current_user.username)
        return current_proj
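

# A hedged usage sketch (not part of the service): it mirrors the calls made
# by the test factories further below and assumes a configured TVB environment
# in which `admin_user` is an existing model.User entity.
def _demo_store_project(admin_user):
    service = ProjectService()
    data = dict(name='demo_project', description='demo', users=[])
    # is_create=True and selected_id=None create a brand new project;
    # pass is_create=False plus an existing id to rename/update it instead.
    return service.store_project(admin_user, True, None, **data)
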
class ProjectUpdateManager(UpdateManager):
    """
    This goes through all the scripts that are newer than the version number
    written in the current project metadata xml, and executes them on the project folder.
    """

    def __init__(self, project_path):

        self.project_path = project_path
        self.files_helper = FilesHelper()
        # This assumes that old project metadata file can be parsed by current version.
        self.project_meta = self.files_helper.read_project_metadata(project_path)
        from_version = int(self.project_meta.get('version', 0))

        super(ProjectUpdateManager, self).__init__(project_update_scripts, from_version,
                                                   TvbProfile.current.version.PROJECT_VERSION)


    def run_all_updates(self):
        """
        Upgrade the project to the latest structure
        Go through all update scripts, from project version up to the current_version in the code
        """
        super(ProjectUpdateManager, self).run_all_updates(project_path=self.project_path)

        # update project version in metadata
        self.project_meta['version'] = self.current_version
        self.files_helper.write_project_metadata_from_dict(self.project_path, self.project_meta)
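

# Hedged usage sketch: upgrade a project folder in place. run_all_updates()
# executes every script newer than the version recorded in the metadata file
# and then bumps that version (assumes a configured TVB profile).
def _demo_upgrade_project(project_path):
    ProjectUpdateManager(project_path).run_all_updates()
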
    def launch(self, weights, weights_delimiter, tracts, tracts_delimiter, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param weights_delimiter: delimiter used in the weights csv file
        :param tracts:  csv file containing the tracts measures
        :param tracts_delimiter: delimiter used in the tracts csv file
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        weights_matrix = self._read_csv_file(weights, weights_delimiter)
        tract_matrix = self._read_csv_file(tracts, tracts_delimiter)

        FilesHelper.remove_files([weights, tracts])

        if weights_matrix.shape[0] != input_data.number_of_regions:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.number_of_regions))
        result = Connectivity()
        result.storage_path = self.storage_path
        result.centres = input_data.centres
        result.region_labels = input_data.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_data.orientations
        result.areas = input_data.areas
        result.cortical = input_data.cortical
        result.hemispheres = input_data.hemispheres
        return result
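

# For intuition, the node-count guard used by launch() in isolation: only the
# row count of the weights CSV is compared against the reference connectivity.
# A hedged, standalone sketch; numpy is already a TVB dependency.
import numpy

def _demo_check_node_count(weights_csv_path, number_of_regions, delimiter=','):
    weights = numpy.loadtxt(weights_csv_path, delimiter=delimiter)
    if weights.shape[0] != number_of_regions:
        raise ValueError("The csv defines %d nodes but the reference connectivity has %d"
                         % (weights.shape[0], number_of_regions))
    return weights
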
class ImportService(object):
    """
    Service for importing TVB entities into the system.
    It supports TVB exported H5 files as input, but it should also handle H5 files 
    generated outside of TVB, as long as they respect the same structure.
    """


    def __init__(self):
        self.logger = get_logger(__name__)
        self.user_id = None
        self.files_helper = FilesHelper()
        self.created_projects = []


    def _download_and_unpack_project_zip(self, uploaded, uq_file_name, temp_folder):

        if isinstance(uploaded, FieldStorage) or isinstance(uploaded, Part):
            if not uploaded.file:
                raise ProjectImportException("Please select the archive which contains the project structure.")
            with open(uq_file_name, 'wb') as file_obj:
                self.files_helper.copy_file(uploaded.file, file_obj)
        else:
            shutil.copy2(uploaded, uq_file_name)

        try:
            self.files_helper.unpack_zip(uq_file_name, temp_folder)
        except FileStructureException as excep:
            self.logger.exception(excep)
            raise ProjectImportException("Bad ZIP archive provided. A TVB exported project is expected!")
    def launch(self, weights, tracts, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param tracts:  csv file containing the tracts measures
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        dti_service = DTIPipelineService()
        dti_service._process_csv_file(weights, dti_service.WEIGHTS_FILE)
        dti_service._process_csv_file(tracts, dti_service.TRACT_FILE)
        weights_matrix = read_list_data(os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE))
        tract_matrix = read_list_data(os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE))
        FilesHelper.remove_files([os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE), 
                                  os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE)])

        if weights_matrix.shape[0] != input_data.orientations.shape[0]:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.orientations.shape[0]))
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = input_data.nose_correction
        result.centres = input_data.centres
        result.region_labels = input_data.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_data.orientations
        result.areas = input_data.areas
        result.cortical = input_data.cortical
        result.hemispheres = input_data.hemispheres
        return result
    def export(self, data, export_folder, project):
        """
        Exports data type:
        1. If data is a normal data type, simply exports storage file (HDF format)
        2. If data is a DataTypeGroup creates a zip with all files for all data types
        """
        download_file_name = self.get_export_file_name(data)
        files_helper = FilesHelper()
         
        if self.is_data_a_group(data):
            all_datatypes = self._get_all_data_types_arr(data)
            
            if all_datatypes is None or len(all_datatypes) == 0:
                raise ExportException("Could not export a data type group with no data")    
            
            zip_file = os.path.join(export_folder, download_file_name)
            
            # Now process each data type from group and add it to ZIP file
            operation_folders = []
            for data_type in all_datatypes:
                operation_folder = files_helper.get_operation_folder(project.name, data_type.fk_from_operation)
                operation_folders.append(operation_folder)
                
            # Create ZIP archive    
            files_helper.zip_folders(zip_file, operation_folders, self.OPERATION_FOLDER_PREFIX)
                        
            return download_file_name, zip_file, True

        else:
            project_folder = files_helper.get_project_folder(project)
            data_file = os.path.join(project_folder, data.get_storage_file_path())

            return download_file_name, data_file, False
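

# Hedged caller-side sketch for export(): the third element of the returned
# tuple tells the caller whether the path points at a freshly built ZIP (for a
# DataTypeGroup) or at a plain storage file that must not be deleted.
def _demo_download_and_cleanup(exporter, datatype, export_folder, project):
    file_name, file_path, is_zip = exporter.export(datatype, export_folder, project)
    # ... stream file_path to the client here ...
    if is_zip:
        os.remove(file_path)  # the ZIP was built only for this download
    return file_name
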
    def export_project(self, project, optimize_size=False):
        """
        Given a project root and the TVB storage_path, create a ZIP
        ready for export.
        :param project: project object which identifies project to be exported
        """
        if project is None:
            raise ExportException("Please provide project to be exported")

        files_helper = FilesHelper()
        project_folder = files_helper.get_project_folder(project)
        project_datatypes = self._gather_project_datatypes(project, optimize_size)
        to_be_exported_folders = []
        considered_op_ids = []
        min_dt_date = datetime.now()

        if optimize_size:
            ## take only the DataTypes with the visibility flag set ON
            for dt in project_datatypes:
                if dt[KEY_OPERATION_ID] not in considered_op_ids:
                    to_be_exported_folders.append({'folder': files_helper.get_project_folder(project,
                                                                                             str(dt[KEY_OPERATION_ID])),
                                                   'archive_path_prefix': str(dt[KEY_OPERATION_ID]) + os.sep})
                    considered_op_ids.append(dt[KEY_OPERATION_ID])
                    if min_dt_date > dt[KEY_DT_DATE]:
                        min_dt_date = dt[KEY_DT_DATE]
        else:
            to_be_exported_folders.append({'folder': project_folder,
                                           'archive_path_prefix': '', 'exclude': ["TEMP"]})
            if project_datatypes:
                min_dt_date = min([dt[KEY_DT_DATE] for dt in project_datatypes])

        # Compute path and name of the zip file
        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, project.name, self.ZIP_FILE_EXTENSION)

        export_folder = self._build_data_export_folder(project)
        result_path = os.path.join(export_folder, zip_file_name)

        with TvbZip(result_path, "w") as zip_file:
            # Pack project [filtered] content into a ZIP file:
            LOG.debug("Done preparing, now we will write folders " + str(len(to_be_exported_folders)))
            LOG.debug(str(to_be_exported_folders))
            for pack in to_be_exported_folders:
                zip_file.write_folder(**pack)
            LOG.debug("Done exporting files, now we will write the burst configurations...")
            self._export_bursts(project, project_datatypes, zip_file)
            LOG.debug("Done exporting burst configurations, now we will export linked DTs")
            self._export_linked_datatypes(project, zip_file, min_dt_date)
            ## Make sure the Project.xml file gets copied:
            if optimize_size:
                LOG.debug("Done linked, now we write the project xml")
                zip_file.write(files_helper.get_project_meta_file_path(project.name), files_helper.TVB_PROJECT_FILE)
            LOG.debug("Done, closing")

        return result_path
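

# Hedged usage sketch: export a whole project. With optimize_size=True only
# the operations owning visible DataTypes are packed; otherwise the full
# project folder (minus TEMP) goes into the archive.
def _demo_export_project(exporter, project):
    return exporter.export_project(project, optimize_size=True)
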
    def _update_datatype_disk_size(self, file_path):
        """
        Computes and updates the disk_size attribute of the DataType for which the given file was created.
        """
        file_handler = FilesHelper()
        datatype_gid = self._get_manager(file_path).get_gid_attribute()
        datatype = dao.get_datatype_by_gid(datatype_gid)

        if datatype is not None:
            datatype.disk_size = file_handler.compute_size_on_disk(file_path)
            dao.store_entity(datatype)
def initialize_storage():
    """
    Create Projects storage root folder in case it does not exist.
    """
    try:
        helper = FilesHelper()
        helper.check_created()
    except FileStructureException:
        # Do nothing, because we do not have any UI to display the exception
        logger = get_logger("tvb.core.services.initialize_storage")
        logger.exception("Could not make sure the root folder exists!")
def get_gifty_file_name(project_id, desired_name):
    """
    Compute a non-existent file name in the TEMP folder of
    the given project.
    Try desired_name first; if it already exists, append a number.
    """
    if project_id:
        project = dao.get_project_by_id(project_id)
        file_helper = FilesHelper()
        temp_path = file_helper.get_project_folder(project, FilesHelper.TEMP_FOLDER)
        return get_unique_file_name(temp_path, desired_name)[0]
    return get_unique_file_name(cfg.TVB_STORAGE, desired_name)[0]
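
# Hedged usage sketch: reserve a fresh file name in a project's TEMP folder
# ('surface.gii' is an illustrative name, suffixed automatically if taken).
def _demo_temp_file_name(project_id):
    return get_gifty_file_name(project_id, 'surface.gii')
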
def _adapt_epileptor_simulations():
    """
    Previous simulations on the EpileptorPermittivityCoupling model should be converted to use the Epileptor model.
    As the parameters of the two models have different ranges and defaults, we do not translate parameters;
    we only set Epileptor as the model instead of EpileptorPermittivityCoupling, and leave the model params at their defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        session.add_all(all_bursts.values())
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
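

# The core transformation of _adapt_epileptor_simulations in isolation: swap
# the model name inside a parsed parameters dict and re-serialize it (a hedged,
# plain-json sketch of the update applied above).
import json

def _demo_swap_model(parameters_json):
    params = json.loads(parameters_json)
    if params.get("model") == "EpileptorPermittivityCoupling":
        params["model"] = "Epileptor"
    return json.dumps(params)
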
    def _build_data_export_folder(self, data):
        """
        This method computes the folder where the results of an export operation will be
        stored for a while (e.g. until the download is done, or for 1 day).
        """
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day, now.hour,
                                             now.minute, now.second, now.microsecond)
        tmp_str = date_str + "@" + data.gid
        data_export_folder = os.path.join(self.export_folder, tmp_str)
        files_helper = FilesHelper()
        files_helper.check_created(data_export_folder)

        return data_export_folder
def introduce_unmapped_node(out_pth, conn_zip_pth):
    """
    Creates a connectivity with one extra node in the first position.
    This node represents the unmapped regions.
    :param out_pth: destination path
    :param conn_zip_pth: connectivity zip path.
    """
    fh = FilesHelper()
    tmp_pth = os.path.splitext(out_pth)[0]
    fh.check_created(tmp_pth)
    files = fh.unpack_zip(conn_zip_pth, tmp_pth)
    for file_name in files:
        file_name_low = file_name.lower()
        if "centres" in file_name_low:
            with open(file_name) as f:
                lines = f.readlines()
            with open(file_name, "w") as f:
                f.write("None  0.000000  0.000000  0.000000\n")
                f.writelines(lines)
        elif "weight" in file_name_low or "tract" in file_name_low:
            with open(file_name) as f:
                lines = f.readlines()
                nr_regions = len(lines)
            with open(file_name, "w") as f:
                f.write("   0.0000000e+00" * (nr_regions + 1) + "\n")
                for line in lines:
                    f.write("   0.0000000e+00" + line)
        else:
            raise Exception("this transformation does not support the file " + file_name)

    fh.zip_folder(out_pth, tmp_pth)
    fh.remove_folder(tmp_pth)
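

# Hedged usage sketch: produce a connectivity archive that is one node larger,
# with the synthetic "unmapped" region in the first position.
def _demo_add_unmapped_node(source_zip, destination_zip):
    # source_zip: an existing TVB connectivity ZIP;
    # destination_zip: where the enlarged archive is written.
    introduce_unmapped_node(destination_zip, source_zip)
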
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        ad = model.Algorithm(SIMULATOR_MODULE, SIMULATOR_CLASS, alg_category.id)
        self.algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        if self.algorithm is None:
            self.algorithm = dao.store_entity(ad)

        # Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED)
        self.operation = dao.store_entity(operation)
    def _store_imported_datatypes_in_db(self, project, all_datatypes, dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_already_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_already_in_tvb:
                # Compute disk size. Similar to ABCAdapter._capture_operation_results.
                # No need to close the h5 as we have not written to it.
                associated_file = os.path.join(datatype.storage_path, datatype.get_storage_file_name())
                datatype.disk_size = FilesHelper.compute_size_on_disk(associated_file)

                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_already_in_tvb.id], project.id)
    def setUp(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        alg_group = model.AlgorithmGroup("test_module1", "classname1", alg_category.id)
        dao.store_entity(alg_group)
        algorithm = model.Algorithm(alg_group.id, 'id', name='', req_data='', param_name='', output='')
        self.algorithm = dao.store_entity(algorithm)

        # Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(operation)
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
    def export_project(self, project):
        """
        Given a project root and the TVB storage_path, create a ZIP
        ready for export.
        :param project: project object which identifies project to be exported
        """
        if project is None:
            raise ExportException("Please provide project to be exported")

        files_helper = FilesHelper()
        project_folder = files_helper.get_project_folder(project)
        
        bursts_dict = {}
        datatype_burst_mapping = {}
        bursts_count = dao.get_bursts_for_project(project.id, count=True)
        for start_idx in range(0, bursts_count, BURST_PAGE_SIZE):
            bursts = dao.get_bursts_for_project(project.id, page_start=start_idx, page_end=start_idx + BURST_PAGE_SIZE)
            for burst in bursts:
                self._build_burst_export_dict(burst, bursts_dict)
                
        datatypes_count = dao.get_datatypes_for_project(project.id, count=True)
        for start_idx in range(0, datatypes_count, DATAYPES_PAGE_SIZE):
            datatypes = dao.get_datatypes_for_project(project.id, page_start=start_idx,
                                                      page_end=start_idx + DATAYPES_PAGE_SIZE)
            for datatype in datatypes:
                datatype_burst_mapping[datatype.gid] = datatype.fk_parent_burst

        # Compute path and name of the zip file
        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, project.name, self.ZIP_FILE_EXTENSION)
        
        export_folder = self._build_data_export_folder(project)    
        result_path = os.path.join(export_folder, zip_file_name) 
        
        bursts_file_name = os.path.join(project_folder, BURST_INFO_FILE)
        burst_info = {BURSTS_DICT_KEY: bursts_dict,
                      DT_BURST_MAP: datatype_burst_mapping}
        with open(bursts_file_name, 'w') as bursts_file:
            bursts_file.write(json.dumps(burst_info))
            
        # pack project content into a ZIP file
        result_zip = files_helper.zip_folder(result_path, project_folder)
        
        # remove this file, since we only want it in the export archive
        os.remove(bursts_file_name)
        return result_zip
    def _export_linked_datatypes(self, project, zip_file):
        files_helper = FilesHelper()
        linked_paths = self._get_linked_datatypes_storage_path(project)

        if not linked_paths:
            # do not export an empty operation
            return

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        op = model.Operation(None, project.id, algo.id, '')
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model.STATUS_FINISHED)

        # write operation.xml to disk
        files_helper.write_operation_metadata(op)
        op_folder = files_helper.get_operation_folder(op.project.name, op.id)
        operation_xml = files_helper.get_operation_meta_file_path(op.project.name, op.id)
        op_folder_name = os.path.basename(op_folder)

        # add operation.xml
        zip_file.write(operation_xml, op_folder_name + '/' + os.path.basename(operation_xml))

        # add linked datatypes to archive in the import operation
        for pth in linked_paths:
            zip_pth = op_folder_name + '/' + os.path.basename(pth)
            zip_file.write(pth, zip_pth)

        # remove the operation folder, since we only want its files in the export archive
        files_helper.remove_folder(op_folder)
    def __init__(self):
        # Will be populated with keys from DataTypeMetaData
        self.meta_data = {DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}
        self.file_handler = FilesHelper()
        self.storage_path = '.'
        # Will be populated with the current running operation's identifier
        self.operation_id = None
        self.user_id = None
        self.log = get_logger(self.__class__.__module__)
        self.tree_manager = InputTreeManager()
def search_and_export_ts(project_id, export_folder=os.path.join("~", "TVB")):

    #### This is the simplest filter you could write: filter an entity by Subject
    filter_connectivity = FilterChain(fields=[FilterChain.datatype + '.subject'],
                                      operations=["=="],
                                      values=[DataTypeMetaData.DEFAULT_SUBJECT])

    connectivities = _retrieve_entities_by_filters(Connectivity, project_id, filter_connectivity)


    #### A more complex filter: by linked entity (connectivity), BOLD monitor, sampling, operation param:
    filter_timeseries = FilterChain(fields=[FilterChain.datatype + '._connectivity',
                                            FilterChain.datatype + '._title',
                                            FilterChain.datatype + '._sample_period',
                                            FilterChain.datatype + '._sample_rate',
                                            FilterChain.operation + '.parameters'
                                            ],
                                    operations=["==", "like", ">=", "<=", "like"],
                                    values=[connectivities[0].gid,
                                            "Bold",
                                            "500", "0.002",
                                            '"conduction_speed": "3.0"'
                                            ]
                                    )

    #### If you want to filter another type of TS, change the class below:
    #### instead of TimeSeriesRegion use TimeSeriesEEG, or TimeSeriesSurface, etc.
    timeseries = _retrieve_entities_by_filters(TimeSeriesRegion, project_id, filter_timeseries)

    for ts in timeseries:
        print("=============================")
        print(ts.summary_info)
        print(" Original file: " + str(ts.get_storage_file_path()))
        destination_file = os.path.expanduser(os.path.join(export_folder, ts.get_storage_file_name()))
        FilesHelper.copy_file(ts.get_storage_file_path(), destination_file)
        if os.path.exists(destination_file):
            print(" TS file copied at: " + destination_file)
        else:
            print(" Some error happened when trying to copy at destination folder!!")
    def _process_input_zip(self, zip_arch, result_folder, remote_prefix, 
                           file_name_base, expected_pairs, fix_number=True):
        """
        Read entries in uploaded ZIP.
        Raise an exception in case HDR/IMG pairs are not matched or the "expected_pairs" count is not met.
        :returns: string with HDR list (to be passed to DTI pipeline).
        """
        
        hdr_files = []
        for file_name in zip_arch.namelist():
            if not file_name.startswith(file_name_base) or file_name.endswith("/"):
                continue
            if file_name.endswith(".hdr"):
                pair_img = file_name.replace(".hdr", ".img")
                if pair_img not in zip_arch.namelist():
                    raise ConnectException("Could not find pair for HDR file :" + str(file_name))
                
                new_file_name = os.path.join(result_folder, file_name_base + str(len(hdr_files)) + ".hdr")
                src = zip_arch.open(file_name, 'rU')
                FilesHelper.copy_file(src, new_file_name)
                hdr_files.append(os.path.join(remote_prefix, os.path.split(new_file_name)[1]))
                new_file_name = new_file_name.replace(".hdr", ".img")
                src = zip_arch.open(pair_img, 'rU')
                FilesHelper.copy_file(src, new_file_name)
                
            elif not file_name.endswith(".img"):
                self.logger.warning("Ignored file :" + str(file_name))
            
        if len(hdr_files) < expected_pairs or (fix_number and len(hdr_files) > expected_pairs):
            raise ConnectException("Invalid number of files:" + str(len(hdr_files)) +
                                   " expected:" + str(expected_pairs))

        result = ""
        for hdr_name in hdr_files:
            result = result + hdr_name + " "
        return result
class ProjectServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """    
    
    def setUp(self):
        """
        Reset the database before each test.
        """
        config.EVENTS_FOLDER = ''
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
    
    
    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()
    
    
    def test_create_project_happy_flow(self):
        """
        Standard flow for creating a new project.
        """
        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        TestFactory.create_project(self.test_user, 'test_project', users=[user1.id, user2.id])
        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(resulting_projects), 1, "Project with valid data not inserted!")  
        project = resulting_projects[0]
        if project.name == "test_project":
            self.assertEqual(project.description, "description", "Description does not match.")
            users_for_project = dao.get_members_of_project(project.id)
            for user in users_for_project:
                self.assertTrue(user.id in [user1.id, user2.id], "Users not stored properly.")
        self.assertTrue(os.path.exists(os.path.join(TvbProfile.current.TVB_STORAGE, FilesHelper.PROJECTS_FOLDER,
                                                    "test_project")), "Folder for project was not created")
   
   
    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.assertRaises(ProjectServiceException, self.project_service.store_project, 
                          self.test_user, True, None, **data)
   
   
    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        
        edited_data = dict(name="test_project", description="test_description", users=[])
        edited_project = self.project_service.store_project(self.test_user, False, selected_project.id, **edited_data)
        self.assertFalse(os.path.exists(proj_root), "Previous folder not deleted")
        proj_root = self.structure_helper.get_project_folder(edited_project)
        self.assertTrue(os.path.exists(proj_root), "New folder not created!")
        self.assertNotEqual(selected_project.name, edited_project.name, "Project was not changed!")
        
             
    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        data = dict(name="test_project", description="test_description", users=[])
        self.assertRaises(ProjectServiceException, self.project_service.store_project,
                          self.test_user, False, 99, **data)

    
    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        inserted_project = TestFactory.create_project(self.test_user, 'test_project')
        self.assertTrue(self.project_service.find_project(inserted_project.id) is not None, "Project not found !")
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(inserted_project.id)
        self.assertEqual(dao_returned_project.id, service_returned_project.id,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.name, service_returned_project.name,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.description, service_returned_project.description,
                         "Data returned from service is different from data returned by DAO.")        
        self.assertEqual(dao_returned_project.members, service_returned_project.members,
                         "Data returned from service is different from data returned by DAO.")
                      
        
    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.project_service.store_project(self.test_user, True, None, **data)
        self.assertRaises(ProjectServiceException, self.project_service.find_project, 99)  
        
        
    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset properly!")
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        for project in projects:
            self.assertNotEqual(project.name, "test_proj3", "This project should not have been retrieved")
            
            
    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members, getting projects for admin and for any of
        the members should return one.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user, 'Testproject', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        
        
    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1, 'TestProject1', users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3, 'TestProject3', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member3.id, 1)[0]
        self.assertEqual(len(projects), 2, "Projects not retrieved properly!")
        
        
    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
            projects, pages = self.project_service.retrieve_projects_for_user(current_user.id, expected_pages)
            self.assertEqual(len(projects), exp_proj_per_page, "Projects not retrieved properly! Expected:" +
                             str(exp_proj_per_page) + " but got:" + str(len(projects)))
            self.assertEqual(pages, expected_pages, "Pages not retrieved properly!")

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'): 
                shutil.rmtree(full_path)
        
            
    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page of projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')
        
        
    def test_retrieve_projects_and_del(self):
        """
        Test retrieving the second page of projects, then deleting one project and re-checking pagination.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(TestFactory.create_project(self.test_user, 'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, (PROJECTS_PAGE_SIZE + 1) / PROJECTS_PAGE_SIZE + 1, 'Wrong number of pages')
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), 0, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)
        self.assertEqual(len(projects), PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')


    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, projects[0].disk_size)
        self.assertEqual('0.0 KiB', projects[0].disk_size_human)


    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1, 'testSubject', zip_path)

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        TestFactory.import_cff(test_user=self.test_user, test_project=project2)

        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertNotEqual(projects[0].disk_size, projects[1].disk_size, "projects should have different size")

        for project in projects:
            self.assertNotEqual(0, project.disk_size)
            self.assertNotEqual('0.0 KiB', project.disk_size_human)

            prj_folder = self.structure_helper.get_project_folder(project)
            actual_disk_size = self.compute_recursive_h5_disk_usage(prj_folder)[0]

            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (actual_disk_size, project.disk_size)
            self.assertTrue(ratio < 1.4, msg)


    def test_get_linkable_projects(self):
        """
        Test for retrieving the link-able projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1, 'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user, test_project=test_proj[0])

        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1", 
                                                   state="test_state", operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]
        self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)    
    
    
    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user, 'test_proj')
        project_root = self.structure_helper.get_project_folder(inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!") 
        self.assertTrue(os.path.exists(project_root), "Something failed at insert time!")
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 0, "Project was not deleted!")  
        self.assertFalse(os.path.exists(project_root), "Root folder not deleted!")  
        
        
    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project, giving a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!") 
        self.assertRaises(ProjectServiceException, self.project_service.remove_project, 99)   
    

    @staticmethod
    def _create_value_wrapper(test_user, test_project=None):
        """
        Creates a ValueWrapper dataType, and the associated parent Operation.
        This is also used in ProjectStructureTest.
        """
        if test_project is None:
            test_project = TestFactory.create_project(test_user, 'test_proj')
        operation = TestFactory.create_operation(test_user=test_user, test_project=test_project)
        value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
        value_wrapper.type = "ValueWrapper"
        value_wrapper.module = "tvb.datatypes.mapped_values"
        value_wrapper.subject = "John Doe"
        value_wrapper.state = "RAW_STATE"
        value_wrapper.set_operation_id(operation.id)
        adapter_instance = StoreAdapter([value_wrapper])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        all_value_wrappers = FlowService().get_available_datatypes(test_project.id,
                                                                   "tvb.datatypes.mapped_values.ValueWrapper")[0]
        if len(all_value_wrappers) != 1:
            raise Exception("Should be only one value wrapper.")
        result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
        return test_project, result_vw.gid, operation.gid

     
    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {DataTypeMetaData.KEY_SUBJECT: "subject", DataTypeMetaData.KEY_STATE: "state"}
        for key, value in expected_meta_data.items():
            if key in mapp_keys:
                self.assertEqual(value, getattr(new_datatype, mapp_keys[key]))
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    ## We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(model.OperationGroup, op_group)[0]
                    self.assertEqual(value, op_group.name)
                else:
                    self.assertEqual(value, new_datatype.parent_operation.user_group) 
    
    
    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")
        
        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        self.assertTrue(os.path.exists(op_folder))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        ### Validate that no more files are created than needed.
        
        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there
        
        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        self.assertTrue(dao.get_datatype_by_gid(gid) is None)  
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there
        
        
    def test_update_meta_data_simple(self):
        """
        Test the new update metaData for a simple data that is not part of a group.
        """
        inserted_project, gid, _ = self._create_value_wrapper(self.test_user)
        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "second_state",
                         DataTypeOverlayDetails.CODE_GID: gid,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'}
        self.project_service.update_metadata(new_meta_data)
        
        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)
        
        op_path = FilesHelper().get_operation_meta_file_path(inserted_project.name, new_datatype.parent_operation.id)
        op_meta = XMLReader(op_path).read_metadata()
        self.assertEqual(op_meta['user_group'], 'new user group', 'UserGroup not updated!')


    def test_update_meta_data_group(self):
        """
        Test the new update metaData for a group of dataTypes.
        """
        datatypes, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")

        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "updated_state",
                         DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: group_id,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'}
        self.project_service.update_metadata(new_meta_data)  
          
        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            self.assertEqual(group_id, new_datatype.parent_operation.fk_operation_group)
            new_group = dao.get_generic_entity(model.OperationGroup, group_id)[0]
            self.assertEqual(new_group.name, "newGroupName") 
            self.__check_meta_data(new_meta_data, new_datatype)
            
    
    def _create_datatypes(self, dt_factory, nr_of_dts):
        for idx in range(nr_of_dts):
            dt = Datatype1()
            dt.row1 = "value%i" % (idx,)
            dt.row2 = "value%i" % (idx + 1,)
            dt_factory._store_datatype(dt)
            
            
    def test_retrieve_project_full(self):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """
        dt_factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory, 3)
        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(dt_factory.project.id)
        self.assertEqual(ops_nr, 1, "DataType Factory should only use one operation to store all its datatypes.")
        self.assertEqual(pages_no, 1, "A single page of operations should be enough for the factory's datatypes.")
        resulted_dts = operations[0]['results']
        self.assertEqual(len(resulted_dts), 3, "3 datatypes should be created.")
        
        
    def test_get_project_structure(self):
        """
        Tests that the project structure is as expected and contains all datatypes.
        """
        SELF_DTS_NUMBER = 3
        dt_factory_1 = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
        dt_group = dt_factory_1.create_datatype_group()

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_factory_2 = datatypes_factory.DatatypesFactory()
        dt_to_link = dt_factory_2.create_simple_datatype()
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, dt_factory_1.project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(dt_factory_1.project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(dt_factory_1.project, None, DataTypeMetaData.KEY_STATE,
                                                               DataTypeMetaData.KEY_SUBJECT, None)

        self.assertEqual(len(expected_links) + SELF_DTS_NUMBER + 2, len(dts_in_tree), "invalid number of nodes in tree")
        self.assertFalse(link_gr.gid in dts_in_tree, "DT_group where a single DT is linked is not expected.")
        self.assertTrue(dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!")
        self.assertTrue(dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!")

        project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                self.assertFalse(dt.gid in node_json, "DTs part of a group should not appear in the tree JSON!")
                self.assertFalse(dt.gid in dts_in_tree, "DTs part of a group should not appear in the raw tree data!")
            else:
                self.assertTrue(dt.gid in node_json, "Simple DTs and DT_Groups should appear in the tree JSON!")
                self.assertTrue(dt.gid in dts_in_tree, "Simple DTs and DT_Groups should appear in the raw tree data!")

        for link_gid in expected_links:
            self.assertTrue(link_gid in node_json, "Expected Link not present")
            self.assertTrue(link_gid in dts_in_tree, "Expected Link not present")
Example #28
class AlgorithmService(object):
    """
    Service Layer for Algorithms manipulation (e.g. find all Uploaders, Filter algo by category, etc)
    """
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    @staticmethod
    def get_category_by_id(identifier):
        """ Pass to DAO the retrieve of category by ID operation."""
        return dao.get_category_by_id(identifier)

    @staticmethod
    def get_raw_categories():
        """:returns: AlgorithmCategory list of entities that have results in RAW state (Creators/Uploaders)"""
        return dao.get_raw_categories()

    @staticmethod
    def get_visualisers_category():
        """Retrieve all Algorithm categories, with display capability"""
        result = dao.get_visualisers_categories()
        if not result:
            raise ValueError("View Category not found!!!")
        return result[0]

    @staticmethod
    def get_algorithm_by_identifier(ident):
        """
        Retrieve Algorithm entity by ID.
        Return None, if ID is not found in DB.
        """
        return dao.get_algorithm_by_id(ident)

    @staticmethod
    def get_operation_numbers(proj_id):
        """ Count total number of operations started for current project. """
        return dao.get_operation_numbers(proj_id)

    def _prepare_dt_display_name(self, dt_index, dt):
        # dt is a result of the get_values_of_datatype function
        db_dt = dao.get_generic_entity(dt_index, dt[2], "gid")
        display_name = db_dt[0].display_name
        display_name += ' - ' + (dt[3] or "None ")  # Subject
        if dt[5]:
            display_name += ' - From: ' + str(dt[5])
        else:
            display_name += utils.date2string(dt[4])
        if dt[6]:
            display_name += ' - ' + str(dt[6])
        display_name += ' - ID:' + str(dt[0])

        return display_name
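
    # Illustrative sketch (hypothetical values; the row layout is inferred from
    # the indexing above, where a get_values_of_datatype row looks roughly like
    # (id, ..., gid, subject, create_date, linked_from, tag)):
    #   dt = (42, None, 'some-gid', 'John Doe', datetime.now(), None, 'tag1')
    #   would yield a display name similar to:
    #   "<display_name> - John Doe - <From/date> - tag1 - ID:42"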

    def fill_selectfield_with_datatypes(self,
                                        field,
                                        project_id,
                                        extra_conditions=None):
        # type: (TraitDataTypeSelectField, int, list) -> None
        filtering_conditions = FilterChain()
        filtering_conditions += field.conditions
        filtering_conditions += extra_conditions
        datatypes, _ = dao.get_values_of_datatype(project_id,
                                                  field.datatype_index,
                                                  filtering_conditions)
        datatype_options = []
        for datatype in datatypes:
            display_name = self._prepare_dt_display_name(
                field.datatype_index, datatype)
            datatype_options.append((datatype, display_name))
        field.datatype_options = datatype_options

    def _fill_form_with_datatypes(self,
                                  form,
                                  project_id,
                                  extra_conditions=None):
        for form_field in form.trait_fields:
            if isinstance(form_field, TraitDataTypeSelectField):
                self.fill_selectfield_with_datatypes(form_field, project_id,
                                                     extra_conditions)
        return form

    def prepare_adapter_form(self,
                             adapter_instance=None,
                             form_instance=None,
                             project_id=None,
                             extra_conditions=None):
        # type: (ABCAdapter, ABCAdapterForm, int, []) -> ABCAdapterForm
        form = None
        if form_instance is not None:
            form = form_instance
        elif adapter_instance is not None:
            form = adapter_instance.get_form()()

        if form is None:
            raise OperationException("Cannot prepare None form")

        form = self._fill_form_with_datatypes(form, project_id,
                                              extra_conditions)
        return form
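
    # Hedged usage sketch (names are illustrative; assumes an adapter whose
    # form contains TraitDataTypeSelectField fields):
    #   form = AlgorithmService().prepare_adapter_form(
    #       adapter_instance=my_adapter, project_id=project.id)
    #   # the select fields now list the project's compatible datatypes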

    def _prepare_upload_post_data(self, form, post_data, project_id):
        for form_field in form.trait_fields:
            if isinstance(form_field,
                          TraitUploadField) and form_field.name in post_data:
                field = post_data[form_field.name]
                file_name = None
                if hasattr(field, 'file') and field.file is not None:
                    project = dao.get_project_by_id(project_id)
                    temporary_storage = self.file_helper.get_project_folder(
                        project, self.file_helper.TEMP_FOLDER)
                    try:
                        uq_name = utils.date2string(datetime.now(),
                                                    True) + '_' + str(0)
                        file_name = TEMPORARY_PREFIX + uq_name + '_' + field.filename
                        file_name = os.path.join(temporary_storage, file_name)

                        with open(file_name, 'wb') as file_obj:
                            file_obj.write(field.file.read())
                    except Exception as excep:
                        # TODO: is this handled properly?
                        self.file_helper.remove_files([file_name])
                        excep.message = 'Could not continue: Invalid input files'
                        raise excep
                post_data[form_field.name] = file_name

    def fill_adapter_form(self, adapter_instance, post_data, project_id):
        # type: (ABCAdapter, dict, int) -> ABCAdapterForm
        form = self.prepare_adapter_form(adapter_instance=adapter_instance,
                                         project_id=project_id)
        if isinstance(form, ABCUploaderForm):
            self._prepare_upload_post_data(form, post_data, project_id)

        if 'fill_defaults' in post_data:
            form.fill_from_post_plus_defaults(post_data)
        else:
            form.fill_from_post(post_data)

        return form
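
    # Note: for uploader forms, _prepare_upload_post_data first copies each
    # uploaded file into the project's TEMP folder and replaces the field's
    # value in post_data with the resulting temporary file path.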

    def prepare_adapter(self, stored_adapter):

        adapter_module = stored_adapter.module
        adapter_name = stored_adapter.classname
        try:
            # Prepare Adapter Interface, by populating with existent data,
            # in case of a parameter of type DataType.
            adapter_instance = ABCAdapter.build_adapter(stored_adapter)
            return adapter_instance
        except Exception:
            self.logger.exception('Not found:' + adapter_name + ' in:' +
                                  adapter_module)
            raise OperationException("Could not prepare " + adapter_name)

    @staticmethod
    def get_algorithm_by_module_and_class(module, classname):
        """
        Get the db entry from the algorithm table for the given module and 
        class.
        """
        return dao.get_algorithm_by_module(module, classname)

    @staticmethod
    def create_link(data_ids, project_id):
        """
        For a list of dataType IDs and a project id create all the required links.
        """
        for data in data_ids:
            link = Links(data, project_id)
            dao.store_entity(link)

    @staticmethod
    def remove_link(dt_id, project_id):
        """
        Remove the link from the datatype given by dt_id to project given by project_id.
        """
        link = dao.get_link(dt_id, project_id)
        if link is not None:
            dao.remove_entity(Links, link.id)

    @staticmethod
    def get_upload_algorithms():
        """
        :return: List of StoredAdapter entities
        """
        categories = dao.get_uploader_categories()
        categories_ids = [categ.id for categ in categories]
        return dao.get_adapters_from_categories(categories_ids)

    @staticmethod
    def get_analyze_groups():
        """
        :return: list of AlgorithmTransientGroup entities
        """
        categories = dao.get_launchable_categories(elimin_viewers=True)
        categories_ids = [categ.id for categ in categories]
        stored_adapters = dao.get_adapters_from_categories(categories_ids)

        groups_list = []
        for adapter in stored_adapters:
            # For adapters without a group, fall back to the adapter's own name and description
            group = AlgorithmTransientGroup(
                adapter.group_name or adapter.displayname,
                adapter.group_description or adapter.description)
            group = AlgorithmService._find_group(groups_list, group)
            group.children.append(adapter)
        return categories[0], groups_list

    @staticmethod
    def _find_group(groups_list, new_group):
        for i in range(len(groups_list) - 1, -1, -1):
            current_group = groups_list[i]
            if current_group.name == new_group.name and current_group.description == new_group.description:
                return current_group
        # Not found in list
        groups_list.append(new_group)
        return new_group
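
    # Behaviour sketch: groups are de-duplicated by (name, description).
    #   groups = []
    #   g1 = AlgorithmService._find_group(groups, AlgorithmTransientGroup('A', 'desc'))
    #   g2 = AlgorithmService._find_group(groups, AlgorithmTransientGroup('A', 'desc'))
    #   # g1 is g2 and len(groups) == 1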

    def get_visualizers_for_group(self, dt_group_gid):

        categories = dao.get_visualisers_categories()
        return self._get_launchable_algorithms(dt_group_gid, categories)[1]

    def get_launchable_algorithms(self, datatype_gid):
        """
        :param datatype_gid: Filter only algorithms compatible with this GUID
        :return: dict(category_name: List AlgorithmTransientGroup)
        """
        categories = dao.get_launchable_categories()
        datatype_instance, filtered_adapters, has_operations_warning = self._get_launchable_algorithms(
            datatype_gid, categories)

        categories_dict = dict()
        for c in categories:
            categories_dict[c.id] = c.displayname

        return self._group_adapters_by_category(
            filtered_adapters, categories_dict), has_operations_warning

    def _get_launchable_algorithms(self, datatype_gid, categories):
        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        return self.get_launchable_algorithms_for_datatype(
            datatype_instance, categories)

    def get_launchable_algorithms_for_datatype(self, datatype, categories):
        data_class = datatype.__class__
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            # from tvb.basic.traits.types_mapped import MappedType

            if issubclass(
                    one_class, DataType
            ) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)

        self.logger.debug("Searching in categories: " + str(categories) +
                          " for classes " + str(all_compatible_classes))
        categories_ids = [categ.id for categ in categories]
        launchable_adapters = dao.get_applicable_adapters(
            all_compatible_classes, categories_ids)

        filtered_adapters = []
        has_operations_warning = False
        for stored_adapter in launchable_adapters:
            filter_chain = FilterChain.from_json(
                stored_adapter.datatype_filter)
            try:
                if not filter_chain or filter_chain.get_python_filter_equivalent(
                        datatype):
                    filtered_adapters.append(stored_adapter)
            except (TypeError, InvalidFilterChainInput):
                self.logger.exception("Could not evaluate filter on " +
                                      str(stored_adapter))
                has_operations_warning = True

        return datatype, filtered_adapters, has_operations_warning
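
    # Note: getmro() walks the datatype's full class hierarchy, so adapters
    # declared applicable to a parent DataType class are also offered for any
    # of its subclasses.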

    def _group_adapters_by_category(self, stored_adapters, categories):
        """
        :param stored_adapters: list StoredAdapter
        :return: dict(category_name: List AlgorithmTransientGroup), empty groups all in the same AlgorithmTransientGroup
        """
        categories_dict = dict()
        for adapter in stored_adapters:
            category_name = categories.get(adapter.fk_category)
            if category_name in categories_dict:
                groups_list = categories_dict.get(category_name)
            else:
                groups_list = []
                categories_dict[category_name] = groups_list
            group = AlgorithmTransientGroup(adapter.group_name,
                                            adapter.group_description)
            group = self._find_group(groups_list, group)
            group.children.append(adapter)
        return categories_dict

    @staticmethod
    def get_generic_entity(entity_type, filter_value, select_field):
        return dao.get_generic_entity(entity_type, filter_value, select_field)

    ##########################################################################
    ######## Methods below are for MeasurePoint selections ###################
    ##########################################################################

    @staticmethod
    def get_selections_for_project(project_id, datatype_gid):
        """
        Retrieve from DB the saved selections for the current project. If a selection
        does not have all its labels among the labels of the given connectivity,
        it will not be returned.
        :returns: List of ConnectivitySelection entities.
        """
        return dao.get_selections_for_project(project_id, datatype_gid)

    @staticmethod
    def save_measure_points_selection(ui_name, selected_nodes, datatype_gid,
                                      project_id):
        """
        Store in DB a ConnectivitySelection.
        """
        select_entities = dao.get_selections_for_project(
            project_id, datatype_gid, ui_name)

        if select_entities:
            # if a selection with this name already exists, update it:
            select_entity = select_entities[0]
            select_entity.selected_nodes = selected_nodes
        else:
            select_entity = MeasurePointsSelection(ui_name, selected_nodes,
                                                   datatype_gid, project_id)

        dao.store_entity(select_entity)
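
    # Hedged usage sketch (illustrative arguments): calling this twice with the
    # same ui_name updates the stored selection instead of duplicating it.
    #   AlgorithmService.save_measure_points_selection(
    #       'my-nodes', '[0, 1, 2]', connectivity_gid, project.id)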

    ##########################################################################
    ##########    Below are PSE Filter specific methods     ##################
    ##########################################################################

    @staticmethod
    def get_stored_pse_filters(datatype_group_gid):
        return dao.get_stored_pse_filters(datatype_group_gid)

    @staticmethod
    def save_pse_filter(ui_name, datatype_group_gid, threshold_value,
                        applied_on):
        """
        Store in DB a PSE filter.
        """
        select_entities = dao.get_stored_pse_filters(datatype_group_gid,
                                                     ui_name)

        if select_entities:
            # when the UI name is already in DB, update the existing entity
            select_entity = select_entities[0]
            select_entity.threshold_value = threshold_value
            select_entity.applied_on = applied_on  # the filter target, i.e. whether it applies to size or color
        else:
            select_entity = StoredPSEFilter(ui_name, datatype_group_gid,
                                            threshold_value, applied_on)

        dao.store_entity(select_entity)
Example #29
    def test_get_filtered_datatypes(self):
        """
        Test the filter function when retrieving dataTypes.
        """
        #Create some test operations
        start_dates = [
            datetime.now(),
            datetime.strptime("08-06-2010", "%m-%d-%Y"),
            datetime.strptime("07-21-2010", "%m-%d-%Y"),
            datetime.strptime("05-06-2010", "%m-%d-%Y"),
            datetime.strptime("07-21-2011", "%m-%d-%Y")
        ]
        end_dates = [
            datetime.now(),
            datetime.strptime("08-12-2010", "%m-%d-%Y"),
            datetime.strptime("08-12-2010", "%m-%d-%Y"),
            datetime.strptime("08-12-2011", "%m-%d-%Y"),
            datetime.strptime("08-12-2011", "%m-%d-%Y")
        ]
        for i in range(5):
            operation = model.Operation(self.test_user.id,
                                        self.test_project.id,
                                        self.algorithm.id,
                                        'test params',
                                        status=model.STATUS_FINISHED,
                                        start_date=start_dates[i],
                                        completion_date=end_dates[i])
            operation = dao.store_entity(operation)
            storage_path = FilesHelper().get_project_folder(
                self.test_project, str(operation.id))
            if i < 4:
                datatype_inst = Datatype1()
                datatype_inst.type = "Datatype1"
                datatype_inst.subject = "John Doe" + str(i)
                datatype_inst.state = "RAW"
                datatype_inst.set_operation_id(operation.id)
                dao.store_entity(datatype_inst)
            else:
                for _ in range(2):
                    datatype_inst = Datatype2()
                    datatype_inst.storage_path = storage_path
                    datatype_inst.type = "Datatype2"
                    datatype_inst.subject = "John Doe" + str(i)
                    datatype_inst.state = "RAW"
                    datatype_inst.string_data = ["data"]
                    datatype_inst.set_operation_id(operation.id)
                    dao.store_entity(datatype_inst)

        returned_data = self.flow_service.get_available_datatypes(
            self.test_project.id, Datatype1)[0]
        for row in returned_data:
            if row[1] != 'Datatype1':
                self.fail("Some invalid data was returned!")
        self.assertEqual(4, len(returned_data), "Invalid length of result")

        filter_op = FilterChain(
            fields=[
                FilterChain.datatype + ".state",
                FilterChain.operation + ".start_date"
            ],
            values=["RAW", datetime.strptime("08-01-2010", "%m-%d-%Y")],
            operations=["==", ">"])
        returned_data = self.flow_service.get_available_datatypes(
            self.test_project.id, Datatype1, filter_op)[0]
        returned_subjects = [one_data[3] for one_data in returned_data]

        if "John Doe0" not in returned_subjects or "John Doe1" not in returned_subjects or len(
                returned_subjects) != 2:
            self.fail("DataTypes were not filtered properly!")
Example #30
    def __init__(self):
        BaseController.__init__(self)
        self.context = SelectedAdapterContext()
        self.files_helper = FilesHelper()
Example #32
class TestFigureService(TransactionalTestCase):
    """
    Tests for the figure service
    """
    def transactional_setup_method(self):
        self.figure_service = FigureService()
        self.user = TestFactory.create_user()
        self.project = TestFactory.create_project(admin=self.user)
        self.files_helper = FilesHelper()

    def transactional_teardown_method(self):
        self.delete_project_folders()

    def assertCanReadImage(self, image_path):
        try:
            Image.open(image_path).load()
        except (IOError, ValueError):
            raise AssertionError("Could not open %s as an image" % image_path)

    def store_test_png(self):
        self.figure_service.store_result_figure(self.project,
                                                self.user,
                                                "png",
                                                IMG_DATA,
                                                image_name="test-figure")

    def retrieve_images(self):
        figures_by_session, _ = self.figure_service.retrieve_result_figures(
            self.project, self.user)
        # flatten image session grouping
        figures = []
        for fg in figures_by_session.itervalues():
            figures.extend(fg)
        return figures

    def test_store_image(self):
        self.store_test_png()

    def test_store_image_from_operation(self):
        # test that image can be retrieved from operation
        test_operation = TestFactory.create_operation(
            test_user=self.user, test_project=self.project)

        self.figure_service.store_result_figure(self.project,
                                                self.user,
                                                "png",
                                                IMG_DATA,
                                                operation_id=test_operation.id)
        figures = dao.get_figures_for_operation(test_operation.id)
        assert 1 == len(figures)
        image_path = self.files_helper.get_images_folder(self.project.name)
        image_path = os.path.join(image_path, figures[0].file_path)
        self.assertCanReadImage(image_path)

    def test_store_and_retrieve_image(self):
        self.store_test_png()
        figures = self.retrieve_images()
        assert 1 == len(figures)
        image_path = utils.url2path(figures[0].file_path)
        self.assertCanReadImage(image_path)

    def test_load_figure(self):
        self.store_test_png()
        figures = self.retrieve_images()
        self.figure_service.load_figure(figures[0].id)

    def test_edit_figure(self):
        session_name = 'the altered ones'
        name = 'altered'
        self.store_test_png()
        figures = self.retrieve_images()
        self.figure_service.edit_result_figure(figures[0].id,
                                               session_name=session_name,
                                               name=name)
        figures_by_session, _ = self.figure_service.retrieve_result_figures(
            self.project, self.user)
        assert [session_name] == figures_by_session.keys()
        assert name == figures_by_session.values()[0][0].name

    def test_remove_figure(self):
        self.store_test_png()
        figures = self.retrieve_images()
        assert 1 == len(figures)
        self.figure_service.remove_result_figure(figures[0].id)
        figures = self.retrieve_images()
        assert 0 == len(figures)
Example #34
    def transactional_teardown_method(self):
        """
        Clean up test data.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
    def export_project(self, project, optimize_size=False):
        """
        Given a project root and the TVB storage_path, create a ZIP
        ready for export.
        :param project: project object which identifies project to be exported
        """
        if project is None:
            raise ExportException("Please provide project to be exported")

        files_helper = FilesHelper()
        project_folder = files_helper.get_project_folder(project)
        project_datatypes = dao.get_datatypes_in_project(
            project.id, only_visible=optimize_size)
        to_be_exported_folders = []
        considered_op_ids = []

        if optimize_size:
            # take only the DataType with visibility flag set ON
            for dt in project_datatypes:
                op_id = dt.fk_from_operation
                if op_id not in considered_op_ids:
                    to_be_exported_folders.append({
                        'folder':
                        files_helper.get_project_folder(project, str(op_id)),
                        'archive_path_prefix':
                        str(op_id) + os.sep
                    })
                    considered_op_ids.append(op_id)

        else:
            to_be_exported_folders.append({
                'folder': project_folder,
                'archive_path_prefix': '',
                'exclude': ["TEMP"]
            })

        # Compute path and name of the zip file
        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, project.name,
                                      self.ZIP_FILE_EXTENSION)
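        # Hypothetical example result: "2020-01-31_10-30_MyProject.zip",
        # assuming ZIP_FILE_EXTENSION is "zip".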

        export_folder = self._build_data_export_folder(project)
        result_path = os.path.join(export_folder, zip_file_name)

        with TvbZip(result_path, "w") as zip_file:
            # Pack project [filtered] content into a ZIP file:
            self.logger.debug("Done preparing, now we will write folders " +
                              str(len(to_be_exported_folders)))
            self.logger.debug(str(to_be_exported_folders))
            for pack in to_be_exported_folders:
                zip_file.write_folder(**pack)
            self.logger.debug(
                "Done exporting files, now we will export linked DTs")
            self._export_linked_datatypes(project, zip_file)
            # Make sure the Project.xml file gets copied:
            if optimize_size:
                self.logger.debug("Done linked, now we write the project xml")
                zip_file.write(
                    files_helper.get_project_meta_file_path(project.name),
                    files_helper.TVB_PROJECT_FILE)
            self.logger.debug("Done, closing")

        return result_path
Example #37
class SimulatorService(object):
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.files_helper = FilesHelper()

    def _reset_model(self, session_stored_simulator):
        session_stored_simulator.model = type(session_stored_simulator.model)()
        vi_indexes = MonitorForm.determine_indexes_for_chosen_vars_of_interest(
            session_stored_simulator)
        vi_indexes = numpy.array(list(vi_indexes.values()))
        for monitor in session_stored_simulator.monitors:
            monitor.variables_of_interest = vi_indexes

    def reset_at_connectivity_change(self, is_simulator_copy, form,
                                     session_stored_simulator):
        """
        In case the user copies a simulation and changes the Connectivity, we want to reset the Model and Noise
        parameters because they might not fit the new Connectivity's number of regions.
        """
        if is_simulator_copy and form.connectivity.value != session_stored_simulator.connectivity:
            self._reset_model(session_stored_simulator)
            if issubclass(type(session_stored_simulator.integrator),
                          IntegratorStochastic):
                session_stored_simulator.integrator.noise = type(
                    session_stored_simulator.integrator.noise)()

    def reset_at_surface_change(self, is_simulator_copy, form,
                                session_stored_simulator):
        """
        In case the user copies a surface-simulation and changes the Surface, we want to reset the Model
        parameters because they might not fit the new Surface's number of vertices.
        """
        if is_simulator_copy and (
                session_stored_simulator.surface is None and form.surface.value
                or session_stored_simulator.surface and form.surface.value !=
                session_stored_simulator.surface.surface_gid):
            self._reset_model(session_stored_simulator)

    @staticmethod
    def _set_simulator_range_parameter(simulator, range_parameter_name,
                                       range_parameter_value):
        range_param_name_list = range_parameter_name.split('.')
        current_attr = simulator
        for param_name in range_param_name_list[:-1]:
            current_attr = getattr(current_attr, param_name)
        setattr(current_attr, range_param_name_list[-1], range_parameter_value)
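
    # Illustrative sketch: this is a dotted-path setattr. A hypothetical call
    #   _set_simulator_range_parameter(sim, "integrator.noise.nsig", value)
    # walks sim.integrator.noise and then sets its 'nsig' attribute, i.e.
    # sim.integrator.noise.nsig = value.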

    def async_launch_and_prepare_simulation(self, burst_config, user, project,
                                            simulator_algo,
                                            session_stored_simulator):
        try:
            operation = self.operation_service.prepare_operation(
                user.id, project.id, simulator_algo,
                session_stored_simulator.gid)
            ga = self.operation_service._prepare_metadata(
                simulator_algo.algorithm_category, {}, None, burst_config.gid)
            session_stored_simulator.generic_attributes = ga
            storage_path = self.files_helper.get_project_folder(
                project, str(operation.id))
            h5.store_view_model(session_stored_simulator, storage_path)
            burst_config = self.burst_service.update_simulation_fields(
                burst_config.id, operation.id, session_stored_simulator.gid)
            self.burst_service.store_burst_configuration(
                burst_config, storage_path)

            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    self.burst_service.mark_burst_finished(
                        burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflow. " + str(wf_errs) +
                              " operation(s) had errors on pre-launch steps")

        except Exception as excep:
            self.logger.error(excep)
            if burst_config:
                self.burst_service.mark_burst_finished(
                    burst_config, error_message=str(excep))

    def prepare_simulation_on_server(self, user_id, project, algorithm,
                                     zip_folder_path, simulator_file):
        simulator_vm = h5.load_view_model_from_file(simulator_file)
        operation = self.operation_service.prepare_operation(
            user_id, project.id, algorithm, simulator_vm.gid)
        storage_operation_path = self.files_helper.get_project_folder(
            project, str(operation.id))
        self.async_launch_simulation_on_server(operation, zip_folder_path,
                                               storage_operation_path)

        return operation

    def async_launch_simulation_on_server(self, operation, zip_folder_path,
                                          storage_operation_path):
        try:
            for file in os.listdir(zip_folder_path):
                shutil.move(os.path.join(zip_folder_path, file),
                            storage_operation_path)
            try:
                OperationService().launch_operation(operation.id, True)
                shutil.rmtree(zip_folder_path)
                return operation
            except Exception as excep:
                self.logger.error(excep)
        except Exception as excep:
            self.logger.error(excep)

    @staticmethod
    def _set_range_param_in_dict(param_value):
        if type(param_value) is numpy.ndarray:
            return param_value[0]
        elif isinstance(param_value, uuid.UUID):
            return param_value.hex
        else:
            return param_value
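
    # Behaviour sketch:
    #   _set_range_param_in_dict(numpy.array([0.5]))  # -> 0.5
    #   _set_range_param_in_dict(uuid.uuid4())        # -> hex string of the UUID
    #   _set_range_param_in_dict(3.14)                # -> 3.14 (unchanged)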

    def async_launch_and_prepare_pse(self, burst_config, user, project,
                                     simulator_algo, range_param1,
                                     range_param2, session_stored_simulator):
        try:
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            operations = []
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            first_simulator = None

            for param1_value in range_param1.get_range_values():
                for param2_value in range_param2_values:
                    # Copy, but generate a new GUID for every Simulator in PSE
                    simulator = copy.deepcopy(session_stored_simulator)
                    simulator.gid = uuid.uuid4()
                    self._set_simulator_range_parameter(
                        simulator, range_param1.name, param1_value)

                    ranges = {
                        range_param1.name:
                        self._set_range_param_in_dict(param1_value)
                    }

                    if param2_value is not None:
                        self._set_simulator_range_parameter(
                            simulator, range_param2.name, param2_value)
                        ranges[
                            range_param2.name] = self._set_range_param_in_dict(
                                param2_value)

                    ranges = json.dumps(ranges)

                    operation = self.operation_service.prepare_operation(
                        user.id, project.id, simulator_algo, simulator.gid,
                        operation_group, ranges)

                    storage_path = self.files_helper.get_project_folder(
                        project, str(operation.id))
                    h5.store_view_model(simulator, storage_path)
                    operations.append(operation)
                    if first_simulator is None:
                        first_simulator = simulator

            first_operation = operations[0]
            storage_path = self.files_helper.get_project_folder(
                project, str(first_operation.id))
            burst_config = self.burst_service.update_simulation_fields(
                burst_config.id, first_operation.id, first_simulator.gid)
            self.burst_service.store_burst_configuration(
                burst_config, storage_path)
            datatype_group = DataTypeGroup(
                operation_group,
                operation_id=first_operation.id,
                fk_parent_burst=burst_config.gid,
                state=algo_category.defaultdatastate)
            dao.store_entity(datatype_group)

            metrics_datatype_group = DataTypeGroup(
                metric_operation_group, fk_parent_burst=burst_config.gid)
            dao.store_entity(metrics_datatype_group)

            wf_errs = 0
            for operation in operations:
                try:
                    OperationService().launch_operation(operation.id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.burst_service.mark_burst_finished(
                        burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " +
                              str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) +
                              " had error on pre-launch steps")
            return first_operation

        except Exception as excep:
            self.logger.error(excep)
            self.burst_service.mark_burst_finished(burst_config,
                                                   error_message=str(excep))

    def load_from_zip(self, zip_file, project):
        import_service = ImportService()
        simulator_folder = import_service.import_simulator_configuration_zip(
            zip_file)

        simulator_h5_filename = DirLoader(
            simulator_folder,
            None).find_file_for_has_traits_type(SimulatorAdapterModel)
        simulator_h5_filepath = os.path.join(simulator_folder,
                                             simulator_h5_filename)
        simulator = h5.load_view_model_from_file(simulator_h5_filepath)

        burst_config = self.burst_service.load_burst_configuration_from_folder(
            simulator_folder, project)
        return simulator, burst_config
Example #38
class FlowService:
    """
    Service Layer for all TVB generic Work-Flow operations.
    """

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
        self.input_tree_manager = InputTreeManager()
    
    def get_category_by_id(self, identifier):
        """ Pass to DAO the retrieve of category by ID operation."""
        return dao.get_category_by_id(identifier)
    
    @staticmethod
    def get_raw_categories():
        """:returns: AlgorithmCategory list of entities that have results in RAW state (Creators/Uploaders)"""
        return dao.get_raw_categories()
    
    @staticmethod
    def get_visualisers_category():
        """Retrieve all Algorithm categories, with display capability"""
        result = dao.get_visualisers_categories()
        if not result:
            raise ValueError("View Category not found!!!")
        return result[0]
    
    @staticmethod
    def get_algorithm_by_identifier(ident):
        """
        Retrieve Algorithm entity by ID.
        Return None, if ID is not found in DB.
        """
        return dao.get_algorithm_by_id(ident)

    
    @staticmethod
    def load_operation(operation_id):
        """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
        operation = dao.get_operation_by_id(operation_id)
        operation.burst = dao.get_burst_for_operation_id(operation_id)
        return operation


    @staticmethod
    def get_operation_numbers(proj_id):
        """ Count total number of operations started for current project. """
        return dao.get_operation_numbers(proj_id)
              

    def prepare_adapter(self, project_id, stored_adapter):
        """
        Having a StoredAdapter, return the Tree Adapter Interface object, populated with datatypes from 'project_id'.
        """
        adapter_module = stored_adapter.module
        adapter_name = stored_adapter.classname
        try:
            # Prepare Adapter Interface, by populating with existent data,
            # in case of a parameter of type DataType.
            adapter_instance = ABCAdapter.build_adapter(stored_adapter)
            interface = adapter_instance.get_input_tree()
            interface = self.input_tree_manager.fill_input_tree_with_options(interface, project_id, stored_adapter.fk_category)
            interface = self.input_tree_manager.prepare_param_names(interface)
            return interface
        except Exception:
            self.logger.exception('Not found:' + adapter_name + ' in:' + adapter_module)
            raise OperationException("Could not prepare " + adapter_name)
    
    
    @staticmethod
    def get_algorithm_by_module_and_class(module, classname):
        """
        Get the db entry from the algorithm table for the given module and 
        class.
        """
        return dao.get_algorithm_by_module(module, classname)
    
    
    @staticmethod
    def get_available_datatypes(project_id, data_type_cls, filters=None):
        """
        Return all dataTypes that match a given name and some filters.
        :param data_type_cls: either a fully qualified class name or a class object
        """
        return get_filtered_datatypes(project_id, data_type_cls, filters)
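
    # Hedged usage sketch (illustrative filter; the [0] indexing mirrors the
    # call in test_get_filtered_datatypes above):
    #   rows = FlowService.get_available_datatypes(
    #       project.id, Datatype1,
    #       FilterChain(fields=[FilterChain.datatype + '.state'],
    #                   values=['RAW'], operations=['==']))[0]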


    @staticmethod
    def create_link(data_ids, project_id):
        """
        For a list of dataType IDs and a project id create all the required links.
        """
        for data in data_ids:
            link = model.Links(data, project_id)
            dao.store_entity(link)


    @staticmethod
    def remove_link(dt_id, project_id):
        """
        Remove the link from the datatype given by dt_id to project given by project_id.
        """
        link = dao.get_link(dt_id, project_id)
        if link is not None:
            dao.remove_entity(model.Links, link.id)
    
        
    def fire_operation(self, adapter_instance, current_user, project_id, visible=True, **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser, 
        Current Project and a given set of UI Input Data.
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)
            
            result = OperationService().initiate_operation(current_user, project.id, adapter_instance, 
                                                           tmp_folder, visible, **data)
            self.logger.info("Finished operation:" + operation_name)
            return result

        except TVBException, excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
        except Exception, excep:
            self.logger.exception("Could not launch operation " + operation_name + " with the given set of input data!")
            raise OperationException(str(excep))      
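
    # Hedged usage sketch (illustrative; assumes a built ABCAdapter instance
    # and a dict of UI input data):
    #   result = FlowService().fire_operation(adapter_instance, current_user,
    #                                         project.id, visible=True,
    #                                         **ui_input_data)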
Example #39
class SimulatorService(object):
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.files_helper = FilesHelper()

    @transactional
    def _prepare_operation(self,
                           project_id,
                           user_id,
                           simulator_id,
                           simulator_gid,
                           algo_category,
                           op_group,
                           metadata,
                           ranges=None):
        operation_parameters = json.dumps({'gid': simulator_gid.hex})
        metadata, user_group = self.operation_service._prepare_metadata(
            metadata, algo_category, op_group, {})
        meta_str = json.dumps(metadata)

        op_group_id = None
        if op_group:
            op_group_id = op_group.id

        operation = Operation(user_id,
                              project_id,
                              simulator_id,
                              operation_parameters,
                              op_group_id=op_group_id,
                              meta=meta_str,
                              range_values=ranges)

        self.logger.info("Saving Operation(userId=" + str(user_id) +
                         ", projectId=" + str(project_id) + "," +
                         str(metadata) + ", algorithmId=" + str(simulator_id) +
                         ", ops_group= " + str(op_group_id) + ", params=" +
                         str(operation_parameters) + ")")

        operation = dao.store_entity(operation)
        # TODO: prepare portlets/handle operation groups/no workflows
        return operation

    @staticmethod
    def _set_simulator_range_parameter(simulator, range_parameter_name,
                                       range_parameter_value):
        range_param_name_list = range_parameter_name.split('.')
        current_attr = simulator
        for param_name in range_param_name_list[:-1]:
            current_attr = getattr(current_attr, param_name)
        setattr(current_attr, range_param_name_list[-1], range_parameter_value)

    def async_launch_and_prepare_simulation(self, burst_config, user, project,
                                            simulator_algo,
                                            session_stored_simulator,
                                            simulation_state_gid):
        try:
            metadata = {}
            metadata.update({DataTypeMetaData.KEY_BURST: burst_config.id})
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation = self._prepare_operation(project.id, user.id,
                                                simulator_id,
                                                session_stored_simulator.gid,
                                                algo_category, None, metadata)
            storage_path = self.files_helper.get_project_folder(
                project, str(operation.id))
            SimulatorSerializer().serialize_simulator(session_stored_simulator,
                                                      simulation_state_gid,
                                                      storage_path)
            burst_config = self.burst_service.update_simulation_fields(
                burst_config.id, operation.id, session_stored_simulator.gid)
            self.burst_service.store_burst_configuration(
                burst_config, storage_path)

            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    self.burst_service.mark_burst_finished(
                        burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflow. " + str(wf_errs) +
                              " operation(s) had errors on pre-launch steps")

        except Exception as excep:
            self.logger.error(excep)
            if burst_config:
                self.burst_service.mark_burst_finished(
                    burst_config, error_message=str(excep))

    def prepare_simulation_on_server(self, user_id, project, algorithm,
                                     zip_folder_path, simulator_file):
        with SimulatorH5(simulator_file) as simulator_h5:
            simulator_gid = simulator_h5.gid.load()

        metadata = {}
        simulator_id = algorithm.id
        algo_category = algorithm.algorithm_category
        operation = self._prepare_operation(project.id, user_id, simulator_id,
                                            simulator_gid, algo_category, None,
                                            metadata)
        storage_operation_path = self.files_helper.get_project_folder(
            project, str(operation.id))
        self.async_launch_simulation_on_server(operation, zip_folder_path,
                                               storage_operation_path)

        return operation

    def async_launch_simulation_on_server(self, operation, zip_folder_path,
                                          storage_operation_path):
        try:
            for file in os.listdir(zip_folder_path):
                shutil.move(os.path.join(zip_folder_path, file),
                            storage_operation_path)
            try:
                OperationService().launch_operation(operation.id, True)
                shutil.rmtree(zip_folder_path)
                return operation
            except Exception as excep:
                self.logger.error(excep)
        except Exception as excep:
            self.logger.error(excep)

    @staticmethod
    def _set_range_param_in_dict(param_value):
        if type(param_value) is numpy.ndarray:
            return param_value[0]
        elif isinstance(param_value, uuid.UUID):
            return param_value.hex
        else:
            return param_value

    def async_launch_and_prepare_pse(self, burst_config, user, project,
                                     simulator_algo, range_param1,
                                     range_param2, session_stored_simulator):
        try:
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            operations = []
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            first_simulator = None

            for param1_value in range_param1.get_range_values():
                for param2_value in range_param2_values:
                    # Copy, but generate a new GUID for every Simulator in PSE
                    simulator = copy.deepcopy(session_stored_simulator)
                    simulator.gid = uuid.uuid4()
                    self._set_simulator_range_parameter(
                        simulator, range_param1.name, param1_value)

                    ranges = {
                        range_param1.name:
                        self._set_range_param_in_dict(param1_value)
                    }

                    if param2_value is not None:
                        self._set_simulator_range_parameter(
                            simulator, range_param2.name, param2_value)
                        ranges[
                            range_param2.name] = self._set_range_param_in_dict(
                                param2_value)

                    ranges = json.dumps(ranges)

                    operation = self._prepare_operation(
                        project.id, user.id, simulator_id, simulator.gid,
                        algo_category, operation_group,
                        {DataTypeMetaData.KEY_BURST: burst_config.id}, ranges)

                    storage_path = self.files_helper.get_project_folder(
                        project, str(operation.id))
                    SimulatorSerializer().serialize_simulator(
                        simulator, None, storage_path)
                    operations.append(operation)
                    if first_simulator is None:
                        first_simulator = simulator

            first_operation = operations[0]
            storage_path = self.files_helper.get_project_folder(
                project, str(first_operation.id))
            burst_config = self.burst_service.update_simulation_fields(
                burst_config.id, first_operation.id, first_simulator.gid)
            self.burst_service.store_burst_configuration(
                burst_config, storage_path)
            datatype_group = DataTypeGroup(
                operation_group,
                operation_id=first_operation.id,
                fk_parent_burst=burst_config.id,
                state=json.loads(
                    first_operation.meta_data)[DataTypeMetaData.KEY_STATE])
            dao.store_entity(datatype_group)

            metrics_datatype_group = DataTypeGroup(
                metric_operation_group, fk_parent_burst=burst_config.id)
            dao.store_entity(metrics_datatype_group)

            wf_errs = 0
            for operation in operations:
                try:
                    OperationService().launch_operation(operation.id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.burst_service.mark_burst_finished(
                        burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " +
                              str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) +
                              " had error on pre-launch steps")

        except Exception as excep:
            self.logger.error(excep)
            self.burst_service.mark_burst_finished(burst_config,
                                                   error_message=str(excep))

    def load_from_zip(self, zip_file, project):
        import_service = ImportService()
        simulator_folder = import_service.import_simulator_configuration_zip(
            zip_file)

        simulator_h5_filename = DirLoader(
            simulator_folder, None).find_file_for_has_traits_type(Simulator)
        with SimulatorH5(os.path.join(simulator_folder,
                                      simulator_h5_filename)) as sim_h5:
            simulator_gid = sim_h5.gid.load()
        simulator = SimulatorSerializer.deserialize_simulator(
            simulator_gid, simulator_folder)

        burst_config = self.burst_service.load_burst_configuration_from_folder(
            simulator_folder, project)
        return simulator, burst_config
class TestOperationResource(TransactionalTestCase):
    def transactional_setup_method(self):
        self.test_user = TestFactory.create_user('Rest_User')
        self.test_project = TestFactory.create_project(self.test_user,
                                                       'Rest_Project')
        self.operations_resource = GetOperationsInProjectResource()
        self.status_resource = GetOperationStatusResource()
        self.results_resource = GetOperationResultsResource()
        self.launch_resource = LaunchOperationResource()
        self.files_helper = FilesHelper()

    def test_server_get_operation_status_inexistent_gid(self):
        operation_gid = "inexistent-gid"
        with pytest.raises(InvalidIdentifierException):
            self.status_resource.get(operation_gid)

    def test_server_get_operation_status(self):
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            zip_path)

        operations = self.operations_resource.get(self.test_project.gid)

        result = self.status_resource.get(operations[0].gid)
        assert type(result) is str
        assert result in OperationPossibleStatus

    def test_server_get_operation_results_inexistent_gid(self):
        operation_gid = "inexistent-gid"
        with pytest.raises(InvalidIdentifierException):
            self.results_resource.get(operation_gid)

    def test_server_get_operation_results(self):
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            zip_path)

        operations = self.operations_resource.get(self.test_project.gid)

        result = self.results_resource.get(operations[0].gid)
        assert type(result) is list
        assert len(result) == 1

    def test_server_get_operation_results_failed_operation(self):
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_90.zip')
        with pytest.raises(TVBException):
            TestFactory.import_zip_connectivity(self.test_user,
                                                self.test_project, zip_path)

        operations = self.operations_resource.get(self.test_project.gid)

        result = self.results_resource.get(operations[0].gid)
        assert type(result) is list
        assert len(result) == 0

    def test_server_launch_operation_no_file(self, mocker):
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {}

        with pytest.raises(BadRequestException):
            self.launch_resource.post('', '', '')

    def test_server_launch_operation_wrong_file_extension(self, mocker):
        dummy_file = FileStorage(BytesIO(b"test"), 'test.txt')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {'file': dummy_file}

        with pytest.raises(BadRequestException):
            self.launch_resource.post('', '', '')

    def test_server_launch_operation_inexistent_gid(self, mocker):
        project_gid = "inexistent-gid"
        dummy_file = FileStorage(BytesIO(b"test"), 'test.h5')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {'file': dummy_file}

        with pytest.raises(InvalidIdentifierException):
            self.launch_resource.post(project_gid, '', '')

    def test_server_launch_operation_inexistent_algorithm(self, mocker):
        inexistent_algorithm = "inexistent-algorithm"

        dummy_file = FileStorage(BytesIO(b"test"), 'test.h5')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {'file': dummy_file}

        with pytest.raises(InvalidIdentifierException):
            self.launch_resource.post(self.test_project.gid,
                                      inexistent_algorithm, '')

    def test_server_launch_operation(self, mocker, time_series_index_factory):
        algorithm_module = "tvb.adapters.analyzers.fourier_adapter"
        algorithm_class = "FourierAdapter"

        input_ts_index = time_series_index_factory()

        fft_model = FFTAdapterModel()
        fft_model.time_series = UUID(input_ts_index.gid)
        fft_model.window_function = list(SUPPORTED_WINDOWING_FUNCTIONS)[0]

        input_folder = self.files_helper.get_project_folder(self.test_project)
        view_model_h5_path = h5.path_for(input_folder, ViewModelH5,
                                         fft_model.gid)

        view_model_h5 = ViewModelH5(view_model_h5_path, fft_model)
        view_model_h5.store(fft_model)
        view_model_h5.close()

        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        fp = open(view_model_h5_path, 'rb')
        request_mock.files = {
            'file': FileStorage(fp, os.path.basename(view_model_h5_path))
        }

        # Mock launch_operation() call
        mocker.patch.object(OperationService, 'launch_operation')

        operation_gid, status = self.launch_resource.post(
            self.test_project.gid, algorithm_module, algorithm_class)
        fp.close()

        assert type(operation_gid) is str
        assert len(operation_gid) > 0

    def transactional_teardown_method(self):
        self.files_helper.remove_project_structure(self.test_project.name)
# Example #41
    def transactional_setup_method(self):
        self.figure_service = FigureService()
        self.user = TestFactory.create_user()
        self.project = TestFactory.create_project(admin=self.user)
        self.files_helper = FilesHelper()
    def tearDown(self):
        """
        Clean-up tests data.
        """
        FilesHelper().remove_project_structure(self.project.name)
# Example #43
    def teardown_method(self):
        """
        Clean-up tests data.
        """
        self.clean_database()
        FilesHelper().remove_project_structure(self.test_project.name)
class FilesHelperTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """
    PROJECT_NAME = "test_proj"

    def setUp(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       self.PROJECT_NAME)

    def tearDown(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()

    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")

        self.files_helper.check_created(os.path.join(root_storage, "test"))
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.assertTrue(os.path.exists(os.path.join(root_storage, "test")),
                        "Test directory not created!")

    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")

        folder_path = self.files_helper.get_project_folder(
            self.test_project, "43")
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        self.assertTrue(os.path.exists(folder_path), "Folder doesn't exist")

    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(
            self.test_project.name, "new_name")
        self.assertNotEqual(path, name, "Rename didn't take effect.")

    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)

        self.assertRaises(FileStructureException,
                          self.files_helper.rename_project_structure,
                          self.test_project.name, self.PROJECT_NAME)

    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(full_path), "Folder was not created.")

        self.files_helper.remove_project_structure(self.test_project.name)
        self.assertFalse(os.path.exists(full_path),
                         "Project folder not deleted.")

    def test_write_project_metadata(self):
        """  Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(
            self.PROJECT_NAME)
        self.assertTrue(os.path.exists(expected_file))
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        self.assertEqual(self.test_project.name, loaded_project.name)
        self.assertEqual(self.test_project.description,
                         loaded_project.description)
        self.assertEqual(self.test_project.gid, loaded_project.gid)
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        self.assertDictContainsSubset(expected_dict, found_dict)
        self.assertDictContainsSubset(found_dict, expected_dict)

    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(
            self.PROJECT_NAME, operation.id)
        self.assertFalse(os.path.exists(expected_file))
        self.files_helper.write_operation_metadata(operation)
        self.assertTrue(os.path.exists(expected_file))
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.iteritems():
            self.assertEqual(str(value), str(found_dict[key]))
        # Now validate that operation metaData can be also updated
        self.assertNotEqual("new_group_name", found_dict['user_group'])
        self.files_helper.update_operation_metadata(self.PROJECT_NAME,
                                                    "new_group_name",
                                                    operation.id)
        found_dict = XMLReader(expected_file).read_metadata()
        self.assertEqual("new_group_name", found_dict['user_group'])

    def test_remove_dt_happy_flow(self):
        """
        Happy flow for removing a file related to a DataType.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w')
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")
        self.files_helper.remove_datatype(datatype)
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()),
                         "Test file was not deleted!")

    def test_remove_dt_non_existent(self):
        """
        Try to call remove on a dataType with no H5 file.
        Should work.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()))
        self.files_helper.remove_datatype(datatype)

    def test_move_datatype(self):
        """
        Make sure associated H5 file is moved to a correct new location.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w')
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '11',
                                        "43")

        self.assertFalse(os.path.exists(datatype.get_storage_file_path()),
                         "Test file was not moved!")
        datatype.storage_path = self.files_helper.get_project_folder(
            self.PROJECT_NAME + '11', "43")
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")

    def test_find_relative_path(self):
        """
        Tests that relative path is computed properly.
        """
        rel_path = self.files_helper.find_relative_path(
            "/root/up/to/here/test/it/now", "/root/up/to/here")
        self.assertEqual(rel_path, os.sep.join(["test", "it", "now"]),
                         "Did not extract relative path as expected.")

    def test_remove_files_valid(self):
        """
        Pass a valid list of files and check they are all removed.
        """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            self.assertTrue(os.path.isfile(file_n))
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            self.assertFalse(os.path.isfile(file_n))

    def test_remove_folder(self):
        """
        Create a folder, then check it is correctly removed.
        """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        self.assertTrue(os.path.isdir(folder_name),
                        "Folder should be created.")
        self.files_helper.remove_folder(folder_name)
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should be deleted.")

    def test_remove_folder_non_existing_ignore_exc(self):
        """
        Call remove_folder on a non-existing folder, with ignore_errors=True.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should not exist before call.")
        self.files_helper.remove_folder(folder_name, ignore_errors=True)

    def test_remove_folder_non_existing(self):
        """
        Call remove_folder on a non-existing folder, without ignoring errors.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should not exist before call.")
        self.assertRaises(FileStructureException,
                          self.files_helper.remove_folder, folder_name, False)
class ProjectService:
    """
    Services layer for Project entities.
    """
    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()

    def store_project(self, current_user, is_create, selected_id, **data):
        """
        We want to create/update a project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {},
                                     None,
                                     error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException(
                "A project cannot be renamed while operations are still running!"
            )
        if is_create:
            current_proj = Project(new_name, current_user.id,
                                   data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(
                    current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_users_except([prj_admin], int(page),
                                                        MEMBERS_PAGE_SIZE)[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)

        selected_user_ids = data["users"]
        if is_create and current_user.id not in selected_user_ids:
            # Make the project admin also member of the current project
            selected_user_ids.append(current_user.id)
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) +
                          ' by user:'******'-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(
                            OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(
                            one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        # Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = AlgorithmService(
                        ).get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception(
                            "We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result[
                        "start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] -
                                                          result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(
                        result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning(
                                "Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(
                        result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(
                            figure.project.name)
                        figure_full_path = os.path.join(
                            figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(
                            figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception(
                    "Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no

    def retrieve_projects_for_user(self, user_id, current_page=1):
        """
        Return a list with all Projects visible for current user.
        """
        start_idx = PROJECTS_PAGE_SIZE * (current_page - 1)
        total = dao.get_projects_for_user(user_id, is_count=True)
        available_projects = dao.get_projects_for_user(user_id, start_idx,
                                                       PROJECTS_PAGE_SIZE)
        pages_no = total // PROJECTS_PAGE_SIZE + (1 if total %
                                                  PROJECTS_PAGE_SIZE else 0)
        for prj in available_projects:
            fns, sta, err, canceled, pending = dao.get_operation_numbers(
                prj.id)
            prj.operations_finished = fns
            prj.operations_started = sta
            prj.operations_error = err
            prj.operations_canceled = canceled
            prj.operations_pending = pending
            prj.disk_size = dao.get_project_disk_size(prj.id)
            prj.disk_size_human = format_bytes_human(prj.disk_size)
        self.logger.debug("Displaying " + str(len(available_projects)) +
                          " projects in UI for user " + str(user_id))
        return available_projects, pages_no
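
    # Hedged illustration (not in the original source): the pages_no
    # arithmetic in retrieve_projects_for_user() above is plain ceiling
    # division. With an assumed PROJECTS_PAGE_SIZE of 20:
    #
    #     total = 0  -> pages_no = 0
    #     total = 20 -> pages_no = 1
    #     total = 21 -> pages_no = 2
    #
    # i.e. pages_no == -(-total // PROJECTS_PAGE_SIZE).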

    @staticmethod
    def retrieve_all_user_projects(user_id,
                                   page_start=0,
                                   page_size=PROJECTS_PAGE_SIZE):
        """
        Return a list with all projects visible for current user, without pagination.
        """
        return dao.get_projects_for_user(user_id,
                                         page_start=page_start,
                                         page_size=page_size)

    @staticmethod
    def get_linkable_projects_for_user(user_id, data_id):
        """
        Find projects which are visible to the current user, and into which the current datatype hasn't been linked yet.
        """
        return dao.get_linkable_projects_for_user(user_id, data_id)

    @transactional
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) +
                              ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) +
                              ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))

    # ----------------- Methods for populating Data-Structure Page ---------------

    @staticmethod
    def get_datatype_in_group(group):
        """
        Return all dataTypes that are the result of the same DTgroup.
        """
        return dao.get_datatype_in_group(datatype_group_id=group)

    @staticmethod
    def get_datatypes_from_datatype_group(datatype_group_id):
        """
        Retrieve all dataTypes which are part of the given dataType group.
        """
        return dao.get_datatypes_from_datatype_group(datatype_group_id)

    @staticmethod
    def load_operation_by_gid(operation_gid):
        """ Retrieve loaded Operation from DB"""
        return dao.get_operation_by_gid(operation_gid)

    @staticmethod
    def load_operation_lazy_by_gid(operation_gid):
        """ Retrieve lazy Operation from DB"""
        return dao.get_operation_lazy_by_gid(operation_gid)

    @staticmethod
    def get_operation_group_by_id(operation_group_id):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_id(operation_group_id)

    @staticmethod
    def get_operation_group_by_gid(operation_group_gid):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_gid(operation_group_gid)

    @staticmethod
    def get_operations_in_group(operation_group):
        """ Return all the operations from an operation group. """
        return dao.get_operations_in_group(operation_group.id)

    @staticmethod
    def is_upload_operation(operation_gid):
        """ Returns True only if the operation with the given GID is an upload operation. """
        return dao.is_upload_operation(operation_gid)

    @staticmethod
    def get_all_operations_for_uploaders(project_id):
        """ Returns all finished upload operations. """
        return dao.get_all_operations_for_uploaders(project_id)

    def set_operation_and_group_visibility(self,
                                           entity_gid,
                                           is_visible,
                                           is_operation_group=False):
        """
        Sets the operation visibility.

        If 'is_operation_group' is True, then this method will change the visibility for all
        the operations from the OperationGroup with the GID field equal to 'entity_gid'.
        """
        def set_visibility(op):
            # workaround:
            # 'reload' the operation so that it has the project property set.
            # get_operations_in_group does not eager load it and now we're out of a sqlalchemy session
            # write_operation_metadata requires that property
            op = dao.get_operation_by_id(op.id)
            # end hack
            op.visible = is_visible
            dao.store_entity(op)

        def set_group_descendants_visibility(operation_group_id):
            ops_in_group = dao.get_operations_in_group(operation_group_id)
            for group_op in ops_in_group:
                set_visibility(group_op)

        if is_operation_group:
            op_group_id = dao.get_operationgroup_by_gid(entity_gid).id
            set_group_descendants_visibility(op_group_id)
        else:
            operation = dao.get_operation_by_gid(entity_gid)
            # make sure that if the operation belongs to a group, the visibility is changed for the entire group
            if operation.fk_operation_group is not None:
                set_group_descendants_visibility(operation.fk_operation_group)
            else:
                set_visibility(operation)
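
    # Hedged usage sketch for set_operation_and_group_visibility() above
    # (illustrative only; `service`, `group_gid` and `op_gid` are assumed
    # names, not defined in this module):
    #
    #     service.set_operation_and_group_visibility(group_gid, False,
    #                                                is_operation_group=True)
    #     service.set_operation_and_group_visibility(op_gid, True)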

    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False,
                                                    True)
            # Reload, to make sure all attributes lazy are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id,
                                                            is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(
                operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        user_display_name = dao.get_user_by_id(
            operation.fk_launched_by).display_name
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(
            operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, user_display_name,
                                             len(datatypes_param),
                                             count_result, burst,
                                             no_of_op_in_group, op_pid)

        # Add all parameter which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details

    @staticmethod
    def get_filterable_meta():
        """
        Contains all the attributes by which
        the user can structure the tree of DataTypes
        """
        return DataTypeMetaData.get_filterable_meta()

    def get_project_structure(self, project, visibility_filter, first_level,
                              second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter,
                                          filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning(
                    "Ignored entity (possibly removed DT class)" + str(dt))
                continue
            #  Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.display_type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.
                 KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.
                 KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.
                 KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.
                 KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.
                 KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.
                 KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[
                DataTypeMetaData.
                KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.
                 KEY_AUTHOR] = dt.parent_operation.user.username
            data[
                DataTypeMetaData.
                KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.
                 KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(
                MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(
                DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (
                completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[
                DataTypeMetaData.
                KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level,
                                           second_level, project.id,
                                           project.name)

    @staticmethod
    def get_datatype_details(datatype_gid):
        """
        :returns: an array. First entry in array is an instance of DataTypeOverlayDetails\
            The second one contains all the possible states for the specified dataType.
        """
        meta_atts = DataTypeOverlayDetails()
        states = DataTypeMetaData.STATES
        try:
            datatype_result = dao.get_datatype_details(datatype_gid)
            meta_atts.fill_from_datatype(datatype_result,
                                         datatype_result._parent_burst)
            return meta_atts, states, datatype_result
        except Exception:
            # We ignore exception here (it was logged above, and we want to return no details).
            return meta_atts, states, None

    def _remove_project_node_files(self,
                                   project_id,
                                   gid,
                                   skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem, will throw StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = Operation(
                        dao.get_system_user().id, links[0].fk_to_project,
                        datatype.parent_operation.fk_from_algo,
                        datatype.parent_operation.parameters,
                        datatype.parent_operation.status,
                        datatype.parent_operation.start_date,
                        datatype.parent_operation.completion_date,
                        datatype.parent_operation.fk_operation_group,
                        datatype.parent_operation.additional_info,
                        datatype.parent_operation.user_group,
                        datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(
                        datatype, to_project, str(new_op.id), full_path)
                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException(
                "Remove operation failed for unknown reasons. Please contact the system administrator."
            )

    def remove_operation(self, operation_id):
        """
        Remove a given operation
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if operation is not None:
            self.logger.debug("Deleting operation %s " % operation)
            datatypes_for_op = dao.get_results_for_operation(operation_id)
            for dt in reversed(datatypes_for_op):
                self.remove_datatype(operation.project.id, dt.gid, False)
            # Here the Operation is most probably already removed - in case DTs were found inside
            # but we still remove it, for the case when no DTs exist
            dao.remove_entity(Operation, operation.id)
            self.structure_helper.remove_operation_data(
                operation.project.name, operation_id)

            self.logger.debug("Finished deleting operation %s " % operation)
        else:
            self.logger.warning(
                "Attempt to delete operation with id=%s which no longer exists."
                % operation_id)

    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DataTypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning(
                "Attempt to delete DT[%s] which no longer exists." %
                datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]
        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid,
                                                skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_entity(DataTypeGroup, datatype.id)
            correct = correct and dao.remove_entity(
                OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid,
                                            skip_validation)

        # Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id,
                                                  "fk_from_operation")
            if len(dependent_dt) > 0:
                # Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(Operation, operation_id)
            # Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(
                project.name, datatype.fk_from_operation)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " +
                                          str(datatype_gid))
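
    # Hedged usage sketch for remove_datatype() above (illustrative only;
    # `project` and `datatype_gid` are assumed to exist in the caller):
    #
    #     service = ProjectService()
    #     service.remove_datatype(project.id, datatype_gid)        # validated
    #     service.remove_datatype(project.id, datatype_gid, True)  # skip checks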

    def update_metadata(self, submit_data):
        """
        Update DataType / DataTypeGroup metadata.
        Throws StructureException when input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                value = submit_data[key]
                if value == "None":
                    value = None
                if value == "" and key in [
                        CommonDetails.CODE_OPERATION_TAG,
                        CommonDetails.CODE_OPERATION_GROUP_ID
                ]:
                    value = None
                new_data[key] = value

        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(
                    operation_group_id=new_data[
                        CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException(
                        "Inconsistent group, can not be updated!")
                # datatype_group = dao.get_generic_entity(DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                # all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))
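
    # Hedged sketch of a `submit_data` dictionary accepted by
    # update_metadata() above. Only keys listed in
    # DataTypeOverlayDetails().meta_attributes_list are read; the values
    # below are illustrative assumptions, not verified defaults:
    #
    #     submit_data = {CommonDetails.CODE_GID: datatype_gid,
    #                    CommonDetails.CODE_OPERATION_TAG: "my-tag",
    #                    CommonDetails.CODE_OPERATION_GROUP_ID: None}
    #     ProjectService().update_metadata(submit_data)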

    def _edit_data(self, datatype, new_data, from_group=False):
        # type: (DataType, dict, bool) -> None
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(
                OperationGroup,
                new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(OperationGroup,
                                                      new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name +
                                             "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)
            op_folder = self.structure_helper.get_project_folder(
                operation.project, str(operation.id))
            vm_gid = json.loads(operation.parameters)['gid']
            view_model_file = h5.determine_filepath(vm_gid, op_folder)
            if view_model_file:
                view_model_class = H5File.determine_type(view_model_file)
                view_model = view_model_class()
                with ViewModelH5(view_model_file, view_model) as f:
                    ga = f.load_generic_attributes()
                    ga.operation_tag = new_group_name
                    f.store_generic_attributes(ga, False)
            else:
                self.logger.warning(
                    "Could not find ViewModel H5 file for op: {}".format(
                        operation))

        # 2. Update GenericAttributes in the associated H5 files:
        h5_path = h5.path_for_stored_index(datatype)
        with H5File.from_file(h5_path) as f:
            ga = f.load_generic_attributes()

            ga.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
            ga.state = new_data[DataTypeOverlayDetails.DATA_STATE]
            ga.operation_tag = new_group_name
            if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
                ga.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
            if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
                ga.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
            if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
                ga.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
            if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
                ga.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
            if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
                ga.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

            f.store_generic_attributes(ga, False)

        # 3. Update MetaData in DT Index DB as well.
        datatype.fill_from_generic_attributes(ga)
        dao.store_entity(datatype)

    def get_datatype_and_datatypegroup_inputs_for_operation(
            self, operation_gid, selected_filter):
        """
        Returns the dataTypes that are used as input parameters for the given operation.
        'selected_filter' - is expected to be a visibility filter.

        If any dataType is part of a dataType group then the dataType group will
        be returned instead of that dataType.
        """
        all_datatypes = self._review_operation_inputs(operation_gid)[0]
        datatype_inputs = []
        for datatype in all_datatypes:
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW:
                if datatype.visible:
                    datatype_inputs.append(datatype)
            else:
                datatype_inputs.append(datatype)
        datatypes = []
        datatype_groups = dict()
        for data_type in datatype_inputs:
            if data_type.fk_datatype_group is None:
                datatypes.append(data_type)
            elif data_type.fk_datatype_group not in datatype_groups:
                dt_group = dao.get_datatype_by_id(data_type.fk_datatype_group)
                datatype_groups[data_type.fk_datatype_group] = dt_group

        datatypes.extend([v for v in datatype_groups.values()])
        return datatypes

    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return review_operation_inputs_from_adapter(adapter, operation)

        except Exception:
            self.logger.exception("Could not load details for operation %s" %
                                  operation_gid)
            parameters = json.loads(operation.parameters)
            if 'gid' in parameters.keys():
                changed_parameters = dict(
                    Warning=
                    "Algorithm changed dramatically. We can not offer more details"
                )
            else:
                changed_parameters = dict(
                    Warning=
                    "GID parameter is missing. Old implementation of the operation."
                )
            return [], changed_parameters

    def get_datatypes_inputs_for_operation_group(self, group_id,
                                                 selected_filter):
        """
        Returns the dataType inputs for an operation group. If more dataTypes
        are part of the same dataType group then only the dataType group will
        be returned instead of them.
        """
        operations_gids = dao.get_operations_in_group(group_id, only_gids=True)
        op_group_inputs = dict()
        for gid in operations_gids:
            op_inputs = self.get_datatype_and_datatypegroup_inputs_for_operation(
                gid[0], selected_filter)
            for datatype in op_inputs:
                op_group_inputs[datatype.id] = datatype
        return list(op_group_inputs.values())

    @staticmethod
    def get_results_for_operation(operation_id, selected_filter=None):
        """
        Retrieve the DataTypes entities resulted after the execution of the given operation.
        """
        return dao.get_results_for_operation(operation_id, selected_filter)

    @staticmethod
    def get_operations_for_datatype_group(datatype_group_id,
                                          visibility_filter,
                                          only_in_groups=False):
        """
        Returns all the operations which use as an input parameter a dataType from the given DataTypeGroup.
        visibility_filter - is a filter used for retrieving all the operations or only the relevant ones.

        If only_in_groups is True, then this method will return only the operations that are
        part of an operation group, otherwise it will return only the operations that
        are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype_group(
                datatype_group_id,
                only_relevant=False,
                only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype_group(
            datatype_group_id, only_in_groups=only_in_groups)

    @staticmethod
    def get_operations_for_datatype(datatype_gid,
                                    visibility_filter,
                                    only_in_groups=False):
        """
        Returns all the operations which use as an input parameter the dataType with the specified GID.

        If only_in_groups is True, then this method will return only the operations that are part
        of an operation group, otherwise it will return only the operations that are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype(
                datatype_gid,
                only_relevant=False,
                only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype(datatype_gid,
                                               only_in_groups=only_in_groups)

    @staticmethod
    def get_datatype_by_id(datatype_id):
        """Retrieve a DataType DB reference by its id."""
        return dao.get_datatype_by_id(datatype_id)

    @staticmethod
    def get_datatypegroup_by_gid(datatypegroup_gid):
        """ Returns the DataTypeGroup with the specified gid. """
        return dao.get_datatype_group_by_gid(datatypegroup_gid)

    @staticmethod
    def count_datatypes_generated_from(datatype_gid):
        """
        Count the dataTypes resulted from operations that had as
        input the dataType given by 'datatype_gid'.
        """
        return dao.count_datatypes_generated_from(datatype_gid)

    @staticmethod
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)

    @staticmethod
    def get_datatypes_in_project(project_id, only_visible=False):
        return dao.get_data_in_project(project_id, only_visible)

    @staticmethod
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or it is part of a
        dataType group, then this method will set the visibility for each dataType from this group.
        """
        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)

            h5_path = h5.path_for_stored_index(dt)
            with H5File.from_file(h5_path) as f:
                f.visible.store(is_visible)

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(
                datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
            datatype.visible = is_visible
            dao.store_entity(datatype)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            parent = dao.get_datatype_by_id(datatype.fk_datatype_group)
            parent.visible = is_visible
            dao.store_entity(parent)
        else:
            # update the single datatype.
            set_visibility(datatype)

    @staticmethod
    def is_datatype_group(datatype_gid):
        """ Used to check if the dataType with the specified GID is a DataTypeGroup. """
        return dao.is_datatype_group(datatype_gid)
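
# A minimal usage sketch for set_datatype_visibility above (the GID is
# hypothetical and `service` stands for the enclosing service class): hiding one
# member of a datatype group cascades to every member and to the parent
# DataTypeGroup row, while a standalone datatype only updates its own DB row and
# its H5 file.
#
#   service.set_datatype_visibility("6f1a2b3c-0000-0000-0000-000000000000", False)
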
class SimulatorController(BurstBaseController):
    KEY_IS_LOAD_AFTER_REDIRECT = "is_load_after_redirect"
    COPY_NAME_FORMAT = "copy_of_{}"
    BRANCH_NAME_FORMAT = "{}_branch{}"

    def __init__(self):
        BurstBaseController.__init__(self)
        self.range_parameters = SimulatorRangeParameters()
        self.burst_service = BurstService()
        self.simulator_service = SimulatorService()
        self.files_helper = FilesHelper()
        self.cached_simulator_algorithm = self.algorithm_service.get_algorithm_by_module_and_class(
            IntrospectionRegistry.SIMULATOR_MODULE,
            IntrospectionRegistry.SIMULATOR_CLASS)
        self.context = SimulatorContext()
        self.monitors_handler = MonitorsWizardHandler()

    @expose_json
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel or Remove the burst entity given by burst_id (and all linked entities: op, DTs)
        :returns True: if the operation was stopped/removed successfully.
        """
        burst_config = BurstService.load_burst_configuration(int(burst_id))
        op_id, is_group = burst_config.operation_info_for_burst_removal

        return self.cancel_or_remove_operation(op_id, is_group,
                                               burst_config.is_finished)

    def cancel_or_remove_operation(self,
                                   operation_id,
                                   is_group,
                                   remove_after_stop=False):
        """
        Stop the operation given by operation_id. If is_group is true stop all the
        operations from that group.
        """
        # Load before we remove, to have its data in memory here
        burst_config = BurstService.get_burst_for_operation_id(
            operation_id, is_group)
        result = OperationService.stop_operation(operation_id, is_group,
                                                 remove_after_stop)

        if remove_after_stop:
            current_burst = self.context.burst_config
            if (current_burst is not None and burst_config is not None
                    and current_burst.id == burst_config.id and
                ((current_burst.fk_simulation == operation_id and not is_group)
                 or (current_burst.fk_operation_group == operation_id
                     and is_group))):
                self.reset_simulator_configuration()
        return result
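    # Worked example for the reset condition above (ids hypothetical): if the
    # session currently displays burst 7 whose fk_simulation is operation 42,
    # then cancel_or_remove_operation(42, is_group=False, remove_after_stop=True)
    # also resets the simulator wizard, because the burst being removed is the
    # one loaded on screen; removing an unrelated operation leaves the UI as-is.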

    @expose_page
    @settings
    @context_selected
    def index(self):
        """Get on burst main page"""
        template_specification = dict(
            mainContent="burst/main_burst",
            title="Simulation Cockpit",
            includedResources='project/included_resources')

        if not self.context.last_loaded_fragment_url:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_CONNECTIVITY_URL)

        self.context.set_burst_config()

        _, is_simulation_copy, is_simulation_load, is_branch = self.context.get_common_params()
        if self.context.burst_config.start_time is not None:
            is_simulation_load = True
            self.context.add_simulator_load_to_session(True)

        template_specification['burstConfig'] = self.context.burst_config
        template_specification['burst_list'] = self.burst_service.get_available_bursts(
            self.context.project.id)
        portlets_list = []  # self.burst_service.get_available_portlets()
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(portlets_list)

        form = self.prepare_first_fragment()
        rendering_rules = SimulatorFragmentRenderingRules(
            form,
            SimulatorWizzardURLs.SET_CONNECTIVITY_URL,
            None,
            is_simulation_copy,
            is_simulation_load,
            last_form_url=self.context.last_loaded_fragment_url,
            last_request_type=cherrypy.request.method,
            is_first_fragment=True,
            is_branch=is_branch)
        template_specification.update(**rendering_rules.to_dict())
        return self.fill_default_attributes(template_specification)

    def prepare_first_fragment(self):
        self.context.set_simulator()
        simulator, _, _, is_branch = self.context.get_common_params()
        branch_conditions = self.simulator_service.compute_conn_branch_conditions(
            is_branch, simulator)
        form = self.algorithm_service.prepare_adapter_form(
            form_instance=SimulatorAdapterForm(),
            project_id=self.context.project.id,
            extra_conditions=branch_conditions)

        self.simulator_service.validate_first_fragment(form,
                                                       self.context.project.id,
                                                       ConnectivityIndex)
        form.fill_from_trait(self.context.simulator)
        return form

    @expose_fragment('simulator_fragment')
    def set_connectivity(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, _ = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_COUPLING_PARAMS_URL)
            form = SimulatorAdapterForm()
            form.fill_from_post(data)
            self.simulator_service.reset_at_connectivity_change(
                is_simulation_copy, form, session_stored_simulator)
            form.fill_trait(session_stored_simulator)

        next_form = self.algorithm_service.prepare_adapter_form(
            form_instance=get_form_for_coupling(
                type(session_stored_simulator.coupling))())
        self.range_parameters.coupling_parameters = next_form.get_range_parameters()
        next_form.fill_from_trait(session_stored_simulator.coupling)

        rendering_rules = SimulatorFragmentRenderingRules(
            next_form, SimulatorWizzardURLs.SET_COUPLING_PARAMS_URL,
            SimulatorWizzardURLs.SET_CONNECTIVITY_URL, is_simulation_copy,
            is_simulation_load, self.context.last_loaded_fragment_url,
            cherrypy.request.method)
        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def set_coupling_params(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, is_branch = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_SURFACE_URL)
            form = get_form_for_coupling(
                type(session_stored_simulator.coupling))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.coupling)

        surface_fragment = self.algorithm_service.prepare_adapter_form(
            form_instance=SimulatorSurfaceFragment(),
            project_id=self.context.project.id)
        surface_fragment.fill_from_trait(session_stored_simulator.surface)

        rendering_rules = SimulatorFragmentRenderingRules(
            surface_fragment,
            SimulatorWizzardURLs.SET_SURFACE_URL,
            SimulatorWizzardURLs.SET_COUPLING_PARAMS_URL,
            is_simulation_copy,
            is_simulation_load,
            self.context.last_loaded_fragment_url,
            cherrypy.request.method,
            is_branch=is_branch)
        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def set_surface(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, is_branch = self.context.get_common_params()
        rendering_rules = SimulatorFragmentRenderingRules(
            previous_form_action_url=SimulatorWizzardURLs.SET_SURFACE_URL,
            is_simulation_copy=is_simulation_copy,
            is_simulation_readonly_load=is_simulation_load,
            last_form_url=self.context.last_loaded_fragment_url,
            last_request_type=cherrypy.request.method,
            is_branch=is_branch)

        if cherrypy.request.method == POST_REQUEST:
            form = SimulatorSurfaceFragment()
            form.fill_from_post(data)
            self.simulator_service.reset_at_surface_change(
                is_simulation_copy, form, session_stored_simulator)
            form.fill_trait(session_stored_simulator)

            if session_stored_simulator.surface:
                self.context.add_last_loaded_form_url_to_session(
                    SimulatorWizzardURLs.SET_CORTEX_URL)
            else:
                self.context.add_last_loaded_form_url_to_session(
                    SimulatorWizzardURLs.SET_STIMULUS_URL)

        return SimulatorSurfaceFragment.prepare_next_fragment_after_surface(
            session_stored_simulator, rendering_rules, self.context.project.id,
            SimulatorWizzardURLs.SET_CORTEX_URL,
            SimulatorWizzardURLs.SET_STIMULUS_URL)

    @expose_fragment('simulator_fragment')
    def set_cortex(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, _ = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_STIMULUS_URL)
            rm_fragment = SimulatorRMFragment()
            rm_fragment.fill_from_post(data)
            rm_fragment.fill_trait(session_stored_simulator.surface)

        rendering_rules = SimulatorFragmentRenderingRules(
            None, None, SimulatorWizzardURLs.SET_CORTEX_URL,
            is_simulation_copy, is_simulation_load,
            self.context.last_loaded_fragment_url, cherrypy.request.method)

        return SimulatorStimulusFragment.prepare_stimulus_fragment(
            session_stored_simulator, rendering_rules, True,
            SimulatorWizzardURLs.SET_STIMULUS_URL, self.context.project.id)

    @expose_fragment('simulator_fragment')
    def set_stimulus(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, is_branch = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_MODEL_URL)
            stimuli_fragment = SimulatorStimulusFragment(
                session_stored_simulator.is_surface_simulation)
            stimuli_fragment.fill_from_post(data)
            stimuli_fragment.fill_trait(session_stored_simulator)

        model_fragment = self.algorithm_service.prepare_adapter_form(
            form_instance=SimulatorModelFragment())
        model_fragment.fill_from_trait(session_stored_simulator)

        rendering_rules = SimulatorFragmentRenderingRules(
            model_fragment,
            SimulatorWizzardURLs.SET_MODEL_URL,
            SimulatorWizzardURLs.SET_STIMULUS_URL,
            is_simulation_copy,
            is_simulation_load,
            self.context.last_loaded_fragment_url,
            cherrypy.request.method,
            is_model_fragment=True,
            is_surface_simulation=session_stored_simulator.is_surface_simulation,
            is_branch=is_branch)
        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def set_model(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, _ = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_MODEL_PARAMS_URL)
            form = SimulatorModelFragment()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator)

        form = self.algorithm_service.prepare_adapter_form(
            form_instance=get_form_for_model(
                type(session_stored_simulator.model))())
        self.range_parameters.model_parameters = form.get_range_parameters()
        form.fill_from_trait(session_stored_simulator.model)

        rendering_rules = SimulatorFragmentRenderingRules(
            form, SimulatorWizzardURLs.SET_MODEL_PARAMS_URL,
            SimulatorWizzardURLs.SET_MODEL_URL, is_simulation_copy,
            is_simulation_load, self.context.last_loaded_fragment_url,
            cherrypy.request.method)
        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def set_model_params(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, is_branch = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_INTEGRATOR_URL)
            form = get_form_for_model(type(session_stored_simulator.model))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.model)

        integrator_fragment = self.algorithm_service.prepare_adapter_form(
            form_instance=SimulatorIntegratorFragment())
        integrator_fragment.integrator.display_subform = False
        integrator_fragment.fill_from_trait(session_stored_simulator)

        rendering_rules = SimulatorFragmentRenderingRules(
            integrator_fragment,
            SimulatorWizzardURLs.SET_INTEGRATOR_URL,
            SimulatorWizzardURLs.SET_MODEL_PARAMS_URL,
            is_simulation_copy,
            is_simulation_load,
            self.context.last_loaded_fragment_url,
            cherrypy.request.method,
            is_branch=is_branch)
        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def set_integrator(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, _ = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_INTEGRATOR_PARAMS_URL)
            fragment = SimulatorIntegratorFragment()
            fragment.fill_from_post(data)
            fragment.fill_trait(session_stored_simulator)

        form = self.algorithm_service.prepare_adapter_form(
            form_instance=get_form_for_integrator(
                type(session_stored_simulator.integrator))())

        if hasattr(form, 'noise'):
            form.noise.display_subform = False

        form.fill_from_trait(session_stored_simulator.integrator)

        rendering_rules = SimulatorFragmentRenderingRules(
            form, SimulatorWizzardURLs.SET_INTEGRATOR_PARAMS_URL,
            SimulatorWizzardURLs.SET_INTEGRATOR_URL, is_simulation_copy,
            is_simulation_load, self.context.last_loaded_fragment_url,
            cherrypy.request.method)
        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def set_integrator_params(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, _ = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            form = get_form_for_integrator(
                type(session_stored_simulator.integrator))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.integrator)
            if isinstance(session_stored_simulator.integrator,
                          IntegratorStochasticViewModel):
                self.context.add_last_loaded_form_url_to_session(
                    SimulatorWizzardURLs.SET_NOISE_PARAMS_URL)
            else:
                self.context.add_last_loaded_form_url_to_session(
                    SimulatorWizzardURLs.SET_MONITORS_URL)

        rendering_rules = SimulatorFragmentRenderingRules(
            None,
            None,
            SimulatorWizzardURLs.SET_INTEGRATOR_PARAMS_URL,
            is_simulation_copy,
            is_simulation_load,
            self.context.last_loaded_fragment_url,
            cherrypy.request.method,
            is_noise_fragment=False)

        if not isinstance(session_stored_simulator.integrator,
                          IntegratorStochasticViewModel):
            return self.monitors_handler.prepare_monitor_fragment(
                session_stored_simulator, rendering_rules,
                SimulatorWizzardURLs.SET_MONITORS_URL)

        integrator_noise_fragment = get_form_for_noise(
            type(session_stored_simulator.integrator.noise))()
        if hasattr(integrator_noise_fragment, 'equation'):
            integrator_noise_fragment.equation.display_subform = False

        self.range_parameters.integrator_noise_parameters = integrator_noise_fragment.get_range_parameters()
        integrator_noise_fragment.fill_from_trait(
            session_stored_simulator.integrator.noise)

        rendering_rules.form = integrator_noise_fragment
        rendering_rules.form_action_url = SimulatorWizzardURLs.SET_NOISE_PARAMS_URL
        rendering_rules.is_noise_fragment = True
        return rendering_rules.to_dict()
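    # Routing recap for the step above: deterministic integrators jump straight
    # to the monitors fragment, while stochastic integrators get a noise-params
    # fragment first; non-additive noise additionally routes through a noise
    # equation fragment (see set_noise_params / set_noise_equation_params below).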

    @expose_fragment('simulator_fragment')
    def set_noise_params(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, is_branch = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            form = get_form_for_noise(
                type(session_stored_simulator.integrator.noise))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.integrator.noise)
            if isinstance(session_stored_simulator.integrator.noise,
                          AdditiveNoiseViewModel):
                self.context.add_last_loaded_form_url_to_session(
                    SimulatorWizzardURLs.SET_MONITORS_URL)
            else:
                self.context.add_last_loaded_form_url_to_session(
                    SimulatorWizzardURLs.SET_NOISE_EQUATION_PARAMS_URL)

        rendering_rules = SimulatorFragmentRenderingRules(
            None, None, SimulatorWizzardURLs.SET_NOISE_PARAMS_URL,
            is_simulation_copy, is_simulation_load,
            self.context.last_loaded_fragment_url, cherrypy.request.method)

        return self.monitors_handler.prepare_next_fragment_after_noise(
            session_stored_simulator, is_branch, rendering_rules,
            SimulatorWizzardURLs.SET_MONITORS_URL,
            SimulatorWizzardURLs.SET_NOISE_EQUATION_PARAMS_URL)

    @expose_fragment('simulator_fragment')
    def set_noise_equation_params(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, _ = self.context.get_common_params()

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_MONITORS_URL)
            form = get_form_for_equation(
                type(session_stored_simulator.integrator.noise.b))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.integrator.noise.b)

        rendering_rules = SimulatorFragmentRenderingRules(
            None, None, SimulatorWizzardURLs.SET_NOISE_EQUATION_PARAMS_URL,
            is_simulation_copy, is_simulation_load,
            self.context.last_loaded_fragment_url, cherrypy.request.method)

        return self.monitors_handler.prepare_monitor_fragment(
            session_stored_simulator, rendering_rules,
            SimulatorWizzardURLs.SET_MONITORS_URL)

    @staticmethod
    def build_monitor_url(fragment_url, monitor):
        return '{}/{}'.format(fragment_url, monitor)
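    # For example (URL value illustrative; monitor names come from
    # type(monitor).__name__ elsewhere in this controller):
    #   build_monitor_url('/burst/set_monitor_params', 'TemporalAverageViewModel')
    #   -> '/burst/set_monitor_params/TemporalAverageViewModel'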

    def get_first_monitor_fragment_url(self, simulator, monitors_url):
        first_monitor = simulator.first_monitor
        if first_monitor is not None:
            monitor_vm_name = type(first_monitor).__name__
            return self.build_monitor_url(monitors_url, monitor_vm_name)
        return SimulatorWizzardURLs.SETUP_PSE_URL

    @expose_fragment('simulator_fragment')
    def set_monitors(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, is_branch = self.context.get_common_params()
        if cherrypy.request.method == POST_REQUEST:
            fragment = SimulatorMonitorFragment(
                is_surface_simulation=session_stored_simulator.is_surface_simulation)
            fragment.fill_from_post(data)
            self.monitors_handler.set_monitors_list_on_simulator(
                session_stored_simulator, fragment.monitors.value)

        last_loaded_fragment_url = self.get_first_monitor_fragment_url(
            session_stored_simulator,
            SimulatorWizzardURLs.SET_MONITOR_PARAMS_URL)

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                last_loaded_fragment_url)

        rendering_rules = SimulatorFragmentRenderingRules(
            is_simulation_copy=is_simulation_copy,
            is_simulation_readonly_load=is_simulation_load,
            last_request_type=cherrypy.request.method,
            form_action_url=last_loaded_fragment_url,
            previous_form_action_url=SimulatorWizzardURLs.SET_MONITORS_URL)
        return self.monitors_handler.get_fragment_after_monitors(
            session_stored_simulator, self.context.burst_config,
            self.context.project.id, is_branch, rendering_rules,
            SimulatorWizzardURLs.SETUP_PSE_URL)

    def get_url_after_monitors(self, current_monitor, monitor_name,
                               next_monitor):
        if isinstance(current_monitor, BoldViewModel):
            return self.build_monitor_url(
                SimulatorWizzardURLs.SET_MONITOR_EQUATION_URL, monitor_name)
        if next_monitor is not None:
            return self.build_monitor_url(
                SimulatorWizzardURLs.SET_MONITOR_PARAMS_URL,
                type(next_monitor).__name__)
        return SimulatorWizzardURLs.SETUP_PSE_URL
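    # Routing note, as implemented above: a BoldViewModel monitor detours to its
    # HRF-equation page first; any other monitor goes straight to the params page
    # of the next monitor, and after the last monitor the wizard falls through to
    # PSE setup.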

    @staticmethod
    def get_url_for_final_fragment(burst_config):
        if burst_config.is_pse_burst():
            return SimulatorWizzardURLs.LAUNCH_PSE_URL
        return SimulatorWizzardURLs.SETUP_PSE_URL

    def get_urls_for_next_monitor_fragment(self, next_monitor,
                                           current_monitor):
        form_action_url = self.build_monitor_url(
            SimulatorWizzardURLs.SET_MONITOR_PARAMS_URL,
            type(next_monitor).__name__)
        if_bold_url = self.build_monitor_url(
            SimulatorWizzardURLs.SET_MONITOR_EQUATION_URL,
            type(current_monitor).__name__)
        return form_action_url, if_bold_url

    @expose_fragment('simulator_fragment')
    def set_monitor_params(self, current_monitor_name, **data):
        session_stored_simulator, is_simulation_copy, is_simulator_load, _ = self.context.get_common_params()

        current_monitor, next_monitor = self.monitors_handler.get_current_and_next_monitor_form(
            current_monitor_name, session_stored_simulator)

        if cherrypy.request.method == POST_REQUEST:
            form = get_form_for_monitor(
                type(current_monitor))(session_stored_simulator)
            form.fill_from_post(data)
            form.fill_trait(current_monitor)

            last_loaded_form_url = self.get_url_after_monitors(
                current_monitor, current_monitor_name, next_monitor)
            self.context.add_last_loaded_form_url_to_session(
                last_loaded_form_url)

        previous_form_action_url = self.build_monitor_url(
            SimulatorWizzardURLs.SET_MONITOR_PARAMS_URL, current_monitor_name)
        rendering_rules = SimulatorFragmentRenderingRules(
            is_simulation_copy=is_simulation_copy,
            is_simulation_readonly_load=is_simulator_load,
            last_request_type=cherrypy.request.method,
            last_form_url=self.context.last_loaded_fragment_url,
            previous_form_action_url=previous_form_action_url)

        form_action_url, if_bold_url = self.get_urls_for_next_monitor_fragment(
            next_monitor, current_monitor)
        return self.monitors_handler.handle_next_fragment_for_monitors(
            self.context, rendering_rules, current_monitor, next_monitor,
            False, form_action_url, if_bold_url)

    def get_url_after_monitor_equation(self, next_monitor):
        if next_monitor is None:
            return SimulatorWizzardURLs.SETUP_PSE_URL

        last_loaded_fragment_url = self.build_monitor_url(
            SimulatorWizzardURLs.SET_MONITOR_PARAMS_URL,
            type(next_monitor).__name__)
        return last_loaded_fragment_url

    @expose_fragment('simulator_fragment')
    def set_monitor_equation(self, current_monitor_name, **data):
        session_stored_simulator, is_simulation_copy, is_simulator_load, _ = self.context.get_common_params()
        current_monitor, next_monitor = self.monitors_handler.get_current_and_next_monitor_form(
            current_monitor_name, session_stored_simulator)

        if cherrypy.request.method == POST_REQUEST:
            form = get_form_for_equation(type(current_monitor.hrf_kernel))()
            form.fill_from_post(data)
            form.fill_trait(current_monitor.hrf_kernel)

            last_loaded_fragment_url = self.get_url_after_monitor_equation(
                next_monitor)
            self.context.add_last_loaded_form_url_to_session(
                last_loaded_fragment_url)

        previous_form_action_url = self.build_monitor_url(
            SimulatorWizzardURLs.SET_MONITOR_EQUATION_URL,
            current_monitor_name)
        rendering_rules = SimulatorFragmentRenderingRules(
            None, None, previous_form_action_url, is_simulation_copy,
            is_simulator_load, self.context.last_loaded_fragment_url,
            cherrypy.request.method)

        form_action_url, if_bold_url = self.get_urls_for_next_monitor_fragment(
            next_monitor, current_monitor)
        return self.monitors_handler.handle_next_fragment_for_monitors(
            self.context, rendering_rules, current_monitor, next_monitor, True,
            form_action_url, if_bold_url)

    @expose_fragment('simulator_fragment')
    def setup_pse(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulator_load, _ = self.context.get_common_params()
        burst_config = self.context.burst_config
        all_range_parameters = self.range_parameters.get_all_range_parameters()
        next_form = self.algorithm_service.prepare_adapter_form(
            form_instance=SimulatorPSEConfigurationFragment(all_range_parameters))

        if cherrypy.request.method == POST_REQUEST:
            session_stored_simulator.simulation_length = float(
                data['simulation_length'])
            burst_config.name = data['input_simulation_name_id']
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.SET_PSE_PARAMS_URL)

        param1, param2 = self.burst_service.handle_range_params_at_loading(
            burst_config, all_range_parameters)
        if param1:
            param_dict = {'pse_param1': param1.name}
            if param2 is not None:
                param_dict['pse_param2'] = param2.name
            next_form.fill_from_post(param_dict)

        rendering_rules = SimulatorFragmentRenderingRules(
            next_form, SimulatorWizzardURLs.SET_PSE_PARAMS_URL,
            SimulatorWizzardURLs.SETUP_PSE_URL, is_simulation_copy,
            is_simulator_load, self.context.last_loaded_fragment_url,
            cherrypy.request.method)
        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def set_pse_params(self, **data):
        session_stored_simulator, is_simulation_copy, is_simulation_load, _ = self.context.get_common_params()
        burst_config = self.context.burst_config
        form = SimulatorPSEConfigurationFragment(
            self.range_parameters.get_all_range_parameters())

        if cherrypy.request.method == POST_REQUEST:
            self.context.add_last_loaded_form_url_to_session(
                SimulatorWizzardURLs.LAUNCH_PSE_URL)
            form.fill_from_post(data)

            param1 = form.pse_param1.value
            burst_config.range1 = param1.to_json()
            param2 = None
            if form.pse_param2.value:
                param2 = form.pse_param2.value
                burst_config.range2 = param2.to_json()
        else:
            all_range_parameters = self.range_parameters.get_all_range_parameters()
            param1, param2 = self.burst_service.handle_range_params_at_loading(
                burst_config, all_range_parameters)
        next_form = self.algorithm_service.prepare_adapter_form(
            form_instance=SimulatorPSERangeFragment(param1, param2))

        rendering_rules = SimulatorFragmentRenderingRules(
            next_form,
            SimulatorWizzardURLs.LAUNCH_PSE_URL,
            SimulatorWizzardURLs.SET_PSE_PARAMS_URL,
            is_simulation_copy,
            is_simulation_load,
            last_form_url=self.context.last_loaded_fragment_url,
            is_launch_pse_fragment=True)
        return rendering_rules.to_dict()

    @expose_json
    def launch_pse(self, **data):
        session_stored_simulator = self.context.simulator
        all_range_parameters = self.range_parameters.get_all_range_parameters()
        range_param1, range_param2 = SimulatorPSERangeFragment.fill_from_post(
            all_range_parameters, **data)

        burst_config = self.context.burst_config
        burst_config.start_time = datetime.now()
        burst_config.range1 = range_param1.to_json()
        if range_param2:
            burst_config.range2 = range_param2.to_json()
        burst_config = self.burst_service.prepare_burst_for_pse(burst_config)
        session_stored_simulator.operation_group_gid = uuid.UUID(
            burst_config.operation_group.gid)
        session_stored_simulator.ranges = json.dumps(burst_config.ranges)

        try:
            thread = threading.Thread(
                target=self.simulator_service.async_launch_and_prepare_pse,
                kwargs={
                    'burst_config': burst_config,
                    'user': self.context.logged_user,
                    'project': self.context.project,
                    'simulator_algo': self.cached_simulator_algorithm,
                    'range_param1': range_param1,
                    'range_param2': range_param2,
                    'session_stored_simulator': session_stored_simulator
                })
            thread.start()
            return {'id': burst_config.id}
        except BurstServiceException as e:
            self.logger.exception("Could not launch burst!")
            return {'error': e.message}

    @expose_json
    def launch_simulation(self, launch_mode, **data):
        current_form = SimulatorFinalFragment()
        burst_config = self.context.burst_config
        burst_config.range1 = None
        burst_config.range2 = None

        try:
            current_form.fill_from_post(data)
        except Exception as exc:
            self.logger.exception(exc)
            return {'error': str(exc)}

        burst_name = current_form.simulation_name.value
        session_stored_simulator = self.context.simulator
        session_stored_simulator.simulation_length = current_form.simulation_length.value

        if burst_name != 'none_undefined':
            burst_config.name = burst_name

        if launch_mode == self.burst_service.LAUNCH_BRANCH:
            simulation_state_index = self.simulator_service.get_simulation_state_index(
                burst_config, SimulationHistoryIndex)
            session_stored_simulator.history_gid = simulation_state_index[0].gid

        burst_config.start_time = datetime.now()
        session_burst_config = self.burst_service.store_burst(burst_config)

        try:
            thread = threading.Thread(
                target=self.simulator_service.async_launch_and_prepare_simulation,
                kwargs={
                    'burst_config': session_burst_config,
                    'user': self.context.logged_user,
                    'project': self.context.project,
                    'simulator_algo': self.cached_simulator_algorithm,
                    'simulator': session_stored_simulator
                })
            thread.start()
            return {'id': session_burst_config.id}
        except BurstServiceException as e:
            self.logger.exception('Could not launch burst!')
            return {'error': e.message}

    @expose_fragment('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one alternative for handling the browser-back ('chrome-back') problem.
        """
        bursts = self.burst_service.get_available_bursts(
            self.context.project.id)
        self.burst_service.populate_burst_disk_usage(bursts)
        return {
            'burst_list': bursts,
            'selectedBurst': self.context.burst_config.id,
            'first_fragment_url': SimulatorFragmentRenderingRules.FIRST_FORM_URL
        }

    @cherrypy.expose
    def get_last_fragment_url(self, burst_config_id):
        burst_config = self.burst_service.load_burst_configuration(
            burst_config_id)
        return self.get_url_for_final_fragment(burst_config)

    @expose_fragment('simulator_fragment')
    def load_burst_read_only(self, burst_config_id):
        try:
            burst_config = self.burst_service.load_burst_configuration(
                burst_config_id)
            storage_path = self.files_helper.get_project_folder(
                self.context.project, str(burst_config.fk_simulation))
            simulator = h5.load_view_model(burst_config.simulator_gid,
                                           storage_path)
            last_loaded_form_url = self.get_url_for_final_fragment(
                burst_config)
            self.context.init_session_at_burst_loading(burst_config, simulator,
                                                       last_loaded_form_url)

            form = self.prepare_first_fragment()
            self.monitors_handler.build_list_of_monitors_from_view_models(
                self.context.simulator)
            rendering_rules = SimulatorFragmentRenderingRules(
                form,
                SimulatorWizzardURLs.SET_CONNECTIVITY_URL,
                is_simulation_readonly_load=True,
                is_first_fragment=True)
            return rendering_rules.to_dict()
        except Exception:
            # Most probably the burst was removed. Delete it from the session, so
            # that the client gets a valid page on refresh.
            self.logger.exception("Error loading burst")
            self.context.remove_burst_config_from_session()
            raise

    def _prepare_first_fragment_for_burst_copy(self, burst_config_id,
                                               burst_name_format):
        simulator, burst_config_copy = self.burst_service.prepare_data_for_burst_copy(
            burst_config_id, burst_name_format, self.context.project)
        self.monitors_handler.build_list_of_monitors_from_view_models(
            simulator)

        last_loaded_form_url = self.get_url_for_final_fragment(
            burst_config_copy)
        self.context.init_session_at_copy_preparation(burst_config_copy,
                                                      simulator,
                                                      last_loaded_form_url)
        return self.prepare_first_fragment()

    @expose_fragment('simulator_fragment')
    def copy_simulator_configuration(self, burst_config_id):
        self.context.add_branch_and_copy_to_session(False, True)
        form = self._prepare_first_fragment_for_burst_copy(
            burst_config_id, self.COPY_NAME_FORMAT)
        rendering_rules = SimulatorFragmentRenderingRules(
            form,
            SimulatorWizzardURLs.SET_CONNECTIVITY_URL,
            is_simulation_copy=True,
            is_simulation_readonly_load=True,
            is_first_fragment=True)
        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def branch_simulator_configuration(self, burst_config_id):
        self.context.add_branch_and_copy_to_session(True, False)
        form = self._prepare_first_fragment_for_burst_copy(
            burst_config_id, self.BRANCH_NAME_FORMAT)
        rendering_rules = SimulatorFragmentRenderingRules(
            form,
            SimulatorWizzardURLs.SET_CONNECTIVITY_URL,
            is_simulation_copy=True,
            is_simulation_readonly_load=True,
            is_first_fragment=True)

        return rendering_rules.to_dict()

    @expose_fragment('simulator_fragment')
    def reset_simulator_configuration(self):
        burst_config = BurstConfiguration(self.context.project.id)
        self.context.init_session_at_sim_reset(
            burst_config, SimulatorWizzardURLs.SET_CONNECTIVITY_URL)
        self.monitors_handler.clear_next_monitors_dict()

        form = self.prepare_first_fragment()
        rendering_rules = SimulatorFragmentRenderingRules(
            form,
            SimulatorWizzardURLs.SET_CONNECTIVITY_URL,
            is_first_fragment=True)
        return rendering_rules.to_dict()

    @expose_json
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to burst_name.
        """
        validation_result = SimulatorFinalFragment.is_burst_name_ok(burst_name)
        if validation_result is True:
            self.burst_service.rename_burst(burst_id, burst_name)
            return {'success': "Simulation successfully renamed!"}
        else:
            self.logger.exception(validation_result)
            return {'error': validation_result}

    @expose_json
    def get_history_status(self, **data):
        """
        For each burst id received, get the status and return it.
        """
        return self.burst_service.update_history_status(
            json.loads(data['burst_ids']))

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def export(self, burst_id):
        export_manager = ExportManager()
        export_zip = export_manager.export_simulator_configuration(burst_id)

        result_name = "tvb_simulation_" + str(burst_id) + ".zip"
        return serve_file(export_zip, "application/x-download", "attachment",
                          result_name)

    @expose_fragment("overlay")
    def get_upload_overlay(self):
        template_specification = self.fill_overlay_attributes(
            None, "Upload", "Simulation ZIP", "burst/upload_burst_overlay",
            "dialog-upload")
        template_specification['first_fragment_url'] = SimulatorWizzardURLs.SET_CONNECTIVITY_URL
        return self.fill_default_attributes(template_specification)

    @cherrypy.expose
    @handle_error(redirect=True)
    @check_user
    @settings
    def load_simulator_configuration_from_zip(self, **data):
        """Upload Simulator from previously exported ZIP file"""
        self.logger.debug("Uploading ..." + str(data))
        last_loaded_form_url = SimulatorWizzardURLs.SETUP_PSE_URL

        try:
            upload_param = "uploadedfile"
            if upload_param in data and data[upload_param]:
                simulator, burst_config, sim_folder = self.burst_service.load_simulation_from_zip(
                    data[upload_param], self.context.project)

                dts_folder = os.path.join(
                    sim_folder, ExportManager.EXPORTED_SIMULATION_DTS_DIR)
                ImportService().import_project_operations(
                    self.context.project, dts_folder, False, None)

                self.monitors_handler.build_list_of_monitors_from_view_models(
                    simulator)
                if burst_config.is_pse_burst():
                    last_loaded_form_url = SimulatorWizzardURLs.LAUNCH_PSE_URL
                self.context.init_session_at_sim_config_from_zip(
                    burst_config, simulator, last_loaded_form_url)
        except IOError as ioexcep:
            self.logger.exception(ioexcep)
            self.context.set_warning_message(
                "This ZIP does not contain a complete simulator configuration")
        except ServicesBaseException as excep:
            self.logger.warning(excep.message)
            self.context.set_warning_message(excep.message)

        raise cherrypy.HTTPRedirect('/burst/')

    def launch(self, view_model):
        # type: (ZIPConnectivityImporterModel) -> [ConnectivityIndex]
        """
        Execute import operations: unpack ZIP and build Connectivity object as result.
        :raises LaunchException: when `uploaded` is empty or nonexistent
        :raises Exception: when
                    * weights or tracts matrix is invalid (negative values, wrong shape)
                    * any of the orientation, areas, cortical or hemisphere vectors \
                      has a length different from the expected number of nodes
        """
        if view_model.uploaded is None:
            raise LaunchException(
                "Please select a ZIP file which contains data to import")

        files = FilesHelper().unpack_zip(view_model.uploaded,
                                         self.storage_path)

        weights_matrix = None
        centres = None
        labels_vector = None
        tract_matrix = None
        orientation = None
        areas = None
        cortical_vector = None
        hemisphere_vector = None

        for file_name in files:
            file_name_low = file_name.lower()
            if self.WEIGHT_TOKEN in file_name_low:
                weights_matrix = self.read_list_data(file_name)
            elif self.CENTRES_TOKEN in file_name_low or self.CENTRES_TOKEN2 in file_name_low:
                centres = self.read_list_data(file_name, usecols=[1, 2, 3])
                labels_vector = self.read_list_data(file_name,
                                                    dtype=str,
                                                    usecols=[0])
            elif self.TRACT_TOKEN in file_name_low:
                tract_matrix = self.read_list_data(file_name)
            elif self.ORIENTATION_TOKEN in file_name_low:
                orientation = self.read_list_data(file_name)
            elif self.AREA_TOKEN in file_name_low:
                areas = self.read_list_data(file_name)
            elif self.CORTICAL_INFO in file_name_low:
                cortical_vector = self.read_list_data(file_name,
                                                      dtype=bool)
            elif self.HEMISPHERE_INFO in file_name_low:
                hemisphere_vector = self.read_list_data(file_name,
                                                        dtype=bool)

        # Clean remaining text-files.
        FilesHelper.remove_files(files, True)

        result = Connectivity()

        # Fill positions
        if centres is None:
            raise Exception(
                "Region centres are required for Connectivity Regions! "
                "We expect a file that contains *centres* inside the uploaded ZIP."
            )
        expected_number_of_nodes = len(centres)
        if expected_number_of_nodes < 2:
            raise Exception("A connectivity with at least 2 nodes is expected")
        result.centres = centres
        if labels_vector is not None:
            result.region_labels = labels_vector

        # Fill and check weights
        if weights_matrix is not None:
            if weights_matrix.shape != (expected_number_of_nodes,
                                        expected_number_of_nodes):
                raise Exception(
                    "Unexpected shape for weights matrix! "
                    "Should be %d x %d " %
                    (expected_number_of_nodes, expected_number_of_nodes))
            result.weights = weights_matrix
            if view_model.normalization:
                result.weights = result.scaled_weights(
                    view_model.normalization)

        # Fill and check tracts. Allow empty files for tracts, they will be computed by tvb-library.
        if tract_matrix is not None:
            if tract_matrix.size != 0:
                if numpy.any([x < 0 for x in tract_matrix.flatten()]):
                    raise Exception(
                        "Negative values are not accepted in tracts matrix! "
                        "Please check your file, and use values >= 0")
                if tract_matrix.shape != (expected_number_of_nodes,
                                          expected_number_of_nodes):
                    raise Exception(
                        "Unexpected shape for tracts matrix! "
                        "Should be %d x %d " %
                        (expected_number_of_nodes, expected_number_of_nodes))
            result.tract_lengths = tract_matrix

        if orientation is not None:
            if len(orientation) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector orientation. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.orientations = orientation

        if areas is not None:
            if len(areas) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector areas. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.areas = areas

        if cortical_vector is not None:
            if len(cortical_vector) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector cortical. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.cortical = cortical_vector

        if hemisphere_vector is not None:
            if len(hemisphere_vector) != expected_number_of_nodes:
                raise Exception(
                    "Invalid size for vector hemispheres. "
                    "Expected the same as region-centers number %d" %
                    expected_number_of_nodes)
            result.hemispheres = hemisphere_vector

        result.configure()
        return h5.store_complete(result, self.storage_path)
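
# A typical connectivity ZIP matched by the token checks in launch() above.
# File names are illustrative: the importer only requires the corresponding
# token substring (WEIGHT_TOKEN, CENTRES_TOKEN, ...) in the lower-cased name.
#
#   connectivity.zip
#     weights.txt               -> weights_matrix (N x N)
#     centres.txt               -> centres (cols 1-3) and region labels (col 0)
#     tract_lengths.txt         -> tract_matrix (N x N, values >= 0)
#     average_orientations.txt  -> orientation (N rows)
#     areas.txt                 -> areas (N rows)
#     cortical.txt              -> cortical_vector (N booleans)
#     hemispheres.txt           -> hemisphere_vector (N booleans)
#
# Only the centres file is mandatory; every optional vector must match the
# number of region centres, N >= 2.
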
# Two constructor fragments whose enclosing class declarations were lost in
# extraction; the class names below are placeholders, the bodies are original.
class _InputTreeService(object):
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
        self.input_tree_manager = InputTreeManager()


class _ProjectTrackingService(object):
    def __init__(self):
        self.logger = get_logger(__name__)
        self.user_id = None
        self.files_helper = FilesHelper()
        self.created_projects = []


class TVBLoader(object):
    def __init__(self, registry):
        self.file_handler = FilesHelper()
        self.registry = registry

    def path_for_stored_index(self, dt_index_instance):
        # type: (DataType) -> str
        """ Given a Datatype(HasTraitsIndex) instance, build where the corresponding H5 should be or is stored"""
        operation = dao.get_operation_by_id(
            dt_index_instance.fk_from_operation)
        operation_folder = self.file_handler.get_project_folder(
            operation.project, str(operation.id))

        gid = uuid.UUID(dt_index_instance.gid)
        h5_file_class = self.registry.get_h5file_for_index(
            dt_index_instance.__class__)
        fname = get_h5_filename(h5_file_class.file_name_base(), gid)

        return os.path.join(operation_folder, fname)
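
    # The resulting location is, schematically (exact segments depend on
    # FilesHelper.get_project_folder and get_h5_filename; shown for illustration):
    #   <projects_root>/<project_name>/<operation_id>/<FileClassBase>_<gid>.h5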

    def path_for(self, operation_dir, h5_file_class, gid, dt_class=None):
        if isinstance(gid, str):
            gid = uuid.UUID(gid)
        fname = get_h5_filename(dt_class or h5_file_class.file_name_base(),
                                gid)
        return os.path.join(operation_dir, fname)

    def load_from_index(self, dt_index):
        # type: (DataType) -> HasTraits
        h5_path = self.path_for_stored_index(dt_index)
        h5_file_class = self.registry.get_h5file_for_index(dt_index.__class__)
        traits_class = self.registry.get_datatype_for_index(dt_index)
        with h5_file_class(h5_path) as f:
            result_dt = traits_class()
            f.load_into(result_dt)
        return result_dt

    def load_complete_by_function(self, file_path, load_ht_function):
        # type: (str, callable) -> (HasTraits, GenericAttributes)
        with H5File.from_file(file_path) as f:
            try:
                datatype_cls = self.registry.get_datatype_for_h5file(f)
            except KeyError:
                datatype_cls = f.determine_datatype_from_file()
            datatype = datatype_cls()
            f.load_into(datatype)
            ga = f.load_generic_attributes()
            sub_dt_refs = f.gather_references(datatype_cls)

        for traited_attr, sub_gid in sub_dt_refs:
            if sub_gid is None:
                continue
            is_monitor = False
            if isinstance(sub_gid, list):
                sub_gid = sub_gid[0]
                is_monitor = True
            ref_ht = load_ht_function(sub_gid, traited_attr)
            if is_monitor:
                ref_ht = [ref_ht]
            setattr(datatype, traited_attr.field_name, ref_ht)

        return datatype, ga

    def load_with_references(self, file_path):
        def load_ht_function(sub_gid, traited_attr):
            ref_idx = dao.get_datatype_by_gid(sub_gid.hex, load_lazy=False)
            ref_ht = self.load_from_index(ref_idx)
            return ref_ht

        return self.load_complete_by_function(file_path, load_ht_function)

    def load_with_links(self, file_path):
        def load_ht_function(sub_gid, traited_attr):
            # Use traited_attr.default for cases like ProjectionMonitor, whose obsnoise is of the
            # abstract type Noise and cannot be instantiated directly, while its default is Additive()
            ref_ht = traited_attr.default or traited_attr.field_type()
            ref_ht.gid = sub_gid
            return ref_ht

        return self.load_complete_by_function(file_path, load_ht_function)
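
    # Contrast of the two strategies above: load_with_references recursively
    # materializes each referenced datatype from its own H5 file (a full object
    # graph), while load_with_links builds lightweight stand-ins carrying only
    # the gid, which is enough when the caller merely needs to re-link entities.
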
class DatatypesFactory():
    """
    This class provides a set of methods that helps user to create
    different data types for testing.
    These data types will be automatically stored in DB and file system if needed.
    """
    USER_FULL_NAME = "Datatype Factory User"
    DATATYPE_STATE = "RAW_DATA"
    DATATYPE_DATA = ["test", "for", "datatypes", "factory"]

    DATATYPE_MEASURE_METRIC = {'v': 3}
    RANGE_1 = ["row1", [1, 2, 3]]
    RANGE_2 = ["row2", [0.1, 0.3, 0.5]]

    user = None
    project = None
    operation = None


    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "datatype_factory" + micro_postfix + "@example.com", True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        alg_group = model.AlgorithmGroup("test_module1", "classname1", alg_category.id)
        dao.store_entity(alg_group)
        algorithm = model.Algorithm(alg_group.id, 'id', name='', req_data='', param_name='', output='')
        self.algorithm = dao.store_entity(algorithm)

        #Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(operation)


    def get_project(self):
        """
        Return project to which generated data types are assigned
        """
        return self.project


    def get_operation(self):
        """
        Return operation to which generated data types are assigned
        """
        return self.operation


    def get_user(self):
        """
        Return user to which generated data types are assigned
        """
        return self.user


    def _store_datatype(self, data_type, operation_id=None):
        """
        Launch the StoreAdapter to create and persist a DataType.
        """
        operation_id = operation_id or self.operation.id
        data_type.type = data_type.__class__.__name__
        data_type.module = data_type.__class__.__module__
        data_type.subject = self.USER_FULL_NAME
        data_type.state = self.DATATYPE_STATE
        data_type.set_operation_id(operation_id)

        adapter_instance = StoreAdapter([data_type])
        operation = dao.get_operation_by_id(operation_id)
        OperationService().initiate_prelaunch(operation, adapter_instance, {})

        return data_type


    def create_simple_datatype(self, subject=USER_FULL_NAME, state=DATATYPE_STATE):
        """
        This method creates a simple data type
        """
        datatype_inst = Datatype1()
        self._fill_datatype(datatype_inst, subject, state)

        # Store data type
        return self._store_datatype(datatype_inst)
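
    # A short usage sketch (only names defined in this factory are used):
    #   factory = DatatypesFactory()
    #   dt = factory.create_simple_datatype()
    #   assert dt.state == DatatypesFactory.DATATYPE_STATE
    #   assert dt.subject == DatatypesFactory.USER_FULL_NAME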


    def create_datatype_with_storage(self, subject=USER_FULL_NAME, state=DATATYPE_STATE,
                                     data=DATATYPE_DATA, operation_id=None):
        """
        This method creates and stores a data type which imply storage on the file system.
        """
        datatype_inst = Datatype2()
        self._fill_datatype(datatype_inst, subject, state, operation_id)

        datatype_inst.string_data = data

        return self._store_datatype(datatype_inst, operation_id)


    def _fill_datatype(self, datatype, subject, state, operation_id=None):
        """
        This method sets some common attributes on the given dataType.
        """
        operation_id = operation_id or self.operation.id
        datatype.subject = subject
        datatype.state = state
        # Set_operation_id also sets storage_path attribute
        datatype.set_operation_id(operation_id)


    def __create_operation(self):
        """
        Create an operation entity. Return the operation, algorithm id and the storage path.
        """
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe", DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        algorithm, algo_group = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        operation = model.Operation(self.user.id, self.project.id, algo_group.id, json.dumps(''), meta=json.dumps(meta),
                                    status=model.STATUS_STARTED, method_name=ABCAdapter.LAUNCH_METHOD)
        operation = dao.store_entity(operation)
        storage_path = FilesHelper().get_project_folder(self.project, str(operation.id))
        return operation, algorithm.id, storage_path


    def create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        operation, algo_id, storage_path = self.__create_operation()
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return algo_id, connectivity


    def create_timeseries(self, connectivity, ts_type=None, sensors=None):
        """
        Create a stored TimeSeries entity.
        """
        operation, _, storage_path = self.__create_operation()

        if ts_type == "EEG":
            time_series = TimeSeriesEEG(storage_path=storage_path, sensors=sensors)
        else:
            rm = dao.get_generic_entity(RegionMapping, connectivity.gid, '_connectivity')
            if len(rm) < 1:
                rm = None
            else:
                rm = rm[0]
            time_series = TimeSeriesRegion(storage_path=storage_path, connectivity=connectivity, region_mapping=rm)

        data = numpy.random.random((10, 10, 10, 10))
        time_series.write_data_slice(data)
        time_series.write_time_slice(numpy.arange(10))
        adapter_instance = StoreAdapter([time_series])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        time_series = dao.get_datatype_by_gid(time_series.gid)
        return time_series


    def create_covariance(self, time_series):
        """
        :returns: a stored DataType Covariance.
        """
        operation, _, storage_path = self.__create_operation()
        covariance = Covariance(storage_path=storage_path, source=time_series)
        covariance.write_data_slice(numpy.random.random((10, 10, 10)))
        adapter_instance = StoreAdapter([covariance])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return covariance


    def create_crosscoherence(self, time_series):
        """
        :returns: a stored entity of type CoherenceSpectrum
        """
        operation, _, storage_path = self.__create_operation()
        partial_coh = CoherenceSpectrum(array_data=numpy.random.random((10, 10, 10, 10)), use_storage=False)
        coherence = CoherenceSpectrum(source=time_series, storage_path=storage_path, frequency=0.1, nfft=256)
        coherence.write_data_slice(partial_coh)
        coherence.close_file()
        adapter_instance = StoreAdapter([coherence])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return coherence


    def create_crosscorrelation(self, time_series):
        """
        :returns: `CrossCorrelation` stored entity.
        """
        operation, _, storage_path = self.__create_operation()
        partial_corr = CrossCorrelation(array_data=numpy.random.random((10, 10, 10, 10, 10)), use_storage=False)
        crossc = CrossCorrelation(source=time_series, storage_path=storage_path, time=range(10))
        crossc.write_data_slice(partial_corr)
        crossc.close_file()
        adapter_instance = StoreAdapter([crossc])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return crossc


    def create_surface(self):
        """
        Create a dummy surface entity.
        :returns: (Algorithm Identifier, stored Surface entity)
        """
        operation, algo_id, storage_path = self.__create_operation()
        surface = CorticalSurface(storage_path=storage_path)
        surface.vertices = numpy.array([[-10, 0, 0],
                                        [0, 0, -10],
                                        [10, 0, 0],
                                        [0, 10, 0]], dtype=float)
        surface.triangles = numpy.array([[0, 1, 2],
                                         [0, 1, 3],
                                         [1, 2, 3],
                                         [0, 2, 3]], dtype=int)
        surface.number_of_triangles = 4
        surface.number_of_vertices = 4
        surface.triangle_normals = numpy.ones((4, 3))
        surface.vertex_normals = numpy.ones((4, 3))
        surface.zero_based_triangles = True
        surface.validate()
        adapter_instance = StoreAdapter([surface])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return algo_id, surface


    def create_connectivity_measure(self, connectivity):
        """
        :returns: persisted entity ConnectivityMeasure
        """
        operation, _, storage_path = self.__create_operation()
        conn_measure = ConnectivityMeasure(storage_path=storage_path)
        conn_measure.connectivity = connectivity
        adapter_instance = StoreAdapter([conn_measure])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return conn_measure


    def create_datatype_measure(self, analyzed_entity, operation=None, storage_path=None):
        """
        :return: persisted DatatypeMeasure
        """
        if operation is None:
            operation, _, storage_path = self.__create_operation()
        measure = DatatypeMeasure(storage_path=storage_path, metrics=self.DATATYPE_MEASURE_METRIC)
        measure.analyzed_datatype = analyzed_entity
        adapter_instance = StoreAdapter([measure])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return measure


    def create_ICA(self, timeseries):
        """
        :returns: persisted entity IndependentComponents
        """
        operation, _, storage_path = self.__create_operation()
        partial_ts = TimeSeries(use_storage=False)
        partial_ts.data = numpy.random.random((10, 10, 10, 10))
        partial_ica = IndependentComponents(source=partial_ts,
                                            component_time_series=numpy.random.random((10, 10, 10, 10)),
                                            prewhitening_matrix=numpy.random.random((10, 10, 10, 10)),
                                            unmixing_matrix=numpy.random.random((10, 10, 10, 10)),
                                            n_components=10, use_storage=False)
        ica = IndependentComponents(source=timeseries, n_components=10, storage_path=storage_path)
        ica.write_data_slice(partial_ica)
        adapter_instance = StoreAdapter([ica])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return ica


    def create_datatype_group(self, subject=USER_FULL_NAME, state=DATATYPE_STATE):
        """ 
        This method creates, stores and returns a DataTypeGroup entity.
        """
        group = model.OperationGroup(self.project.id, ranges=[json.dumps(self.RANGE_1), json.dumps(self.RANGE_2)])
        group = dao.store_entity(group)
        group_ms = model.OperationGroup(self.project.id, ranges=[json.dumps(self.RANGE_1), json.dumps(self.RANGE_2)])
        group_ms = dao.store_entity(group_ms)

        datatype_group = model.DataTypeGroup(group, subject=subject, state=state, operation_id=self.operation.id)
        # Set storage path, before setting data
        datatype_group.storage_path = self.files_helper.get_project_folder(self.project, str(self.operation.id))
        datatype_group = dao.store_entity(datatype_group)

        dt_group_ms = model.DataTypeGroup(group_ms, subject=subject, state=state, operation_id=self.operation.id)
        # Set storage path, before setting data
        dt_group_ms.storage_path = self.files_helper.get_project_folder(self.project, str(self.operation.id))
        dao.store_entity(dt_group_ms)

        # Now create some data types and add them to group
        for range_val1 in self.RANGE_1[1]:
            for range_val2 in self.RANGE_2[1]:
                operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                            meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                            method_name=ABCAdapter.LAUNCH_METHOD,
                                            range_values=json.dumps({self.RANGE_1[0]: range_val1,
                                                                     self.RANGE_2[0]: range_val2}))
                operation.fk_operation_group = group.id
                operation = dao.store_entity(operation)
                datatype = self.create_datatype_with_storage(operation_id=operation.id)
                datatype.number1 = range_val1
                datatype.number2 = range_val2
                datatype.fk_datatype_group = datatype_group.id
                datatype.set_operation_id(operation.id)
                dao.store_entity(datatype)

                op_ms = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                        meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                        method_name=ABCAdapter.LAUNCH_METHOD,
                                        range_values=json.dumps({self.RANGE_1[0]: range_val1,
                                                                 self.RANGE_2[0]: range_val2}))
                op_ms.fk_operation_group = group_ms.id
                op_ms = dao.store_entity(op_ms)
                self.create_datatype_measure(datatype, op_ms,
                                             FilesHelper().get_project_folder(self.project, str(op_ms.id)))

        return datatype_group
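
# Usage sketch (hypothetical; `DatatypesFactory` stands in for the enclosing
# factory class, whose definition lies outside this excerpt):
#
#     factory = DatatypesFactory()
#     datatype = factory.create_datatype_with_storage(data="test data")
#     algo_id, connectivity = factory.create_connectivity()
#     time_series = factory.create_timeseries(connectivity)
#     dt_group = factory.create_datatype_group()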
    def __init__(self, registry):
        self.file_handler = FilesHelper()
        self.registry = registry

    def _store_view_model(operation, project, view_model):
        storage_path = FilesHelper().get_project_folder(project, str(operation.id))
        h5.store_view_model(view_model, storage_path)
class FigureService:
    """
    Service layer for Figure entities.
    """
    _TYPE_PNG = "png"
    _TYPE_SVG = "svg"

    _BRANDING_BAR_PNG = os.path.join(os.path.dirname(__file__), "resources",
                                     "branding_bar.png")
    _BRANDING_BAR_SVG = os.path.join(os.path.dirname(__file__), "resources",
                                     "branding_bar.svg")

    _DEFAULT_SESSION_NAME = "Default"
    _DEFAULT_IMAGE_FILE_NAME = "snapshot."

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    def _write_png(self, store_path, export_data):
        img_data = base64.b64decode(export_data)      # decode the image
        final_image = Image.open(StringIO(img_data))  # place it in a PIL stream

        # paste the branding bar over the bottom of the figure
        branding_bar = Image.open(FigureService._BRANDING_BAR_PNG)
        final_image.paste(branding_bar, (0, final_image.size[1] - branding_bar.size[1]), branding_bar)

        final_image.save(store_path)  # store to disk as PNG

    def _write_svg(self, store_path, export_data):
        dom = xml.dom.minidom.parseString(export_data)
        figureSvg = dom.getElementsByTagName('svg')[0]  # get the original image

        dom = xml.dom.minidom.parse(FigureService._BRANDING_BAR_SVG)

        try:
            width = float(figureSvg.getAttribute('width').replace('px', ''))
            height = float(figureSvg.getAttribute('height').replace('px', ''))
        except ValueError:  # defaults when dimensions are not given
            width = 1024
            height = 768
            figureSvg.setAttribute("width", str(width))
            figureSvg.setAttribute("height", str(height))

        finalSvg = dom.createElement('svg')  # prepare the final svg
        brandingSvg = dom.getElementsByTagName('svg')[0]  # get the branding bar
        brandingSvg.setAttribute("y", str(height))  # position it below the figure
        height += float(brandingSvg.getAttribute('height').replace('px', ''))  # grow the canvas by the bar's height
        finalSvg.setAttribute("width", str(width))  # same width as the original figure
        finalSvg.setAttribute("height", str(height))

        finalSvg.appendChild(figureSvg)  # add the image
        finalSvg.appendChild(brandingSvg)  # and the branding bar

        with open(store_path, 'w') as dest:
            finalSvg.writexml(dest)  # store to disk

    def _image_path(self, project_name, img_type):
        "Generate path where to store image"
        images_folder = self.file_helper.get_images_folder(project_name)
        file_name = FigureService._DEFAULT_IMAGE_FILE_NAME + img_type
        return utils.get_unique_file_name(images_folder, file_name)

    @staticmethod
    def _generate_image_name(project, user, operation, image_name):
        if not image_name:
            if operation is not None:
                # create a name based on the operation that created the image
                # e.g. TVB-Algo-Name-354
                image_name = operation.algorithm.name.replace(' ', '-')
            else:
                # default to a generic name
                image_name = "figure"
        figure_count = dao.get_figure_count(project.id, user.id) + 1
        return 'TVB-%s-%s' % (image_name, figure_count)

    def store_result_figure(self, project, user, img_type, export_data, image_name=None, operation_id=None):
        """
        Store the result image into a file and keep a reference to it in the DB.
        """
        store_path, file_name = self._image_path(project.name, img_type)

        if img_type == FigureService._TYPE_PNG:  # PNG file from canvas
            self._write_png(store_path, export_data)
        elif img_type == FigureService._TYPE_SVG:  # SVG file from svg viewer
            self._write_svg(store_path, export_data)

        if operation_id:
            operation = dao.get_operation_by_id(operation_id)
        else:
            operation = None
            operation_id = None

        image_name = self._generate_image_name(project, user, operation, image_name)

        # Store entity into DB
        entity = model.ResultFigure(operation_id, user.id, project.id,
                                    FigureService._DEFAULT_SESSION_NAME,
                                    image_name, file_name, img_type)
        entity = dao.store_entity(entity)

        # Load instance from DB to have lazy fields loaded
        figure = dao.load_figure(entity.id)
        # Write image meta data to disk
        self.file_helper.write_image_metadata(figure)

        if operation:
            # Force writing operation meta data on disk.
            # This is important later for operation import
            self.file_helper.write_operation_metadata(operation)

    def retrieve_result_figures(self, project, user, selected_session_name='all_sessions'):
        """
        Retrieve from DB all the stored Displayer previews that belong to the specified
        session, for the current user and project, grouped by session.
        """
        result, previews_info = dao.get_previews(project.id, user.id, selected_session_name)
        for name in result:
            for figure in result[name]:
                figures_folder = self.file_helper.get_images_folder(project.name)
                figure_full_path = os.path.join(figures_folder, figure.file_path)
                # Convert the full disk path into an URL path
                figure.file_path = utils.path2url_part(figure_full_path)
        return result, previews_info

    @staticmethod
    def load_figure(figure_id):
        """
        Loads a stored figure by its id.
        """
        return dao.load_figure(figure_id)

    def edit_result_figure(self, figure_id, **data):
        """
        Retrieve and edit a previously stored figure.
        """
        figure = dao.load_figure(figure_id)
        figure.session_name = data['session_name']
        figure.name = data['name']
        dao.store_entity(figure)

        # Load instance from DB to have lazy fields loaded.
        figure = dao.load_figure(figure_id)
        # Store figure meta data in an XML attached to the image.
        self.file_helper.write_image_metadata(figure)

    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.file_helper.get_images_folder(figure.project.name)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.file_helper.remove_image_metadata(figure)

        # Remove figure reference from DB.
        result = dao.remove_entity(model.ResultFigure, figure_id)
        return result
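
# A minimal usage sketch of FigureService (TestFigureService below exercises
# the same API against a transactional test project):
#
#     service = FigureService()
#     service.store_result_figure(project, user, "png", base64_png_data,
#                                 image_name="my-figure")
#     figures_by_session, _ = service.retrieve_result_figures(project, user)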
class TestFigureService(TransactionalTestCase):
    """
    Tests for the figure service
    """
    def transactional_setup_method(self):
        self.figure_service = FigureService()
        self.user = TestFactory.create_user()
        self.project = TestFactory.create_project(admin=self.user)
        self.files_helper = FilesHelper()


    def transactional_teardown_method(self):
        self.delete_project_folders()


    def assertCanReadImage(self, image_path):
        try:
            Image.open(image_path).load()
        except (IOError, ValueError):
            raise AssertionError("Could not open %s as an image" % image_path)


    def store_test_png(self):
        self.figure_service.store_result_figure(self.project, self.user, "png", IMG_DATA, image_name="test-figure")


    def retrieve_images(self):
        figures_by_session, _ = self.figure_service.retrieve_result_figures(self.project, self.user)
        # flatten image session grouping
        figures = []
        for fg in figures_by_session.itervalues():
            figures.extend(fg)
        return figures


    def test_store_image(self):
        self.store_test_png()


    def test_store_image_from_operation(self):
        # test that image can be retrieved from operation
        test_operation = TestFactory.create_operation(test_user=self.user, test_project=self.project)

        self.figure_service.store_result_figure(self.project, self.user, "png",
                                                IMG_DATA, operation_id=test_operation.id)
        figures = dao.get_figures_for_operation(test_operation.id)
        assert 1 == len(figures)
        image_path = self.files_helper.get_images_folder(self.project.name)
        image_path = os.path.join(image_path, figures[0].file_path)
        self.assertCanReadImage(image_path)


    def test_store_and_retrieve_image(self):
        self.store_test_png()
        figures = self.retrieve_images()
        assert 1 == len(figures)
        image_path = utils.url2path(figures[0].file_path)
        self.assertCanReadImage(image_path)


    def test_load_figure(self):
        self.store_test_png()
        figures = self.retrieve_images()
        self.figure_service.load_figure(figures[0].id)


    def test_edit_figure(self):
        session_name = 'the altered ones'
        name = 'altered'
        self.store_test_png()
        figures = self.retrieve_images()
        self.figure_service.edit_result_figure(figures[0].id, session_name=session_name, name=name)
        figures_by_session, _ = self.figure_service.retrieve_result_figures(self.project, self.user)
        assert [session_name] == figures_by_session.keys()
        assert name == figures_by_session.values()[0][0].name


    def test_remove_figure(self):
        self.store_test_png()
        figures = self.retrieve_images()
        assert 1 == len(figures)
        self.figure_service.remove_result_figure(figures[0].id)
        figures = self.retrieve_images()
        assert 0 == len(figures)
    def launch(self, uploaded, rotate_x=0, rotate_y=0, rotate_z=0):
        """
        Execute import operations: unpack ZIP and build Connectivity object as result.

        :param uploaded: an archive containing the Connectivity data to be imported

        :returns: `Connectivity`

        :raises LaunchException: when `uploaded` is empty or nonexistent
        :raises Exception: when
                    * the weights or tracts matrix is invalid (negative values, wrong shape)
                    * any of the orientation, areas, cortical or hemisphere vectors \
                      has a length different from the expected number of nodes
        """
        if uploaded is None:
            raise LaunchException("Please select a ZIP file containing the data to import")
        
        files = FilesHelper().unpack_zip(uploaded, self.storage_path)
        
        weights_matrix = None
        centres = None
        labels_vector = None
        tract_matrix = None
        orientation = None
        areas = None
        cortical_vector = None
        hemisphere_vector = None
        
        for file_name in files:
            if file_name.lower().find(self.WEIGHT_TOKEN) >= 0:
                weights_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.POSITION_TOKEN) >= 0:
                centres = read_list_data(file_name, skiprows=1, usecols=[1, 2, 3])
                labels_vector = read_list_data(file_name, dtype=numpy.str, skiprows=1, usecols=[0])
                continue
            if file_name.lower().find(self.TRACT_TOKEN) >= 0:
                tract_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.ORIENTATION_TOKEN) >= 0:
                orientation = read_list_data(file_name)
                continue
            if file_name.lower().find(self.AREA_TOKEN) >= 0:
                areas = read_list_data(file_name)
                continue
            if file_name.lower().find(self.CORTICAL_INFO) >= 0:
                cortical_vector = read_list_data(file_name, dtype=numpy.bool)
                continue
            if file_name.lower().find(self.HEMISPHERE_INFO) >= 0:
                hemisphere_vector = read_list_data(file_name, dtype=numpy.bool)
                continue
        ### Clean remaining text-files.
        FilesHelper.remove_files(files, True)
        
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = [rotate_x, rotate_y, rotate_z]
        
        ### Fill positions
        if centres is None:
            raise Exception("Positions for Connectivity Regions are required! "
                            "We expect a file *position* inside the uploaded ZIP.")
        expected_number_of_nodes = len(centres)
        if expected_number_of_nodes < 2:
            raise Exception("A connectivity with at least 2 nodes is expected")
        result.centres = centres
        if labels_vector is not None:
            result.region_labels = labels_vector
            
        ### Fill and check weights
        if weights_matrix is not None:
            if numpy.any(weights_matrix < 0):
                raise Exception("Negative values are not accepted in weights matrix! "
                                "Please check your file, and use values >= 0")
            if weights_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception("Unexpected shape for weights matrix! "
                                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
            result.weights = weights_matrix
            
        ### Fill and check tracts    
        if tract_matrix is not None:
            if numpy.any(tract_matrix < 0):
                raise Exception("Negative values are not accepted in tracts matrix! "
                                "Please check your file, and use values >= 0")
            if tract_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception("Unexpected shape for tracts matrix! "
                                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
            result.tract_lengths = tract_matrix

        if orientation is not None:
            if len(orientation) != expected_number_of_nodes:
                raise Exception("Invalid size for the orientation vector. "
                                "Expected the same as the number of region centres: %d" % expected_number_of_nodes)
            result.orientations = orientation

        if areas is not None:
            if len(areas) != expected_number_of_nodes:
                raise Exception("Invalid size for the areas vector. "
                                "Expected the same as the number of region centres: %d" % expected_number_of_nodes)
            result.areas = areas

        if cortical_vector is not None:
            if len(cortical_vector) != expected_number_of_nodes:
                raise Exception("Invalid size for the cortical vector. "
                                "Expected the same as the number of region centres: %d" % expected_number_of_nodes)
            result.cortical = cortical_vector

        if hemisphere_vector is not None:
            if len(hemisphere_vector) != expected_number_of_nodes:
                raise Exception("Invalid size for the hemispheres vector. "
                                "Expected the same as the number of region centres: %d" % expected_number_of_nodes)
            result.hemispheres = hemisphere_vector
        return result
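
    # The uploaded ZIP is matched purely by file-name tokens (see the loop
    # above): WEIGHT_TOKEN selects the weights matrix, POSITION_TOKEN the
    # centres and labels, TRACT_TOKEN the tract lengths, with optional
    # ORIENTATION/AREA/CORTICAL/HEMISPHERE files. Only the positions file is
    # mandatory; every other input is validated against the number of centres.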
class SimulatorController(BurstBaseController):
    ACTION_KEY = 'action'
    PREVIOUS_ACTION_KEY = 'previous_action'
    FORM_KEY = 'form'
    IS_MODEL_FRAGMENT_KEY = 'is_model_fragment'
    IS_SURFACE_SIMULATION_KEY = 'is_surface_simulation'
    IS_FIRST_FRAGMENT_KEY = 'is_first_fragment'
    IS_LAST_FRAGMENT_KEY = 'is_last_fragment'
    IS_COPY = 'sim_copy'
    IS_LOAD = 'sim_load'

    dict_to_render = {
        ACTION_KEY: None,
        PREVIOUS_ACTION_KEY: None,
        FORM_KEY: None,
        IS_MODEL_FRAGMENT_KEY: False,
        IS_SURFACE_SIMULATION_KEY: False,
        IS_FIRST_FRAGMENT_KEY: False,
        IS_LAST_FRAGMENT_KEY: False,
        IS_COPY: False,
        IS_LOAD: False
    }
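
    # NOTE: dict_to_render is a class-level template; every handler below takes
    # a copy.deepcopy() of it before filling it in, so no request mutates the
    # shared defaults above.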

    def __init__(self):
        BurstBaseController.__init__(self)
        self.range_parameters = SimulatorRangeParameters()
        self.burst_service2 = BurstService2()
        self.simulator_service = SimulatorService()
        self.files_helper = FilesHelper()
        self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            IntrospectionRegistry.SIMULATOR_MODULE,
            IntrospectionRegistry.SIMULATOR_CLASS)

    @expose_page
    @settings
    @context_selected
    def index(self):
        """Get on burst main page"""
        template_specification = dict(
            mainContent="burst/main_burst",
            title="Simulation Cockpit",
            baseUrl=TvbProfile.current.web.BASE_URL,
            includedResources='project/included_resources')
        project = common.get_current_project()

        burst_config = BurstConfiguration2(project.id)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        template_specification['burstConfig'] = burst_config
        template_specification['burst_list'] = self.burst_service2.get_available_bursts(project.id)

        portlets_list = []  # self.burst_service.get_available_portlets()
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(portlets_list)

        form = self.prepare_first_fragment()

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.IS_FIRST_FRAGMENT_KEY] = True
        dict_to_render[self.FORM_KEY] = form
        dict_to_render[self.ACTION_KEY] = "/burst/set_connectivity"
        template_specification.update(**dict_to_render)

        return self.fill_default_attributes(template_specification)

    def prepare_first_fragment(self):
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        if is_simulator_copy is None:
            is_simulator_copy = False

        adapter_instance = ABCAdapter.build_adapter(
            self.cached_simulator_algorithm)
        form = adapter_instance.get_form()('', common.get_current_project().id,
                                           is_simulator_copy)

        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        if session_stored_simulator is None:
            session_stored_simulator = Simulator()  # self.burst_service.new_burst_configuration(common.get_current_project().id)
            common.add2session(common.KEY_SIMULATOR_CONFIG, session_stored_simulator)

        form.fill_from_trait(session_stored_simulator)
        return form

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_connectivity(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)
        form = SimulatorAdapterForm()

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form.fill_from_post(data)

            connectivity_index_gid = form.connectivity.value
            conduction_speed = form.conduction_speed.value
            coupling = form.coupling.value

            connectivity_index = ABCAdapter.load_entity_by_gid(
                connectivity_index_gid)
            connectivity = h5.load_from_index(connectivity_index)

            # TODO: handle this cases in a better manner
            session_stored_simulator.connectivity = connectivity
            session_stored_simulator.conduction_speed = conduction_speed
            session_stored_simulator.coupling = coupling()

        next_form = get_form_for_coupling(type(session_stored_simulator.coupling))()
        self.range_parameters.coupling_parameters = next_form.get_range_parameters()
        next_form.fill_from_trait(session_stored_simulator.coupling)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = next_form
        dict_to_render[self.ACTION_KEY] = '/burst/set_coupling_params'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_connectivity'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render
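
    # Every set_* handler follows the same wizard-step contract: on POST it
    # consumes the submitted form and advances the session-stored simulator
    # configuration; on GET (load/copy flows) it only re-renders. Either way it
    # returns a deep copy of dict_to_render describing the next fragment and
    # its action/previous_action URLs.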

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_coupling_params(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = get_form_for_coupling(
                type(session_stored_simulator.coupling))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.coupling)

        surface_fragment = SimulatorSurfaceFragment('', common.get_current_project().id)
        surface_fragment.fill_from_trait(session_stored_simulator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = surface_fragment
        dict_to_render[self.ACTION_KEY] = '/burst/set_surface'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_coupling_params'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_surface(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)
        dict_to_render = copy.deepcopy(self.dict_to_render)
        surface_index = None

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = SimulatorSurfaceFragment()
            form.fill_from_post(data)

            surface_index_gid = form.surface.value
            # surface_index_gid = data['_surface']
            if surface_index_gid is None:
                session_stored_simulator.surface = None
            else:
                surface_index = ABCAdapter.load_entity_by_gid(
                    surface_index_gid)
                session_stored_simulator.surface = Cortex()

        if session_stored_simulator.surface is None:
            stimuli_fragment = SimulatorStimulusFragment('', common.get_current_project().id, False)
            stimuli_fragment.fill_from_trait(session_stored_simulator)

            dict_to_render[self.FORM_KEY] = stimuli_fragment
            dict_to_render[self.ACTION_KEY] = '/burst/set_stimulus'
            dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_surface'
            dict_to_render[self.IS_COPY] = is_simulator_copy
            dict_to_render[self.IS_LOAD] = is_simulator_load
            return dict_to_render

        # TODO: work-around this situation: surf_index filter
        rm_fragment = SimulatorRMFragment('',
                                          common.get_current_project().id,
                                          surface_index)
        rm_fragment.fill_from_trait(session_stored_simulator)
        dict_to_render[self.FORM_KEY] = rm_fragment
        dict_to_render[self.ACTION_KEY] = '/burst/set_cortex'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_surface'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_cortex(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            rm_fragment = SimulatorRMFragment()
            rm_fragment.fill_from_post(data)

            session_stored_simulator.surface.coupling_strength = rm_fragment.coupling_strength.data

            lc_gid = rm_fragment.lc.value
            if lc_gid != 'None':  # only load when a LocalConnectivity was actually selected
                lc_index = ABCAdapter.load_entity_by_gid(lc_gid)
                lc = h5.load_from_index(lc_index)
                session_stored_simulator.surface.local_connectivity = lc

            rm_gid = rm_fragment.rm.value
            rm_index = ABCAdapter.load_entity_by_gid(rm_gid)
            rm = h5.load_from_index(rm_index)
            session_stored_simulator.surface.region_mapping_data = rm

        stimuli_fragment = SimulatorStimulusFragment('', common.get_current_project().id, True)
        stimuli_fragment.fill_from_trait(session_stored_simulator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = stimuli_fragment
        dict_to_render[self.ACTION_KEY] = '/burst/set_stimulus'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_cortex'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_stimulus(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            stimuli_fragment = SimulatorStimulusFragment('', common.get_current_project().id,
                                                         session_stored_simulator.is_surface_simulation)
            stimuli_fragment.fill_from_post(data)
            stimulus_gid = stimuli_fragment.stimulus.value
            if stimulus_gid is not None:
                stimulus_index = ABCAdapter.load_entity_by_gid(stimulus_gid)
                stimulus = h5.load_from_index(stimulus_index)
                session_stored_simulator.stimulus = stimulus

        model_fragment = SimulatorModelFragment('', common.get_current_project().id)
        model_fragment.fill_from_trait(session_stored_simulator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = model_fragment
        dict_to_render[self.ACTION_KEY] = '/burst/set_model'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_stimulus'
        dict_to_render[self.IS_MODEL_FRAGMENT_KEY] = True
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        dict_to_render[self.IS_SURFACE_SIMULATION_KEY] = session_stored_simulator.is_surface_simulation
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_model(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = SimulatorModelFragment()
            form.fill_from_post(data)
            session_stored_simulator.model = form.model.value()

        form = get_form_for_model(type(session_stored_simulator.model))()
        self.range_parameters.model_parameters = form.get_range_parameters()
        form.fill_from_trait(session_stored_simulator.model)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = form
        dict_to_render[self.ACTION_KEY] = '/burst/set_model_params'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_model'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_model_params(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = get_form_for_model(type(session_stored_simulator.model))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.model)

        integrator_fragment = SimulatorIntegratorFragment('', common.get_current_project().id)
        integrator_fragment.fill_from_trait(session_stored_simulator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = integrator_fragment
        dict_to_render[self.ACTION_KEY] = '/burst/set_integrator'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_model_params'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    # TODO: add state_variables selection step
    # @cherrypy.expose
    # @using_jinja_template("wizzard_form")
    # @handle_error(redirect=False)
    # @check_user
    # def set_model_variables_to_monitor(self, data):
    #     session_stored_simulator = common.get_from_session(common.KEY_SIMULATOR_CONFIG)
    #     form = get_form_for_model(type(session_stored_simulator.model.variables))()
    #     form.fill_from_post(data)
    #
    #     form.fill_trait(session_stored_simulator.model)
    #
    #     integrator_fragment = SimulatorIntegratorFragment('', common.get_current_project().id)
    #
    #     return {'form': integrator_fragment, 'action': '/burst/set_integrator',
    #             'previous_action': '/burst/set_model_variables_to_monitor'}

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_integrator(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            fragment = SimulatorIntegratorFragment()
            fragment.fill_from_post(data)
            session_stored_simulator.integrator = fragment.integrator.value()

        form = get_form_for_integrator(
            type(session_stored_simulator.integrator))()
        form.fill_from_trait(session_stored_simulator.integrator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = form
        dict_to_render[self.ACTION_KEY] = '/burst/set_integrator_params'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_integrator'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_integrator_params(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = get_form_for_integrator(
                type(session_stored_simulator.integrator))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.integrator)

        if isinstance(session_stored_simulator.integrator, IntegratorStochastic):
            integrator_noise_fragment = get_form_for_noise(type(session_stored_simulator.integrator.noise))()
            self.range_parameters.integrator_noise_parameters = integrator_noise_fragment.get_range_parameters()
            integrator_noise_fragment.fill_from_trait(session_stored_simulator.integrator.noise)

            dict_to_render = copy.deepcopy(self.dict_to_render)
            dict_to_render[self.FORM_KEY] = integrator_noise_fragment
            dict_to_render[self.ACTION_KEY] = '/burst/set_noise_params'
            dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_integrator_params'
            dict_to_render[self.IS_COPY] = is_simulator_copy
            dict_to_render[self.IS_LOAD] = is_simulator_load
            return dict_to_render

        monitor_fragment = SimulatorMonitorFragment('', common.get_current_project().id)
        monitor_fragment.fill_from_trait(session_stored_simulator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = monitor_fragment
        dict_to_render[self.ACTION_KEY] = '/burst/set_monitors'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_integrator_params'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_noise_params(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = get_form_for_noise(
                type(session_stored_simulator.integrator.noise))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.integrator.noise)

        if isinstance(session_stored_simulator.integrator.noise, Additive):
            monitor_fragment = SimulatorMonitorFragment('', common.get_current_project().id)
            monitor_fragment.fill_from_trait(session_stored_simulator)

            dict_to_render = copy.deepcopy(self.dict_to_render)
            dict_to_render[self.FORM_KEY] = monitor_fragment
            dict_to_render[self.ACTION_KEY] = '/burst/set_monitors'
            dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_noise_params'
            dict_to_render[self.IS_COPY] = is_simulator_copy
            dict_to_render[self.IS_LOAD] = is_simulator_load
            return dict_to_render

        equation_form = get_form_for_equation(
            type(session_stored_simulator.integrator.noise.b))()
        equation_form.equation.data = session_stored_simulator.integrator.noise.b.__class__.__name__

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = equation_form
        dict_to_render[self.ACTION_KEY] = '/burst/set_noise_equation_params'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_noise_params'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render
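
    # Additive noise needs no further configuration, so the flow above jumps
    # straight to the monitors fragment; otherwise the wizard inserts an extra
    # step to configure the noise coupling equation (noise.b) in
    # set_noise_equation_params below.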

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_noise_equation_params(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = get_form_for_equation(
                type(session_stored_simulator.integrator.noise.b))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.integrator.noise.b)

        monitor_fragment = SimulatorMonitorFragment('', common.get_current_project().id)
        monitor_fragment.fill_from_trait(session_stored_simulator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = monitor_fragment
        dict_to_render[self.ACTION_KEY] = '/burst/set_monitors'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_noise_equation_params'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_monitors(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            # TODO: handle multiple monitors
            fragment = SimulatorMonitorFragment()
            fragment.fill_from_post(data)

            session_stored_simulator.monitors = [fragment.monitor.value()]

        monitor = session_stored_simulator.monitors[0]
        form = get_form_for_monitor(type(monitor))(
            '', common.get_current_project().id)
        form.fill_from_trait(monitor)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = form
        dict_to_render[self.ACTION_KEY] = '/burst/set_monitor_params'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_monitors'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_monitor_params(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        monitor = session_stored_simulator.monitors[0]
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = get_form_for_monitor(type(monitor))()
            form.fill_from_post(data)
            form.fill_trait(monitor)

        if isinstance(monitor, Bold):
            next_form = get_form_for_equation(type(monitor.hrf_kernel))()
            next_form.fill_from_trait(
                session_stored_simulator.monitors[0].hrf_kernel)

            dict_to_render = copy.deepcopy(self.dict_to_render)
            dict_to_render[self.FORM_KEY] = next_form
            dict_to_render[self.ACTION_KEY] = '/burst/set_monitor_equation'
            dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_monitor_params'
            dict_to_render[self.IS_COPY] = is_simulator_copy
            dict_to_render[self.IS_LOAD] = is_simulator_load
            return dict_to_render
        session_stored_simulator.monitors = [monitor]

        next_form = SimulatorLengthFragment()
        next_form.fill_from_trait(session_stored_simulator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = next_form
        dict_to_render[self.ACTION_KEY] = '/burst/set_simulation_length'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_monitor_params'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        if is_simulator_load:
            dict_to_render[self.ACTION_KEY] = ''
            dict_to_render[self.IS_LAST_FRAGMENT_KEY] = True
        return dict_to_render
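
    # Bold is the only monitor with an extra wizard step here: its haemodynamic
    # response function (hrf_kernel) equation is configured in
    # set_monitor_equation before the simulation-length fragment is shown.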

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_monitor_equation(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form = get_form_for_monitor(
                type(session_stored_simulator.monitors[0]))()
            form.fill_from_post(data)
            form.fill_trait(session_stored_simulator.monitors[0].hrf_kernel)

        next_form = SimulatorLengthFragment()

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = next_form
        dict_to_render[self.ACTION_KEY] = '/burst/set_simulation_length'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_monitor_equation'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        if is_simulator_load:
            dict_to_render[self.ACTION_KEY] = ''
            dict_to_render[self.IS_LAST_FRAGMENT_KEY] = True
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_simulation_length(self, **data):
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(
            common.KEY_IS_SIMULATOR_LOAD)
        session_burst_config = common.get_from_session(common.KEY_BURST_CONFIG)

        dict_to_render = copy.deepcopy(self.dict_to_render)

        if is_simulator_load:
            common.add2session(common.KEY_IS_SIMULATOR_LOAD, False)

        next_form = SimulatorFinalFragment()
        if session_burst_config.name:
            burst_name = session_burst_config.name
            copy_prefix = 'Copy of '
            if is_simulator_copy and burst_name.find(copy_prefix) < 0:
                burst_name = copy_prefix + burst_name
            next_form.simulation_name.data = burst_name

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            fragment = SimulatorLengthFragment()
            fragment.fill_from_post(data)
            session_stored_simulator.simulation_length = fragment.length.value

        dict_to_render[self.FORM_KEY] = next_form
        dict_to_render[self.ACTION_KEY] = '/burst/setup_pse'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_simulation_length'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LAST_FRAGMENT_KEY] = True
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def setup_pse(self, **data):
        next_form = SimulatorPSEConfigurationFragment(
            self.range_parameters.get_all_range_parameters())

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = next_form
        dict_to_render[self.ACTION_KEY] = '/burst/set_pse_params'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_simulation_length'
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def set_pse_params(self, **data):
        form = SimulatorPSEConfigurationFragment(
            self.range_parameters.get_all_range_parameters())
        form.fill_from_post(data)

        param1 = form.pse_param1.value
        param2 = None
        if form.pse_param2.value != form.pse_param2.missing_value:
            param2 = form.pse_param2.value

        project_id = common.get_current_project().id
        next_form = SimulatorPSEParamRangeFragment(param1, param2, project_id=project_id)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = next_form
        dict_to_render[self.ACTION_KEY] = '/burst/launch_pse'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_pse_params'
        return dict_to_render

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def launch_pse(self, **data):
        # TODO: Split into 'set range values' and 'launch'; show a message with
        #  the finished config and the number of simulations.
        all_range_parameters = self.range_parameters.get_all_range_parameters()
        range_param1, range_param2 = SimulatorPSEParamRangeFragment.fill_from_post(
            all_range_parameters, **data)
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)

        project = common.get_current_project()
        user = common.get_logged_user()

        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        burst_config.start_time = datetime.now()
        # if burst_name != 'none_undefined':
        #     burst_config.name = burst_name

        # TODO: branch simulation name is different
        if burst_config.name is None:
            new_id = dao.get_max_burst_id() + 1
            burst_config.name = 'simulation_' + str(new_id)

        operation_group = OperationGroup(
            project.id,
            ranges=[range_param1.to_json(),
                    range_param2.to_json()])
        operation_group = dao.store_entity(operation_group)

        metric_operation_group = OperationGroup(
            project.id,
            ranges=[range_param1.to_json(),
                    range_param2.to_json()])
        metric_operation_group = dao.store_entity(metric_operation_group)

        burst_config.operation_group = operation_group
        burst_config.operation_group_id = operation_group.id
        burst_config.metric_operation_group = metric_operation_group
        burst_config.metric_operation_group_id = metric_operation_group.id
        dao.store_entity(burst_config)

        try:
            thread = threading.Thread(
                target=self.simulator_service.async_launch_and_prepare_pse,
                kwargs={
                    'burst_config': burst_config,
                    'user': user,
                    'project': project,
                    'simulator_algo': self.cached_simulator_algorithm,
                    'range_param1': range_param1,
                    'range_param2': range_param2,
                    'session_stored_simulator': session_stored_simulator
                })
            thread.start()
        except BurstServiceException as e:
            self.logger.exception("Could not launch burst!")
            return {'error': str(e)}

    @expose_json
    def launch_simulation(self, launch_mode, **data):
        current_form = SimulatorFinalFragment()
        try:
            current_form.fill_from_post(data)
        except Exception as exc:
            self.logger.exception(exc)
            return {'error': str(exc)}

        burst_name = current_form.simulation_name.value
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)

        project = common.get_current_project()
        user = common.get_logged_user()

        session_burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if burst_name != 'none_undefined':
            session_burst_config.name = burst_name

        burst_config_to_store = session_burst_config
        simulation_state_index_gid = None
        if launch_mode == self.simulator_service.LAUNCH_NEW:
            if session_burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                session_burst_config.name = 'simulation_' + str(new_id)
            if is_simulator_copy:
                burst_config_to_store = session_burst_config.clone()
        else:
            burst_config_to_store = session_burst_config.clone()
            count = dao.count_bursts_with_name(session_burst_config.name,
                                               session_burst_config.project_id)
            session_burst_config.name = '{}_{}{}'.format(
                session_burst_config.name, launch_mode, count)
            simulation_state_index = dao.get_generic_entity(
                SimulationStateIndex.__module__ + "." +
                SimulationStateIndex.__name__, session_burst_config.id,
                "fk_parent_burst")
            if not simulation_state_index:
                exc = BurstServiceException(
                    "Simulation State not found for %s, thus we are unable to branch from "
                    "it!" % session_burst_config.name)
                self.logger.error(exc)
                raise exc
            simulation_state_index_gid = simulation_state_index[0].gid

        burst_config_to_store.start_time = datetime.now()
        dao.store_entity(burst_config_to_store)

        try:
            thread = threading.Thread(
                target=self.simulator_service.async_launch_and_prepare_simulation,
                kwargs={'burst_config': burst_config_to_store,
                        'user': user,
                        'project': project,
                        'simulator_algo': self.cached_simulator_algorithm,
                        'session_stored_simulator': session_stored_simulator,
                        'simulation_state_gid': simulation_state_index_gid})
            thread.start()
            return {'id': burst_config_to_store.id}
        except BurstServiceException as e:
            self.logger.exception('Could not launch burst!')
            return {'error': str(e)}

    @expose_fragment('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one alternative to the 'browser back' problem.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        bursts = self.burst_service2.get_available_bursts(
            common.get_current_project().id)
        self.burst_service2.populate_burst_disk_usage(bursts)
        return {'burst_list': bursts, 'selectedBurst': session_burst.id}

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def load_burst_read_only(self, burst_config_id):
        try:
            burst_config = dao.get_burst_by_id(burst_config_id)
            common.add2session(common.KEY_BURST_CONFIG, burst_config)

            simulator_index = dao.get_generic_entity(SimulatorIndex,
                                                     burst_config.id,
                                                     'fk_parent_burst')[0]
            simulator_gid = simulator_index.gid

            project = common.get_current_project()
            storage_path = self.files_helper.get_project_folder(
                project, str(simulator_index.fk_from_operation))

            simulator, _, _ = self.simulator_service.deserialize_simulator(
                simulator_gid, storage_path)

            session_stored_simulator = simulator
            common.add2session(common.KEY_SIMULATOR_CONFIG,
                               session_stored_simulator)
            common.add2session(common.KEY_IS_SIMULATOR_LOAD, True)
            common.add2session(common.KEY_IS_SIMULATOR_COPY, False)

            form = self.prepare_first_fragment()
            dict_to_render = copy.deepcopy(self.dict_to_render)
            dict_to_render[self.IS_FIRST_FRAGMENT_KEY] = True
            dict_to_render[self.FORM_KEY] = form
            dict_to_render[self.ACTION_KEY] = "/burst/set_connectivity"
            dict_to_render[self.IS_LOAD] = True
            return dict_to_render
        except Exception:
            # Most probably the burst was removed. Delete it from the session so
            # that the client gets a proper response on refresh.
            self.logger.exception("Error loading burst")
            common.remove_from_session(common.KEY_BURST_CONFIG)
            raise

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def copy_simulator_configuration(self, burst_config_id):
        burst_config = dao.get_burst_by_id(burst_config_id)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)

        simulator_index = dao.get_generic_entity(SimulatorIndex,
                                                 burst_config.id,
                                                 'fk_parent_burst')[0]
        simulator_gid = simulator_index.gid

        project = common.get_current_project()
        storage_path = self.files_helper.get_project_folder(
            project, str(simulator_index.fk_from_operation))

        simulator, _, _ = self.simulator_service.deserialize_simulator(
            simulator_gid, storage_path)

        session_stored_simulator = simulator
        common.add2session(common.KEY_SIMULATOR_CONFIG,
                           session_stored_simulator)
        common.add2session(common.KEY_IS_SIMULATOR_COPY, True)
        common.add2session(common.KEY_IS_SIMULATOR_LOAD, False)

        form = self.prepare_first_fragment()
        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.IS_FIRST_FRAGMENT_KEY] = True
        dict_to_render[self.FORM_KEY] = form
        dict_to_render[self.ACTION_KEY] = "/burst/set_connectivity"
        dict_to_render[self.IS_COPY] = True
        return dict_to_render

    @cherrypy.expose
    @using_jinja_template("wizzard_form")
    @handle_error(redirect=False)
    @check_user
    def reset_simulator_configuration(self):
        common.add2session(common.KEY_SIMULATOR_CONFIG, None)
        common.add2session(common.KEY_IS_SIMULATOR_COPY, False)
        common.add2session(common.KEY_IS_SIMULATOR_LOAD, False)

        project = common.get_current_project()
        common.add2session(common.KEY_BURST_CONFIG,
                           BurstConfiguration2(project.id))

        form = self.prepare_first_fragment()
        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.IS_FIRST_FRAGMENT_KEY] = True
        dict_to_render[self.FORM_KEY] = form
        dict_to_render[self.ACTION_KEY] = "/burst/set_connectivity"
        return dict_to_render

    @expose_json
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to burst_name.
        """
        validation_result = SimulatorFinalFragment.is_burst_name_ok(burst_name)
        if validation_result is True:
            self.burst_service2.rename_burst(burst_id, burst_name)
            return {'success': "Simulation successfully renamed!"}
        else:
            self.logger.error(validation_result)
            return {'error': validation_result}
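
    # Note (derived from the check above): is_burst_name_ok is expected to
    # return True on success and an error string otherwise, which is why the
    # comparison uses 'is True' rather than plain truthiness.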

    @expose_json
    def get_history_status(self, **data):
        """
        For each burst id received, get the status and return it.
        """
        return self.burst_service2.update_history_status(
            json.loads(data['burst_ids']))


class BurstService(object):
    LAUNCH_NEW = 'new'
    LAUNCH_BRANCH = 'branch'

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.files_helper = FilesHelper()

    def mark_burst_finished(self,
                            burst_entity,
                            burst_status=None,
                            error_message=None):
        """
        Mark the Burst status field.
        Also compute the 'weight' of the current burst: number of operations inside,
        estimated time on disk, etc.

        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED.
        :param error_message: If given, set the status to error and persist the message.
        """
        if burst_status is None:
            burst_status = BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            burst_status = BurstConfiguration.BURST_ERROR

        try:
            # If there are any DataType Groups in current Burst, update their counter.
            burst_dt_groups = dao.get_generic_entity(DataTypeGroup,
                                                     burst_entity.gid,
                                                     "fk_parent_burst")
            for dt_group in burst_dt_groups:
                dt_group.count_results = dao.count_datatypes_in_group(
                    dt_group.id)
                dt_group.disk_size, dt_group.subject = dao.get_summary_for_group(
                    dt_group.id)
                dao.store_entity(dt_group)

            # Update actual Burst entity fields
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(
                burst_entity.gid)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
            self.update_burst_configuration_h5(burst_entity)
        except Exception:
            self.logger.exception(
                "Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
            self.update_burst_configuration_h5(burst_entity)
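
    # Illustrative note (not part of the original source): error_message takes
    # precedence over any explicitly requested status, so a call such as
    #
    #     service.mark_burst_finished(burst, error_message="solver diverged")
    #
    # always leaves burst.status == BurstConfiguration.BURST_ERROR.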

    def persist_operation_state(self,
                                operation,
                                operation_status,
                                message=None):
        """
        Update the Operation instance's state. Store it in the DB and on disk.

        :param operation: Operation instance
        :param operation_status: new status
        :param message: message, in case of error
        :return: the changed Operation instance
        """
        operation.mark_complete(operation_status, message)
        operation = dao.store_entity(operation)
        # update burst also
        burst_config = self.get_burst_for_operation_id(operation.id)
        if burst_config is not None:
            burst_status = STATUS_FOR_OPERATION.get(operation_status)
            self.mark_burst_finished(burst_config, burst_status, message)
        return operation

    @staticmethod
    def get_burst_for_operation_id(operation_id, is_group=False):
        return dao.get_burst_for_operation_id(operation_id, is_group)

    def rename_burst(self, burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its new name to new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)
        self.update_burst_configuration_h5(burst)

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the given project.
        """
        bursts = dao.get_bursts_for_project(
            project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        return bursts

    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Add a disk_size field to each burst object.
        The disk usage is computed as the sum of the sizes of the datatypes generated by the burst.
        """
        sizes = dao.compute_bursts_disk_size([b.gid for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.gid])
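
    # Rough usage sketch (assumed; mirrors how the history controller above
    # chains these two static helpers):
    #
    #     bursts = BurstService.get_available_bursts(project.id)
    #     BurstService.populate_burst_disk_usage(bursts)
    #     # each burst now carries a human-readable disk_size string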

    def update_history_status(self, id_list):
        """
        For each burst_id received in id_list, read the new status from the DB and
        return a list of [id, new_status, is_group, message, running_time] entries.
        """
        result = []
        for b_id in id_list:
            burst = dao.get_burst_by_id(b_id)
            if burst is not None:
                if burst.status == burst.BURST_RUNNING:
                    running_time = datetime.now() - burst.start_time
                else:
                    running_time = burst.finish_time - burst.start_time
                running_time = format_timedelta(running_time,
                                                most_significant2=False)

                if burst.status == burst.BURST_ERROR:
                    msg = 'Check Operations page for error Message'
                else:
                    msg = ''
                result.append([
                    burst.id, burst.status, burst.is_group, msg, running_time
                ])
            else:
                self.logger.debug("Could not find burst with id=" + str(b_id) +
                                  ". Might have been deleted by user!!")
        return result
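
    # Shape of each returned entry (derived from the loop above), e.g. for a
    # hypothetical errored group burst:
    #
    #     [42, 'Error', True, 'Check Operations page for error Message', '0:00:07']
    #
    # where the exact running-time string depends on format_timedelta.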

    @staticmethod
    def update_simulation_fields(burst, op_simulation_id, simulation_gid):
        burst.fk_simulation = op_simulation_id
        burst.simulator_gid = simulation_gid.hex
        burst = dao.store_entity(burst)
        return burst

    def update_burst_configuration_h5(self, burst_configuration):
        # type: (BurstConfiguration) -> None
        project = dao.get_project_by_id(burst_configuration.fk_project)
        storage_path = self.files_helper.get_project_folder(
            project, str(burst_configuration.fk_simulation))
        self.store_burst_configuration(burst_configuration, storage_path)

    @staticmethod
    def load_burst_configuration(burst_config_id):
        # type: (int) -> BurstConfiguration
        burst_config = dao.get_burst_by_id(burst_config_id)
        return burst_config

    @staticmethod
    def prepare_burst_for_pse(burst_config):
        # type: (BurstConfiguration) -> BurstConfiguration
        operation_group = OperationGroup(burst_config.fk_project,
                                         ranges=burst_config.ranges)
        operation_group = dao.store_entity(operation_group)

        metric_operation_group = OperationGroup(burst_config.fk_project,
                                                ranges=burst_config.ranges)
        metric_operation_group = dao.store_entity(metric_operation_group)

        burst_config.operation_group = operation_group
        burst_config.fk_operation_group = operation_group.id
        burst_config.metric_operation_group = metric_operation_group
        burst_config.fk_metric_operation_group = metric_operation_group.id
        return dao.store_entity(burst_config)
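
    # Note (derived from the code above): a PSE burst is wired to two separate
    # OperationGroups -- one for the simulation operations themselves and one
    # for the metric (datatype-measure) operations run over their results.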

    @staticmethod
    def store_burst_configuration(burst_config, storage_path):
        bc_path = h5.path_for(storage_path, BurstConfigurationH5,
                              burst_config.gid)
        with BurstConfigurationH5(bc_path) as bc_h5:
            bc_h5.store(burst_config)

    @staticmethod
    def load_burst_configuration_from_folder(simulator_folder, project):
        bc_h5_filename = DirLoader(
            simulator_folder,
            None).find_file_for_has_traits_type(BurstConfiguration)
        burst_config = BurstConfiguration(project.id)
        with BurstConfigurationH5(
                os.path.join(simulator_folder, bc_h5_filename)) as bc_h5:
            bc_h5.load_into(burst_config)
        return burst_config

    @staticmethod
    def prepare_simulation_name(burst, project_id):
        simulation_number = dao.get_number_of_bursts(project_id) + 1

        if burst.name is None:
            simulation_name = 'simulation_' + str(simulation_number)
        else:
            simulation_name = burst.name

        return simulation_name, simulation_number
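
    # Illustrative example (hypothetical values): with 4 bursts already in the
    # project, an unnamed burst yields ('simulation_5', 5), while a burst named
    # 'my_run' yields ('my_run', 5).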

    def prepare_indexes_for_simulation_results(self, operation,
                                               result_filenames, burst):
        indexes = list()
        self.logger.debug(
            "Preparing indexes for simulation results in operation {}...".
            format(operation.id))
        for filename in result_filenames:
            try:
                self.logger.debug(
                    "Preparing index for filename: {}".format(filename))
                index = h5.index_for_h5_file(filename)()
                h5_class = h5.REGISTRY.get_h5file_for_index(type(index))

                with h5_class(filename) as index_h5:
                    index.fill_from_h5(index_h5)
                    index.fill_from_generic_attributes(
                        index_h5.load_generic_attributes())

                index.fk_parent_burst = burst.gid
                index.fk_from_operation = operation.id
                if operation.fk_operation_group:
                    datatype_group = dao.get_datatypegroup_by_op_group_id(
                        operation.fk_operation_group)
                    self.logger.debug(
                        "Found DatatypeGroup with id {} for operation {}".
                        format(datatype_group.id, operation.id))
                    index.fk_datatype_group = datatype_group.id

                    # Update the operation group name
                    operation_group = dao.get_operationgroup_by_id(
                        operation.fk_operation_group)
                    operation_group.fill_operationgroup_name(
                        "TimeSeriesRegionIndex")
                    dao.store_entity(operation_group)
                self.logger.debug(
                    "Prepared index {} for file {} in operation {}".format(
                        index.summary_info, filename, operation.id))
                indexes.append(index)
            except Exception as e:
                self.logger.debug("Skipping index preparation for {} because"
                                  " an error occurred.".format(filename))
                self.logger.error(e)
        self.logger.debug(
            "Prepared {} indexes for results in operation {}...".format(
                len(indexes), operation.id))
        return indexes

    def prepare_index_for_metric_result(self, operation, result_filename,
                                        burst):
        self.logger.debug(
            "Preparing index for metric result in operation {}...".format(
                operation.id))
        index = h5.index_for_h5_file(result_filename)()
        with DatatypeMeasureH5(result_filename) as dti_h5:
            index.gid = dti_h5.gid.load().hex
            index.metrics = json.dumps(dti_h5.metrics.load())
            index.fk_source_gid = dti_h5.analyzed_datatype.load().hex
        index.fk_from_operation = operation.id
        index.fk_parent_burst = burst.gid
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation.fk_operation_group)
        self.logger.debug(
            "Found DatatypeGroup with id {} for operation {}".format(
                datatype_group.id, operation.id))
        index.fk_datatype_group = datatype_group.id
        self.logger.debug(
            "Prepared index {} for results in operation {}...".format(
                index.summary_info, operation.id))
        return index

    def _update_pse_burst_status(self, burst_config):
        operations_in_group = dao.get_operations_in_group(
            burst_config.fk_operation_group)
        if burst_config.fk_metric_operation_group:
            operations_in_group.extend(
                dao.get_operations_in_group(
                    burst_config.fk_metric_operation_group))
        operation_statuses = list()
        for operation in operations_in_group:
            if not has_finished(operation.status):
                self.logger.debug(
                    'Operation {} in group {} is not finished, burst status will not be updated'
                    .format(operation.id, operation.fk_operation_group))
                return
            operation_statuses.append(operation.status)
        self.logger.debug(
            'All operations in burst {} have finished. Will update burst status'
            .format(burst_config.id))
        if STATUS_ERROR in operation_statuses:
            self.mark_burst_finished(
                burst_config, BurstConfiguration.BURST_ERROR,
                'Some operations in PSE have finished with errors')
        elif STATUS_CANCELED in operation_statuses:
            self.mark_burst_finished(burst_config,
                                     BurstConfiguration.BURST_CANCELED)
        else:
            self.mark_burst_finished(burst_config)
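
    # Note (derived from the branches above): status precedence is
    # error > canceled > finished -- a single errored operation marks the whole
    # PSE burst as BURST_ERROR even if every other operation finished cleanly.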

    def update_burst_status(self, burst_config):
        if burst_config.fk_operation_group:
            self._update_pse_burst_status(burst_config)
        else:
            operation = dao.get_operation_by_id(burst_config.fk_simulation)
            message = operation.additional_info
            if len(message) == 0:
                message = None
            self.mark_burst_finished(burst_config,
                                     STATUS_FOR_OPERATION[operation.status],
                                     message)

    @staticmethod
    def prepare_metrics_operation(operation):
        # TODO reuse from OperationService and do not duplicate logic here
        parent_burst = dao.get_generic_entity(BurstConfiguration,
                                              operation.fk_operation_group,
                                              'fk_operation_group')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = operation.range_values
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE,
                                                  MEASURE_METRICS_CLASS)

        metric_operation = Operation(None,
                                     operation.fk_launched_by,
                                     operation.fk_launched_in,
                                     metric_algo.id,
                                     status=STATUS_FINISHED,
                                     op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)
        op_dir = FilesHelper().get_project_folder(operation.project,
                                                  str(metric_operation.id))
        return op_dir, metric_operation

    @staticmethod
    def handle_range_params_at_loading(burst_config, all_range_parameters):
        param1, param2 = None, None
        if burst_config.range1:
            param1 = RangeParameter.from_json(burst_config.range1)
            param1.fill_from_default(all_range_parameters[param1.name])
            if burst_config.range2 is not None:
                param2 = RangeParameter.from_json(burst_config.range2)
                param2.fill_from_default(all_range_parameters[param2.name])

        return param1, param2
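
    # Note (derived from the branches above): range2 is only considered when
    # range1 is set, so a burst with range2 but no range1 loads as (None, None).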

    def prepare_data_for_burst_copy(self, burst_config_id, burst_name_format,
                                    project):
        burst_config = self.load_burst_configuration(burst_config_id)
        burst_config_copy = burst_config.clone()
        count = dao.count_bursts_with_name(burst_config.name,
                                           burst_config.fk_project)
        burst_config_copy.name = burst_name_format.format(
            burst_config.name, count + 1)

        storage_path = self.files_helper.get_project_folder(
            project, str(burst_config.fk_simulation))
        simulator = h5.load_view_model(burst_config.simulator_gid,
                                       storage_path)
        simulator.generic_attributes = GenericAttributes()
        return simulator, burst_config_copy

    @staticmethod
    def store_burst(burst_config):
        return dao.store_entity(burst_config)

    def load_simulation_from_zip(self, zip_file, project):
        import_service = ImportService()
        simulator_folder = import_service.import_simulator_configuration_zip(
            zip_file)

        simulator_h5_filename = DirLoader(
            simulator_folder,
            None).find_file_for_has_traits_type(SimulatorAdapterModel)
        simulator_h5_filepath = os.path.join(simulator_folder,
                                             simulator_h5_filename)
        simulator = h5.load_view_model_from_file(simulator_h5_filepath)

        burst_config = self.load_burst_configuration_from_folder(
            simulator_folder, project)
        burst_config_copy = burst_config.clone()
        simulator.generic_attributes.parent_burst = burst_config_copy.gid

        return simulator, burst_config_copy, simulator_folder


class SimulatorService(object):
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.algorithm_service = AlgorithmService()
        self.files_helper = FilesHelper()

    @staticmethod
    def _reset_model(session_stored_simulator):
        session_stored_simulator.model = type(session_stored_simulator.model)()
        vi_indexes = session_stored_simulator.determine_indexes_for_chosen_vars_of_interest()
        vi_indexes = numpy.array(list(vi_indexes.values()))
        for monitor in session_stored_simulator.monitors:
            monitor.variables_of_interest = vi_indexes

    def reset_at_connectivity_change(self, is_simulator_copy, form,
                                     session_stored_simulator):
        """
        In case the user copies a simulation and changes the Connectivity, we want to reset the Model
        and Noise parameters, because they might not fit the new Connectivity's number of regions.
        """
        if is_simulator_copy and form.connectivity.value != session_stored_simulator.connectivity:
            self._reset_model(session_stored_simulator)
            if issubclass(type(session_stored_simulator.integrator),
                          IntegratorStochastic):
                session_stored_simulator.integrator.noise = type(
                    session_stored_simulator.integrator.noise)()

    def reset_at_surface_change(self, is_simulator_copy, form,
                                session_stored_simulator):
        """
        In case the user copies a surface simulation and changes the Surface, we want to reset the
        Model parameters, because they might not fit the new Surface's number of vertices.
        """
        if is_simulator_copy and (
                (session_stored_simulator.surface is None and form.surface.value) or
                (session_stored_simulator.surface and
                 form.surface.value != session_stored_simulator.surface.surface_gid)):
            self._reset_model(session_stored_simulator)
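
    # Note (not part of the original source): the condition above triggers a
    # reset when a surface is newly selected, removed, or swapped for a
    # different GID on a copied simulation.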

    @staticmethod
    def _set_simulator_range_parameter(simulator, range_parameter_name,
                                       range_parameter_value):
        range_param_name_list = range_parameter_name.split('.')
        current_attr = simulator
        for param_name in range_param_name_list[:-1]:
            current_attr = getattr(current_attr, param_name)
        setattr(current_attr, range_param_name_list[-1], range_parameter_value)
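
    # Illustrative sketch (hypothetical parameter name): for a dotted range
    # parameter such as 'coupling.a',
    #
    #     self._set_simulator_range_parameter(simulator, 'coupling.a', 0.5)
    #
    # resolves simulator.coupling first and then sets its 'a' attribute to 0.5.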

    def async_launch_and_prepare_simulation(self, burst_config, user, project,
                                            simulator_algo, simulator):
        try:
            operation = self.operation_service.prepare_operation(
                user.id,
                project,
                simulator_algo,
                view_model=simulator,
                burst_gid=burst_config.gid,
                op_group_id=burst_config.fk_operation_group)
            burst_config = self.burst_service.update_simulation_fields(
                burst_config, operation.id, simulator.gid)
            storage_path = self.files_helper.get_project_folder(
                project, str(operation.id))
            self.burst_service.store_burst_configuration(
                burst_config, storage_path)

            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    self.burst_service.mark_burst_finished(
                        burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflow: " + str(wf_errs) +
                              " error(s) during pre-launch steps.")

        except Exception as excep:
            self.logger.error(excep)
            if burst_config:
                self.burst_service.mark_burst_finished(
                    burst_config, error_message=str(excep))

    def prepare_simulation_on_server(self, user_id, project, algorithm,
                                     zip_folder_path, simulator_file):
        simulator_vm = h5.load_view_model_from_file(simulator_file)
        operation = self.operation_service.prepare_operation(
            user_id, project, algorithm, view_model=simulator_vm)
        self.async_launch_simulation_on_server(operation, zip_folder_path)

        return operation

    def async_launch_simulation_on_server(self, operation, zip_folder_path):
        try:
            OperationService().launch_operation(operation.id, True)
            return operation
        except Exception as excep:
            self.logger.error(excep)
        finally:
            shutil.rmtree(zip_folder_path)

    @staticmethod
    def _set_range_param_in_dict(param_value):
        if isinstance(param_value, numpy.ndarray):
            return param_value[0]
        elif isinstance(param_value, uuid.UUID):
            return param_value.hex
        else:
            return param_value
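
    # Behaviour sketch (derived from the branches above):
    #   numpy.array([0.1, 0.2]) -> 0.1   (first element)
    #   uuid.UUID               -> .hex  (string form)
    #   anything else           -> returned unchanged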

    def async_launch_and_prepare_pse(self, burst_config, user, project,
                                     simulator_algo, range_param1,
                                     range_param2, session_stored_simulator):
        try:
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            operations = []
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            first_simulator = None

            for param1_value in range_param1.get_range_values():
                for param2_value in range_param2_values:
                    # Copy, but generate a new GUID for every Simulator in PSE
                    simulator = copy.deepcopy(session_stored_simulator)
                    simulator.gid = uuid.uuid4()
                    self._set_simulator_range_parameter(
                        simulator, range_param1.name, param1_value)

                    ranges = {
                        range_param1.name:
                        self._set_range_param_in_dict(param1_value)
                    }

                    if param2_value is not None:
                        self._set_simulator_range_parameter(
                            simulator, range_param2.name, param2_value)
                        ranges[
                            range_param2.name] = self._set_range_param_in_dict(
                                param2_value)

                    ranges = json.dumps(ranges)

                    operation = self.operation_service.prepare_operation(
                        user.id,
                        project,
                        simulator_algo,
                        view_model=simulator,
                        ranges=ranges,
                        burst_gid=burst_config.gid,
                        op_group_id=burst_config.fk_operation_group)
                    simulator.range_values = ranges
                    operations.append(operation)
                    if first_simulator is None:
                        first_simulator = simulator

            first_operation = operations[0]
            storage_path = self.files_helper.get_project_folder(
                project, str(first_operation.id))
            burst_config = self.burst_service.update_simulation_fields(
                burst_config, first_operation.id, first_simulator.gid)
            self.burst_service.store_burst_configuration(
                burst_config, storage_path)
            datatype_group = DataTypeGroup(
                operation_group,
                operation_id=first_operation.id,
                fk_parent_burst=burst_config.gid,
                state=algo_category.defaultdatastate)
            dao.store_entity(datatype_group)

            metrics_datatype_group = DataTypeGroup(
                metric_operation_group,
                fk_parent_burst=burst_config.gid,
                state=algo_category.defaultdatastate)
            dao.store_entity(metrics_datatype_group)

            wf_errs = 0
            for operation in operations:
                try:
                    OperationService().launch_operation(operation.id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.burst_service.mark_burst_finished(
                        burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " +
                              str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) +
                              " had error on pre-launch steps")
            return first_operation

        except Exception as excep:
            self.logger.error(excep)
            self.burst_service.mark_burst_finished(burst_config,
                                                   error_message=str(excep))

    @staticmethod
    def compute_conn_branch_conditions(is_branch, simulator):
        if not is_branch:
            return None

        conn = load.load_entity_by_gid(simulator.connectivity)
        if conn.number_of_regions:
            return FilterChain(
                fields=[FilterChain.datatype + '.number_of_regions'],
                operations=["=="],
                values=[conn.number_of_regions])
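
    # Note (assumption, not stated in the original source): the returned
    # FilterChain restricts selectable connectivities to those with the same
    # number of regions as the one being branched from; None (no filter) is
    # returned when not branching or when the region count is falsy.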

    @staticmethod
    def validate_first_fragment(form, project_id, conn_idx):
        conn_count = dao.count_datatypes(project_id, conn_idx)
        if conn_count == 0:
            form.connectivity.errors.append(
                "No connectivity in the project! Simulation cannot be started without "
                "a connectivity!")

    def get_simulation_state_index(self, burst_config,
                                   simulation_history_class):
        parent_burst = burst_config.parent_burst_object
        simulation_state_index = dao.get_generic_entity(
            simulation_history_class, parent_burst.gid, "fk_parent_burst")

        if not simulation_state_index:
            exc = BurstServiceException(
                "Simulation State not found for %s, thus we are unable to branch from "
                "it!" % burst_config.name)
            self.logger.error(exc)
            raise exc

        return simulation_state_index