Code example #1
        def deco(*a, **b):
            try:

                return func(*a, **b)

            except common.NotAllowed as ex:
                log = get_logger(_LOGGER_NAME)
                log.error(str(ex))

                if redirect:
                    common.set_error_message(str(ex))
                    raise cherrypy.HTTPRedirect(ex.redirect_url)
                else:
                    raise cherrypy.HTTPError(ex.status, str(ex))

            except cherrypy.HTTPRedirect as ex:
                if redirect:
                    raise
                else:
                    log = get_logger(_LOGGER_NAME)
                    log.warning('Redirect converted to error: ' + str(ex))
                    # should we do this? Are browsers following redirects in ajax?
                    raise cherrypy.HTTPError(500, str(ex))

            except Exception:
                log = get_logger(_LOGGER_NAME)
                log.exception('An unexpected exception appeared')

                if redirect:
                    # set a default error message if one has not been set already
                    if not common.has_error_message():
                        common.set_error_message("An unexpected exception appeared. Please check the log files.")
                    raise cherrypy.HTTPRedirect("/tvb?error=True")
                else:
                    raise
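For context, this wrapper is presumably the innermost function of a decorator factory along these lines (a hedged sketch; the name `handle_error` and the exact signature are assumptions, not taken from the original source):

def handle_error(redirect):
    """Hypothetical factory: `redirect` decides whether failures become
    HTTP redirects (full page requests) or HTTP errors (ajax requests)."""
    def wrap(func):
        def deco(*a, **b):
            ...  # body as shown in the example above
        return deco
    return wrap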
Code example #2
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
        
    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        # use isnot(None): a plain "is not None" is evaluated by Python once, not translated to SQL
        for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group.isnot(None)).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
Code example #3
    def prepare_adapter(adapter_class):
        """
        Having a subclass of ABCAdapter, prepare an instance for launching an operation with it.
        """
        try:
            if not issubclass(adapter_class, ABCAdapter):
                raise IntrospectionException("Invalid data type: It should extend adapters.ABCAdapter!")
            algo_group = dao.find_group(adapter_class.__module__, adapter_class.__name__)

            adapter_instance = adapter_class()
            adapter_instance.algorithm_group = algo_group
            return adapter_instance
        except Exception as excep:
            get_logger("ABCAdapter").exception(excep)
            raise IntrospectionException(str(excep))
Code example #4
    def __init__(self, data_file):

        self.logger = get_logger(__name__)

        if data_file is None:
            raise ParseException("Please select NIFTI file which contains data to import")

        if not os.path.exists(data_file):
            raise ParseException("Provided file %s does not exists" % data_file)

        try:
            self.nifti_image = nib.load(data_file)
        except nib.spatialimages.ImageFileError as e:
            self.logger.exception(e)
            msg = "File: %s does not have a valid NIFTI-1 format." % data_file
            raise ParseException(msg)

        nifti_image_hdr = self.nifti_image.get_header()

        # Check if there is a time dimension (4th dimension).
        nifti_data_shape = nifti_image_hdr.get_data_shape()
        self.has_time_dimension = len(nifti_data_shape) > 3
        self.time_dim_size = nifti_data_shape[3] if self.has_time_dimension else 1

        # Extract sample unit measure
        self.units = nifti_image_hdr.get_xyzt_units()

        # Usually zooms define values for x, y, z, time and other dimensions
        self.zooms = nifti_image_hdr.get_zooms()
Code example #5
    def launch(self, data_file, surface=None):
        """
        Execute import operations:
        """
        if surface is None:
            raise LaunchException("No surface selected. Please initiate upload again and select a brain surface.")
            
        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            time_series = parser.parse(data_file)
            ts_data_shape = time_series.read_data_shape()

            if surface.number_of_vertices != ts_data_shape[1]:
                msg = "Imported time series doesn't have values for all surface vertices. Surface has %d vertices " \
                      "while time series has %d values." % (surface.number_of_vertices, ts_data_shape[1])
                raise LaunchException(msg)
            else:
                time_series.surface = surface

            return [time_series]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Code example #6
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = curent_operation.algorithm
        LOGGER.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                     " for Operation:" + str(curent_operation.id))
        PARAMS = parse_json_parameters(curent_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)

        ## Un-comment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, curent_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(curent_operation, adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        LOGGER.error("Could not execute operation " + str(sys.argv[1]))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst, error_message=str(excep))
Code example #7
File: utils.py  Project: boegel/tvb-framework
def get_matlab_executable():
    """
    Check If MATLAB is installed on current system.
    Return True or False.
    Return True, when MATLAB executable is found in Path.
    """
    matlab_exe_path = None
    if sys.platform.startswith('win'):
        split_char = ";"
        octave_exec = OCTAVE + ".exe"
        matlab_exec = MATLAB + ".exe"
    else:
        split_char = ":"
        octave_exec = OCTAVE
        matlab_exec = MATLAB
    logger = get_logger(__name__)
    logger.debug("Searching Matlab in path: " + str(os.environ["PATH"]))
    for path in os.environ["PATH"].split(split_char):
        if os.path.isfile(os.path.join(path, matlab_exec)):
            matlab_exe_path = os.path.join(path, matlab_exec)
            logger.debug("MATLAB was found:" + path)
            return matlab_exe_path
    for path in os.environ["PATH"].split(split_char):
        if os.path.isfile(os.path.join(path, octave_exec)):
            logger.debug("OCTAVE was found:" + path)
            matlab_exe_path = os.path.join(path, octave_exec)
            return matlab_exe_path
    return matlab_exe_path
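A minimal usage sketch (the call site below is an assumption, not part of the original project):

matlab = get_matlab_executable()
if matlab is None:
    print("Neither MATLAB nor Octave was found on PATH; related analyzers stay disabled.")
else:
    print("Using executable: " + matlab)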
Code example #8
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)

        self.user_service = UserService()
        self.flow_service = FlowService()

        analyze_category = self.flow_service.get_launchable_non_viewers()
        self.analyze_category_link = '/flow/step/' + str(analyze_category.id)
        self.analyze_adapters = None

        self.connectivity_tab_link = '/flow/step_connectivity'
        view_category = self.flow_service.get_visualisers_category()
        conn_id = self.flow_service.get_algorithm_by_module_and_class(CONNECTIVITY_MODULE, CONNECTIVITY_CLASS)[1].id
        connectivity_link = self.get_url_adapter(view_category.id, conn_id)

        self.connectivity_submenu = [dict(title="Large Scale Connectivity", subsection="connectivity",
                                          description="View Connectivity Regions. Perform Connectivity lesions",
                                          link=connectivity_link),
                                     dict(title="Local Connectivity", subsection="local",
                                          link='/spatial/localconnectivity/step_1/1',
                                          description="Create or view existent Local Connectivity entities.")]
        self.burst_submenu = [dict(link='/burst', subsection='burst',
                                   title='Simulation Cockpit', description='Manage simulations'),
                              dict(link='/burst/dynamic', subsection='dynamic',
                                   title='Phase plane', description='Configure model dynamics')]
Code example #9
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)

        self.user_service = UserService()
        self.flow_service = FlowService()

        self.analyze_category_link = '/flow/step_analyzers'
        self.analyze_adapters = None

        self.connectivity_tab_link = '/flow/step_connectivity'
        view_category = self.flow_service.get_visualisers_category()
        conn_id = self.flow_service.get_algorithm_by_module_and_class(CONNECTIVITY_MODULE, CONNECTIVITY_CLASS).id
        connectivity_link = self.get_url_adapter(view_category.id, conn_id)

        self.connectivity_submenu = [dict(title="Large Scale Connectivity", link=connectivity_link,
                                          subsection=WebStructure.SUB_SECTION_CONNECTIVITY,
                                          description="View Connectivity Regions. Perform Connectivity lesions"),
                                     dict(title="Local Connectivity", link='/spatial/localconnectivity/step_1/1',
                                          subsection=WebStructure.SUB_SECTION_LOCAL_CONNECTIVITY,
                                          description="Create or view existent Local Connectivity entities.")]

        allen_algo = self.flow_service.get_algorithm_by_module_and_class(ALLEN_CREATOR_MODULE, ALLEN_CREATOR_CLASS)
        if allen_algo:
            # Only add the Allen Creator if AllenSDK is installed
            allen_link = self.get_url_adapter(allen_algo.fk_category, allen_algo.id)
            self.connectivity_submenu.append(dict(title="Allen Connectome Downloader", link=allen_link,
                                                  subsection=WebStructure.SUB_SECTION_ALLEN,
                                                  description="Download a mouse connectivity from Allen dataset"))

        self.burst_submenu = [dict(link='/burst', subsection=WebStructure.SUB_SECTION_BURST,
                                   title='Simulation Cockpit', description='Manage simulations'),
                              dict(link='/burst/dynamic', subsection='dynamic',
                                   title='Phase plane', description='Configure model dynamics')]
Code example #10
 def __init__(self, conf):
     """
     :param conf: burst configuration entity
     """
     self.logger = get_logger(__name__)
     self.flow_service = FlowService()
     self.conf = conf
Code example #11
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table1 = meta.tables['MAPPED_SURFACE_DATA']

        create_column(COL_1, table1)
        create_column(COL_2, table1)
        create_column(COL_3, table1)

        try:
            session = SA_SESSIONMAKER()
            session.execute(text("UPDATE \"DATA_TYPES\" SET invalid=1 WHERE exists "
                                 "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE  _number_of_split_slices > 1 "
                                 "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()
        except ProgrammingError:
            # PostgreSQL
            session = SA_SESSIONMAKER()
            session.execute(text("UPDATE \"DATA_TYPES\" SET invalid=TRUE WHERE exists "
                                 "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE  _number_of_split_slices > 1 "
                                 "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()

    except Exception:
        logger = get_logger(__name__)
        logger.exception("Cold not create new column required by the update")
        raise
Code example #12
    def launch(self, data_file, apply_corrections=False, mappings_file=None, connectivity=None):
        """
        Execute import operations:
        """
        self.data_file = data_file

        try:
            self.parser = NIFTIParser(data_file)

            volume = self._create_volume()

            if connectivity:
                rm = self._create_region_map(volume, connectivity, apply_corrections, mappings_file)
                return [volume, rm]

            if self.parser.has_time_dimension:
                time_series = self._create_time_series(volume)
                return [volume, time_series]

            # no connectivity and no time
            mri = self._create_mri(volume)
            return [volume, mri]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Code example #13
    def __init__(self, tvb_root_folder, dist_folder, library_path):
        """
        Creates a new instance.
        :param tvb_root_folder: root tvb folder.  
        :param dist_folder: folder where distribution is built.  
        :param library_path: folder where TVB code is put into final distribution.  
        """
        self.logger = get_logger(self.__class__.__name__)
        self._dist_folder = dist_folder
        self._tvb_root_folder = tvb_root_folder
        self._manuals_folder = os.path.join(tvb_root_folder, self.DOCS_SRC, self.MANUALS)
        self._styles_folder = os.path.join(self._manuals_folder, self.STYLES)

        # Folders where to store results
        self._dist_docs_folder = os.path.join(self._dist_folder, self.DOCS)
        self._dist_api_folder = os.path.join(self._dist_folder, self.API)
        self._dist_online_help_folder = os.path.join(library_path, self.ONLINE_HELP)
        self._dist_styles_folder = os.path.join(self._dist_online_help_folder, self.STYLES)

        # Check if folders exist. If not, create them
        if not os.path.exists(self._dist_docs_folder):
            os.makedirs(self._dist_docs_folder)
        if os.path.exists(self._dist_online_help_folder):
            shutil.rmtree(self._dist_online_help_folder)
        if not os.path.exists(self._dist_api_folder):
            os.makedirs(self._dist_api_folder)
        os.makedirs(self._dist_online_help_folder)
Code example #14
File: surface.py  Project: amitsaroj001/tvb-framework
    def __init__(self, obj_file):
        """
        Create a surface from an obj file
        """
        self.logger = get_logger(__name__)

        try:
            obj = ObjParser()
            obj.read(obj_file)

            self.triangles = []
            self.vertices = obj.vertices
            self.normals = [(0.0, 0.0, 0.0)] * len(self.vertices)
            self.have_normals = len(obj.normals)

            for face in obj.faces:
                triangles = self._triangulate(face)
                for v_idx, t_idx, n_idx in triangles:
                    self.triangles.append(v_idx)
                    if n_idx != -1:
                        # last normal index wins
                        # alternative: self.normals[v_idx] += obj.normals[n_idx]
                        # The correct behaviour is to duplicate the vertex
                        # self.vertices.append(self.vertices[v_idx])
                        # self.tex_coords.append(self.tex_coords[v_idx])
                        self.normals[v_idx] = obj.normals[n_idx]
            # checks
            if not self.vertices or not self.triangles:
                raise ParseException("No geometry data in file.")
            self._to_numpy()
        except ValueError as ex:
            self.logger.exception(" Error in obj")
            raise ParseException(str(ex))
Code example #15
def get_logger(name):
    try:
        from tvb.basic.logger.builder import get_logger
        return get_logger(name)
    except ImportError:
        import logging
        return logging.getLogger(name)
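Either branch returns an object with the standard logging API, so callers can use it uniformly (illustrative only):

log = get_logger(__name__)
log.info("get_logger falls back to the stdlib logging module when TVB is not installed")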
Code example #16
    def build_adapter(algo_group):
        """
        Having a module and a class name, create an instance of ABCAdapter.
        """
        logger = get_logger("ABCAdapter")
        try:
            ad_module = importlib.import_module(algo_group.module)
            # This does not work for all adapters, so leave it for the developer to trigger manually
            if TvbProfile.env.IS_WORK_IN_PROGRESS:
                reload(ad_module)
                logger.info("Reloaded %r", ad_module)

            adapter = getattr(ad_module, algo_group.classname)

            if algo_group.init_parameter is not None and len(algo_group.init_parameter) > 0:
                adapter_instance = adapter(str(algo_group.init_parameter))
            else:
                adapter_instance = adapter()
            if not isinstance(adapter_instance, ABCAdapter):
                raise IntrospectionException("Invalid data type: It should extend adapters.ABCAdapter!")
            adapter_instance.algorithm_group = algo_group
            return adapter_instance
        except Exception as excep:
            logger.exception(excep)
            raise IntrospectionException(str(excep))
Code example #17
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
Code example #18
 def __init__(self, model, integrator):
     self.log = get_logger(self.__class__.__module__)
     self.model = model
     self.integrator = integrator
     #Make sure the model is fully configured...
     self.model.configure()
     self.model.update_derived_parameters()
Code example #19
 def wrapper(*args, **kwargs):
     log = get_logger(_LOGGER_NAME)
     profile_file = func.__name__ + datetime.now().strftime("%d-%H-%M-%S.%f") + ".profile"
     log.info("profiling function %s. Profile stored in %s" % (func.__name__, profile_file))
     prof = cProfile.Profile()
     ret = prof.runcall(func, *args, **kwargs)
     prof.dump_stats(profile_file)
     return ret
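This is the inner wrapper of a profiling decorator; a hedged sketch of how it is typically wrapped and applied (the decorator name `profile_calls` and the sample function are assumptions):

def profile_calls(func):
    def wrapper(*args, **kwargs):
        ...  # body as shown in the example above
    return wrapper

@profile_calls
def expensive_computation(n):
    return sum(i * i for i in range(n))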
Code example #20
 def __init__(self):
     BaseController.__init__(self)
     self.flow_service = FlowService()
     self.logger = get_logger(__name__)
     editable_entities = [dict(link='/spatial/stimulus/region/step_1_submit/1/1', title='Region Stimulus',
                               subsection='regionstim', description='Create a new Stimulus on Region level'),
                          dict(link='/spatial/stimulus/surface/step_1_submit/1/1', title='Surface Stimulus',
                               subsection='surfacestim', description='Create a new Stimulus on Surface level')]
     self.submenu_list = editable_entities
Code example #21
 def __init__(self):
     # Will be populated with keys from DataTypeMetaData
     self.meta_data = {DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}
     self.file_handler = FilesHelper()
     self.storage_path = '.'
     # Will be populated with the currently running operation's identifier
     self.operation_id = None
     self.user_id = None
     self.log = get_logger(self.__class__.__module__)
     self.tree_manager = InputTreeManager()
Code example #22
def initialize_storage():
    """
    Create Projects storage root folder in case it does not exist.
    """
    try:
        helper = FilesHelper()
        helper.check_created()
    except FileStructureException:
        # Do nothing, because we do not have any UI to display exception
        logger = get_logger("tvb.core.services.initialize_storage")
        logger.exception("Could not make sure the root folder exists!")
Code example #23
File: pse_isocline.py  Project: maedoc/tvb-framework
    def __init__(self, range1, range2, apriori_data, metrics, datatype_gids):
        self.log = get_logger(self.__class__.__name__)
        # ranges
        all_numbers_range1, self.range1_name, self.range1 = OperationGroup.load_range_numbers(range1)
        all_numbers_range2, self.range2_name, self.range2 = OperationGroup.load_range_numbers(range2)

        # Data from which to interpolate larger 2-D space
        self.apriori_x = self._prepare_axes(self.range1, all_numbers_range1)
        self.apriori_y = self._prepare_axes(self.range2, all_numbers_range2)
        self.apriori_data = apriori_data
        self.datatypes_gids = datatype_gids
        self.metrics = metrics
Code example #24
File: parser.py  Project: amitsaroj001/tvb-framework
    def __init__(self, storage_path, key_edge_weight=None, key_edge_tract=None, key_node_coordinates=None,
                 key_node_label=None, key_node_region=None, key_node_hemisphere=None):

        self.logger = get_logger(__name__)
        self.storage_path = storage_path

        NetworkxParser._append_key(key_edge_weight, self.KEY_EDGE_WEIGHT)
        NetworkxParser._append_key(key_edge_tract, self.KEY_EDGE_TRACT)
        NetworkxParser._append_key(key_node_coordinates, self.KEY_NODE_COORDINATES)
        NetworkxParser._append_key(key_node_label, self.KEY_NODE_LABEL)
        NetworkxParser._append_key(key_node_region, self.KEY_NODE_REGION)
        NetworkxParser._append_key(key_node_hemisphere, self.KEY_NODE_HEMISPHERE)
Code example #25
    def launch(self, data_file, apply_corrections=False, connectivity=None):
        """
        Execute import operations:
        """

        try:
            parser = NIFTIParser(data_file)

            # Create volume DT
            volume = Volume(storage_path=self.storage_path)
            volume.set_operation_id(self.operation_id)
            volume.origin = [[0.0, 0.0, 0.0]]
            volume.voxel_size = [parser.zooms[0], parser.zooms[1], parser.zooms[2]]
            if parser.units is not None and len(parser.units) > 0:
                volume.voxel_unit = parser.units[0]

            if parser.has_time_dimension or not connectivity:
                # Now create TimeSeries and fill it with data from NIFTI image
                time_series = TimeSeriesVolume(storage_path=self.storage_path)
                time_series.set_operation_id(self.operation_id)
                time_series.volume = volume
                time_series.title = "NIFTI Import - " + os.path.split(data_file)[1]
                time_series.labels_ordering = ["Time", "X", "Y", "Z"]
                time_series.start_time = 0.0

                if len(parser.zooms) > 3:
                    time_series.sample_period = float(parser.zooms[3])
                else:
                    # If no time dim, set sampling to 1 sec
                    time_series.sample_period = 1

                if parser.units is not None and len(parser.units) > 1:
                    time_series.sample_period_unit = parser.units[1]

                parser.parse(time_series, True)
                return [volume, time_series]

            else:
                region2volume_mapping = RegionVolumeMapping(storage_path=self.storage_path)
                region2volume_mapping.set_operation_id(self.operation_id)
                region2volume_mapping.volume = volume
                region2volume_mapping.connectivity = connectivity
                region2volume_mapping.title = "NIFTI Import - " + os.path.split(data_file)[1]
                region2volume_mapping.dimensions_labels = ["X", "Y", "Z"]
                region2volume_mapping.apply_corrections = apply_corrections

                parser.parse(region2volume_mapping, False)
                return [volume, region2volume_mapping]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Code example #26
        def deco(*a, **b):
            try:
                ## Un-comment below for profiling each request:
                #import cherrypy.lib.profiler as profiler
                #p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
                #return p.run(profile, template_path, func, *a, **b)

                template_dict = func(*a, **b)
                if not cfg.RENDER_HTML:
                    return template_dict
                    ### Generate HTML given the path to the template and the data dictionary.
                loader = TemplateLoader()
                template = loader.load(template_path)
                stream = template.generate(**template_dict)
                return stream.render('xhtml')
            except Exception as excep:
                if isinstance(excep, cherrypy.HTTPRedirect):
                    raise
                get_logger("tvb.interface.web.controllers.base_controller").exception(excep)
                set_error_message("An unexpected exception appeared. Please contact your system administrator.")
                raise cherrypy.HTTPRedirect("/tvb?error=True")
Code example #27
    def __init__(self):
        self.logger = get_logger(__name__)
        first_run = TvbProfile.is_first_run()
        storage = TvbProfile.current.TVB_STORAGE if not first_run else TvbProfile.current.DEFAULT_STORAGE
        self.configurable_keys = {
            self.KEY_STORAGE: {'label': 'Root folder for all projects', 'value': storage,
                               'readonly': not first_run, 'type': 'text'},
            self.KEY_MAX_DISK_SPACE_USR: {'label': 'Max hard disk space per user (MBytes)',
                                          'value': TvbProfile.current.MAX_DISK_SPACE / 2 ** 10, 'type': 'text'},
            self.KEY_MATLAB_EXECUTABLE: {'label': 'Optional Matlab or Octave path', 'type': 'text',
                                         'value': TvbProfile.current.MATLAB_EXECUTABLE or get_matlab_executable() or '',
                                         'description': 'Some analyzers will not be available when '
                                                        'matlab/octave are not found'},
            self.KEY_SELECTED_DB: {'label': 'Select one DB engine', 'value': TvbProfile.current.db.SELECTED_DB,
                                   'type': 'select', 'readonly': not first_run,
                                   'options': TvbProfile.current.db.ACEEPTED_DBS},
            self.KEY_DB_URL: {'label': "DB connection URL",
                              'value': TvbProfile.current.db.ACEEPTED_DBS[TvbProfile.current.db.SELECTED_DB],
                              'type': 'text', 'readonly': TvbProfile.current.db.SELECTED_DB == 'sqlite'},

            self.KEY_PORT: {'label': 'Port to run Cherrypy on',
                            'value': TvbProfile.current.web.SERVER_PORT, 'dtype': 'primitive', 'type': 'text'},
            self.KEY_PORT_MPLH5: {'label': 'Port to run Matplotlib on', 'type': 'text', 'dtype': 'primitive',
                                  'value': TvbProfile.current.web.MPLH5_SERVER_PORT},
            self.KEY_URL_WEB: {'label': 'URL for accessing web',
                               'value': TvbProfile.current.web.BASE_URL, 'type': 'text', 'dtype': 'primitive'},
            self.KEY_URL_MPLH5: {'label': 'URL for accessing MPLH5 visualizers', 'type': 'text',
                                 'value': TvbProfile.current.web.MPLH5_SERVER_URL, 'dtype': 'primitive'},

            self.KEY_MAX_NR_THREADS: {'label': 'Maximum no. of threads for local installations', 'type': 'text',
                                      'value': TvbProfile.current.MAX_THREADS_NUMBER, 'dtype': 'primitive'},
            self.KEY_MAX_RANGE: {'label': 'Maximum no. of operations in one PSE',
                                 'description': "Parameters Space Exploration (PSE) maximum number of operations",
                                 'value': TvbProfile.current.MAX_RANGE_NUMBER, 'type': 'text', 'dtype': 'primitive'},
            self.KEY_MAX_NR_SURFACE_VERTEX: {'label': 'Maximum no. of vertices in a surface',
                                             'type': 'text', 'dtype': 'primitive',
                                             'value': TvbProfile.current.MAX_SURFACE_VERTICES_NUMBER},
            self.KEY_CLUSTER: {'label': 'Deploy on cluster', 'value': TvbProfile.current.cluster.IS_DEPLOY,
                               'description': 'Check this only if on the web-server machine OARSUB command is enabled.',
                               'dtype': 'primitive', 'type': 'boolean'},
            self.KEY_ADMIN_NAME: {'label': 'Administrator User Name',
                                  'value': TvbProfile.current.web.admin.ADMINISTRATOR_NAME,
                                  'type': 'text', 'readonly': not first_run,
                                  'description': ('Password and Email can be edited after first run, '
                                                  'from the profile page directly.')},
            self.KEY_ADMIN_PWD: {'label': 'Password',
                                 'value': TvbProfile.current.web.admin.ADMINISTRATOR_BLANK_PWD if first_run
                                 else TvbProfile.current.web.admin.ADMINISTRATOR_PASSWORD,
                                 'type': 'password', 'readonly': not first_run},
            self.KEY_ADMIN_EMAIL: {'label': 'Administrator Email',
                                   'value': TvbProfile.current.web.admin.ADMINISTRATOR_EMAIL,
                                   'readonly': not first_run, 'type': 'text'}}
Code example #28
def downgrade(migrate_engine):
    """
    Operations to reverse the above upgrade go here.
    """
    try:
        meta.bind = migrate_engine
        table = meta.tables['PROJECTS']
        drop_column(COL_VERSION, table)

    except Exception:
        logger = get_logger(__name__)
        logger.warning("Cold not remove column as required by the downgrade")
        raise
Code example #29
    def launch(self, data_file):
        """
        Execute import operations:
        """
        parser = NIFTIParser(self.storage_path, self.operation_id)
        try:
            time_series = parser.parse(data_file)

            return [time_series.volume, time_series]             
        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Code example #30
File: xml_reader.py  Project: boegel/tvb-framework
 def __init__(self, interface_file):
     """
     Validate and read XML.
     Private constructor. Should not be used directly, but rather through XMLGroupReader.get_instance
     """
     self.logger = get_logger(self.__class__.__module__ + '.')
     self.logger.debug("Starting to validate XML file " + interface_file)
     try:
         pyxsval.parseAndValidate(interface_file)
     except pyxsval.XsvalError as errstr:
         msg = "The XML file " + str(interface_file) + " is not valid. "
         self.logger.error(msg + "Error message: " + str(errstr))
         raise XmlParserException(msg + "Error message: " + str(errstr))
Code example #31
 def __init__(self):
     ABCUploader.__init__(self)
     self.logger = get_logger(self.__class__.__module__)
Code example #32
class MappedType(model.DataType, mapped.MappedTypeLight):
    """
    Mix-in class combining core Traited mechanics with the db'ed DataType
    class enabling SQLAlchemy.
    """
    #### Transient fields below
    storage_path = None
    framework_metadata = None
    logger = get_logger(__name__)
    _ui_complex_datatype = False

    def __init__(self, **kwargs):
        """
        :param kwargs: initialization arguments for generic class.
                       Traited fields are optional to appear here. 
                       If not here, default traited value will be taken. 
        """
        if KWARG_STORAGE_PATH in kwargs:
            self.storage_path = kwargs[KWARG_STORAGE_PATH]
            kwargs.pop(KWARG_STORAGE_PATH)
        self._current_metadata = dict()
        super(MappedType, self).__init__(**kwargs)

    @declared_attr
    def __tablename__(cls):
        """
        Overwrite field __tablename__ for class.
        :return None if MappedType itself, custom table name, to recognize Mapped Table in DB.
        """
        if 'MappedType' in cls.__name__:
            return None
        return cls.compute_table_name()

    @classmethod
    def compute_table_name(cls):
        """
        For current class, if to be persisted in DB, compute proper table name.
        """
        return compute_table_name(cls.__name__)

    def __get__(self, inst, cls):
        """
        Called when an attribute of Type is retrieved on another class/instance.
        """
        if inst is None:
            return self
        if self.trait.bound:
            ### Return simple DB field or cached value
            return get(inst, '__' + self.trait.name, None)
        else:
            return self

    def __set__(self, inst, value):
        """
        Add DB code for when an attribute of MappedType class is set on another entity.
        """
        instance_gid, full_instance = None, None
        if value is None or isinstance(value, (str, unicode)):
            #### We consider the string represents a GID
            instance_gid = value
            if value is not None:
                instances_arr = dao.get_generic_entity(self.__class__,
                                                       instance_gid, 'gid')
                if len(instances_arr) > 0:
                    full_instance = instances_arr[0]
                else:
                    msg = "Could not set '%s' field on '%s' because there is no '%s' with gid: %s in database." \
                          % (self.trait.name, inst.__class__.__name__, self.__class__.__name__, instance_gid)
                    raise MissingEntityException(msg)
        else:
            instance_gid = value.gid
            full_instance = value
        self._put_value_on_instance(inst, instance_gid)

        if self.trait.bound:
            setattr(inst, '__' + self.trait.name, full_instance)

    def initialize(self):
        """
        Method automatically called immediately after DB-Load.
        """
        self.set_operation_id(self.fk_from_operation)
        return self

    def validate(self, ignore_list=None):
        """
        This method checks if the data stored into this entity is valid, 
        and ready to be stored in DB.
        Method automatically called just before saving entity in DB.
        In case data is not valid an Exception should be thrown.
        :param ignore_list: list of strings representing names of the attributes to not be 
                            validated.   
        """
        for key, attr in self.trait.iteritems():
            # Skip attribute
            if ignore_list is not None and key in ignore_list:
                continue

            if attr.trait.required:
                # In case of fields with data stored on disk check shape
                if isinstance(attr, mapped.Array):
                    if attr.trait.file_storage != FILE_STORAGE_NONE:
                        # Check if any data stored in corresponding dataset
                        try:
                            self.get_data_shape(key)
                        except MissingDataSetException:
                            raise ValidationException(
                                "Could not store '%s' because required array '%s' is missing."
                                % (self.__class__.__name__, key))
                        except IOError:
                            raise ValidationException(
                                "Could not store '%s' because there is no HDF5 file associated."
                                % (self.__class__.__name__))

                elif not hasattr(self, key) or getattr(self, key) is None:
                    raise ValidationException(
                        "Could not store '%s' because required attribute '%s' is missing."
                        % (self.__class__.__name__, key))

    def set_operation_id(self, operation_id):
        """
        Setter for FK_operation_id.
        """
        self.fk_from_operation = operation_id
        parent_project = dao.get_project_for_operation(operation_id)
        self.storage_path = FilesHelper().get_project_folder(
            parent_project, str(operation_id))
        self._storage_manager = None

    # ---------------------------- FILE STORAGE -------------------------------
    ROOT_NODE_PATH = "/"

    def store_data(self, data_name, data, where=ROOT_NODE_PATH):
        """
        Store data into a HDF5 file on disk. Each data will be stored into a 
        dataset with the provided name.
            ::param data_name: name of the dataset where to store data
            ::param data: data to be stored (can be a list / array / numpy array...) 
            ::param where: represents the path where to store our dataset (e.g. /data/info) 
        """
        store_manager = self._get_file_storage_mng()
        store_manager.store_data(data_name, data, where)
        ### Also store Array specific meta-data.
        meta_dictionary = self.__retrieve_array_metadata(data, data_name)
        self.set_metadata(meta_dictionary, data_name, where=where)

    def store_data_chunk(self,
                         data_name,
                         data,
                         grow_dimension=-1,
                         close_file=True,
                         where=ROOT_NODE_PATH):
        """
        Store data into a HDF5 file on disk by writing chunks. 
        Data will be stored into a data-set with the provided name.
            ::param data_name: name of the data-set where to store data
            ::param data: data to be stored (can be a list / array / numpy array...)
            ::param grow_dimension: The dimension to be used to grow stored array. 
                                   If not provided a default value = -1 is used (grow on LAST dimension).
            ::param close_file: Specify if the file should be closed automatically after write operation. 
                                If not, you have to close file by calling method close_file()
            ::param where: represents the path where to store our dataset (e.g. /data/info)  
        """
        if isinstance(data, list):
            data = numpy.array(data)
        store_manager = self._get_file_storage_mng()
        store_manager.append_data(data_name, data, grow_dimension, close_file,
                                  where)

        ### Start updating array meta-data after new chunk of data stored.
        new_metadata = self.__retrieve_array_metadata(data, data_name)
        previous_meta = dict()
        if data_name in self._current_metadata:
            previous_meta = self._current_metadata[data_name]
        self.__merge_metadata(new_metadata, previous_meta, data)
        self._current_metadata[data_name] = new_metadata

    def get_data(self,
                 data_name,
                 data_slice=None,
                 where=ROOT_NODE_PATH,
                 ignore_errors=False):
        """
        This method reads data from the given data set based on the slice specification
            ::param data_name: Name of the data set from where to read data
            ::param data_slice: Specify how to retrieve data from array {e.g [slice(1,10,1),slice(1,6,2)] ]
            ::param where: represents the path where dataset is stored (e.g. /data/info)  
            ::return: a numpy.ndarray containing filtered data
        """
        store_manager = self._get_file_storage_mng()
        return store_manager.get_data(data_name, data_slice, where,
                                      ignore_errors)

    def get_data_shape(self, data_name, where=ROOT_NODE_PATH):
        """
        This method reads data-shape from the given data set
            ::param data_name: Name of the data set from where to read size
            ::param where: represents the path where dataset is stored (e.g. /data/info)  
            ::return: a shape tuple
        """
        if TVBSettings.TRAITS_CONFIGURATION.use_storage and self.trait.use_storage:
            try:
                store_manager = self._get_file_storage_mng()
                return store_manager.get_data_shape(data_name, where)
            except IOError as excep:
                self.logger.warning(str(excep))
                self.logger.warning(
                    "Could not read shape from file. Most probably because data was not written...."
                )
                return ()
        else:
Code example #33
 def __init__(self, *args, **kwargs):
     super().__init__(*args, **kwargs)
     self.logger = get_logger(self.__class__.__module__)
     self.simulation_facade = SimulationFacade()
Code example #34
### Overwrite PostgreSQL number of connections when executed in the context of a node
from tvb.basic.config.settings import TVBSettings

TVBSettings.MAX_DB_CONNECTIONS = TVBSettings.MAX_DB_ASYNC_CONNECTIONS
TVBSettings.OPERATION_EXECUTION_PROCESS = True

import matplotlib
from tvb.basic.logger.builder import get_logger
from tvb.core.adapters.abcadapter import ABCAdapter
from tvb.core.entities.storage import dao
from tvb.core.utils import parse_json_parameters
from tvb.core.traits import db_events
from tvb.core.services.operationservice import OperationService
from tvb.core.services.workflowservice import WorkflowService

LOGGER = get_logger('tvb.core.operation_async_launcher')
matplotlib.use('module://tvb.interfaces.web.mplh5.mplh5_backend')


def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.
    """
    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        algorithm = curent_operation.algorithm
        algorithm_group = dao.get_algo_group_by_id(algorithm.fk_algo_group)
        LOGGER.debug("Importing Algorithm: " + str(algorithm_group.classname) +
                     " for Operation:" + str(curent_operation.id))
        PARAMS = parse_json_parameters(curent_operation.parameters)
Code example #35
 def __init__(self):
     ABCSynchronous.__init__(self)
     self.logger = get_logger(self.__class__.__module__)
Code example #36
File: zip_surface_importer.py  Project: yop0/tvb-root
class ZIPSurfaceImporter(ABCUploader):
    """
    Handler for uploading a Surface Data archive, with files holding
    vertices, normals and triangles that represent surface data.
    """

    _ui_name = "Surface ZIP"
    _ui_subsection = "zip_surface_importer"
    _ui_description = "Import a Surface from ZIP"
    logger = get_logger(__name__)

    def get_form_class(self):
        return ZIPSurfaceImporterForm

    def get_output(self):
        return [SurfaceIndex]

    @staticmethod
    def _make_surface(surface_type):

        result = make_surface(surface_type)

        if result is not None:
            return result

        exception_str = "Could not determine surface type (selected option %s)" % surface_type
        raise LaunchException(exception_str)

    def launch(self, view_model):
        # type: (ZIPSurfaceImporterModel) -> [SurfaceIndex]
        """
        Execute import operations: unpack ZIP and build Surface object as result
        :raises LaunchException: when
                * `uploaded` is missing
                * `surface_type` is invalid
        :raises RuntimeError: when triangles contain an invalid vertex index
        """
        if view_model.uploaded is None:
            raise LaunchException(
                "Please select ZIP file which contains data to import")

        self.logger.debug("Start to import surface: '%s' from file: %s" %
                          (view_model.surface_type, view_model.uploaded))
        try:
            zip_surface = ZipSurfaceParser(view_model.uploaded)
        except IOError:
            exception_str = "Did not find the specified ZIP at %s" % view_model.uploaded
            raise LaunchException(exception_str)

        # Detect and instantiate correct surface type
        self.logger.debug("Create surface instance")
        surface = self._make_surface(view_model.surface_type)
        surface.zero_based_triangles = view_model.zero_based_triangles
        if view_model.should_center:
            vertices = center_vertices(zip_surface.vertices)
        else:
            vertices = zip_surface.vertices
        surface.vertices = vertices
        if len(zip_surface.normals) != 0:
            surface.vertex_normals = zip_surface.normals
        if view_model.zero_based_triangles:
            surface.triangles = zip_surface.triangles
        else:
            surface.triangles = zip_surface.triangles - 1

        if zip_surface.bi_hemispheric:
            self.logger.info("Hemispheres detected")

        surface.hemisphere_mask = zip_surface.hemisphere_mask
        surface.compute_triangle_normals()

        # Now check if the triangles of the surface are valid
        triangles_min_vertex = numpy.amin(surface.triangles)
        if triangles_min_vertex < 0:
            if triangles_min_vertex == -1 and not view_model.zero_based_triangles:
                raise LaunchException(
                    "Triangles contain a negative vertex index. Maybe you have a ZERO based surface."
                )
            else:
                raise LaunchException(
                    "Your triangles contain a negative vertex index: %d" %
                    triangles_min_vertex)

        no_of_vertices = len(surface.vertices)
        triangles_max_vertex = numpy.amax(surface.triangles)
        if triangles_max_vertex >= no_of_vertices:
            if triangles_max_vertex == no_of_vertices and view_model.zero_based_triangles:
                raise LaunchException(
                    "Your triangles contain an invalid vertex index: %d. "
                    "Maybe your surface is NOT ZERO Based." %
                    triangles_max_vertex)
            else:
                raise LaunchException(
                    "Your triangles contain an invalid vertex index: %d." %
                    triangles_max_vertex)

        validation_result = surface.validate()

        if validation_result.warnings:
            self.add_operation_additional_info(validation_result.summary())

        surface.configure()
        self.logger.debug("Surface ready to be stored")

        return h5.store_complete(surface, self.storage_path)
Code example #37
class SensorsInternal(Sensors, TVBSensorsInternal):
    logger = get_logger(__name__)
    elec_labels = NArray(dtype=str,
                         label="Electrodes' labels",
                         default=None,
                         required=False,
                         doc="""Labels of electrodes.""")

    elec_inds = NArray(dtype=int,
                       label="Electrodes' indices",
                       default=None,
                       required=False,
                       doc="""Indices of electrodes.""")

    @property
    def number_of_electrodes(self):
        if self.elec_labels is None:
            return 0
        else:
            return len(self.elec_labels)

    @property
    def channel_labels(self):
        return self.elec_labels

    @property
    def channel_inds(self):
        return self.elec_inds

    def configure(self):
        super(SensorsInternal, self).configure()
        if self.number_of_sensors > 0:
            self.elec_labels, self.elec_inds = self.group_sensors_to_electrodes(
            )
        else:
            self.elec_labels = None
            self.elec_inds = None

    def get_elecs_inds_by_elecs_labels(self, lbls):
        if self.elec_labels is not None:
            return labels_to_inds(self.elec_labels, lbls)
        else:
            return None

    def get_sensors_inds_by_elec_labels(self, lbls):
        elec_inds = self.get_elecs_inds_by_elecs_labels(lbls)
        if elec_inds is not None:
            sensors_inds = []
            for ind in elec_inds:
                sensors_inds += self.elec_inds[ind]
            return np.unique(sensors_inds)

    def group_sensors_to_electrodes(self, labels=None):
        if self.sensors_type == SensorTypes.TYPE_INTERNAL.value:
            if labels is None:
                labels = self.labels
            sensor_names = np.array(split_string_text_numbers(labels))
            elec_labels = np.unique(sensor_names[:, 0])
            elec_inds = []
            for chlbl in elec_labels:
                elec_inds.append(np.where(sensor_names[:, 0] == chlbl)[0])
            return np.array(elec_labels), np.array(elec_inds)
        else:
            self.logger.warning("No multisensor electrodes for %s sensors!" %
                                self.sensors_type)
            return self.elec_labels, self.elec_inds

    def get_bipolar_elecs(self, elecs):
        try:
            bipolar_sensors_lbls = []
            bipolar_sensors_inds = []
            if self.elec_inds is None:
                return None
            for elec_ind in elecs:
                curr_inds, curr_lbls = self.get_bipolar_sensors(
                    sensors_inds=self.elec_inds[elec_ind])
                bipolar_sensors_inds.append(curr_inds)
                bipolar_sensors_lbls.append(curr_lbls)
        except:
            elecs_inds = self.get_elecs_inds_by_elecs_labels(elecs)
            if elecs_inds is None:
                return None
            bipolar_sensors_inds, bipolar_sensors_lbls = self.get_bipolar_elecs(
                elecs_inds)
        return bipolar_sensors_inds, bipolar_sensors_lbls

    def to_tvb_instance(self, **kwargs):
        return super(SensorsInternal,
                     self).to_tvb_instance(TVBSensorsInternal, **kwargs)
Code example #38
File: files_helper.py  Project: liadomide/tvb-root
 def __init__(self):
     self.logger = get_logger(self.__class__.__module__)
Code example #39
 def __init__(self, model, integrator):
     self.log = get_logger(self.__class__.__module__)
     self.model = model
     self.integrator = integrator
Code example #40
 def __init__(self, config=CONFIGURED):
     self.config = config
     self.logger = get_logger(self.__class__.__name__)
     self.print_regions_indices = True
     matplotlib.use(self.config.MATPLOTLIB_BACKEND)
     pyplot.rcParams["font.size"] = self.config.FONTSIZE
Code example #41
 def __init__(self):
     self.operation_service = OperationService()
     self.workflow_service = WorkflowService()
     self.logger = get_logger(self.__class__.__module__)
Code example #42
from tvb.core.entities.load import load_entity_by_gid
from tvb.core.utils import date2string, LESS_COMPLEX_TIME_FORMAT
from tvb.core.entities.storage import dao
from tvb.core.entities.file.files_helper import FilesHelper
from tvb.core.entities.transient.structure_entities import DataTypeMetaData
from tvb.core.adapters.exceptions import IntrospectionException, LaunchException, InvalidParameterException
from tvb.core.adapters.exceptions import NoMemoryAvailableException

ATT_METHOD = "python_method"
ATT_PARAMETERS = "parameters_prefix"

KEY_EQUATION = input_tree.KEY_EQUATION
KEY_FOCAL_POINTS = input_tree.KEY_FOCAL_POINTS
KEY_SURFACE_GID = input_tree.KEY_SURFACE_GID

LOGGER = get_logger("ABCAdapter")


def nan_not_allowed():
    """
    Annotation that guides NumPy behavior in case of floating point errors.
    The NumPy default is to just print a warning to sys.stdout; this annotation will instead raise our custom exception.
    This annotation enforces that an exception is thrown in case a floating point error is produced.

    e.g. If NaN is taken as input but not produced inside the context covered by this annotation,
         nothing happens from this method's point of view.

    e.g. If inside a method annotated with this method we have something like numpy.log(-1),
         then LaunchException is thrown.
    """
    def wrap(func):
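The body is truncated here. One plausible completion uses numpy.seterr to turn floating point warnings into exceptions; this is only a sketch under that assumption, not the original implementation:

import numpy
from functools import wraps
from tvb.core.adapters.exceptions import LaunchException

def nan_not_allowed():
    def wrap(func):
        @wraps(func)
        def new_function(*args, **kwargs):
            # raise FloatingPointError instead of printing a warning
            old_fp_setting = numpy.seterr(all='raise')
            try:
                return func(*args, **kwargs)
            except FloatingPointError:
                raise LaunchException("NaN values were produced during the computation.")
            finally:
                numpy.seterr(**old_fp_setting)
        return new_function
    return wrap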
Code example #43
File: run.py  Project: nuuria8/tvb-root
from tvb.interfaces.rest.server.decorators.encoders import CustomFlaskEncoder
from tvb.interfaces.rest.server.resources.datatype.datatype_resource import RetrieveDatatypeResource, \
    GetOperationsForDatatypeResource, GetExtraInfoForDatatypeResource
from tvb.interfaces.rest.server.resources.operation.operation_resource import GetOperationStatusResource, \
    GetOperationResultsResource, LaunchOperationResource
from tvb.interfaces.rest.server.resources.project.project_resource import GetOperationsInProjectResource, \
    GetDataInProjectResource, ProjectMembersResource
from tvb.interfaces.rest.server.resources.simulator.simulation_resource import FireSimulationResource
from tvb.interfaces.rest.server.resources.user.user_resource import LoginUserResource, GetProjectsListResource, \
    GetUsersResource, LinksResource
from tvb.interfaces.rest.server.rest_api import RestApi
from werkzeug.middleware.proxy_fix import ProxyFix

TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)

LOGGER = get_logger('tvb.interfaces.rest.server.run')
LOGGER.info("TVB application will be running using encoding: " + sys.getdefaultencoding())

FLASK_PORT = 9090


def initialize_tvb_flask():
    if not os.path.exists(TvbProfile.current.TVB_STORAGE):
        try:
            os.makedirs(TvbProfile.current.TVB_STORAGE)
        except Exception:
            sys.exit("You do not have enough rights to use TVB storage folder:" + str(TvbProfile.current.TVB_STORAGE))
    try:
        initialize(skip_updates=True)
    except InvalidSettingsException as excep:
        LOGGER.exception(excep)
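
The run.py snippet stops before the Flask application is actually assembled. The sketch below shows one plausible way the imports above could be wired together; `build_app` is a hypothetical helper, the json_encoder assignment is an assumption about how CustomFlaskEncoder is used, and registering the imported *Resource classes through RestApi is left out because its registration API is not shown in this excerpt.

from flask import Flask

def build_app():
    app = Flask(__name__)
    app.json_encoder = CustomFlaskEncoder   # assumption: custom serialization for TVB entities
    app.wsgi_app = ProxyFix(app.wsgi_app)   # honour X-Forwarded-* headers behind a reverse proxy
    return app

if __name__ == "__main__":
    initialize_tvb_flask()
    build_app().run(host="0.0.0.0", port=FLASK_PORT)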
Code example #44
    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()
Code example #45
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
Code example #46
File: abcuploader.py  Project: nuuria8/tvb-root
class ABCUploader(ABCAdapter, metaclass=ABCMeta):
    """
    Base class of the uploading algorithms
    """
    LOGGER = get_logger(__name__)
    launch_mode = AdapterLaunchModeEnum.SYNC_DIFF_MEM

    def _prelaunch(self, operation, view_model, available_disk_space=0):
        """
        Before going with the usual prelaunch, get from input parameters the 'subject'.
        """
        self.generic_attributes.subject = view_model.data_subject

        trait_upload_field_names = list(
            self.get_form_class().get_upload_information().keys())
        if view_model.encrypted_aes_key is not None:
            for upload_field_name in trait_upload_field_names:
                self._decrypt_content(view_model, upload_field_name)

        return ABCAdapter._prelaunch(self, operation, view_model,
                                     available_disk_space)

    @staticmethod
    def get_path_to_encrypt(input_path):
        start_extension = input_path.rfind('.')
        path_to_encrypt = input_path[:start_extension]
        extension = input_path[start_extension:]

        return path_to_encrypt + ENCRYPTED_DATA_SUFFIX + extension

    @staticmethod
    def encrypt_password(public_key, symmetric_key):

        encrypted_symmetric_key = public_key.encrypt(
            symmetric_key,
            padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA256()),
                         algorithm=hashes.SHA256(),
                         label=None))

        return encrypted_symmetric_key

    @staticmethod
    def save_encrypted_password(encrypted_password,
                                path_to_encrypted_password):

        with open(
                os.path.join(path_to_encrypted_password,
                             ENCRYPTED_PASSWORD_NAME), 'wb') as f:
            f.write(encrypted_password)

    @staticmethod
    def _decrypt_content(view_model, trait_upload_field_name):
        if TvbProfile.current.UPLOAD_KEY_PATH is None or not os.path.exists(
                TvbProfile.current.UPLOAD_KEY_PATH):
            raise LaunchException(
                "We can not process Encrypted files at this moment, "
                "due to missing PK for decryption! Please contact the administrator!"
            )

        upload_path = getattr(view_model, trait_upload_field_name)

        # Get the encrypted password
        with open(view_model.encrypted_aes_key, 'rb') as f:
            encrypted_password = f.read()

        # Read the private key
        with open(TvbProfile.current.UPLOAD_KEY_PATH, "rb") as key_file:
            private_key = serialization.load_pem_private_key(
                key_file.read(), password=None, backend=default_backend())

        # Decrypt the password using the private key
        decrypted_password = private_key.decrypt(
            encrypted_password,
            padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA256()),
                         algorithm=hashes.SHA256(),
                         label=None))

        decrypted_password = decrypted_password.decode()

        # Get path to decrypted file
        decrypted_download_path = upload_path.replace(ENCRYPTED_DATA_SUFFIX,
                                                      DECRYPTED_DATA_SUFFIX)

        # Use the decrypted password to decrypt the message
        pyAesCrypt.decryptFile(upload_path, decrypted_download_path,
                               decrypted_password,
                               TvbProfile.current.hpc.CRYPT_BUFFER_SIZE)
        view_model.__setattr__(trait_upload_field_name,
                               decrypted_download_path)

    def get_required_memory_size(self, view_model):
        """
        Return the required memory to run this algorithm.
        As it is an upload algorithm and we do not have information about data, we can not approximate this.
        """
        return -1

    def get_required_disk_size(self, view_model):
        """
        As it is an upload algorithm and we do not have information about data, we can not approximate this.
        """
        return 0

    @staticmethod
    def read_list_data(full_path,
                       dimensions=None,
                       dtype=numpy.float64,
                       skiprows=0,
                       usecols=None):
        """
        Read numpy.array from a text file or a npy/npz file.
        """
        try:
            if full_path.endswith(".npy") or full_path.endswith(".npz"):
                array_result = numpy.load(full_path)
            else:
                array_result = numpy.loadtxt(full_path,
                                             dtype=dtype,
                                             skiprows=skiprows,
                                             usecols=usecols)
            if dimensions:
                return array_result.reshape(dimensions)
            return array_result
        except ValueError as exc:
            file_ending = os.path.split(full_path)[1]
            exc.args = (exc.args[0] + " In file: " + file_ending, )
            raise

    @staticmethod
    def read_matlab_data(path, matlab_data_name=None):
        """
        Read array from matlab file.
        """
        try:
            matlab_data = scipy_io.matlab.loadmat(path)
        except NotImplementedError:
            ABCUploader.LOGGER.error("Could not read Matlab content from: " +
                                     path)
            ABCUploader.LOGGER.error(
                "Matlab files must be saved in a format <= -V7...")
            raise

        try:
            return matlab_data[matlab_data_name]
        except KeyError:

            def double__(n):
                n = str(n)
                return n.startswith('__') and n.endswith('__')

            available = [s for s in matlab_data if not double__(s)]
            raise KeyError(
                "Could not find dataset named %s. Available datasets: %s" %
                (matlab_data_name, available))

    @staticmethod
    def get_upload_information():
        raise NotImplementedError
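
Read together, the static methods above describe the upload encryption protocol: the data file is encrypted with a symmetric AES password (pyAesCrypt), and that password is itself encrypted with the server's RSA public key using OAEP/SHA-256, so that only `_decrypt_content` can recover it. A hedged client-side sketch of that flow follows; `encrypt_for_upload`, the key path argument and the buffer size are assumptions for illustration.

import os
import pyAesCrypt
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization

def encrypt_for_upload(data_path, server_public_key_path, buffer_size=64 * 1024):
    # 1. encrypt the data file with a random AES password
    password = os.urandom(24).hex()
    encrypted_path = ABCUploader.get_path_to_encrypt(data_path)
    pyAesCrypt.encryptFile(data_path, encrypted_path, password, buffer_size)

    # 2. protect the AES password with the server's RSA public key (OAEP / SHA-256)
    with open(server_public_key_path, "rb") as key_file:
        public_key = serialization.load_pem_public_key(key_file.read(),
                                                       backend=default_backend())
    encrypted_password = ABCUploader.encrypt_password(public_key, password.encode())

    # 3. store the encrypted password next to the encrypted data file
    ABCUploader.save_encrypted_password(encrypted_password, os.path.dirname(encrypted_path))
    return encrypted_path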
Code example #47
#   Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
#   Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
#       The Virtual Brain: a simulator of primate brain network dynamics.
#   Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
"""

for release 2.0

.. moduleauthor:: Lia Domide <*****@*****.**>
"""

import os
from tvb.basic.exceptions import TVBException
from tvb.basic.logger.builder import get_logger

LOGGER = get_logger(__name__)


def update(project_path):
    """

    """

    for root, _, files in os.walk(project_path):
        for file_name in files:
            LOGGER.info("Translating from version 2 to 3 File %s" %
                        (file_name))

    raise TVBException("Not yet implemented project update logic!")
Code example #48
# coding=utf-8
# Some math tools
from itertools import product

import numpy as np
from tvb.contrib.scripts.utils.data_structures_utils import is_integer
from sklearn.cluster import AgglomerativeClustering
from tvb.basic.logger.builder import get_logger
from tvb.simulator.plot.config import FiguresConfig

logger = get_logger(__name__)


def weighted_vector_sum(weights, vectors, normalize=True):
    if isinstance(vectors, np.ndarray):
        vectors = list(vectors.T)
    if normalize:
        weights /= np.sum(weights)
    vector_sum = weights[0] * vectors[0]
    for iv in range(1, len(weights)):
        vector_sum += weights[iv] * vectors[iv]
    return np.array(vector_sum)
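
# A quick illustrative check of weighted_vector_sum (weights are normalized to sum to 1):
#   weighted_vector_sum(np.array([1.0, 3.0]), [np.array([1.0, 0.0]), np.array([0.0, 1.0])])
#   -> array([0.25, 0.75])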


def normalize_weights(weights, percentile, remove_diagonal=True, ceil=1.0):
    # Create the normalized connectivity weights:
    if len(weights) > 0:
        normalized_w = np.array(weights)
        if remove_diagonal:
            # Remove diagonal elements
            n_regions = normalized_w.shape[0]
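
The snippet above is cut off after the diagonal-removal branch. As a hedged sketch only, not the original implementation, one common way such a percentile-based normalization is finished:

def normalize_weights_sketch(weights, percentile, remove_diagonal=True, ceil=1.0):
    normalized_w = np.array(weights, dtype=float)
    if normalized_w.size == 0:
        return normalized_w
    if remove_diagonal:
        np.fill_diagonal(normalized_w, 0.0)
    # scale by the requested percentile of the positive weights, then clip at ceil
    positive = normalized_w[normalized_w > 0]
    scale = np.percentile(positive, percentile) if positive.size else 1.0
    normalized_w = normalized_w / scale
    if ceil:
        normalized_w[normalized_w > ceil] = ceil
    return normalized_w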
Code example #49
class RootDAO(object, metaclass=SESSION_META_CLASS):
    """
    GLOBAL METHODS
    """
    session = None
    logger = get_logger(__name__)

    EXCEPTION_DATATYPE_GROUP = "DataTypeGroup"
    EXCEPTION_DATATYPE_SIMULATION = SIMULATION_DATATYPE_CLASS

    def store_entity(self, entity, merge=False):
        """
        Store in DB one generic entity.
        """
        self.logger.debug("We will store entity of type: %s with id %s" %
                          (entity.__class__.__name__, str(entity.id)))

        if merge:
            self.session.merge(entity)
        else:
            self.session.add(entity)
        self.session.commit()

        self.logger.debug("After commit %s ID is %s" %
                          (entity.__class__.__name__, str(entity.id)))

        saved_entity = self.session.query(
            entity.__class__).filter_by(id=entity.id).one()
        return saved_entity

    def store_entities(self, entities_list):
        """
        Store in DB a list of generic entities.
        """
        self.session.add_all(entities_list)
        self.session.commit()

        stored_entities = []
        for entity in entities_list:
            stored_entities.append(
                self.session.query(
                    entity.__class__).filter_by(id=entity.id).one())
        return stored_entities

    def get_generic_entity(self, entity_type, filter_value, select_field="id"):
        """
        Retrieve an entity of entity_type, filtered by select_field = filter_value.
        """
        if isinstance(entity_type, str):
            classname = entity_type[entity_type.rfind(".") + 1:]
            module = importlib.import_module(
                entity_type[0:entity_type.rfind(".")])
            entity_class = getattr(module, classname)
            result = self.session.query(entity_class).filter(
                entity_class.__dict__[select_field] == filter_value).all()
        else:
            result = self.session.query(entity_type).filter(
                entity_type.__dict__[select_field] == filter_value).all()

        # Need this since entity has attributes loaded automatically on DB load from
        # traited DB events. This causes the session to see the entity as dirty and issues
        # an invalid commit() which leaves the entity unattached to any sessions later on.
        self.session.expunge_all()
        return result

    def remove_entity(self, entity_class, entity_id):
        """ 
        Find an entity by ID and type, then remove it.
        Return True when the entity was removed successfully, or False when an exception occurred.
        """
        try:
            entity = self.session.query(entity_class).filter_by(
                id=entity_id).one()
            self.session.delete(entity)
            self.session.commit()
            return True
        except NoResultFound:
            self.logger.info(
                "Entity from class %s with id %s has been already removed." %
                (entity_class, entity_id))
            return True
        except SQLAlchemyError as excep:
            self.logger.exception(excep)
            return False

    #
    # DATA_TYPE BUT GENERIC METHODS
    #

    def remove_datatype(self, gid):
        """
        When removing a DataType, load it fully so that SQLAlchemy removes it from all referenced tables.
        """
        data = self.session.query(DataType).filter(DataType.gid == gid).all()
        for entity in data:
            extended_ent = self.get_generic_entity(
                entity.module + "." + entity.type, entity.id)
            self.session.delete(extended_ent[0])
        self.session.commit()

    def get_datatype_by_id(self, data_id):
        """
        Retrieve DataType entity by ID.
        """
        result = self.session.query(DataType).filter_by(id=data_id).one()
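        # touch the lazy relation so it is loaded before the entity leaves the session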
        result.parent_operation.project
        return result

    def get_time_series_by_gid(self, data_gid):
        result = self.session.query(DataType).filter_by(gid=data_gid).one()
        result.data
        return result
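
A hedged usage sketch for get_generic_entity from the class above: the call accepts either the entity class itself or a dotted module path that is resolved at runtime via importlib. A configured database session and the dotted path shown are assumptions for illustration.

dao = RootDAO()  # assumes SESSION_META_CLASS has bound a working DB session

# filter on a non-default column, passing the class directly
groups = dao.get_generic_entity(DataTypeGroup, "0", select_field="no_of_ranges")

# the same lookup, letting the DAO resolve the class from a dotted path (illustrative path)
groups = dao.get_generic_entity("tvb.core.entities.model.model_datatype.DataTypeGroup",
                                "0", select_field="no_of_ranges")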
Code example #50
File: tools.py  Project: tjgood/tvb-library
    
    from tvb.simulator.plot.tools import *
    
    if IMPORTED_MAYAVI:
        plt = plot_function(...)

.. moduleauthor:: Stuart A. Knock <*****@*****.**>
.. moduleauthor:: Paula Sanz Leon <*****@*****.**>
"""

import numpy
import scipy as sp
import networkx as nx
from tvb.basic.logger.builder import get_logger

LOG = get_logger(__name__)

##----------------------------------------------------------------------------##
##-                  matplotlib based plotting functions                     -##
##----------------------------------------------------------------------------##

import matplotlib as mpl
import matplotlib.pyplot as pyplot
import matplotlib.colors
import matplotlib.ticker as ticker
import matplotlib.colors as colors

try:
    from mpl_toolkits.axes_grid import make_axes_locatable
    IMPORTED_MPL_TOOLKITS = True
except ImportError:
Code example #51
    def __init__(self, file_path):

        self.logger = get_logger(__name__)
        self.file_path = file_path
        self.file_stream = file_path
Code example #52
class DynamicModelController(BurstBaseController):
    KEY_CACHED_DYNAMIC_MODEL = 'cache.DynamicModelController'
    LOGGER = get_logger(__name__)

    def __init__(self):
        BurstBaseController.__init__(self)
        self.available_models = get_ui_name_to_model()
        self.available_integrators = get_ui_name_to_integrator_dict()
        self.cache = SessionCache()
        # Work around a numexpr thread safety issue. See TVB-1639.
        self.traj_lock = threading.Lock()

    def get_cached_dynamic(self, dynamic_gid):
        """
        Creating the model on every request would be expensive, so we cache it in the session.
        If nothing is cached for the given gid, a default Dynamic is created, cached and returned.
        """
        # TODO: The cached objects expire only with the session. Invalidate the cache earlier.
        if dynamic_gid not in self.cache:
            dynamic = Dynamic()
            self.cache[dynamic_gid] = dynamic
        return self.cache[dynamic_gid]

    @expose_page
    def index(self):
        dynamic_gid = utils.generate_guid()
        model_name_fragment = _InputTreeFragment()
        model_fragment = SimulatorModelFragment()
        integrator_fragment = SimulatorIntegratorFragment()

        params = {
            'title': "Dynamic model",
            'mainContent': 'burst/dynamic',
            'model_name_fragment': model_name_fragment,
            'model_form': model_fragment,
            'integrator_form': integrator_fragment,
            'dynamic_gid': dynamic_gid
        }
        self.fill_default_attributes(params)

        dynamic = self.get_cached_dynamic(dynamic_gid)
        self._configure_integrator_noise(dynamic.integrator, dynamic.model)
        return params

    def fill_default_attributes(self, param):
        return BurstBaseController.fill_default_attributes(
            self, param, subsection='phaseplane')

    @expose_json
    def model_changed(self, dynamic_gid, name):
        """
        Resets the phase plane and returns the ui model for the slider area.
        """
        dynamic = self.get_cached_dynamic(dynamic_gid)
        dynamic.model = self.available_models[name]()
        dynamic.model.configure()
        dynamic.phase_plane = phase_space_d3(dynamic.model, dynamic.integrator)
        mp_params = DynamicModelController._get_model_parameters_ui_model(
            dynamic.model)
        graph_params = DynamicModelController._get_graph_ui_model(dynamic)
        return {
            'params':
            mp_params,
            'graph_params':
            graph_params,
            'model_param_sliders_fragment':
            self._model_param_sliders_fragment(dynamic_gid),
            'axis_sliders_fragment':
            self._axis_sliders_fragment(dynamic_gid)
        }

    @expose_json
    def integrator_changed(self, dynamic_gid, **kwargs):
        # TODO: display form for integrator configuration
        # adapter = _IntegratorFragmentAdapter()
        # tree = adapter.convert_ui_inputs(kwargs, validation_required=False)
        # integrator_name = tree['integrator']
        # integrator_parameters = tree['integrator_parameters']

        # noise_framework.build_noise(integrator_parameters)
        integrator = self.available_integrators[kwargs['_integrator']]()

        dynamic = self.get_cached_dynamic(dynamic_gid)
        dynamic.integrator = integrator
        dynamic.model.integrator = integrator
        dynamic.model.configure()
        self._configure_integrator_noise(integrator, dynamic.model)

        dynamic.phase_plane = phase_space_d3(dynamic.model, dynamic.integrator)

    @staticmethod
    def _configure_integrator_noise(integrator, model):
        """
        This function has to be called after integrator construction.
        Without it the noise instance is not in a good state.

        Should the Integrator __init__ not take care of this? Or noise_framework.buildnoise?
        Should I call noise.configure() as well?
        similar to simulator.configure_integrator_noise
        """
        if isinstance(integrator, integrators.IntegratorStochastic):
            shape = (model.nvar, 1, model.number_of_modes)
            if integrator.noise.ntau > 0.0:
                integrator.noise.configure_coloured(integrator.dt, shape)
            else:
                integrator.noise.configure_white(integrator.dt, shape)

    @expose_json
    def parameters_changed(self, dynamic_gid, params):
        with self.traj_lock:
            params = json.loads(params)
            dynamic = self.get_cached_dynamic(dynamic_gid)
            model = dynamic.model
            for name, value in params.items():
                param_type = float
                if getattr(model, name).dtype == 'int':
                    param_type = int
                setattr(model, name, numpy.array([param_type(value)]))
            model.configure()
            return dynamic.phase_plane.compute_phase_plane()

    @expose_json
    def graph_changed(self, dynamic_gid, graph_state):
        with self.traj_lock:
            graph_state = json.loads(graph_state)
            dynamic = self.get_cached_dynamic(dynamic_gid)
            dynamic.phase_plane.update_axis(**graph_state)
            return dynamic.phase_plane.compute_phase_plane()

    @expose_json
    def trajectories(self, dynamic_gid, starting_points, integration_steps):
        with self.traj_lock:
            starting_points = json.loads(starting_points)
            dynamic = self.get_cached_dynamic(dynamic_gid)
            trajectories, signals = dynamic.phase_plane.trajectories(
                starting_points, int(integration_steps))

            for t in trajectories:
                if not numpy.isfinite(t).all():
                    self.logger.warn('Denaturated point %s on a trajectory' % t)
                    return {'finite': False}

            return {
                'trajectories': trajectories,
                'signals': signals,
                'finite': True
            }

    @staticmethod
    def _get_model_parameters_ui_model(model):
        """
        For each model parameter return the representation used by the ui (template & js)
        """
        ret = []
        model_form_class = get_form_for_model(type(model))
        for name in model_form_class.get_params_configurable_in_phase_plane():
            attr = getattr(type(model), name)
            ranger = attr.domain
            if ranger is None:
                DynamicModelController.LOGGER.warn(
                    "Param %s doesn't have a domain specified" % (name))
                continue
            default = float(attr.default)

            ret.append({
                'name': name,
                'label': attr.label,
                'description': attr.doc,
                'min': ranger.lo,
                'max': ranger.hi,
                'step': ranger.step,
                'default': default
            })
        return ret

    @staticmethod
    def _get_graph_ui_model(dynamic):
        model = dynamic.model
        sv_model = []
        for sv in range(model.nvar):
            name = model.state_variables[sv]
            min_val, max_val, lo, hi = dynamic.phase_plane.get_axes_ranges(
                name)
            sv_model.append({
                'name': name,
                'label': ':math:`%s`' % name,
                'description': 'state variable ' + name,
                'lo': lo,
                'hi': hi,
                'min': min_val,
                'max': max_val,
                'step': (hi - lo) / 1000.0,  # todo check if reasonable
                'default': (hi + lo) / 2
            })

        ret = {
            'modes': list(range(model.number_of_modes)),
            'state_variables': sv_model,
            'default_mode': dynamic.phase_plane.mode
        }

        if model.nvar > 1:
            ret['default_sv'] = [
                model.state_variables[dynamic.phase_plane.svx_ind],
                model.state_variables[dynamic.phase_plane.svy_ind]
            ]
            ret['integration_steps'] = {'default': 512, 'min': 32, 'max': 2048}
        else:
            ret['default_sv'] = [model.state_variables[0]]
        return ret

    @using_template('burst/dynamic_axis_sliders')
    def _axis_sliders_fragment(self, dynamic_gid):
        dynamic = self.get_cached_dynamic(dynamic_gid)
        model = dynamic.model
        ps_params = self._get_graph_ui_model(dynamic)
        templ_var = ps_params
        templ_var.update({
            'showOnlineHelp': True,
            'one_dimensional': len(model.state_variables) == 1
        })
        return templ_var

    @using_template('burst/dynamic_mp_sliders')
    def _model_param_sliders_fragment(self, dynamic_gid):
        dynamic = self.get_cached_dynamic(dynamic_gid)
        model = dynamic.model
        mp_params = self._get_model_parameters_ui_model(model)
        templ_var = {'parameters': mp_params, 'showOnlineHelp': True}
        return templ_var

    @expose_json
    def submit(self, dynamic_gid, dynamic_name):
        if dao.get_dynamic_by_name(dynamic_name):
            return {
                'saved': False,
                'msg': 'There is another configuration with the same name'
            }

        dynamic = self.get_cached_dynamic(dynamic_gid)
        model = dynamic.model
        integrator = dynamic.integrator

        model_parameters = []

        model_form_class = get_form_for_model(type(model))
        for name in model_form_class.get_params_configurable_in_phase_plane():
            value = getattr(model, name)[0]
            model_parameters.append((name, value))

        entity = model_burst.Dynamic(
            dynamic_name,
            common.get_logged_user().id, model.__class__.__name__,
            json.dumps(model_parameters), integrator.__class__.__name__, None
            # todo: serialize integrator parameters
            # json.dumps(integrator.raw_ui_integrator_parameters)
        )

        dao.store_entity(entity)
        return {'saved': True}

    @expose_fragment('burst/dynamic_minidetail')
    def dynamic_detail(self, dynamic_id):
        dynamic = dao.get_dynamic(dynamic_id)
        model_parameters = dict(json.loads(dynamic.model_parameters))
        return {'model_parameters': model_parameters}
Code example #53
    def __init__(self, zip_path):

        self.logger = get_logger(__name__)
        self.zip_archive = zipfile.ZipFile(zip_path)
Code example #54
File: flow_service.py  Project: missxa/tvb-framework
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()
        self.input_tree_manager = InputTreeManager()
Code example #55
class MappedTypeLight(Type):
    """
    Light base class for all entities which are about to be mapped in storage.
    Current light implementation is to be used with the scientific-library stand-alone mode.
    """

    METADATA_EXCLUDE_PARAMS = [
        'id', 'LINKS', 'fk_datatype_group', 'visible', 'disk_size',
        'fk_from_operation', 'parent_operation', 'fk_parent_burst'
    ]

    ### Constants when retrieving meta-data about Array attributes on the current instance.
    METADATA_ARRAY_MAX = "Maximum"
    METADATA_ARRAY_MIN = "Minimum"
    METADATA_ARRAY_MEAN = "Mean"
    METADATA_ARRAY_VAR = "Variance"
    METADATA_ARRAY_SHAPE = "Shape"
    _METADATA_ARRAY_SIZE = "Size"

    ALL_METADATA_ARRAY = {
        METADATA_ARRAY_MAX: 'max',
        METADATA_ARRAY_MIN: 'min',
        METADATA_ARRAY_MEAN: 'mean',
        METADATA_ARRAY_VAR: 'var',
        METADATA_ARRAY_SHAPE: 'shape'
    }

    logger = get_logger(__module__)

    def __init__(self, **kwargs):
        super(MappedTypeLight, self).__init__(**kwargs)
        self._current_metadata = dict()

    def accepted_filters(self):
        """
        Just offer dummy functionality in library mode.
        """
        return {}

    def get_info_about_array(self, array_name, included_info=None):
        """
        :return: dictionary {label: value} about an attribute of type mapped.Array
                 Generic information, like Max/Min/Mean/Var, is retrieved for this array attribute
        """
        included_info = included_info or {}
        summary = self.__get_summary_info(array_name, included_info)
        ### Before return, prepare names for UI display.
        result = dict()
        for key, value in summary.items():
            result[array_name.capitalize().replace("_", " ") + " - " +
                   key] = value
        return result

    def __get_summary_info(self, array_name, included_info):
        """
        Get a summary from the metadata of the current array.
        """
        summary = dict()
        array_attr = getattr(self, array_name)
        if isinstance(array_attr, numpy.ndarray):
            for key in included_info:
                if key in self.ALL_METADATA_ARRAY:
                    summary[key] = getattr(
                        array_attr, self.ALL_METADATA_ARRAY[key])()
                else:
                    self.logger.warning(
                        "Unsupported meta-data key will be ignored: " + str(key))
        return summary

    def get_data_shape(self, data_name):
        """
        This method reads the data shape from the given data set
            :param data_name: Name of the attribute from where to read size
            :return: a shape tuple
        """
        array_data = getattr(self, data_name)
        if hasattr(array_data, 'shape'):
            return getattr(array_data, 'shape')
        self.logger.warning("Could not find 'shape' attribute on " +
                            str(data_name) + " returning empty shape!!")
        return ()
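
A hypothetical call on a subclass instance exposing a numpy attribute named 'weights' (both the instance and the attribute name are assumptions for illustration):

info = datatype.get_info_about_array('weights',
                                     [MappedTypeLight.METADATA_ARRAY_MAX,
                                      MappedTypeLight.METADATA_ARRAY_MEAN])
# -> {'Weights - Maximum': ..., 'Weights - Mean': ...}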
Code example #56
class Array(Type):
    """
    Traits type that wraps a NumPy NDArray.

    Initialization requires at least a shape; when not given, it defaults to (), an empty 0-dimensional array.
    """

    wraps = numpy.ndarray
    dtype = DType()
    defaults = ((0, ), {})
    data = None
    _stored_metadata = MappedTypeLight.ALL_METADATA_ARRAY.keys()
    logger = get_logger(__module__)

    @property
    def shape(self):
        """  
        Property SHAPE for the wrapped array.
        """
        return self.data.shape

    @property
    def array_path(self):
        """  
        Property PATH relative.
        """
        return self.trait.name

    def __get__(self, inst, cls):
        """
        When an attribute of class Array is retrieved on another class.
        :param inst: It is a MappedType instance
        :param cls: MappedType subclass. When 'inst' is None and only 'cls' is passed, we do not read from storage,
                    but return traited attribute.
        :return: value of type self.wraps
        :raise Exception: when the read could not be executed, or when GET is used with incompatible attributes (e.g. chunks).
        """
        if inst is None:
            return self

        if self.trait.bound:
            return self._get_cached_data(inst)
        else:
            return self

    def __set__(self, inst, value):
        """
        This is called when an attribute of type Array is set on another class instance.
        :param inst: It is a MappedType instance
        :param value: expected to be of type self.wraps
        :raise Exception: when a value of an incompatible type is set
        """
        self._put_value_on_instance(inst, self.array_path)
        if isinstance(value, list):
            value = numpy.array(value)
        elif type(value) in (int, float):
            value = numpy.array([value])

        setattr(inst, '__' + self.trait.name, value)

    def _get_cached_data(self, inst):
        """
        Just read from instance since we don't have storage in library mode.
        """
        return get(inst, '__' + self.trait.name, None)

    def log_debug(self, owner=""):
        """
        Simple access to debugging info on a traited array, usage ::
            obj.trait["array_name"].log_debug(owner="obj")
            
        or ::
            self.trait["array_name"].log_debug(owner=self.__class__.__name__)
        """
        name = ".".join((owner, self.trait.name))
        sts = str(self.__class__)
        if self.trait.value is not None and self.trait.value.size != 0:
            shape = str(self.trait.value.shape)
            dtype = str(self.trait.value.dtype)
            tvb_dtype = str(self.trait.value.dtype)
            has_nan = str(numpy.isnan(self.trait.value).any())
            array_max = str(self.trait.value.max())
            array_min = str(self.trait.value.min())
            self.logger.debug("%s: %s shape: %s" % (sts, name, shape))
            self.logger.debug("%s: %s actual dtype: %s" % (sts, name, dtype))
            self.logger.debug("%s: %s tvb dtype: %s" % (sts, name, tvb_dtype))
            self.logger.debug("%s: %s has NaN: %s" % (sts, name, has_nan))
            self.logger.debug("%s: %s maximum: %s" % (sts, name, array_max))
            self.logger.debug("%s: %s minimum: %s" % (sts, name, array_min))
        else:
            self.logger.debug("%s: %s is Empty" % (sts, name))
Code example #57
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.storage_interface = StorageInterface()
Code example #58
File: run.py  Project: swatibanerjee29/tvb-root
from tvb.interfaces.web.controllers.project.project_controller import ProjectController
from tvb.interfaces.web.controllers.project.figure_controller import FigureController
from tvb.interfaces.web.controllers.flow_controller import FlowController
from tvb.interfaces.web.controllers.settings_controller import SettingsController
from tvb.interfaces.web.controllers.burst.region_model_parameters_controller import RegionsModelParametersController
from tvb.interfaces.web.controllers.burst.exploration_controller import ParameterExplorationController
from tvb.interfaces.web.controllers.burst.dynamic_model_controller import DynamicModelController
from tvb.interfaces.web.controllers.spatial.base_spatio_temporal_controller import SpatioTemporalController
from tvb.interfaces.web.controllers.spatial.surface_model_parameters_controller import SurfaceModelParametersController
from tvb.interfaces.web.controllers.spatial.region_stimulus_controller import RegionStimulusController
from tvb.interfaces.web.controllers.spatial.surface_stimulus_controller import SurfaceStimulusController
from tvb.interfaces.web.controllers.spatial.local_connectivity_controller import LocalConnectivityController
from tvb.interfaces.web.controllers.burst.noise_configuration_controller import NoiseConfigurationController
from tvb.interfaces.web.controllers.simulator_controller import SimulatorController

LOGGER = get_logger('tvb.interfaces.web.run')
CONFIG_EXISTS = not TvbProfile.is_first_run()
PARAM_RESET_DB = "reset"
LOGGER.info("TVB application will be running using encoding: " +
            sys.getdefaultencoding())


def init_cherrypy(arguments=None):
    #### Mount static folders from modules marked for introspection
    arguments = arguments or []
    CONFIGUER = TvbProfile.current.web.CHERRYPY_CONFIGURATION
    for module in arguments:
        module_inst = importlib.import_module(str(module))
        module_path = os.path.dirname(os.path.abspath(module_inst.__file__))
        CONFIGUER["/static_" + str(module)] = {
            'tools.staticdir.on': True,
Code example #59
def spikes_events_to_time_index(spike_time, time):
    if spike_time < time[0] or spike_time > time[-1]:
        get_logger(__name__).warning(
            "Spike time is outside the input time vector!")
    return np.argmin(np.abs(time - spike_time))
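
A brief illustrative call, assuming a regularly sampled time vector in milliseconds (values are made up):

time = np.arange(0.0, 100.0, 1.0)
idx = spikes_events_to_time_index(42.3, time)   # -> 42, the index of the closest sample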
Code example #60
    def __init__(self):
        self.introspection_registry = IntrospectionRegistry()
        self.logger = get_logger(self.__class__.__module__)