Example #1
    def submit_connectivity(self, original_connectivity, new_weights, new_tracts, interest_area_indexes, **_):
        """
        Called when the user submits changes to the
        Connectivity matrix in the Visualizer.
        """
        result = []
        conn = self.load_entity_by_gid(original_connectivity)
        self.meta_data[DataTypeMetaData.KEY_SUBJECT] = conn.subject

        new_weights = numpy.asarray(json.loads(new_weights))
        new_tracts = numpy.asarray(json.loads(new_tracts))
        interest_area_indexes = numpy.asarray(json.loads(interest_area_indexes))

        result_connectivity = conn.generate_new_connectivity_from_ordered_arrays(new_weights, interest_area_indexes,
                                                                                 self.storage_path, new_tracts)
        result.append(result_connectivity)

        linked_region_mappings = dao.get_generic_entity(RegionMapping, original_connectivity, '_connectivity')
        for mapping in linked_region_mappings:
            result.append(mapping.generate_new_region_mapping(result_connectivity.gid, self.storage_path))

        linked_projection = dao.get_generic_entity(ProjectionRegionEEG, original_connectivity, '_sources')
        for projection in linked_projection:
            result.append(projection.generate_new_projection(result_connectivity.gid, self.storage_path))
        return result
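A minimal, self-contained sketch of the JSON-to-numpy parsing step used above; the payload string is a hypothetical stand-in for what the Visualizer would submit (a JSON-encoded nested list):

import json
import numpy

# Hypothetical payload: a JSON string encoding a 2x2 weights matrix.
new_weights_json = "[[0.0, 1.5], [2.25, 0.0]]"

# Same two parsing steps as submit_connectivity above.
new_weights = numpy.asarray(json.loads(new_weights_json))
assert new_weights.shape == (2, 2)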
Example #2
    def remove_datatype(self, skip_validation=False):
        """
        Called when a Connectivity is to be removed.
        """
        if not skip_validation:
            associated_ts = dao.get_generic_entity(TimeSeriesRegion, self.handled_datatype.gid, '_connectivity')
            associated_rm = dao.get_generic_entity(RegionMapping, self.handled_datatype.gid, '_connectivity')
            associated_stim = dao.get_generic_entity(StimuliRegion, self.handled_datatype.gid, '_connectivity')
            associated_mes = dao.get_generic_entity(ConnectivityMeasure, self.handled_datatype.gid, '_connectivity')
            msg = "Connectivity cannot be removed as it is used by at least one "

            if len(associated_ts) > 0:
                raise RemoveDataTypeException(msg + " TimeSeriesRegion.")
            if len(associated_rm) > 0:
                raise RemoveDataTypeException(msg + " RegionMapping.")
            if len(associated_stim) > 0:
                raise RemoveDataTypeException(msg + " StimuliRegion.")
            if len(associated_mes) > 0:
                raise RemoveDataTypeException(msg + " ConnectivityMeasure.")

        #### Update child Connectivities, if any.
        child_conns = dao.get_generic_entity(Connectivity, self.handled_datatype.gid, '_parent_connectivity')
        
        if child_conns:
            for one_conn in child_conns[1:]:
                one_conn.parent_connectivity = child_conns[0].gid
            child_conns[0].parent_connectivity = self.handled_datatype.parent_connectivity
            for one_child in child_conns:
                dao.store_entity(one_child)
        ABCRemover.remove_datatype(self, skip_validation)
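The child re-parenting logic above can be read in isolation: the first child inherits the removed node's parent, and the remaining children are re-attached under that first child. A sketch with plain objects standing in for DB entities (the Conn class here is hypothetical):

class Conn(object):
    # Minimal stand-in for a Connectivity row; only the fields touched here.
    def __init__(self, gid, parent_connectivity=None):
        self.gid = gid
        self.parent_connectivity = parent_connectivity

def reparent_children(removed, children):
    # Mirrors the re-linking done in remove_datatype above.
    if not children:
        return
    for child in children[1:]:
        child.parent_connectivity = children[0].gid
    children[0].parent_connectivity = removed.parent_connectivity

removed = Conn("removed-gid", parent_connectivity="root-gid")
children = [Conn("a"), Conn("b"), Conn("c")]
reparent_children(removed, children)
assert children[0].parent_connectivity == "root-gid"
assert children[1].parent_connectivity == "a"
assert children[2].parent_connectivity == "a"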
        
        
Example #3
    def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
        """
        We assume there are at most 2 ranges and that each operation results in exactly one DataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param color_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
        :param size_metric:  a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`

        :returns: `ContextDiscretePSE`

        :raises Exception: when `datatype_group_gid` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)

        range1_name, range1_values, range1_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                            operation_group.range1)
        range2_name, range2_values, range2_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                            operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
        pse_context.setRanges(range1_name, range1_values, range1_labels, range2_name, range2_values, range2_labels)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            key_2 = model.RANGE_MISSING_STRING
            if range2_name is not None:
                key_2 = range_values[range2_name]

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                    else:
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not used when drawing the PSE and causes problems
        ## for NaN values, so remove it before creating the JSON.
        pse_context.datatypes_dict = {}
        return pse_context
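Note that operation_.range_values is parsed with eval(). If the stored strings are plain Python literals (dicts of range names to values, as the lookups above suggest), ast.literal_eval would be a safer drop-in; a sketch under that assumption:

import ast

# Hypothetical stored value for operation_.range_values.
stored = "{'conduction_speed': 3.0, 'coupling_a': 0.01}"

# Same result as eval(stored), but rejects anything that is not a literal.
range_values = ast.literal_eval(stored)
assert range_values['conduction_speed'] == 3.0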
Example #4
    def _find_metrics(operations):
        """ Search for an operation with results. Then get the metrics of the generated data type"""
        dt_measure = None

        for operation in operations:

            if not operation.has_finished:
                raise LaunchException("Can not display until all operations from this range are finished!")

            op_results = dao.get_results_for_operation(operation.id)
            if len(op_results):
                datatype = op_results[0]
                if datatype.type == "DatatypeMeasure":
                    ## Load proper entity class from DB.
                    dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.id)[0]
                else:
                    dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    if dt_measure:
                        dt_measure = dt_measure[0]
                break

        if dt_measure:
            return dt_measure.metrics
        else:
            raise LaunchException("No datatypes were generated due to simulation errors. Nothing to display.")
Example #5
    def _edit_data(self, datatype, new_data, from_group=False):
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        if isinstance(datatype, MappedType) and not os.path.exists(datatype.get_storage_file_path()):
            if not datatype.invalid:
                datatype.invalid = True
                dao.store_entity(datatype)
            return
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(model.OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(model.OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)

        # 2. Update dateType fields:
        datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

        datatype = dao.store_entity(datatype)
        # 3. Update MetaData in H5 as well.
        datatype.persist_full_metadata()
        # 4. Update the group_name/user_group into the operation meta-data file
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
                                                        str(datatype.fk_from_operation), from_group)
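The five user_tag branches in step 2 follow a single pattern and could be collapsed with setattr. A sketch with stub classes (the real DataTypeOverlayDetails constants may differ):

class _OverlayKeys(object):
    # Stand-in for DataTypeOverlayDetails; the real constant values may differ.
    DATA_TAG_1 = 'data_tag_1'
    DATA_TAG_2 = 'data_tag_2'

class _DataType(object):
    user_tag_1 = None
    user_tag_2 = None

datatype = _DataType()
new_data = {_OverlayKeys.DATA_TAG_1: 'session-A'}

# One loop instead of five near-identical if blocks.
for key, attr in [(_OverlayKeys.DATA_TAG_1, 'user_tag_1'),
                  (_OverlayKeys.DATA_TAG_2, 'user_tag_2')]:
    if key in new_data:
        setattr(datatype, attr, new_data[key])
assert datatype.user_tag_1 == 'session-A'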
Example #6
 def remove_datatype(self, skip_validation=False):
     """
     Called when a TimeSeries is removed.
     """
     associated_cv = dao.get_generic_entity(Covariance, self.handled_datatype.gid, '_source')
     associated_pca = dao.get_generic_entity(PrincipalComponents, self.handled_datatype.gid, '_source')
     associated_is = dao.get_generic_entity(IndependentComponents, self.handled_datatype.gid, '_source')
     associated_cc = dao.get_generic_entity(CrossCorrelation, self.handled_datatype.gid, '_source')
     associated_fr = dao.get_generic_entity(FourierSpectrum, self.handled_datatype.gid, '_source')
     associated_wv = dao.get_generic_entity(WaveletCoefficients, self.handled_datatype.gid, '_source')
     associated_cs = dao.get_generic_entity(CoherenceSpectrum, self.handled_datatype.gid, '_source')
     associated_dm = dao.get_generic_entity(DatatypeMeasure, self.handled_datatype.gid, '_analyzed_datatype')
     for datatype_measure in associated_dm:
         datatype_measure._analyzed_datatype = None
         dao.store_entity(datatype_measure)
     msg = "TimeSeries cannot be removed as it is used by at least one "
     if not skip_validation:
         if len(associated_cv) > 0:
             raise RemoveDataTypeException(msg + " Covariance.")
         if len(associated_pca) > 0:
             raise RemoveDataTypeException(msg + " PrincipalComponents.")
         if len(associated_is) > 0:
             raise RemoveDataTypeException(msg + " IndependentComponents.")
         if len(associated_cc) > 0:
             raise RemoveDataTypeException(msg + " CrossCorrelation.")
         if len(associated_fr) > 0:
             raise RemoveDataTypeException(msg + " FourierSpectrum.")
         if len(associated_wv) > 0:
             raise RemoveDataTypeException(msg + " WaveletCoefficients.")
         if len(associated_cs) > 0:
             raise RemoveDataTypeException(msg + " CoherenceSpectrum.")
     ABCRemover.remove_datatype(self, skip_validation)
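The repeated dependency checks in these removers are one pattern: raise for the first dependent type that still references the datatype. A data-driven sketch, with plain lists standing in for the dao query results:

class RemoveDataTypeException(Exception):
    pass

def check_no_dependents(msg_prefix, dependents_by_type):
    # dependents_by_type: list of (type name, query result list) pairs.
    for type_name, dependents in dependents_by_type:
        if len(dependents) > 0:
            raise RemoveDataTypeException(msg_prefix + type_name + ".")

# Hypothetical results: only Covariance still points at the TimeSeries.
checks = [("Covariance", [object()]), ("PrincipalComponents", [])]
try:
    check_no_dependents("TimeSeries cannot be removed as it is used by at least one ", checks)
except RemoveDataTypeException as exc:
    print(exc)  # ... used by at least one Covariance.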
Example #7
 def remove_datatype(self, skip_validation=False):
     """
     Called when a Volume is to be removed.
     """
     if not skip_validation:
         associated_ts = dao.get_generic_entity(TimeSeriesVolume, self.handled_datatype.gid, '_volume')
         associated_stim = dao.get_generic_entity(SpatialPatternVolume, self.handled_datatype.gid, '_volume')
         error_msg = "Surface cannot be removed because is still used by a "
         if len(associated_ts) > 0:
             raise RemoveDataTypeException(error_msg + " TimeSeriesVolume.")
         if len(associated_stim) > 0:
             raise RemoveDataTypeException(error_msg + " SpatialPatternVolume.")
         
     ABCRemover.remove_datatype(self, skip_validation)
Example #8
def _create_bench_project():
    prj = lab.new_project("benchmark_project_%s" % datetime.now())
    data_dir = path.abspath(path.dirname(tvb_data.__file__))
    zip_path = path.join(data_dir, 'connectivity', 'connectivity_68.zip')
    lab.import_conn_zip(prj.id, zip_path)
    zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip')
    lab.import_conn_zip(prj.id, zip_path)
    zip_path = path.join(data_dir, 'connectivity', 'connectivity_192.zip')
    lab.import_conn_zip(prj.id, zip_path)

    conn68 = dao.get_generic_entity(Connectivity, 68, "_number_of_regions")[0]
    conn96 = dao.get_generic_entity(Connectivity, 96, "_number_of_regions")[0]
    conn192 = dao.get_generic_entity(Connectivity, 192, "_number_of_regions")[0]
    return prj, [conn68, conn96, conn192]
Example #9
    def launch(self, datatype_group, **kwargs):
        """
        Also override launch from ABCDisplayer, since we want to handle a list of figures,
        instead of only one Matplotlib figure.

        :raises LaunchException: when called before all operations in the group are finished
        """
        if self.PARAM_FIGURE_SIZE in kwargs:
            figsize = kwargs[self.PARAM_FIGURE_SIZE]
            figsize = (figsize[0] / 80, figsize[1] / 80)
            del kwargs[self.PARAM_FIGURE_SIZE]
        else:
            figsize = (15, 7)

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
        _, range1_name, self.range1 = operation_group.load_range_numbers(operation_group.range1)
        _, range2_name, self.range2 = operation_group.load_range_numbers(operation_group.range2)

        dt_measure = None
        for operation in dao.get_operations_in_group(operation_group.id):
            if operation.status == model.STATUS_STARTED:
                raise LaunchException("Can not display until all operations from this range are finished!")

            op_results = dao.get_results_for_operation(operation.id)
            if len(op_results):
                datatype = op_results[0]
                if datatype.type == "DatatypeMeasure":
                    ## Load proper entity class from DB.
                    dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.id)[0]
                else:
                    dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    if dt_measure:
                        dt_measure = dt_measure[0]
                break

        figure_nrs = {}
        metrics = dt_measure.metrics if dt_measure else {}
        if metrics:
            for metric in metrics:
                # Separate plot for each metric.
                self._create_plot(metric, figsize, operation_group, range1_name, range2_name, figure_nrs)
        else:
            raise LaunchException("No datatypes were generated due to simulation errors. Nothing to display.")

        parameters = dict(title=self._ui_name, showFullToolbar=True,
                          serverIp=config.SERVER_IP, serverPort=config.MPLH5_SERVER_PORT,
                          figureNumbers=figure_nrs, metrics=metrics, figuresJSON=json.dumps(figure_nrs))

        return self.build_display_result("pse_isocline/view", parameters)
Example #10
    def _check_datatype_group_removed(self, datatype_group_id, operation_group_id):
        """
        Checks that the DataTypeGroup and OperationGroup were removed.
        """
        try:
            dao.get_generic_entity(model.DataTypeGroup, datatype_group_id)
            self.fail("The DataTypeGroup entity was not removed.")
        except Exception:
            pass

        try:
            dao.get_operationgroup_by_id(operation_group_id)
            self.fail("The OperationGroup entity was not removed.")
        except Exception:
            pass
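The try/fail/except pattern above swallows every exception type, so a passing test cannot distinguish "entity gone" from any other failure. If the dao lookups raise a specific error for missing rows, assertRaises states the intent directly; a sketch with a hypothetical NotFoundException:

import unittest

class NotFoundException(Exception):
    # Hypothetical; the real dao may raise a different error for missing rows.
    pass

def fake_lookup(entity_id):
    raise NotFoundException(entity_id)

class RemovalTest(unittest.TestCase):
    def test_entity_removed(self):
        # Fails for any exception other than NotFoundException.
        with self.assertRaises(NotFoundException):
            fake_lookup(42)

if __name__ == '__main__':
    unittest.main()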
Example #11
    def test_bct_all(self):
        """
        Iterate all BCT algorithms and execute them.
        """
        for i in xrange(len(self.bct_adapters)):
            for bct_identifier in self.bct_adapters[i].get_algorithms_dictionary():
                ### Prepare Operation and parameters
                algorithm = dao.get_algorithm_by_group(self.algo_groups[i].id, bct_identifier)
                operation = TestFactory.create_operation(algorithm=algorithm, test_user=self.test_user,
                                                         test_project=self.test_project,
                                                         operation_status=model.STATUS_STARTED)
                self.assertEqual(model.STATUS_STARTED, operation.status)
                ### Launch BCT algorithm
                submit_data = {self.algo_groups[i].algorithm_param_name: bct_identifier,
                               algorithm.parameter_name: self.connectivity.gid}
                try:
                    OperationService().initiate_prelaunch(operation, self.bct_adapters[i], {}, **submit_data)
                    if bct_identifier in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                        raise Exception("Algorithm %s was expected to throw input validation "
                                        "exception, but did not!" % (bct_identifier,))

                    operation = dao.get_operation_by_id(operation.id)
                    ### Check that operation status after execution is success.
                    self.assertEqual(STATUS_FINISHED, operation.status)
                    ### Make sure at least one result exists for each BCT algorithm
                    results = dao.get_generic_entity(model.DataType, operation.id, 'fk_from_operation')
                    self.assertTrue(len(results) > 0)

                except InvalidParameterException as excep:
                    ## Some algorithms are expected to throw validation exception.
                    if bct_identifier not in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                        raise excep
Example #12
 def test_remove_project_node(self):
     """
     Test removing of a node from a project.
     """
     inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
     project_to_link = model.Project("Link", self.test_user.id, "descript")
     project_to_link = dao.store_entity(project_to_link)
     exact_data = dao.get_datatype_by_gid(gid)
     dao.store_entity(model.Links(exact_data.id, project_to_link.id))
     self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")
     
     operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
     op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
     self.assertTrue(os.path.exists(op_folder))
     sub_files = os.listdir(op_folder)
     self.assertEqual(2, len(sub_files))
     ### Validate that no more files are created than needed.
     
     self.project_service._remove_project_node_files(inserted_project.id, gid)
     sub_files = os.listdir(op_folder)
     self.assertEqual(1, len(sub_files))
     ### operation.xml file should still be there
     
     op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
     sub_files = os.listdir(op_folder)
     self.assertEqual(2, len(sub_files))
     self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
     self.project_service._remove_project_node_files(project_to_link.id, gid)
     self.assertTrue(dao.get_datatype_by_gid(gid) is None)  
     sub_files = os.listdir(op_folder)
     self.assertEqual(1, len(sub_files))
Example #13
 def test_happy_flow_launch(self):
     """
     Test that launching a simulation from UI works.
     """
     OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
     sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
     self.assertEquals(sim_result.read_data_shape(), (32, 1, self.CONNECTIVITY_NODES, 1))
Example #14
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
Example #15
    def test_noise_2d_happy_flow(self):
        """
        Test a simulation with noise.
        """
        SIMULATOR_PARAMETERS['integrator'] = u'HeunStochastic'
        noise_2d_config = [[1 for _ in xrange(self.CONNECTIVITY_NODES)] for _ in xrange(2)]
        SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_dt'] = u'0.01220703125'
        SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise'] = u'Additive'
        SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = str(noise_2d_config)
        SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_ntau'] = u'0.0'
        SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream'] = u'RandomStream'
        SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream_parameters_option_RandomStream_init_seed'] = u'42'
        filtered_params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
        self.simulator_adapter.configure(**filtered_params)
        if hasattr(self.simulator_adapter, 'algorithm'):
            self.assertEqual((2, 74, 1), self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
        else:
            self.fail("Simulator adapter was not initialized properly")
        OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
        sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
        self.assertEquals(sim_result.read_data_shape(), (32, 1, self.CONNECTIVITY_NODES, 1))
        SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = '[1]'

        filtered_params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
        self.simulator_adapter.configure(**filtered_params)
        if hasattr(self.simulator_adapter, 'algorithm'):
            self.assertEqual((1,), self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
        else:
            self.fail("Simulator adapter was not initialized properly")
        OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
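The (2, 74, 1) assertion above follows from the shape of the submitted nsig literal: a 2 x CONNECTIVITY_NODES nested list, to which the noise component is assumed to append a trailing mode axis. A sketch of that shape arithmetic, assuming 74 connectivity nodes:

import numpy

CONNECTIVITY_NODES = 74  # assumed value, consistent with the (2, 74, 1) assertion
noise_2d_config = [[1 for _ in range(CONNECTIVITY_NODES)] for _ in range(2)]

nsig = numpy.array(noise_2d_config)
assert nsig.shape == (2, CONNECTIVITY_NODES)
# Appending the trailing axis yields the asserted simulator shape.
assert nsig[:, :, numpy.newaxis].shape == (2, CONNECTIVITY_NODES, 1)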
Example #16
 def populate_values(data_list, type_, category_key, complex_dt_attributes=None):
     """
     Populate meta-data fields for data_list (list of DataTypes).
     """
     values = []
     all_field_values = ''
     for value in data_list:
         # Here we only populate with DB data, actual
         # XML check will be done after select and submit.
         entity_gid = value[2]
         actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
         display_name = ''
         if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
             display_name = actual_entity[0].display_name
         display_name = display_name + ' - ' + (value[3] or "None ")
         if value[5]:
             display_name = display_name + ' - From: ' + str(value[5])
         else:
             display_name = display_name + utils.date2string(value[4])
         if value[6]:
             display_name = display_name + ' - ' + str(value[6])
         display_name = display_name + ' - ID:' + str(value[0])
         all_field_values = all_field_values + str(entity_gid) + ','
         values.append({ABCAdapter.KEY_NAME: display_name, ABCAdapter.KEY_VALUE: entity_gid})
         if complex_dt_attributes is not None:
             ### TODO apply filter on sub-attributes
             values[-1][ABCAdapter.KEY_ATTRIBUTES] = complex_dt_attributes
     if category_key is not None:
         category = dao.get_category_by_id(category_key)
         if (not category.display) and (not category.rawinput) and len(data_list) > 1:
             values.insert(0, {ABCAdapter.KEY_NAME: "All", ABCAdapter.KEY_VALUE: all_field_values[:-1]})
     return values
Example #17
    def test_bct_all(self):
        """
        Iterate all BCT algorithms and execute them.
        """
        for adapter_instance in self.bct_adapters:
            algorithm = adapter_instance.stored_adapter
            operation = TestFactory.create_operation(algorithm=algorithm, test_user=self.test_user,
                                                     test_project=self.test_project,
                                                     operation_status=model.STATUS_STARTED)
            assert model.STATUS_STARTED == operation.status
            ### Launch BCT algorithm
            submit_data = {algorithm.parameter_name: self.connectivity.gid}
            try:
                OperationService().initiate_prelaunch(operation, adapter_instance, {}, **submit_data)
                if algorithm.classname in TestBCT.EXPECTED_TO_FAIL_VALIDATION:
                    raise Exception("Algorithm %s was expected to throw input validation "
                                    "exception, but did not!" % (algorithm.classname,))

                operation = dao.get_operation_by_id(operation.id)
                ### Check that operation status after execution is success.
                assert STATUS_FINISHED == operation.status
                ### Make sure at least one result exists for each BCT algorithm
                results = dao.get_generic_entity(model.DataType, operation.id, 'fk_from_operation')
                assert len(results) > 0

            except InvalidParameterException as excep:
                ## Some algorithms are expected to throw validation exception.
                if algorithm.classname not in TestBCT.EXPECTED_TO_FAIL_VALIDATION:
                    raise excep
Example #18
    def _populate_values(data_list, type_, category_key):
        """
        Populate meta-data fields for data_list (list of DataTypes).

        Private method, to be called recursively.
        It will receive a list of Attributes, and it will populate 'options'
        entry with data references from DB.
        """
        values = []
        all_field_values = ''
        for value in data_list:
            # Here we only populate with DB data, actual
            # XML check will be done after select and submit.
            entity_gid = value[2]
            actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
            display_name = ''
            if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
                display_name = actual_entity[0].display_name
            display_name += ' - ' + (value[3] or "None ")
            if value[5]:
                display_name += ' - From: ' + str(value[5])
            else:
                display_name += utils.date2string(value[4])
            if value[6]:
                display_name += ' - ' + str(value[6])
            display_name += ' - ID:' + str(value[0])
            all_field_values += str(entity_gid) + ','
            values.append({KEY_NAME: display_name, KEY_VALUE: entity_gid})
        if category_key is not None:
            category = dao.get_category_by_id(category_key)
            if not category.display and not category.rawinput and len(data_list) > 1:
                values.insert(0, {KEY_NAME: "All", KEY_VALUE: all_field_values[:-1]})
        return values
Example #19
    def update_metadata(self, submit_data):
        """
        Update DataType / DataTypeGroup metadata.
        Throws StructureException when the input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(
                    operation_group_id=new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(model.DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))
Example #20
    def test_db_mapping(self):
        """ Test DB storage/retrieval of a simple traited attribute"""
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
        
        # test data
        dikt = {'a': 6}
        tup = ('5', 9.348)
        dtype = numpy.dtype(float)
        json = {'a': 'asdf', 'b': {'23': '687568'}}

        test_inst = MappedTestClass()
        test_inst.dikt = copy.deepcopy(dikt)
        test_inst.tup = copy.deepcopy(tup)
        test_inst.dtype = copy.deepcopy(dtype)
        test_inst.json = copy.deepcopy(json)
        test_inst.set_operation_id(self.operation.id)
        test_inst = dao.store_entity(test_inst)

        test_inst = dao.get_generic_entity(MappedTestClass, test_inst.gid, 'gid')[0]
        self.assertEqual(test_inst.dikt, dikt)
        self.assertEqual(test_inst.tup, tup)
        self.assertEqual(test_inst.dtype, dtype)
        self.assertEqual(test_inst.json, json)
Example #21
    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        assert dao.get_datatype_by_gid(gid) is not None, "Initialization problem!"
        
        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        assert os.path.exists(op_folder)
        sub_files = os.listdir(op_folder)
        assert 2 == len(sub_files)
        ### Validate that no more files are created than needed.

        if dao.get_system_user() is None:
            dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        assert 1 == len(sub_files)
        ### operation.xml file should still be there
        
        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
        sub_files = os.listdir(op_folder)
        assert 2 == len(sub_files)
        assert dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links"
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        assert dao.get_datatype_by_gid(gid) is None
        sub_files = os.listdir(op_folder)
        assert 1 == len(sub_files)
Example #22
    def _populate_values(data_list, type_, category_key, complex_dt_attributes=None):
        """
        Populate meta-data fields for data_list (list of DataTypes).

        Private method, to be called recursively.
        It will receive a list of Attributes, and it will populate 'options'
        entry with data references from DB.
        """
        values = []
        all_field_values = []
        for id_, _, entity_gid, subject, completion_date, group, gr_name, tag1 in data_list:
            # Here we only populate with DB data, actual
            # XML check will be done after select and submit.
            actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
            display_name = ''
            if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
                display_name = actual_entity[0].display_name
            display_name += ' - ' + (subject or "None ")
            if group:
                display_name += ' - From: ' + str(group)
            else:
                display_name += utils.date2string(completion_date)
            if gr_name:
                display_name += ' - ' + str(gr_name)
            display_name += ' - ID:' + str(id_)
            all_field_values.append(str(entity_gid))
            values.append({KEY_NAME: display_name, KEY_VALUE: entity_gid})
            if complex_dt_attributes is not None:
                ### TODO apply filter on sub-attributes
                values[-1][KEY_ATTRIBUTES] = complex_dt_attributes  # copy the complex datatype attributes onto every DB option
        if category_key is not None:
            category = dao.get_category_by_id(category_key)
            if not category.display and not category.rawinput and len(data_list) > 1:
                values.insert(0, {KEY_NAME: "All", KEY_VALUE: ','.join(all_field_values)})
        return values
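The display-name assembly in _populate_values is pure string formatting and can be exercised standalone. A sketch with a fabricated row (utils.date2string is replaced by str() to keep it self-contained):

def format_display_name(row, entity_display_name=''):
    # Mirrors the labelling logic of _populate_values for one row.
    id_, _, entity_gid, subject, completion_date, group, gr_name, tag1 = row
    name = entity_display_name
    name += ' - ' + (subject or "None ")
    if group:
        name += ' - From: ' + str(group)
    else:
        name += str(completion_date)
    if gr_name:
        name += ' - ' + str(gr_name)
    name += ' - ID:' + str(id_)
    return name

row = (7, None, 'gid-123', 'John Doe', '2024-01-01', None, None, None)
print(format_display_name(row, 'TimeSeriesRegion'))
# TimeSeriesRegion - John Doe - 2024-01-01 - ID:7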
Example #23
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if the burst is still running) or remove the burst given by burst_id.
        :returns: True when the remove operation was performed and False when only a cancel was done
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        ## Remove each DataType in current burst.
        ## We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        ## Get operations linked to current burst before removing the burst or else
        ##    the burst won't be there to identify operations any more.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        # Remove burst first to delete work-flow steps which still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid, False)

        ## Remove all Operations remained.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)

        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                ### Operation removed cascaded.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!!")
        return True
Example #24
 def mark_burst_finished(self, burst_entity, error=False, success=False, cancel=False, error_message=None):
     """
     Mark Burst status field.
     Also compute 'weight' for the current burst: number of operations inside, estimated disk size, etc.
     
     :param burst_entity: BurstConfiguration to be updated, at finish time.
     :param error: When True, burst will be marked as finished with error.
     :param success: When True, burst will be marked successfully.
     :param cancel: When True, burst will be marked as user-canceled.
     """
     try:
         linked_ops_number = dao.get_operations_in_burst(burst_entity.id, is_count=True)
         linked_datatypes = dao.get_generic_entity(model.DataType, burst_entity.id, "fk_parent_burst")
         
         disk_size = linked_ops_number   # 1 KB for each operation, to account for the operation.xml files
         dt_group_sizes = dict()
         for dtype in linked_datatypes:
             if dtype.disk_size is not None:
                 disk_size = disk_size + dtype.disk_size
                 ### Prepare and compute DataTypeGroup sizes, in case of ranges.
                 if dtype.fk_datatype_group:
                     previous_group_size = dt_group_sizes.get(dtype.fk_datatype_group, 0)
                     dt_group_sizes[dtype.fk_datatype_group] = previous_group_size + dtype.disk_size
                          
         ### If there are any DataType Groups in current Burst, update their counter.
         burst_dt_groups = dao.get_generic_entity(model.DataTypeGroup, burst_entity.id, "fk_parent_burst")
         if len(burst_dt_groups) > 0:
             for dt_group in burst_dt_groups:
                 dt_group.count_results = dao.count_datatypes_in_group(dt_group.id)
                 dt_group.disk_size = dt_group_sizes.get(dt_group.id, 0)
                 dao.store_entity(dt_group)
                 
         ### Update actual Burst entity fields    
         burst_entity.disk_size = disk_size          # In KB
         burst_entity.datatypes_number = len(linked_datatypes) 
         burst_entity.workflows_number = len(dao.get_workflows_for_burst(burst_entity.id))  
         burst_entity.mark_status(success=success, error=error, cancel=cancel)
         burst_entity.error_message = error_message
         
         dao.store_entity(burst_entity)
     except Exception as excep:
         self.logger.error(excep)
         self.logger.exception("Could not correctly update Burst status and meta-data!")
         burst_entity.mark_status(error=True)
         burst_entity.error_message = "Error when updating Burst Status"
         dao.store_entity(burst_entity)
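The dt_group_sizes bookkeeping above is a plain accumulate-by-key; collections.defaultdict removes the conditional lookup. A sketch with fabricated (fk_datatype_group, disk_size) pairs:

from collections import defaultdict

# Hypothetical (fk_datatype_group, disk_size) pairs for the linked datatypes.
linked = [(1, 100), (1, 50), (2, 30), (None, 10)]

dt_group_sizes = defaultdict(int)
disk_size = 0
for group_id, size in linked:
    disk_size += size
    if group_id is not None:
        dt_group_sizes[group_id] += size

assert dict(dt_group_sizes) == {1: 150, 2: 30}
assert disk_size == 190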
Example #25
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
        
    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group.isnot(None)).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
Example #26
def update_localconnectivity_metadata(dt_id):
    dt = dao.get_generic_entity(LocalConnectivity, dt_id)[0]

    mtx = dt.matrix
    info_dict = SparseMatrix.extract_sparse_matrix_metadata(mtx)

    data_group_path = SparseMatrix.ROOT_PATH + 'matrix'
    dt.set_metadata(info_dict, '', True, data_group_path)
Example #27
    def launch_burst(self, burst_configuration, simulator_index, simulator_id, user_id, launch_mode=LAUNCH_NEW):
        """
        Given a burst configuration and all the necessary data do the actual launch.
        
        :param burst_configuration: BurstConfiguration   
        :param simulator_index: the position within the workflow step list that the simulator will take. This is needed
            so that the rest of the portlet workflow steps know which steps their dynamic parameters come from.
        :param simulator_id: the id of the simulator adapter as stored in the DB. It's needed to load the simulator algo
            group and category that are then passed to the launcher's prepare_operation method.
        :param user_id: the id of the user that launched this burst
        :param launch_mode: new/branch/continue
        """
        ## 1. Prepare BurstConfiguration entity
        if launch_mode == LAUNCH_NEW:
            ## Fully new entity for new simulation
            burst_config = burst_configuration.clone()
            if burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                burst_config.name = "simulation_" + str(new_id)
        else:
            ## Branch or Continue simulation
            burst_config = burst_configuration
            simulation_state = dao.get_generic_entity(
                SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS, burst_config.id, "fk_parent_burst"
            )
            if simulation_state is None or len(simulation_state) < 1:
                exc = BurstServiceException(
                    "Simulation State not found for %s, " "thus we are unable to branch from it!" % burst_config.name
                )
                self.logger.error(exc)
                raise exc

            simulation_state = simulation_state[0]
            burst_config.update_simulation_parameter("simulation_state", simulation_state.gid)
            burst_config = burst_configuration.clone()

            count = dao.count_bursts_with_name(burst_config.name, burst_config.fk_project)
            burst_config.name = burst_config.name + "_" + launch_mode + str(count)

        ## 2. Create Operations and do the actual launch
        if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
            ## New Burst entry in the history
            burst_id = self._store_burst_config(burst_config)
            thread = threading.Thread(
                target=self._async_launch_and_prepare,
                kwargs={
                    "burst_config": burst_config,
                    "simulator_index": simulator_index,
                    "simulator_id": simulator_id,
                    "user_id": user_id,
                },
            )
            thread.start()
            return burst_id, burst_config.name
        else:
            ## Continue simulation
            ## TODO
            return burst_config.id, burst_config.name
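For new and branched bursts the method returns immediately while the heavy preparation runs on a background thread. A minimal sketch of that fire-and-forget pattern (the function names here are illustrative, not the real service API):

import threading

def _async_launch_and_prepare(burst_name=None, user_id=None):
    # Stand-in for the real preparation and launch work.
    print("preparing burst %s for user %s" % (burst_name, user_id))

def launch_async(burst_name, user_id):
    thread = threading.Thread(target=_async_launch_and_prepare,
                              kwargs={"burst_name": burst_name, "user_id": user_id})
    thread.start()
    # The caller gets control back before preparation finishes.
    return burst_name

launch_async("simulation_1", 42)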
Example #28
    def tearDown(self):
        """
        Revert changes settings and remove recently imported algorithms
        """
        TvbProfile.current.web.CURRENT_DIR = self.old_current_dir
        adapters_init.__xml_folders__ = self.old_xml_path

        for group in dao.get_generic_entity(model.AlgorithmGroup, "simple", "algorithm_param_name"):
            dao.remove_entity(model.AlgorithmGroup, group.id)
Example #29
 def launch(self, connectivity_measure, region_map=None, shell_surface=None):
     if region_map is None or region_map.connectivity.number_of_regions != connectivity_measure.connectivity.number_of_regions:
         # We have no region map, or the one we have is not compatible with the measure.
         # First try to find the associated region map.
         region_maps = dao.get_generic_entity(RegionMapping, connectivity_measure.connectivity.gid, '_connectivity')
         if region_maps:
             region_map = region_maps[0]
         # else: TODO fall back on any region map with the right number of nodes
     return SurfaceViewer.launch(self, region_map.surface, region_map, connectivity_measure, shell_surface)
Example #30
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return
        user = dao.get_user_for_datatype(datatype.id)
        freed_space = datatype.disk_size or 0
        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            freed_space = dao.get_datatype_group_disk_size(datatype.id)
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)
            freed_space = dao.get_datatype_group_disk_size(datatype.id)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(model.Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, operation_id)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

        user.used_disk_space = user.used_disk_space - freed_space
        dao.store_entity(user)
Example #31
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False, existing_dt_links=None):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        if datatype.parent_operation.fk_launched_in != int(project_id):
            self.logger.warning("Datatype with GUID [%s] has been moved to another project and does "
                                "not need to be deleted anymore." % datatype_gid)
            return

        is_datatype_group = False
        datatype_group = None
        new_dt_links = []

        # Datatype Groups were already handled when the first DatatypeMeasureIndex has been found
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            datatype_group = datatype
        # Found the first DatatypeMeasureIndex from a group
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            # We load it this way to make sure we have the 'fk_operation_group' in every case
            datatype_group_gid = dao.get_datatype_by_id(datatype.fk_datatype_group).gid
            datatype_group = h5.load_entity_by_gid(datatype_group_gid)

        operations_set = [datatype.fk_from_operation]
        correct = True

        if is_datatype_group:
            operations_set = [datatype_group.fk_from_operation]
            self.logger.debug("Removing datatype group %s" % datatype_group)

            datatypes = self.get_all_datatypes_from_data(datatype_group)
            first_datatype = datatypes[0]

            if hasattr(first_datatype, 'fk_source_gid'):
                ts = h5.load_entity_by_gid(first_datatype.fk_source_gid)
                ts_group = dao.get_datatypegroup_by_op_group_id(ts.parent_operation.fk_operation_group)
                dm_group = datatype_group
            else:
                dt_measure_index = get_class_by_name("{}.{}".format(DATATYPE_MEASURE_INDEX_MODULE,
                                                                    DATATYPE_MEASURE_INDEX_CLASS))
                dm_group = dao.get_datatype_measure_group_from_ts_from_pse(first_datatype.gid, dt_measure_index)
                ts_group = datatype_group

            links = []

            if ts_group:
                links.extend(dao.get_links_for_datatype(ts_group.id))
                correct = correct and self._remove_operation_group(ts_group.fk_operation_group, project_id,
                                                                   skip_validation, operations_set, links)

            if dm_group:
                links.extend(dao.get_links_for_datatype(dm_group.id))
                correct = correct and self._remove_operation_group(dm_group.fk_operation_group, project_id,
                                                                   skip_validation, operations_set, links)

            if len(links) > 0:
                # We want to get the links for the first TSIndex directly
                # This code works for all cases
                datatypes = dao.get_datatype_in_group(ts_group.id)
                ts = datatypes[0]

                new_dt_links = self._add_links_for_datatype_references(ts, links[0].fk_to_project, links[0].id,
                                                                       existing_dt_links)

        else:
            self.logger.debug("Removing datatype %s" % datatype)
            links = dao.get_links_for_datatype(datatype.id)

            if len(links) > 0:
                new_dt_links = self._add_links_for_datatype_references(datatype, links[0].fk_to_project, links[0].id,
                                                                       existing_dt_links)

            self._remove_project_node_files(project_id, datatype.gid, links, skip_validation)

        # Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                # Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(Operation, operation_id)
            # Make sure Operation folder is removed
            self.storage_interface.remove_operation_data(project.name, operation_id)

        self.storage_interface.push_folder_to_sync(project.name)
        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
        return new_dt_links
Example #32
    def launch(self,
               model,
               model_parameters,
               integrator,
               integrator_parameters,
               connectivity,
               monitors,
               monitors_parameters=None,
               surface=None,
               surface_parameters=None,
               stimulus=None,
               coupling=None,
               coupling_parameters=None,
               initial_conditions=None,
               conduction_speed=None,
               simulation_length=0,
               simulation_state=None):
        """
        Called from the GUI to launch a simulation.
          *: string class name of chosen model, etc...
          *_parameters: dictionary of parameters for chosen model, etc...
          connectivity: tvb.datatypes.connectivity.Connectivity object.
          surface: tvb.datatypes.surfaces.CorticalSurface: or None.
          stimulus: tvb.datatypes.patterns.* object
        """
        result_datatypes = dict()
        start_time = self.algorithm.current_step * self.algorithm.integrator.dt

        self.algorithm.configure(full_configure=False)
        if simulation_state is not None:
            simulation_state.fill_into(self.algorithm)

        region_map = dao.get_generic_entity(region_mapping.RegionMapping,
                                            connectivity.gid, '_connectivity')
        region_volume_map = dao.get_generic_entity(
            region_mapping.RegionVolumeMapping, connectivity.gid,
            '_connectivity')
        if len(region_map) < 1:
            region_map = None
        else:
            region_map = region_map[0]
        if len(region_volume_map) < 1:
            region_volume_map = None
        else:
            region_volume_map = region_volume_map[0]

        for monitor in self.algorithm.monitors:
            m_name = monitor.__class__.__name__
            ts = monitor.create_time_series(self.storage_path, connectivity,
                                            surface, region_map,
                                            region_volume_map)
            self.log.debug("Monitor %s created the TS %s" % (m_name, ts))
            # Now check if the monitor will return results for each state variable, in which case store
            # the labels for these state variables.
            # TODO: move these into the monitors as well,
            #   and replace the `if ts.user_tag_1` check with something better (e.g. pre_ex & post)
            state_variable_dimension_name = ts.labels_ordering[1]
            if ts.user_tag_1:
                ts.labels_dimensions[
                    state_variable_dimension_name] = ts.user_tag_1.split(';')

            elif m_name in self.HAVE_STATE_VARIABLES:
                selected_state_vars = [
                    self.algorithm.model.state_variables[idx]
                    for idx in monitor.voi
                ]
                ts.labels_dimensions[
                    state_variable_dimension_name] = selected_state_vars

            ts.start_time = start_time
            result_datatypes[m_name] = ts

        #### Create Simulator State entity and persist it in DB. H5 file will be empty now.
        if not self._is_group_launch():
            simulation_state = SimulationState(storage_path=self.storage_path)
            self._capture_operation_results([simulation_state])

        ### Run simulation
        self.log.debug("%s: Starting simulation..." % str(self))
        for result in self.algorithm(simulation_length=simulation_length):
            for j, monitor in enumerate(monitors):
                if result[j] is not None:
                    result_datatypes[monitor].write_time_slice([result[j][0]])
                    result_datatypes[monitor].write_data_slice([result[j][1]])

        self.log.debug(
            "%s: Completed simulation, starting to store simulation state " %
            str(self))
        ### Populate H5 file for simulator state. This step could also be done while running sim, in background.
        if not self._is_group_launch():
            simulation_state.populate_from(self.algorithm)
            self._capture_operation_results([simulation_state])

        self.log.debug("%s: Simulation state persisted, returning results " %
                       str(self))
        final_results = []
        for result in result_datatypes.values():
            result.close_file()
            final_results.append(result)
        self.log.info("%s: Adapter simulation finished!!" % str(self))
        return final_results
Example no. 33
def load_dt(dt_id):
    dt = dao.get_datatype_by_id(dt_id)
    dt_idx = dao.get_generic_entity(dt.module + '.' + dt.type, dt_id)[0]
    dt_ht = h5.load_from_index(dt_idx)
    return dt_ht
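
The helper above resolves a DataType id to its index row, then rebuilds the in-memory HasTraits entity from the stored H5 file. A minimal usage sketch (the id is hypothetical and assumes a populated TVB database):

    dt_entity = load_dt(42)          # hypothetical id of an existing DataType row
    print(type(dt_entity).__name__)  # e.g. TimeSeriesRegion, rebuilt from its H5 file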
Example no. 34
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code versions and try to update the value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0",
                                                 "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(
                group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## We can live with this column keeping only its default value; do not abort the startup.
        logger = get_logger(__name__)
        logger.exception(excep)

    session = SA_SESSIONMAKER()
    session.execute(
        text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""
             ))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        # TODO: fix me
        # for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group is not None).all():
        #     session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that the SimulationState table is not yet created, e.g. when the user comes from version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
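
For reference, the CASE expression in the UPDATE statement above maps the old status names onto the new ordered labels; a plain-Python equivalent of that mapping:

    # Plain-Python equivalent of the SQL CASE above (reference only):
    STATUS_RENAMES = {'FINISHED': '4-FINISHED', 'STARTED': '3-STARTED',
                      'CANCELED': '2-CANCELED', 'ERROR': '1-ERROR'}
    new_status = STATUS_RENAMES.get(old_status, '1-ERROR')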
Example no. 35
def load_dt(dt_id):
    dt = dao.get_datatype_by_id(dt_id)
    return dao.get_generic_entity(dt.module + '.' + dt.type, dt_id)
Example no. 36
    def prepare_parameters(datatype_group_gid,
                           back_page,
                           color_metric=None,
                           size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: String referring to metric to apply on colors
        :param size_metric:  String referring to metric to apply on sizes

        :returns: `ContextDiscretePSE`
        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid.hex)
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)

        name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range1)
        name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric,
                                         size_metric, back_page)
        pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                              only_numbers1 and only_numbers2)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        fake_numbers1 = dict(
            list(zip(values1, list(range(len(list(values1)))))))
        fake_numbers2 = dict(
            list(zip(values2, list(range(len(list(values2)))))))

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            range_values = eval(operation_.range_values)
            key_1 = DiscretePSEAdapter.get_value_on_axe(
                range_values, only_numbers1, name1, fake_numbers1)
            key_2 = DiscretePSEAdapter.get_value_on_axe(
                range_values, only_numbers2, name2, fake_numbers2)

            datatype = None
            if operation_.status == STATUS_FINISHED:
                pse_filter = FilterChain(
                    fields=[FilterChain.datatype + '.type'],
                    operations=['!='],
                    values=['SimulatorIndex'])
                datatypes = dao.get_results_for_operation(
                    operation_.id, pse_filter)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasureIndex":
                        # Load proper entity class from DB.
                        measures = dao.get_generic_entity(
                            DatatypeMeasureIndex, datatype.gid)
                    else:
                        measures = dao.get_generic_entity(
                            DatatypeMeasureIndex, datatype.gid, 'source_gid')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(
                operation_, datatype)

        pse_context.fill_object(final_dict)
        # datatypes_dict is not used when drawing the PSE and causes problems
        # for NaN values, so remove it before creating the JSON
        pse_context.datatypes_dict = {}
        if not only_numbers1:
            pse_context.values_x = list(range(len(list(values1))))
        if not only_numbers2:
            pse_context.values_y = list(range(len(list(values2))))
        return pse_context
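
A usage sketch for the method above (the gid is hypothetical; prepare_parameters expects a uuid.UUID, since it calls `.hex` on it, and the group must exist in the current database):

    import uuid

    group_gid = uuid.uuid4()  # stands in for the gid of an existing DataTypeGroup
    pse_context = DiscretePSEAdapter.prepare_parameters(group_gid, back_page='burst')
    # pse_context is a ContextDiscretePSE, ready to be serialised for the PSE viewer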
Example no. 37
    def retrieve_project_full(self,
                              project_id,
                              applied_filters=None,
                              current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(
            project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (
            1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters,
                                                  start_idx,
                                                  OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(
                            OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(
                            one_op[3])
                        result[
                            "datatype_group_gid"] = datatype_group.gid if datatype_group is not None else None
                        result["gid"] = operation_group.gid
                        # Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = AlgorithmService().get_visualizers_for_group(datatype_group.gid) \
                            if datatype_group is not None else None
                    except Exception:
                        self.logger.exception(
                            "We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result[
                        "start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] -
                                                          result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(
                        result['id'])
                    result['results'] = [dt for dt in datatype_results]
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception(
                    "Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
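
The page count above is a ceiling division written as floor division plus a remainder check; a worked instance, assuming OPERATIONS_PAGE_SIZE is 20:

    # Worked instance of the pages_no formula (assuming OPERATIONS_PAGE_SIZE == 20):
    total_filtered = 45
    pages_no = total_filtered // 20 + (1 if total_filtered % 20 else 0)  # == 3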
Example no. 38
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning(
                "Attempt to delete DT[%s] which no longer exists." %
                datatype_gid)
            return

        is_datatype_group = False
        datatype_group = None
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            datatype_group = datatype
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype_group = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]
        correct = True

        if is_datatype_group:
            operations_set = [datatype_group.fk_from_operation]
            self.logger.debug("Removing datatype group %s" % datatype_group)
            if datatype_group.fk_parent_burst:
                burst = dao.get_generic_entity(BurstConfiguration,
                                               datatype_group.fk_parent_burst,
                                               'gid')[0]
                dao.remove_entity(BurstConfiguration, burst.id)
                if burst.fk_metric_operation_group:
                    correct = correct and self._remove_operation_group(
                        burst.fk_metric_operation_group, project_id,
                        skip_validation, operations_set)

                if burst.fk_operation_group:
                    correct = correct and self._remove_operation_group(
                        burst.fk_operation_group, project_id, skip_validation,
                        operations_set)

            else:
                self._remove_datatype_group_dts(project_id, datatype_group.id,
                                                skip_validation,
                                                operations_set)

                datatype_group = dao.get_datatype_group_by_gid(
                    datatype_group.gid)
                dao.remove_entity(DataTypeGroup, datatype.id)
                correct = correct and dao.remove_entity(
                    OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid,
                                            skip_validation)

        # Remove Operation entities that no other DataType needs any more.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id,
                                                  "fk_from_operation")
            if len(dependent_dt) > 0:
                # Do not remove the Operation while DataTypes still refer to it.
                continue
            op_burst = dao.get_burst_for_operation_id(operation_id)
            if op_burst:
                correct = correct and dao.remove_entity(
                    BurstConfiguration, op_burst.id)
            correct = correct and dao.remove_entity(Operation, operation_id)
            # Make sure Operation folder is removed
            self.storage_interface.remove_operation_data(
                project.name, operation_id)

        self.storage_interface.push_folder_to_sync(project.name)
        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " +
                                          str(datatype_gid))
Example no. 39
    def _edit_data(self, datatype, new_data, from_group=False):
        # type: (DataType, dict, bool) -> None
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(
                OperationGroup,
                new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(OperationGroup,
                                                      new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name +
                                             "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)
            op_folder = self.storage_interface.get_project_folder(
                operation.project.name, str(operation.id))
            vm_gid = operation.view_model_gid
            view_model_file = h5.determine_filepath(vm_gid, op_folder)
            if view_model_file:
                view_model_class = H5File.determine_type(view_model_file)
                view_model = view_model_class()
                with ViewModelH5(view_model_file, view_model) as f:
                    ga = f.load_generic_attributes()
                    ga.operation_tag = new_group_name
                    f.store_generic_attributes(ga, False)
            else:
                self.logger.warning(
                    "Could not find ViewModel H5 file for op: {}".format(
                        operation))

        # 2. Update GenericAttributes in the associated H5 files:
        h5_path = h5.path_for_stored_index(datatype)
        with H5File.from_file(h5_path) as f:
            ga = f.load_generic_attributes()

            ga.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
            ga.state = new_data[DataTypeOverlayDetails.DATA_STATE]
            ga.operation_tag = new_group_name
            if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
                ga.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
            if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
                ga.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
            if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
                ga.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
            if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
                ga.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
            if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
                ga.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

            f.store_generic_attributes(ga, False)

        # 3. Update MetaData in DT Index DB as well.
        datatype.fill_from_generic_attributes(ga)
        dao.store_entity(datatype)
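
A sketch of the new_data dictionary the method above expects, built from the keys it reads (all values hypothetical):

    # Hypothetical payload for _edit_data, using the keys referenced above:
    new_data = {
        CommonDetails.CODE_OPERATION_TAG: "my-operation-group",
        DataTypeOverlayDetails.DATA_SUBJECT: "subject-01",
        DataTypeOverlayDetails.DATA_STATE: "INTERMEDIATE",
        DataTypeOverlayDetails.DATA_TAG_1: "first tag",
    }
    # When from_group=True, CommonDetails.CODE_OPERATION_GROUP_ID must be present as well.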
Example no. 40
# Imports reconstructed so the snippet runs standalone (module paths assume TVB's 2.x layout):
from time import sleep

from tvb.adapters.analyzers.fourier_adapter import FourierAdapter
from tvb.adapters.datatypes.db.time_series import TimeSeriesRegionIndex
from tvb.basic.logger.builder import get_logger
from tvb.basic.profile import TvbProfile
from tvb.core.adapters.abcadapter import ABCAdapter
from tvb.core.entities.model.model_operation import STATUS_FINISHED
from tvb.core.entities.storage import dao
from tvb.core.entities.transient.structure_entities import DataTypeMetaData
from tvb.core.services.operation_service import OperationService

# Before starting this, we need to have TVB web interface launched at least once
# (to have a default project, user, etc setup)
if __name__ == "__main__":
    TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)
    log = get_logger(__name__)

    # This ID of a project needs to exists in DB, and it can be taken from the WebInterface:
    project = dao.get_project_by_id(1)
    # Prepare the Adapter
    adapter_instance = ABCAdapter.build_adapter_from_class(FourierAdapter)

    # Prepare the input algorithms as if they were coming from web UI submit:
    time_series = dao.get_generic_entity(TimeSeriesRegionIndex,
                                         DataTypeMetaData.DEFAULT_SUBJECT,
                                         "subject")
    if len(time_series) < 1:
        log.error("We could not find a compatible TimeSeries Datatype!")
    launch_args = {"_time_series": time_series[0].gid, "_segment_length": 100}

    # launch an operation and have the results stored both in DB and on disk
    launched_operation = OperationService().fire_operation(
        adapter_instance, project.administrator, project.id, **launch_args)[0]

    # wait for the operation to finish
    while not launched_operation.has_finished:
        sleep(5)
        launched_operation = dao.get_operation_by_id(launched_operation.id)

    if launched_operation.status == STATUS_FINISHED:
        # Assumed minimal body (the source snippet is truncated here):
        log.info("Operation %s finished successfully." % launched_operation.id)
Example no. 41
## Before starting this, we need to have TVB web interface launched at least once (to have a default project, user, etc)
if __name__ == "__main__":

    flow_service = FlowService()
    operation_service = OperationService()

    ## This ID of a project needs to exists in DB, and it can be taken from the WebInterface:
    project = dao.get_project_by_id(1)

    ## Prepare the Adapter
    adapter_instance = ABCAdapter.build_adapter_from_class(SimulatorAdapter)

    ## Prepare the input algorithms as if they were coming from web UI submit:
    ## TODO create helper methods for working with objects instead of strings.
    connectivity = dao.get_generic_entity(Connectivity,
                                          DataTypeMetaData.DEFAULT_SUBJECT,
                                          "subject")[0]
    launch_args = dict()
    for f in adapter_instance.flaten_input_interface():
        launch_args[f["name"]] = str(f["default"]) if 'default' in f else None
    launch_args["connectivity"] = connectivity.gid
    launch_args[
        "model_parameters_option_Generic2dOscillator_variables_of_interest"] = 'V'

    if len(sys.argv) > 1:
        launch_args[
            "model_parameters_option_Generic2dOscillator_tau"] = sys.argv[1]

    ## launch an operation and have the results stored both in DB and on disk
    launched_operation = flow_service.fire_operation(adapter_instance,
                                                     project.administrator,
                                                     project.id, **launch_args)[0]
Example no. 42
def _adapt_simulation_monitor_params():
    """
    For previous simulation with EEG monitor, adjust the change of input parameters.
    """
    session = SA_SESSIONMAKER()

    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"

    try:
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old +
                                             '"%')).all()
        all_bursts = dict()

        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]

                rm = dao.get_generic_entity(RegionMapping, connectivity_guid,
                                            "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType,
                                            old_projection_guid, "gid")[0]

                if dt.type == 'ProjectionSurfaceEEG':
                    LOGGER.debug("Previous Prj is surface: " +
                                 old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    new_projection_guid = session.execute(
                        text("""SELECT DT.gid
                            FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                                 "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                            WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and
                                  PM._sources='""" + rm._surface + """' and
                                  DTO.gid='""" + old_projection_guid +
                             """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " +
                                 str(new_projection_guid))

                sensors_guid = session.execute(
                    text("""SELECT _sensors
                            FROM "MAPPED_PROJECTION_MATRIX_DATA"
                            WHERE id = '""" + str(dt.id) +
                         """';""")).fetchall()[0][0]

                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)

                eeg_op.parameters = json.dumps(op_params,
                                               cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)

                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {
                        'value': str(new_projection_guid)
                    }
                    burst.simulator_configuration[param_eeg_sensors] = {
                        'value': str(sensors_guid)
                    }
                    burst.simulator_configuration[param_eeg_rm] = {
                        'value': str(rm.gid)
                    }
                    burst._simulator_configuration = json.dumps(
                        burst.simulator_configuration,
                        cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(eeg_op))

        session.add_all(all_eeg_ops)
        session.add_all(list(all_bursts.values()))
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
Example no. 43
import random
from time import sleep

import requests
from tvb.basic.logger.builder import get_logger
from tvb.basic.profile import TvbProfile
from tvb.core.entities.model.model_operation import Operation
from tvb.core.entities.storage import dao
from tvb.core.services.kube_service import KubeService

log = get_logger(__name__)

if __name__ == '__main__':
    TvbProfile.set_profile(TvbProfile.WEB_PROFILE, True)
    if TvbProfile.current.web.OPENSHIFT_DEPLOY:
        log.info("Start operation assigner")
        while True:
            sleep(TvbProfile.current.OPERATIONS_BACKGROUND_JOB_INTERVAL)
            operations = dao.get_generic_entity(Operation, True, "queue_full")
            log.info(
                "Found {} operations with the queue full flag set.".format(
                    len(operations)))
            if len(operations) == 0:
                continue
            pods, auth_header = KubeService.get_pods(
                TvbProfile.current.web.
                OPENSHIFT_PROCESSING_OPERATIONS_APPLICATION)
            if pods:
                random.shuffle(pods)
                pods_no = len(pods)
                operations.sort(key=lambda l_operation: l_operation.id)
                for index, operation in enumerate(
                        operations[0:TvbProfile.current.MAX_THREADS_NUMBER *
                                   pods_no]):
Example no. 44
    def launch(self, view_model):
        # type: (ConnectivityAnnotationsViewModel) -> dict

        annotations_index = self.load_entity_by_gid(
            view_model.annotations_index)

        if view_model.connectivity_index is None:
            connectivity_index = self.load_entity_by_gid(
                annotations_index.fk_connectivity_gid)
        else:
            connectivity_index = self.load_entity_by_gid(
                view_model.connectivity_index)

        if view_model.region_mapping_index is None:
            region_map = dao.get_generic_entity(RegionMappingIndex,
                                                connectivity_index.gid,
                                                'fk_connectivity_gid')
            if len(region_map) < 1:
                raise LaunchException(
                    "Can not launch this viewer unless we have at least a RegionMapping for the current Connectivity!"
                )
            region_mapping_index = region_map[0]
        else:
            region_mapping_index = self.load_entity_by_gid(
                view_model.region_mapping_index)

        boundary_url = SurfaceURLGenerator.get_url_for_region_boundaries(
            region_mapping_index.fk_surface_gid, region_mapping_index.gid,
            self.stored_adapter.id)

        surface_h5 = h5.h5_file_for_gid(region_mapping_index.fk_surface_gid)
        assert isinstance(surface_h5, SurfaceH5)
        url_vertices_pick, url_normals_pick, url_triangles_pick = SurfaceURLGenerator.get_urls_for_pick_rendering(
            surface_h5)
        url_vertices, url_normals, _, url_triangles, url_region_map = SurfaceURLGenerator.get_urls_for_rendering(
            surface_h5, region_mapping_index.gid)

        params = dict(title="Connectivity Annotations Visualizer",
                      annotationsTreeUrl=URLGenerator.build_url(
                          self.stored_adapter.id, 'tree_json',
                          view_model.annotations_index),
                      urlTriangleToRegion=URLGenerator.build_url(
                          self.stored_adapter.id, "get_triangles_mapping",
                          region_mapping_index.gid),
                      urlActivationPatterns=URLGenerator.paths2url(
                          view_model.annotations_index,
                          "get_activation_patterns"),
                      minValue=0,
                      maxValue=connectivity_index.number_of_regions - 1,
                      urlColors=json.dumps(url_region_map),
                      urlVerticesPick=json.dumps(url_vertices_pick),
                      urlTrianglesPick=json.dumps(url_triangles_pick),
                      urlNormalsPick=json.dumps(url_normals_pick),
                      brainCenter=json.dumps(surface_h5.center()),
                      urlVertices=json.dumps(url_vertices),
                      urlTriangles=json.dumps(url_triangles),
                      urlNormals=json.dumps(url_normals),
                      urlRegionBoundaries=boundary_url)

        return self.build_display_result(
            "annotations/annotations_view",
            params,
            pages={"controlPage": "annotations/annotations_controls"})
Example no. 45
    def launch_burst(self,
                     burst_configuration,
                     simulator_index,
                     simulator_id,
                     user_id,
                     launch_mode=LAUNCH_NEW):
        """
        Given a burst configuration and all the necessary data do the actual launch.
        
        :param burst_configuration: BurstConfiguration   
        :param simulator_index: the position the simulator will take within the workflow step list. This is needed
            so that the rest of the portlet workflow steps know which steps their dynamic parameters come from.
        :param simulator_id: the id of the simulator adapter as stored in the DB. It's needed to load the simulator algo
            group and category that are then passed to the launcher's prepare_operation method.
        :param user_id: the id of the user that launched this burst
        :param launch_mode: new/branch/continue
        """
        ## 1. Prepare BurstConfiguration entity
        if launch_mode == LAUNCH_NEW:
            ## Fully new entity for new simulation
            burst_config = burst_configuration.clone()
            if burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                burst_config.name = 'simulation_' + str(new_id)
        else:
            ## Branch or Continue simulation
            burst_config = burst_configuration
            simulation_state = dao.get_generic_entity(
                SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS,
                burst_config.id, "fk_parent_burst")
            if simulation_state is None or len(simulation_state) < 1:
                exc = BurstServiceException(
                    "Simulation State not found for %s, "
                    "thus we are unable to branch from it!" %
                    burst_config.name)
                self.logger.error(exc)
                raise exc

            simulation_state = simulation_state[0]
            burst_config.update_simulation_parameter("simulation_state",
                                                     simulation_state.gid)
            burst_config = burst_configuration.clone()

            count = dao.count_bursts_with_name(burst_config.name,
                                               burst_config.fk_project)
            burst_config.name = burst_config.name + "_" + launch_mode + str(
                count)

        ## 2. Create Operations and do the actual launch
        if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]:
            ## New Burst entry in the history
            burst_id = self._store_burst_config(burst_config)
            thread = threading.Thread(target=self._async_launch_and_prepare,
                                      kwargs={
                                          'burst_config': burst_config,
                                          'simulator_index': simulator_index,
                                          'simulator_id': simulator_id,
                                          'user_id': user_id
                                      })
            thread.start()
            return burst_id, burst_config.name
        else:
            ## Continue simulation
            ## TODO
            return burst_config.id, burst_config.name
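
A usage sketch for the method above (identifiers hypothetical; with LAUNCH_NEW the call returns immediately while a background thread prepares and fires the operations):

    # Usage sketch (identifiers hypothetical):
    burst_id, burst_name = burst_service.launch_burst(burst_config, simulator_index=0,
                                                      simulator_id=simulator_algorithm_id,
                                                      user_id=logged_user.id)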
Example no. 46
    def remove_datatype(self, skip_validation=False):
        """
        Called when a TimeSeries is removed.
        """
        if not skip_validation:
            key = 'fk_source_gid'

            associated_fcd = dao.get_generic_entity(FcdIndex,
                                                    self.handled_datatype.gid,
                                                    key)
            associated_cv = dao.get_generic_entity(CovarianceIndex,
                                                   self.handled_datatype.gid,
                                                   key)
            associated_pca = dao.get_generic_entity(PrincipalComponentsIndex,
                                                    self.handled_datatype.gid,
                                                    key)
            associated_is = dao.get_generic_entity(IndependentComponentsIndex,
                                                   self.handled_datatype.gid,
                                                   key)
            associated_cc = dao.get_generic_entity(CrossCorrelationIndex,
                                                   self.handled_datatype.gid,
                                                   key)
            associated_fr = dao.get_generic_entity(FourierSpectrumIndex,
                                                   self.handled_datatype.gid,
                                                   key)
            associated_wv = dao.get_generic_entity(WaveletCoefficientsIndex,
                                                   self.handled_datatype.gid,
                                                   key)
            associated_cs = dao.get_generic_entity(CoherenceSpectrumIndex,
                                                   self.handled_datatype.gid,
                                                   key)
            associated_coef = dao.get_generic_entity(
                CorrelationCoefficientsIndex, self.handled_datatype.gid, key)
            associated_dtm = dao.get_generic_entity(DatatypeMeasureIndex,
                                                    self.handled_datatype.gid,
                                                    key)
            associated_ccs = dao.get_generic_entity(
                ComplexCoherenceSpectrumIndex, self.handled_datatype.gid, key)

            msg = "TimeSeries cannot be removed as it is used by at least one "

            if len(associated_fcd) > 0:
                raise RemoveDataTypeException(msg + " FCD.")
            if len(associated_cv) > 0:
                raise RemoveDataTypeException(msg + " Covariance.")
            if len(associated_pca) > 0:
                raise RemoveDataTypeException(msg + " PrincipalComponents.")
            if len(associated_is) > 0:
                raise RemoveDataTypeException(msg + " IndependentComponents.")
            if len(associated_cc) > 0:
                raise RemoveDataTypeException(msg + " CrossCorrelation.")
            if len(associated_fr) > 0:
                raise RemoveDataTypeException(msg + " FourierSpectrum.")
            if len(associated_wv) > 0:
                raise RemoveDataTypeException(msg + " WaveletCoefficients.")
            if len(associated_cs) > 0:
                raise RemoveDataTypeException(msg + " CoherenceSpectrum.")
            if len(associated_coef) > 0:
                raise RemoveDataTypeException(msg + " CorrelationCoefficient.")
            if len(associated_dtm) > 0:
                raise RemoveDataTypeException(msg + " DatatypeMeasure.")
            if len(associated_ccs) > 0:
                raise RemoveDataTypeException(msg +
                                              " ComplexCoherenceSpectrum.")

        ABCRemover.remove_datatype(self, skip_validation)
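
The eleven lookups above all follow the same (index class, gid, 'fk_source_gid') pattern; a data-driven equivalent, for illustration only (the exception messages here use the index class names):

    # Illustrative data-driven rewrite of the checks above:
    linked_index_types = [FcdIndex, CovarianceIndex, PrincipalComponentsIndex,
                          IndependentComponentsIndex, CrossCorrelationIndex,
                          FourierSpectrumIndex, WaveletCoefficientsIndex,
                          CoherenceSpectrumIndex, CorrelationCoefficientsIndex,
                          DatatypeMeasureIndex, ComplexCoherenceSpectrumIndex]
    for index_type in linked_index_types:
        if dao.get_generic_entity(index_type, self.handled_datatype.gid, 'fk_source_gid'):
            raise RemoveDataTypeException(msg + " " + index_type.__name__ + ".")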
Example no. 47
    def prepare_parameters(datatype_group_gid,
                           back_page,
                           color_metric=None,
                           size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: String referring to metric to apply on colors
        :param size_metric:  String referring to metric to apply on sizes

        :returns: `ContextDiscretePSE`

        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)

        range1_name, range1_values, range1_labels = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range1)
        range2_name, range2_values, range2_labels = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric,
                                         size_metric, back_page)
        pse_context.setRanges(range1_name, range1_values, range1_labels,
                              range2_name, range2_values, range2_labels)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            key_2 = model.RANGE_MISSING_STRING
            if range2_name is not None:
                key_2 = range_values[range2_name]

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(
                            DatatypeMeasure, datatype.id)
                    else:
                        measures = dao.get_generic_entity(
                            DatatypeMeasure, datatype.gid,
                            '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(
                operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not used when drawing the PSE and causes problems
        ## for NaN values, so remove it before creating the JSON
        pse_context.datatypes_dict = {}
        return pse_context
Example no. 48
    def launch_simulation(self, launch_mode, **data):
        current_form = SimulatorFinalFragment()
        try:
            current_form.fill_from_post(data)
        except Exception as exc:
            self.logger.exception(exc)
            return {'error': str(exc)}

        burst_name = current_form.simulation_name.value
        session_stored_simulator = common.get_from_session(
            common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(
            common.KEY_IS_SIMULATOR_COPY)

        project = common.get_current_project()
        user = common.get_logged_user()

        session_burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if burst_name != 'none_undefined':
            session_burst_config.name = burst_name

        burst_config_to_store = session_burst_config
        simulation_state_index_gid = None
        if launch_mode == self.simulator_service.LAUNCH_NEW:
            if session_burst_config.name is None:
                new_id = dao.get_max_burst_id() + 1
                session_burst_config.name = 'simulation_' + str(new_id)
            if is_simulator_copy:
                burst_config_to_store = session_burst_config.clone()
        else:
            burst_config_to_store = session_burst_config.clone()
            count = dao.count_bursts_with_name(session_burst_config.name,
                                               session_burst_config.project_id)
            session_burst_config.name = session_burst_config.name + "_" + launch_mode + str(
                count)
            simulation_state_index = dao.get_generic_entity(
                SimulationStateIndex.__module__ + "." +
                SimulationStateIndex.__name__, session_burst_config.id,
                "fk_parent_burst")
            if simulation_state_index is None or len(
                    simulation_state_index) < 1:
                exc = BurstServiceException(
                    "Simulation State not found for %s, thus we are unable to branch from "
                    "it!" % session_burst_config.name)
                self.logger.error(exc)
                raise exc
            simulation_state_index_gid = simulation_state_index[0].gid

        burst_config_to_store.start_time = datetime.now()
        dao.store_entity(burst_config_to_store)

        try:
            thread = threading.Thread(target=self.simulator_service.async_launch_and_prepare_simulation,
                                      kwargs={'burst_config': burst_config_to_store,
                                              'user': user,
                                              'project': project,
                                              'simulator_algo': self.cached_simulator_algorithm,
                                              'session_stored_simulator': session_stored_simulator,
                                              'simulation_state_gid': simulation_state_index_gid})
            thread.start()
            return {'id': burst_config_to_store.id}
        except BurstServiceException as e:
            self.logger.exception('Could not launch burst!')
            return {'error': e.message}
Example no. 49
    def get_generic_entity(entity_type, filter_value, select_field):
        return dao.get_generic_entity(entity_type, filter_value, select_field)
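
The wrapper above forwards to the dao call used throughout these examples; a usage sketch (the gid is a placeholder):

    # Fetch all RegionMapping rows whose '_connectivity' field equals a given gid:
    mappings = get_generic_entity(RegionMapping, connectivity_gid, '_connectivity')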
Example no. 50
    def retrieve_project_full(self,
                              project_id,
                              applied_filters=None,
                              current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(
            project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (
            1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters,
                                                  start_idx,
                                                  OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        view_categ_id = dao.get_visualisers_categories()[0].id
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(
                            model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(
                            one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService(
                        ).get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception(
                            "We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) in (str, unicode):
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) in (str, unicode):
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result[
                        "start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] -
                                                          result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(
                        result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warn("Could not retrieve datatype %s" %
                                             str(dt))

                    operation_figures = dao.get_figures_for_operation(
                        result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(
                            figure.project.name)
                        figure_full_path = os.path.join(
                            figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(
                            figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception(
                    "Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
Example no. 51
    def _store_or_link_burst_config(self, burst_config, bc_path, project_id):
        bc_already_in_tvb = dao.get_generic_entity(BurstConfiguration, burst_config.gid, 'gid')
        if len(bc_already_in_tvb) == 0:
            self.store_datatype(burst_config, bc_path)
            return 1
        return 0
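
A usage sketch for the helper above, as it might be called while importing a project (names hypothetical; the return value counts newly stored burst configurations):

    # Usage sketch (names hypothetical):
    stored_bursts_count += self._store_or_link_burst_config(burst_config, bc_h5_path, project.id)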