def get_operation_details(self, operation_gid, is_group):
        """
        Build the overlay-details entity for one operation, or for an operation group.

        :param operation_gid: GID of the operation, or of the operation group when
            is_group is True
        :param is_group: when True, details are computed from one operation of the
            group plus group-level counts
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            # NOTE(review): positional args (False, True) presumably select a single
            # representative operation of the group -- confirm against dao signature.
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            ## Reload, to make sure all attributes lazy are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        username = dao.get_user_by_id(operation.fk_launched_by).username
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = ProjectService._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, username, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        ## Add all parameter which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details
Exemple #2
0
 def _update_pse_burst_status(self, burst_config):
     """
     Update the burst status once every operation in the PSE (including its
     metric operation group, when present) has finished; do nothing while
     any operation is still running.
     """
     all_operations = dao.get_operations_in_group(burst_config.fk_operation_group)
     if burst_config.fk_metric_operation_group:
         all_operations.extend(
             dao.get_operations_in_group(burst_config.fk_metric_operation_group))

     # Bail out on the first unfinished operation: a later call, triggered by
     # that operation's completion, will update the burst status instead.
     for op in all_operations:
         if not has_finished(op.status):
             self.logger.debug(
                 'Operation {} in group {} is not finished, burst status will not be updated'
                 .format(op.id, op.fk_operation_group))
             return

     statuses = [op.status for op in all_operations]
     self.logger.debug(
         'All operations in burst {} have finished. Will update burst status'
         .format(burst_config.id))
     if STATUS_ERROR in statuses:
         self.mark_burst_finished(
             burst_config, BurstConfiguration.BURST_ERROR,
             'Some operations in PSE have finished with errors')
     elif STATUS_CANCELED in statuses:
         self.mark_burst_finished(burst_config,
                                  BurstConfiguration.BURST_CANCELED)
     else:
         self.mark_burst_finished(burst_config)
Exemple #3
0
    def get_operation_details(self, operation_gid, is_group):
        """
        Gather everything the operation-details overlay needs for one operation
        or for an operation group.

        :param operation_gid: GID of the operation, or of the operation group
            when is_group is True
        :param is_group: whether operation_gid identifies an operation group
        :returns: an OperationOverlayDetails entity, or None when the operation
            cannot be found
        """
        if not is_group:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)
        else:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            # Reload, to make sure all attributes lazy are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        user_display_name = dao.get_user_by_id(operation.fk_launched_by).display_name
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)
        op_pid = dao.get_operation_process_for_operation(operation.id)

        op_details = OperationOverlayDetails(operation, user_display_name, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)
        # Attach the parameters the user set differently for this particular operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.

    :param operation_id: id of the Operation entity to execute; on failure the
        owning burst (if any) is marked finished with the error message.
    """
    log = get_logger('tvb.core.operation_async_launcher')
    burst_service = BurstService2()

    try:
        log.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = curent_operation.algorithm
        log.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                  " for Operation:" + str(curent_operation.id))
        params = parse_json_parameters(curent_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)
        # These should go once we have a common place for it
        if not isinstance(adapter_instance, SimulatorAdapter):
            adapter_form = adapter_instance.get_form()(
                project_id=curent_operation.fk_launched_in)
            adapter_form.fill_from_post(params)
            adapter_instance.submit_form(adapter_form)

        # Un-comment bellow for profiling an operation:
        # import cherrypy.lib.profiler as profiler
        # p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        # p.run(OperationService().initiate_prelaunch, curent_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(curent_operation,
                                              adapter_instance, **params)
        if curent_operation.fk_operation_group:
            parent_burst = dao.get_generic_entity(
                BurstConfiguration2, curent_operation.fk_operation_group,
                'operation_group_id')[0]
            operations_in_group = dao.get_operations_in_group(
                curent_operation.fk_operation_group)
            if parent_burst.metric_operation_group_id:
                operations_in_group.extend(
                    dao.get_operations_in_group(
                        parent_burst.metric_operation_group_id))
            # BUG FIX: mark the burst finished exactly once, and only after *all*
            # operations in the group(s) have finished. The previous code called
            # mark_burst_finished inside the loop, once per finished operation.
            burst_finished = all(
                has_finished(operation.status)
                for operation in operations_in_group)
            if burst_finished and parent_burst is not None:
                burst_service.mark_burst_finished(parent_burst)
        else:
            parent_burst = burst_service.get_burst_for_operation_id(
                operation_id)
            if parent_burst is not None:
                burst_service.mark_burst_finished(parent_burst)

        log.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        # BUG FIX: log the operation id we actually tried to run, not
        # sys.argv[1] -- this function is queue-driven, so argv may be
        # absent or unrelated to the failing operation.
        log.error("Could not execute operation " + str(operation_id))
        log.exception(excep)
        parent_burst = burst_service.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            burst_service.mark_burst_finished(parent_burst,
                                              error_message=str(excep))
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.

    :param operation_id: id of the Operation entity to execute. On success the
        parent burst (when one exists) is marked finished once every operation in
        its group(s) has finished; on failure it is marked with the error message.
    """
    log = get_logger('tvb.core.operation_async_launcher')
    burst_service = BurstService()

    try:
        log.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = curent_operation.algorithm
        log.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                  " for Operation:" + str(curent_operation.id))
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)
        # Un-comment bellow for profiling an operation:
        # import cherrypy.lib.profiler as profiler
        # p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        # p.run(OperationService().initiate_prelaunch, curent_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(curent_operation,
                                              adapter_instance)
        if curent_operation.fk_operation_group:
            # Part of a group/PSE launch: the burst is linked via the operation
            # group, and may own a second group of metric operations as well.
            parent_burst = dao.get_generic_entity(
                BurstConfiguration, curent_operation.fk_operation_group,
                'fk_operation_group')[0]
            operations_in_group = dao.get_operations_in_group(
                curent_operation.fk_operation_group)
            if parent_burst.fk_metric_operation_group:
                operations_in_group.extend(
                    dao.get_operations_in_group(
                        parent_burst.fk_metric_operation_group))
            # Only the launch that completes the *last* operation flips the
            # burst status; earlier completions fall through without marking.
            burst_finished = True
            for operation in operations_in_group:
                if not has_finished(operation.status):
                    burst_finished = False
                    break

            if burst_finished and parent_burst is not None and parent_burst.status != BurstConfiguration.BURST_ERROR:
                burst_service.mark_burst_finished(parent_burst)
        else:
            parent_burst = burst_service.get_burst_for_operation_id(
                operation_id)
            if parent_burst is not None:
                burst_service.mark_burst_finished(parent_burst)

        log.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        log.error("Could not execute operation " + str(operation_id))
        log.exception(excep)
        # Propagate the failure to the burst so the UI shows the error state.
        parent_burst = burst_service.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            burst_service.mark_burst_finished(parent_burst,
                                              error_message=str(excep))
 def test_set_op_group_visibility(self):
     """
     Tests if the visibility for an operation group is set correct.
     """
     _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
     # Freshly created operations must start out visible.
     for op in dao.get_operations_in_group(group_id):
         self.assertTrue(op.visible, "The operation should be visible.")
     group_entity = dao.get_operationgroup_by_id(group_id)
     self.project_service.set_operation_and_group_visibility(group_entity.gid, False, True)
     # Hiding the group must cascade to every operation inside it.
     for op in dao.get_operations_in_group(group_id):
         self.assertFalse(op.visible, "The operation should not be visible.")
 def test_set_op_group_visibility(self):
     """
     Tests if the visibility for an operation group is set correct.
     """
     _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
     # Newly launched group operations start visible.
     for op in dao.get_operations_in_group(group_id):
         assert op.visible, "The operation should be visible."
     group_entity = dao.get_operationgroup_by_id(group_id)
     self.project_service.set_operation_and_group_visibility(group_entity.gid, False, True)
     # Hiding at group level must hide each member operation.
     for op in dao.get_operations_in_group(group_id):
         assert not op.visible, "The operation should not be visible."
 def test_set_op_and_group_visibility(self):
     """
     When changing the visibility for an operation that belongs to an operation group, we
     should also change the visibility for the entire group of operations.
     """
     _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
     ops_before = dao.get_operations_in_group(group_id)
     for op in ops_before:
         assert op.visible, "The operation should be visible."
     # Hiding one member operation should hide the whole group.
     self.project_service.set_operation_and_group_visibility(ops_before[0].gid, False)
     for op in dao.get_operations_in_group(group_id):
         assert not op.visible, "The operation should not be visible."
 def test_set_op_and_group_visibility(self):
     """
     When changing the visibility for an operation that belongs to an operation group, we
     should also change the visibility for the entire group of operations.
     """
     _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
     initial_ops = dao.get_operations_in_group(group_id)
     for op in initial_ops:
         self.assertTrue(op.visible, "The operation should be visible.")
     # Changing a single operation's visibility should propagate to the group.
     self.project_service.set_operation_and_group_visibility(initial_ops[0].gid, False)
     for op in dao.get_operations_in_group(group_id):
         self.assertFalse(op.visible, "The operation should not be visible.")
 def test_set_op_group_visibility(self, datatype_group_factory):
     """
     Tests if the visibility for an operation group is set correct.
     """
     dt_group = datatype_group_factory()
     # Every operation of the freshly built group starts visible.
     for op in dao.get_operations_in_group(dt_group.id):
         assert op.visible, "The operation should be visible."
     op_group = dao.get_operationgroup_by_id(dt_group.id)
     self.project_service.set_operation_and_group_visibility(
         op_group.gid, False, True)
     # Hiding at group level must hide each member operation.
     for op in dao.get_operations_in_group(dt_group.id):
         assert not op.visible, "The operation should not be visible."
 def test_set_op_and_group_visibility(self, datatype_group_factory):
     """
     When changing the visibility for an operation that belongs to an operation group, we
     should also change the visibility for the entire group of operations.
     """
     dt_group = datatype_group_factory()
     ops_before = dao.get_operations_in_group(dt_group.id)
     for op in ops_before:
         assert op.visible, "The operation should be visible."
     # Hiding one member operation should hide the entire group.
     self.project_service.set_operation_and_group_visibility(
         ops_before[0].gid, False)
     for op in dao.get_operations_in_group(dt_group.id):
         assert not op.visible, "The operation should not be visible."
    def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: page identifier to navigate back to from the PSE view
        :param color_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
        :param size_metric:  a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`

        :returns: `ContextDiscretePSE`

        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)

        # Resolve the (up to two) range axes that form the PSE grid.
        range1_name, range1_values, range1_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                            operation_group.range1)
        range2_name, range2_values, range2_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                            operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
        pse_context.setRanges(range1_name, range1_values, range1_labels, range2_name, range2_values, range2_labels)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        # Build one grid node per operation, keyed by its range-value coordinates.
        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            # NOTE(review): range_values is passed to eval(); presumably it is an
            # internally generated dict repr, never user-supplied -- confirm, or
            # switch to ast.literal_eval for safety.
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            key_2 = model.RANGE_MISSING_STRING
            if range2_name is not None:
                key_2 = range_values[range2_name]

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                    else:
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        return pse_context
Exemple #13
0
 def test_remove_burst_operation_group(self, simulation_launch):
     """Removing an operation group must delete every operation inside it."""
     first_op = simulation_launch(self.test_user, self.test_project, 1000, True)
     group_id = first_op.fk_operation_group
     assert not first_op.has_finished
     self.flow_c.cancel_or_remove_operation(group_id, 1, True)
     # None of the group's operations should remain in the database.
     for op in dao.get_operations_in_group(group_id):
         assert dao.try_get_operation_by_id(op.id) is None
    def build_structure_for_operation_group(operation_group_gid):
        """Build the tree-node structure for an operation group, labelled with its algorithm name."""
        op_group = dao.get_operationgroup_by_gid(operation_group_gid)
        first_op = dao.get_operations_in_group(op_group.id, only_first_operation=True)
        algorithm = dao.get_algorithm_by_id(first_op.fk_from_algo)

        node = NodeStructure(operation_group_gid, algorithm.name)
        node.data = NodeData.build_node_for_operation(first_op, op_group.id)
        return node
Exemple #15
0
    def build_structure_for_operation_group(operation_group_gid):
        """Create the node structure for an operation group, labelled with the algorithm display name."""
        group_entity = dao.get_operationgroup_by_gid(operation_group_gid)
        representative = dao.get_operations_in_group(group_entity.id, only_first_operation=True)
        algorithm = dao.get_algorithm_by_id(representative.fk_from_algo)

        result = NodeStructure(operation_group_gid, algorithm.displayname)
        result.data = NodeData.build_node_for_operation(representative, group_entity.id)
        return result
Exemple #16
0
    def prepare_parameters(datatype_group_gid,
                           back_page,
                           color_metric=None,
                           size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: GID of the DataTypeGroup to visualise
        :param back_page: page identifier to navigate back to from the PSE view
        :param color_metric: optional metric used to color the PSE nodes
        :param size_metric: optional metric used to size the PSE nodes
        :returns: a filled ContextDiscretePSE
        :raises Exception: when the DataTypeGroup is no longer in the database
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)
        _, range1_name, range1_labels = operation_group.load_range_numbers(
            operation_group.range1)
        has_range2, range2_name, range2_labels = operation_group.load_range_numbers(
            operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, range1_labels,
                                         range2_labels, color_metric,
                                         size_metric, back_page)
        final_dict = dict()
        operations = dao.get_operations_in_group(operation_group.id)
        for operation_ in operations:
            if operation_.status == model.STATUS_STARTED:
                pse_context.has_started_ops = True
            # SECURITY NOTE: range_values is eval-ed; acceptable only because it
            # is generated internally at launch time, never from user input.
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            key_2 = model.RANGE_MISSING_STRING
            # BUG FIX: has_range2 is a boolean flag, so the previous check
            # "has_range2 is not None" was always True and raised a KeyError
            # for groups with a single range (range2_name is then missing).
            if has_range2:
                key_2 = range_values[range2_name]

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatype = dao.get_results_for_operation(operation_.id)[0]
                measures = dao.get_generic_entity(DatatypeMeasure,
                                                  datatype.gid,
                                                  '_analyzed_datatype')
                pse_context.prepare_metrics_datatype(measures, datatype)

            # setdefault keeps one inner dict per range1 coordinate.
            final_dict.setdefault(key_1, {})[key_2] = pse_context.build_node_info(
                operation_, datatype)

        pse_context.fill_object(final_dict)
        # datatypes_dict is not actually used in the drawing of the PSE and actually
        # causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        return pse_context
 def test_stop_burst_operation_group(self, simulation_launch):
     """Cancelling a running group must leave every operation with CANCELED status."""
     first_op = simulation_launch(self.test_user, self.test_project, 1000, True)
     group_id = first_op.fk_operation_group
     assert not first_op.has_finished
     # Give the simulations a moment to actually start before cancelling.
     sleep(5)
     self.flow_c.cancel_or_remove_operation(group_id, 1, False)
     for op in dao.get_operations_in_group(group_id):
         refreshed = dao.get_operation_by_id(op.id)
         assert refreshed.status == STATUS_CANCELED
    def launch(self, datatype_group, **kwargs):
        """
        Also overwrite launch from ABCDisplayer, since we want to handle a list of figures,
        instead of only one Matplotlib figure.

        :param datatype_group: DataTypeGroup whose computed measures are plotted
        :param kwargs: may carry PARAM_FIGURE_SIZE as a (width, height) pair in pixels
        :raises LaunchException: when called before all operations in the group are finished
        """
        if self.PARAM_FIGURE_SIZE in kwargs:
            figsize = kwargs[self.PARAM_FIGURE_SIZE]
            # Scale the pixel size down by 80 (presumably dpi) -- TODO confirm.
            figsize = ((figsize[0]) / 80, (figsize[1]) / 80)
            del kwargs[self.PARAM_FIGURE_SIZE]
        else:
            figsize = (15, 7)

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)
        # Load both range axes; results are stored on self for use by _create_plot.
        _, range1_name, self.range1 = operation_group.load_range_numbers(
            operation_group.range1)
        _, range2_name, self.range2 = operation_group.load_range_numbers(
            operation_group.range2)

        # Get the computed measures on this DataTypeGroup
        first_op = dao.get_operations_in_group(operation_group.id)[0]
        if first_op.status != model.STATUS_FINISHED:
            raise LaunchException(
                "Not all operations from this range are finished. Cannot generate data until then."
            )

        datatype = dao.get_results_for_operation(first_op.id)[0]
        if datatype.type == "DatatypeMeasure":
            ## Load proper entity class from DB.
            dt_measure = dao.get_generic_entity(DatatypeMeasure,
                                                datatype.id)[0]
        else:
            # Look up the measure computed over this datatype, if any.
            dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.gid,
                                                '_analyzed_datatype')
            if dt_measure:
                dt_measure = dt_measure[0]

        figure_nrs = {}
        metrics = dt_measure.metrics if dt_measure else []
        for metric in metrics:
            # Separate plot for each metric.
            self._create_plot(metric, figsize, operation_group, range1_name,
                              range2_name, figure_nrs)

        parameters = dict(title=self._ui_name,
                          showFullToolbar=True,
                          serverIp=config.SERVER_IP,
                          serverPort=config.MPLH5_SERVER_PORT,
                          figureNumbers=figure_nrs,
                          metrics=metrics,
                          figuresJSON=json.dumps(figure_nrs))

        return self.build_display_result("pse_isocline/view", parameters)
    def __init__(self, operation_group_gid):
        """Build the graph node describing an operation group."""
        NodeStructure.__init__(self, operation_group_gid, "")

        op_group = dao.get_operationgroup_by_gid(operation_group_gid)
        first_op = dao.get_operations_in_group(op_group.id, only_first_operation=True)
        algorithm = dao.get_algorithm_by_id(first_op.fk_from_algo)

        self.name = algorithm.name
        self.data = NodeData(MAX_SHAPE_SIZE, OPERATION_GROUP_SHAPE_COLOR, OPERATION_GROUP_SHAPE,
                             NODE_OPERATION_GROUP_TYPE, op_group.id, str(first_op.start_date))
Exemple #20
0
    def from_db(cls, operation_group_id):
        """
        Collects from db the information about the operation group that is required by the isocline view.
        """
        group_operations = dao.get_operations_in_group(operation_group_id)
        op_group = dao.get_operationgroup_by_id(operation_group_id)

        model_instance = cls(op_group.range1, op_group.range2, {},
                             PseIsoModel._find_metrics(group_operations), None)
        model_instance._fill_apriori_data(group_operations)
        return model_instance
Exemple #21
0
    def from_db(cls, operation_group_id):
        """
        Collects from db the information about the operation group that is required by the isocline view.
        """
        ops = dao.get_operations_in_group(operation_group_id)
        metrics = PseIsoModel._find_metrics(ops)
        op_group = dao.get_operationgroup_by_id(operation_group_id)

        instance = cls(op_group.range1, op_group.range2, {}, metrics, None)
        instance._fill_apriori_data(ops)
        return instance
Exemple #22
0
 def test_stop_operations_group(self, test_adapter_factory, datatype_group_factory):
     """Cancelling an operation group must cancel each of its running operations."""
     group = datatype_group_factory(status=STATUS_STARTED, store_vm=True)
     operations = dao.get_operations_in_group(group.fk_from_operation)
     group_id = 0
     for op in operations:
         refreshed = dao.get_operation_by_id(op.id)
         assert not refreshed.has_finished
         group_id = refreshed.fk_operation_group
     self.flow_c.cancel_or_remove_operation(group_id, 1, False)
     # Every operation must now report the CANCELED status.
     for op in operations:
         refreshed = dao.get_operation_by_id(op.id)
         assert refreshed.status == STATUS_CANCELED
    def __init__(self, operation_group_gid):
        """Initialise a tree node that represents an operation group."""
        NodeStructure.__init__(self, operation_group_gid, "")

        group_entity = dao.get_operationgroup_by_gid(operation_group_gid)
        representative = dao.get_operations_in_group(group_entity.id,
                                                     only_first_operation=True)
        algorithm = dao.get_algorithm_by_id(representative.fk_from_algo)

        self.name = algorithm.name
        self.data = NodeData(MAX_SHAPE_SIZE, OPERATION_GROUP_SHAPE_COLOR,
                             OPERATION_GROUP_SHAPE, NODE_OPERATION_GROUP_TYPE,
                             group_entity.id, str(representative.start_date))
Exemple #24
0
 def get_datatypes_inputs_for_operation_group(self, group_id, selected_filter):
     """
     Returns the dataType inputs for an operation group. If more dataTypes
     are part of the same dataType group then only the dataType group will
     be returned instead of them.
     """
     inputs_by_id = {}
     for gid_row in dao.get_operations_in_group(group_id, only_gids=True):
         # The operation gid sits in the first column of each row.
         for dt in self.get_datatype_and_datatypegroup_inputs_for_operation(gid_row[0], selected_filter):
             inputs_by_id[dt.id] = dt
     return list(inputs_by_id.values())
 def get_datatypes_inputs_for_operation_group(group_id, selected_filter):
     """
     Returns the dataType inputs for an operation group. If more dataTypes
     are part of the same dataType group then only the dataType group will
     be returned instead of them.

     :param group_id: id of the operation group to inspect
     :param selected_filter: filter applied when collecting each operation's inputs
     :returns: list of unique input DataTypes (de-duplicated by id)
     """
     operations_gids = dao.get_operations_in_group(group_id, only_gids=True)
     op_group_inputs = dict()
     for gid in operations_gids:
         op_inputs = ProjectService.get_datatype_and_datatypegroup_inputs_for_operation(gid[0], selected_filter)
         for datatype in op_inputs:
             op_group_inputs[datatype.id] = datatype
     # Return a real list (not a dict view) for consistency with the instance-method
     # variant of this helper, and so callers can index/serialize the result.
     return list(op_group_inputs.values())
Exemple #26
0
    def test_datatypes_groups(self, test_adapter_factory,
                              datatype_group_factory):
        """
        Tests if the dataType group is set correct on the dataTypes resulted from the same operation group.
        """
        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 0, "There should be no operation"

        dt_group = datatype_group_factory(project=self.test_project)
        model = TestModel()
        test_adapter_factory()
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")

        operations = dao.get_operations_in_group(dt_group.id)

        # Attach a stored view-model and the test adapter to every operation,
        # so they can be (re)launched below.
        for op in operations:
            model.gid = uuid.uuid4()
            op_path = StorageInterface().get_project_folder(
                self.test_project.name, str(op.id))
            op.view_model_gid = model.gid.hex
            op.algorithm = adapter.stored_adapter
            h5.store_view_model(model, op_path)
            dao.store_entity(op)

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 2, "Expected two operation groups"
        assert all_operations[0][2] == 6, "Expected 6 operations in one group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id != None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[1][0])
        self.operation_service.stop_operation(all_operations[1][1])
        # Make sure operations are executed
        self.operation_service.launch_operation(all_operations[1][0], False)
        self.operation_service.launch_operation(all_operations[1][1], False)

        resulted_datatypes = dao.get_datatype_in_group(
            operation_group_id=operation_group_id)
        assert len(
            resulted_datatypes) >= 2, "Expected at least 2, but: " + str(
                len(resulted_datatypes))

        # Each resulting datatype must point back to the group's DataTypeGroup.
        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
    def launch(self, datatype_group, **kwargs):
        """
        Also overwrite launch from ABCDisplayer, since we want to handle a list of figures,
        instead of only one Matplotlib figure.

        :param datatype_group: DataTypeGroup entity whose operation-group results are displayed
        :param kwargs: may contain PARAM_FIGURE_SIZE as a (width, height) pair in pixels
        :raises LaunchException: when called before all operations in the group are finished,
            or when no metrics were generated (e.g. due to simulation errors)
        """
        # Convert the requested size from pixels to Matplotlib inches (assumes 80 DPI);
        # fall back to a 15x7 inch default when no size was requested.
        if self.PARAM_FIGURE_SIZE in kwargs:
            figsize = kwargs[self.PARAM_FIGURE_SIZE]
            figsize = ((figsize[0]) / 80, (figsize[1]) / 80)
            del kwargs[self.PARAM_FIGURE_SIZE]
        else:
            figsize = (15, 7)

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
        _, range1_name, self.range1 = operation_group.load_range_numbers(operation_group.range1)
        _, range2_name, self.range2 = operation_group.load_range_numbers(operation_group.range2)

        # Find the first operation that already produced a result and load its DatatypeMeasure.
        # Initialized upfront so the name is always bound, even when the group contains no
        # operations (previously that case raised UnboundLocalError below).
        dt_measure = None
        for operation in dao.get_operations_in_group(operation_group.id):
            if operation.status == model.STATUS_STARTED:
                raise LaunchException("Can not display until all operations from this range are finished!")

            op_results = dao.get_results_for_operation(operation.id)
            if len(op_results):
                datatype = op_results[0]
                if datatype.type == "DatatypeMeasure":
                    ## Load proper entity class from DB.
                    dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.id)[0]
                else:
                    dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    if dt_measure:
                        dt_measure = dt_measure[0]
                break

        figure_nrs = {}
        metrics = dt_measure.metrics if dt_measure else {}
        if metrics:
            for metric in metrics:
                # Separate plot for each metric.
                self._create_plot(metric, figsize, operation_group, range1_name, range2_name, figure_nrs)
        else:
            raise LaunchException("No datatypes were generated due to simulation errors. Nothing to display.")

        parameters = dict(title=self._ui_name, showFullToolbar=True,
                          serverIp=config.SERVER_IP, serverPort=config.MPLH5_SERVER_PORT,
                          figureNumbers=figure_nrs, metrics=metrics, figuresJSON=json.dumps(figure_nrs))

        return self.build_display_result("pse_isocline/view", parameters)
Exemple #28
0
    def __init__(self, datatype_group_gid):
        """
        Resolve the DataTypeGroup behind ``datatype_group_gid`` and pre-compute everything
        needed for the PSE display: operation group, operations, display models and ranges.

        :raises Exception: when no DataTypeGroup matches the given gid
        """
        self.datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if self.datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        op_group_fk = self.datatype_group.fk_operation_group
        self.operation_group = dao.get_operationgroup_by_id(op_group_fk)
        self.operations = dao.get_operations_in_group(self.operation_group.id)
        self.pse_model_list = self.parse_pse_data_for_display()
        self.all_metrics = dict()
        self._prepare_ranges_data()
Exemple #29
0
    def prepare_node_data(datatype_group):
        """
        Build a {datatype_gid: info-dict} mapping covering the first result of every
        operation in the group behind ``datatype_group``.

        :raises Exception: when ``datatype_group`` is None (no longer in the database)
        """
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        op_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
        node_info = dict()
        for operation in dao.get_operations_in_group(op_group.id):
            results = dao.get_results_for_operation(operation.id)
            if not results:
                # Operations without results contribute nothing to the node map.
                continue
            first_dt = results[0]
            node_info[first_dt.gid] = dict(operation_id=operation.id,
                                           datatype_gid=first_dt.gid,
                                           datatype_type=first_dt.type,
                                           datatype_subject=first_dt.subject,
                                           datatype_invalid=first_dt.invalid)
        return node_info
    def _create_operations_with_inputs(self, datatype_group, is_group_parent=False):
        """
        Build a complex tree of operations for test purposes.

        When ``is_group_parent`` is True, one entry of ``datatype_group`` is used as input
        for the created operations; otherwise a fresh value wrapper datatype is created.

        :returns: (list of stored operations, datatype-group id or input datatype gid)
        """
        group_dts = dao.get_datatypes_from_datatype_group(datatype_group.id)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = self._create_value_wrapper(self.test_user,
                                                      self.test_project)[1]

        params_json = json.dumps({"param_name": datatype_gid})

        # Four stand-alone operations; operations 1 and 3 are hidden.
        created_ops = []
        for index in range(4):
            operation = TestFactory.create_operation(test_user=self.test_user,
                                                     test_project=self.test_project)
            if index in (1, 3):
                operation.visible = False
            operation.parameters = params_json
            created_ops.append(dao.store_entity(operation))

        # Operations belonging to the group; the second one is hidden.
        group_ops = dao.get_operations_in_group(
            datatype_group.fk_from_operation)
        assert len(group_ops) == 9
        group_ops[0].parameters = params_json
        group_ops[0] = dao.store_entity(group_ops[0])
        group_ops[1].visible = False
        group_ops[1].parameters = params_json
        group_ops[1] = dao.store_entity(group_ops[1])

        created_ops.extend(group_ops)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(datatype_group.id)
            return created_ops, dt_group.id
        return created_ops, datatype_gid
Exemple #31
0
    def prepare_node_data(datatype_group):
        """
        Collect display info for the first result of each operation in the group.

        :returns: dict mapping each datatype gid to its operation/datatype metadata
        :raises Exception: when ``datatype_group`` is None (no longer in the database)
        """
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)
        operations = dao.get_operations_in_group(operation_group.id)
        # Pair each operation with its results, then keep only non-empty ones.
        pairs = ((op, dao.get_results_for_operation(op.id)) for op in operations)
        return {results[0].gid: dict(operation_id=op.id,
                                     datatype_gid=results[0].gid,
                                     datatype_type=results[0].type,
                                     datatype_subject=results[0].subject,
                                     datatype_invalid=results[0].invalid)
                for op, results in pairs if len(results) > 0}
    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Build a complex tree of operations for test purposes.

        When ``is_group_parent`` is True, a new group is created and one of its entries
        is used as input for the returned operations; otherwise a value wrapper is used.

        :returns: (list of stored operations, datatype-group id or input datatype gid)
        """
        group_dts, root_op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = ProjectServiceTest._create_value_wrapper(self.test_user, self.test_project)[1]

        params_json = json.dumps({"param_name": datatype_gid})

        # Four stand-alone operations; operations 1 and 3 are hidden.
        created = []
        for index in range(4):
            operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
            if index in (1, 3):
                operation.visible = False
            operation.parameters = params_json
            created.append(dao.store_entity(operation))

        # A second group whose two operations also receive the input; one is hidden.
        _, new_group_id = TestFactory.create_group(self.test_user, self.test_project)
        group_ops = dao.get_operations_in_group(new_group_id)
        self.assertEqual(2, len(group_ops))
        group_ops[0].parameters = params_json
        group_ops[0] = dao.store_entity(group_ops[0])
        group_ops[1].visible = False
        group_ops[1].parameters = params_json
        group_ops[1] = dao.store_entity(group_ops[1])

        created.extend(group_ops)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
            return created, dt_group.id
        return created, datatype_gid
Exemple #33
0
    def import_list_of_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        This method scans provided folder and identify all operations that needs to be imported

        :param project: Project entity the imported operations will belong to
        :param import_path: folder previously unpacked from an exported archive
        :param is_group: True when the import targets an operation group (PSE)
        :param importer_operation_id: id of the already-existing operation that triggered this
            import; when is_group is True it is reused for the first imported operation only
        :returns: tuple (imported operation entities, total DT count found, DT count stored)
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        # For groups the importer operation id is handled separately below, so the ordered
        # scan must not be filtered by it.
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, None if is_group else importer_operation_id)

        if is_group and len(ordered_operations) > 0:
            # Reuse the triggering operation's id for the first operation in the group,
            # and delete its temporary view-model file from the import folder.
            first_op = dao.get_operation_by_id(importer_operation_id)
            vm_path = h5.determine_filepath(first_op.view_model_gid,
                                            os.path.dirname(import_path))
            os.remove(vm_path)

            ordered_operations[0].operation.id = importer_operation_id

        for operation_data in ordered_operations:
            if operation_data.is_old_form:
                # Legacy export format: datatypes are loaded straight from the operation folder.
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.storage_interface.get_project_folder(
                    project.name, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    # Keep the operation but mark it failed instead of aborting the import.
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                # Current format: the operation carries a serialized view model.
                operation_data.operation.create_date = datetime.now()
                operation_data.operation.start_date = datetime.now()
                operation_data.operation.completion_date = datetime.now()

                # Merge (update in place) when we reuse the importer operation's id.
                do_merge = False
                if importer_operation_id:
                    do_merge = True
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    # Ensure a DataTypeGroup exists for this operation group.
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    imported_operations.append(operation_entity)
                    new_op_folder = self.storage_interface.get_project_folder(
                        project.name, str(operation_entity.id))
                    # Move the view-model H5 files next to the operation and record their size.
                    view_model_disk_size = 0
                    for h5_file in operation_data.all_view_model_files:
                        view_model_disk_size += StorageInterface.compute_size_on_disk(
                            h5_file)
                        shutil.move(h5_file, new_op_folder)
                    operation_entity.view_model_disk_size = view_model_disk_size
                    dao.store_entity(operation_entity)
                except MissingReferenceException as excep:
                    # Roll back this operation: drop its stored data and group associations.
                    # NOTE(review): dt_group may still be None here — verify before .id access.
                    self.storage_interface.remove_operation_data(
                        project.name, operation_entity.id)
                    operation_entity.fk_operation_group = None
                    dao.store_entity(operation_entity)
                    dao.remove_entity(DataTypeGroup, dt_group.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

            # We want importer_operation_id to be kept just for the first operation (the first iteration)
            if is_group:
                importer_operation_id = None

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count
Exemple #34
0
 def set_group_descendants_visibility(operation_group_id):
     """Propagate the visibility change to every operation in the given operation group."""
     for group_operation in dao.get_operations_in_group(operation_group_id):
         set_visibility(group_operation)
Exemple #35
0
 def get_operations_in_group(operation_group):
     """Fetch every operation that belongs to the given operation group entity."""
     group_id = operation_group.id
     return dao.get_operations_in_group(group_id)
Exemple #36
0
    def prepare_parameters(datatype_group_gid,
                           back_page,
                           color_metric=None,
                           size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: String referring to metric to apply on colors
        :param size_metric:  String referring to metric to apply on sizes

        :returns: `ContextDiscretePSE`
        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception(
                "Selected DataTypeGroup is no longer present in the database. "
                "It might have been remove or the specified id is not the correct one."
            )

        operation_group = dao.get_operationgroup_by_id(
            datatype_group.fk_operation_group)

        # Resolve axis names, values and labels for both PSE ranges.
        name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range1)
        name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(
            operation_group, operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric,
                                         size_metric, back_page)
        pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                              only_numbers1 and only_numbers2)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        # Substitute indices for non-numeric range values so they can be placed on an axis.
        fake_numbers1 = dict(zip(values1, range(len(list(values1)))))
        fake_numbers2 = dict(zip(values2, range(len(list(values2)))))

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            # NOTE(review): eval on DB-stored range_values — acceptable only because these
            # strings are written by the server itself; never feed it external input.
            range_values = eval(operation_.range_values)
            key_1 = DiscretePSEAdapter.get_value_on_axe(
                range_values, only_numbers1, name1, fake_numbers1)
            key_2 = DiscretePSEAdapter.get_value_on_axe(
                range_values, only_numbers2, name2, fake_numbers2)

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(
                            DatatypeMeasure, datatype.id)
                    else:
                        measures = dao.get_generic_entity(
                            DatatypeMeasure, datatype.gid,
                            '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(
                operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        # Non-numeric axes fall back to positional indices for plotting.
        if not only_numbers1:
            pse_context.values_x = range(len(list(values1)))
        if not only_numbers2:
            pse_context.values_y = range(len(list(values2)))
        return pse_context
Exemple #37
0
    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Creates an instance of datatype from storage / H5 file

        :param current_file: path to the H5 file to load
        :param op_id: id of the operation the datatype will be attached to
        :param datatype_group: optional DataTypeGroup the new datatype belongs to
        :param current_project_id: project id; resolved from the operation when None
        :returns: DatatypeIndex
        :raises DatatypeGroupImportException: on duplicate group import or when a measure
            group is imported before its time series group
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)

        if h5_class is BurstConfigurationH5:
            # Burst configurations are rebuilt directly, not via the index registry.
            if current_project_id is None:
                # NOTE(review): get_operationgroup_by_id is called with an operation id here —
                # verify this should not be dao.get_operation_by_id.
                op_entity = dao.get_operationgroup_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)

            # A datatype already in storage means this group was imported before.
            already_existing_datatype = h5.load_entity_by_gid(datatype.gid)
            if datatype_group is not None and already_existing_datatype is not None:
                raise DatatypeGroupImportException(
                    "The datatype group that you are trying to import"
                    " already exists!")
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            # A measure group references its source time series; if that series is missing,
            # undo the group creation and ask the user to import the series group first.
            if datatype_group is not None and hasattr(datatype_index, 'fk_source_gid') and \
                    datatype_index.fk_source_gid is not None:
                ts = h5.load_entity_by_gid(datatype_index.fk_source_gid)

                if ts is None:
                    op = dao.get_operations_in_group(
                        datatype_group.fk_operation_group,
                        only_first_operation=True)
                    op.fk_operation_group = None
                    dao.store_entity(op)
                    dao.remove_entity(OperationGroup,
                                      datatype_group.fk_operation_group)
                    dao.remove_entity(DataTypeGroup, datatype_group.id)
                    raise DatatypeGroupImportException(
                        "Please import the time series group before importing the"
                        " datatype measure group!")

            # Add all the required attributes
            if datatype_group:
                datatype_index.fk_datatype_group = datatype_group.id
                # Inherit the subject from the first member when the group has none yet.
                if len(datatype_group.subject) == 0:
                    datatype_group.subject = datatype_index.subject
                    dao.store_entity(datatype_group)
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = StorageInterface.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result
    def plot(self, figure, operation_group, metric, range1_name, range2_name):
        """
        Do the plot for the given figure. Also need operation group, metric and ranges
        in order to compute the data to be plotted.

        :param figure: Matplotlib figure to draw the interpolated isocline image on
        :param operation_group: OperationGroup whose operations provide the metric values
        :param metric: name of the metric to read from each DatatypeMeasure
        :param range1_name: name of the first range parameter (x axis)
        :param range2_name: name of the second range parameter (y axis)
        :returns: 2-D list of result datatype GIDs, indexed [range1][range2]
        :raises LaunchException: when some operation in the group has not finished
        """
        operations = dao.get_operations_in_group(operation_group.id)
        # Data from which to interpolate larger 2-D space
        apriori_x = numpy.array(self.range1)
        apriori_y = numpy.array(self.range2)
        apriori_data = numpy.zeros((apriori_x.size, apriori_y.size))

        # An 2D array of GIDs which is used later to launch overlay for a DataType
        datatypes_gids = [[None for _ in self.range2] for _ in self.range1]
        for operation_ in operations:
            # NOTE(review): eval on DB-stored range_values — acceptable only because these
            # strings are written by the server itself; never feed it external input.
            range_values = eval(operation_.range_values)
            key_1 = range_values[range1_name]
            index_x = self.range1.index(key_1)
            key_2 = range_values[range2_name]
            index_y = self.range2.index(key_2)
            if operation_.status != model.STATUS_FINISHED:
                raise LaunchException("Not all operations from this range are complete. Cannot view until then.")

            datatype = dao.get_results_for_operation(operation_.id)[0]
            datatypes_gids[index_x][index_y] = datatype.gid

            # Load the DatatypeMeasure either directly or via the analyzed datatype link.
            if datatype.type == "DatatypeMeasure":
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
            else:
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')

            # Missing measures contribute a zero data point rather than failing the plot.
            if measures:
                apriori_data[index_x][index_y] = measures[0].metrics[metric]
            else:
                apriori_data[index_x][index_y] = 0

        # Attempt order-3 interpolation.
        kx = ky = 3
        if len(self.range1) <= 3 or len(self.range2) <= 3:
            # Number of points is too small, just do linear interpolation
            kx = ky = 1
        s = interpolate.RectBivariateSpline(apriori_x, apriori_y, apriori_data, kx=kx, ky=ky)
        # Get data of higher resolution that we'll plot later on
        posteriori_x = numpy.arange(self.range1[0], self.range1[-1],
                                    (self.range1[-1] - self.range1[0]) / RESOLUTION[0])
        posteriori_y = numpy.arange(self.range2[0], self.range2[-1],
                                    (self.range2[-1] - self.range2[0]) / RESOLUTION[1])
        posteriori_data = numpy.rot90(s(posteriori_x, posteriori_y))

        # Keep the spline so hover/click handlers can evaluate interpolated values later.
        self.interp_models[figure.number] = s
        # Do actual plot.        
        axes = figure.gca()
        img = axes.imshow(posteriori_data, extent=(min(self.range1), max(self.range1),
                                                   min(self.range2), max(self.range2)),
                          aspect='auto', interpolation='nearest')
        axes.set_title("Interpolated values for metric %s" % (metric,))
        figure.colorbar(img)
        axes.set_xlabel(range1_name)
        axes.set_ylabel(range2_name)


        def format_coord(x, y):
            # Compact coordinate readout for the figure's status bar.
            return 'x=%1.4f, y=%1.4f' % (x, y)


        axes.format_coord = format_coord
        return datatypes_gids
Exemple #39
0
    def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
        """
        We suppose that there are max 2 ranges and from each operation results exactly one dataType.

        :param datatype_group_gid: the group id for the `DataType` to be visualised
        :param back_page: Page where back button will direct
        :param color_metric: String referring to metric to apply on colors
        :param size_metric:  String referring to metric to apply on sizes

        :returns: `ContextDiscretePSE`
        :raises Exception: when `datatype_group_id` is invalid (not in database)
        """
        datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
        if datatype_group is None:
            raise Exception("Selected DataTypeGroup is no longer present in the database. "
                            "It might have been remove or the specified id is not the correct one.")

        operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)

        # Resolve axis names, values and labels for both PSE ranges.
        name1, values1, labels1, only_numbers1 = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                         operation_group.range1)
        name2, values2, labels2, only_numbers2 = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                         operation_group.range2)

        pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
        pse_context.setRanges(name1, values1, labels1, name2, values2, labels2,
                              only_numbers1 and only_numbers2)
        final_dict = {}
        operations = dao.get_operations_in_group(operation_group.id)

        # Substitute indices for non-numeric range values so they can be placed on an axis.
        fake_numbers1 = dict(zip(values1, range(len(list(values1)))))
        fake_numbers2 = dict(zip(values2, range(len(list(values2)))))

        for operation_ in operations:
            if not operation_.has_finished:
                pse_context.has_started_ops = True
            # NOTE(review): eval on DB-stored range_values — acceptable only because these
            # strings are written by the server itself; never feed it external input.
            range_values = eval(operation_.range_values)
            key_1 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers1, name1, fake_numbers1)
            key_2 = DiscretePSEAdapter.get_value_on_axe(range_values, only_numbers2, name2, fake_numbers2)

            datatype = None
            if operation_.status == model.STATUS_FINISHED:
                datatypes = dao.get_results_for_operation(operation_.id)
                if len(datatypes) > 0:
                    datatype = datatypes[0]
                    if datatype.type == "DatatypeMeasure":
                        ## Load proper entity class from DB.
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                    else:
                        measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                    pse_context.prepare_metrics_datatype(measures, datatype)

            if key_1 not in final_dict:
                final_dict[key_1] = {}

            final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)

        pse_context.fill_object(final_dict)
        ## datatypes_dict is not actually used in the drawing of the PSE and actually
        ## causes problems in case of NaN values, so just remove it before creating the json
        pse_context.datatypes_dict = {}
        # Non-numeric axes fall back to positional indices for plotting.
        if not only_numbers1:
            pse_context.values_x = range(len(list(values1)))
        if not only_numbers2:
            pse_context.values_y = range(len(list(values2)))
        return pse_context
Exemple #40
0
    def import_project_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        This method scans provided folder and identify all operations that needs to be imported
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, importer_operation_id)

        for operation_data in ordered_operations:

            if operation_data.is_old_form:
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.files_helper.get_project_folder(
                    project, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                do_merge = False
                if importer_operation_id:
                    do_merge = True
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    # TODO: TVB-2849 to review these flags and simplify condition
                    if stored_dts_count > 0 or (
                            not operation_data.is_self_generated and
                            not is_group) or importer_operation_id is not None:
                        imported_operations.append(operation_entity)
                        new_op_folder = self.files_helper.get_project_folder(
                            project, str(operation_entity.id))
                        view_model_disk_size = 0
                        for h5_file in operation_data.all_view_model_files:
                            view_model_disk_size += FilesHelper.compute_size_on_disk(
                                h5_file)
                            shutil.move(h5_file, new_op_folder)
                        operation_entity.view_model_disk_size = view_model_disk_size
                        dao.store_entity(operation_entity)
                    else:
                        # In case all Dts under the current operation were Links and the ViewModel is dummy,
                        # don't keep the Operation empty in DB
                        dao.remove_entity(Operation, operation_entity.id)
                        self.files_helper.remove_operation_data(
                            project.name, operation_entity.id)
                except MissingReferenceException as excep:
                    dao.remove_entity(Operation, operation_entity.id)
                    self.files_helper.remove_operation_data(
                        project.name, operation_entity.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count
    def plot(self, figure, operation_group, metric, range1_name, range2_name):
        """
        Do the plot for the given figure. Also need operation group, metric and ranges
        in order to compute the data to be plotted.

        :param figure: matplotlib figure to draw into
        :param operation_group: group whose operations supply one metric value per range point
        :param metric: name of the metric to interpolate and display
        :param range1_name: range parameter mapped on the X axis
        :param range2_name: range parameter mapped on the Y axis
        :returns: 2D nested list of DataType GIDs aligned with (range1, range2) indices
                  (None where an operation produced no result)
        :raises LaunchException: if any operation in the group is still running
        """
        operations = dao.get_operations_in_group(operation_group.id)
        # Data from which to interpolate a larger 2-D space
        apriori_x = numpy.array(self.range1)
        apriori_y = numpy.array(self.range2)
        apriori_data = numpy.zeros((apriori_x.size, apriori_y.size))

        # A 2D array of GIDs which is used later to launch the overlay for a DataType
        datatypes_gids = [[None for _ in self.range2] for _ in self.range1]
        for operation_ in operations:
            # NOTE(review): eval() on range_values is only acceptable because the string is
            # produced internally by TVB when launching the group, never from external input.
            range_values = eval(operation_.range_values)
            index_x = self.range1.index(range_values[range1_name])
            index_y = self.range2.index(range_values[range2_name])
            if operation_.status == model.STATUS_STARTED:
                raise LaunchException("Not all operations from this range are complete. Cannot view until then.")

            operation_results = dao.get_results_for_operation(operation_.id)
            if operation_results:
                datatype = operation_results[0]
                datatypes_gids[index_x][index_y] = datatype.gid

                if datatype.type == "DatatypeMeasure":
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                else:
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
            else:
                datatypes_gids[index_x][index_y] = None
                measures = None

            if measures:
                apriori_data[index_x][index_y] = measures[0].metrics[metric]
            else:
                # numpy.nan: the numpy.NaN alias was removed in NumPy 2.0
                apriori_data[index_x][index_y] = numpy.nan

        # Convert NaNs to 0 but keep track of them, so we can restore them after
        # interpolation (interpolating over NaN values breaks the whole process).
        nan_indices = numpy.isnan(apriori_data)
        self.nan_indices[figure.number] = nan_indices
        apriori_data = numpy.nan_to_num(apriori_data)
        # Attempt order-3 interpolation.
        kx = ky = 3
        if len(self.range1) <= 3 or len(self.range2) <= 3:
            # Number of points is too small, just do linear interpolation
            kx = ky = 1
        s = interpolate.RectBivariateSpline(apriori_x, apriori_y, apriori_data, kx=kx, ky=ky)
        # Get data of higher resolution that we'll plot later on
        posteriori_x = numpy.arange(self.range1[0], self.range1[-1],
                                    (self.range1[-1] - self.range1[0]) / RESOLUTION[0])
        posteriori_y = numpy.arange(self.range2[0], self.range2[-1],
                                    (self.range2[-1] - self.range2[0]) / RESOLUTION[1])
        posteriori_data = s(posteriori_x, posteriori_y)
        # Integer division ("//") is required: the granularities are used as range()
        # bounds below. The original Python 2 code relied on "/" truncating ints and
        # on xrange(); "//" and range() preserve that behavior and work on Python 3.
        x_granularity = RESOLUTION[0] // len(self.range1)
        y_granularity = RESOLUTION[1] // len(self.range2)
        for idx, row in enumerate(nan_indices):
            for idy, was_nan in enumerate(row):
                if was_nan:
                    # Now we want to set back all the values that were NaN before interpolation
                    # and keep track of the change in granularity. For this reason for each nan
                    # value we had before, we will now have a matrix of the shape [x_granularity x y_granularity]
                    # full of NaN values
                    start_x = idx * x_granularity
                    end_x = (idx + 1) * x_granularity
                    start_y = idy * y_granularity
                    end_y = (idy + 1) * y_granularity
                    for x_scaled in range(start_x, end_x):
                        for y_scaled in range(start_y, end_y):
                            posteriori_data[x_scaled, y_scaled] = numpy.nan
        # Rotate to get good plot
        posteriori_data = numpy.rot90(posteriori_data)

        self.interp_models[figure.number] = s
        # Do actual plot.
        axes = figure.gca()
        img = axes.imshow(posteriori_data, extent=(min(self.range1), max(self.range1),
                                                   min(self.range2), max(self.range2)),
                          aspect='auto', interpolation='nearest')
        axes.set_title("Interpolated values for metric %s" % (metric,))
        figure.colorbar(img)
        axes.set_xlabel(range1_name)
        axes.set_ylabel(range2_name)

        def format_coord(x, y):
            """Format the cursor position shown in the matplotlib status bar."""
            return 'x=%1.4f, y=%1.4f' % (x, y)

        axes.format_coord = format_coord
        return datatypes_gids
 def set_group_descendants_visibility(operation_group_id):
     """Propagate the visibility update to every operation belonging to the given group."""
     for group_operation in dao.get_operations_in_group(operation_group_id):
         set_visibility(group_operation)
 def get_operations_in_group(operation_group):
     """ Return all the operations from an operation group. """
     group_id = operation_group.id
     return dao.get_operations_in_group(group_id)