def _get_recorded_matrix(self, variable):
        """ Perform safety checks and get the recorded data from the vertex\
            in matrix format.

        :param variable: the variable name to read. supported variable names
            are :'gsyn_exc', 'gsyn_inh', 'v'
        :return: a tuple of (data, recorded neuron indexes,
            sampling interval)
        """
        timer = Timer()
        timer.start_timing()
        # Bind the simulator once and reuse it throughout; the original
        # called get_simulator() again despite having this local.
        sim = get_simulator()
        sim.verify_not_running()

        # check that we're in a state to get voltages
        if not isinstance(
                self._population._vertex, AbstractNeuronRecordable):
            raise ConfigurationException(
                "This population has not got the capability to record {}"
                .format(variable))

        if not self._population._vertex.is_recording(variable):
            raise ConfigurationException(
                "This population has not been set to record {}"
                .format(variable))

        if not sim.has_ran:
            logger.warning(
                "The simulation has not yet run, therefore {} cannot"
                " be retrieved, hence the list will be empty".format(
                    variable))
            # No run yet: empty data, but still a valid sampling interval
            data = numpy.zeros((0, 3))
            indexes = []
            sampling_interval = self._population._vertex.\
                get_neuron_sampling_interval(variable)
        elif sim.use_virtual_board:
            logger.warning(
                "The simulation is using a virtual machine and so has not"
                " truly ran, hence the list will be empty")
            # Virtual runs record nothing: same empty shape as the no-run case
            data = numpy.zeros((0, 3))
            indexes = []
            sampling_interval = self._population._vertex.\
                get_neuron_sampling_interval(variable)
        else:
            # assuming we got here, everything is ok, so we should go get the
            # data
            results = self._population._vertex.get_data(
                variable, sim.no_machine_time_steps, sim.placements,
                sim.graph_mapper, sim.buffer_manager, sim.machine_time_step)
            (data, indexes, sampling_interval) = results

        sim.add_extraction_timing(
            timer.take_sample())
        return (data, indexes, sampling_interval)
    def _get_recorded_matrix(self, variable):
        """ Perform safety checks and get the recorded data from the vertex\
            in matrix format.

        :param variable: the variable name to read. supported variable names
            are :'gsyn_exc', 'gsyn_inh', 'v'
        :return: the data
        """
        timer = Timer()
        timer.start_timing()
        data = None
        sim = get_simulator()

        get_simulator().verify_not_running()

        # check that we're in a state to get voltages
        if not isinstance(
                self._population._vertex, AbstractNeuronRecordable):
            raise ConfigurationException(
                "This population has not got the capability to record {}"
                .format(variable))

        if not self._population._vertex.is_recording(variable):
            raise ConfigurationException(
                "This population has not been set to record {}"
                .format(variable))

        if not sim.has_ran:
            logger.warning(
                "The simulation has not yet run, therefore {} cannot"
                " be retrieved, hence the list will be empty".format(
                    variable))
            data = numpy.zeros((0, 3))
            indexes = []
            sampling_interval = self._population._vertex.\
                get_neuron_sampling_interval(variable)
        elif sim.use_virtual_board:
            logger.warning(
                "The simulation is using a virtual machine and so has not"
                " truly ran, hence the list will be empty")
            data = numpy.zeros((0, 3))
            indexes = []
            sampling_interval = self._population._vertex.\
                get_neuron_sampling_interval(variable)
        else:
            # assuming we got here, everything is ok, so we should go get the
            # data
            results = self._population._vertex.get_data(
                variable, sim.no_machine_time_steps, sim.placements,
                sim.graph_mapper, sim.buffer_manager, sim.machine_time_step)
            (data, indexes, sampling_interval) = results

        get_simulator().add_extraction_timing(
            timer.take_sample())
        return (data, indexes, sampling_interval)
Example 3
0
def test_basic_use():
    """Smoke-test the Timer API: none of these calls should raise."""
    timer = Timer()
    timer.start_timing()
    with timer:
        sleep(0.1)
    first_sample = timer.take_sample()
    assert first_sample is not None
    assert timer.take_sample().total_seconds() > 0
Example 4
0
    def __execute_mapping(self):
        """ Run every configured algorithm in order, optionally timing
            each one and injecting inputs/outputs as configured.
        """
        # Inject the raw inputs up front when both flags ask for it
        if self._inject_inputs and self._do_immediate_injection:
            do_injection(self._inputs)

        fresh_outputs = dict()
        for algorithm in self._algorithms:
            # Start a timer only when timing has been requested
            timer = None
            if self._do_timing:
                timer = Timer()
                timer.start_timing()

            # Run this algorithm against the accumulated type mapping
            outputs = algorithm.call(self._internal_type_mapping)

            if self._provenance_path:
                self._report_full_provenance(algorithm, outputs)

            # Record how long the algorithm took
            if self._do_timing:
                self._update_timings(timer, algorithm)

            if outputs is not None:
                self._internal_type_mapping.update(outputs)
                if self._do_immediate_injection and not self._inject_inputs:
                    fresh_outputs.update(outputs)

            # Immediately inject whatever this algorithm produced
            if self._do_immediate_injection:
                do_injection(outputs)

        # After all algorithms, inject either everything or just the
        # newly produced outputs, depending on configuration
        if self._do_post_run_injection:
            if self._inject_inputs:
                do_injection(self._internal_type_mapping)
            else:
                do_injection(fresh_outputs)
    def _execute_mapping(self):
        """ Run every configured algorithm in order, optionally timing
            each one and injecting inputs/outputs as configured.
        """
        # Inject the raw inputs up front when both flags ask for it
        if self._inject_inputs and self._do_immediate_injection:
            do_injection(self._inputs)

        fresh_outputs = dict()
        for algorithm in self._algorithms:
            # Start a timer only when timing has been requested
            timer = None
            if self._do_timing:
                timer = Timer()
                timer.start_timing()

            # Run this algorithm against the accumulated type mapping
            outputs = algorithm.call(self._internal_type_mapping)

            if self._provenance_path:
                self._report_full_provenance(algorithm, outputs)

            # Record how long the algorithm took
            if self._do_timing:
                self._update_timings(timer, algorithm)

            if outputs is not None:
                self._internal_type_mapping.update(outputs)
                if self._do_immediate_injection and not self._inject_inputs:
                    fresh_outputs.update(outputs)

            # Immediately inject whatever this algorithm produced
            if self._do_immediate_injection:
                do_injection(outputs)

        # After all algorithms, inject either everything or just the
        # newly produced outputs, depending on configuration
        if self._do_post_run_injection:
            if self._inject_inputs:
                do_injection(self._internal_type_mapping)
            else:
                do_injection(fresh_outputs)
Example 6
0
def test_advanced_use():
    """Check that the context-manager protocol records the elapsed time."""
    timer = Timer()
    with timer:
        sleep(0.1)
    measured = timer.measured_interval
    assert measured is not None
    assert measured.total_seconds() >= 0.1
Example 7
0
def test_create():
    """Constructing a Timer must succeed and yield a real object."""
    assert Timer() is not None
Example 8
0
def ordered_covering(routing_table,
                     target_length,
                     aliases=None,
                     no_raise=False,
                     use_timer_cut_off=False,
                     time_to_run_for=None):
    """Reduce the size of a routing table by merging together entries where
    possible.

    .. warning::

        The input routing table *must* also include entries which could be
        removed and replaced by default routing.

    .. warning::

        It is assumed that the input routing table is not in any particular
        order and may be reordered into ascending order of generality (number
        of don't cares/Xs in the key-mask) without affecting routing
        correctness.  It is also assumed that if this table is unordered it is
        at least orthogonal (i.e., there are no two entries which would match
        the same key) and reorderable.

    :param list(Entry) routing_table:
        Routing entries to be merged.
    :param target_length:
        Target length of the routing table; the minimisation procedure will
        halt once either this target is reached or no further minimisation is
        possible. If None then the table will be made as small as possible.
    :type target_length: int or None
    :param aliases:
        Dictionary of which keys and masks in the routing table are
        combinations of other (now removed) keys and masks; this allows us to
        consider only the keys and masks the user actually cares about when
        determining if inserting a new entry will break the correctness of the
        table. This should be supplied when using this method to update an
        already minimised table.
    :type aliases: dict(tuple(int, int), set(tuple(int, int))
    :param bool no_raise:
        If False (the default) then an error will be raised if the table cannot
        be minimised to be smaller than `target_length` and `target_length` is
        not None. If True then a table will be returned regardless of the size
        of the final table.
    :param bool use_timer_cut_off:
        If True, abandon the minimisation (by raising
        :py:exc:`MinimisationFailedError`) once ``time_to_run_for`` seconds
        have elapsed.
    :param time_to_run_for:
        Maximum number of seconds to run for when ``use_timer_cut_off`` is
        True; ignored (no cut-off applied) when None.
    :type time_to_run_for: float or None
    :return: new routing table, A new aliases dictionary.
    :rtype: tuple(list(Entry), dict(tuple(int,int), set(tuple(int,int))))
    :raises MinimisationFailedError:
        If the smallest table that can be produced is larger than
        ``target_length``.
    """
    # Copy the aliases dictionary, handle default
    aliases = dict(aliases) if aliases is not None else {}

    timer = Timer()
    timer.start_timing()

    # Perform an initial sort of the routing table in order of increasing
    # generality.
    routing_table = sorted(
        routing_table, key=lambda entry: get_generality(entry.key, entry.mask))

    while target_length is None or len(routing_table) > target_length:
        # Get the best merge
        merge = _get_best_merge(routing_table, aliases)

        # If there is no merge then stop
        if merge.goodness <= 0:
            break

        # Otherwise apply the merge, this returns a new routing table and a new
        # aliases dictionary.
        routing_table, aliases = merge.apply(aliases)

        # control for limiting the search; also guard against the default
        # time_to_run_for=None, which would raise TypeError when compared
        # against a float
        if use_timer_cut_off and time_to_run_for is not None:
            diff = timer.take_sample()
            if diff.total_seconds() >= time_to_run_for:
                raise MinimisationFailedError(
                    f"Best compression is {len(routing_table)} which is "
                    f"still higher than the target {target_length}")

    # If the table is still too big then raise an error
    if (not no_raise and target_length is not None
            and len(routing_table) > target_length):
        raise MinimisationFailedError(
            f"Best compression is {len(routing_table)} which is "
            f"still higher than the target {target_length}")

    # Return the finished routing table and aliases table
    return routing_table, aliases