Example No. 1
 def __init__(self, config, clock, level):
     if config is None:
         raise ValueError('no configuration provided.')
     if clock is None:
         raise ValueError('no clock provided.')
     self._clock = clock
     self._message_bus = clock.message_bus
     self._message_factory = clock.message_factory
     self._log = Logger("ifs", level)
     self._log.info('configuring integrated front sensor...')
     self._config = config['ros'].get('integrated_front_sensor')
     self._ignore_duplicates = self._config.get('ignore_duplicates')
     _use_pot = self._config.get('use_potentiometer')
     self._pot = Potentiometer(config, Level.INFO) if _use_pot else None
     self._loop_freq_hz = self._config.get('loop_freq_hz')
     self._rate = Rate(self._loop_freq_hz)
     # event thresholds:
     self._cntr_raw_min_trigger = self._config.get('cntr_raw_min_trigger')
     self._oblq_raw_min_trigger = self._config.get('oblq_raw_min_trigger')
     self._side_raw_min_trigger = self._config.get('side_raw_min_trigger')
     self._cntr_trigger_distance_cm = self._config.get(
         'cntr_trigger_distance_cm')
     self._oblq_trigger_distance_cm = self._config.get(
         'oblq_trigger_distance_cm')
     self._side_trigger_distance_cm = self._config.get(
         'side_trigger_distance_cm')
     self._log.info('event thresholds:    \t' \
             + Fore.RED   + ' port side={:>5.2f}; port={:>5.2f};'.format(self._side_trigger_distance_cm, self._oblq_trigger_distance_cm) \
             + Fore.BLUE  + ' center={:>5.2f};'.format(self._cntr_trigger_distance_cm) \
             + Fore.GREEN + ' stbd={:>5.2f}; stbd side={:>5.2f}'.format(self._oblq_trigger_distance_cm, self._side_trigger_distance_cm))
     # hardware pin assignments are defined in IO Expander
     # create/configure IO Expander
     self._ioe = IoExpander(config, Level.INFO)
     # these are used to support running averages
     _queue_limit = 2  # larger number means it takes longer to change
     self._deque_cntr = Deque([], maxlen=_queue_limit)
     self._deque_port = Deque([], maxlen=_queue_limit)
     self._deque_stbd = Deque([], maxlen=_queue_limit)
     self._deque_port_side = Deque([], maxlen=_queue_limit)
     self._deque_stbd_side = Deque([], maxlen=_queue_limit)
     self._counter = itertools.count()
     self._thread = None
     self._group = 0
     self._enabled = False
     self._suppressed = False
     self._closed = False
     self._count = 0
     self._log.info('ready.')
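The five deques above feed running averages of the sensor readings; a minimal sketch of how such a mean is typically computed over one of them (the helper name is hypothetical, not part of the class):

 def _get_mean(self, dq, value):
     # Hypothetical helper: push the newest reading and return the mean of the
     # current window; with maxlen=2 the oldest reading falls off automatically.
     dq.append(value)
     return sum(dq) / len(dq)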
Example No. 2
    def evaluateBendSeq(self, bendseq):
        """
        :param bendseq: a BendSequence object
        :return: the given bendseq object, for convenience

        We compute the best places for the prescribed bendtypes to occur and stash them in
        the bendpoints field of the bendseq object, and the cost of creating these bends in
        the cost field.

        The "places" are indices 0, 1, 2, 3, ... which refer to the first node in the chain,
        then the first edge, next node, next edge, and so on, with even numbers meaning nodes
        and odd numbers meaning edges.
        """
        queue = Deque(bendseq.bendtypes)
        i = 0
        cost = 0
        bendpoints = []
        while len(queue) > 1:
            bendtype = queue.popleft()
            i, c = self.nextLocalOptimalPoint(i, bendtype, remaining=len(queue))
            if i is not None:
                bendpoints.append(i)
                cost += c
                i += 1
        if len(queue) == 1:
            bendtype = queue.popleft()
            i, c = self.globalOptimalPoint(bendtype, beginAt=i)
            if i is not None:
                bendpoints.append(i)
                cost += c
                i += 1
        bendseq.bendpoints = bendpoints
        bendseq.cost = cost
        return bendseq
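As a quick illustration of the even/odd indexing scheme described in the docstring (the chain and helper below are purely illustrative, not part of the class):

# For a chain n0 - n1 - n2 the places are indexed as:
#   0 -> n0,  1 -> edge (n0, n1),  2 -> n1,  3 -> edge (n1, n2),  4 -> n2
def place_kind(i):
    """Return 'node' for even place indices and 'edge' for odd ones."""
    return 'node' if i % 2 == 0 else 'edge'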
Example No. 3
	def childrenDeep( self , order = kOrder_BreadthFirst, predicate=lambda x: True ):
		""":return: list of all children of path, [ child1 , child2 ]
		:param order: order enumeration
		:param predicate: returns true if x may be returned
		:note: the child objects returned are supposed to be valid paths, not just relative paths"""
		out = []
		if order == self.kOrder_DepthFirst:
			def depthSearch( child ):
			if not predicate( child ):
					return
				children = child.children( predicate = predicate )
				for c in children:
					depthSearch( c )
				out.append( child )
			# END recursive search method

			depthSearch( self )
		# END if depth first
		elif order == self.kOrder_BreadthFirst:
			childstack = Deque( [ self ] )
			while childstack:
				item = childstack.pop( )
				if not predicate( item ):
					continue
				children = item.children( predicate = predicate )

				childstack.extendleft( children )
				out.extend( children )
			# END while childstack
		# END if breadth first
		return out
Example No. 4
 def __init__(self, config, message_bus, level):
     if config is None:
         raise ValueError('no configuration provided.')
     self._message_bus = message_bus
     self._log = Logger("ext-clock", level)
     _clock_config = config['ros'].get('clock')
     self._loop_freq_hz = _clock_config.get('loop_freq_hz')
     self._log.info('tick frequency: {:d}Hz'.format(self._loop_freq_hz))
     self._dt_s = 1 / self._loop_freq_hz
     self._log.info('period: {}s'.format(self._dt_s))
     self._dt_ms = 50.0  # loop delay in milliseconds
     self._log.info('loop delay: {}ms'.format(self._dt_ms))
     _config = config['ros'].get('external_clock')
     self._pin = _config.get('pin')
     self._log.info('external clock input pin: {}'.format(self._pin))
     GPIO.setmode(GPIO.BCM)
     GPIO.setup(self._pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
     self._enabled = False
     # testing ....................
     self._queue_len = 50  # larger number means it takes longer to change
     self._queue = Deque([], maxlen=self._queue_len)
     self._counter = itertools.count()
     self._last_time = dt.now()
     self._max_error = 0.0
     self._max_vari = 0.0
     self._log.info('ready.')
Example No. 5
 def __init__(self, min_dist=1., num_path_tracking=15):
     self.path = Deque(maxlen=num_path_tracking)
     self.waypoints = []
     self.min_dist = min_dist
     self.x = math.inf
     self.y = math.inf
     self.lastx = 0
     self.lasty = 0
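A sketch of the kind of update such a path tracker typically performs (the method name and logic are hypothetical, inferred from the fields above):

 def update(self, x, y):
     # Hypothetical sketch: record a new path point only after moving at least
     # min_dist from the last recorded point; deque(maxlen=...) then discards
     # the oldest point automatically.
     self.x, self.y = x, y
     if math.hypot(x - self.lastx, y - self.lasty) >= self.min_dist:
         self.path.append((x, y))
         self.lastx, self.lasty = x, y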
Example No. 6
 def __init__(self, name, ticker, message_bus, level=Level.INFO):
     super().__init__(name, message_bus, level)
     self._log.info(Fore.YELLOW + 'MySubscriber-{}: create.'.format(name))
     self._ticker = ticker
     self._ticker.add_callback(self.tick)
     self._discard_ignored = True
     _queue_limit = 10
     self._deque = Deque([], maxlen=_queue_limit)
     self._log.debug('ready.')
Example No. 7
def ciclo_n(grafo, origen, largo):
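    # Backtracking search (the helper _ciclo_n is not shown here) for a cycle
    # of length 'largo' that starts and ends at 'origen'; the cycle is returned
    # as a deque, otherwise the start vertex is popped back off before
    # returning.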
    visitados = set()
    respuesta = Deque()
    respuesta.append(origen)
    visitados.add(origen)
    if _ciclo_n(grafo, origen, largo, visitados, respuesta, origen, 1):
        return respuesta
    respuesta.pop()
    return respuesta
Example No. 8
 def __init__(self, json_string):
     self.json = json_string
     self.tokens = {
         '{': self.Element,
         '[': self.List,
         'value': self.String,
         'orderedlist': self.OrderedList
     }
     self.stack = Deque()
     self.res = "<body>"
Example No. 9
 def __init__(self, socket, write_alarm):
     self._s = socket
     # Note the write queue is accessed by both main and worker threads,
     # so uses thread-safe deque() structure.  The other buffers are only
     # looked at by the main thread, so no locking is required.
     self._write_queue = Deque() # Records waiting to be sent out
     self._alarm = write_alarm # Way to notify we have data to write
     self._write_buf = '' # Raw outgoing data
     self._read_buf = '' # Raw incoming data
     self._packet_buf = [] # Store packets read until have a whole record
Example No. 10
 def __init__(self, name, message_bus, event_types, level=Level.WARN):
     self._log = Logger('subscriber-{}'.format(name), level)
     self._name = name
     self._event_types = event_types
     self._log.debug('Subscriber created.')
     self._message_bus = message_bus
     self._processed = 0
     _queue_limit = 10
     self._deque = Deque([], maxlen=_queue_limit)
     self._log.debug('ready.')
Example No. 11
 def __init__(self, address):
     self._queue = Deque()
     self._s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     self._s.setblocking(0)
     try:
         self._s.connect(address)
     except socket.error as e:
         if e.args[0] in [EINPROGRESS, EWOULDBLOCK]:
             # The peer has not yet called accept(); since this is all done in
             # a single thread we get an "operation in progress" error here.
             # Once the receiving end calls accept(), all is good.
             pass
         else:
             raise
Example No. 12
    def reveal_zeroes(self, index: Coordinate) -> List[Tuple[Coordinate, EntryValue]]:
        """Reveals all adjacent cells if the current Entry has a zero value."""

        # result = []
        # DFS -> Uses call stack
        # def reveal_helper(index: Coordinate) -> None:
        #     if index in self.board.cells_flagged():
        #         return
        #     val = self.board.get_cell_value(index)
        #     if val.is_num_and_g_t_zero():
        #         result.append(self.reveal_cell(index, val))
        #         return
        #     if val.isZero():
        #         result.append(self.reveal_cell(index, val))
        #         for coord in get_adjacent(index):
        #             if self.board.is_valid_cell(coord) and coord not in self.board.cells_revealed():
        #                 reveal_helper(coord)
        # reveal_helper(index)

        # BFS
        queue = Deque()
        queue.appendleft(index)
        result = []
        isBeingExplored = set()
 
        while queue:
            cell = queue.pop()
            val = self.board.get_cell_value(cell)
            if val.is_num_and_g_t_zero(): # val > 0
                if cell in isBeingExplored:
                    isBeingExplored.remove(cell)
                result.append(self.reveal_cell(cell, val))
            else:  # val == 0
                for coord in get_adjacent(cell):
                    if (
                        self.board.is_valid_cell(coord) 
                        and coord not in self.board.cells_revealed() 
                        and coord not in isBeingExplored
                    ):
                        isBeingExplored.add(coord)          
                        queue.appendleft(coord)
                if cell in isBeingExplored:
                    isBeingExplored.remove(cell)
                result.append(self.reveal_cell(cell, val))

        return result
Example No. 13
def bfs(grafo, vertice_inicial):
    visitados = set()
    distancia = {}
    padres = {}
    q = Deque()
    q.append(vertice_inicial)
    padres[vertice_inicial] = None
    visitados.add(vertice_inicial)
    distancia[vertice_inicial] = 0

    while q:
        v = q.popleft()
        for w in grafo.adyacentes(v):
            if w not in visitados:
                visitados.add(w)
                distancia[w] = distancia[v] + 1
                padres[w] = v
                q.append(w)
    return distancia, padres
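The padres map returned above is enough to rebuild a shortest path back to the start vertex; a minimal sketch (this helper is ours, not part of the original module):

def reconstruir_camino(padres, destino):
    # Follow parent pointers from the destination back to the root (whose
    # parent is None), building the path front-to-back with appendleft.
    camino = Deque()
    actual = destino
    while actual is not None:
        camino.appendleft(actual)
        actual = padres[actual]
    return list(camino)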
Example No. 14
 def __init__(self, level):
     self._log = Logger("hwclock", level)
     self.check_boot_config()
     self.configure()
     self._callbacks = []
     _pin = 21  # BCM 21
     self._sensor = DigitalInputDevice(_pin, pull_up=False)
     self._sensor.when_activated = self._activated
     #       self._sensor.when_deactivated = self._deactivated
     self._enabled = False
     # testing ....................
     self._dt_ms = 50.0  # loop delay in milliseconds
     self._queue_len = 50  # larger number means it takes longer to change
     self._queue = Deque([], maxlen=self._queue_len)
     self._counter = itertools.count()
     self._last_time = dt.now()
     self._max_error = 0.0
     self._max_vari = 0.0
     self._log.info('ready.')
Example No. 15
def bfs_min_seguimiento(grafo, vertice_inicial, destino):
    visitados = set()
    distancia = {}
    padres = {}
    q = Deque()
    q.append(vertice_inicial)
    padres[vertice_inicial] = None
    visitados.add(vertice_inicial)
    distancia[vertice_inicial] = 0

    while q:
        v = q.popleft()
        if v == destino: break
        for w in grafo.adyacentes(v):
            if w not in visitados:
                visitados.add(w)
                distancia[w] = distancia[v] + 1
                padres[w] = v
                q.append(w)
    return distancia, padres
Example No. 16
def bfs(matrix, x, y):
    new_matrix = [[int(10e9)] * len(matrix[0]) for _ in range(len(matrix))]
    deque = Deque([[x, y]])
    dx, dy = [-1, 1, 0, 0], [0, 0, 1, -1]
    R, C = len(matrix), len(matrix[0])
    visited = [[False] * C for _ in range(R)]
    visited[x][y] = True
    new_matrix[x][y] = 0
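    # 0-1 BFS: stepping onto '.' keeps the current cost and goes to the front
    # of the deque, while breaking through '#' costs one and goes to the back,
    # so cells are always expanded in non-decreasing cost order.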
    while deque:
        cx, cy = deque.popleft()
        for i in range(4):
            nx, ny = cx + dx[i], cy + dy[i]
            if 0 <= nx and nx < R and 0 <= ny and ny < C:
                if not visited[nx][ny] and matrix[nx][ny] == '.':
                    new_matrix[nx][ny] = new_matrix[cx][cy]
                    deque.appendleft([nx, ny])
                elif not visited[nx][ny] and matrix[nx][ny] == '#':
                    new_matrix[nx][ny] = new_matrix[cx][cy] + 1
                    deque.append([nx, ny])
                visited[nx][ny] = True
    return new_matrix
Example No. 17
    def __init__(self, type):
        # All downloaded symbols are stored in a dict before exporting
        # This is to ensure no duplicate data
        self.symbols = {}
        self.rsession = requests.Session()
        self.type = type
        self.current_queries = None
        self.completed_queries = []
        self.done = False
        self.insecure = None
        self.status_print = None

        # Attempt to deal with API results < 10 not containing all results
        # Assume if results = 10 then there are more
        # Assume if results = 0 or results = 1 then there are no more
        self.result_count_action = [
            # for a result count of 0, False means we know it's complete
            False,
            # for a result count of 1, False means we know it's complete
            False,
            # for a result count of 2 thru 9, None means we don't know
            #  so we assume it's incomplete
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            # for a result count of 10, True means we know it's incomplete
            True
        ]

        # In stage 1, queries are processed FIFO
        # After stage 1, queries are processed LIFO
        self.stage1 = True
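        # (On a deque, FIFO consumption means popleft() and LIFO means pop()
        #  from the right end.)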

        # instantiate the queues
        self.queries = Deque()
        # The queue of queries to be fetched by the threaded downloaders
        #  queries are worked in batches so the downloader state can be saved occasionally
        self.fetch_jobs = Queue()
        # The queue of completed fetches that need to be processed
        # A blocking queue is used for this as well
        self.fetch_returns = Queue()

        # instantiate the "master" query
        self.master_query = Query('', None)
        # put the first real queries in the queue
        self._add_queries(self.master_query, first_search_characters)
        # "0" isn't valid alone as a search string
        #self._add_queries(self.master_query, '0')
        q = self.queries.pop()
        if q.query_string != "0":
            sys.exit("not 0")
        self._add_queries(q, general_search_characters)
        self.completed_queries.append(q)

        # instantiate workers
        # First the multiple http fetchers
        for x in range(100):
            t = Thread(target=self._fetch_worker)
            t.daemon = True
            t.start()
        # there is a single instance to process fetch returns
        # it is a thread so it can work while all the fetchers are idle
        t = Thread(target=self._fetch_processor)
        t.daemon = True
        t.start()
Example No. 18
    def computeOrderingNumbers(self):
        """
        Assign each node an "ordering number," storing these in self.orderingNumbers.
        These numbers are to be used by the self.cmpRankmates function in order to sort
        the nodes of each rank when it comes time to generate constraints for the tree.

        :return: nothing
        """
        # Leaves are simple.
        if self.depth == 1:
            self.isSymmetric = True
            return
        # Proceed for nonleaves:
        C = self.getCTrees()
        # Layout the C-Trees, recursively.
        for t in C:
            t.computeOrderingNumbers()
        # Sort the C-trees into isomorphism classes.
        classes = {}
        for t in C:
            isomstr = t.computeIsomString()
            A = classes.get(isomstr, [])
            A.append(t)
            classes[isomstr] = A
        # Now sort the classes.
        isoms = classes.keys()

        def isomCmp(I, J):
            c = classes[I]
            d = classes[J]
            cd = c[0].depth
            dd = d[0].depth
            cb = c[0].breadth
            db = d[0].breadth
            # Put narrower trees first.
            if cb > db: return 1
            if cb < db: return -1
            # For same breadth, put shallower trees first.
            if cd > dd: return 1
            if cd < dd: return -1
            # Otherwise just compare the isomorphism strings for some
            # way to make this relation deterministic.
            if I < J: return -1
            if I > J: return 1
            return 0

        isoms.sort(cmp=isomCmp)
        # Which classes have odd order?
        oddOrder = {}
        for I in isoms:
            c = classes[I]
            if len(c) % 2 == 1:
                oddOrder[I] = c
        # Determine whether our layout is going to be symmetric or not.
        numOdd = len(oddOrder)
        haveCentralTree = False
        # If there are no odd-order classes, then we are symmetric.
        if numOdd == 0:
            self.isSymmetric = True
        # If there are two or more odd-order classes, then we are not symmetric.
        elif numOdd > 1:
            self.isSymmetric = False
        # Else there is exactly one odd-order class.
        # In this case we are symmetric if and only if (any representative of) the one
        # odd order class is symmetric.
        else:
            self.isSymmetric = oddOrder.values()[0][0].isSymmetric
            # For symmetric layout the trees of odd-order class need to go in the centre,
            # so we put them first in the list, since we work our way outward from the centre
            # when placing the trees.
            oddIsom = oddOrder.keys()[0]
            isoms.remove(oddIsom)
            isoms.insert(0, oddIsom)
        # Now order the trees alternating around the centre, flipping the trees that get placed
        # on the left hand side.
        signedtrees = Deque()
        nextsign = 1
        for I in isoms:
            c = classes[I]
            for t in c:
                op = signedtrees.append if nextsign == 1 else signedtrees.appendleft
                op((nextsign, t))
                nextsign *= -1
        # Set ordering numbers.
        for i, st in enumerate(signedtrees):
            s, t = st
            self.orderingNumbers[t.root.ID] = i
            for ID, j in t.orderingNumbers.items():
                self.orderingNumbers[ID] = s * j
Example No. 19
 def __init__(self, G, r):
     # G must be an acyclic graph or we will raise an exception.
     # (At least it must be acyclic south of the nominated root.)
     self.graph = G
     self.root = r
     self.nodes = {}  # ID of node --> node
     # Initialise fields.
     self.depth = 0
     self.breadth = 0
     self.leaves = {}  # ID of node --> 1
     self.nodesByRank = {}  # rank (int) --> list of Nodes
     self.rankByNodeID = {}  # ID of node --> rank (int)
     self.parent = {}  # ID of child --> parent Node
     self.isSymmetric = False
     self.holaConfig = None
     # For layout, we have a map
     # rank (int) --> [lb, ub]
     # returning the lower and upper bounds on the lateral coordinates
     # of the tree, for each rank (e.g. for NORTH growth direction the
     # bounds are on x-coordinates).
     self.boundsByRank = {}
     # We also keep the global lower and upper bounds over all populated ranks.
     self.lb = 0
     self.ub = 0
     self.orderingNumbers = {}  # ID of node --> its ordering number
     self.bufferNodes = []
     self.pcs = []
     # The following booleans configure how the getBounds function for this
     # tree will work.
     #
     # If the boundary is infinite, then you will always get
     # lower and upper bound for every rank, no matter how high; else you will
     # get None for ranks having no nodes in them.
     #
     # If the boundary is tight then the bounds for each rank cover just the
     # nodes on that rank plus half nodeSep on each end; else the bounds for
     # every rank are equal to the tight bounds for the widest rank.
     #
     # The style of the layout can be configured to some extent by setting
     # these booleans.
     self.boundaryInfinite = False
     self.boundaryTight = True
     #
     self.growthDir = None
     # Compute ranks etc.
     self.setRank(r, 0)
     #queue = [r]
     deque = Deque([r])
     #
     while deque:
         # Pop front of queue.
         node = deque.popleft()
         # Make sure we're not cycling.
         if node.ID in self.nodes:
             msg = 'Trying to construct a Tree on a '
             msg += 'graph with a cycle.'
             raise Exception(msg)
         self.nodes[node.ID] = node
         # Proceed.
         children = node.getChildren()
         rank = self.rankByNodeID[node.ID]
         if not children:
             self.leaves[node.ID] = node
         for c in children:
             self.parent[c.ID] = node
             self.setRank(c, rank + 1)
             deque.append(c)
     ranks = self.nodesByRank.keys()
     self.depth = len(ranks)
     self.breadth = max([len(r) for r in self.nodesByRank.values()])
Example No. 20
    def __init__(self,
                 image: Image,
                 branching_factor: int,
                 neighbours: np.ndarray,
                 description: str = None) -> None:
        """
        Constructs a new PixelNeighbourhoodTree from the given Image
        using the specified neighbourhood mask, branching factor, and
        progress bar description.

        Args:
            image: The Image.
            branching_factor: The maximum number of children a Node in
                              the TSVQ tree can have.
            neighbours: A mask indicating which offsets are considered to
                        be neighbours of the center offset.
            description: Description of the TQDM progress bar.  Setting
                         this parameter to None disables the progress bar.
        """
        assert branching_factor > 1, "The minimum branching factor is two."
        assert (
            len(neighbours.shape) == 2
            and neighbours.shape[0] == neighbours.shape[1]
            and neighbours.shape[0] % 2 == 1
        ), "Neighbours array should be a square matrix with an odd side length."
        self.__image = image
        self.__neighbours = neighbours

        # Construct a 2D Gaussian kernel from the neighbours matrix.  The
        # tiling at the end is necessary to represent each colour channel.
        gaussian_1D = signal.gaussian(len(neighbours),
                                      std=len(neighbours) // 2)
        gaussian_2D = np.outer(gaussian_1D, gaussian_1D)
        self.__weights = np.tile(np.extract(neighbours, gaussian_2D), 3)

        # Precompute the neighbourhoods of each Point in the Image.
        self.__neighbourhoods = np.zeros(shape=(image.height, image.width,
                                                len(self.__weights)),
                                         dtype=np.float64)
        for point in image.cover():
            self.__neighbourhoods[point.y,
                                  point.x, :] = self.__extract_neighbourhood(
                                      image, point)

        # Initialize the root Node with all the Points in the Image.
        self.__root = Node(self.__average_neighbourhood(image.cover()),
                           list(image.cover()))

        # Construct the TSVQ tree by maintaining a frontier of Nodes and
        # applying a variant of Lloyd's algorithm to decompose each Node
        # into several Nodes with smaller numbers of Points.
        frontier = Deque([self.__root])
        with tqdm.tqdm(desc=description,
                       disable=description is None) as progress:
            while frontier:
                parent = frontier.popleft()
                progress.update(1)

                # Skip this Node if it is impossible to uniquely sample
                # the required number of Points.
                if len(parent.points) < branching_factor:
                    continue

                # Initialize the child Nodes at random Points from the
                # parent Node.
                initial_points = random.sample(parent.points, branching_factor)
                neighbourhoods = np.array([
                    self.__neighbourhoods[point.y, point.x, :]
                    for point in initial_points
                ])

                # Iteratively partition the Points among the
                # neighbourhoods and update each neighbourhood to be the
                # centroid of its Points until convergence.
                prev_partition = np.zeros(len(parent.points), dtype=np.uint8)
                next_partition = self.__partition_points(
                    neighbourhoods, parent.points)
                neighbourhoods = self.__erase_empty_neighbourhoods(
                    neighbourhoods, next_partition)
                while np.any(prev_partition != next_partition):
                    # Update the neighbourhoods.
                    for i, neighbourhood in enumerate(neighbourhoods):
                        points = [
                            parent.points[index]
                            for index in np.flatnonzero(next_partition == i)
                        ]
                        neighbourhood[...] = self.__average_neighbourhood(
                            points)
                    # Update the partition.
                    prev_partition = next_partition
                    next_partition = self.__partition_points(
                        neighbourhoods, parent.points)
                    # Remove any neighbourhoods without any Points.
                    neighbourhoods = self.__erase_empty_neighbourhoods(
                        neighbourhoods, next_partition)

                # There is no use expanding a Node that has only one child.
                if len(neighbourhoods) == 1:
                    continue

                # Transform the neighbourhoods into Nodes and add them to
                # both the frontier and the parent Node.
                for i, neighbourhood in enumerate(neighbourhoods):
                    points = [
                        parent.points[index]
                        for index in np.flatnonzero(next_partition == i)
                    ]
                    child = Node(neighbourhood, points)
                    parent.children.append(child)
                    frontier.append(child)
Example No. 21
from collections import deque as Deque

if __name__ == "__main__":
    # I still have no clue how to solve the problem.
    d = Deque([1, 2, 3, 4, 5, 6, 7, 8])
    d[3], d[4] = d[4], d[3]  # Big brain move
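    # Note: unlike a list, indexing into the middle of a deque is O(n), so
    # swaps like this are only cheap near the ends.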
Example No. 22
    def __init__(self, config, queue, clock, message_bus, message_factory,
                 level):
        if config is None:
            raise ValueError('no configuration provided.')
        self._log = Logger("ifs", level)
        self._log.info('configuring integrated front sensor...')
        _cruise_config = config['ros'].get('cruise_behaviour')
        self._cruising_velocity = _cruise_config.get('cruising_velocity')
        self._config = config['ros'].get('integrated_front_sensor')
        self._clock = clock
        self._clock.add_consumer(self)
        self._message_bus = message_bus
        #       _queue = queue
        #       _queue.add_consumer(self)
        self._device_id = self._config.get(
            'device_id'
        )  # i2c hex address of slave device, must match Arduino's SLAVE_I2C_ADDRESS
        self._channel = self._config.get('channel')
        self._ignore_duplicates = self._config.get('ignore_duplicates')
        self._tick_modulo = self._config.get('tick_modulo')
        _max_workers = self._config.get('max_workers')
        self._log.info('tick modulo: {:d}'.format(self._tick_modulo))
        # event thresholds:
        self._callback_cntr_min_trigger = self._config.get(
            'callback_center_minimum_trigger')
        self._callback_side_min_trigger = self._config.get(
            'callback_side_minimum_trigger')
        self._callback_min_trigger = self._config.get(
            'callback_minimum_trigger')
        self._port_side_trigger_distance = self._config.get(
            'port_side_trigger_distance')
        self._port_trigger_distance = self._config.get('port_trigger_distance')
        self._center_trigger_distance = self._config.get(
            'center_trigger_distance')
        self._stbd_trigger_distance = self._config.get('stbd_trigger_distance')
        self._stbd_side_trigger_distance = self._config.get(
            'stbd_side_trigger_distance')
        self._log.info('event thresholds:    \t' \
                +Fore.RED + ' port side={:>5.2f}; port={:>5.2f};'.format(self._port_side_trigger_distance, self._port_trigger_distance) \
                +Fore.BLUE + ' center={:>5.2f};'.format(self._center_trigger_distance) \
                +Fore.GREEN + ' stbd={:>5.2f}; stbd side={:>5.2f}'.format(self._stbd_trigger_distance, self._stbd_side_trigger_distance))
        # hardware pin assignments
        self._port_side_ir_pin = self._config.get('port_side_ir_pin')
        self._port_ir_pin = self._config.get('port_ir_pin')
        self._center_ir_pin = self._config.get('center_ir_pin')
        self._stbd_ir_pin = self._config.get('stbd_ir_pin')
        self._stbd_side_ir_pin = self._config.get('stbd_side_ir_pin')
        self._log.info('infrared pin assignments:\t' \
                +Fore.RED + ' port side={:d}; port={:d};'.format(self._port_side_ir_pin, self._port_ir_pin) \
                +Fore.BLUE + ' center={:d};'.format(self._center_ir_pin) \
                +Fore.GREEN + ' stbd={:d}; stbd side={:d}'.format(self._stbd_ir_pin, self._stbd_side_ir_pin))
        self._port_bmp_pin = self._config.get('port_bmp_pin')
        self._center_bmp_pin = self._config.get('center_bmp_pin')
        self._stbd_bmp_pin = self._config.get('stbd_bmp_pin')
        self._log.info('bumper pin assignments:\t' \
                +Fore.RED + ' port={:d};'.format(self._port_bmp_pin) \
                +Fore.BLUE + ' center={:d};'.format(self._center_bmp_pin) \
                +Fore.GREEN + ' stbd={:d}'.format(self._stbd_bmp_pin))
        if message_factory:
            self._message_factory = message_factory
        else:
            self._message_factory = MessageFactory(level)
#       self._executor = ProcessPoolExecutor(max_workers=_max_workers)
        self._log.info(
            'creating thread pool executor with maximum of {:d} workers.'.
            format(_max_workers))
        self._executor = ThreadPoolExecutor(max_workers=_max_workers,
                                            thread_name_prefix='ifs')
        # config IO Expander
        self._ioe = IoExpander(config, Level.INFO)
        # calculating means for IR sensors
        self._pot = Potentiometer(config, Level.INFO)
        _queue_limit = 2  # larger number means it takes longer to change
        self._deque_port_side = Deque([], maxlen=_queue_limit)
        self._deque_port = Deque([], maxlen=_queue_limit)
        self._deque_cntr = Deque([], maxlen=_queue_limit)
        self._deque_stbd = Deque([], maxlen=_queue_limit)
        self._deque_stbd_side = Deque([], maxlen=_queue_limit)
        # ...
        self._last_event = None
        self._last_value = None
        self._enabled = False
        self._suppressed = False
        self._closed = False
        self._log.info(Fore.MAGENTA + 'ready.')
Example No. 23
 def __init__(self, max_size):
     super(MemoryContainer1D, self).__init__()
     self.mem_queue = Deque()
     assert max_size > 0, 'Memory container should have max size larger than 0.'
     self.max_size = max_size
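Since max_size is tracked separately here, trimming presumably happens on insertion elsewhere in the class; note that a bounded deque can also enforce the cap by itself, as in this standalone sketch (not the class's actual API):

from collections import deque as Deque

mem = Deque(maxlen=3)      # the deque itself enforces the size bound
for item in range(5):
    mem.append(item)       # oldest entries are discarded automatically
assert list(mem) == [2, 3, 4]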
Example No. 24
    def __init__(self,
                 config,
                 clock,
                 motor,
                 setpoint=0.0,
                 sample_time=0.01,
                 level=Level.INFO):
        '''
        :param config:       The application configuration, read from a YAML file.
        :param motor:        The motor to be controlled.
        :param setpoint:     The initial setpoint or target output
        :param sample_time:  The time in seconds before generating a new output value.
                             This PID is expected to be called at a constant rate.
        :param level:        The log level, e.g., Level.INFO.
        '''
        if clock is None:
            raise ValueError('null clock argument.')
        self._clock = clock
        if motor is None:
            raise ValueError('null motor argument.')
        self._motor = motor
        self._orientation = motor.orientation
        self._log = Logger('pid-ctrl:{}'.format(self._orientation.label),
                           level)
        if sys.version_info < (3, 0):
            self._log.error('PID class requires Python 3.')
            sys.exit(1)
        # PID configuration ................................
        if config is None:
            raise ValueError('null configuration argument.')
        _config = config['ros'].get('motors').get('pid-controller')
        #        self._pot_ctrl = _config.get('pot_ctrl')
        #        if self._pot_ctrl:
        #            self._pot = Potentiometer(config, Level.INFO)
        _period_sec = 1.0 / _config.get('sample_freq_hz')
        _kp = _config.get('kp')  # proportional gain
        _ki = _config.get('ki')  # integral gain
        _kd = _config.get('kd')  # derivative gain
        _min_output = _config.get('min_output')
        _max_output = _config.get('max_output')
        self._pid = PID(self._orientation.label,
                        _kp,
                        _ki,
                        _kd,
                        _min_output,
                        _max_output,
                        sample_time=_period_sec,
                        level=level)

        # used for hysteresis, if queue too small will zero-out motor power too quickly
        _queue_len = _config.get('hyst_queue_len')
        self._deque = Deque([], maxlen=_queue_len)

        self._enable_slew = True  #_config.get('enable_slew')
        self._slewlimiter = SlewLimiter(config,
                                        orientation=self._motor.orientation,
                                        level=Level.INFO)
        _slew_rate = SlewRate.NORMAL  # TODO _config.get('slew_rate')
        self._slewlimiter.set_rate_limit(_slew_rate)
        if self._enable_slew:
            self._slewlimiter.enable()
            self._log.info('slew limiter enabled.')
        else:
            self._log.info('slew limiter disabled.')

        self._power = 0.0
        self._last_power = 0.0
        self._enabled = False
        self._closed = False
        #       self._last_steps   = 0
        #       self._max_diff_steps = 0
        self._log.info('ready.')
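The hysteresis deque above is presumably consumed along these lines; a minimal sketch, with the method name and threshold entirely hypothetical:

    def _is_settled(self, power, threshold=0.0):
        # Hypothetical sketch of deque-based hysteresis: record the latest
        # power value and report "settled" only once the whole window agrees,
        # so a single low reading cannot zero the motor power too quickly.
        self._deque.append(abs(power))
        return (len(self._deque) == self._deque.maxlen
                and all(p <= threshold for p in self._deque))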
Example No. 25
def nodewiseACA(G, L, ccs, logger, config):
    """
    :param G: graph to which nodewise ACA is to be applied
    :param L: ideal edge length
    :param ccs: compound constraints vector containing any existing constraints,
                and to which new ones should be added
    :return: the ACA object that we build here
    """
    # Get CoLa structures for G.
    rs, es, ix = G.writeRsEsIx()

    if logger.level >= LogLevel.DEBUG:
        nodes = G.nodes.values()
        nodes.sort(key=lambda u: u.ID)
        print 'ix map for NWACA:'
        for u in nodes:
            print '    ID %s --> index %s' % (u.ID, ix(u))

    # Set node labels for debugging purposes.
    if logger.level >= LogLevel.NODE_IDS_AS_LABELS:
        G.setIDsAsLabels()

    # We'll need to do an FD layout once in a while.
    def fdlayout(op=True):
        alg = adg.ConstrainedFDLayout(rs, es, L, op)
        alg.setConstraints(ccs)
        alg.run()
        G.moveNodesToRects(rs)

    # Sort nodes by degree.
    nodes = G.nodes.values()
    nodes.sort(key=lambda u: u.degree, reverse=True)
    # Exclude all degree-2 nodes.
    for i, u in enumerate(nodes):
        if u.degree == 2: break
    else:
        i += 1
    nodes = nodes[:i]

    # Create an ACA layout object.
    aca = adg.ACALayout3(rs, es, ccs, L, True)
    # Go through nodes one by one, in order, giving each one the best possible
    # configuration that is feasible with the existing constraints.
    if logger.level >= LogLevel.FINER_STAGE_GRAPHS:
        numSteps = 0
    ptr = 0
    mightNeedToShake = True  # says whether we can reduce stress and try a node again
    while ptr < len(nodes):
        centre = nodes[ptr]
        if logger.level >= LogLevel.DEBUG:
            print 'Node: %d, index = %d, deg = %d' % (centre.ID, ix(centre),
                                                      centre.degree)
        # Compute the possible assignments of neighbours to compass directions,
        # in order of descending desirability.
        # First make sure the Node objects have the most up-to-date positions.
        G.moveNodesToRects(rs)
        # Now compute list of possible assignments.
        asgns = Deque(assign.getAssignmentsForNode(centre))
        # Filter out assignments that would result in flat triangles, if we
        # want to disallow those.
        if config.NODE_CONFIG_NO_FLAT_TRIANGLES:
            # Precompute the adjacency matrix.
            am = G.getAdjacencyMatrix()

            # Write filter function.
            def makesFlatTriangle(asgn):
                for i in range(2):
                    s0, s1 = asgn.semis[i], asgn.semis[i + 2]
                    for nbr0 in s0:
                        ID0 = nbr0.ID
                        for nbr1 in s1:
                            ID1 = nbr1.ID
                            if am[ID0].get(ID1, False):
                                return True
                return False

            asgns = Deque(filter(lambda a: not makesFlatTriangle(a), asgns))
        # Logging
        if logger.level >= LogLevel.DEBUG:
            print '%d possible assignments' % len(asgns)
        # If no assignments, move on.
        if len(asgns) == 0:
            ptr += 1
            continue
        # Start trying them, and quit either when one works or when we run out.
        success = False
        oas = adg.OrderedAlignmentPtrs()
        while asgns:
            asgn = asgns.popleft()
            if logger.level >= LogLevel.DEBUG:
                print '    %s' % asgn
            oas.clear()
            for i, semi in enumerate(asgn.semis):
                # If the assignment places no node on semiaxis i, continue
                # to next semiaxis.
                if len(semi) == 0: continue
                for nbr in semi:
                    ci, ni = (ix(centre.ID), ix(nbr.ID))
                    # If these nodes are already logically aligned, then skip this OA.
                    # (This is not just to save time. In fact if you give VPSC redundant
                    #  equality constraints it will mark the second one as unsatisfiable.
                    #  This is because once one constraint is active, it will think the
                    #  other is still inactive and violated. To satisfy that one it
                    #  will try to split the block to which the two variables already
                    #  belong, and fail because there are only equality constraints
                    #  along the path between them. Since we are calling ACA's
                    #  'allOrNothing' method, this one "unsatisfiable" constraint will
                    #  cause the entire node arrangement to fail.)
                    if aca.nodesAreAligned(ci, ni): continue
                    # Else grab the appropriate ACASEPFLAG and create the OA.
                    flag = [
                        adg.ACAEAST, adg.ACASOUTH, adg.ACAWEST, adg.ACANORTH
                    ][i]
                    oa = aca.initOrdAlign(ci, ni, flag,
                                          getEdgeIndex(ci, ni, es))
                    oas.push_back(oa)
            if logger.level >= LogLevel.DEBUG:
                data = (centre.ID, ) + asgn.prettyIDTuple()
                name = 'before_%s.%s.%s.%s.%s' % data
                writeAONcppTest(name, rs, es, ccs, L, oas)
                G.moveNodesToRects(rs)
                gml = G.writeGML()
                f = open('testOut/nwaca_debug/%s.gml' % name, 'w')
                f.write(gml)
                f.close()
            success = aca.applyOAsAllOrNothing(oas)
            if logger.level >= LogLevel.DEBUG:
                print '    fail' if not success else '    success'
            if success: break
        # Now we have either found an assignment that works, or tried them all and
        # none of them worked.
        if success:
            # If any assignment was successful, the ACA object automatically adds the
            # new compound constraints to the ccs vector, which was passed by reference,
            # so we do not need to update that ourselves.
            ptr += 1
            mightNeedToShake = True
            if logger.level >= LogLevel.FINER_STAGE_GRAPHS:
                G.moveNodesToRects(rs)
                logger.writeGML("_03_NWACA_%03d_config" % numSteps, graph=G)
                numSteps += 1
        elif not success and mightNeedToShake:
            # If we were not able to configure this node, it may be that relieving
            # stress in the graph will permit us to configure it.
            # So run an FD layout and try this node again.
            fdlayout(op=True)
            mightNeedToShake = False
            if logger.level >= LogLevel.FINER_STAGE_GRAPHS:
                logger.writeGML("_03_NWACA_%03d_shake" % numSteps, graph=G)
                numSteps += 1
        else:
            # If we have already tried relieving stress once, and we /still/ couldn't
            # apply any assignment, then we give up on this node and move on to the next.
            # Note that we must leave mightNeedToShake equal to False, because since we have
            # tried shaking once already, there is no reason to shake again until at least
            # one more node has been configured.
            ptr += 1
    # Accept final positions.
    G.moveNodesToRects(rs)
    if logger.level >= LogLevel.STAGE_GRAPHS:
        logger.writeGML("_03_NWACA_done", graph=G)
    return (aca, rs, es)
Example No. 26
    def buildBufferNodesAndPCs(self, iel, dp, doBuildBufferNodes=True):
        """
        :param iel: ideal edge length for the graph
        :param dp: placement direction for this tree

        This method will be for the more complicated set of buffer
        nodes, which fit closer to the shape of the tree, if we decide
        to try that.
        See method by same name in TreePlacement class.

        We build buffer nodes to go on top of leaves and on the outside sides
        of nodes at the ends of ranks.

        The PCs will not only constrain the buffer nodes beside tree nodes, but
        will also maintain the shape of the tree: an alignment for each rank,
        sepcos maintaining both ordering and gaps within each rank, and either
        a rigid distribution on the ranks, or at least min gaps between them.
        """
        bns = []
        pcs = []
        if self.growthDir in Compass.vertical:
            axialDim, transDim = adg.YDIM, adg.XDIM
            axialCoord, transCoord = (lambda u: u.y), (lambda u: u.x)
            axialMeasure, transMeasure = (lambda u: u.h), (lambda u: u.w)
        else:
            axialDim, transDim = adg.XDIM, adg.YDIM
            axialCoord, transCoord = (lambda u: u.x), (lambda u: u.y)
            axialMeasure, transMeasure = (lambda u: u.w), (lambda u: u.h)
        # Build buffer nodes if requested.
        if doBuildBufferNodes:
            pad = iel / 4.0

            def makeNode(x, X, y, Y, node=None):
                if node is not None:
                    u = node
                else:
                    u = Node()
                    u.ID = self.graph.getNextID()
                    u.fill = '#C0804080'
                    u.setIDAsLabel()
                u.w, u.h = X - x, Y - y
                u.x, u.y = x + u.w / 2.0, y + u.h / 2.0
                return u

            existingBNs = Deque(self.bufferNodes)
            # Pads on tops of leaves:
            for ID in self.leaves:
                leaf = self.nodes[ID]
                x, X, y, Y = leaf.boundingBoxxXyY()

                bn = existingBNs.popleft() if len(existingBNs) > 0 else None

                if self.growthDir == Compass.NORTH:
                    bn = makeNode(x, X, y - pad, y, node=bn)
                elif self.growthDir == Compass.SOUTH:
                    bn = makeNode(x, X, Y, Y + pad, node=bn)
                elif self.growthDir == Compass.EAST:
                    bn = makeNode(X, X + pad, y, Y, node=bn)
                else:
                    assert (self.growthDir == Compass.WEST)
                    bn = makeNode(x - pad, x, y, Y, node=bn)

                L, R = (leaf,
                        bn) if self.growthDir in Compass.increasing else (bn,
                                                                          leaf)
                bns.append(bn)
                axialGap = pad / 2.0 + axialMeasure(leaf) / 2.0
                pcs.append(SepCo(axialDim, L, R, axialGap, exact=True))
                pcs.append(SepCo(transDim, L, R, 0, exact=True))
            # Pads on outsides of ranks:
            for i in range(1, self.depth):
                rank = self.nodesByRank[i]
                S = sorted(rank, key=transCoord)
                first, last = S[0], S[-1]
                x, X, y, Y = first.boundingBoxxXyY()
                u, U, v, V = last.boundingBoxxXyY()

                a = existingBNs.popleft() if len(existingBNs) > 0 else None
                b = existingBNs.popleft() if len(existingBNs) > 0 else None

                if self.growthDir in Compass.vertical:
                    a, b = makeNode(x - pad, x, y, Y,
                                    node=a), makeNode(U, U + pad, v, V, node=b)
                else:
                    assert (self.growthDir in Compass.horizontal)
                    a, b = makeNode(x, X, y - pad, y,
                                    node=a), makeNode(u, U, V, V + pad, node=b)

                bns.extend([a, b])
                firstGap = pad / 2.0 + transMeasure(first) / 2.0
                lastGap = pad / 2.0 + transMeasure(last) / 2.0
                pcs.append(SepCo(transDim, a, first, firstGap, exact=True))
                pcs.append(SepCo(axialDim, a, first, 0, exact=True))
                pcs.append(SepCo(transDim, last, b, lastGap, exact=True))
                pcs.append(SepCo(axialDim, last, b, 0, exact=True))
        # Generate the basic tree constraints.
        tallestNodes = []
        for i in range(self.depth):
            rank = self.nodesByRank[i]
            tallestNodes.append(max(rank, key=axialMeasure))
            # Align all nodes within the rank.
            pcs.append(AlignCo(axialDim, shapes=[(u, 0) for u in rank]))
            # Separate them.
            S = sorted(rank, key=transCoord)
            for L, R in zip(S[:-1], S[1:]):
                gap = transMeasure(L) / 2.0 + iel / 2.0 + transMeasure(R) / 2.0
                pcs.append(SepCo(transDim, L, R, gap))
        # Rank separations:
        for i in range(self.depth - 1):
            A, B = tallestNodes[i:i + 2]
            L, R = (A, B) if self.growthDir in Compass.increasing else (B, A)
            gap = axialMeasure(L) / 2.0 + iel / 2.0 + axialMeasure(R) / 2.0
            pcs.append(
                SepCo(axialDim,
                      L,
                      R,
                      gap,
                      exact=self.holaConfig.RIGID_RANK_SEP))
        # Alignments with centre children:
        for p in self.nodes.values():
            if p == self.root and dp in Compass.cwOrds: continue
            Ch = p.getChildren()
            Ch.sort(key=transCoord)
            n = len(Ch)
            m = (n - (n % 2)) / 2
            if n % 2 == 1:
                pcs.append(AlignCo(transDim, shapes=[(p, 0), (Ch[m], 0)]))
            # Try flexible distributions on the "mirror triples?"
            if self.holaConfig.TRY_MIRROR_TRIPLES:
                for i in range(m):
                    a, z = Ch[i], Ch[n - 1 - i]
                    pcs.append(FlexDistCo(transDim, a, p, z))
        DEBUG = False
        if DEBUG:
            print
            print 'Tree constraints for tree rooted at %d:' % self.root.ID
            for pc in pcs:
                print pc
        self.pcs = pcs
        self.bufferNodes = bns
        return (bns, pcs)
Example No. 27
    def traverse(self,
                 predicate=lambda i, d: True,
                 prune=lambda i, d: False,
                 depth=-1,
                 branch_first=True,
                 visit_once=True,
                 ignore_self=1,
                 as_edge=False):
        """:return: iterator yielding of items found when traversing self

        :param predicate: f(i,d) returns False if item i at depth d should not be included in the result

        :param prune:
            f(i,d) return True if the search should stop at item i at depth d.
            Item i will not be returned.

        :param depth:
            define at which level the iteration should not go deeper
            if -1, there is no limit
            if 0, you would effectively only get self, the root of the iteration
            i.e. if 1, you would only get the first level of predecessors/successors

        :param branch_first:
            if True, items will be returned branch first, otherwise depth first

        :param visit_once:
            if True, items will only be returned once, although they might be encountered
            several times. Loops are prevented that way.

        :param ignore_self:
            if True, self will be ignored and automatically pruned from
            the result. Otherwise it will be the first item to be returned.
            If as_edge is True, the source of the first edge is None

        :param as_edge:
            if True, return a pair of items, first being the source, second the
            destination, i.e. tuple(src, dest) with the edge spanning from
            source to destination"""
        visited = set()
        stack = Deque()
        stack.append((0, self, None))  # self is always depth level 0

        def addToStack(stack, item, branch_first, depth):
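            # With branch_first, children are pushed onto the left end while
            # items are popped from the right, so siblings are visited before
            # grandchildren; otherwise children land on the right end and are
            # popped immediately (depth-first).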
            lst = self._get_intermediate_items(item)
            if not lst:
                return
            if branch_first:
                stack.extendleft((depth, i, item) for i in lst)
            else:
                reviter = ((depth, lst[i], item)
                           for i in range(len(lst) - 1, -1, -1))
                stack.extend(reviter)

        # END addToStack local method

        while stack:
            d, item, src = stack.pop()  # depth of item, item, item_source

            if visit_once and item in visited:
                continue

            if visit_once:
                visited.add(item)

            rval = (as_edge and (src, item)) or item
            if prune(rval, d):
                continue

            skipStartItem = ignore_self and (item is self)
            if not skipStartItem and predicate(rval, d):
                yield rval

            # only continue to next level if this is appropriate !
            nd = d + 1
            if depth > -1 and nd > depth:
                continue

            addToStack(stack, item, branch_first, nd)
Example No. 28
File: util.py Project: kthulhu/mrv
def iterNetworkxGraph(graph,
                      startItem,
                      direction=0,
                      prune=lambda i, g: False,
                      stop=lambda i, g: False,
                      depth=-1,
                      branch_first=True,
                      visit_once=True,
                      ignore_startitem=1):
    """:return: iterator yielding pairs of depth, item 
	:param direction: specifies search direction, either :
		0 = items being successors of startItem
		1 = items being predecessors of startItem
	:param prune: return True if item d,i in graph g should be pruned from result.
		d is the depth of item i
	:param stop: return True if item d,i in graph g, d is the depth of item i
		stop the search in that direction. It will not be returned.
	:param depth: define at which level the iteration should not go deeper
		if -1, there is no limit
		if 0, you would only get startitem.
		i.e. if 1, you would only get the startitem and the first level of predecessors/successors
	:param branch_first: if True, items will be returned branch first, otherwise depth first
	:param visit_once: if True, items will only be returned once, although they might be encountered
		several times
	:param ignore_startitem: if True, the startItem will be ignored and automatically pruned from
		the result
	:note: this is an adjusted version of `dge.iterShells`"""
    visited = set()
    stack = Deque()
    stack.append((0, startItem))  # startitem is always depth level 0

    def addToStack(stack, lst, branch_first, dpth):
        if branch_first:
            reviter = ((dpth, lst[i]) for i in range(len(lst) - 1, -1, -1))
            stack.extendleft(reviter)
        else:
            stack.extend((dpth, item) for item in lst)

    # END addToStack local method

    # adjust function to define direction
    directionfunc = graph.successors
    if direction == 1:
        directionfunc = graph.predecessors

    while stack:
        d, item = stack.pop()  # depth of item, item

        if item in visited:
            continue

        if visit_once:
            visited.add(item)

        oitem = (d, item)
        if stop(oitem, graph):
            continue

        skipStartItem = ignore_startitem and (item == startItem)
        if not skipStartItem and not prune(oitem, graph):
            yield oitem

        # only continue to next level if this is appropriate !
        nd = d + 1
        if depth > -1 and nd > depth:
            continue

        addToStack(stack, directionfunc(item), branch_first, nd)