def hex_eq_len(hex_0: deque, hex_1: deque) -> 'pair deque':
    delta_len = len(hex_0) - len(hex_1)
    if delta_len < 0:
        hex_0.extendleft(['0' for _ in range(-delta_len)])
    else:
        hex_1.extendleft(['0' for _ in range(delta_len)])
    return hex_0, hex_1
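# Hypothetical usage sketch for hex_eq_len (my own, not from the original source):
# it assumes the function above is in scope and that both deques hold single hex
# digits as strings, most significant digit first.
from collections import deque

a = deque('1f4')          # deque(['1', 'f', '4'])
b = deque('7')            # deque(['7'])
a, b = hex_eq_len(a, b)
print(list(a), list(b))   # ['1', 'f', '4'] ['0', '0', '7'] -- the shorter one is zero-padded on the left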
def _sample_next_cluster(self, history: deque) -> Tuple[deque, float]:
    """Sample next cluster and corresponding transition time.

    If the key resulting from the current history is not present in the list
    of available transition probabilities, the last cluster label in the
    given history is replaced with the label of the nearest cluster, and a
    new history is searched.

    :param history: current history/past/trajectory
    :type history: deque
    :return: the history with the new cluster appended and the transition time
    :rtype: Tuple[deque, float]
    """
    key = ",".join(map(str, list(history)))
    if key not in self._transition_prob:
        last_cluster = history.pop()
        history.append(self._find_closest_cluster(last_cluster))
        history = self._find_history(history)
        key = ",".join(map(str, list(history)))
    next_cluster = int(
        np.random.choice(self._transition_prob[key][:, 0],
                         p=self._transition_prob[key][:, 1]))
    key += ",{:d}".format(next_cluster)
    history.append(next_cluster)
    return history, self._transition_time[key]
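# Illustrative sketch (an assumption, not taken from the original class) of the
# data layout _sample_next_cluster relies on: _transition_prob maps a
# comma-joined history key to a NumPy array whose first column holds candidate
# cluster labels and whose second column holds their probabilities.
import numpy as np

transition_prob = {
    "0,2": np.array([[1, 0.25],
                     [3, 0.75]]),   # from history "0,2" go to cluster 1 or 3
}
key = "0,2"
next_cluster = int(np.random.choice(transition_prob[key][:, 0],
                                    p=transition_prob[key][:, 1]))
print(next_cluster)  # 1 or 3, sampled with the listed probabilities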
def evaluate(
        self,
        obs_queue: deque,
        agent: Agent,
        num_episode: int = 3,
        render: bool = False,
) -> Tuple[float, List[GymImg]]:
    """evaluate uses the given agent to run the game for a few episodes and
    returns the average reward and the captured frames."""
    self.__env = self.__env_eval
    ep_rewards = []
    frames = []
    for _ in range(self.get_eval_lives() * num_episode):
        observations, ep_reward, _frames = self.reset(render=render)
        for obs in observations:
            obs_queue.append(obs)
        if render:
            frames.extend(_frames)
        done = False
        while not done:
            state = self.make_state(obs_queue).to(self.__device).float()
            action = agent.run(state)
            obs, reward, done = self.step(action)
            ep_reward += reward
            obs_queue.append(obs)
            if render:
                frames.append(self.get_frame())
        ep_rewards.append(ep_reward)
    self.__env = self.__env_train
    return np.sum(ep_rewards) / num_episode, frames
def try_to_queue_node(node: TreeNode, queue: collections.deque,
                      nodes_queued: int, max_nodes: int) -> bool:
    if max_nodes == -1 or nodes_queued < max_nodes:
        queue.append(node)
        return True
    else:
        return False
def dfsTopo_iterative(self, node: Node, curLabel: int, graph: Graph,
                      reverse: bool, nodesOrdered: deque):
    stack = deque()
    stack.append(node)
    while len(stack) > 0:
        curNode: Node = stack.pop()
        if curNode.visited:
            if curNode.topoOrderVal is not None:
                # in a cyclical graph, the same node may be pushed to the stack multiple times
                continue
            curNode.topoOrderVal = curLabel
            nodesOrdered.appendleft(curNode)
            curLabel = curLabel - 1
        else:
            curNode.visited = True
            stack.append(curNode)
            edges = None
            if reverse:
                edges = curNode.inflowEdges
            else:
                edges = curNode.outflowEdges
            if edges is not None:
                for name in edges.keys():
                    tempNode = graph.nodes.get(name)
                    if tempNode is None:
                        print("Error: node not in the graph: %s" % name)
                        continue
                    if tempNode.visited:
                        continue
                    stack.append(tempNode)
    return curLabel
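# Standalone sketch (my own minimal example, not part of the snippet above) of
# the same iterative-DFS pattern dfsTopo_iterative uses: a node is pushed once
# to expand its neighbours and is seen a second time, after they are done, to
# receive its topological position at the left of a deque.
from collections import deque

def topo_order(adj: dict) -> list:
    order = deque()
    placed = set()      # nodes that already received a position
    visited = set()     # nodes whose neighbours were already pushed
    for start in adj:
        if start in visited:
            continue
        stack = [start]
        while stack:
            node = stack.pop()
            if node in visited:
                if node not in placed:  # second visit: neighbours are done
                    placed.add(node)
                    order.appendleft(node)
                continue
            visited.add(node)
            stack.append(node)          # re-push so it is labelled later
            for nxt in adj.get(node, []):
                if nxt not in visited:
                    stack.append(nxt)
    return list(order)

print(topo_order({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}))
# ['a', 'b', 'c', 'd'] -- a valid topological order of this small DAG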
def _remote_executer(self, net_str, setting_idx, expdir, queue: deque):
    self.occupied = True
    cmd = self.exe_cmd
    print('{}: {} {}'.format(self.remote, cmd, expdir), file=stderr)
    result = self.run(cmd, stdin=expdir)
    try:
        result = str(result).split('\n')
        used_time = result[-3]
        result = result[-2]
        assert result.startswith('valid performance: ') and used_time.startswith('running time: '), \
            'Invalid return: %s, %s' % (used_time, result)
        used_time = used_time[len('running time: '):]
        used_time = float(used_time) / 60  # minutes
        result = result[len('valid performance: '):]
        result = float(result)
        queue.appendleft([net_str, setting_idx, (result, used_time)])
        print('task {} is successfully executed, result is {}, using {} min.'
              .format(expdir, result, used_time), file=stderr)
    except Exception:
        queue.appendleft([net_str, setting_idx, expdir])
        print('task {} fails, with return: {}.'.format(expdir, result),
              file=stderr)
    self.occupied = False
class RK_hash_generator:
    """ Rabin & Karp fingerprint generator """

    def __init__(self, blockSize, hashRange):
        """ Initialise Rolling hash """
        if type(blockSize) is int:
            self.block_size = blockSize
        else:
            raise TypeError('Block Size should be an integer value')
        self.prev_hash = 0  # Previous used hash
        self.base = 10
        self.chars = None
        if type(hashRange) is int:
            self.hash_range = hashRange
        else:
            raise TypeError('Hash Range should be an integer value')

    def incremental(self, next_char):
        """ Calculates hash of byte sequence and stores it in the hash table.
        Use 'hash_block_with_history' method first to instantiate generator
        history before using this method. """
        try:
            previous_char = self.chars.popleft()
        except AttributeError:
            # self.chars not defined, no history defined
            raise RuntimeWarning(
                'No history defined, hash_block_with_history should be called first')
        try:
            self.prev_hash = ((self.prev_hash - ord(previous_char) * pow(
                self.base, self.block_size - 1)) * self.base + ord(next_char))
        except TypeError:
            raise TypeError('Incremental buffer should be of size one')
        self.chars.append(next_char)
        return self.prev_hash % self.hash_range

    def hash_block(self, byte_sequence):
        """ Calculates hash of byte sequence """
        # if len(byte_sequence) != self.block_size:
        #     raise BufferError('Byte sequence is %s long instead of %s' % (len(byte_sequence), self.block_size))
        return self._hash_block_unconstrained(byte_sequence) % self.hash_range

    def hash_block_with_history(self, byte_sequence):
        """ Calculate hash of byte sequence """
        if len(byte_sequence) != self.block_size:
            # length of byte sequence must match specified block size
            raise BufferError('Byte sequence is %s long instead of %s' %
                              (len(byte_sequence), self.block_size))
        self.prev_hash = self._hash_block_unconstrained(byte_sequence)
        self.chars = Queue()  # Queue is used to store history
        for char in byte_sequence:
            self.chars.append(char)
        return self.prev_hash % self.hash_range

    def _hash_block_unconstrained(self, byte_sequence):
        """ Calculates hash of byte sequence without modulo """
        h = 0  # initial hash value, starts at zero
        multiplier = self.block_size - 1
        for byte in byte_sequence:
            h += (ord(byte) * pow(self.base, multiplier))
            multiplier -= 1
        return h
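# Hedged, standalone sketch of the rolling recurrence RK_hash_generator.incremental
# applies (assuming the class's Queue name is collections.deque): dropping the
# oldest character and appending a new one reproduces the full hash of the next
# window, so each slide costs O(1) instead of O(block_size).
from collections import deque

base, block_size, hash_range = 10, 4, 101
text = "abcdefgh"

def full_hash(window: str) -> int:
    h = 0
    for i, ch in enumerate(window):
        h += ord(ch) * base ** (block_size - 1 - i)
    return h

window = deque(text[:block_size])
h = full_hash(text[:block_size])
for nxt in text[block_size:]:
    prev = window.popleft()
    h = (h - ord(prev) * base ** (block_size - 1)) * base + ord(nxt)
    window.append(nxt)
    assert h == full_hash("".join(window))   # rolling hash matches a full recompute
    print("".join(window), h % hash_range)   # fingerprint per window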
def _helper(v: Hashable, visited: Set[Hashable], q: collections.deque):
    if v in visited:
        return
    visited.add(v)
    for v_next in g.get(v, []):
        _helper(v_next, visited, q)
    q.appendleft(v)
def get_new_states(acc_data: deque, gyro_data: deque, parameters: dict,
                   data: str, time: int, actions: dict) -> dict:
    """
    :param acc_data: queue with the last ten accelerometer measurements
    :param gyro_data: queue with the last ten gyroscope measurements
    :param parameters: parameters of the system state for the current moment,
        including the level constants to compare angular velocity and
        acceleration with
    :param data: new data from IMU
    :param time: current time counter
    :param actions: current state of each action (swing, spin, clash, stab)
    :return: new actions state
    """
    data = data.split(';')
    accel = list(map(int, data[0].split()))
    acc_data.append(accel)
    gyro = list(map(int, data[1].split()))
    gyro_data.append(gyro)
    a_curr = sum([accel[i] * accel[i] for i in range(3)])
    w_curr = sum([gyro[i] * gyro[i] for i in range(3)])
    update_acc_data(parameters, actions, a_curr, time)
    update_gyro_data(parameters, actions, w_curr, time)
    actions['hit'] = check_hit_with_accelerometer_and_change(
        acc_data, time, parameters, actions['hit'])
    if not actions['swing']:
        actions['swing'] = check_swing(gyro_data, time, parameters)
    if not actions['stab']:
        actions['stab'] = check_stab(acc_data, gyro_data, time, parameters)
    parameters['w_prev'] = w_curr
    return actions
def _parse_single_expr_string(tokens: deque) -> str:
    if not is_deque(tokens):
        raise ParseError('Expected expression')
    if tokens[0] == 'not':
        # expression is not(e), so next, parse the expression e before
        # prepending the ~ operator to it.
        token = tokens.pop()
        e = _parse_single_expr_string(token)
        if '~' in e:
            raise ParseError('Multiple not operators in expression.')
        return '~' + e
    else:
        # expression is a standard Op(param1, param2, etc ...) format
        expr_name = tokens.popleft()
        if not is_string(expr_name):
            raise ParseError('Invalid expression name: {}'.format(expr_name))
        expr_name = expr_name.lower()
        variables = []
        while tokens:
            param = tokens.popleft()
            if not is_string(param):
                raise ParseError('Invalid parameter {} for expression "{}"'.format(param, expr_name))
            if param.startswith('?'):
                variables.append(param[1:].lower())
            else:
                if not param[0].isupper():
                    param = param.capitalize()
                variables.append(param)
        return build_expr_string(expr_name, variables)
def _read_data_thread(sensor: SDS011, q: deque, timeout: int):
    while True:
        meas = sensor.query()
        timestamp = int(time.time())
        q.append((meas, timestamp))
        time.sleep(timeout)
def ambs_unam_dict(ambs: deque, unam: deque):
    """
    Return a dictionary with unam as key and ambs as value.
    :param ambs:
    :param unam:
    :return:
    ******** DEPRECATED ************
    (ambs_unam_split in ambiguous_split should handle this)
    """
    ambs_unam_d = OrderedDict()
    try:
        if len(ambs) != len(unam):
            raise NotSameLengthError
        else:
            while True:
                ambs_unam_d[unam.popleft()] = ambs.popleft()
    except IndexError:
        pass
    except NotSameLengthError:
        print("The length of the AMBS and UNAM deques were different; aborting")
    finally:
        return ambs_unam_d
def get_pages_with_weather_at_place(static_root: str, pages: deque) -> tuple:
    """Find all pages that link to a place, or find all places in a country
    and the links to them."""
    links, another = [], []
    while pages:
        response = get(pages.popleft())
        soup = BeautifulSoup(response.text, 'lxml')
        cells = soup.find_all(class_="countryMap-cell")
        for cell in cells:
            for a in cell.find_all('a'):
                if a.text == '>>>':
                    pages.append(f"https://rp5.ru{a['href']}")
                elif a['href'].find('/Погода_в_') > -1:
                    links.append(f'https://rp5.ru{a["href"]}')
                elif a['href'].find('Погода_в_') > -1:
                    links.append(f'https://rp5.ru/{a["href"]}')
                else:
                    another.append(a["href"])
    for page in pages:
        if page in links:
            links.remove(page)
    return links, another
async def radio_receive_task(self, packet_deque: deque):
    print("started radio_receive_task")
    radio_buffer = bytes()
    while True:
        radio_buffer = await self.radio_receive()
        print("Received:", radio_buffer)
        packet_deque.append(radio_buffer)
def _consume_output(printed, spec: deque):
    """
    Helper function: consume the given output from the io spec.
    Raises AssertionError when it encounters problems.
    """
    if not printed:
        return
    elif not spec:
        raise AssertionError(
            'Asking to consume output, but expects no interaction')
    elif _is_input(spec[0]):
        raise AssertionError('Expects input, but trying to print a value')
    elif printed == spec[0]:
        spec.popleft()
    elif callable(spec[0]):
        spec.popleft()(printed, spec)
    elif spec[0].startswith(printed):
        spec[0] = spec[0][len(printed):]
    elif printed.startswith(spec[0]):
        n = len(spec.popleft())
        _consume_output(printed[n:], spec)
    else:
        raise AssertionError(f'Printed wrong value:\n'
                             f'  print: {printed!r}\n'
                             f'  got:   {spec[0]!r}')
def search_solution(states: deque):
    current_state = states[-1]
    if current_state.is_final_state():
        print(" --- Solutions ---")
        visual_validation_bucket = [8, 0, 0]
        for state in states:
            action = state.action
            if action:
                get_water_quality_for_visual_validation(
                    visual_validation_bucket, action)
                print(
                    f"poll {action.amount_of_water}L of water from '{action.poll_from}L bucket' to '{action.add_to}L bucket' -> {visual_validation_bucket}"
                )
            else:
                print(
                    f"water start with 8L, 5L, 3L -> '{visual_validation_bucket}'"
                )
    # trying to use DFS to explore all solutions
    for next_move in explore_next_move(current_state):
        if not is_processed_state(states, next_move):
            states.append(next_move)
            search_solution(states)
            states.pop()
def _traversal(self, nodes: deque) -> List[List[int]]:
    if not nodes:
        return []
    res = []
    is_right_dir = True
    while nodes:
        init_len = len(nodes)
        new_level = []
        for _ in range(init_len):
            if is_right_dir:
                node = nodes.popleft()
                if node.left:
                    nodes.append(node.left)
                if node.right:
                    nodes.append(node.right)
            else:
                node = nodes.pop()
                if node.right:
                    nodes.appendleft(node.right)
                if node.left:
                    nodes.appendleft(node.left)
            new_level.append(node.val)
        res.append(new_level)
        is_right_dir = not is_right_dir
    return res
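# Standalone sketch (my own example, not from the snippet above) of the same
# zigzag level-order idea: consume the deque from one end on even levels and
# from the other end on odd levels, pushing children to the opposite side.
from collections import deque

class _Node:
    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right

def zigzag(root: _Node) -> list:
    if root is None:
        return []
    res, nodes, left_to_right = [], deque([root]), True
    while nodes:
        level = []
        for _ in range(len(nodes)):
            if left_to_right:
                node = nodes.popleft()
                for child in (node.left, node.right):
                    if child:
                        nodes.append(child)
            else:
                node = nodes.pop()
                for child in (node.right, node.left):
                    if child:
                        nodes.appendleft(child)
            level.append(node.val)
        res.append(level)
        left_to_right = not left_to_right
    return res

tree = _Node(3, _Node(9), _Node(20, _Node(15), _Node(7)))
print(zigzag(tree))  # [[3], [20, 9], [15, 7]]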
def remove_from_left(source_deque: deque, max_remove: int) -> None:
    if max_remove < 0:
        raise ValueError(f'Invalid max ({max_remove}) for deque')
    for _ in range(max_remove):
        print(f'Removing {source_deque[0]} from left ... ', end='')
        source_deque.popleft()
        print(f'-> {source_deque}')
def build(self, build_queue: deque):
    for i in range(2 - self.get_build_queue_size()):
        if not build_queue:
            print('Pusta kolejka')  # "Empty queue"
            return
        building_name = build_queue.popleft()
        building_row = self._get_build_row(building_name)
        try:
            build_inactive_displayed = building_row.find_element_by_css_selector(
                'span.inactive').is_displayed()
        except NoSuchElementException:
            build_inactive_displayed = False
        if build_inactive_displayed:
            print(f"Currently you can't build {building_name}")
            build_queue.appendleft(building_name)
            return
        build_button = building_row.find_element_by_css_selector(
            'td.build_options a.btn-build')
        try:
            build_button.click()
            level = int(re.search(r'\d+', build_button.text).group())
            print(f'Built {building_name} at level {level}')
            time.sleep(0.1)
        except StaleElementReferenceException:
            build_queue.appendleft(building_name)
def solution(q: deque, command: list):
    if command[0] == 'push':
        q.append(command[1])
    elif command[0] == 'pop':
        if q:
            print(q.popleft())
        else:
            print(-1)
    elif command[0] == 'size':
        print(len(q))
    elif command[0] == 'empty':
        if q:
            print(0)
        else:
            print(1)
    elif command[0] == 'front':
        if q:
            print(q[0])
        else:
            print(-1)
    elif command[0] == 'back':
        if q:
            print(q[-1])
        else:
            print(-1)
def bombs(effects: deque, casings: deque):
    type_bombs = {
        "Datura Bombs": [40, 0],
        "Cherry Bombs": [60, 0],
        "Smoke Decoy Bombs": [120, 0],
    }
    success = False
    while effects and casings:
        current_casing = casings[-1]
        current_effect = effects[0]
        for bomb, value in type_bombs.items():
            is_found_bomb = False
            if current_casing + current_effect == value[0]:
                type_bombs[bomb][1] += 1
                casings.pop()
                effects.popleft()
                is_found_bomb = True
                break
        if not is_found_bomb:
            casings[-1] -= 5
        if all([value[1] >= 3 for value in type_bombs.values()]):
            success = True
            break
    return success, type_bombs
def ignore_quotation(output: Output, tokens: deque) -> None:
    output.normal_out += tokens.popleft().string
    token = tokens[0]
    while token.val != Tok.quotation:
        output.normal_out += token.string
        tokens.popleft()
        token = tokens[0]
def peek(stack: deque):
    try:
        top = stack.pop()
        stack.append(top)
    except IndexError:
        top = None
    return top
def get_random_solution(self, curr_time: float, individual: list,
                        unvisited_nodes: list, visited_nodes: deque):
    if len(unvisited_nodes) == 0:
        first_node = self.get_node(individual, visited_nodes[0])
        last_node = self.get_node(individual, visited_nodes[-1])
        full_time = curr_time + last_node["dists"][first_node["index"]]
        cost = self.get_cost(individual, visited_nodes)
        return " ".join(str(visited_nodes[i])
                        for i in range(1, len(visited_nodes))), full_time, cost
    valid_nodes = self.possible_nodes(
        curr_time, individual, unvisited_nodes, visited_nodes[-1])
    while self.errors <= self.MAX_ERRORS:
        if len(valid_nodes) == 0 and len(unvisited_nodes) != 0:
            self.errors += 1
            return None
        elt = valid_nodes[randrange(len(valid_nodes))]
        last_node = self.get_node(individual, visited_nodes[-1])
        start, end = self.get_time_window(individual, elt)
        arrive_time = curr_time + last_node["dists"][elt]
        new_time = arrive_time if arrive_time > start else start
        unvisited_nodes.remove(elt)
        valid_nodes.remove(elt)
        visited_nodes.append(elt)
        result = self.get_random_solution(
            new_time, individual, unvisited_nodes, visited_nodes)
        if result is None:
            unvisited_nodes.append(visited_nodes.pop())
        else:
            return result
    return None
async def _part_uploader(
    self,
    upload_id: str,
    object_name: str,
    parts_queue: asyncio.Queue,
    results_queue: deque,
    part_upload_tries: int,
    **kwargs,
):
    backoff = asyncbackoff(
        None, None,
        max_tries=part_upload_tries,
        exceptions=(ClientError,),
    )
    while True:
        msg = await parts_queue.get()
        if msg is DONE:
            break
        part_no, part_hash, part = msg
        etag = await backoff(self._put_part)(  # type: ignore
            upload_id=upload_id,
            object_name=object_name,
            part_no=part_no,
            data=part,
            content_sha256=part_hash,
            **kwargs,
        )
        log.debug(
            "Etag for part %d of %s is %s", part_no, upload_id, etag,
        )
        results_queue.append((part_no, etag))
def solution(a: deque):
    if len(a) == 1:
        return a[0]
    else:
        a.popleft()
        a.append(a.popleft())
        return solution(a)
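# Hypothetical usage sketch (my own, not part of the snippet): the recursion
# discards the front card and moves the next one to the back until a single
# card remains. Note it recurses once per discarded card, so very large inputs
# can hit Python's recursion limit.
from collections import deque

print(solution(deque(range(1, 7))))  # 4: the last card left out of 1..6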
def value(x: deque):
    result = []
    while True:
        if len(x) > 0:
            if x[0].isalpha():
                return False
            elif isinteger(x[0]):
                result.append(x.popleft())
            else:
                b, a = result.pop(), result.pop()
                if x[0] == "+":
                    result.append(sum(a, b))
                elif x[0] == "-":
                    result.append(difference(a, b))
                elif x[0] == "*":
                    result.append(multiply(a, b))
                elif x[0] == "^":
                    result.append(power(a, b))
                else:
                    result.append(divide(a, b))
                x.popleft()
        else:
            break
    if len(result) > 1:
        return "Invalid Expression"
    else:
        if result[0] == int(result[0]):
            return int(result[0])
        else:
            return result[0]
def to_array(self):
    arr = Dq()
    cur = self.head
    while cur:
        arr.append(cur.val)
        cur = cur.next
    return arr
async def port_receive(recv_port: trio.SocketStream, packet_deque: deque):
    print("calling port_receive")
    async for data in recv_port:
        print("port_received", data)
        if data != b"":
            packet_deque.append(raw_to_base64(data))
            print("packet queue:", packet_deque)
def random_move_target(queue: collections.deque, opponent_board_view: Board) -> Point:
    """
    Randomly choose an empty square to shoot at when in 'hunt' mode;
    if hit, change to 'target' mode and focus first on neighbor cells.
    """
    # decide moving mode
    if len(queue) == 0:
        mode = 'hunt'
    else:
        mode = 'target'
    # make shot
    if mode == 'hunt':
        empty_xs, empty_ys = np.where(opponent_board_view.shots == NO_SHOT)
        choice = randint(0, len(empty_xs) - 1)
        shot = Point(x=empty_xs[choice], y=empty_ys[choice])
    elif mode == 'target':
        shot = queue.popleft()
        while opponent_board_view.shots[shot.x, shot.y] != NO_SHOT:
            if len(queue) > 0:
                shot = queue.popleft()
            else:
                shot = random_move_target(queue, opponent_board_view)
    # add neighbors to queue if the shot is successful
    if opponent_board_view.has_ship[shot.x, shot.y]:
        up = Point(x=shot.x, y=max(shot.y - 1, 0))
        down = Point(x=shot.x, y=min(shot.y + 1, 9))
        left = Point(x=max(shot.x - 1, 0), y=shot.y)
        right = Point(x=min(shot.x + 1, 9), y=shot.y)
        neighbors = [up, down, left, right]
        queue.extend(neighbors)
    return shot
def program(instructions: list, pid: int, out_queue: deque, in_queue: deque):
    registers = defaultdict(lambda: 0)
    registers["p"] = pid
    pointer = 0
    while 0 <= pointer < len(instructions):
        i, x, y, *_ = (instructions[pointer] + " !").split(" ")
        x_v = int(x) if is_digit(x) else registers[x]
        y = int(y) if is_digit(y) else registers[y]
        if i == "snd":
            out_queue.append(x_v)
            yield "send"
        elif i == "set":
            registers[x] = y
        elif i == "add":
            registers[x] += y
        elif i == "mul":
            registers[x] *= y
        elif i == "mod":
            registers[x] %= y
        elif i == "rcv":
            while len(in_queue) == 0:
                yield "wait"
            else:
                registers[x] = in_queue.popleft()
                yield "received"
        elif i == "jgz":
            if x_v > 0:
                pointer += (y - 1)
        pointer += 1
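# Standalone sketch (my own, not from the snippet above) of the coordination
# pattern `program` relies on: two generator instances exchange values through
# a pair of deques, yielding "wait" when their inbox is empty so a driver loop
# can interleave them and detect deadlock.
from collections import deque

def pinger(pid, outbox: deque, inbox: deque, rounds=3):
    for n in range(rounds):
        outbox.append((pid, n))
        yield "send"
        while not inbox:
            yield "wait"
        inbox.popleft()
        yield "received"

q0, q1 = deque(), deque()
a = pinger(0, outbox=q0, inbox=q1)
b = pinger(1, outbox=q1, inbox=q0)
while True:
    sa, sb = next(a, None), next(b, None)
    if sa is None and sb is None:
        print("both generators finished; queues empty:", not q0 and not q1)
        break
    if sa == "wait" and sb == "wait" and not q0 and not q1:
        print("deadlock: both waiting on empty queues")
        break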
def analyze_result(self, results: deque, debounce: int, repeat_every: int):
    """Analyze result according to check definition.

    :param results: deque object with last results according to debounce or repeat_every
    :param debounce: debounce value from check data
    :param repeat_every: repeat_every value from check data
    :return: (trigger status, event_type)
    :rtype: tuple
    """
    last_result = CachedResult(RESULT_UNKNOWN, RESULT_UNKNOWN, False, "")
    result_length = len(results)
    results_list = list(results)
    if result_length < debounce:
        # not enough values to proceed with analyze
        return RESULT_OK, None
    if result_length == debounce + 1:
        last_result = results.popleft()
    elif result_length == debounce:
        last_result = CachedResult(RESULT_OK, RESULT_OK, False, "")
    elif result_length > debounce:
        last_result = results_list[-(debounce + 1)]
    debounce_probes = results_list[result_length - debounce:]
    all_debounce_fail = all(result.status in FAILED_STATUSES
                            for result in debounce_probes)
    if all([all_debounce_fail, last_result.status in OK_STATUSES]):
        return RESULT_FAILED, 'trigger'
    try:
        if all([all_debounce_fail,
                results_list[-(repeat_every + 1)].alert_sent]):
            return RESULT_FAILED, 'trigger'
    except IndexError:
        pass
    latest_result = results.pop()
    if all([last_result.status in FAILED_STATUSES,
            all(result.status in FAILED_STATUSES for result in results),
            latest_result.status == RESULT_OK]):
        if latest_result.hysteresis != RESULT_OK:
            return RESULT_FAILED, None
        if latest_result.hysteresis == RESULT_OK:
            return RESULT_OK, 'resolve'
    if all([all(result.status in OK_STATUSES for result in results),
            latest_result.status == RESULT_FAILED]):
        return RESULT_FAILED, None
    if all([all_debounce_fail, last_result.status in FAILED_STATUSES]):
        return RESULT_FAILED, None
    return RESULT_OK, None
# Python 2 snippet (print statements, urllib2 and mechanize); kept as-is.
class spider:
    _plugins = []
    _baseUrl = ""
    _errors = {}

    def __init__(self, plugins, blacklist):
        self._plugins = plugins
        self._visited = set()
        self._queue = Queue()
        self._blacklist = set(blacklist)

    def spider(self, url):
        self._queue.append(url)
        self._baseUrl = url
        try:
            while 1:
                url = self._queue.pop()
                self._visit(url)
        except IndexError:
            pass

    def _visit(self, url):
        if url in self._visited:
            return
        print "visiting: " + url
        self._visited.add(url)
        br = mechanize.Browser()
        try:
            resp = br.open(url)
        except urllib2.HTTPError, e:
            self._errors[e.geturl()] = [e.getcode()]
            return
        if not br.viewing_html():
            return
        for plugin in self._plugins:
            plugin.parsePage(br)
        unique = set()
        for l in br.links():
            if l.absolute_url[0:len(self._baseUrl)] == self._baseUrl and not l.absolute_url in self._blacklist:
                visitableUrl = l.absolute_url.split("#")[0]
                if not visitableUrl in unique and not visitableUrl in self._visited:
                    self._queue.append(visitableUrl)
                    unique.add(visitableUrl)
                    print "found: " + visitableUrl
        print "visited: " + url
def filterMsgs(self, wrappedMsgs: deque) -> deque:
    """
    Filters messages by view number so that only the messages that have the
    current view number are retained.

    :param wrappedMsgs: the messages to filter
    """
    filtered = deque()
    while wrappedMsgs:
        wrappedMsg = wrappedMsgs.popleft()
        msg, sender = wrappedMsg
        if hasattr(msg, f.VIEW_NO.nm):
            reqViewNo = getattr(msg, f.VIEW_NO.nm)
            if reqViewNo == self.viewNo:
                filtered.append(wrappedMsg)
            elif reqViewNo > self.viewNo:
                logger.debug(
                    "{}'s elector queueing {} since it is for a later view".format(
                        self.name, wrappedMsg))
                self.pendMsgForLaterView((msg, sender), reqViewNo)
            else:
                self.discard(wrappedMsg,
                             "its view no {} is less than the elector's {}"
                             .format(reqViewNo, self.viewNo),
                             logger.debug)
        else:
            filtered.append(wrappedMsg)
    return filtered
def _parse_formula(formula: deque) -> list:
    if not is_deque(formula):
        raise ParseError('Invalid formula: {}'.format(formula))
    if len(formula) == 0:
        raise ParseError('Formula is empty')
    expr_lst = []
    token = formula.popleft()
    if not is_string(token):
        raise ParseError('Invalid token for start of formula: {}'.format(token))
    if token.lower() == 'and':
        # preconds and effects only use the 'and' keyword
        exprs = _parse_expr_list(formula)
        expr_lst.extend(exprs)
    else:
        # parse a single expression
        formula.appendleft(token)
        expr_lst.append(_parse_single_expr_string(formula))
    return expr_lst
def _parse_effects(self, tokens: deque) -> bool:
    if not is_deque(tokens):
        raise ParseError('Invalid effects list for action "{}": {}'.format(self._action_name, tokens))
    if len(tokens) == 0:
        raise ParseError('Missing effects list for action "{}".'.format(self._action_name))
    effects_seq = tokens.popleft()
    self._effects = _parse_formula(effects_seq)
    return True
def _parse_preconditions(self, tokens: deque) -> bool:
    if not is_deque(tokens):
        raise ParseError('Invalid precondition list for action "{}": {}'.format(self._action_name, tokens))
    if len(tokens) == 0:
        raise ParseError('Missing precondition list for action "{}".'.format(self._action_name))
    precond_seq = tokens.popleft()
    self._preconditions = _parse_formula(precond_seq)
    return True
def _parse_goal(self, tokens: deque) -> bool:
    if not is_deque(tokens):
        raise ParseError('Invalid goal list after :goal keyword')
    if len(tokens) == 0:
        raise ParseError('Missing goal list after :goal keyword')
    goal_list = tokens.popleft()
    self.goals = _parse_formula(goal_list)
    return True
def transform_sexprs(tokens: deque):
    """Read an expression from a sequence of tokens."""
    if len(tokens) == 0:
        raise ParseError('unexpected EOF while reading {}'.format(filename))
    token = tokens.popleft()
    if '(' == token:
        D = deque()
        try:
            while tokens[0] != ')':
                D.append(transform_sexprs(tokens))
            tokens.popleft()  # pop off ')'
            return D
        except IndexError:
            raise ParseError('unexpected EOF while reading {}'.format(filename))
    elif ')' == token:
        raise ParseError('unexpected ) in {}'.format(filename))
    else:
        return token
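# Standalone sketch (my own, not part of the parser above) showing how a
# PDDL/Lisp-style string is typically tokenised before a reader like
# transform_sexprs turns it into nested deques; the tokenize/read helpers here
# are assumptions for illustration only.
from collections import deque

def tokenize(text: str) -> deque:
    return deque(text.replace('(', ' ( ').replace(')', ' ) ').split())

def read(tokens: deque):
    token = tokens.popleft()
    if token == '(':
        out = deque()
        while tokens[0] != ')':
            out.append(read(tokens))
        tokens.popleft()  # drop ')'
        return out
    return token

print(read(tokenize("(and (on ?x ?y) (clear ?x))")))
# deque(['and', deque(['on', '?x', '?y']), deque(['clear', '?x'])])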
def hash_block_with_history(self, byte_sequence):
    """ Calculate hash of byte sequence """
    if len(byte_sequence) != self.block_size:
        # length of byte sequence must match specified block size
        raise BufferError('Byte sequence is %s long instead of %s' %
                          (len(byte_sequence), self.block_size))
    self.prev_hash = self._hash_block_unconstrained(byte_sequence)
    self.chars = Queue()  # Queue is used to store history
    for char in byte_sequence:
        self.chars.append(char)
    return self.prev_hash % self.hash_range
def _parse_parameters(self, tokens: deque) -> bool:
    if is_deque(tokens) and len(tokens) > 0:
        param_list = tokens.popleft()
        if not is_deque(param_list):
            raise ParseError('Expected parameter list for action "{}"'.format(self._action_name))
        try:
            self._parameters = _parse_variables(param_list, self._types)
        except IndexError:
            raise ParseError('Error parsing parameter list for action "{}"'.format(self._action_name))
    return True
def _parse_define(self, tokens: deque) -> bool:
    if not is_deque(tokens):
        raise ParseError('Domain list not found after define statement')
    domain_seq = tokens.popleft()
    if is_deque(domain_seq) and len(domain_seq) == 0:
        raise ParseError('Domain list empty')
    token = domain_seq.popleft()
    if token != 'domain':
        raise ParseError('Domain keyword not found after define statement')
    if is_deque(domain_seq) and len(domain_seq) == 0:
        raise ParseError('Domain name not found in domain list')
    self.domain_name = domain_seq.popleft()
    return True
def _parse_define(self, tokens: deque) -> bool:
    if not is_deque(tokens) or len(tokens) == 0:
        raise ParseError('Expected problem list after define statement')
    problem_seq = tokens.popleft()
    if not is_deque(problem_seq):
        raise ParseError('Invalid problem list after define statement')
    if len(problem_seq) == 0:
        raise ParseError('Missing problem list after define statement')
    token = problem_seq.popleft()
    if token != 'problem':
        raise ParseError('Problem keyword not found after define statement')
    self.problem_name = problem_seq.popleft()
    return True
def handleAllSync(self, deq: deque, limit=None) -> int:
    """
    Synchronously handle all items in a deque.

    :param deq: a deque of items to be handled by this router
    :param limit: the number of items in the deque to be handled
    :return: the number of items handled successfully
    """
    count = 0
    while deq and (not limit or count < limit):
        count += 1
        msg = deq.popleft()
        self.handleSync(msg)
    return count
async def handleAll(self, deq: deque, limit=None) -> int:
    """
    Handle all items in a deque. Can call asynchronous handlers.

    :param deq: a deque of items to be handled by this router
    :param limit: the number of items in the deque to be handled
    :return: the number of items handled successfully
    """
    count = 0
    while deq and (not limit or count < limit):
        count += 1
        item = deq.popleft()
        await self.handle(item)
    return count
async def handleAll(self, deq: deque, limit=None) -> int:
    """
    Handle multiple messages passed as a deque.

    :param deq: a deque of messages to be handled by this router
    :param limit: the number of messages in the deque to be handled
    :return: the number of messages handled successfully
    """
    count = 0
    while deq and (not limit or count < limit):
        count += 1
        msg = deq.popleft()
        await self.handle(msg)
    return count
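# Minimal synchronous sketch (my own, not the router these methods belong to)
# of the drain-with-limit pattern used by handleAllSync/handleAll: messages are
# popped from the left until the deque is empty or the limit is reached, and
# the number handled is returned.
from collections import deque

def handle_all_sync(deq: deque, handler, limit=None) -> int:
    count = 0
    while deq and (not limit or count < limit):
        count += 1
        handler(deq.popleft())
    return count

inbox = deque(["ping", "status", "stop", "ping"])
handled = handle_all_sync(inbox, handler=print, limit=3)
print(handled, list(inbox))  # 3 ['ping'] -- one message left for the next pass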
def match_tokens(tokens: deque):
    if not is_deque(tokens):
        return False
    item = tokens.popleft()
    if is_string(item):
        item = item.lower()
        for text in match_dict:
            if item.startswith(text):
                if match_dict[text](tokens):
                    break
    elif is_deque(item):
        match_tokens(item)
    else:
        raise ParseError('Unexpected token: {}'.format(item))
    return True
def _expand_all(root):
    q = Queue()
    q.append((root, ''))
    paths = []
    while q:
        node, path = q.popleft()
        paths.append(path)
        for c in PathParser._search_graph.get(node, []):
            q.append((c, path + '/' + c))
    return paths
def _expand_gap(root, end):
    q = Queue()
    q.append((root, ''))
    paths = []
    while q:
        node, path = q.popleft()
        if node == end:
            paths.append(path)
        else:
            for c in PathParser._search_graph.get(node, []):
                q.append((c, path + '/' + c))
    return paths
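# Standalone sketch (my own, with a plain dict standing in for
# PathParser._search_graph) of the BFS path-expansion idea in _expand_all /
# _expand_gap: each queue entry carries the node plus the path string built so
# far, so complete paths fall out as nodes are dequeued.
from collections import deque

graph = {'root': ['a', 'b'], 'a': ['leaf'], 'b': ['leaf'], 'leaf': []}

def expand_gap(root: str, end: str) -> list:
    q = deque([(root, '')])
    paths = []
    while q:
        node, path = q.popleft()
        if node == end:
            paths.append(path)
        else:
            for child in graph.get(node, []):
                q.append((child, path + '/' + child))
    return paths

print(expand_gap('root', 'leaf'))  # ['/a/leaf', '/b/leaf']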
def _parse_predicates(self, tokens: deque) -> bool:
    while tokens:
        if not is_deque(tokens):
            raise ParseError('Valid list not found after :predicates keyword')
        predicate = tokens.popleft()
        if not is_deque(predicate):
            raise ParseError('Invalid predicate: {}'.format(predicate))
        pred_name = predicate.popleft()
        if not is_string(pred_name):
            raise ParseError('Invalid predicate name: {}'.format(pred_name))
        if not is_deque(predicate):
            raise ParseError('Invalid predicate variable list: {}'.format(predicate))
        try:
            new_predicate = [pred_name] + _parse_variables(predicate, self._types)
        except IndexError:
            raise ParseError('Error parsing variables for predicate {}'.format(pred_name))
        self.predicates.append(new_predicate)
    return True
def breadth_first_search(startnode, goalnode):
    queue = Queue()
    queue.append(startnode)

    nodesseen = set()
    nodesseen.add(startnode)

    while queue:
        node = queue.popleft()
        if node is goalnode:
            return True
        else:
            queue.extend(node for node in node.successors if node not in nodesseen)
            nodesseen.update(node.successors)

    return False
def filterMsgs(self, wrappedMsgs: deque) -> deque:
    """
    Filters messages by view number so that only the messages that have the
    current view number are retained.

    :param wrappedMsgs: the messages to filter
    """
    filtered = deque()
    while wrappedMsgs:
        wrappedMsg = wrappedMsgs.popleft()
        msg, sender = wrappedMsg
        if hasattr(msg, f.VIEW_NO.nm):
            reqViewNo = getattr(msg, f.VIEW_NO.nm)
            if reqViewNo == self.viewNo:
                filtered.append(wrappedMsg)
            else:
                self.discard(wrappedMsg,
                             "its view no {} is less than the elector's {}"
                             .format(reqViewNo, self.viewNo),
                             logger.debug)
        else:
            filtered.append(wrappedMsg)
    return filtered
def breadth_first_search(startnode, goalnode):
    """
    Input:
        startnode: A digraph node
        goalnode: A digraph node

    Output:
        Whether goalnode is reachable from startnode
    """
    queue = Queue()
    queue.append(startnode)

    nodesseen = set()
    nodesseen.add(startnode)

    while queue:
        node = queue.popleft()
        if node is goalnode:
            return True
        else:
            queue.extend(node for node in node.successors if node not in nodesseen)
            nodesseen.update(node.successors)

    return False
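# Hypothetical usage sketch for breadth_first_search above: the Node class here
# is my own minimal stand-in for a "digraph node" (anything with a .successors
# list), and Queue is assumed to be collections.deque.
from collections import deque as Queue

class Node:
    def __init__(self, name):
        self.name = name
        self.successors = []

a, b, c, d = Node('a'), Node('b'), Node('c'), Node('d')
a.successors = [b, c]
b.successors = [d]

print(breadth_first_search(a, d))  # True: a -> b -> d
print(breadth_first_search(c, d))  # False: d is not reachable from c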
def _load_queue(file_name: str, target: collections.deque):
    r = 0
    try:
        with gzip.open(file_name, 'rt', encoding='utf-8') as rep_file:
            for line in rep_file:
                j = json.loads(line)
                t = j.get('time')
                if t is None:
                    t = 0
                k = j.get('action')
                if k != 'game_end':
                    target.append((t, action.Action(line, 'instruction', None)))
                else:
                    r = t
                    target.append((t, action.Action(line, 'game_end', None)))
    except OSError:
        main.root_logger.error('Corrupted replay file: %s', file_name)
        target.append((0, action.Action('{"action":"game_end","ai_id":-2,"time":0}', 'game_end', None)))
    finally:
        return r
def _append_bothsides(deq: collections.deque) -> Generator[None, Any, None]:
    """Alternately add to each side of a deque."""
    while True:
        deq.append((yield))
        deq.appendleft((yield))
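# Hypothetical usage sketch for _append_bothsides (assuming the generator above
# is in scope): the generator must be primed with next() before values are
# sent; each send() then lands alternately on the right and on the left.
import collections

d = collections.deque()
gen = _append_bothsides(d)
next(gen)      # prime the generator up to its first yield
gen.send(1)    # appended on the right
gen.send(2)    # appended on the left
gen.send(3)    # right again
print(d)       # deque([2, 1, 3])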
def __init__(self, plugins, blacklist):
    self._plugins = plugins
    self._visited = set()
    self._queue = Queue()
    self._blacklist = set(blacklist)
def _parse_domain(self, tokens: deque) -> bool:
    if not is_deque(tokens) or len(tokens) == 0:
        raise ParseError('Expected domain name after :domain keyword')
    self.domain_name = tokens.popleft()
    return True
def _addTileIfAdmissible(deq: collections.deque, t: basic_map.Tile):
    if _getPassable(t) and not t._has_been_visited:
        if t not in deq:
            deq.appendleft(t)