def _start_of_chain_case(cls, ci, chain, construction_lut, processed_intervals,
                         nodes_by_level, s_intervals):
    # print(">>> Chain starting with", ci)
    qnode = QNode.from_chain(chain)
    for ci1, ci2 in pairwise(chain):
        # only_in_1 = (ci1.first_start, ci2.first_start - 1)
        intersection = (ci2.first_start, ci1.first_end)
        # print(ci1, ci2, only_in_1, intersection)
        # qnode.add_child(construction_lut[only_in_1])
        qnode.add_child(construction_lut[intersection])
        processed_intervals.add(ci2)
    for ci1, ci2, ci3 in iter3(chain):
        if ci3.first_start - 1 == ci1.first_end:
            continue
        only_in_2 = (ci1.first_end + 1, ci3.first_start - 1)
        qnode.add_child(construction_lut[only_in_2])
        # print(ci1, ci2, ci3)
    first, second = chain[:2]
    qnode.add_child(construction_lut[(first.first_start, second.first_start - 1)])
    prelast, last = chain[-2:]
    qnode.add_child(construction_lut[(prelast.first_end + 1, last.first_end)])
    # else:
    #     only_in_2 = (ci1.first_end + 1, ci2.first_end)
    #     qnode.add_child(construction_lut[only_in_2])
    # print("Adding QNode", chain[0].first_start, chain[-1].first_end)
    # print("  |- And ", chain[0].first_start, chain[0].first_end)
    construction_lut[chain[0].first_start, chain[-1].first_end] = qnode
    construction_lut[chain[0].first_start, chain[0].first_end] = qnode
    nodes_by_level[s_intervals.reverse_index[ci]].append(qnode)
def flush(self):
    self.boundaries.append((None, datetime.now(),))
    ranges = funcy.pairwise(self.boundaries)
    self.boundaries = []
    self.current_app = None
    date_str = lambda d: d.strftime("%Y-%m-%d %H:%M:%S")
    return [(app, date_str(start), date_str(end))
            for (app, start), (_, end) in ranges]
def split_with(sep_idxs, li):
    ''' If sep_idxs is empty, this yields nothing: chaining [len(li)] onto an
    empty sep_idxs gives a single-element iterable, and pairwise over a single
    element produces no pairs. '''
    for s, t in F.pairwise(I.chain(sep_idxs, [len(li)])):
        yield li[s:t]
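# Usage sketch for split_with above (assuming, as the aliases suggest, that F is
# funcy and I is itertools; not stated in the original). Note that everything
# before the first separator index is dropped unless sep_idxs starts with 0.
assert list(split_with([0, 2, 5], "abcdefg")) == ["ab", "cde", "fg"]
assert list(split_with([2, 5], "abcdefg")) == ["cde", "fg"]
assert list(split_with([], "abcdefg")) == []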
def test_prop__range_search(gen):
    ixys = gen['ixys']
    mode = gen['mode']; xORy = c_char(mode.encode())
    min_key = gen['min_key']
    max_key = gen['max_key']
    includeds = gen['includeds']

    n_node, ixy_arr, c_bst, n_inserted = bst_tree(ixys, xORy)
    tup_bst = tup_tree(c_bst[:n_inserted+4])
    #pprint(tup_bst)

    ixy_idxes = (c_int * n_inserted)()
    stack = (c_int * MAX_LEN)()
    n_included = bst.includeds1d(
        c_bst, ixy_arr, xORy, min_key, max_key, ixy_idxes, stack)

    actual_idxes = [int(i) for i in ixy_idxes[:n_included]]
    expect_idxes = F.lmap(F.first, includeds)
    #actual_ixys = F.lmap(cobj2tuple, ixy_idxes)

    assert set(actual_idxes) == set(expect_idxes), \
        f'{actual_idxes} != {expect_idxes}, {tup_bst}'

    key = prop(mode)
    for i1, i2 in F.pairwise(actual_idxes):
        assert key(ixy_arr[i1]) <= key(ixy_arr[i2])
    assert n_included == len(includeds), \
        f'{n_included} != {len(includeds)}, {tup_bst}'
def test_returns_data_sources_ordered_by_id(self):
    self.factory.create_data_source(group=self.factory.org.default_group)
    self.factory.create_data_source(group=self.factory.org.default_group)
    response = self.make_request("get", "/api/data_sources", user=self.factory.user)

    self.assertTrue(
        all(left <= right for left, right in pairwise(response.json)))
def load(data):
    nums = lmap(int, data)
    start, end = nums[0], nums[-1]
    # map cup labels to their successor cup label (cyclic)
    nxt = {c0: c1 for c0, c1 in pairwise(nums)}
    nxt[end] = start
    return Data(nxt=nxt, start=start, end=end)
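# A minimal sketch of the successor-map idea used by load() above: pairwise
# yields adjacent (label, next_label) pairs, so feeding them to dict() produces
# a linked-list-style mapping, and pointing the last label back at the first
# closes the cycle. (Self-contained illustration; uses itertools.pairwise,
# Python 3.10+, rather than whatever pairwise the original imports.)
from itertools import pairwise as _pairwise

cups = [3, 8, 9, 1]
nxt = dict(_pairwise(cups))   # {3: 8, 8: 9, 9: 1}
nxt[cups[-1]] = cups[0]       # close the cycle: 1 -> 3
assert nxt == {3: 8, 8: 9, 9: 1, 1: 3}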
def merge_consecutive_filter_clauses(ir_blocks):
    """Merge consecutive Filter(x), Filter(y) blocks into Filter(x && y) block."""
    new_ir_blocks = [ir_blocks[0]]

    for previous_block, current_block in pairwise(ir_blocks):
        if isinstance(previous_block, Filter) and isinstance(current_block, Filter):
            new_ir_blocks[-1] = Filter(
                BinaryComposition(u'&&', previous_block.predicate, current_block.predicate))
        else:
            new_ir_blocks.append(current_block)

    return new_ir_blocks
def time_command(cmd):
    cprint('Timing "%s": ' % cmd, "green")

    # We will collect unbuffered output with timestamps to measure hang ups.
    # Python buffers output when it's redirected, so this is critical.
    output = []
    env = {**os.environ, "PYTHONUNBUFFERED": "x", "COLUMNS": str(get_cols())}
    start = time.monotonic()

    # Execute command with output redirected to pipe and unbuffered
    proc = subprocess.Popen(
        cmd,
        bufsize=0,
        shell=True,
        env=env,
        cwd=_cwd,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )

    # Collect the combined output as it goes
    while True:
        chunk = proc.stdout.read(1024)
        if not chunk:
            break
        output.append((time.monotonic(), chunk))
        sys.stdout.buffer.write(chunk)
        sys.stdout.flush()

    proc.wait()
    end = time.monotonic()

    # Fail loudly and stop the benchmark
    if proc.returncode != 0:
        raise Exception('Command "{}" failed with code {}'.format(cmd, proc.returncode))

    total = end - start
    cprint("%s s" % total, "green")
    # from pprint import pprint
    # pprint(output)
    return {
        "total": total,
        "in": output[0][0] - start if output else None,
        "out": end - output[-1][0] if output else None,
        "sleep": silent(max)(r[0] - l[0] for l, r in pairwise(output)),
        "output": output,
    }
def column_windows(engine, column, windowsize):
    # Based on: https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/WindowedRangeQuery
    rows = sa.select(
        [column, sa.func.row_number().over(order_by=column).label('rownum')]).alias()
    whereclause = sa.text('rownum %% %d = 1' % windowsize) if windowsize > 1 else None
    query = sa.select([rows.c[column.name]], whereclause).select_from(rows).order_by(rows.c.rownum)

    intervals = [id for id, in engine.execute(query)]
    end_id = intervals[0] if intervals else None

    for start_id, end_id in funcy.pairwise(intervals):
        yield sa.and_(sa.and_(column >= start_id, column < end_id))

    if end_id is not None:
        yield column >= end_id
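# A minimal usage sketch for column_windows (assumptions: `engine` is a bound
# SQLAlchemy 1.x Engine, `users` is a hypothetical Table with an integer `id`
# column, and `process` is a hypothetical per-row handler; none of these come
# from the original recipe). Each yielded whereclause covers roughly one
# windowsize-sized slice of ids.
def windowed_rows(engine, table, column, windowsize):
    for whereclause in column_windows(engine, column, windowsize):
        query = sa.select([table]).where(whereclause).order_by(column)
        for row in engine.execute(query):
            yield row

# for row in windowed_rows(engine, users, users.c.id, 1000):
#     process(row)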
def _assert_mark_location_preceding_optional_traverse(
    ir_blocks: List[BasicBlock],
) -> None:
    """Assert that optional Traverse blocks are preceded by a MarkLocation."""
    # Once all fold blocks are removed, each optional Traverse must have
    # a MarkLocation block immediately before it.
    _, new_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)
    for first_block, second_block in pairwise(new_ir_blocks):
        # Traverse blocks with optional=True are immediately preceded by a MarkLocation block.
        if isinstance(second_block, Traverse) and second_block.optional:
            if not isinstance(first_block, MarkLocation):
                raise AssertionError(
                    "Expected MarkLocation before Traverse with optional=True, "
                    "but none was found: {}".format(ir_blocks))
def _assert_coerce_type_outside_of_fold(ir_blocks: List[BasicBlock]) -> None:
    """Ensure that CoerceType blocks not inside a @fold are followed by a MarkLocation or Filter block."""
    is_in_fold = False
    for first_block, second_block in pairwise(ir_blocks):
        if isinstance(first_block, Fold):
            is_in_fold = True

        if not is_in_fold and isinstance(first_block, CoerceType):
            if not isinstance(second_block, (MarkLocation, Filter)):
                raise AssertionError(
                    "Expected MarkLocation or Filter after CoerceType, "
                    "but none was found: {}".format(ir_blocks))

        if isinstance(second_block, Unfold):
            is_in_fold = False
def part2(data):
    # break the cycle again
    del data.nxt[data.end]

    # extend up to 1 million
    hi = max(data.nxt) + 1
    extension = range(hi, 1_000_000 + 1)
    data.nxt.update(pairwise(extension))

    # re-wire the extension and the cycle
    data.nxt[data.end] = hi
    data.nxt[1_000_000] = data.start

    nxt = play(data.nxt, data.start, moves=10_000_000)

    # multiply the two cup labels after the 1-labeled cup
    result = nxt[1]
    result *= nxt[result]
    return str(result)
def _sanity_check_block_pairwise_constraints(
        ir_blocks: List[BasicBlock]) -> None:
    """Assert that adjacent blocks obey all invariants."""
    for first_block, second_block in pairwise(ir_blocks):
        # Always Filter before MarkLocation, never after.
        if isinstance(first_block, MarkLocation) and isinstance(second_block, Filter):
            raise AssertionError(
                "Found Filter after MarkLocation block: {}".format(ir_blocks))

        # There's no point in marking the same location twice in a row.
        if isinstance(first_block, MarkLocation) and isinstance(second_block, MarkLocation):
            raise AssertionError(
                "Found consecutive MarkLocation blocks: {}".format(ir_blocks))

        # Traverse blocks with optional=True are immediately followed
        # by a MarkLocation, CoerceType or Filter block.
        if isinstance(first_block, Traverse) and first_block.optional:
            if not isinstance(second_block, (MarkLocation, CoerceType, Filter)):
                raise AssertionError(
                    "Expected MarkLocation, CoerceType or Filter after Traverse "
                    "with optional=True. Found: {}".format(ir_blocks))

        # Backtrack blocks with optional=True are immediately followed by a MarkLocation block.
        if isinstance(first_block, Backtrack) and first_block.optional:
            if not isinstance(second_block, MarkLocation):
                raise AssertionError(
                    "Expected MarkLocation after Backtrack with optional=True, "
                    "but none was found: {}".format(ir_blocks))

        # Recurse blocks are immediately preceded by a MarkLocation or Backtrack block.
        if isinstance(second_block, Recurse):
            if not (isinstance(first_block, MarkLocation) or isinstance(first_block, Backtrack)):
                raise AssertionError(
                    "Expected MarkLocation or Backtrack before Recurse, but none "
                    "was found: {}".format(ir_blocks))
def nameres_files(dir, files, debug=False, project="", extra_args=[]):
    if len(files) == 1:
        print("Analyzing file {}".format(files[0]))

    project_flag = ("-P{}".format(project)
                    if project
                    else "--with-default-project")
    extra_args = list(extra_args)
    try:
        args = (["nameres", project_flag, '--all']
                + (['--debug'] if debug else [])
                + list(extra_args) + files)
        out = (subprocess.check_call if debug else subprocess.check_output)(args, cwd=dir)
        if debug:
            return

        results = []
        for res in out.split("Analyzing ")[1:]:
            file_name, _, _, content = res.split("\n", 3)
            content = content.strip()
            file_result = FileResult(file_name, dir)

            content = list(pairwise(
                partition_by(lambda l: 'Resolving xrefs' in l, content.splitlines())))

            for header, res_lines in content:
                file_result.add(Result.construct(file_result, header + res_lines))

            results.append(file_result)

        return results
    except subprocess.CalledProcessError:
        print("Resolution crashed.")
        print("Command line: {}".format(" ".join(args)))
        return []
    except:
        return []
    finally:
        if debug:
            print("Command line: {}".format(" ".join(args)))
def test_prop__range_query2d(gen):
    ixys = gen['ixys']
    min_x = gen['min_x']; max_x = gen['max_x']
    min_y = gen['min_y']; max_y = gen['max_y']
    includeds = gen['includeds']

    n_node, ixy_arr, c_bst, n_inserted = \
        bst_tree(ixys, c_char('x'.encode()))
    tup_bst = tup_tree(c_bst[:n_inserted+4])

    ixy_idxes = (c_int * n_inserted)()
    stack = (c_int * MAX_LEN)()
    n_included = bst.includeds2d(
        c_bst, ixy_arr, min_x, max_x, min_y, max_y, ixy_idxes, stack)

    actual_idxes = [int(i) for i in ixy_idxes[:n_included]]
    expect_idxes = F.lmap(F.first, includeds)

    assert set(actual_idxes) == set(expect_idxes), \
        f'{actual_idxes} != {expect_idxes}, {tup_bst}'
    for i1, i2 in F.pairwise(actual_idxes):
        assert ixy_arr[i1].x <= ixy_arr[i2].x
    assert n_included == len(includeds), \
        f'{n_included} != {len(includeds)}, {tup_bst}'
def sanity_check_ir_blocks_from_frontend(ir_blocks):
    """Assert that IR blocks originating from the frontend do not have nonsensical structure.

    Args:
        ir_blocks: list of BasicBlocks representing the IR to sanity-check

    Raises:
        AssertionError, if the IR has unexpected structure. If the IR produced by the front-end
        cannot be successfully and correctly used to generate MATCH or Gremlin, this is the
        method that should catch the problem.
    """
    if not ir_blocks:
        raise AssertionError(u'Received no ir_blocks: {}'.format(ir_blocks))

    # QueryRoot is always and only the first block.
    if not isinstance(ir_blocks[0], QueryRoot):
        raise AssertionError(u'The first block was not QueryRoot: {}'.format(ir_blocks))
    for block in ir_blocks[1:]:
        if isinstance(block, QueryRoot):
            raise AssertionError(u'Found QueryRoot after the first block: {}'.format(ir_blocks))

    # ConstructResult is always and only the last block.
    if not isinstance(ir_blocks[-1], ConstructResult):
        raise AssertionError(u'The last block was not ConstructResult: {}'.format(ir_blocks))
    for block in ir_blocks[:-1]:
        if isinstance(block, ConstructResult):
            raise AssertionError(u'Found ConstructResult before the last block: '
                                 u'{}'.format(ir_blocks))

    # There are no Traverse / Backtrack / Recurse blocks after an OutputSource block.
    seen_output_source = False
    for block in ir_blocks:
        if isinstance(block, OutputSource):
            seen_output_source = True
        elif seen_output_source:
            if isinstance(block, (Backtrack, Traverse, Recurse)):
                raise AssertionError(u'Found Backtrack / Traverse / Recurse '
                                     u'after OutputSource block: '
                                     u'{}'.format(ir_blocks))

    for first_block, second_block in pairwise(ir_blocks):
        # Always Filter before MarkLocation, never after.
        if isinstance(first_block, MarkLocation) and isinstance(second_block, Filter):
            raise AssertionError(u'Found Filter after MarkLocation block: {}'.format(ir_blocks))

        # There's no point in marking the same location twice in a row.
        if isinstance(first_block, MarkLocation) and isinstance(second_block, MarkLocation):
            raise AssertionError(u'Found consecutive MarkLocation blocks: {}'.format(ir_blocks))

        # Traverse blocks with optional=True are immediately preceded by a MarkLocation block.
        if isinstance(second_block, Traverse) and second_block.optional:
            if not isinstance(first_block, MarkLocation):
                raise AssertionError(u'Expected MarkLocation before Traverse with optional=True, '
                                     u'but none was found: {}'.format(ir_blocks))

        # Traverse blocks with optional=True are immediately followed
        # by a MarkLocation, CoerceType or Filter block.
        if isinstance(first_block, Traverse) and first_block.optional:
            if not isinstance(second_block, (MarkLocation, CoerceType, Filter)):
                raise AssertionError(u'Expected MarkLocation, CoerceType or Filter after Traverse '
                                     u'with optional=True. Found: {}'.format(ir_blocks))

        # CoerceType blocks are immediately followed by a MarkLocation or Filter block.
        if isinstance(first_block, CoerceType):
            if not isinstance(second_block, (MarkLocation, Filter)):
                raise AssertionError(u'Expected MarkLocation or Filter after CoerceType, '
                                     u'but none was found: {}'.format(ir_blocks))

        # Backtrack blocks with optional=True are immediately followed by a MarkLocation block.
        if isinstance(first_block, Backtrack) and first_block.optional:
            if not isinstance(second_block, MarkLocation):
                raise AssertionError(u'Expected MarkLocation after Backtrack with optional=True, '
                                     u'but none was found: {}'.format(ir_blocks))

        # Recurse blocks are immediately preceded by a MarkLocation block.
        if isinstance(second_block, Recurse):
            if not isinstance(first_block, MarkLocation):
                raise AssertionError(u'Expected MarkLocation before Recurse, but none was found: '
                                     u'{}'.format(ir_blocks))

    # There's exactly one QueryRoot / Traverse / Recurse / Backtrack block (total)
    # between any two MarkLocation blocks.
    traversal_blocks = 0
    for block in ir_blocks:
        # Treat QueryRoot as a Backtrack / Recurse / Traverse block,
        # to handle the first MarkLocation.
        if isinstance(block, (Backtrack, Traverse, Recurse, QueryRoot)):
            traversal_blocks += 1
        elif isinstance(block, MarkLocation):
            if traversal_blocks != 1:
                raise AssertionError(u'Expected 1 traversal block between '
                                     u'MarkLocation blocks, but found: '
                                     u'{} {}'.format(traversal_blocks, ir_blocks))
            traversal_blocks = 0

    # Exactly one MarkLocation block is found between a QueryRoot / Traverse / Recurse block,
    # and the first subsequent Traverse, Recurse, Backtrack or ConstructResult block.
    found_start_block = False
    mark_location_blocks = 0
    for block in ir_blocks:
        # Terminate started intervals before opening new ones.
        end_interval_types = (Backtrack, ConstructResult, Recurse, Traverse)
        if isinstance(block, end_interval_types) and found_start_block:
            found_start_block = False
            if mark_location_blocks != 1:
                raise AssertionError(u'Expected 1 MarkLocation block between traversals, found: '
                                     u'{} {}'.format(mark_location_blocks, ir_blocks))

        # Now consider opening new intervals or processing MarkLocation blocks.
        if isinstance(block, MarkLocation):
            mark_location_blocks += 1
        elif isinstance(block, (QueryRoot, Traverse, Recurse)):
            found_start_block = True
            mark_location_blocks = 0
def assert_valid_bst(mode, ixy_map, ixy_arr, tree, n_inserted, n_node):
    ''' tree is bst '''
    key = prop(mode)

    # Num of leaves ixy ref = num of inserted ixys
    # Parent must be positive value except root.
    for i, node in enumerate(tree[1:n_inserted+1]):
        assert node.parent >= 0, (n_inserted, i, pyobj(node))

    # Get ixy idxes from tree structure
    ixy_idxes = all_ixy_idxes(
        #tup_tree(tree[:n_inserted+50]))
        tup_tree(tree[:n_node+100]))
    if DBG: print(f' after[{n_node}]',  #tup_tree(tree[:n_node+10]))
                  [f'{p} {l} {r}' for _, p, l, r in tup_tree(tree[:n_node+10])])  ###########
    if DBG: print('iidxes', ixy_idxes)
    if DBG: print('n_node =', n_node)

    # Inserted number of ixys preserved?
    no0idxes = F.compact([abs(i) for i in ixy_idxes])
    assert n_inserted == len(no0idxes), \
        'ixy_idxes = {}, tup_tree = {}'.format(
            ixy_idxes, tup_tree(tree[:n_inserted+4]))
    # All ixy have unique index.
    assert len(set(no0idxes)) == n_inserted, \
        f'{len(set(no0idxes))} == {n_inserted}'
    # All leaves point ixy(neg idx), not inode.
    assert all(idx <= 0 for idx in ixy_idxes), \
        'ixy_idxes = {}, tree = {}'.format(
            ixy_idxes, tup_tree(tree[:n_inserted+4]))

    # Inserted ixys are sorted in ascending order.
    inserted_ixys = F.lmap(lambda i: ixy_arr[abs(i)], ixy_idxes)
    for ixy1, ixy2 in F.pairwise(inserted_ixys):
        assert key(ixy1) <= key(ixy2), 'tree = {}' \
            .format(tup_tree(tree[:n_inserted+4]))

    # All leaves: l <= r
    leaves = F.lfilter(is_leaf, tree[:n_inserted+4])
    for leaf in leaves:
        l = leaf.left; r = leaf.right
        if l and r:
            l_val = key(ixy_map[abs(l)])
            r_val = key(ixy_map[abs(r)])
            assert l_val <= r_val

    # All inodes must be sorted in ascending order.
    inodes = all_inodes(tup_tree(tree[:n_node+100]))
    for n1, n2 in F.pairwise(inodes):
        k1 = n1[0]; k2 = n2[0]
        assert k1 <= k2

    # Inserted ixys are sorted in ascending order.
    neg_idxeseq = F.mapcat(
        tup(lambda k, p, l, r: ((l,) if l < 0 else ()) + ((r,) if r < 0 else ())),
        inodes)
    ixy_idxes = F.map(abs, neg_idxeseq)
    saved_ixys = F.map(lambda i: pyobj(ixy_arr[i]), ixy_idxes)
    keys = F.lmap(key, saved_ixys)
    for k1, k2 in F.pairwise(keys):
        assert k1 <= k2
import numpy as np
import funcy
import random
from enum import Enum

# number of nodes in each layer
# must be odd
shape = [1, 3, 5, 5, 5, 5, 3, 1]
rows = len(shape)
nodes = sum(shape)
adj = np.zeros((nodes, nodes), np.bool)

n = list(funcy.sums(shape))
print(n)
n_offs = list(funcy.pairwise(n))
print(n_offs)

m = list(funcy.sums([0] + shape))
print(m)
m_offs = list(funcy.pairwise(m))
print(m_offs)

conns = list(zip(n_offs, m_offs))
print(conns)

for ((col_min, col_max), (row_min, row_max)) in conns:
    # Fully connect to next layer
    for row in range(row_min, row_max):
        for col in range(col_min, col_max):
            layer_len = ((col_max - col_min) + (row_max - row_min)) / 2
def is_bouncy(number: int) -> bool:
    pairwise_digits = list(pairwise(to_digits(number)))
    return (not all(a >= b for (a, b) in pairwise_digits)) \
        and (not all(a <= b for (a, b) in pairwise_digits))
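# Quick check of is_bouncy (assumes to_digits returns the decimal digits in
# order, e.g. to_digits(155349) -> [1, 5, 5, 3, 4, 9]; that helper is not shown
# above). A number is bouncy when its digits neither only increase nor only
# decrease.
assert not is_bouncy(134468)   # digits never decrease
assert not is_bouncy(66420)    # digits never increase
assert is_bouncy(155349)       # digits both increase and decrease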