def get_values(observation):
    """Flatten an observation dict into one flat sequence of scalars.

    Other cars, obstacles and stars are each ordered nearest-first by
    ``position_length`` before their field values are concatenated.
    """
    by_distance = itemgetter('position_length')
    values_of = methodcaller('values')
    # Sort nearest-first.  (Original note: might be worth also splitting
    # front vs. rear objects.)
    return flatten(concat(
        observation['my_car'].values(),
        mapcat(values_of, sorted(observation['other_cars'], key=by_distance)),
        mapcat(values_of, sorted(observation['obstacles'], key=by_distance)),
        mapcat(values_of, sorted(observation['stars'], key=by_distance)),
    ))
class Media(Node, Model):
    """Model for a media post.

    Exposes convenience properties that read raw post data via item access
    (``self[...]``) and resolve image/video/manifest URLs, handling both
    single posts and carousel (multi-item) posts.
    """

    def _on_init(self):
        # Best-effort validation: any failure is printed (message plus the
        # object's YAML dump) but never aborts construction.
        try:
            self._validate()
        except Exception as e:
            print('ERROR in validation for Media:')
            print()
            print(str(e))
            print()
            print(self._yaml())
            print()

    _schema = media_schema

    __repr__ = lambda self: f'Media(url={self.url})'

    # id = property(lambda self: self.pk)

    # Canonical post URL derived from the primary key.
    url = property(lambda self: url_from_id(self.pk))

    # for image data, for video posts returns the thumbnail
    # NOTE: the trailing `or []` can never trigger — a one-element list is
    # always truthy; kept as-is.
    images = property(lambda self:
                      list(map(get_image_url, self['carousel_media']))
                      if self['carousel_media']
                      else [get_image_url(self)] or [])

    # for videos, it is a MDP, if it is long it needs to be recomposed
    mpd = property(lambda self:
                   list(map(get_manifest, self['carousel_media'] or []))
                   if self['carousel_media']
                   else [get_manifest(self)] or [])

    # Video URLs; one per carousel item, or a single-element list.
    videos = property(lambda self:
                      list(map(get_video_url, self['carousel_media'] or []))
                      if self['carousel_media']
                      else [get_video_url(self)] or [])

    # def fallback(*args):
    #     first = lambda arr: arr[1:] if len(arr) > 0 else lambda: None
    #     rest = lambda arr: arr[:1]if len(arr) > 0 else []
    #     return first(args)() or fallback(rest(args))

    # Tagged users: tries several raw layouts in order (single post,
    # caption tags, carousel items) and returns the first that works.
    _usertags = property(lambda self: fallback(
        lambda: self['usertags']['in']['user'],
        lambda: list(
            map(lambda x: x['user'], self['usertags']['in'])),
        lambda: list(
            map(lambda x: x['user'], self['caption']['usertags']['in'])),
        lambda: list(
            mapcat(
                lambda data: map(lambda x: x['user'], data['usertags']['in']),
                self['carousel_media'] or [])),
        lambda: list(
            mapcat(
                lambda data: map(lambda x: x['user'], data['caption'][
                    'usertags']['in']),
                self['carousel_media'] or [])
        ),
        lambda: []))
def discretized_and_pointwise_hausdorff(recset1, recset2, k=3):
    """Interval bound on the Hausdorff distance between two rectangle sets.

    Each rectangle is discretized with parameter ``k``; the pointwise
    Hausdorff distance of the samples is padded on both sides by the
    worst-case per-set error to yield a sound interval.
    """
    def sample(recset):
        return [pt for rec in recset for pt in rec.discretize(k)]

    def worst_error(recset):
        return max(rec.error + rec.shortest_edge for rec in recset)

    slack = worst_error(recset1) + worst_error(recset2)
    dist = pointwise_hausdorff(sample(recset1), sample(recset2))
    return mdtr.Interval(max(dist - slack, 0), dist + slack)
def discretized_and_pointwise_hausdorff(recset1, recset2, k=3):
    """Interval bound on the Hausdorff distance between two rectangle sets.

    Samples every rectangle via ``discretize(k)``, measures the pointwise
    Hausdorff distance, then widens it by the combined discretization error.
    """
    xs = [p for r in recset1 for p in r.discretize(k)]
    ys = [p for r in recset2 for p in r.discretize(k)]
    slack = (max(r.error + r.shortest_edge for r in recset1)
             + max(r.error + r.shortest_edge for r in recset2))
    d12 = pointwise_hausdorff(xs, ys)
    return Interval(max(d12 - slack, 0), d12 + slack)
def _extract_date_tags(page: BeautifulSoup, tags_file: str, verbose: bool = False):
    """Extract date candidates from *page* using tag specs loaded from YAML.

    The YAML maps a tag name to a list of attribute-filter dicts; every
    matching element is run through ``_extract_date`` and the results are
    normalized with ``prepare_date``.
    """
    result = []
    # Pre-bind the verbosity flag; only (tag, node) remain to be supplied.
    _extract_func_date = partial(_extract_date, verbose=verbose)
    _tags = loadyaml(tags_file)
    for tag, tags_params in _tags.items():
        # if verbose:
        #     print("Processing tag - '%s'" % tag)
        # The inner mapcat maps findAll over parallel sequences, i.e. it
        # calls page.findAll(tag, params) for each params in tags_params
        # (funcy maps multiple seqs in lockstep); merge concatenates the
        # accumulated results.
        result = merge(
            result,
            list(mapcat(
                partial(_extract_func_date, tag),
                mapcat(page.findAll, [tag] * len(tags_params), tags_params))))
    return list(map(prepare_date, result))
def _create_observation(cls, game):
    """Build the normalized observation vector for the first car.

    Flattens the game's observation dict into a float32 vector, divides
    each component by its expected maximum magnitude, and clips the result
    to [-1, 1].
    """
    def get_values(observation):
        # Flatten own-car fields plus all other objects, each group sorted
        # nearest-first by position_length.
        return flatten(concat(
            observation['my_car'].values(),
            mapcat(methodcaller('values'), sorted(observation['other_cars'], key=itemgetter('position_length'))),  # Sort nearest-first; might be worth also splitting front/rear?
            mapcat(methodcaller('values'), sorted(observation['obstacles' ], key=itemgetter('position_length'))),  # noqa: E202
            mapcat(methodcaller('values'), sorted(observation['stars'     ], key=itemgetter('position_length')))   # noqa: E202
        ))
    # Per-component normalization constants; must stay in sync with the
    # field order produced by get_values above.
    observation = (
        np.array(tuple(get_values(game.create_observation(game.cars[0]))), np.float32) /  # noqa: W504
        np.array(tuple(concat(
            (
                1000,            # my_car.position.x
                1000,            # my_car.position.y
                np.pi,           # my_car.angle
                np.pi,           # my_car.velocity_angle
                MAX_SPEED / FPS, # my_car.velocity_length
                np.pi,           # my_car.steering_angle
                10,              # my_car.steering_torque
                30,              # my_car.score
                10 * FPS,        # my_car.crash_energy
            ),
            mapcat(lambda _: (
                np.pi,               # other_car.position_angle
                1000,                # other_car.position_length
                np.pi,               # other_car.angle
                np.pi,               # other_car.velocity_angle
                MAX_SPEED / FPS * 2, # other_car.velocity_length
                np.pi,               # other_car.steering_angle
                30,                  # other_car.score
                10 * FPS,            # other_car.crash_energy
            ), range(7)),
            mapcat(lambda _: (
                np.pi,  # obstacle.position_angle
                1000    # obstacle.position_length
            ), range(OBSTACLE_COUNT)),
            mapcat(lambda _: (
                np.pi,  # star.position_angle
                1000    # star.position_length
            ), range(STAR_COUNT)),
        )), dtype=np.float32)
    )
    # Clip to the observation-space bounds.
    observation[observation < -1] = -1
    observation[observation > 1] = 1  # noqa: E222
    return observation
def followers(bot: Bot, nodes, args) -> List[User]:
    """Lazily yield the followers of every user node as ``User`` models."""
    amount = args.get('amount') or 1

    def process(user):
        # Page through the followers endpoint for this user's id.
        items = cycled_api_call(
            amount,
            bot,
            bot.api.user_followers,
            dict(user_id=user.pk, **args.get('query', {})),
            'users',
        )
        return (User(**item) for item in items)

    return mapcat(process, nodes), {}
def _params(exp):
    """Collect every ``Param`` used as a modal-operator interval bound."""
    def interval_params(leaf):
        if isinstance(leaf, ModalOp):
            # Either end of the interval may be a parameter.
            return [p for p in leaf.interval[:2] if isinstance(p, Param)]
        return []

    return set(fn.mapcat(interval_params, exp.walk()))
def row_to_smt(rows_and_var_lists, store, t):
    """Encode rows of (coefficients, variable names) as a linear sum at time t.

    Each element of *rows_and_var_lists* is a pair ``(coeffs, names)``;
    every coefficient is multiplied with the first component of
    ``store[name, t]`` and all resulting terms are summed.

    Returns 0 (the empty ``sum``) when there are no rows.
    """
    rows_and_var_lists = list(rows_and_var_lists)

    def _terms(row_and_vars):
        # One generator of coeff * variable terms for a single row.
        coeffs, names = row_and_vars
        return (float(a) * store[x, t][0] for a, x in zip(coeffs, names))

    # Flatten all per-row term generators with a plain comprehension
    # (stdlib instead of fn.mapcat).
    return sum(term for pair in rows_and_var_lists for term in _terms(pair))
def oracle_hausdorff_bounds2(recset1, recset2, f1, f2, eps=1e-1, k=3):
    """Generator of successively tighter Hausdorff-distance intervals.

    On each iteration, both rectangle sets are discretized and the pointwise
    Hausdorff distance is padded by the combined discretization error; the
    sets are then refined (guided by f1/f2) and eps is halved, so successive
    yielded intervals shrink.
    """
    refiner1 = edge_length_guided_refinement(recset1, f1)
    refiner2 = edge_length_guided_refinement(recset2, f2)
    while True:
        xs = list(fn.mapcat(lambda r: r.discretize(k), recset1))
        ys = list(fn.mapcat(lambda r: r.discretize(k), recset2))
        # Worst-case error contributed by each set's coarsest rectangle.
        error1 = max(r.error + shortest_edge(r) for r in recset1)
        error2 = max(r.error + shortest_edge(r) for r in recset2)
        error = error1 + error2
        d12 = pointwise_hausdorff(xs, ys)
        yield Interval(max(d12 - error, 0), d12 + error)
        # Advance each refiner until the refinement score meets eps.
        # NOTE(review): each item appears to be a sequence of (score, rec)
        # pairs with negated scores at index [0][0] — confirm against
        # edge_length_guided_refinement.
        recset1 = fn.first(filter(lambda xs: -xs[0][0] <= eps, refiner1))
        recset2 = fn.first(filter(lambda xs: -xs[0][0] <= eps, refiner2))
        recset1, recset2 = [r for _, r in recset1], [r for _, r in recset2]
        eps /= 2
def likers(bot, nodes, args) -> List[Media]:
    """Lazily yield the users who liked each media node."""
    amount = args.get('amount')

    def process(media):
        for item in get_likers(media.pk, bot, amount):
            yield User(**item)

    return mapcat(process, nodes), {}
def params(self):
    """Return the set of ``Param`` objects referenced by this formula."""
    def leaf_params(leaf):
        if isinstance(leaf, ModalOp):
            # Interval bounds may be parametric.
            for bound in leaf.interval[:2]:
                if isinstance(bound, Param):
                    yield bound
        elif isinstance(leaf, LinEq) and isinstance(leaf.const, Param):
            yield leaf.const

    return set(fn.mapcat(leaf_params, self.walk()))
def geotag_feed(bot: Bot, nodes, args) -> List[Media]:
    """Lazily yield up to `amount` medias from each location node's feed."""
    amount = args.get('amount') or 1
    ordering = args.get('ordering', 'ranked')

    def process(tag):
        sections = cycled_api_call(
            None, bot, bot.api.location_section,
            dict(location_id=tag.id, tab=ordering), ('sections', ))
        # Each section carries a batch of medias; flatten, then cap at `amount`.
        medias = mapcat(lambda section: section['layout_content']['medias'], sections)
        for data in islice(medias, amount):
            yield Media(**data['media'])

    return mapcat(process, nodes), {}
def _append_car(self, position, angle):
    """Create a car at the given pose and register it with the simulation."""
    car = Car(self.space)
    car.set_position_and_angle(position, angle)
    car.crash_energy = 0
    car.score = 0
    # Tag every body shape and every tire shape with collision type 1.
    tires = (car.tire_lf, car.tire_rf, car.tire_lr, car.tire_rr)
    all_shapes = list(car.shapes) + [s for tire in tires for s in tire.shapes]
    for shape in all_shapes:
        shape.collision_type = 1
    self.cars.append(car)
def test_staircase_hausdorff(k, xys1, xys2):
    """Property test: the closed-form staircase Hausdorff distance is a
    lower bound for (or equal to) the pointwise distance on a refined
    sampling of the two polylines.
    """
    def discretize(intvl):
        # Sample (2 + k) coordinates along each axis of the segment's
        # endpoints and take their cartesian product.
        p1, p2 = intvl
        xs = np.linspace(p1.x, p2.x, 2 + k)
        ys = np.linspace(p1.y, p2.y, 2 + k)
        return [Point2d(x, y) for x, y in product(xs, ys)]
    f1 = [Point2d(x, y) for x, y in zip(*xys1)]
    f2 = [Point2d(x, y) for x, y in zip(*xys2)]
    # Refine each polyline by discretizing every consecutive segment.
    f1_hat = set(fn.mapcat(discretize, zip(f1, f1[1:])))
    f2_hat = set(fn.mapcat(discretize, zip(f2, f2[1:])))
    # Check discretization works as expected
    # (k new points per segment plus the original vertices).
    assert len(f1_hat) == (len(f1) - 1) * k + len(f1)
    assert len(f2_hat) == (len(f2) - 1) * k + len(f2)
    # Check extended array has smaller distance
    d1 = mdth.pointwise_hausdorff(f1_hat, f2_hat)
    d2 = staircase_hausdorff(f1, f2)
    event(f"d1, d2={d1, d2}")
    assert d2 <= d1 or pytest.approx(d1) == d2
def row_to_smt(rows_and_var_lists, store, t):
    """Sum coeff * symbol terms for all rows at time step t.

    SMT symbols are created lazily and cached in *store* under the key
    ``(name, t)``.
    """
    def term(coeff, name):
        if (name, t) not in store:
            # Declare the real-valued SMT symbol "name[t]" on first use.
            store[(name, t)] = Symbol(f"{name}[{t}]", REAL)
        return float(coeff) * store[name, t]

    return sum(term(a, x)
               for coeffs_and_names in list(rows_and_var_lists)
               for a, x in zip(*coeffs_and_names))
def flatten_binary(phi, op, dropT, shortT):
    """Flatten one level of the n-ary connective *op* applied in *phi*.

    - Arguments equal to the identity element *dropT* are removed.
    - If the absorbing element *shortT* remains among the arguments, the
      whole expression short-circuits to it.
    - Otherwise nested *op* nodes are spliced into the parent's argument
      tuple (one level of flattening).
    """
    def splice(x):
        return x.args if isinstance(x, op) else [x]

    args = [arg for arg in phi.args if arg != dropT]
    if any(arg is shortT for arg in args):
        return shortT
    elif not args:
        return dropT
    elif len(args) == 1:
        return args[0]
    else:
        # BUG FIX: flatten the filtered `args`, not the raw `phi.args` —
        # flattening phi.args re-introduced the dropT terms removed above.
        return op(tuple(a for arg in args for a in splice(arg)))
def hashtags(bot, nodes, args):
    """Lazily yield up to `amount` unique hashtags from each node's caption."""
    amount = args.get('amount')

    def process(node):
        words = node['caption']['text'].split()
        # '#foo' -> 'foo'; a set removes duplicates within one caption.
        unique = {word[1:] for word in words if word.startswith('#')}
        yield from islice((Hashtag(name=name) for name in unique), amount)

    return mapcat(process, nodes), {}
def check_operators(node_ref, status):
    """
    Checks that all operators listed in the QC file are valid.
    Sets an error message in the status if not.
    """
    operators = funcy.mapcat(nd.get_all_operators,
                             status[node_ref]['thresholds'])
    # BUG FIX: funcy.remove returns a lazy iterator, so len() on it raises
    # TypeError.  Materialize the invalid operators first; truthiness then
    # replaces the len() > 0 test.
    errors = list(funcy.remove(nd.is_operator, operators))
    if errors:
        status['error'] = generator_error_string(operator_error_message,
                                                 errors)
    return status
def scrape(bot: Bot, nodes, args):
    """Evaluate `model` expressions against each node, collecting results.

    Returns a lazy node pipeline (capped by the 'max' arg) plus a dict
    mapping `key` to the scraped data accumulated as the pipeline is
    consumed.
    """
    try:
        # NOTE(review): `max` shadows the builtin; rename when touching this.
        max = float(args['max']) if 'max' in args else float('inf')
        key = args.get('key', 'data')
        model = args['model']
    except Exception:
        bot.logger.error(
            'please add all necessary args, {} isn\'t enought'.format(args))
        return [], {}
    count = 0

    def increment():
        nonlocal count
        count += 1
    data = []

    def process(node):
        """
        model:
            name: x.full_name
            id: x.pk
            followers: x.followers_count
        """
        # A dict model maps output names to expressions; a string model is
        # a single expression.
        # NOTE(review): a model of any other type leaves `insertion`
        # unbound and would raise below — confirm callers always pass
        # dict or str.
        if isinstance(model, dict):
            insertion = dotdict()
            for name, expr in model.items():
                insertion[name] = evaluate(expr, node, bot=bot)
        elif isinstance(model, str):
            insertion = evaluate(model, node, bot=bot)
        # print(json.dumps(insertion, indent=4))
        data.append(insertion)
        bot.logger.info('scraped node {} '.format(node, ))
        # NOTE(review): data is appended before the cap check, and the
        # <= comparison passes nodes through until count exceeds max —
        # verify whether the intended limit is max or max + 1.
        if count <= max:
            increment()
            yield node
        else:
            return None
    nodes = mapcat(process, nodes)
    return nodes, {key: data}
def following(bot, nodes, args) -> List[User]:
    """Lazily yield the accounts each user node follows, as ``User`` models."""
    amount = args.get('amount') or 1

    def process(user):
        followees = cycled_api_call(
            amount, bot, bot.api.user_following,
            dict(user_id=user.pk, **args.get('query', {})), 'users')
        return (User(**item) for item in followees)

    return mapcat(process, nodes), {}
def hashtag_feed(bot: Bot, nodes, args) -> List[Media]:
    """Lazily yield medias from each hashtag node's feed."""
    amount = args.get('amount') or 1

    def process(tag):
        items = cycled_api_call(amount, bot, bot.api.feed_tag,
                                (tag.name,), 'items')
        for data in items:
            yield Media(id=data['pk'], data=data)

    return mapcat(process, nodes), {}
def hashtag_stories(bot: Bot, nodes, args) -> List[Media]:
    """Lazily yield stories attached to each hashtag node."""
    amount = args.get('amount') or 1

    def process(tag):
        items = cycled_api_call(amount, bot, bot.api.feed_tag,
                                tag.name, ['story', 'items'])
        for data in items:
            yield Story(**data)

    return mapcat(process, nodes), {}
def user_feed(bot: Bot, nodes, args) -> List[Media]:
    """Lazily yield the media feed of each user node."""
    amount = args.get('amount')

    def process(user):
        items = cycled_api_call(amount, bot, bot.api.username_feed,
                                user.username, 'items')
        for data in items:
            yield Media(**data)

    return mapcat(process, nodes), {}
def geotag_stories(bot: Bot, nodes, args) -> List[Story]:
    """Lazily yield stories for each location node."""
    amount = args.get('amount') or 1

    def process(tag):
        items = cycled_api_call(amount, bot, bot.api.location_stories,
                                tag.id, ('story', 'items'))
        for data in items:
            yield Story(**data)

    return mapcat(process, nodes), {}
def parse_page(text: str) -> Iterator[Item]:
    """Parse a pro-adventure.ru catalog page into Item records.

    One Item is yielded per (catalog card, start date) combination;
    duplicate date strings within a card are skipped.
    """
    tree = html.fromstring(text)
    for item in tree.xpath('//*[@class="catalog-item"]'):
        # tags
        item_title = item.find_class('catalog-item__title')[0]
        difficult = item.find_class('difficult')[0]
        cost = item.find_class('catalog-item__cost')[0]
        duration = item.find_class('catalog-item__time')[0]
        params = item.find_class('catalog-item__param-list')[0]
        # content
        # Minimum age defaults to 18 when the card states none.
        age = int_or_none(find_age(content(params)) or '') or 18
        # Trip length in days -> offset added to the start date.
        offset = timedelta(days=(int_or_none(content(duration)) or 1) - 1)
        date_nodes = mapcat(list, item.cssselect('div.catalog-item__dates'))
        seen = set()
        for node in date_nodes:
            value = content(node)
            if value in seen:
                continue
            seen.add(value)
            # Struck-through day numbers mark fully-booked departures.
            done = {int(content(x)) for x in node.cssselect('s')}
            for start in parse_dates(value):
                slots = None
                if start.day in done:
                    slots = 0
                level = int_or_none(difficult.get('class'))
                # NOTE(review): this rebinds the loop variable `item` to
                # the built record; harmless today because all card fields
                # were extracted above, but rename when touching this.
                item = Item(
                    vendor=Vendor.PRO_ADVENTURE,
                    level=level and LEVELS_MAP[level],
                    start=start,
                    end=start + offset,
                    url='https://pro-adventure.ru' + item_title.get('href'),
                    title=content(item_title),
                    price=content(cost),
                    for_kids=age <= 5,
                    slots=slots,
                )
                yield item
def _to_aag(gates, aag: AAG = None, *, max_idx=1, lit_map=None):
    """Recursively encode AIG gates into *aag*, assigning AIGER literals.

    Literals follow the AIGER convention visible below: variable index * 2,
    with the least-significant bit marking negation.  Returns the updated
    (aag, next free index, gate -> literal map) triple.
    """
    if lit_map is None:
        lit_map = {}
    if not gates:
        return aag, max_idx, lit_map
    # Recurse to update get aag for subtrees.
    for c in fn.mapcat(lambda g: g.children, gates):
        if c in lit_map:
            continue  # child already encoded
        aag, max_idx, lit_map = _to_aag([c], aag, max_idx=max_idx,
                                        lit_map=lit_map)
    # Update aag with current level.
    for gate in gates:
        if gate in lit_map:
            continue
        if isinstance(gate, aig.Inverter):
            # Reuse the input's variable and flip its negation bit (LSB).
            input_lit = lit_map[gate.input]
            lit_map[gate] = (input_lit & -2) | (1 ^ (input_lit & 1))
            continue
        elif isinstance(gate, aig.ConstFalse):
            lit_map[gate] = 0  # AIGER literal 0 is constant false
            continue
        # Must be And, Latch, or Input
        # Allocate a fresh (even, non-negated) literal.
        lit_map[gate] = 2 * max_idx
        max_idx += 1
        if isinstance(gate, aig.AndGate):
            # (output, left, right) literal triple; operands are already
            # in lit_map thanks to the recursion above.
            encoded = tuple(map(lit_map.get, (gate, gate.left, gate.right)))
            aag.gates.append(encoded)
        elif isinstance(gate, aig.Input):
            aag.inputs[gate.name] = lit_map[gate]
        else:
            assert isinstance(gate, aig.LatchIn)
    return aag, max_idx, lit_map
def user_stories(bot, nodes, args) -> List[Story]:
    """Lazily yield the active stories of each user node."""
    amount = args.get('amount') or 1

    def process(user):
        for data in get_stories(bot, user.pk, amount):
            yield Story(**data)

    return mapcat(process, nodes), {}
def box_edges(r): """Produce all n*2**(n-1) edges. TODO: clean up """ n = len(r.bot) diag = np.array(r.top) - np.array(r.bot) bot = np.array(r.bot) xs = [ np.array(x) for x in product([1, 0], repeat=n - 1) if x.count(1) != n ] def _corner_edge_masks(i): for x in xs: s_mask = np.insert(x, i, 0) t_mask = np.insert(x, i, 1) yield s_mask, t_mask for s_mask, t_mask in fn.mapcat(_corner_edge_masks, range(n)): intervals = tuple(zip(bot + s_mask * diag, bot + t_mask * diag)) yield to_rec(intervals=intervals)
def box_edges(r): """Produce all n*2**(n-1) edges. TODO: clean up """ n = len(r.bot) diag = np.array(r.top) - np.array(r.bot) bot = np.array(r.bot) xs = [ np.array(x) for x in product([1, 0], repeat=n - 1) if x.count(1) != n ] def _corner_edge_masks(i): for x in xs: s_mask = np.insert(x, i, 0) t_mask = np.insert(x, i, 1) yield s_mask, t_mask for s_mask, t_mask in fn.mapcat(_corner_edge_masks, range(n)): intervals = tuple(zip(bot + s_mask * diag, bot + t_mask * diag)) yield mdtr.to_rec(intervals=intervals)
def assert_valid_bst(mode, ixy_map, ixy_arr, tree, n_inserted, n_node):
    ''' tree is bst

    Asserts the structural invariants of the array-backed BST after
    n_inserted ixy insertions: parent links, leaf references, uniqueness,
    and ascending key order of leaves and inodes (key selected by `mode`).
    '''
    key = prop(mode)
    # Num of leaves ixy ref = num of inserted ixys
    # Parent must be positive value except root.
    for i,node in enumerate(tree[1:n_inserted+1]):
        assert node.parent >= 0, (n_inserted, i, pyobj(node))
    # Get ixy idxes from tree structure
    ixy_idxes = all_ixy_idxes(
        #tup_tree(tree[:n_inserted+50]))
        tup_tree(tree[:n_node+100]))
    if DBG: print(f' after[{n_node}]',#tup_tree(tree[:n_node+10]))
        [f'{p} {l} {r}' for _,p,l,r in tup_tree(tree[:n_node+10])])###########
    if DBG: print('iidxes', ixy_idxes)
    if DBG: print('n_node =',n_node)
    # Inserted number of ixys preserved?
    no0idxes = F.compact([abs(i) for i in ixy_idxes])
    assert n_inserted == len(no0idxes), \
        'ixy_idxes = {}, tup_tree = {}'.format(
            ixy_idxes, tup_tree(tree[:n_inserted+4]))
    # All ixy have unique index.
    assert len(set(no0idxes)) == n_inserted,\
        f'{len(set(no0idxes))} == {n_inserted}'
    # All leaves point ixy(neg idx), not inode.
    assert all(idx <= 0 for idx in ixy_idxes), \
        'ixy_idxes = {}, tree = {}'.format(
            ixy_idxes, tup_tree(tree[:n_inserted+4]))
    # Inserted ixys are sorted in ascending order.
    inserted_ixys = F.lmap(
        lambda i: ixy_arr[abs(i)], ixy_idxes)
    for ixy1, ixy2 in F.pairwise(inserted_ixys):
        assert key(ixy1) <= key(ixy2), 'tree = {}' \
            .format(tup_tree(tree[:n_inserted+4]))
    # All leaves: l <= r
    leaves = F.lfilter(is_leaf, tree[:n_inserted+4])
    for leaf in leaves:
        l = leaf.left; r = leaf.right
        if l and r:
            l_val = key(ixy_map[abs(l)])
            r_val = key(ixy_map[abs(r)])
            assert l_val <= r_val
    # All inodes must be sorted in ascending order.
    inodes = all_inodes(tup_tree(tree[:n_node+100]))
    for n1, n2 in F.pairwise(inodes):
        k1 = n1[0]; k2 = n2[0]
        assert k1 <= k2
    # Inserted ixys are sorted in ascending order.
    # Collect negative (leaf) child slots from each inode tuple (k,p,l,r)
    # and verify the referenced ixys are key-sorted as well.
    neg_idxeseq = F.mapcat(tup(
        lambda k,p,l,r: ((l,) if l < 0 else ()) + ((r,) if r < 0 else ())),
        inodes)
    ixy_idxes = F.map(abs, neg_idxeseq)
    saved_ixys = F.map(lambda i: pyobj(ixy_arr[i]), ixy_idxes)
    keys = F.lmap(key, saved_ixys)
    for k1,k2 in F.pairwise(keys):
        assert k1 <= k2
def __init__(self):
    """Set up the SelfDriving gym environment.

    The observation space is a flat Box in [-1, 1] (lower bound 0 for
    magnitude-like fields): own-car fields, then 7 other cars, then
    OBSTACLE_COUNT obstacles and STAR_COUNT stars — the same field order
    the observation builder produces.
    """
    self._seed = None
    self.name = 'SelfDriving'
    # Three continuous controls, each in [-1, 1].
    self.action_space = gym.spaces.Box(np.array((-1, -1, -1), dtype=np.float32), np.array((1, 1, 1), dtype=np.float32), dtype=np.float32)
    self.observation_space = gym.spaces.Box(
        np.array(
            tuple(
                concat(
                    (
                        -1,  # my_car.position.x
                        -1,  # my_car.position.y
                        -1,  # my_car.angle
                        -1,  # my_car.velocity_angle
                        0,   # my_car.velocity_length
                        -1,  # my_car.steering_angle
                        -1,  # my_car.steering_torque
                        0,   # my_car.score
                        0,   # my_car.crash_energy
                    ),
                    mapcat(
                        lambda _: (
                            -1,  # other_car.position_angle
                            0,   # other_car.position_length
                            -1,  # other_car.angle
                            -1,  # other_car.velocity_angle
                            0,   # other_car.velocity_length
                            -1,  # other_car.steering_angle
                            0,   # other_car.score
                            0,   # other_car.crash_energy
                        ),
                        range(7)),
                    mapcat(
                        lambda _: (
                            -1,  # obstacle.position_angle
                            0    # obstacle.position_length
                        ),
                        range(OBSTACLE_COUNT)),
                    mapcat(
                        lambda _: (
                            -1,  # star.position_angle
                            0    # star.position_length
                        ),
                        range(STAR_COUNT)),
                )),
            dtype=np.float32),
        np.array(
            tuple(
                concat(
                    (
                        1,  # my_car.position.x
                        1,  # my_car.position.y
                        1,  # my_car.angle
                        1,  # my_car.velocity_angle
                        1,  # my_car.velocity_length
                        1,  # my_car.steering_angle
                        1,  # my_car.steering_torque
                        1,  # my_car.score
                        1,  # my_car.crash_energy
                    ),
                    mapcat(
                        lambda _: (
                            1,  # other_car.position_angle
                            1,  # other_car.position_length
                            1,  # other_car.angle
                            1,  # other_car.velocity_angle
                            1,  # other_car.velocity_length
                            1,  # other_car.steering_angle
                            1,  # other_car.score
                            1,  # other_car.crash_energy
                        ),
                        range(7)),
                    mapcat(
                        lambda _: (
                            1,  # obstacle.position_angle
                            1   # obstacle.position_length
                        ),
                        range(OBSTACLE_COUNT)),
                    mapcat(
                        lambda _: (
                            1,  # star.position_angle
                            1   # star.position_length
                        ),
                        range(STAR_COUNT)),
                )),
            dtype=np.float32),
        dtype=np.float32)
    # Rendering surface is created lazily on first render.
    self.screen = None
    self.reset()
def _staircase(n): xs = np.linspace(0, 1, n) xs = list(fn.mapcat(lambda x: [x, x], xs))[1:] ys = xs[::-1] return xs, ys
def tokenize(lines, token_size_filter=2):
    """Split every line into tokens, keeping those longer than the filter."""
    return [token
            for line in lines
            for token in tokenize_line(line)
            if len(token) > token_size_filter]