def coco_mixer(acc, curr):
    """Combine COCO annotations in *acc* with image metadata from *curr*.

    For each annotation ``xo`` in *acc* produce one flat record holding the
    annotation bbox, the image dimensions, an output filename, and a
    precomputed normalized ``darkchocolate`` row
    ``[class, x/W, y/H, w/W, h/H]``.
    """
    def to_record(xo):
        bbox = xo['bbox']
        img_w = curr['width']
        img_h = curr['height']
        return {
            "id": curr['id'],
            "image_id": xo['image_id'],
            "coco_class": xo['category_id'],
            "x": bbox[0],
            "y": bbox[1],
            "bbox_width": bbox[2],
            "bbox_height": bbox[3],
            "img_width": img_w,
            "img_height": img_h,
            "output": curr['file_name'] + ".txt",
            "darkchocolate": [
                int(xo['category_id']),
                bbox[0] / img_w,
                bbox[1] / img_h,
                bbox[2] / img_w,
                bbox[3] / img_h,
            ],
        }

    return _.flat_map(acc, to_record)
def get_available_cells(field: MatrixInt, dimensions: Coord) -> List[Coord]:
    """Return every cell that is neither checked nor adjacent to a checked cell."""
    checked = find_checked_cells(field)
    all_cells = [
        Coord(pair)
        for pair in product(range(dimensions.i), range(dimensions.j))
    ]
    # Exclude the checked cells themselves plus everything touching them.
    blocked = set(checked)
    for cell in checked:
        blocked.update(find_adjacent_cells(cell, all_cells))
    return list(set(all_cells) - blocked)
def _serarch_by_selects(selects, body_json):
    """Return True if every requested select name appears in the body's queries.

    NOTE(review): the function name contains a typo ("serarch"); it is kept
    unchanged for backward compatibility with existing callers.

    :param selects: a select name or list of names that must all be present;
        falsy matches anything.
    :param body_json: parsed request body expected to contain a ``queries`` list.
    """
    if not selects:
        return True
    queries = body_json.get('queries', [])
    # BUG FIX: pydash.get returns None when the path is missing; flat_map
    # keeps None as an element (only lists are flattened), which previously
    # crashed on ``s.get('Name')``. Substitute an empty list instead.
    resp_selects = pydash.flat_map(
        queries, lambda q: pydash.get(
            q, 'Query.Commands.0.SemanticQueryDataShapeCommand.Query.Select')
        or [])
    resp_names = {s.get('Name') for s in resp_selects}
    return len(set(as_list(selects)) - resp_names) == 0
def _search_by_entity(entity, body_json):
    """Return True if *entity* is named in any query's From clause.

    A falsy *entity* matches everything.
    """
    if not entity:
        return True
    queries = body_json.get('queries', [])
    from_clauses = pydash.flat_map(
        queries, lambda q: pydash.get(
            q, 'Query.Commands.0.SemanticQueryDataShapeCommand.Query.From'))
    # pydash.get on a None clause safely yields None, so no guard is needed.
    entity_names = [pydash.get(clause, 'Entity') for clause in from_clauses]
    return entity in entity_names
def get_move_candidates(v: MatrixInt) -> List[Coord]:
    """Pick the cells worth targeting next.

    Locates every known ship fragment and collects the fog cells where each
    fragment could continue; falls back to all fog cells when there are no
    fragments (or no possible extensions).
    """
    ships = find_ships(v)  # all known ship fragments on the board
    # every still-unknown (fog) cell
    empty = [
        Coord((row_idx, col_idx))
        for row_idx, row in enumerate(v)
        for col_idx, cell in enumerate(row)
        if cell == CellState.CELL_FOG.value
    ]
    # for each fragment, the cells where that ship could extend
    candidates = []
    for ship in ships:
        candidates.extend(calculate_ship_extension(ship, empty))
    return candidates or empty
def test_get_page_iobes():
    """get_page_iobes should reproduce the expected IOBES tags for the parade fixture."""
    with open('test/fixtures/parade_page_db.json') as f:
        parade_page = json.load(f)
    content_len = len(parade_page['content'])
    with open('test/fixtures/parade_page_contexts.json') as f:
        # keep only mentions whose offset lies inside the page content
        parade_page_contexts = {
            title: [m for m in mentions if m['offset'] < content_len]
            for title, mentions in json.load(f).items()
        }
    # one [title, mention] pair per mention, then order by mention offset
    context_pairs = [
        [title, mention]
        for title, mentions in parade_page_contexts.items()
        for mention in mentions
    ]
    contexts = sorted(context_pairs, key=lambda pair: pair[1]['offset'])
    mentions = [pair[1] for pair in contexts]
    mention_link_titles = [pair[0] for pair in contexts]
    assert parade_iobes == iobes.get_page_iobes(parade_page, mentions,
                                                mention_link_titles)
def stat_product(self):
    """Aggregate per-product sales totals for one day into StatProductDaily.

    Queries all effectively-paid orders (not UNPAID, not CANCEL) created in
    [self.date_from, self.date_to], expands their product snapshots, and
    writes one StatProductDaily row per known product id — zero totals for
    products with no sales that day.
    """
    # Product snapshots from the day's paid orders.
    res = db.session.query(Order.snap_products).filter_by(
        soft=True).filter(Order.order_status != OrderStatus.UNPAID.value,
                          Order.order_status != OrderStatus.CANCEL.value,
                          Order._create_time >= self.date_from,
                          Order._create_time <= self.date_to).all()
    if res:
        # each row holds one JSON-encoded list of product snapshots
        product_dicts = flat_map(res, lambda x: json.loads(x[0]))
        products = self.define_products(product_dicts)
    else:
        products = []
    # All product ids currently on file.
    product_ids = db.session.query(Product.id).filter_by(soft=True).all()
    product_ids = [item[0] for item in product_ids]
    # Index the day's sold products by id for O(1) lookup.
    ids_products_paid = key_by(products, 'id')
    with db.auto_commit():
        for product_id in product_ids:
            total_price_res = Decimal('0.00')
            total_count_res = 0
            if product_id in ids_products_paid:
                total_price_res = ids_products_paid[product_id][
                    'total_price_str']
                total_count_res = ids_products_paid[product_id]['count']
            # BUG FIX: previously wrote random randint(...) placeholder
            # values and discarded the computed totals; persist the real
            # aggregates instead.
            StatProductDaily.create(
                date=self.one_date,
                product_id=product_id,
                total_price=total_price_res,
                total_count=total_count_res)
def test_flat_map(case, expected):
    """flat_map(*case) should map and flatten one level, matching *expected*."""
    result = _.flat_map(*case)
    assert result == expected