Example #1
        def _groupby_paramter_type():
            entity_list_portlike = lfilter(
                lambda x: FoxylibEntity.entity2type(x) in Portlike.entity_types(),
                entity_list)
            entity_list_tradegood = lfilter(
                lambda x: FoxylibEntity.entity2type(x) == TradegoodEntity.entity_type(),
                entity_list)

            if not entity_list_portlike:
                return Param.Type.TRADEGOOD

            if not entity_list_tradegood:
                return Param.Type.PORTLIKE

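            # h_port2indexes and h_tradegood2indexes are index maps built in the enclosing scope (not shown in this snippet).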
            if len(h_port2indexes) > 1:
                return Param.Type.TRADEGOOD

            if len(h_tradegood2indexes) > 1:
                return Param.Type.PORTLIKE

            span_portlike = FoxylibEntity.entity2span(entity_list_portlike[0])
            span_tradegood = FoxylibEntity.entity2span(
                entity_list_tradegood[0])

            if span_portlike[0] < span_tradegood[0]:
                return Param.Type.PORTLIKE
            else:
                return Param.Type.TRADEGOOD
Example #2
    def h_qterm2j_doc(cls):
        logger = HenriqueLogger.func_level2logger(cls.h_qterm2j_doc,
                                                  logging.DEBUG)
        j_doc_list = list(TradegoodDocument.j_doc_iter_all())
        jpath = TradegoodDocument.jpath_names()

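        # Build one single-entry dict {qterm: j_doc} for every localized name of each tradegood document.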
        h_list = [
            {cls._query2qterm(name): j_doc}
            for j_doc in j_doc_list
            for name_list_lang in jdown(j_doc, jpath).values()
            for name in name_list_lang
        ]

        logger.debug({
            "h_list": iter2duplicate_list(
                lmap(lambda h: iter2singleton(h.keys()), h_list)),
            "jpath": jpath,
            "j_doc_list[0]": j_doc_list[0],
            "query[0]": jdown(j_doc_list[0], jpath),
        })

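        # Drop every qterm that maps to more than one document, then merge the survivors into a single dict.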
        qterm_list_duplicate = iter2duplicate_list(
            map(lambda h: iter2singleton(h.keys()), h_list))
        h_list_clean = lfilter(
            lambda h: iter2singleton(h.keys()) not in qterm_list_duplicate,
            h_list)

        h = merge_dicts(h_list_clean, vwrite=vwrite_no_duplicate_key)
        return h
Example #3
    def data2check_unique(cls, j_colhead_list, str_COL_list_ROW_list):
        # if not cls.ColHead.j_head2is_key(colhead): return

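        # Only columns flagged as key columns take part in the uniqueness check.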
        count_col = len(j_colhead_list)
        j_list_uniq = lfilter(
            lambda j: cls.ColHead.j_head2is_key(j_colhead_list[j]),
            range(count_col))
        if not j_list_uniq: return

        count_row = len(str_COL_list_ROW_list)

        tuple_ROW_list = lmap(
            lambda row: tuple(map(lambda j: row[j], j_list_uniq)),
            str_COL_list_ROW_list)

        iList_duplicate = sorted(
            lfilter_duplicate(range(count_row),
                              key=lambda i: tuple_ROW_list[i]),
            key=lambda i: (tuple_ROW_list[i], i),
        )
        if not iList_duplicate: return

        column_name_list = lmap(
            lambda j: cls.ColHead.j_head2col_name(j_colhead_list[j]),
            j_list_uniq)
        tuple_ROW_list_duplicate = lmap(partial(ListTool.li2v, tuple_ROW_list),
                                        iList_duplicate)

        h_error = {
            "column_name_list": column_name_list,
            "rownum_list_duplicate": lmap(cls._i2rownum, iList_duplicate),
            "tuple_ROW_list_duplicate": tuple_ROW_list_duplicate,
        }
        raise cls.DataUniqueValidatorException(h_error)
Example #4
    def table_ll2j_pair(cls, ll_IN):
        logger = FoxylibLogger.func_level2logger(cls.table_ll2j_pair,
                                                 logging.DEBUG)
        logger.info({"# rows": len(ll_IN)})

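        # Normalize the raw rows into a rectangular table (cf. the _table_ll2rectangle helper shown in Example #28).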
        ll_RECT = cls._table_ll2rectangle(ll_IN)
        str_list_HEAD, str_COL_list_ROW_list = ll_RECT[0], ll_RECT[1:]
        cls.ColHead.str_list2check_unique(str_list_HEAD)

        col_count = len(str_list_HEAD)
        j_colhead_list = [
            cls.ColHead.parse_str2j_colhead(str_list_HEAD[k])
            for k in range(col_count)
        ]

        cls.data2check_unique(j_colhead_list, str_COL_list_ROW_list)

        j_row_list_raw = [
            cls.str_list2j_row(j_colhead_list, str_COL_list_ROW)
            for str_COL_list_ROW in str_COL_list_ROW_list
        ]
        j_row_list = lfilter(bool, j_row_list_raw)

        logger.info({"j_row_list[0]": j_row_list[0]})
        return j_colhead_list, j_row_list
Example #5
    def packet2response(cls, packet):
        chatroom = Chatroom.codename2chatroom(
            KhalaPacket.packet2chatroom(packet))
        locale = Chatroom.chatroom2locale(chatroom)
        lang = LocaleTool.locale2lang(locale)

        text_in = KhalaPacket.packet2text(packet)
        config = {HenriqueEntity.Config.Field.LOCALE: locale}

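        # A "help" skill entity is kept only if more than one non-space character precedes it in the message.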
        def entity2is_valid(entity):
            if FoxylibEntity.entity2value(entity) != HenriqueSkill.Codename.HELP:
                return True

            span = FoxylibEntity.entity2span(entity)
            if len(str2strip(text_in[:span[0]])) > 1:
                return True

        entity_list_skill = lfilter(
            entity2is_valid,
            SkillEntity.text2entity_list(text_in, config=config))

        def entity_list2codename_list(entity_list):
            codename_list = luniq(
                map(SkillEntity.entity2skill_codename, entity_list))
            if codename_list:
                return codename_list

            return [HenriqueSkill.Codename.HELP]

        codename_list = entity_list2codename_list(entity_list_skill)
        clazz_list = lmap(HenriqueSkill.codename2class, codename_list)

        blocks = [clazz.lang2description(lang) for clazz in clazz_list]
        return Rowsblock.blocks2text(blocks)
Example #6
    def timedelta_lang2text(cls, td, lang):
        if td is None:
            return NanbanTimedeltaSuffix.lang2str_idk(lang)

        unit_td_list = [
            TimedeltaTool.unit_day(),
            TimedeltaTool.unit_hour(),
            TimedeltaTool.unit_minute(),
        ]
        quotient_list = TimedeltaTool.timedelta_units2quotients(
            td, unit_td_list)

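        # Render each non-zero quotient (days, hours, minutes) as a localized unit string; zero counts are skipped.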
        def index2str(index):
            unit_td, quotient = unit_td_list[index], quotient_list[index]
            if not quotient:
                return None

            unit = TimedeltaEntityUnit.timedelta2unit(unit_td)
            str_unit = TimedeltaEntityUnit.v_unit_lang2str(
                quotient, unit, lang)
            return str_unit

        n = len(unit_td_list)
        word_list = lfilter(is_not_none, map(index2str, range(n)))

        str_out = " ".join(word_list)
        return str_out
Example #7
    def class2child_classes(cls, clazz):
        members = inspect.getmembers(clazz, inspect.isclass)
        children = lfilter(lambda x: x != type, map(ig(1), members))

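        # Yield each directly nested class, then recurse into it for deeper nested classes.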
        for child in children:
            yield child
            yield from ClassTool.class2child_classes(child)
Example #8
        def f_iter(h_iter, *args, **kwargs):
            h_list_valid = lfilter(bool, h_iter)
            if not h_list_valid: return default

            h_final = reduce(lambda h1, h2: f_binary(h1, h2, *args, **kwargs),
                             h_list_valid, {})
            return h_final
Example #9
    def text2entity_list(cls, str_in):
        logger = FoxylibLogger.func_level2logger(cls.text2entity_list,
                                                 logging.DEBUG)

        entity_list_1day_raw = DayofweekEntityKo.text2entity_list(str_in)

        entity_list_multiday = cls._text2entity_list_multiday(str_in)
        span_list_multiday = lmap(FoxylibEntity.entity2span,
                                  entity_list_multiday)

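        # A single-day entity is dropped when a multi-day entity's span already covers it.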
        def entity_1day2is_not_covered(entity_1day):
            span_1day = FoxylibEntity.entity2span(entity_1day)
            for span_multiday in span_list_multiday:
                if SpanTool.covers(span_multiday, span_1day):
                    return False
            return True

        entity_list_1day_uncovered = lfilter(entity_1day2is_not_covered,
                                             entity_list_1day_raw)

        entity_list = lchain(
            lmap(cls._entity_1day2multiday, entity_list_1day_uncovered),
            entity_list_multiday)

        return entity_list
Example #10
    def xy_list2photoframe_point_list(cls, xy_list):
        c_X = Counter(map(ig(0), xy_list))
        x_list = [
            x for x, n in c_X.most_common() if n >= cls.PHOTOFRAME_LENGTH
        ]

        c_Y = Counter(map(ig(1), xy_list))
        y_list = [
            y for y, n in c_Y.most_common() if n >= cls.PHOTOFRAME_LENGTH
        ]

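        # (x, y) is a frame's top-left corner iff all four edges of the PHOTOFRAME_LENGTH square appear in xy_list.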
        def p2is_topleft(p_IN):
            x, y = p_IN
            p_set = {
                p
                for i in range(cls.PHOTOFRAME_LENGTH)
                for p in [
                    (x + i, y),
                    (x, y + i),
                    (x + cls.PHOTOFRAME_LENGTH - 1, y + i),
                    (x + i, y + cls.PHOTOFRAME_LENGTH - 1),
                ]
            }
            return p_set <= set(xy_list)

        p_list = lfilter(p2is_topleft, product(x_list, y_list))
        p_list_OUT = sorted(p_list, key=lambda p: (p[0], p[1]))
        return p_list_OUT
Example #11
    def head_plist2html(cls, headpair_list):

        html_css_list, html_js_list = lzip_strict(*headpair_list)

        html_css = join_html("\n", lfilter(bool, html_css_list))
        html_js = join_html("\n", lfilter(bool, html_js_list))

        l = [
            HTMLTool.str2html_comment("CSS import"),
            html_css,
            "",
            HTMLTool.str2html_comment("JS import"),
            html_js,
        ]
        html = join_html("\n", l)
        return html
Example #12
    def wait_all(cls, f_list, sec_timeout, sec_interval):
        logger = FoxylibLogger.func_level2logger(cls.wait_all, logging.DEBUG)
        time_end = time.time() + sec_timeout if sec_timeout is not None else None

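        # Poll every callable until all of them return truthy or the timeout expires.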
        n = len(f_list)
        status_list = [None] * n

        logger.debug(format_str(
            "waiting for {} processes for {} secs", len(f_list), sec_timeout))

        while (time_end is None) or (time.time() < time_end):
            for i in range(n):
                if status_list[i] is True:
                    continue
                status_list[i] = f_list[i]()

            if all(status_list):
                break

            # time_end is None when no timeout was given; avoid subtracting from None
            sec_left = ("{:.3f}".format(time_end - time.time())
                        if time_end is not None else "unlimited")
            logger.debug(format_str(
                "waiting for {}/{} processes for {} secs with {} sec interval",
                len(lfilter(lambda x: not x, status_list)),
                len(f_list),
                sec_left,
                sec_interval,
            ))
            time.sleep(sec_interval)

        return status_list
Example #13
    def _j_stats_jpaths2comprank(cls, j_stats, jpath_list):
        comprank_list = lfilter(
            is_not_none, map(lambda jpath: jdown(j_stats, jpath), jpath_list))
        if not comprank_list:
            return None

        return list2singleton(comprank_list)
Example #14
 def adapt_tags(self):
     """
     Adapt the basic tag info coming from the data side into the format the client needs
     """
     if self.tag and self.tag.strip() != '':
         try:
             tags = json.loads(self.tag)
             is_hot = tags.get('is_hot', False)
             is_important = tags.get('is_important', False)
             is_discussed = tags.get('is_discussed', False)
             if is_hot:
                 self.tags.append(TAG_HOT)
             if is_important:
                 self.tags.append(TAG_IMPORTANT)
             if is_discussed:
                 self.tags.append(TAG_DISCUSSED)
                 self.is_comment_hot = True
             # FIXME: somewhat awkward logic
             hit_keywords = tags.get('sport_ref', [])
             if hasattr(self, 'type') and self.type == EnumItemType.VIDEO:
                 if hit_keywords:
                     keywords = sorted(hit_keywords, key=lambda x: x[1], reverse=True)
                     self.tags.extend(lmap(lambda x: make_sport_tag(x[0]), keywords[:3]))
             # TODO: manual-tagging logic for the top-news backend; for now only removing the top-news tag is implemented, the rest is pending further product specification
             if self.manual_tags == [0]:
                 self.tags = lfilter(lambda x: x['id'] != TAG_IMPORTANT['id'], self.tags)
         except:
             pass
Example #15
    def j_chat2role(cls, chat):
        l_matched = lfilter(lambda x: x.chat2is_role_matched(chat), cls.role_class_list())
        if not l_matched:
            return None

        role_class = l_singleton2obj(l_matched)
        return role_class.NAME
Example #16
    def match_nodes2groupname_list(cls, m, cls_node_list):
        str_group_list = MatchTool.match2str_group_list(m)

        nodename_list = lmap(cls2name, cls_node_list)
        str_group_list_related = lfilter(
            lambda s: s.split("__")[-1] in nodename_list, str_group_list)
        return str_group_list_related
Example #17
    def span_iter2merged(cls, span_iter):
        span_list_in = lfilter(bool, span_iter)  # individual spans might be None
        if not span_list_in: return []

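        # Sort spans by start, then merge any span that begins at or before the current merged end.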
        l_sorted = sorted(map(list, span_list_in))
        n = len(l_sorted)

        l_out = []
        ispan_start = 0
        iobj_end = l_sorted[0][-1]
        for ispan in range(n - 1):
            s2, e2 = l_sorted[ispan + 1]

            if iobj_end >= s2:
                iobj_end = max(iobj_end, e2)
                continue

            span_out = cls.span_list_span2span_big(l_sorted,
                                                   (ispan_start, ispan + 1))
            l_out.append(span_out)
            ispan_start = ispan + 1

        span_last = cls.span_list_span2span_big(l_sorted, (ispan_start, n))
        l_out.append(span_last)

        return l_out
Example #18
    def table2col_trimmed(cls, table):
        m = iter2singleton(map(len, table))

        colindex_list_valid = lfilter(
            lambda j: not cls.table_colindex2is_empty(table, j),
            range(cls.COUNT_COLHEAD, m))
        return cls.table_colindexes2filtered(table, colindex_list_valid)
Example #19
    def value2front(cls, l, v):
        n = len(l)
        i_list_matching = lfilter(lambda i: l[i] == v, range(n))
        yield from map(lambda i: l[i], i_list_matching)

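        # Afterwards, yield the remaining elements in their original order.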
        i_set_matching = set(i_list_matching)
        yield from map(lambda i: l[i],
                       filter(lambda i: i not in i_set_matching, range(n)))
Example #20
 def table2row_trimmed(cls, table):
     n = len(table)
     rowindex_list_valid = lfilter(
         lambda i: not cls.table_rowindex2is_empty(table, i), range(n))
     return cls.table_rowindexes2filtered(table, rowindex_list_valid)
Example #21
    def list_func_count2index_list_continuous_valid(cls, l, f_valid,
                                                    count_match):
        n = len(l)

        i_list_valid = lfilter(
            lambda i: all(f_valid(l[i + j]) for j in range(count_match)),
            range(n - (count_match - 1)))
        return i_list_valid
Example #22
    def stt_delim2type(cls, tok):
        is_PREFIX = (tok.value in cls.l_PREFIX)
        is_INFIX = (tok.value in ["="])
        if len(lfilter(bool, [is_PREFIX, is_INFIX])) != 1: raise Exception()

        if is_PREFIX: return cls.DELIM_TYPE_PREFIX
        if is_INFIX: return cls.DELIM_TYPE_INFIX
        raise Exception()
Example #23
    def list_pair2match_list(cls, s1, s2):
        # initialize SequenceMatcher with the two input sequences
        sm = SequenceMatcher(None, s1, s2)

        matches = sm.get_matching_blocks()

        return lfilter(lambda m: not cls.match2is_empty(m), matches)
Example #24
 def _table_colspan2rowindexes_valid(cls,
                                     table,
                                     colspan,
                                     fresh_start_req=True):
     n_row = len(table)
     return lfilter(
         lambda i: cls._table_colspan_i2is_valid(
             table, colspan, i, fresh_start_req=fresh_start_req),
         range(n_row))
Example #25
    def i2m_right_before(cls, i, m_list):
        if not m_list:
            return None

        m_list_valid = lfilter(lambda m: m.end() <= i, m_list)
        if not m_list_valid: return None

        m_max = max(m_list_valid, key=lambda m: m.start())
        return m_max
Example #26
    def llfilter(cls, f, ll, count_unwrap=0):
        if count_unwrap == 0:
            return lfilter(f, ll)

        return [cls.llfilter(f, y, count_unwrap - 1) for y in ll]
Example #27
    def table_colindex2rowindexes_sorted(cls, table, colindex):
        logger = FoxylibLogger.func_level2logger(
            cls.table_colindex2rowindexes_sorted, logging.DEBUG)

        n_row = len(table)
        l_col = lmap(lambda i: table[i][colindex], range(n_row))
        i_list_valid = lfilter(lambda i: l_col[i], range(1, n_row))
        i_list_sorted = sorted(i_list_valid, key=lambda i: -int(l_col[i]))

        return i_list_sorted
Example #28
    def _table_ll2rectangle(cls, str_ll_IN):
        str_ll_CLEAN = lfilter(any, str_ll_IN)

        count_COL = len(str_ll_CLEAN[0])  # assume first line is colheads

        for str_list in str_ll_CLEAN:
            count_ADD = count_COL - len(str_list)
            str_list.extend([""] * count_ADD)

        return str_ll_CLEAN
Example #29
    def table_list2dict_duplicates(cls,
                                   dict_sheetname2table,
                                   cellinfo2is_data=None,
                                   key=None):
        info_list = lfilter(
            cellinfo2is_data,
            Cellinfo.table_dict2info_iter(dict_sheetname2table))

        h_duplicates = DuplicateTool.iter2dict_duplicates(
            info_list, key=lambda x: key(Cellinfo.info2content(x)))
        return h_duplicates
Example #30
    def img2xy_list(cls, img):
        ll = cls.img2rgb_ll(img)
        n_ROW, n_COL = (img.height, img.width)

        xy2is_valid = None  # placeholder predicate; lfilter(None, ...) keeps every truthy xy tuple

        xy_list_ALL = lproduct(lrange(n_COL), lrange(n_ROW))
        xy_list_VALID = lfilter(xy2is_valid, xy_list_ALL)
        return xy_list_VALID
Example #31
 def __call__(self, values):
     """Takes a list of property features and extracts the EPC rating"""
     epc_rating_items = lfilter(lambda v: v.startswith('EPC Rating'), values)
     return epc_rating_items[0][-1]
Example #32
 def __call__(self, values):
     """Takes a list of property features and extracts the EPC rating"""
     epc_rating_items = lfilter(lambda v: v.lower().startswith('energy rating '), values)
     return epc_rating_items[0][14] if len(epc_rating_items) > 0 else "Unknown"