def _entities2dict_part(cls, entities):
    """Convert one group of same-type entities into a ``{field: value}`` dict.

    The field name is derived from the group's parameter type. Raises a
    generic Exception for an unrecognized parameter type.
    """
    Param = PriceSkillParameter
    param_type = Param.Type.entity_group2parameter_type(entities)
    field = cls.parameter_type2field(param_type)

    if param_type == Param.Type.PORTLIKE:
        # a port-like entity (port or culture) may expand to several port codenames
        port_codenames = lchain(
            *map(Portlike.entity_portlike2port_codenames, entities))
        return {field: port_codenames}

    if param_type == Param.Type.TRADEGOOD:
        tradegood_codenames = lmap(FoxylibEntity.entity2value, entities)
        return {field: tradegood_codenames}

    if param_type == Param.Type.RATE:
        # rate groups hold exactly one entity
        rate = FoxylibEntity.entity2value(l_singleton2obj(entities))
        return {field: rate}

    if param_type == Param.Type.TREND:
        # trend groups hold exactly one entity
        trend = FoxylibEntity.entity2value(l_singleton2obj(entities))
        return {field: trend}

    raise Exception({"param_type": param_type,
                     "entities": entities,
                     })
def _groupby_paramter_type():
    """Decide whether to key the grouping on PORTLIKE or TRADEGOOD.

    Closure over enclosing scope: entity_list, h_port2indexes,
    h_tradegood2indexes, Param.
    """
    entity_list_portlike = [
        e for e in entity_list
        if FoxylibEntity.entity2type(e) in Portlike.entity_types()
    ]
    entity_list_tradegood = [
        e for e in entity_list
        if FoxylibEntity.entity2type(e) == TradegoodEntity.entity_type()
    ]

    # only one kind present => group by the other kind
    if not entity_list_portlike:
        return Param.Type.TRADEGOOD
    if not entity_list_tradegood:
        return Param.Type.PORTLIKE

    # multiple distinct ports => pivot on tradegood; and vice versa
    if len(h_port2indexes) > 1:
        return Param.Type.TRADEGOOD
    if len(h_tradegood2indexes) > 1:
        return Param.Type.PORTLIKE

    # tie-break: whichever entity appears first in the text wins
    span_portlike = FoxylibEntity.entity2span(entity_list_portlike[0])
    span_tradegood = FoxylibEntity.entity2span(entity_list_tradegood[0])
    if span_portlike[0] < span_tradegood[0]:
        return Param.Type.PORTLIKE
    return Param.Type.TRADEGOOD
def entity_lang2response_block(cls, entity, lang):
    """Render a response block for one entity, dispatching on its entity type.

    Raises NotImplementedError when the entity type has no registered renderer.
    """
    entity_type = FoxylibEntity.entity2type(entity)
    codename = FoxylibEntity.entity2value(entity)

    # local imports to avoid circular module dependencies
    from henrique.main.skill.tradegood.tradegood_port.tradegood_port_response import TradegoodPortResponse
    from henrique.main.skill.tradegood.tradegood_tradegood.tradegood_tradegood_response import TradegoodTradegoodResponse
    from henrique.main.skill.tradegood.tradegood_culture.tradegood_culture_response import TradegoodCultureResponse

    h_type2func = {
        PortEntity.entity_type():
            partial(TradegoodPortResponse.codename_lang2text, lang=lang),
        TradegoodEntity.entity_type():
            partial(TradegoodTradegoodResponse.codename_lang2text, lang=lang),
        CultureEntity.entity_type():
            partial(TradegoodCultureResponse.codename_lang2text, lang=lang),
    }
    # sanity check: the dispatch table must cover exactly the target entity classes
    assert_equals(set(h_type2func.keys()),
                  smap(lambda c: c.entity_type(), cls.target_entity_classes()))

    codename2response = h_type2func.get(entity_type)
    if not codename2response:
        raise NotImplementedError("Invalid entity_type: {}".format(entity_type))

    return Rowsblock.text2norm(codename2response(codename))
def entity2is_valid(entity):
    """Return True when this entity should be kept as a real request.

    Closure over text_in from the enclosing scope. Any non-'help' entity is
    valid; a 'help' entity is valid only when more than one non-blank
    character precedes its span in text_in.
    """
    if FoxylibEntity.entity2value(entity) != HenriqueSkill.Codename.HELP:
        return True

    span = FoxylibEntity.entity2span(entity)
    # 'help' counts as a real request only with meaningful text before it
    if len(str2strip(text_in[:span[0]])) > 1:
        return True

    # BUGFIX: the original fell off the end and implicitly returned None;
    # return an explicit boolean (same truthiness for existing callers).
    return False
def entity_portlike2port_codenames(cls, entity_portlike):
    """Expand a port-like entity into the port codenames it denotes.

    A port entity maps to itself; a culture entity maps to every port of
    that culture. Raises RuntimeError for any other entity type.
    """
    entity_type = FoxylibEntity.entity2type(entity_portlike)

    if entity_type == PortEntity.entity_type():
        return [FoxylibEntity.entity2value(entity_portlike)]

    if entity_type == CultureEntity.entity_type():
        culture_codename = FoxylibEntity.entity2value(entity_portlike)
        ports = Port.culture2ports(culture_codename)
        return [Port.port2codename(p) for p in ports]

    raise RuntimeError({"entity_type": entity_type})
def _text2entity_list_multiday(cls, str_in):
    """Yield day-span entities formed by two single-day entities joined by a
    delimiter (pattern: <1-day> <delim> <1-day>)."""
    logger = FoxylibLogger.func_level2logger(
        cls._text2entity_list_multiday, logging.DEBUG)

    entity_list_1day = DayofweekEntityKoSingle.text2entity_list(str_in)
    m_list_delim = list(cls.pattern_delim().finditer(str_in))

    # candidate span sequences: day, delimiter, day
    span_ll = [
        [FoxylibEntity.entity2span(e) for e in entity_list_1day],
        [MatchTool.match2span(m) for m in m_list_delim],
        [FoxylibEntity.entity2span(e) for e in entity_list_1day],
    ]

    def span2is_gap(span):
        return cls.str_span2is_gap(str_in, span)

    j_tuple_list = list(
        ContextfreeTool.spans_list2reducible_indextuple_list(
            span_ll, span2is_gap))
    logger.debug({
        "j_tuple_list": j_tuple_list,
        "entity_list_1day": entity_list_1day,
        "m_list_delim": m_list_delim,
    })

    for j1, j2, j3 in j_tuple_list:
        entity_pair = entity_list_1day[j1], entity_list_1day[j3]
        logger.debug({"j1": j1, "j3": j3, "entity_pair": entity_pair, })

        # merged span: start of the first day through end of the second
        span = (FoxylibEntity.entity2span(entity_pair[0])[0],
                FoxylibEntity.entity2span(entity_pair[1])[1],
                )
        yield {
            FoxylibEntity.Field.TYPE: DayofweekSpanEntity.entity_type(),
            FoxylibEntity.Field.SPAN: span,
            FoxylibEntity.Field.FULLTEXT: str_in,
            FoxylibEntity.Field.VALUE:
                tmap(FoxylibEntity.entity2value, entity_pair),
        }
def entity2response_block(cls, entity, packet, ):
    """Render a chatroom-user profile block for the given entity.

    Returns None unless the packet's chatroom is the kakaotalk chatroom;
    otherwise renders the per-language template with the user's name,
    aliases and a randomly chosen comment (None when the user has none).
    """
    # BUGFIX: the logger was created with cls.packet2response, so debug
    # records were attributed to the wrong function; bind it to this one.
    logger = HenriqueLogger.func_level2logger(cls.entity2response_block,
                                              logging.DEBUG)

    chatroom = Chatroom.codename2chatroom(KhalaPacket.packet2chatroom(packet))
    # this skill only answers in the kakaotalk chatroom
    if Chatroom.chatroom2codename(chatroom) != ChatroomKakaotalk.codename():
        return

    locale = Chatroom.chatroom2locale(chatroom)
    lang = LocaleTool.locale2lang(locale)

    v = FoxylibEntity.entity2value(entity)
    codename = ChatroomuserEntity.value_packet2codename(v, packet)
    logger.debug({"codename": codename, "entity": entity, "v": v, })

    chatroomuser = Chatroomuser.codename2chatroomuser(codename)
    comments = Chatroomuser.chatroomuser2comments(chatroomuser)
    comment = choice(comments) if comments else None

    filepath = os.path.join(FILE_DIR, "tmplt.{}.part.txt".format(lang))
    data = {"name": codename,
            "comment": comment,
            "str_aliases": ", ".join(
                Chatroomuser.chatroomuser2aliases(chatroomuser)),
            }
    text_out = str2strip(HenriqueJinja2.textfile2text(filepath, data))
    return text_out
def i2entity(i):
    # Merge an hour-minute time entity with a trailing "seconds" match (if
    # any) into a single hour-minute-second entity.
    # Closure over enclosing scope: h_i2j, entity_list_hm, span_list_hm,
    # span_list_second, entity_type, text_in.
    if i not in h_i2j:
        # no seconds part follows this entity; keep it unchanged
        return entity_list_hm[i]
    j = h_i2j[i]
    # extended span: start of the h:m entity through the end of the seconds part
    span = (span_list_hm[i][0], span_list_second[j][1])
    # NOTE(review): m2 comes from span_list_second but is used below as a
    # regex match object (.group()); the list presumably holds match objects
    # despite its name — confirm against the enclosing function.
    entity_hm, m2 = entity_list_hm[i], span_list_second[j]
    value_hm = FoxylibEntity.entity2value(entity_hm)
    hour, minute = TimeEntity.Value.value2hm(value_hm)
    second = int(m2.group())
    if not TimeTool.second2is_valid(second):
        # out-of-range seconds value => drop the whole entity
        return None
    value = {
        TimeEntity.Value.Field.HOUR: hour,
        TimeEntity.Value.Field.MINUTE: minute,
        TimeEntity.Value.Field.SECOND: second,
    }
    entity = {
        FoxylibEntity.Field.TYPE: entity_type,
        FoxylibEntity.Field.FULLTEXT: text_in,
        FoxylibEntity.Field.SPAN: span,
        FoxylibEntity.Field.VALUE: value,
    }
    return entity
def j2valid_trend(j):
    # Validate that the entity group at index j is a TREND completing a
    # '<portlike> <tradegood/portlike> <rate> <trend>' sequence ending at j.
    # Closure over enclosing scope: entities_list, Param,
    # j_param_types2j_latest, text.
    nonlocal entities_list

    # need portlike, tradegood, rate before the trend => j >= 3
    if j < 3:
        return False

    # latest group index for each parameter type, relative to j
    j_tuple = j_param_types2j_latest(j, Param.Type.list())
    if any(map(is_none, j_tuple)):
        return False

    # NOTE: the comprehension variable j shadows the parameter j; the
    # shadowing is confined to the comprehension's own scope.
    entities_tuple = [
        entities_list[j] if j is not None else None for j in j_tuple
    ]
    # each parameter type must be represented by exactly one entity
    if any(map(lambda x: len(x) != 1, entities_tuple)):
        return False

    j_portlike, j_tradegood, j_rate, j_trend = j_tuple
    assert_equal(j_trend, j)

    # rate immediately precedes trend; port or tradegood immediately
    # precedes the rate
    if j_rate != j - 1:
        return False
    if j - 2 not in {j_portlike, j_tradegood}:
        return False

    entity_portlike, entity_tradegood, entity_rate, entity_trend = map(
        l_singleton2obj, entities_tuple)

    if FoxylibEntity.entity2type(
            entity_portlike) != PortEntity.entity_type(
    ):  # not culture
        return False

    # whichever of port/tradegood appears later in the text
    entity_latter = max([entity_portlike, entity_tradegood],
                        key=FoxylibEntity.entity2span)
    span_latter, span_rate, span_trend = lmap(
        FoxylibEntity.entity2span,
        [entity_latter, entity_rate, entity_trend])

    # the text between the latter entity and the rate must be blank/empty
    span_latter_rate = SpanTool.span_pair2between(
        span_latter, span_rate)
    str_between_latter_rate = StringTool.str_span2substr(
        text, span_latter_rate)
    if not RegexTool.pattern_str2match_full(
            RegexTool.pattern_blank_or_nullstr(), str_between_latter_rate):
        return False

    # likewise between the rate and the trend
    span_rate_trend = SpanTool.span_pair2between(span_rate, span_trend)
    str_between_rate_trend = StringTool.str_span2substr(
        text, span_rate_trend)
    if not RegexTool.pattern_str2match_full(
            RegexTool.pattern_blank_or_nullstr(), str_between_rate_trend):
        return False

    return True
def _entity_1day2multiday(cls, j_entity_1day):
    """Reshape a single-day entity into multiday form by wrapping its value
    in a 1-tuple; all other fields are carried over unchanged."""
    value_1day = FoxylibEntity.entity2value(j_entity_1day)
    overrides = {FoxylibEntity.Field.VALUE: (value_1day,)}
    return merge_dicts([j_entity_1day, overrides], vwrite=vwrite_overwrite)
def entity2relativedelta(cls, entity):
    """Sum the per-element relativedeltas of this entity's value."""
    logger = HenriqueLogger.func_level2logger(cls.entity2relativedelta,
                                              logging.DEBUG)

    elements = FoxylibEntity.entity2value(entity)
    deltas = [TimedeltaElement.element2relativedelta(e) for e in elements]
    logger.debug({"relativedelta_list": deltas})

    # start from a zero delta so an empty element list still yields a
    # relativedelta
    total = relativedelta(days=0)
    for delta in deltas:
        total = total + delta
    return total
def text2entity_list(cls, str_in, config=None):
    """Yield hour entities: a word-bound cardinal number immediately
    followed (modulo blanks) by an hour suffix.

    Each yielded entity spans from the cardinal's start to the suffix's end
    and carries the cardinal's value.
    """

    def entity2is_wordbound_prefixed(entity):
        # only accept cardinals that start on a word boundary
        return StringTool.str_span2is_wordbound_prefixed(
            str_in, FoxylibEntity.entity2span(entity))

    cardinal_entity_list = lfilter(entity2is_wordbound_prefixed,
                                   CardinalEntity.text2entity_list(str_in))

    # BUGFIX: finditer() returns a one-shot iterator; the original consumed
    # it while building span_ll and then indexed m_list_suffix[j2], which
    # raises TypeError (iterators are not subscriptable). Materialize once.
    m_list_suffix = list(cls.pattern_suffix().finditer(str_in))

    span_ll = [
        lmap(FoxylibEntity.entity2span, cardinal_entity_list),
        lmap(MatchTool.match2span, m_list_suffix),
    ]

    def f_span2is_gap(span):
        # cardinal and suffix may be separated only by blank text (or nothing)
        return StringTool.str_span2match_blank_or_nullstr(str_in, span, )

    j_tuple_list = ContextfreeTool.spans_list2reducible_indextuple_list(
        span_ll, f_span2is_gap)

    for j1, j2 in j_tuple_list:
        cardinal_entity = cardinal_entity_list[j1]
        m_suffix = m_list_suffix[j2]

        span = (FoxylibEntity.entity2span(cardinal_entity)[0],
                MatchTool.match2span(m_suffix)[1])

        j_entity = {
            FoxylibEntity.Field.TYPE: HourEntity.entity_type(),
            FoxylibEntity.Field.SPAN: span,
            FoxylibEntity.Field.FULLTEXT: str_in,
            FoxylibEntity.Field.VALUE:
                FoxylibEntity.entity2value(cardinal_entity),
        }
        yield j_entity
def i2entity(i):
    # Attach a trailing AM/PM marker to a time entity, normalizing the hour.
    # Closure over enclosing scope: entity_list_in, h_i2j, m_list_ampm,
    # span_list_in, span_list_ampm, text_in.
    entity = entity_list_in[i]
    assert_equal(FoxylibEntity.entity2type(entity),
                 TimeEntity.entity_type())
    if i not in h_i2j:
        # no AM/PM marker follows this entity; keep it unchanged
        return entity_list_in[i]
    j = h_i2j[i]
    m_ampm = m_list_ampm[j]
    # extended span: start of the time entity through the end of the marker
    span = (span_list_in[i][0], span_list_ampm[j][1])
    v_entity = FoxylibEntity.entity2value(entity)
    hour, minute, second = TimeEntity.Value.value2hms(v_entity)
    ampm = AMPM.match2value(m_ampm)
    # normalize the (hour, ampm) pair into a consistent form
    hour_adjusted, ampm_adjusted = AMPM.hour_ampm2normalized(
        hour, ampm)
    # logger.debug({"hour":hour, "ampm":ampm,
    #               "hour_adjusted":hour_adjusted, "ampm_adjusted":ampm_adjusted})
    # drop absent (None) fields, e.g. a missing minute/second/ampm
    value = DictTool.filter(
        lambda k, v: v is not None,
        {
            TimeEntity.Value.Field.HOUR: hour_adjusted,
            TimeEntity.Value.Field.MINUTE: minute,
            TimeEntity.Value.Field.SECOND: second,
            TimeEntity.Value.Field.AMPM: ampm_adjusted,
        })
    entity = {
        FoxylibEntity.Field.TYPE: FoxylibEntity.entity2type(entity),
        FoxylibEntity.Field.FULLTEXT: text_in,
        FoxylibEntity.Field.SPAN: span,
        FoxylibEntity.Field.VALUE: value,
    }
    return entity
def entity2response_block(cls, packet, entity, ):
    """Dispatch an entity to the response builder registered for its type.

    Raises NotImplementedError when the entity type has no registered builder.
    """
    chatroom = Chatroom.codename2chatroom(
        KhalaPacket.packet2chatroom(packet))
    lang = LocaleTool.locale2lang(Chatroom.chatroom2locale(chatroom))

    entity_type = FoxylibEntity.entity2type(entity)
    h_type2func = {
        PortEntity.entity_type():
            partial(PortSkill.entity_lang2response_block, lang=lang),
        TradegoodEntity.entity_type():
            partial(TradegoodSkill.entity_lang2response_block, lang=lang),
        CultureEntity.entity_type():
            partial(CultureSkill.entity_lang2response_block, lang=lang),
        ChatroomuserEntity.entity_type():
            partial(WhoSkill.entity2response_block, packet=packet, ),
    }
    # sanity check: the dispatch table must cover exactly the target entity classes
    assert_equals(set(h_type2func.keys()),
                  smap(lambda c: c.entity_type(), cls.target_entity_classes()))

    entity2response = h_type2func.get(entity_type)
    if not entity2response:
        raise NotImplementedError("Invalid entity_type: {}".format(entity_type))

    return entity2response(entity)
def packet2response(cls, packet):
    """Handle a nanban-time packet.

    Parses either a relative-timedelta entity or a time entity from the
    message, resolves it to a concrete datetime, snaps it to the nearest
    coming occurrence on the nanban period, upserts it to mongo, and
    returns the lookup response for the server/language. With no entity,
    returns a plain lookup; with more than one entity, returns None.
    """
    logger = HenriqueLogger.func_level2logger(cls.packet2response,
                                              logging.DEBUG)
    logger.debug({"packet": packet})
    server_codename = HenriquePacket.packet2server(packet)
    chatroom = Chatroom.codename2chatroom(
        KhalaPacket.packet2chatroom(packet))
    locale = Chatroom.chatroom2locale(chatroom) or HenriqueLocale.DEFAULT
    lang = LocaleTool.locale2lang(locale)
    tz = pytz.timezone(HenriqueLocale.lang2tzdb(lang))
    dt_now = datetime.now(tz)

    text_in = KhalaPacket.packet2text(packet)
    config = {HenriqueEntity.Config.Field.LOCALE: locale}
    # entity_list = RelativeTimedeltaEntity.text2entity_list(text_in, config=config)
    entity_list = HenriqueEntity.text_extractors2entity_list(
        text_in, cls.config2extractors(config), )
    logger.debug({
        "len(entity_list)": len(entity_list),
        "entity_list": entity_list,
    })

    # no entity => plain lookup of the current nanban time
    if not entity_list:
        return cls.server_lang2lookup(server_codename, lang)
    if len(entity_list) != 1:
        return  # Invalid request

    entity = l_singleton2obj(entity_list)
    if FoxylibEntity.entity2type(
            entity) == RelativeTimedeltaEntity.entity_type():
        # e.g. an offset relative to the previously recorded nanban time
        reldelta = RelativeTimedeltaEntity.entity2relativedelta(entity)
        dt_in = cls.relativedelta2nanban_datetime(server_codename,
                                                  reldelta, )
        # raise Exception({"dt_in":dt_in, "reldelta":reldelta})
        if dt_in is None:
            # no previous nanban time recorded to apply the delta to
            msg_error = NanbanSkillError.codename_lang2text(
                NanbanSkillError.Codename.NO_PREV_NANBAN_ERROR, lang)
            raise HenriqueCommandError(msg_error)
        logger.debug({
            "reldelta": reldelta,
        })
    elif FoxylibEntity.entity2type(entity) == TimeEntity.entity_type():
        # a clock time => today's date at that time, in the chatroom timezone
        time_in = TimeEntity.value2datetime_time(
            FoxylibEntity.entity2value(entity))
        dt_in = PytzTool.localize(datetime.combine(dt_now.date(), time_in),
                                  tz)
        logger.debug({
            "time_in": time_in,
            "dt_in": dt_in,
        })
    else:
        raise RuntimeError({
            "Invalid entity type: {}".format(
                FoxylibEntity.entity2type(entity))
        })

    # snap to the nearest coming occurrence on the nanban period
    dt_nearest = DatetimeTool.datetime2nearest(dt_in, dt_now,
                                               NanbanTimedelta.period(),
                                               Nearest.COMING)
    logger.debug({
        "text_in": text_in,
        "dt_now": dt_now,
        "dt_in": dt_in,
        "dt_nearest": dt_nearest,
    })

    cls.nanban_datetime2upsert_mongo(packet, dt_nearest)
    return cls.server_lang2lookup(server_codename, lang)
def entity2is_me(cls, entity):
    """Return True when the entity's value equals the class's ME constant."""
    return FoxylibEntity.entity2value(entity) == cls.Constant.ME
def entity_type(cls):
    """Return the entity-type identifier derived from this class."""
    type_ = FoxylibEntity.class2entity_type(cls)
    return type_
def entity2relativedelta(cls, entity):
    """Convert this entity's value into a relativedelta via the Value helper."""
    value = FoxylibEntity.entity2value(entity)
    return cls.Value.value2relativedelta(value)
def entity2skill_codename(cls, entity):
    """A skill entity's value is its skill codename; return it."""
    codename = FoxylibEntity.entity2value(entity)
    return codename
def entity_1day2is_not_covered(entity_1day):
    # True when no multiday span (span_list_multiday, enclosing scope)
    # covers this single-day entity's span.
    span_1day = FoxylibEntity.entity2span(entity_1day)
    return not any(SpanTool.covers(span_multiday, span_1day)
                   for span_multiday in span_list_multiday)
def entity2is_wordbound_prefixed(entity):
    # True when the entity's span starts on a word boundary within str_in
    # (enclosing scope).
    span = FoxylibEntity.entity2span(entity)
    return StringTool.str_span2is_wordbound_prefixed(str_in, span)
def entity_group2span(cls, entity_group):
    """Span covering the whole group: first entity's start to last entity's end."""
    start = FoxylibEntity.entity2span(entity_group[0])[0]
    end = FoxylibEntity.entity2span(entity_group[-1])[1]
    return start, end