class TradegoodAction:
    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def j_yaml(cls):
        filepath = os.path.join(FILE_DIR, "action.yaml")
        return YAMLTool.filepath2j(filepath)

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def p_command(cls):
        return KhalaAction.j_yaml2p_command(cls.j_yaml())

    @classmethod
    def text_body2match(cls, text_body):
        return KhalaAction.pattern_text2match(cls.p_command(), text_body)

    @classmethod
    def respond(cls, packet):
        from henrique.main.entity.tradegood.subaction.tradegood_subactions import TradegoodTradegoodSubaction

        text = KhalaPacket.packet2text(packet)
        tradegood_entity_list = TradegoodEntity.text2entity_list(text)

        str_list = lmap(lambda p: TradegoodTradegoodSubaction.tradegood_entity2response(p, packet),
                        tradegood_entity_list)
        str_out = "\n\n".join(str_list)
        return KhalaResponse.Builder.str2j_response(str_out)

class TradegoodGooglesheets:
    @classmethod
    def spreadsheetId(cls):
        return "1XgTitp7h-oeAIzaxlLkQx1KX4c2uk4-izwn6W5ke290"

    @classmethod
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def dict_sheetname2data_ll(cls):
        sheetname_list = [NameskoSheet.NAME, NamesenSheet.NAME, TradegoodtypeSheet.NAME]
        return GooglesheetsTool.sheet_ranges2dict_range2data_ll(
            HenriqueGoogleapi.credentials(), cls.spreadsheetId(), sheetname_list,
        )

    @classmethod
    def sheetname2data_ll(cls, sheetname):
        return cls.dict_sheetname2data_ll()[sheetname]

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def dict_codename2tradegood(cls):
        tradegood_list_all = cls.tradegood_list_all()
        h = merge_dicts([{Tradegood.tradegood2codename(tradegood): tradegood}
                         for tradegood in tradegood_list_all],
                        vwrite=DictTool.VWrite.f_vwrite2f_hvwrite(vwrite_no_duplicate_key),
                        )
        return h

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def tradegood_list_all(cls):
        h_codename2aliases_en = NamesenSheet.dict_codename2aliases()
        h_codename2aliases_ko = NameskoSheet.dict_codename2aliases()
        h_codename2tradegoodtype = TradegoodtypeSheet.dict_codename2tradegoodtype()

        codename_list = luniq(chain(h_codename2aliases_en.keys(), h_codename2aliases_ko.keys()))

        def codename2tradegood(codename):
            aliases = DictTool.filter(lambda k, v: v,
                                      {"en": h_codename2aliases_en.get(codename),
                                       "ko": h_codename2aliases_ko.get(codename),
                                       })
            tradegoodtype = h_codename2tradegoodtype.get(codename)

            tradegood = {Tradegood.Field.CODENAME: codename,
                         Tradegood.Field.TRADEGOODTYPE: tradegoodtype,
                         Tradegood.Field.ALIASES: aliases,
                         }
            return DictTool.filter(lambda k, v: v, tradegood)

        return lmap(codename2tradegood, codename_list)

class Product:
    class Field:
        PORT = "port"
        TRADEGOOD = "tradegood"
        PRICE = "price"

    @classmethod
    def product2port(cls, product):
        return product.get(cls.Field.PORT)

    @classmethod
    def product2tradegood(cls, product):
        return product.get(cls.Field.TRADEGOOD)

    @classmethod
    def product2tradegoodtype(cls, product):
        from henrique.main.document.tradegood.tradegood import Tradegood

        tradegood = Tradegood.codename2tradegood(cls.product2tradegood(product))
        return Tradegood.tradegood2tradegoodtype(tradegood)

    @classmethod
    def product2price(cls, product):
        return product.get(cls.Field.PRICE)

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def list_all(cls):
        return lchain(*map(Port.port2products, Port.list_all()))

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def _dict_port2products(cls):
        return dict_groupby_tree(cls.list_all(), [Product.product2port])

    @classmethod
    def port2products(cls, port_codename):
        return cls._dict_port2products().get(port_codename) or []

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def _dict_tradegoodtype2products(cls):
        return dict_groupby_tree(cls.list_all(), [Product.product2tradegoodtype])

    @classmethod
    def tradegoodtype2products(cls, tradegoodtype_codename):
        return cls._dict_tradegoodtype2products().get(tradegoodtype_codename) or []

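# Illustrative usage sketch (not from the original source): how the grouped lookups above
# are expected to be consumed. "lisbon" and "textiles" are hypothetical codenames.
for product in Product.port2products("lisbon"):            # [] when the codename is unknown
    print(Product.product2tradegood(product), Product.product2price(product))

products_of_type = Product.tradegoodtype2products("textiles")
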
def test_05(self):
    Chatroom.chatrooms2upsert([ChatroomKakaotalk.chatroom()])
    ServerDoc.codenames2delete([Server.Codename.MARIS])

    sender_name = "iris"
    channel_user_codename = ChannelUserKakaotalk.sender_name2codename(sender_name)
    ChannelUser.channel_users2upsert([ChannelUserKakaotalk.sender_name2channel_user(sender_name)])

    packet = {KhalaPacket.Field.TEXT: "?남만 +2분",
              KhalaPacket.Field.CHATROOM: KakaotalkUWOChatroom.codename(),
              KhalaPacket.Field.CHANNEL_USER: channel_user_codename,
              KhalaPacket.Field.SENDER_NAME: sender_name,
              }

    with self.assertRaises(HenriqueCommandError) as context:
        NanbanSkill.packet2response(packet)

    self.assertEqual("[남만시각] 이전에 설정된 남만 시각이 없어서 +/-로 남만 시각을 조정할 수 없어요.",
                     str(context.exception))

    if HenriqueEnv.env() == HenriqueEnv.Value.LOCAL:
        return  # cannot test here because LOCAL has different settings

    hyp = HenriquePacket.packet2response(packet)
    ref = "[남만시각] 이전에 설정된 남만 시각이 없어서 +/-로 남만 시각을 조정할 수 없어요."
    self.assertEqual(hyp, ref)

class SkillGooglesheets:
    @classmethod
    def spreadsheetId(cls):
        return "18D67KgdOwq1RbDP5mgIS8FzFAAOkQPCHbD8zcFRyoyQ"

    @classmethod
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def dict_sheetname2data_ll(cls):
        sheetname_list = [AliasesEn.NAME, AliasesKo.NAME]
        return GooglesheetsTool.sheet_ranges2dict_range2data_ll(
            HenriqueGoogleapi.credentials(), cls.spreadsheetId(), sheetname_list,
        )

    @classmethod
    def sheetname2data_ll(cls, sheetname):
        return cls.dict_sheetname2data_ll()[sheetname]

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def dict_lang2codename2aliases(cls):
        return {"en": AliasesEn.dict_codename2aliases(),
                "ko": AliasesKo.dict_codename2aliases(),
                }

class ChatroomCollection:
    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def collection(cls, *_, **__):
        db = HenriqueMongodb.db()
        return db.get_collection("chatroom", *_, **__)

def start_discord():
    from henrique.main.singleton.env.henrique_env import HenriqueEnv
    from henrique.main.singleton.logger.henrique_logger import KhalaLogger

    logger = KhalaLogger.func_level2logger(start_discord, logging.DEBUG)
    logger.debug({"HenriqueEnv.env()": HenriqueEnv.env()})

    HenriqueWarmer.warmup_all()  # maybe update?

    # https://stackoverflow.com/a/50981577
    client = DiscordClient.client()

    discord_token = HenriqueEnv.key2value("DISCORD_TOKEN")
    logger.debug({"discord_token": discord_token})
    assert_true(discord_token)

    client.run(discord_token)

def env2port(cls, env):
    env_norm = HenriqueEnv.env2norm(env)
    h = {HenriqueEnv.Value.LOCAL: 14920,
         HenriqueEnv.Value.DEV: 14920,
         HenriqueEnv.Value.STAGING: 14920,
         HenriqueEnv.Value.PROD: 80,
         }
    return h.get(env_norm)

class ChannelUserDocCache:
    class Constant:
        MAXSIZE = 200

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    def warmer(cls):
        collection = ChannelUserCollection.collection()
        docs = map(MongoDBTool.bson2json, collection.find())
        ChannelUserDoc._docs2cache(docs)

def conn(cls):
    logger = HenriqueLogger.func_level2logger(cls.conn, logging.DEBUG)

    host = HenriqueEnv.key2value(cls.Env.HOST)
    port = HenriqueEnv.key2value(cls.Env.PORT)
    user = HenriqueEnv.key2value(cls.Env.USER)
    password = HenriqueEnv.key2value(cls.Env.PASSWORD)
    dbname = HenriqueEnv.key2value(cls.Env.DBNAME)

    j_connect = {"host": host,
                 "port": port,
                 "user": user,
                 "password": password,
                 "dbname": dbname,
                 }
    logger.debug({"j_connect": j_connect})

    conn = psycopg2.connect(**j_connect)
    return conn

def xoxb_token(cls):
    logger = HenriqueLogger.func_level2logger(cls.xoxb_token, logging.DEBUG)

    key = "SLACK_HENRIQUE_BOT_USER_OAUTH_ACCESS_TOKEN"
    token = HenriqueEnv.key2value(key)

    logger.debug({"key": key,
                  "bool(token)": bool(token),
                  "token is None": token is None,
                  })
    return token

class NanbanSkillError:
    class Codename:
        NO_PREV_NANBAN_ERROR = "NO_PREV_NANBAN_ERROR"

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def yaml(cls):
        filepath = os.path.join(FILE_DIR, "error.yaml")
        j = YAMLTool.filepath2j(filepath)
        return j

    @classmethod
    def codename_lang2text(cls, codename, lang):
        return JsonTool.down(cls.yaml(), [codename, lang])

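# Illustrative usage sketch (not from the original source): looking up a localized error
# message by codename and language, presumably the Korean text asserted in test_05 above.
text_ko = NanbanSkillError.codename_lang2text(NanbanSkillError.Codename.NO_PREV_NANBAN_ERROR, "ko")
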
class Sign:
    class Constant:
        PLUS = "+"
        MINUS = "-"

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def pattern(cls):
        return re.compile(RegexTool.rstr2wordbounded("[+-]"))

    @classmethod
    def sign2int(cls, sign):
        h = {cls.Constant.PLUS: 1,
             cls.Constant.MINUS: -1,
             }
        return h[sign]

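# Illustrative usage sketch (not from the original source). sign2int maps a matched sign
# character to a multiplier; the exact word-bounding behaviour of pattern() depends on
# RegexTool.rstr2wordbounded, and text_in is a hypothetical query string.
match = Sign.pattern().search(text_in)
if match:
    factor = Sign.sign2int(match.group())   # "+" -> 1, "-" -> -1 (assuming only the sign is captured)
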
class PortReference:
    class YAML:
        NAME = "name"

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def j_yaml(cls):
        filepath = os.path.join(FILE_DIR, "port_reference.yaml")
        j = YAMLTool.filepath2j(filepath)
        return j

    @classmethod
    def lang2name(cls, lang):
        j_yaml = cls.j_yaml()
        return jdown(j_yaml, [cls.YAML.NAME, lang])

class ServerGooglesheets:
    @classmethod
    def spreadsheetId(cls):
        return "1z_8oCBFUj5ArGr8WvQFio3-uoQgbAOC87BupNNAF0_M"

    @classmethod
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def dict_sheetname2data_ll(cls):
        sheetname_list = [NameskoSheet.NAME, NamesenSheet.NAME]
        return GooglesheetsTool.sheet_ranges2dict_range2data_ll(
            HenriqueGoogleapi.credentials(), cls.spreadsheetId(), sheetname_list,
        )

    @classmethod
    def sheetname2data_ll(cls, sheetname):
        return cls.dict_sheetname2data_ll()[sheetname]

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def server_list_all(cls):
        h_codename2aliases_en = NamesenSheet.dict_codename2aliases()
        h_codename2aliases_ko = NameskoSheet.dict_codename2aliases()

        codename_list = luniq(chain(h_codename2aliases_en.keys(), h_codename2aliases_ko.keys()))

        def codename2server(codename):
            aliases = DictTool.filter(lambda k, v: v,
                                      {"en": h_codename2aliases_en.get(codename),
                                       "ko": h_codename2aliases_ko.get(codename),
                                       })
            server = {Server.Field.CODENAME: codename,
                      Server.Field.ALIASES: aliases,
                      }
            return DictTool.filter(lambda k, v: v, server)

        return lmap(codename2server, codename_list)

class HenriqueCommand:
    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def pattern_prefix(cls):
        return re.compile(r"^\s*\?", re.I)

    @classmethod
    def packet2skill_code(cls, packet):
        text_in = KhalaPacket.packet2text(packet)
        config = HenriqueEntity.Config.packet2config(packet)
        return cls._text_config2skill_code(text_in, config)

    @classmethod
    @CacheTool.cache2hashable(cache=lru_cache(maxsize=HenriqueEntity.Cache.DEFAULT_SIZE),
                              f_pair=CacheTool.JSON.func_pair(),
                              )
    def _text_config2skill_code(cls, text_in, config):
        pattern_prefix = cls.pattern_prefix()
        match_list_prefix = list(pattern_prefix.finditer(text_in))
        if not match_list_prefix:
            return None

        entity_list = SkillEntity.text2entity_list(text_in, config=config)
        if not entity_list:
            return None

        spans_list = [lmap(lambda m: m.span(), match_list_prefix),
                      lmap(FoxylibEntity.entity2span, entity_list),
                      ]
        gap2is_valid = partial(StringTool.str_span2match_blank_or_nullstr, text_in)
        indextuple_list = ContextfreeTool.spans_list2reducible_indextuple_list(spans_list, gap2is_valid)

        assert_in(len(indextuple_list), [0, 1])
        if not indextuple_list:
            return None

        index_entity = l_singleton2obj(indextuple_list)[1]
        entity = entity_list[index_entity]
        return SkillEntity.entity2skill_codename(entity)

    @classmethod
    def text2is_query(cls, text):
        return bool(cls.pattern_prefix().match(text))

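# Illustrative usage sketch (not from the original source). A real packet carries more
# fields (chatroom, channel user, sender); only TEXT is shown here, and "?port lisbon"
# is a hypothetical query.
packet = {KhalaPacket.Field.TEXT: "?port lisbon"}
if HenriqueCommand.text2is_query(KhalaPacket.packet2text(packet)):
    skill_code = HenriqueCommand.packet2skill_code(packet)   # None when no skill entity follows the "?" prefix
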
def wrapper(f):
    die_on_error = HenriqueEnv.key2nullboolean(HenriqueEnv.Key.DIE_ON_ERROR)
    if die_on_error is True:
        return f

    @wraps(f)
    def wrapped(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except HenriqueCommandError as e:
            return str(e)
        except exception_tuple:
            message = str({"request.args": request.args})
            ErrorsChannel.post(message)
            return default

    return wrapped

class NanbanTimedeltaSuffix:
    class Key:
        SUFFIX = "suffix"
        I_DONT_KNOW = "i_dont_know"

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def yaml(cls):
        filepath = os.path.join(FILE_DIR, "suffix.yaml")
        j_yaml = YAMLTool.filepath2j(filepath, yaml.SafeLoader)
        return j_yaml

    @classmethod
    @VersionTool.inactive
    def str_timedelta2relativetimedelta(cls, str_timedelta, lang):
        suffix_format = JsonTool.down(cls.yaml(), [cls.Key.SUFFIX, lang])
        return suffix_format.format(str_timedelta)

    @classmethod
    def lang2str_idk(cls, lang):
        return JsonTool.down(cls.yaml(), [cls.Key.I_DONT_KNOW, lang])

class ChatroomKakaotalk:
    class Constant:
        LOCALE = "ko-KR"
        NAME = "uwo"

    @classmethod
    def codename(cls):
        return Chatroom.Constant.DELIM.join([Channel.Codename.KAKAOTALK_UWO, cls.Constant.NAME])

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def chatroom(cls):
        logger = KhalaLogger.func_level2logger(cls.chatroom, logging.DEBUG)

        chatroom = {Chatroom.Field.CHANNEL: Channel.Codename.KAKAOTALK_UWO,
                    Chatroom.Field.CODENAME: cls.codename(),
                    Chatroom.Field.LOCALE: cls.Constant.LOCALE,
                    }
        logger.debug({"chatroom": chatroom})
        return chatroom

def db(cls):
    client = cls.client()
    dbname = HenriqueEnv.key2value(cls.Env.MONGO_DBNAME)
    return client[dbname]

def uri(cls):
    logger = HenriqueLogger.func_level2logger(cls.uri, logging.DEBUG)

    uri = HenriqueEnv.key2value(cls.Env.MONGO_URI)
    # logger.debug({"uri": uri})
    return uri

class TradegoodtypeEntity:
    @classmethod
    def entity_type(cls):
        return ClassTool.class2fullpath(cls)

    @classmethod
    def text2norm(cls, text):
        return str2lower(text)

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    def _dict_lang2matcher(cls):
        return {lang: cls.lang2matcher(lang) for lang in HenriqueLocale.langs()}

    @classmethod
    @cached(cache=TTLCache(maxsize=HenriqueLocale.lang_count(), ttl=HenriqueEntity.Cache.DEFAULT_TTL))
    def lang2matcher(cls, lang):
        tgt_list = Tradegoodtype.list_all()
        langs_recognizable = HenriqueLocale.lang2langs_recognizable(lang)

        def tgt2aliases(tgt):
            for _lang in langs_recognizable:
                yield from Tradegoodtype.tradegoodtype_lang2aliases(tgt, _lang)

        h_value2aliases = merge_dicts([{Tradegoodtype.tradegoodtype2codename(tgt): list(tgt2aliases(tgt))}
                                       for tgt in tgt_list],
                                      vwrite=vwrite_no_duplicate_key)

        config = {GazetteerMatcher.Config.Key.NORMALIZER: cls.text2norm,
                  }
        matcher = GazetteerMatcher(h_value2aliases, config)
        return matcher

    @classmethod
    @CacheTool.cache2hashable(cache=cached(TTLCache(maxsize=HenriqueEntity.Cache.DEFAULT_SIZE,
                                                    ttl=HenriqueEntity.Cache.DEFAULT_TTL),
                                           ),
                              f_pair=CacheTool.JSON.func_pair(),
                              )
    def text2entity_list(cls, text_in, config=None):
        locale = HenriqueEntity.Config.config2locale(config) or HenriqueLocale.DEFAULT
        lang = LocaleTool.locale2lang(locale) or LocaleTool.locale2lang(HenriqueLocale.DEFAULT)

        matcher = cls.lang2matcher(lang)
        span_value_list = list(matcher.text2span_value_iter(text_in))

        entity_list = [{FoxylibEntity.Field.SPAN: span,
                        FoxylibEntity.Field.TEXT: StringTool.str_span2substr(text_in, span),
                        FoxylibEntity.Field.VALUE: value,
                        FoxylibEntity.Field.TYPE: cls.entity_type(),
                        }
                       for span, value in span_value_list]
        return entity_list

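# Illustrative usage sketch (not from the original source): the shape of the entity dicts
# returned by text2entity_list. The input text and the alias it contains are hypothetical.
for entity in TradegoodtypeEntity.text2entity_list("spices near lisbon", config=None):
    span = FoxylibEntity.entity2span(entity)        # (start, end) offsets into the input text
    codename = entity[FoxylibEntity.Field.VALUE]    # Tradegoodtype codename resolved by the matcher
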
class ChatroomuserGooglesheets:
    @classmethod
    def shorturl(cls):
        return "https://bit.ly/2TRYkvi"

    @classmethod
    def spreadsheetId(cls):
        return "1HcW7Im6SWy2T8g6POFDTOQXzH1jeTBjPxJn7KZ3RHB4"

    @classmethod
    def _dict_sheetname2data_ll(cls):
        sheetname_list = [AliasesSheet.NAME,
                          CommentsSheet.NAME,
                          ]
        return GooglesheetsTool.sheet_ranges2dict_range2data_ll(
            HenriqueGoogleapi.credentials(), cls.spreadsheetId(), sheetname_list,
        )

    @classmethod
    @CacheManager.attach_cachedmethod(self2cache=lambda x: LRUCache(maxsize=2))
    def dict_sheetname2data_ll(cls):
        return cls._dict_sheetname2data_ll()

    @classmethod
    def sheetname2data_ll(cls, sheetname):
        return cls.dict_sheetname2data_ll()[sheetname]

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def dict_codename2chatroomuser(cls):
        chatroomuser_list_all = cls.chatroomuser_list_all()
        h = merge_dicts([{Chatroomuser.chatroomuser2codename(chatroomuser): chatroomuser}
                         for chatroomuser in chatroomuser_list_all],
                        vwrite=DictTool.VWrite.f_vwrite2f_hvwrite(vwrite_no_duplicate_key),
                        )
        return h

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def chatroomuser_list_all(cls):
        h_codename2aliases = AliasesSheet.dict_codename2aliases()
        h_codename2comments = CommentsSheet.dict_codename2comments()

        codename_list = list(h_codename2aliases.keys())

        def codename2chatroomuser(codename):
            aliases = h_codename2aliases.get(codename) or []
            comments = h_codename2comments.get(codename)

            chatroomuser = {Chatroomuser.Field.CODENAME: codename,
                            Chatroomuser.Field.COMMENTS: comments,
                            Chatroomuser.Field.ALIASES: aliases,
                            }
            return DictTool.filter(lambda k, v: v, chatroomuser)

        return lmap(codename2chatroomuser, codename_list)

class CultureGooglesheets:
    @classmethod
    def spreadsheetId(cls):
        return "1s_EBQGNu0DlPedOXQNcfmE_LDk4wRq5QgJ9TsdBCCDE"

    @classmethod
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def dict_sheetname2data_ll(cls):
        sheetname_list = [NameskoSheet.NAME, NamesenSheet.NAME, PrefersSheet.NAME]
        return GooglesheetsTool.sheet_ranges2dict_range2data_ll(
            HenriqueGoogleapi.credentials(), cls.spreadsheetId(), sheetname_list,
        )

    @classmethod
    def sheetname2data_ll(cls, sheetname):
        return cls.dict_sheetname2data_ll()[sheetname]

    @classmethod
    def culture_list_all(cls):
        h_codename2aliases_en = NamesenSheet.dict_codename2aliases()
        h_codename2aliases_ko = NameskoSheet.dict_codename2aliases()
        h_codename2prefers = PrefersSheet.dict_codename2prefers()

        codename_list = luniq(chain(h_codename2aliases_en.keys(),
                                    h_codename2aliases_ko.keys(),
                                    h_codename2prefers.keys(),
                                    ))

        def codename2culture(codename):
            aliases = DictTool.filter(lambda k, v: v,
                                      {"en": h_codename2aliases_en.get(codename),
                                       "ko": h_codename2aliases_ko.get(codename),
                                       })
            culture = {Culture.Field.CODENAME: codename,
                       Culture.Field.ALIASES: aliases,
                       Culture.Field.PREFERS: h_codename2prefers.get(codename) or [],
                       }
            return DictTool.filter(lambda k, v: v, culture)

        return lmap(codename2culture, codename_list)

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def dict_codename2culture(cls):
        culture_list = cls.culture_list_all()
        assert_is_not_none(culture_list)

        h_codename2culture = merge_dicts([{Culture.culture2codename(culture): culture}
                                          for culture in culture_list])
        return h_codename2culture

class PortGooglesheets:
    @classmethod
    def spreadsheetId(cls):
        return "1DxaBuSsOvAf4nsy4n2XNwcmPVqBLRvWgCbs5Y8AHFtE"

    @classmethod
    def _dict_sheetname2data_ll(cls):
        sheetname_list = [NameskoSheet.NAME,
                          NamesenSheet.NAME,
                          CommentsKoSheet.NAME,
                          CultureSheet.NAME,
                          ProductSheet.NAME,
                          ]
        return GooglesheetsTool.sheet_ranges2dict_range2data_ll(
            HenriqueGoogleapi.credentials(), cls.spreadsheetId(), sheetname_list,
        )

    @classmethod
    @CacheManager.attach_cachedmethod(self2cache=lambda x: LRUCache(maxsize=2))
    def dict_sheetname2data_ll(cls):
        return cls._dict_sheetname2data_ll()

    @classmethod
    def sheetname2data_ll(cls, sheetname):
        return cls.dict_sheetname2data_ll()[sheetname]

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def dict_codename2port(cls):
        port_list_all = cls.port_list_all()
        h = merge_dicts([{Port.port2codename(port): port} for port in port_list_all],
                        vwrite=DictTool.VWrite.f_vwrite2f_hvwrite(vwrite_no_duplicate_key),
                        )
        return h

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def port_list_all(cls):
        h_codename2aliases_en = NamesenSheet.dict_codename2aliases()
        h_codename2aliases_ko = NameskoSheet.dict_codename2aliases()
        h_codename2culture = CultureSheet.dict_codename2culture()
        h_codename2product_list = ProductSheet.dict_codename2products()
        h_codename2comments_ko = CommentsKoSheet.dict_codename2comments()

        codename_list = luniq(chain(h_codename2aliases_en.keys(), h_codename2aliases_ko.keys()))

        def codename2port(codename):
            aliases = DictTool.filter(lambda k, v: v,
                                      {"en": h_codename2aliases_en.get(codename),
                                       "ko": h_codename2aliases_ko.get(codename),
                                       })
            comments = DictTool.filter(lambda k, v: v,
                                       {"ko": h_codename2comments_ko.get(codename),
                                        })
            port = {Port.Field.CODENAME: codename,
                    Port.Field.CULTURE: h_codename2culture[codename],
                    Port.Field.ALIASES: aliases,
                    Port.Field.PRODUCTS: h_codename2product_list.get(codename),
                    Port.Field.COMMENTS: comments,
                    }
            return DictTool.filter(lambda k, v: v, port)

        return lmap(codename2port, codename_list)

class Port:
    class Field:
        CODENAME = "codename"
        CULTURE = "culture"
        ALIASES = "aliases"
        PRODUCTS = "products"
        COMMENTS = "comments"

    @classmethod
    def _dict_codename2port_all(cls):
        from henrique.main.document.port.googlesheets.port_googlesheets import PortGooglesheets
        return PortGooglesheets.dict_codename2port()

    @classmethod
    def list_all(cls):
        return list(cls._dict_codename2port_all().values())

    @classmethod
    def port2codename(cls, port):
        return port[cls.Field.CODENAME]

    @classmethod
    def port2culture(cls, port):
        return port[cls.Field.CULTURE]

    @classmethod
    def port2products(cls, port):
        return port.get(cls.Field.PRODUCTS) or []

    @classmethod
    def port_lang2aliases(cls, port, lang):
        return JsonTool.down(port, [cls.Field.ALIASES, lang])

    @classmethod
    def port_lang2comments(cls, port, lang):
        return JsonTool.down(port, [cls.Field.COMMENTS, lang])

    @classmethod
    def port_langs2aliases(cls, port, langs):
        return luniq(chain(*[cls.port_lang2aliases(port, lang) for lang in langs]))

    @classmethod
    def port_lang2name(cls, port, lang):
        return IterTool.first(cls.port_lang2aliases(port, lang))

    @classmethod
    def codename2port(cls, codename):
        return cls._dict_codename2port_all().get(codename)

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def _dict_tradegood2ports(cls):
        def h_tradegood2ports_iter():
            for port in cls.list_all():
                for product in cls.port2products(port):
                    yield {Product.product2tradegood(product): [port]}

        h_tg2ports = merge_dicts(list(h_tradegood2ports_iter()), vwrite=DictTool.VWrite.extend)
        return h_tg2ports

    @classmethod
    def tradegood2ports(cls, tg_codename):
        return cls._dict_tradegood2ports().get(tg_codename) or []

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def _dict_culture2ports(cls):
        h_culture2ports = merge_dicts([{cls.port2culture(port): [port]} for port in cls.list_all()],
                                      vwrite=DictTool.VWrite.extend)
        return h_culture2ports

    @classmethod
    def culture2ports(cls, culture_codename):
        return cls._dict_culture2ports().get(culture_codename) or []

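# Illustrative usage sketch (not from the original source). The codenames below
# ("lisbon", "Nutmeg", "iberia") are hypothetical placeholders.
port = Port.codename2port("lisbon")                 # None when the codename is unknown
if port:
    name_ko = Port.port_lang2name(port, "ko")       # first Korean alias
ports_selling = Port.tradegood2ports("Nutmeg")      # [] when no port sells the tradegood
ports_of_culture = Port.culture2ports("iberia")
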
class TradegoodtypeGooglesheets:
    @classmethod
    def spreadsheetId(cls):
        return "1tCXSXrjzOdR8URx8SavC9feUrgSELuB87V5IvzdFsPE"

    @classmethod
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def dict_sheetname2data_ll(cls):
        sheetname_list = [NameskoSheet.NAME, NamesenSheet.NAME, CategorySheet.NAME]
        return GooglesheetsTool.sheet_ranges2dict_range2data_ll(
            HenriqueGoogleapi.credentials(), cls.spreadsheetId(), sheetname_list,
        )

    @classmethod
    def sheetname2data_ll(cls, sheetname):
        return cls.dict_sheetname2data_ll()[sheetname]

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def dict_codename2tradegoodtype(cls):
        tradegoodtype_list_all = cls.tradegoodtype_list_all()
        h = merge_dicts([{Tradegoodtype.tradegoodtype2codename(tgt): tgt}
                         for tgt in tradegoodtype_list_all],
                        vwrite=DictTool.VWrite.f_vwrite2f_hvwrite(vwrite_no_duplicate_key),
                        )
        return h

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @cached(cache=TTLCache(maxsize=2, ttl=60 * 10))
    def tradegoodtype_list_all(cls):
        h_codename2aliases_en = NamesenSheet.dict_codename2aliases()
        h_codename2aliases_ko = NameskoSheet.dict_codename2aliases()
        h_codename2category = CategorySheet.dict_codename2tradegoodtype()

        codename_list = luniq(chain(h_codename2aliases_en.keys(), h_codename2aliases_ko.keys()))

        def codename2tradegoodtype(codename):
            aliases = DictTool.filter(lambda k, v: v,
                                      {"en": h_codename2aliases_en.get(codename),
                                       "ko": h_codename2aliases_ko.get(codename),
                                       })
            category = h_codename2category.get(codename)

            tradegoodtype = {Tradegoodtype.Field.CODENAME: codename,
                             Tradegoodtype.Field.CATEGORY: category,
                             Tradegoodtype.Field.ALIASES: aliases,
                             }
            return DictTool.filter(lambda k, v: v, tradegoodtype)

        return lmap(codename2tradegoodtype, codename_list)

def filepath_privatekey(cls):
    # http://console.cloud.google.com/iam-admin/serviceaccounts/details/112472142364049649520
    return HenriqueEnv.key2value("GOOGLEAPI_PRIVATEKEY_FILEPATH")

class TradegoodEntitySpecialcase:
    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def pattern_ko(cls):
        return re.compile(RegexTool.rstr2wordbounded(r"육메(?:크|클)?"))

    @classmethod
    def text2entity_list(cls, text_in, config=None):
        locale = HenriqueEntity.Config.config2locale(config) or HenriqueLocale.DEFAULT
        lang = LocaleTool.locale2lang(locale)
        langs_recognizable = HenriqueLocale.lang2langs_recognizable(lang)

        if "ko" not in langs_recognizable:
            return []

        match_list = list(cls.pattern_ko().finditer(text_in))

        def match2entity_list(match):
            span = match.span()
            assert_in(SpanTool.span2len(span), (2, 3))

            entity_list = []
            s, e = span

            # first character "육" => Nutmeg
            span_nutmeg = (s, s + 1)
            entity_nutmeg = {FoxylibEntity.Field.SPAN: span_nutmeg,
                             FoxylibEntity.Field.TEXT: StringTool.str_span2substr(text_in, span_nutmeg),
                             FoxylibEntity.Field.VALUE: "Nutmeg",
                             FoxylibEntity.Field.TYPE: TradegoodEntity.entity_type(),
                             }
            entity_list.append(entity_nutmeg)

            # second character "메" => Mace
            span_mace = (s + 1, s + 2)
            entity_mace = {FoxylibEntity.Field.SPAN: span_mace,
                           FoxylibEntity.Field.TEXT: StringTool.str_span2substr(text_in, span_mace),
                           FoxylibEntity.Field.VALUE: "Mace",
                           FoxylibEntity.Field.TYPE: TradegoodEntity.entity_type(),
                           }
            entity_list.append(entity_mace)

            # optional third character "크"/"클" => Cloves
            if SpanTool.span2len(span) == 3:
                span_clove = (s + 2, s + 3)
                entity_cloves = {FoxylibEntity.Field.SPAN: span_clove,
                                 FoxylibEntity.Field.TEXT: StringTool.str_span2substr(text_in, span_clove),
                                 FoxylibEntity.Field.VALUE: "Cloves",
                                 FoxylibEntity.Field.TYPE: TradegoodEntity.entity_type(),
                                 }
                entity_list.append(entity_cloves)

            return entity_list

        entity_list = [entity
                       for m in match_list
                       for entity in match2entity_list(m)]
        return entity_list

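# Worked example (not from the original source), tracing match2entity_list above.
# The Korean shorthand "육메" matches a 2-character span that is split character by
# character into "육" -> Nutmeg and "메" -> Mace; "육메크" / "육메클" match a 3-character
# span and additionally yield Cloves. config_ko is a hypothetical config whose locale
# recognizes Korean.
entities = TradegoodEntitySpecialcase.text2entity_list("육메 시세", config=config_ko)
# -> entities with values "Nutmeg" (span (0, 1)) and "Mace" (span (1, 2))
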
class TimedeltaEntityUnit:
    class Value:
        YEAR = "year"
        MONTH = "month"
        WEEK = "week"
        DAY = "day"
        HOUR = "hour"
        MINUTE = "minute"
        SECOND = "second"

    @classmethod
    @WARMER.add(cond=not HenriqueEnv.is_skip_warmup())
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=2))
    def yaml(cls):
        filepath = os.path.join(FILE_DIR, "timedelta_unit.yaml")
        j_yaml = YAMLTool.filepath2j(filepath, yaml.SafeLoader)
        return j_yaml

    @classmethod
    def normalize(cls, text):
        return str2lower(text)

    @classmethod
    def timedelta2unit(cls, td):
        h = {timedelta(days=7): cls.Value.WEEK,
             timedelta(days=1): cls.Value.DAY,
             timedelta(seconds=3600): cls.Value.HOUR,
             timedelta(seconds=60): cls.Value.MINUTE,
             timedelta(seconds=1): cls.Value.SECOND,
             }
        return h[td]

    @classmethod
    def v_unit_lang2str(cls, v, unit, lang):
        j_yaml = cls.yaml()
        str_unit = JsonTool.down(j_yaml, [unit, lang])[0]

        if lang in {"en"}:
            return " ".join([str(v), str_unit])
        if lang in {"ko"}:
            return "".join([str(v), str_unit])
        raise Exception("Invalid language: {}".format(lang))

    @classmethod
    def unit2plural(cls, unit):
        return "{}s".format(unit)

    @classmethod
    def gazetteer_all(cls):
        gazetteer = {k: lchain(*j.values()) for k, j in cls.yaml().items()}
        return gazetteer

    @classmethod
    def langs2gazetteer(cls, langs):
        gazetteer = {k: lchain(*[j.get(lang, []) for lang in langs])
                     for k, j in cls.yaml().items()}
        return gazetteer

    @classmethod
    def langs2matcher(cls, langs):
        return cls._langs2matcher(frozenset(langs))

    @classmethod
    @FunctionTool.wrapper2wraps_applied(lru_cache(maxsize=4))
    def _langs2matcher(cls, langs):
        logger = HenriqueLogger.func_level2logger(cls._langs2matcher, logging.DEBUG)

        gazetteer = cls.langs2gazetteer(langs)

        def texts2pattern(texts):
            rstr_raw = RegexTool.rstr_iter2or(map(re.escape, texts))
            left_bounds = lchain(RegexTool.bounds2suffixed(RegexTool.left_wordbounds(), r"\d"),
                                 RegexTool.left_wordbounds(),
                                 )
            right_bounds = RegexTool.right_wordbounds()
            rstr = RegexTool.rstr2bounded(rstr_raw, left_bounds, right_bounds)

            logger.debug({"rstr": rstr,
                          "rstr_raw": rstr_raw,
                          })
            return re.compile(rstr, re.I)

        config = {GazetteerMatcher.Config.Key.TEXTS2PATTERN: texts2pattern,
                  GazetteerMatcher.Config.Key.NORMALIZER: cls.normalize,
                  }
        matcher = GazetteerMatcher(gazetteer, config=config)
        return matcher

    @classmethod
    def warmer(cls):
        cls.langs2matcher({"ko", "en"})
        cls.langs2matcher({"en"})

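# Illustrative usage sketch (not from the original source). Output strings and matcher
# hits depend on the aliases defined in timedelta_unit.yaml; "분" as a Korean alias for
# "minute" is an assumption.
from datetime import timedelta

unit = TimedeltaEntityUnit.timedelta2unit(timedelta(seconds=60))   # -> "minute"
text_en = TimedeltaEntityUnit.v_unit_lang2str(2, unit, "en")       # "2 " + first "en" alias from the yaml
matcher = TimedeltaEntityUnit.langs2matcher({"ko", "en"})
hits = list(matcher.text2span_value_iter("+2분"))                  # e.g. [((2, 3), "minute")]
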