def load_mods():
    """Collect language-mod classes from bundled and local rulesets.

    Reads every ``mod_*.json`` resource shipped with the package, then any
    custom ``./assets/mod_*.json`` rulesets, merges the per-language class
    lists, and resolves each dotted class path to a class object.

    Returns:
        dict: language key -> list of resolved mod classes.
    """
    import json
    import glob
    import os
    import sys
    from sagas.conf import resource_files, resource_path

    mod_files = [resource_path(f) for f in resource_files('mod_*.json')]
    # load custom rulesets
    if os.path.exists('./assets'):
        mod_files.extend(glob.glob('./assets/mod_*.json'))
        # make local modules importable for class_from_module_path below
        sys.path.append(os.path.abspath('.'))

    lang_mods = {}
    for mod_file in mod_files:
        logger.info(f'.. load mod {mod_file}')
        with open(mod_file) as f:
            cfg = json.load(f)
        for k, v in cfg.items():
            # setdefault + extend accumulates into a fresh list instead of
            # aliasing the parsed JSON list, so later extends for the same
            # key can never mutate another file's loaded data
            lang_mods.setdefault(k, []).extend(v)

    lang_mod_classes = {}
    for k, v in lang_mods.items():
        lang_mod_classes[k] = [class_from_module_path(c) for c in v]
    return lang_mod_classes
def user_dict():
    """Load the custom Chinese lexicon into jieba and demo a segmentation."""
    from sagas.conf import resource_path

    dict_file = resource_path('dict_zh.txt')
    jieba.load_userdict(dict_file)
    # the default cut is accurate mode
    segments = jieba.cut("列出所有的采购订单")
    print(", ".join(segments))
def __init__(self, lang: Text):
    """Load the per-language spa analysis config, or start empty if absent."""
    from os import path
    import json_utils
    from sagas.conf import resource_path

    cfg_file = resource_path(f'analspa_{lang}.json')
    if path.exists(cfg_file):
        self.root = json_utils.read_json_file(cfg_file)
    else:
        self.root = {}
def test_dict():
    """Segment a sample phrase with the LTP segmenter plus custom lexicon."""
    dict_file = resource_path('dict_zh.txt')
    segmentor = Segmentor()  # initialize instance
    segmentor.load_with_lexicon(os.path.join(MODELDIR, "cws.model"), dict_file)
    words = segmentor.segment('列出派工单')  # fail: '列出所有的采购订单'
    print('\t'.join(words))
    segmentor.release()
def __init__(self):
    """Assemble the executable GraphQL schema from the bundled SDL file."""
    from sagas.conf import resource_path

    self.type_defs = load_schema_from_path(resource_path('schemas.graphql'))

    query = QueryType()
    bucket = ObjectType('Bucket')
    behave = ObjectType('Behave')
    # NOTE(review): `desc` is constructed but never passed to the schema —
    # presumably a placeholder for a future resolver; confirm before removing.
    desc = ObjectType('Desc')

    query.set_field('bucket_behaves', bucket_behaves)
    bucket.set_field('behaves', resolve_behaves)

    self.schema = make_executable_schema(self.type_defs, [behave, bucket, query])
def __init__(self):
    """Accumulate intent rules from every bundled ``ruleset_*.json`` resource.

    Each ruleset file holds a JSON list of rule dicts; all lists are
    concatenated into ``self.intents``.
    """
    # dropped unused `import glob` (the glob-based loop was dead,
    # commented-out code superseded by resource_files)
    import json_utils
    from sagas.conf import resource_files, resource_path

    self.intents = []
    files = [resource_path(f) for f in resource_files('ruleset_*.json')]
    for rule_file in files:
        rules = json_utils.read_json_file(rule_file)
        self.intents.extend(rules)
def __init__(self):
    """Load the full LTP 3.4 pipeline: word segmenter (with custom lexicon),
    POS tagger, dependency parser, named-entity recognizer and semantic
    role labeller, all from the configured model directory."""
    from sagas.conf import resource_path

    model_dir = f'{cf.conf_dir}/ai/ltp/ltp_data_v3.4.0'

    # segmenter augmented with the project's Chinese user dictionary
    self.segmentor = Segmentor()
    lexicon = resource_path('dict_zh.txt')
    self.segmentor.load_with_lexicon(os.path.join(model_dir, "cws.model"), lexicon)

    self.postagger = Postagger()
    self.postagger.load(os.path.join(model_dir, "pos.model"))

    self.parser = Parser()
    self.parser.load(os.path.join(model_dir, "parser.model"))

    self.recognizer = NamedEntityRecognizer()
    self.recognizer.load(os.path.join(model_dir, "ner.model"))

    self.labeller = SementicRoleLabeller()
    self.labeller.load(os.path.join(model_dir, "pisrl.model"))
def start(self):
    """Instantiate and start every mod listed in the startup config files.

    Reads the bundled ``startups_default*.json`` resources plus any local
    ``./assets/startups_*.json``; each file is a JSON list of dotted class
    paths. Instances are kept in ``self.mods`` and started immediately.
    """
    import json
    import glob
    import os
    import sys
    from sagas.conf import resource_files, resource_path

    startup_files = [
        resource_path(f) for f in resource_files('startups_default*.json')
    ]
    if os.path.exists('./assets'):
        startup_files.extend(glob.glob('./assets/startups_*.json'))
        sys.path.append(os.path.abspath('.'))

    for startup_file in startup_files:
        logger.info(f'.. load startup {startup_file}')
        with open(startup_file) as f:
            cfg = json.load(f)
        for cls in [class_from_module_path(c) for c in cfg]:
            instance = cls()
            self.mods.append(instance)
            instance.start()
def init_logger():
    """Configure the logging subsystem from the bundled ``logger.yml``."""
    from sagas.conf import resource_path

    with open(resource_path('logger.yml'), 'r') as f:
        # safe_load accepts the open stream directly; no need to read() first
        config = yaml.safe_load(f)
    logging.config.dictConfig(config)