def __init__(self, model_dir=os.path.join('models', 'bert', 'classification')):
    Service.__init__(self, 'classification', 'bert', ['parse'])
    self.models = {}
    self.results = {}
    self.id2label = {}
    langs = set()
    for name in os.listdir(model_dir):
        if not os.path.isdir(os.path.join(model_dir, name)):
            continue
        with open(os.path.join(model_dir, name, transformers.CONFIG_NAME), 'r') as f:
            configs = json.load(f)
        num_labels = configs['_num_labels']
        language = configs[constants.MODEL_INFO][constants.LANGUAGE]
        langs.add(language)
        pretrained_model_name_or_path = os.path.join(model_dir, name)
        self.models[name] = bert_for_sentence_classification.BertForSentenceClassification(
            language, num_labels, pretrained_model_name_or_path)
        self.results[name] = configs[constants.MODEL_INFO]
        self.id2label[name] = models_utilities.load_labels(pretrained_model_name_or_path)
    self.langs = list(langs)
def restart(service):
    """Restarts the given service."""
    if service == 'all':
        return [Service(s).restart() for s in Service.SERVICE_LIST]
    service = Service(service)
    s.info(f"Restarting $[{service.name}]")
    service.restart()
def setUp(self):
    repoBooks = RepositoryBooks("Repobooks:")
    repoClients = RepositoryClients("Repoclients:")
    repoRental = RepositoryRentals("Reporentals:")
    validatorBooks = ValidateBook()
    validatorClients = ValidateClient()
    validatorRental = ValidateRental()
    self.Srv = Service(repoBooks, repoClients, repoRental,
                       validatorRental, validatorBooks, validatorClients)
def __init__(self):
    Service.__init__(self, 'names', 'misc', ['ner'], ['fiscal_code'])
    self._person_prefixes = [
        'sig.ra', 'sig.a', 'sig.na', 'sig', 'sig.', 'avv', 'avv.',
        'dott', 'dott.', 'dr', 'dr.', 'egr', 'ra'
    ]
    self._names = _line_set(os.path.join('resources', 'names', 'it.txt'))
    self._surnames = _line_set(os.path.join('resources', 'surnames', 'it.txt'))
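# The `_line_set` helper used above is not shown in this snippet. Below is a
# minimal sketch of one plausible implementation, assuming it simply reads a
# UTF-8 text file into a set of stripped, non-empty lines; treat it as a
# hypothetical reconstruction, not the original helper.
def _line_set(path):
    with open(path, encoding='utf-8') as f:
        # One entry per line; blank lines are skipped.
        return {line.strip() for line in f if line.strip()}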
def __init__(self):
    Service.__init__(self, 'codes', 'regex', [])
    self.regexes = [
        (re.compile(r'([a-z]{6}\s?\d{2}\s?[a-z]{1}\s?\d{2}\s?[a-z]{1}\s?\d{3}\s?[a-z]{1})',
                    re.IGNORECASE), 'FISCAL_CODE', 'it'),
        (re.compile(r'(IT\d{2}[ ][a-zA-Z]\d{3}[ ]\d{4}[ ]\d{4}[ ]\d{4}[ ]\d{4}[ ]\d{3})',
                    re.IGNORECASE), 'IBAN', 'it'),
        (re.compile(r'(IT\d{2}[a-zA-Z]\d{22}|IT\d{2}[a-zA-Z][ ]\d{5}[ ]\d{5}[ ]\d{12})',
                    re.IGNORECASE), 'IBAN', 'it'),
        (re.compile(r'(IT\s?\d{2}\s?[a-z]\s?\d{8}\s?\d{6}\s?\d{8})',
                    re.IGNORECASE), 'IBAN', 'it'),
        (re.compile(r'\D(\d{11})\D', re.IGNORECASE), 'PIVA', 'it')
    ]
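# Each triple above pairs a compiled pattern with an entity label and a
# language code, but the scanning loop that applies them is not part of this
# snippet. The helper below is a hypothetical sketch of how such triples are
# typically used; `scan_codes` is not a name from the original service.
def scan_codes(regexes, text):
    matches = []
    for pattern, label, lang in regexes:
        for m in pattern.finditer(text):
            # Every pattern wraps the code of interest in group 1.
            matches.append({
                'label': label,
                'lang': lang,
                'text': m.group(1),
                'start': m.start(1),
                'end': m.end(1),
            })
    return matches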
def test_main(self):
    manager = ServiceManager([
        Service('webserver', module_name='test_services_webserver'),
        Service('raspberry', module_filename='./test_services_raspberry.py')
    ])
    manager_thread = threading.Thread(target=manager.serve)
    manager_thread.start()
    manager_thread.join()
def __init__(self, model_dir=os.path.join('models', 'gensim', 'lda'),
             stopwords_dir=os.path.join('resources', 'stopwords')):
    Service.__init__(self, 'topic-modeling', 'lda-gensim', ['parse'])
    self.models = {}
    self.stopwords = {}
    for name in os.listdir(model_dir):
        self.models[name] = LdaModel.load(os.path.join(model_dir, name, 'model'))
    for name in os.listdir(stopwords_dir):
        lang = name[:2]
        with open(os.path.join(stopwords_dir, name)) as f:
            self.stopwords[lang] = {line.strip() for line in f}
def stop(service):
    """Stops any running service."""
    if service == 'all':
        s.info(
            f"Stopping all services: $[{', '.join(map(str, Service.SERVICE_LIST))}]\n"
        )
        return [Service(s).stop() for s in Service.SERVICE_LIST]
    service = Service(service)
    s.info(f"Stopping $[{service.name}]\n")
    service.stop()
def __init__(self, models_dir='models/allen/sentiment-regression'):
    Service.__init__(self, 'sentiment', 'allen-regression', ['parse'])
    self.models = {}
    self.descriptions = {}
    self.indexer = ELMoTokenCharactersIndexer()
    for lang in os.listdir(models_dir):
        if len(lang) == 2:
            self.models[lang] = self._load_model(os.path.join(models_dir, lang))
            self.descriptions[lang] = _load_model_description(os.path.join(models_dir, lang))
def __init__(self, name):
    """Initialise the remote-call object.

    @param port: int, port number of the remote distributed server
    @param rootaddr: address of the root-node server
    """
    self._name = name
    self._factory = pb.PBClientFactory()
    self._reference = ProxyReference(self)
    self._service = Service('proxy')
    self._addr = None
def __init__(self, models_dir='models/opennmt/translation'):
    Service.__init__(self, 'translation', 'opennmt', ['parse'])
    # define opt values for the translation task
    self.models = {}
    self.descriptions = {}
    for lang in os.listdir(models_dir):
        if len(lang) == 5:
            self.models[lang] = self._load_model(os.path.join(models_dir, lang), lang)
            self.descriptions[lang] = _load_model_description(os.path.join(models_dir, lang))
def __init__(self, server, cfg_fname='linkserv.json'):
    Service.__init__(self, server)
    self.nick = 'linkserv'
    self.delim = '\r\n.\r\n'
    self.links = []
    self._cfg_fname = cfg_fname
    # _lock/_unlock end up bound to the lock's acquire/release methods
    self._lock = threading.Lock()
    self._unlock = self._lock.release
    self._lock = self._lock.acquire
    j = self.get_cfg()
    if 'autoconnect' in j and j['autoconnect'] in self._yes:
        self.connect_all()
def __init__(self, models_dir='models/allen/ner'):
    Service.__init__(self, 'ner', 'allen-custom', ['parse'])
    self.readers = {}
    self.predictors = {}
    self.descriptions = {}
    for lang in os.listdir(models_dir):
        reader, predictor = self._load_reader_and_predictor(os.path.join(models_dir, lang))
        self.readers[lang] = reader
        self.predictors[lang] = predictor
        self.descriptions[lang] = _load_model_description(os.path.join(models_dir, lang))
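# Several of the services above call a module-level `_load_model_description`
# helper that is not included here. The sketch below is a hypothetical
# reconstruction that assumes each model directory ships a JSON description
# file; the `description.json` file name is an assumption, not taken from the
# original code.
import json
import os


def _load_model_description(model_dir):
    path = os.path.join(model_dir, 'description.json')
    if not os.path.isfile(path):
        # Fall back to an empty description when the file is missing.
        return {}
    with open(path, encoding='utf-8') as f:
        return json.load(f)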
def start(service, *args, **kwargs):
    """Starts the various services used during API development."""
    if service == 'all':
        s.info(
            f"Starting all services: $[{', '.join(map(str, Service.SERVICE_LIST))}]\n"
        )
        return [
            Service(s, *args, **kwargs).start() for s in Service.SERVICE_LIST
        ]
    service = Service(service, *args, **kwargs)
    s.info(f"Starting $[{service.name}]\n")
    service.start()
def getExclusiveGroups(l, tl, prefixes):
    (qcl0, qcl1) = assignEndpoint2(tl, l, prefixes)
    views0 = []
    views1 = []
    for cl in qcl0:
        l0 = qcl0[cl]
        serv = Service(cl, l0)
        views0 = views0 + [serv]
    for t in qcl1:
        eps = qcl1[t]
        elems = [JoinBlock([Service(ep, t)]) for ep in eps]
        ub = UnionBlock(elems)
        views1 = views1 + [ub]
    return (views0, views1)
def getMetadata(idrac, port, file_collection_path, file_collection_time):
    meta_dict = {}
    response = requests.get(f"https://{user}:{passwd}@{idrac}/redfish/v1/", verify=False)
    json_response = response.json()
    service_data = Service(**json_response)
    serviceMeta = service_data.getMetadata()
    filename = getFileDescriptor(serviceMeta, idrac)
    output_path = f"{file_collection_path}/output/{filename}"
    try:
        fd = open(output_path, "a+")
    except FileNotFoundError:
        # The output directory does not exist yet: create it, then retry.
        os.makedirs(f"{file_collection_path}/output", exist_ok=True)
        fd = open(output_path, "a+")
    # meta_dict[idrac] = getFile
    return [serviceMeta[0], serviceMeta[1], fd, file_collection_path, file_collection_time]
def __restart_service(self, name):
    try:
        service = Service.create(name)
        service.restart()
    except ServiceError as exc:
        print(exc)
def __init__(self, server, cfg_fname='tcserv.json'):
    Service.__init__(self, server)
    self.nick = 'tcserv'
    self.cfg_fname = cfg_fname
    self.handle_error = server.handle_error
    self.dbg = lambda m: self.server.dbg('TCServ: %s' % m)
    self._db_lock = threading.Lock()
    self._lock_db = self._db_lock.acquire
    self._unlock_db = self._db_lock.release
    self.onion_peers = {}
    self.db_name = ':memory:'
    self.peers = 0
    self.unlisted_peers = []
    self._load_config()
    self.listener = TC_Listener(self)
    self.connect_all()
def __stop_service(self, name):
    try:
        service = Service.create(name)
        service.stop()
    except ServiceError as exc:
        print(exc)
def getServices(services={}):
    try:
        raw = etcdClient.read(config.ETCD_ROOT_KEY, recursive=True, sorted=True)
    except Exception:
        moduleLogger.error('Failed to connect to etcd host %s:%s' % (config.ETCD_HOST, config.ETCD_PORT))
        return services
    rawServices = [x for x in raw.get_subtree() if x.dir and not x.key == config.ETCD_ROOT_KEY]
    for service in rawServices:
        # split the key to obtain the service name
        name = service.key[1:].split('/')[1]
        # load the available backends
        try:
            backends = [Backend(**json.loads(x.value)) for x in service.leaves]
        except Exception:
            backends = []
        if backends:
            # retrieve the service if it already exists, otherwise create a new one
            s = services.get(name, Service(name))
            for back in backends:
                s.addBackend(back)
            services[name] = s
        else:
            moduleLogger.debug('No backends found for service %s, not including it in the list' % name)
    return services
def getUnitaryStars(l, tl, genPred, prefixes):
    (qcl0, qcl1) = assignEndpoint(tl, l, genPred, prefixes)
    views0 = []
    views1 = []
    for cl in qcl0:
        l0 = qcl0[cl]
        vs = formStars2(l0)
        serv = [Service(cl, view) for view in vs]
        views0 = views0 + serv
    for t in qcl1:
        eps = qcl1[t]
        elems = [JoinBlock([Service(ep, t)]) for ep in eps]
        ub = UnionBlock(elems)
        views1 = views1 + [ub]
    return (views0, views1)
def test_create_service(self):
    ser = Service({
        "name": "test_name",
        "url": "test_url",
        "description": "test_description"
    })
    print(ser)
def login(self, svc, login_details=None):
    try:
        s = Service.service(svc)(login_details=login_details)
        self._services.append(s)
        print("[Status: %s]" % s.status())
    except ValueError as e:
        print(e)
def __init__(self):
    log.startLogging(sys.stdout)
    reload(sys)
    sys.setdefaultencoding('UTF-8')
    # database
    import database
    GlobalManager.db = database.db
    GlobalManager.Session = database.Session
    # server configuration
    from server import Server
    from services import Service
    server = Server(8100, "game")
    GlobalManager.tcpserver = server
    GlobalManager.netservice = Service('NetService')
    GlobalManager.tcpserver.serverFactory.addServiceChannel(GlobalManager.netservice)
    # start process monitoring
    GlobalManager.pbroot = PBRoot()
    GlobalManager.pbroot.addServiceChannel(GlobalManager.netservice)
    reactor.listenTCP(10000, BilateralFactory(GlobalManager.pbroot))
    # register the server logic handlers
    import handle
def getStarsM(l, tl, genPred, prefixes, c):
    (qcl0, qcl1) = assignEndpointM(tl, l, genPred, prefixes, c)
    views0 = []
    views1 = []
    for cl in qcl0:
        l0 = qcl0[cl]
        vs = formStars(l0)
        serv = [Service(cl, view) for view in vs]
        views0 = views0 + serv
    for t in qcl1:
        eps = qcl1[t]
        elems = [JoinBlock([Service(ep, t)]) for ep in eps]
        ub = UnionBlock(elems)
        views1 = views1 + [ub]
    return (postp2(views0), views1)
def getStarsS(l, tl, genPred, prefixes, c):
    qcl = assignEndpointS(tl, l, genPred, prefixes, c)
    views = []
    for cl in qcl:
        l0 = qcl[cl]
        vs = formStars(l0)
        serv = [Service(cl, view) for view in vs]
        views = views + serv
    return postp2(views)
def __init__(self, model_dir=os.path.join('models', 'sklearn', 'nmf')):
    Service.__init__(self, 'topic-modeling', 'sklearn', [])
    self.models = {}
    self.vectorizers = {}
    self.results = {}
    langs = set()
    for name in os.listdir(model_dir):
        model_path = os.path.join(model_dir, name, 'model.pkl')
        results_path = os.path.join(model_dir, name, 'results.json')
        with open(model_path, 'rb') as f:
            m_ = pickle.load(f)
        self.models[name] = m_['model']
        self.vectorizers[name] = m_['vectorizer']
        with open(results_path) as f:
            results = json.load(f)
        if 'lang' in results:
            langs.add(results['lang'])
        self.results[name] = results
    self.langs = list(langs)
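# The constructor above only loads the pickled NMF model and its vectorizer;
# the inference path is not part of this snippet. The helper below is a
# hypothetical sketch that assumes both objects follow the standard
# scikit-learn transform API; `infer_topics` is not a name from the original
# service.
def infer_topics(model, vectorizer, text):
    # Vectorise a single document and return its (topic_index, weight) pairs.
    X = vectorizer.transform([text])
    weights = model.transform(X)[0]
    return list(enumerate(weights))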
class leafNode(object):
    """Remote-call object."""

    def __init__(self, name):
        """Initialise the remote-call object.

        @param port: int, port number of the remote distributed server
        @param rootaddr: address of the root-node server
        """
        self._name = name
        self._factory = pb.PBClientFactory()
        self._reference = ProxyReference(self)
        self._service = Service('proxy')
        self._addr = None

    def setName(self, name):
        """Set the node's name."""
        self._name = name

    def getName(self):
        """Get the node's name."""
        return self._name

    def connect(self, addr):
        """Connect to the root node at the given address and register."""
        self._addr = addr
        reactor.connectTCP(addr[0], addr[1], self._factory)
        self.register()

    def reconnect(self):
        """Reconnect to the last known address."""
        self.connect(self._addr)

    def setServiceChannel(self, service):
        """Set the service channel object."""
        self._service = service

    def getServiceChannel(self):
        return self._service

    def register(self):
        """Register this node with the RootNode and send it the proxy channel object."""
        deferedRemote = self._factory.getRootObject()
        deferedRemote.addCallback(callBack, 'register', self._name, self._reference)

    def callRemote(self, commandId, *args, **kw):
        """Call a remote command by id."""
        deferedRemote = self._factory.getRootObject()
        return deferedRemote.addCallback(callBack, 'callTarget', commandId, *args, **kw)

    def callTarget(self, targetKey, *args, **kw):
        return self._service.callTarget(targetKey, *args, **kw)
def fetchMetadata(idrac, port, file_collection_path, file_collection_time):
    meta_dict = {}
    url = f"https://{user}:{passwd}@{idrac}/redfish/v1/"
    response_code, json_response = apiGetCall(url, 'Root API', idrac)
    if response_code == 200:
        service_data = Service(**json_response)
        serviceMeta = service_data.getMetadata()
        filename = serviceMeta[0] + '_' + serviceMeta[1] + '_' + idrac + '_' + getTimestamp() + ".jsonl"
        # try:
        #     fd = open(f"{file_collection_path}/output/{filename}", "a+")
        # except FileNotFoundError as ex:
        #     if not os.path.exists(f"{file_collection_path}"):
        #         os.makedirs(file_collection_path)
        #     fd = open(f"{file_collection_path}/output/{filename}", "a+")
        # meta_dict[idrac] = getFile
        return [serviceMeta[0], serviceMeta[1], filename,
                file_collection_path, file_collection_time]
    elif response_code == 408:
        print(json_response)
    else:
        print(f"{url} Failed with status code {response_code} and error message {json_response}")
def fetchMetadata(idrac, port, file_collection_path, file_collection_time, q):
    meta_dict = {}
    url = f"https://{user}:{passwd}@{idrac}/redfish/v1/"
    response_code, json_response = apiGetCall(url, 'Root API', idrac)
    if response_code == 200:
        # q.put_nowait(makeLogMessage("INFO", idrac, "Fetch Successfully"))
        service_data = Service(**json_response)
        serviceMeta = service_data.getMetadata()
        filename = serviceMeta[0] + '_' + serviceMeta[1] + '_' + idrac + '_' + getTimestamp() + ".jsonl"
        return [serviceMeta[0], serviceMeta[1], filename,
                file_collection_path, file_collection_time]
    elif response_code == 408:
        q.put_nowait(makeLogMessage("ERROR", idrac, json_response))
    else:
        q.put_nowait(makeLogMessage(
            "ERROR", idrac,
            json_response['error']['@Message.ExtendedInfo'][0]['Message']))
class leafNode(object):
    """Remote-call object."""

    def __init__(self, name):
        """Initialise the remote-call object.

        @param port: int, port number of the remote distributed server
        @param rootaddr: address of the root-node server
        """
        self._name = name
        self._factory = pb.PBClientFactory()
        self._reference = ProxyReference(self)
        self._service = Service('proxy')
        self._addr = None

    def setName(self, name):
        """Set the node's name."""
        self._name = name

    def getName(self):
        """Get the node's name."""
        return self._name

    def connect(self, addr):
        """Connect to the root node at the given address and register."""
        self._addr = addr
        reactor.connectTCP(addr[0], addr[1], self._factory)
        self.register()

    def reconnect(self):
        """Reconnect to the last known address."""
        self.connect(self._addr)

    def addServiceChannel(self, service):
        """Set the service channel object."""
        self._service = service

    def getServiceChannel(self):
        return self._service

    def register(self):
        """Register this node with the RootNode and send it the proxy channel object."""
        deferedRemote = self._factory.getRootObject()
        deferedRemote.addCallback(callBack, 'register', self._name, self._reference)

    def callRemote(self, commandId, *args, **kw):
        """Call a remote command by id."""
        deferedRemote = self._factory.getRootObject()
        return deferedRemote.addCallback(callBack, 'callTarget', commandId, *args, **kw)

    def callTarget(self, targetKey, *args, **kw):
        return self._service.callTarget(targetKey, *args, **kw)
def __init__(self, model_dir=os.path.join('models', 'bert', 'next_sentence_prediction')):
    Service.__init__(self, 'next-sentence-prediction', 'bert', ['parse'])
    self.models = {}
    self.results = {}
    langs = set()
    for name in os.listdir(model_dir):
        if not os.path.isdir(os.path.join(model_dir, name)):
            continue
        with open(os.path.join(model_dir, name, transformers.CONFIG_NAME), 'r') as f:
            configs = json.load(f)
        language = configs[constants.MODEL_INFO][constants.LANGUAGE]
        langs.add(language)
        pretrained_model_name_or_path = os.path.join(model_dir, name)
        self.models[name] = bert_for_next_sentence_prediction.BertForNextSentencePrediction(
            language, pretrained_model_name_or_path)
        self.results[name] = configs[constants.MODEL_INFO]
    self.langs = list(langs)
def __init__(self, model_dir='models/sklearn/classification'):
    Service.__init__(self, 'classification', 'sklearn', [])
    self.models = {}
    self.patterns = {}
    self.extra_patterns = {}
    self.results = {}
    langs = set()
    for name in os.listdir(model_dir):
        model_path = os.path.join(model_dir, name, 'model.pkl')
        results_path = os.path.join(model_dir, name, 'results.json')
        with open(model_path, 'rb') as f:
            m_ = pickle.load(f)
        self.models[name] = m_['model']
        self.patterns[name] = m_['patterns']
        self.extra_patterns[name] = m_['extra_patterns']
        with open(results_path) as f:
            results = json.load(f)
        if 'lang' in results:
            langs.add(results['lang'])
        self.results[name] = results
    self.langs = list(langs)
def do_savelogin(self, svc):
    if len(_config) == 0:
        try:
            load('config.enc')
        except IOError:
            pass
        except ValueError as e:
            print(e)
            return
    try:
        d = {}
        for k in Service.login_reqs(svc):
            d[k] = _get_passphrase(prompt=k.title(), confirm=True, length_req=0)
        if svc not in _config:
            _config[svc] = {}
        _config[svc]['login_details'] = d
        save('config.enc')
    except ValueError as e:
        print(e)
def do_list(self, s):
    print(", ".join(service.__name__ for service in Service.services()))