def init_controller(controller_id):
    """Bootstrap the process-wide singletons and connect to one controller.

    Side effects: populates the shared `globals` module (logs, timezone info,
    system status, controller registry, config, transaction pool, UDP proxy),
    starts the UDP proxy thread, and binds the module-level `controller`,
    `cybro_comm` and `alloce` names for the given `controller_id`.

    NOTE(review): statement order matters — the UDP proxy is started before
    the controller/comm objects are created, and the allocation file is read
    before `alloce` is built. Do not reorder without checking the comm layer.
    """
    globals.system_log = logger.create("service")
    globals.access_log = logger.create("access")
    globals.tz_info = tz_info.TimezoneInfo()
    globals.sys_status = sys_status.SystemStatus()
    globals.controllers = cybrocontrollers.CybroControllers()
    globals.config = config.GlobalConfig()
    globals.transaction_pool = transaction_pool.TransactionPool()
    globals.udp_proxy = udp_proxy.UDPProxy()
    globals.udp_proxy.start()
    # Module-level handle to the single controller this process talks to.
    global controller
    controller = globals.controllers.create(controller_id, False)
    # Comm channel wired to the controller; presumably the first argument (1)
    # is the local/session address — TODO confirm against CybroComm.
    global cybro_comm
    cybro_comm = cybrocomm.CybroComm(1, controller_id)
    cybro_comm.controller = controller
    cybro_comm.data_received_event = threading.Event()
    global alloce
    # read file alloc always
    controller.read_alloc_file_immediately()
    alloce = alloc.Allocation(controller_id)
    alloce.read()
def __init__(self):
    """Initialize the CybroScgiServer service singletons.

    Populates the shared `globals` module (logs, timezone info, system
    status, controller registry, config, transaction pool) and optionally
    spawns debug servers when enabled in `sys_config`.
    """
    # Local imports — presumably to avoid a circular import at module load;
    # TODO confirm before hoisting to file level.
    import config
    import tz_info
    globals.system_log = logger.create("service")
    globals.access_log = logger.create("access")
    globals.system_log.info("*** CybroScgiServer %s started ***" % const.ApplicationVersion)
    globals.tz_info = tz_info.TimezoneInfo()
    globals.sys_status = sys_status.SystemStatus()
    globals.controllers = cybrocontrollers.CybroControllers()
    globals.config = config.GlobalConfig()
    globals.transaction_pool = transaction_pool.TransactionPool()
    # Optional remote Python console for live debugging (rfoo), gated by config.
    if sys_config.DebugRConsole:
        from rfoo.utils import rconsole
        rconsole.spawn_server()
        globals.system_log.warning("Debug rconsole server spawned.")
    # Optional TCP log sink, gated by config.
    if sys_config.DebugTcpServer:
        import tcp_logger_server
        globals.tcp_log_server = tcp_logger_server.create(
            sys_config.DebugTcpServerPort)
def service_account(self, request):
    """Changes service account email and private key used to sign GS URLs.

    Loads the stored global config (creating a fresh one on first use),
    applies the credentials from the request, and logs when anything
    actually changed.
    """
    stored = config.GlobalConfig.fetch()
    if not stored:
        stored = config.GlobalConfig()
    updated = stored.modify(
        service_account_email=request.client_email,
        service_account_pkey=request.private_key,
        service_account_pkey_id=request.private_key_id)
    if updated:
        logging.warning('Updated service account configuration')
    return message_types.VoidMessage()
def gs_config(self, request):
    """Configures paths in Google Storage to use by CAS service.

    Validates both GS paths (sans trailing slash) and persists them into
    the global config, logging when the stored values actually changed.

    Raises:
      endpoints.BadRequestException: if either path fails GS validation.
    """
    # Normalize once instead of re-stripping the same strings twice
    # (original computed each rstrip in both validate and modify).
    cas_gs_path = request.cas_gs_path.rstrip('/')
    cas_gs_temp = request.cas_gs_temp.rstrip('/')
    try:
        cloudstorage.validate_file_path(cas_gs_path)
        cloudstorage.validate_file_path(cas_gs_temp)
    except ValueError as err:
        raise endpoints.BadRequestException('Not a valid GS path: %s' % err)
    conf = config.GlobalConfig.fetch()
    if not conf:
        conf = config.GlobalConfig()
    changed = conf.modify(cas_gs_path=cas_gs_path, cas_gs_temp=cas_gs_temp)
    if changed:
        logging.warning('Updated Google Storage paths configuration')
    return message_types.VoidMessage()
from lib import ssh_conn,logs import time import config l=logs.Log() c=config.GlobalConfig(1) models={0:"GLOBAL", 1:"CONNECTION", 2:"TRANSFER", 3:"CONFIG", 4:"INSTALL" } stats={1:"<< FINISHED >>", 0:"<< FAILED >>", 2:"<DONE>" } class interactive(): def __init__(self,ip, username, pwd, port, key): self.ip=ip self.username=username self.pwd=pwd self.port=port self.key=key self.ftp=ssh_conn.ssh_sftp() self.ssh=ssh_conn.ssh_conn() def upload_file(self,file): return self.ftp.ftp(self.ip, self.username,self.pwd,self.port,self.key,file) def down_file(self,file): self.ftp.ftp(self.ip, self.username,self.pwd,self.port,self.key,file) def connect(self,cmd): result=self.ssh.ssh_connect(self.ip, self.username,self.pwd, 2,cmd) if not result:
choices={'BiLSTM', 'GlobalBiLSTM', 'BERT', 'CNN3', 'LSTM', 'ContextAware'}, default='BiLSTM', help='name of the model') parser.add_argument('--save_name', type=str) parser.add_argument('--train_prefix', type=str, default='dev_train') parser.add_argument('--test_prefix', type=str, default='dev_dev') args = parser.parse_args() model = { 'CNN3': models.CNN3, 'LSTM': models.LSTM, 'BiLSTM': models.BiLSTM, 'ContextAware': models.ContextAware, 'BERT': models.BERT, 'GlobalBiLSTM': models.BiLSTM } if args.model_name == 'BERT': con = config.BertConfig(args) con.set_batch_size(6) elif args.model_name == 'GlobalBiLSTM': con = config.GlobalConfig(args) else: con = config.Config(args) con.set_max_epoch(200) # con.set_data_path('/ws/ifp-53_2/hasegawa/lwang114/fall2020/cs598hj/hw2/prepro_data') con.load_train_data() con.load_test_data() con.train(model[args.model_name], args.save_name)
import logger import config import tz_info import udp_proxy import time c = zerorpc.Client() c.connect("tcp://127.0.0.1:4242") print c.PushRequest("UDP_push_activated") globals.system_log = logger.create("service") globals.tz_info = tz_info.TimezoneInfo() globals.sys_status = sys_status.SystemStatus() globals.controllers = cybrocontrollers.CybroControllers() globals.config = config.GlobalConfig() udpThread = udp_proxy.UDPProxy() udpThread.start() #print('The result is') try: while 1: #time.sleep(0.015) time.sleep(1) # every 15ms check for push list timeout and remove inactive controllers print c.ServerShutdownRequest() # shutdown SCGI server on global request #if c.ServerShutdownRequest(): # globals.system_log.info("CybroScgiServer remote shutdown requested.")
def test_get_cas_service_bad_config(self):
    """No CAS service is returned when the configured GS path is invalid."""
    bad_conf = config.GlobalConfig(
        cas_gs_path='blah',
        cas_gs_temp='/cas_gs_temp/def')
    self.mock(config, 'cached', lambda: bad_conf)
    self.assertIsNone(impl.get_cas_service())
def test_get_cas_service_no_config(self):
    """No CAS service is returned when the config carries no GS paths."""
    empty_conf = config.GlobalConfig()
    self.mock(config, 'cached', lambda: empty_conf)
    self.assertIsNone(impl.get_cas_service())
def test_get_cas_service_ok(self):
    """A CAS service is returned when both GS paths are configured."""
    good_conf = config.GlobalConfig(
        cas_gs_path='/cas_gs_path/abc/',
        cas_gs_temp='/cas_gs_temp/def/')
    self.mock(config, 'cached', lambda: good_conf)
    self.assertIsNotNone(impl.get_cas_service())
# ValueError: Trying to store a value with len [XX] in [CallOI??] column but # this column has a limit of [XXX]! import glob import fnmatch import os import sys import config as config import pandas as pd import datetime as dt import numpy as np # path = 'C:/Users/David/data/' # path = '/home/david/data/' globalconf = config.GlobalConfig() path = globalconf.config['paths']['data_folder'] def run(): os.chdir(path) optchain_orig = 'optchain_yahoo_db_expiry_2018-03.h5' pattern_optchain = 'optchain_yahoo_db_expiry_2018-03.h5*' optchain_out = 'optchain_yahoo_db_expiry_2018-03.db' lst1 = glob.glob(pattern_optchain) lst1.remove(optchain_orig) print(lst1) dataframe = pd.DataFrame() for x in lst1: store_in1 = pd.HDFStore(path + x) root1 = store_in1.root
def readConfig():
    """Build and return a fresh global configuration object."""
    cfg = config.GlobalConfig()
    return cfg