def configure_default_port(options):
    """Interactively re-configure service ports through a numbered menu.

    Reads the current chorus ports from shared/chorus.properties and the
    alpine port from ALPINE_DATA_REPOSITORY's deploy.properties, shows one
    menu entry per port (plus a final "exit" entry), prompts for a new value
    for the selected port, and writes the updated file(s) back to disk.

    NOTE(review): `xrange` means this is Python 2 code. `os` is used but not
    imported here — presumably imported at module level; verify.

    :param options: parsed installer options; uses `options.silent` and
        `options.chorus_path`.
    """
    # Imports are local, matching the installer's lazy-import style.
    from log import logger
    from installer_io import InstallerIO
    from configParser import ConfigParser
    from text import text
    io = InstallerIO(options.silent)
    # Chorus ports live in shared/chorus.properties.
    config_file = os.path.join(options.chorus_path, "shared/chorus.properties")
    chorus_config = ConfigParser(config_file)
    # Alpine's port lives in a separate deploy.properties file.
    alpine_config_file = os.path.join(
        options.chorus_path,
        "shared/ALPINE_DATA_REPOSITORY/configuration/deploy.properties")
    alpine_config = ConfigParser(alpine_config_file)
    ports = ["server_port", "solr_port"]
    # Menu entries 1..len(ports): chorus ports with their current values.
    menu = "\n".join(
        str(i + 1) + ". %s: [default: %s]" % (ports[i], chorus_config[ports[i]])
        for i in xrange(0, len(ports)))
    menu += "\n"
    alpine_ports = ["alpine_port"]
    # Alpine keys use '.' separators in the file but '_' in the menu labels,
    # hence the replace("_", ".") when reading/writing alpine_config.
    menu += "\n".join(
        str(len(ports)+i+1) + ". %s: [default: %s]" % (alpine_ports[i],
            alpine_config[alpine_ports[i].replace("_", ".")])
        for i in xrange(0, len(alpine_ports)))
    # Last entry exits without changing anything; it is also the default.
    menu += "\n%d. exit" % (len(ports) + len(alpine_ports) + 1)
    num = io.require_menu(
        text.get("interview_question", "port_menu") % menu,
        range(1, len(ports) + len(alpine_ports) + 2),
        default=len(ports) + len(alpine_ports) + 1)
    if num in range(1, len(ports) + 1):
        # A chorus port was selected: prompt, update, persist.
        new_port = io.prompt_int(
            text.get("interview_question", "change_port") % ports[num - 1],
            default=int(chorus_config[ports[num - 1]]))
        chorus_config[ports[num - 1]] = new_port
        chorus_config.write(config_file)
        logger.info("%s has successfully changed to %d" % (ports[num - 1],
                                                           new_port))
    elif num in range(len(ports) + 1, len(ports) + 1 + len(alpine_ports)):
        # An alpine port was selected; num - len(ports) - 1 indexes into
        # alpine_ports.
        new_port = io.prompt_int(
            text.get("interview_question", "change_port") % alpine_ports[num - len(ports) - 1],
            default=int(alpine_config[alpine_ports[num - len(ports) - 1].replace("_", ".")]))
        alpine_config[alpine_ports[num - len(ports) - 1].replace(
            "_", ".")] = new_port
        alpine_config.write(alpine_config_file)
        # Keep chorus's workflow URL in sync with the new alpine port.
        chorus_config["workflow.url"] = "http://%s:%d" % (
            alpine_config["alpine.host"], new_port)
        chorus_config.write(config_file)
        logger.info("%s has successfully changed to %d" % (alpine_ports[num - len(ports) - 1],
                                                           new_port))
def pipeline_endpoint():
    """Webhook handler for 'pipeline' events.

    Parses the incoming JSON request, loads the crow configuration from the
    URL named in the parsed payload, kicks off a crow run, and acknowledges
    the webhook with a plain 'OK'.
    """
    payload = getRepo('pipeline', request.json)
    crow_config = ConfigParser(payload['config']).getConfig()
    startCrow(payload, crow_config)
    return 'OK'
def main():
    """Webhook handler for the default crow repository.

    Parses the incoming JSON request, builds the raw URL of the repo's
    crow.yaml from the image and branch in the payload, loads that config,
    starts a crow run, and replies 'OK'.
    """
    payload = getRepo(CROW_REPO, request.json)
    config_url = '/'.join(
        [CROW_RAW_REPO, payload['image'], payload['branch'], 'crow.yaml'])
    crow_config = ConfigParser(config_url).getConfig()
    startCrow(payload, crow_config)
    return 'OK'
def main():
    """Run the full Molgenis quality-check suite.

    Connects with the credentials from the parsed config, runs the
    collection, biobank, network and person checks in order, then closes
    the check logs and logs out.
    """
    settings = ConfigParser().config
    connector = MolgenisConnector(
        settings['url'], settings['account'], settings['password'])
    checker = QualityChecker(connector)
    # Run every check category in the original fixed order.
    for run_check in (checker.check_collection_data,
                      checker.check_biobank_data,
                      checker.check_network_data,
                      checker.check_person_data):
        run_check()
    checker.logs.close()
    connector.logout()
def __init__(self, basepath, confile="configs/config.xml"):
    """Set up the parser pair and counters.

    :param basepath: root folder to scan; when empty/None, falls back to
        the log path reported by the folder parser.
    :param confile: path of the XML config listing the items to check.
    """
    self.basepath = basepath
    self.cp = ConfigParser(confile)
    self.fp = FolderParser(basepath)
    self.fileitems = self.cp.getCheckItems()
    # Fall back to the folder parser's log path when no base path was given.
    # (`is None` instead of `== None`: identity check is the Python idiom.)
    if self.basepath is None or self.basepath == "":
        self.basepath = self.fp.getLogPath()
    # Phase-2 bluetooth counters, updated elsewhere in this class.
    self.internal_bt_p2 = 0
    self.external_bt_p2 = 0
def __init__(self, logfolder):
    """Initialise path bookkeeping for the log-folder walker.

    :param logfolder: folder to scan; when empty/None, falls back to the
        path returned by ``self.getLogPath()``.
    """
    # File recording the real path of the most recent log.
    self.storefile = "tmp/latest_log_path"
    # Statically defined marker names used when classifying log dates.
    self.dateinternal = "DATE_INTERNAL"
    self.dateinternal_lastlog = "DATEINTERNAL_LASTLOG"
    self.dateexternal = "DATE_EXTERNAL"
    self.dateexternal_lastlog = "DATEEXTERNAL_LASTLOG"
    # Maps "fade" (display) paths to real filesystem paths.
    self.mapfadereal = {}
    self.initdata()
    self.logfolder = logfolder
    # Fall back to the stored log path when no folder was supplied.
    # (`is None` instead of `== None`: identity check is the Python idiom.)
    if self.logfolder is None or self.logfolder == "":
        self.logfolder = self.getLogPath()
    self.fullfilepaths = []
    self.fullfolderpaths = []
    self.cp = ConfigParser("configs/config.xml")
    self.workpath()
def main():
    """Entry point of the bilibili live danmu (chat message) console client.

    Loads configuration (file and/or CLI), connects to a danmu chat server
    for the chosen room, keeps the connection alive with periodic heartbeats,
    and reconnects on timeouts / closed connections until interrupted.

    NOTE(review): relies heavily on module-level globals declared below;
    helpers such as getRoom/handler2/notify/SetInterval/Displayer are defined
    elsewhere in this module.
    """
    global sPath
    global alive
    global running
    global mConfig, mMap, mExplain
    global display, display1, display2
    global log
    global nRoom
    global aColour
    global localFile
    global aBlock
    global notifyMode
    global beatClock
    # use crafted display function to ease the migration from python3 to
    # python2 and to accommodate to different terminal coding in different
    # system
    # display1 is normal displayer, while display2 is a separate displayer
    # running in special thread, implementing the display interval
    # in each case, display1 shall be a instant displayer; thus, use display1
    # to output diagnostic message
    display1 = Displayer(0).display
    display = display1
    # Keep a copy so config parsing errors can roll back to the defaults.
    mConfigBak = mConfig.copy()
    try:
        parser1 = ConfigParser(mConfig, mExplain, mMap,
                               'display danmu message in bilibili live')
        useCLI = True if len(sys.argv) > 1 else False
        if (not os.path.exists(sPath)):
            # Config not in CWD: also try next to the script itself.
            sDir = os.path.split(sys.argv[0])[0]
            sFile = os.path.join(sDir, sPath)
            if (os.path.exists(sFile)):
                sPath = sFile
            else:
                display1('配置文件 {} 不存在'.format(sPath))
                sPath = None
        # parse configuration from file and from command line option
        mData = parser1.parse(sPath, useCLI)
        mConfig = mData
    except Exception as e:
        # Any parse failure falls back to the saved default configuration.
        display1('读取配置出错:', e, sep='\n')
        display1('退回默认配置')
        mConfig = mConfigBak
    if (mConfig['nDelay'] > 0):
        # danmu message display interval is enabled, using threaded displayer
        display2 = Displayer(1, mConfig['nDelay']).display
        display = display2
    # Map configured colour indices to ANSI codes (30-39 normal, 90+ bright).
    aColour = [(x + 30 if x < 10 else x + 80) for x in mConfig['aColour']]
    if (mConfig['verbose']):
        log = display1
    else:
        # Non-verbose mode: logging becomes a no-op.
        def log(*aArgs, **mArgs):
            pass
    if (mConfig['block']):
        # it seems that two format of flooding messages are existing
        aBlock = ['bilibili-(゜-゜)つロ乾杯~', '- ( ゜- ゜)つロ 乾杯~ - bilibili']
    if (mConfig['notify']):
        notifyMode = 2
    else:
        notifyMode = 1
    log(mConfig)
    # Room ID from config, or prompt the user when unset/zero.
    nRoom = mConfig['nRoom'] or int(input('room ID:'))
    running = True
    socket.setdefaulttimeout(10)
    # Main connect/serve loop; iterates again on recoverable errors.
    while running:
        try:
            try:
                sServer, nRoom, sHoster, sTitle = getRoom(nRoom)
            except urllib.error.HTTPError as e:
                if (e.code == 404):
                    # Unknown room: re-prompt and retry.
                    display1('找不到该房间,请重新输入房间号')
                    nRoom = int(input('room ID:'))
                    continue
                else:
                    raise
            if (mConfig['write']):
                # Persist received danmu to a timestamped, sanitized file name.
                sTime = time.strftime('%m%d_%H%M%S-')
                sName = sHoster + '-' + sTitle
                sName = re.sub(r'[^\w_\-.()]', '-', sName)
                sFileName = '{}{}.txt'.format(sTime, sName)
                localFile = open(sFileName, 'a', encoding='utf-8')
            log('弹幕服务器 ' + sServer)
            aAddr1 = (sServer, 788)
            sock1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                sock1.connect(aAddr1)
            except TimeoutError as e:
                # First server unreachable: swap to the alternate danmu host
                # and retry the connection once.
                sock1.close()
                display1('到弹幕服务器的连接失败,尝试更换地址')
                if (sServer == 'livecmt-1.bilibili.com'):
                    sServer = 'livecmt-2.bilibili.com'
                else:
                    sServer = 'livecmt-1.bilibili.com'
                log('弹幕服务器 ' + sServer)
                aAddr1 = (sServer, 788)
                sock1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock1.connect(aAddr1)
            log('地址为 ', *sock1.getpeername())
            # a random meaningless user ID
            nUid = int(100000000000000 + 200000000000000 * random.random())
            #bPayload = b'{"roomid":%d,"uid":%d}' % (nRoom, nUid);
            bPayload = ('{"roomid":%d,"uid":%d}' % (nRoom, nUid)).encode('utf-8')
            # Frame: 16-byte big-endian header (length, 0x100001, op, 0x1)
            # followed by the JSON payload; op 0x7 = join room.
            nLength = len(bPayload) + 16
            bReq = struct.pack('>IIII', nLength, 0x100001, 0x7, 0x1)
            bReq += bPayload
            sock1.sendall(bReq)
            alive = True
            # op 0x2 = heartbeat frame (empty payload).
            bHeartBeat = struct.pack('>IIII', 0x10, 0x100001, 0x2, 0x1)
            sock1.sendall(bHeartBeat)
            # send heartbeat message per 30 seconds
            interval = SetInterval(lambda: (sock1.sendall(bHeartBeat)), 30)
            interval.start()
            # capture CR in stdin to send hearbeat in order to fetch freshed
            # online count; only start the notify thread once.
            if (not beatClock):
                beatClock = interval.clock
                t = threading.Thread(target=notify)
                t.daemon = 1
                t.start()
            else:
                beatClock = interval.clock
            # Blocks here receiving/displaying messages until the socket dies.
            handler2(sock1)
        except (socket.timeout, TimeoutError) as e:
            display1('连接超时,重试...')
            continue
        except SocketDied as e:
            display1('连接被关闭,程序重启...')
            continue
        except BaseException as e:
            if (isinstance(e, KeyboardInterrupt)):
                # Ctrl-C: leave the loop cleanly.
                display1('程序退出')
                running = False
            elif (sys.version[0] == '3' and isinstance(e, ConnectionResetError)):
                # ConnectionResetError is not supported in python2
                display1(e)
                display1('到服务器的连接被断开,尝试重新连接...')
                continue
            else:
                # record error log
                with open('danmu_error.log', 'ab') as f1:
                    f1.write(('\n' + (str(e))).encode('utf-8'))
                raise
        finally:
            # Always stop the heartbeat timer and close the socket, whether
            # we are retrying or exiting.
            alive = False
            if ('interval' in locals()):
                interval.stop()
            if ('sock1' in locals()):
                sock1.close()
    # Loop exited normally: report where the danmu log was saved, if any.
    if (localFile):
        display1('弹幕已保存到文件 {}'.format(localFile.name))
        localFile.close()
from getData import GetData
from configParser import ConfigParser
import os
import platform  # FIX: platform.system() is called below but was never imported
import time
import signal
import sys

osVer = platform.system()

# Best-effort creation of the temp directory; already-exists is fine.
# NOTE(review): `saneConf` is not imported in this chunk — presumably defined
# elsewhere in the module; verify.
try:
    os.mkdir(saneConf.temp)
except OSError:
    pass

# Load the per-OS client configuration; `externs` provides the
# platform-specific wallpaper bindings and is only importable on these OSes.
if osVer == "Darwin":
    import externs
    conf = ConfigParser("conf/client/osx.cfg").get()
elif osVer == "Windows":
    import externs
    conf = ConfigParser("conf/client/windows.cfg").get()
elif osVer == "Linux":
    import externs
    conf = ConfigParser("conf/client/linux.cfg").get()


def exit(signum, frame):
    # SIGINT handler: restore the original wallpaper, then terminate.
    # NOTE(review): shadows the builtin `exit`, and `bg` is not defined in
    # this chunk — presumably set elsewhere before the handler can fire;
    # verify.
    externs.setWallpaper(bg)
    sys.exit()


signal.signal(signal.SIGINT, exit)
def test_config_rules(self):
    """Every parsed rule must match the expected rule name and pattern."""
    parsed = ConfigParser("conf/client/test.cfg").get()
    for index, rule in enumerate(parsed.rules):
        expected = self.goodConf.rules[index]
        self.assertEqual(rule.name, expected.name)
        self.assertEqual(rule.rule, expected.rule)
def test_config_header(self):
    """The parsed config must carry the expected header string."""
    parsed = ConfigParser("conf/client/test.cfg").get()
    self.assertEqual("Test Header", parsed.header)
# coding=utf-8
import logging

from configParser import ConfigParser
from mysql import connector

from Config.defaults import DEFAULTS

# Module-level parser holding the backend DB settings, loaded once at import.
DB_CONFIG = ConfigParser()
DB_CONFIG.read(DEFAULTS['BACKEND_CFG'])
# Snapshot of the default section's items; kept because other modules may
# import it — TODO confirm before removing.
dbconfig = DB_CONFIG.items('KVM_MYSQL_TEST')


def get_mysql_conn(cid='KVM_MYSQL_TEST'):
    """Open and return a new MySQL connection for config section *cid*.

    Each call creates a fresh connection (no pooling/caching).

    :param cid: name of the config section holding the connection options.
    :return: a ``mysql.connector`` connection object.
    """
    cfg = dict(DB_CONFIG.items(cid))
    # ConfigParser yields strings; the connector requires an int port.
    cfg['port'] = int(cfg['port'])
    mysql_conn = connector.connect(**cfg)
    return mysql_conn
# NOTE(review): `gc`, `dataPath`, and the Interpolate class come from earlier
# in this script (not visible in this chunk) — `gc` appears to be a config
# hierarchy loader, not the stdlib gc module; verify.
gc.loadHeirarchy(application='atg', datacenter='me')
print(gc.paths)
print ("***************** CONSOLIDATED JSON ITERATIVE ***********************")
# Merge the hierarchy iteratively and show the result.
gc.buildConsolidatedJSON()
gc.printOrderedJSON()
print ("***************** CONSOLIDATED JSON RECURSIVE ***********************")
# Same merge done recursively; this result is also written out.
gc.buildConsolidatedJSONRecursive()
gc.printOrderedJSON()
gc.writeOrderedJSON()
print ("***************** CONFIG PARSER FILTER JSON ***********************")
# Filter the merged JSON down to the 'atg' application using the full
# global config, then print and persist the filtered view.
cp=ConfigParser()
cp.loadJSONDict(gc.jsonMerged)
cp.loadGlobalConfig(dataPath+"/globalConfigFull.json")
cp.filterJSON('atg')
cp.printOrderedJSON()
cp.writeOrderedJSON(dataPath)
print ("***************** INTERPOLATE KEYS ***********************")
# Expand template keys from the global config template, print/persist the
# result, then interpolate every template under the multidir tree.
i=Interpolate()
i.loadGlobalConfig(dataPath+"/globalConfig.tmpl")
i.interpolateDict()
i.printOrderedJSON()
i.writeOrderedJSON(dataPath)
#i.interpolateTemplate(dataPath+"/sample.txt.tmpl")
i.interpolate(dataPath+"/multidir")
def __init__(self, conf=DEFAULTS['BACKEND_CFG']):
    """Initialise configuration state and the connection cache.

    :param conf: path of the config file to read; defaults to the backend
        config path from DEFAULTS (evaluated once at class definition time).
    """
    self.config = ConfigParser()
    self.config.read(conf)
    # Cache of DB connections keyed by config-section id — TODO confirm
    # usage against the rest of the class.
    self.dbconn_dict = {}