import pymysql

from config import readconfig


def connect(sql, operation):
    mysql = readconfig('mysql')
    host = mysql['host']
    port = mysql['port']
    name = mysql['name']
    user = mysql['user']
    passwd = mysql['passwd']
    # Open the database connection
    db = pymysql.connect(host=host, user=user, password=passwd, db=name, port=port)
    # Create a cursor object with cursor()
    cursor = db.cursor()
    # Execute the SQL statement with execute()
    cursor.execute(sql)
    result = None
    if operation in ("add", "delete", "change"):
        # Writes must be committed before the connection is closed
        db.commit()
    elif operation == "find":
        # fetchall() returns every matching row; use fetchone() for a single row
        result = cursor.fetchall()
    # Close the database connection
    db.close()
    return result
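# A minimal usage sketch for the connect() helper above. It assumes the
# 'mysql' section returned by readconfig() supplies host/port/name/user/passwd
# and that a table named `user` exists; the table and queries are illustrative
# and not part of the original module.
if __name__ == '__main__':
    rows = connect("SELECT * FROM user", "find")
    for row in rows or ():
        print(row)
    connect("DELETE FROM user WHERE name = 'tmp'", "delete")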
def get_servers():
    """Return (master "host:port", list of master and supervisor "host:port" strings)."""
    servers = []
    conf = config.readconfig('snapw.config')
    master = conf["master"]["host"] + ":" + conf["master"]["port"]
    servers.append(master)
    for supervisor in conf["hosts"]:
        servers.append(supervisor["host"] + ":" + supervisor["port"])
    return master, servers
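# Illustrative only: the shape of snapw.config that get_servers() above relies
# on. Hosts and ports here are made up; the real file carries additional
# sections (e.g. "var", used elsewhere in this code).
example_conf = {
    "master": {"host": "localhost", "port": "9100"},
    "hosts": [
        {"host": "localhost", "port": "9200"},
        {"host": "localhost", "port": "9201"},
    ],
}
# With this shape get_servers() returns
# ("localhost:9100", ["localhost:9100", "localhost:9200", "localhost:9201"]).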
from contextlib import contextmanager

import psycopg2

import config


@contextmanager
def connect():
    """Connect to the PostgreSQL database and yield the connection.

    Wrapping the function in contextlib.contextmanager lets callers use it in
    a 'with' statement, avoiding repeated commit/rollback/close code at every
    call site.
    """
    conn = None
    try:
        params = config.readconfig('database.ini')
        conn = psycopg2.connect(database=params['database'])
        yield conn
        conn.commit()
    except Exception:
        if conn:
            conn.rollback()
        raise
    finally:
        if conn:
            conn.close()
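# A minimal usage sketch of the context manager above, assuming database.ini
# provides a 'database' entry and that a table named `events` exists (the
# table name is purely illustrative).
def count_events():
    with connect() as conn:
        cur = conn.cursor()
        cur.execute("SELECT count(*) FROM events")
        return cur.fetchone()[0]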
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import getpass

import config
import ssh

# read config
content_json = config.readconfig('temp.json')

# get server & program list
try:
    svr = content_json['svr']
    print(svr)
    program_list = content_json['program_list']
    print(program_list)
    program_list_leng = len(program_list)
except Exception as e:
    print("exception:", e)
    sys.exit(1)

print('\nSSH to server ' + svr + '.\n')

# get credentials (the password prompt does not echo)
username = input("SSH username:")
password = getpass.getpass("SSH password:")

# test ssh server
ssh.ssh_cmd(svr, username, password, 'ls /home/' + username)
# -*- coding: utf-8 -*-
import re
import xlrd
import testlibrary
from Applet.redis_operation import setMileage
from comman import upload_location
from config import readconfig

readcig = readconfig()
tp = testlibrary.testlibrary()

"""
0x14: set the video-alarm flag bits, i.e. select which alarm types should be uploaded
0x15: video-signal-loss alarm status, logical channel number setting; a bit set to 1 means the video signal on that channel is lost (vedio_signal)
0x16: video-signal-blocked alarm status, logical channel number setting; a bit set to 1 means the video signal on that channel is blocked (vedio_signal)
0x17: storage fault alarm status (memery)
0x18: detailed description of the abnormal-driving alarm: 1 fatigue, 2 phone call, 4 smoking (abnormal_driving)
extrainfo_id = [20,21,22,23,24]
"""


def main(args, testinfo, tp, link, mobile, extrainfo_id, idlist, wsid, deviceid,
         port, vehicle_id):
    pdict, sichuandict, ex808dict, sensordict, bluetoothdict = readcig.readtestfile()
    # Set the mileage in redis, passing the vehicle id and the common parameters pdict
    setMileage.setto_redismel(vehicle_id, ex808dict, pdict['redishost'],
                              pdict['db'], pdict['pwd'])
    excel_list = readexcel()
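# A minimal sketch decoding the 0x18 abnormal-driving bit flags described in
# the module docstring above (1 = fatigue, 2 = phone call, 4 = smoking). The
# helper name is illustrative and does not exist in the original module.
def decode_abnormal_driving(flags):
    names = {1: 'fatigue', 2: 'phone call', 4: 'smoking'}
    return [name for bit, name in names.items() if flags & bit]

# decode_abnormal_driving(5) -> ['fatigue', 'smoking']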
def log(msg):
    print("%s %s" % (datetime.datetime.now(), msg))


parser = argparse.ArgumentParser()
parser.add_argument('-c', '--country')
parser.add_argument('-d', '--directory')
args = parser.parse_args()
if args.country is None or args.directory is None:
    print("usage: python backup --country=<country> --directory=<directory to be backed up>")
    exit(1)

timestamp = time.gmtime()
server, database, username, password, tempdir = config.readconfig()
con = mdb.connect(server, username, password, database)
vault_name = vault_name(args.country, args.directory, timestamp)
log("vault name for this backup: %s" % vault_name)
files_to_backup = list_directory(args.directory)
files_to_backup = get_modified_files(con, args.country, args.directory, files_to_backup)
if len(files_to_backup) > 0:
    create_vault(vault_name)
    msg, file_result = perform_backup(files_to_backup, tempdir, vault_name)
    store_backup_result(con, file_result, vault_name, args.country, args.directory,
                        timestamp, msg, 0)
else:
    log("no backup performed - no news")
"GetNbr": "average" } logging.info("Generating features.") features, target = generate_features(label_rt, conf["var"], data, setting) logging.info("Training model.") train(features, target) logging.info("Generating new configuration file") new_conf_d = predict(conf["var"]) return new_conf_d if __name__ == '__main__': logging.basicConfig( level=logging.INFO, format= '[%(asctime)s] [%(levelname)s] [%(process)d] [%(filename)s] [%(funcName)s] %(message)s' ) data = {} config_path = 'snapw.config' conf = config.readconfig(config_path) master_server = get_servers()[0].split(":")[0] # server_list = ['ild1.stanford.edu:9200', 'ild1.stanford.edu:9201', 'ild2.stanford.edu:9201'] new_conf = learning_pipeline(data, conf) logging.info("New Config file: %s" % new_conf) send_conf_file(new_conf, master_server) time.sleep(3) # TODO(nkhadke): Test. #print check_conf_file(master_server)
"""Handle requests in a separate thread.""" def execute(self): while self.running: self.handle_request() print "exit" sys.exit(0) def self_dummy(self): haddr = "%s:%s" % (self.host, self.port) client.dummy(haddr) if __name__ == '__main__': dconf = config.readconfig("snapw.config") print dconf master = dconf["master"] host = master["host"] port = int(master["port"]) server = ThreadedHTTPServer((host, port), Server) # head service host name server.host = host # head service port server.port = port # set of hosts completed their step server.done = set() # set of hosts ready for the next step
    # Data is a dictionary with key = machine and value = the k-k-v file.
    # Setting is a dictionary with key = parameter and value = the category.
    logging.critical("DATA: %s", data)
    label_rt = 1.0
    setting = {"GenTasks": "average",
               "GenStubs": "average",
               "GenGraph": "average",
               "GetNbr": "average"}

    logging.info("Generating features.")
    features, target = generate_features(label_rt, conf["var"], data, setting)

    logging.info("Training model.")
    train(features, target)

    logging.info("Generating new configuration file.")
    new_conf_d = predict(conf["var"])
    return new_conf_d


if __name__ == '__main__':
    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s] [%(levelname)s] [%(process)d] [%(filename)s] [%(funcName)s] %(message)s')
    data = {}
    config_path = 'snapw.config'
    conf = config.readconfig(config_path)
    master_server = get_servers()[0].split(":")[0]
    # server_list = ['ild1.stanford.edu:9200', 'ild1.stanford.edu:9201', 'ild2.stanford.edu:9201']
    new_conf = learning_pipeline(data, conf)
    logging.info("New Config file: %s" % new_conf)
    send_conf_file(new_conf, master_server)
    time.sleep(3)
    # TODO(nkhadke): Test.
    # print check_conf_file(master_server)
#!/usr/bin/env python3
import json
import getpass
import hashlib
import time
import logging

from config import readconfig
from mysql import connect
import prettytable as pt
from clean import clean

# logging setup
logfile = readconfig('log')
filename = logfile['logfile']
logging.basicConfig(
    format='[%(asctime)s] - [%(threadName)5s] - [%(filename)s-line:%(lineno)d] [%(levelname)s] %(message)s',
    filename=filename,
    level=logging.DEBUG,
    filemode='a')

# database connection

# add a user
def add():
    print("""Welcome! Please register (enter a username, password, QQ number and server region)
    eg: zhangsan 123 123 艾欧尼亚
    """)
    while True:
        flag = False
        while True:
message = "<Result><StatusCode>OK</StatusCode><Text></Text></Result>" handleAlerts(tree, False) else: print("Authentication failed....") response = {} headers = {'Content-type': 'application/html'} response['status'] = "Success" raise HTTPResponse(message, status=200, headers=headers) # # main program code # (elasticHost, esindex, localServer, localPort, mongoport, mongohost, createIndex, debug, slacktoken, slackuse) = config.readconfig(elasticHost, esindex, localServer, localPort, mongoport, mongohost, debug, slacktoken, slackuse) # first read data from command line (elasticHost, esindex, localServer, localPort, mongoport, mongohost, createIndex, debug, testSettings) = config.readCommandLine(elasticHost, esindex, localServer, localPort, mongoport, mongohost, createIndex, debug, testSettings) if (createIndex): print ("Info: Just creating an index " + esindex) elastic.initIndex(elasticHost, esindex) elif (testSettings): print ("Checking all connections based on supplied settings") else:
print("Authentication failed....") response = {} headers = {'Content-type': 'application/html'} response['status'] = "Success" raise HTTPResponse(message, status=200, headers=headers) # # main program code # (elasticHost, esindex, localServer, localPort, mongoport, mongohost, createIndex, debug, slacktoken, slackuse) = config.readconfig(elasticHost, esindex, localServer, localPort, mongoport, mongohost, debug, slacktoken, slackuse) # first read data from command line (elasticHost, esindex, localServer, localPort, mongoport, mongohost, createIndex, debug, testSettings) = config.readCommandLine(elasticHost, esindex, localServer, localPort, mongoport, mongohost, createIndex, debug, testSettings) if (createIndex): print("Info: Just creating an index " + esindex) elastic.initIndex(elasticHost, esindex) elif (testSettings): print("Checking all connections based on supplied settings")