def delete_file_info(self, file_id):
    str_sql = "delete from FileInfo where FileID = '%s'" % (file_id)
    logger.debug("Delete FileInfo SQL:" + str_sql)
    db = DBUtil()  # reuse one instance so close_db() releases the connection exec_sql used
    try:
        db.exec_sql(str_sql)
    finally:
        db.close_db()
def get_file_by_file_path(self, file_path):
    """Look up a file by its path; BINARY makes the comparison case-sensitive."""
    str_sql = "select * from FileInfo where BINARY FilePath = '%s'" % (file_path)
    logger.debug("Get FileInfo by file_path :" + str_sql)
    db = DBUtil()
    try:
        data = db.get_data(str_sql)
    finally:
        db.close_db()
    return data
def save_file_info(self, file_name, file_path, file_md5):
    file_id = ComFun().get_guid()
    logger.debug("file_md5:" + file_md5)
    logger.debug("file_path:" + file_path)
    str_sql = ("insert into FileInfo (FileID, FileName, FilePath, FileMD5) "
               "values ('%s', '%s', '%s', '%s')"
               % (file_id, file_name, file_path, file_md5))
    logger.debug("Add FileInfo SQL:" + str_sql)
    db = DBUtil()
    try:
        db.exec_sql(str_sql)
    finally:
        db.close_db()
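# The FileInfo helpers above splice values into SQL with % formatting, which
# breaks on quotes in file names and is open to SQL injection. Below is a
# minimal parameterized sketch using the stdlib sqlite3 driver; DBUtil's own
# API is not shown here, so the connection handling and the helper name
# save_file_info_param are assumptions for illustration, not the project's
# actual interface.
import sqlite3

def save_file_info_param(conn, file_id, file_name, file_path, file_md5):
    # Placeholders let the driver escape values instead of string formatting.
    conn.execute(
        "insert into FileInfo (FileID, FileName, FilePath, FileMD5) "
        "values (?, ?, ?, ?)",
        (file_id, file_name, file_path, file_md5))
    conn.commit()

# Usage sketch (database path is a made-up example):
# conn = sqlite3.connect('FileInfo.db')
# save_file_info_param(conn, guid, "song.mp3", "/data/song.mp3", md5sum)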
def add_songlist2mid(conn, songlistid, mid):
    sql = "select * from t_runtime_songlist_sync where mid=" + str(mid)
    rows = DBUtil().get_data(sql)
    if rows:
        mid_row = rows[0]
        if mid_row["dst_songlist"]:
            dst_songlist = demjson.decode(mid_row["dst_songlist"])
            logger.debug("minik[{}] dst_songlist exists, dst_songlist is {}".format(
                mid, dst_songlist))
            dst_songlist.append(songlistid)
        else:
            # No destination list yet: rebuild it from every published
            # songlist whose machine sets include this mid.
            sql = "select * from t_publish_songlist"
            allpubsonglist = DBUtil().get_data(sql)
            dst_songlist = []
            if allpubsonglist:
                for psl in allpubsonglist:
                    psl_id = psl["songlistid"]
                    psl_imset = demjson.decode(psl["include_mset"])
                    for psl_imset_id in psl_imset:
                        sql = "select * from t_config_machset where id = '{}'".format(
                            psl_imset_id)
                        sql_r = DBUtil().get_data(sql)
                        if sql_r and sql_r[0]:
                            psl_imset_machset = demjson.decode(sql_r[0]["machset"])
                            if mid in psl_imset_machset:
                                dst_songlist.append(psl_id)
                        else:
                            logger.error('cannot find machset where id = {}'.format(
                                psl_imset_id))
            else:
                logger.warn('cannot find any songlist')
                return
        # Deduplicate, sort and write the merged list back to the sync row.
        songlist = list(set(dst_songlist))
        songlist.sort()
        s = demjson.encode(songlist)
        logger.debug("allsonglist for minik[{}] is {}".format(mid, s))
        sql = "update t_runtime_songlist_sync set dst_songlist='%s' where mid=%d" % (
            s, mid)
        DBUtil().exec_sql(sql)
        logger.debug("mid[%d] add songlist(%d) to sync\n" % (mid, songlistid))
    else:
        logger.warn("mid[%d] add songlist(%d) to sync failed, mid not in sync table\n"
                    % (mid, songlistid))
    return
def __init__(self, init_flag=False):
    """Initialize the user-info database handle."""
    path = 'data/userinfo.db'
    self.query_result = ""
    self.db = DBUtil(path, init_flag=init_flag)
def get(self):
    db = DBUtil('Info.db', 'information')
    users = db.getUsers()
    # Dump the rows directly; json.dump of a json.dumps string would
    # double-encode the payload written to data.json.
    with open('data.json', 'w+') as outfile:
        json.dump(users, outfile)
    db.close()
    self.render('show.html')
def post(self):
    name = self.get_argument('name')
    age = int(self.get_argument('age'))
    city = self.get_argument('city')
    db = DBUtil('Info.db', 'information')
    db.insert(name, age, city)
    db.close()
    self.redirect('/insert')
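# The get()/post() handlers above only make sense inside RequestHandler
# subclasses wired into a Tornado application. A minimal wiring sketch follows;
# the class names ShowHandler and InsertHandler and the port are assumptions
# standing in for whatever the original project uses, and the stub bodies mark
# where the snippets above would live.
import tornado.ioloop
import tornado.web

class ShowHandler(tornado.web.RequestHandler):
    def get(self):
        # stand-in body; the real handler is the get() snippet above
        self.render('show.html')

class InsertHandler(tornado.web.RequestHandler):
    def post(self):
        # stand-in body; the real handler is the post() snippet above
        self.redirect('/insert')

if __name__ == "__main__":
    app = tornado.web.Application([(r"/show", ShowHandler),
                                   (r"/insert", InsertHandler)])
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()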
def get_file_info(self, **kw):
    if "file_id" in kw:
        file_id = kw["file_id"]
        str_sql = "select * from FileInfo where FileID = '%s'" % (file_id)
    elif "file_md5" in kw:
        file_md5 = kw["file_md5"]
        str_sql = "select * from FileInfo where FileMD5 = '%s'" % (file_md5)
    elif "file_path" in kw:
        file_path = kw["file_path"]
        str_sql = "select * from FileInfo where FilePath = '%s'" % (file_path)
    else:
        # Guard against an unbound str_sql when no recognized key is passed.
        raise ValueError("expected one of: file_id, file_md5, file_path")
    logger.debug("Get FileInfo :" + str_sql)
    db = DBUtil()
    try:
        data = db.get_data(str_sql)
    finally:
        db.close_db()
    return data
job_name,start_time,dep_job,start_time
"""


def get_dependency(cursor, job_name, dep_jobs):
    """Recursively collect every upstream dependency of job_name into dep_jobs."""
    dep_sql = "select job_name,dependency_job from t_etl_job_dependency where job_name = %s"
    cursor.execute(dep_sql, (job_name,))
    deps = cursor.fetchall()
    for dep_job in deps:
        dep = dep_job["dependency_job"]
        if dep not in dep_jobs:  # guard against cycles in the dependency graph
            dep_jobs.add(dep)
            get_dependency(cursor, dep, dep_jobs)
    return dep_jobs


if __name__ == '__main__':
    dbUtil = DBUtil()
    connection = dbUtil.get_connection()
    cursor = connection.cursor(MySQLdb.cursors.DictCursor)
    today = DateUtil.get_now_fmt()
    job_sql = "select job_name,last_start_time,last_end_time from t_etl_job where 1=1 "
    cursor.execute(job_sql + " and last_run_date=%s", (today,))
    jobs = cursor.fetchall()
    count = 0
    failed = 0
    error = 0
    for job in jobs:
        job_name = job["job_name"]
        job_start_time = datetime.datetime.strptime(job["last_start_time"],
                                                    "%Y-%m-%d %H:%M:%S")
light1.setLightType(LightType.Directional)
light1.setLightDirection(Vector3(-1.0, -1.0, -1.0))
light1.setColor(Color(1.0, 1.0, 1.0, 1.0))
light1.setAmbient(Color(0.2, 0.2, 0.2, 1.0))
light1.setEnabled(True)

# Camera
cam = getDefaultCamera()
cam.setPosition(Vector3(46930.8, 7805.12, 65433.8))
cam.setOrientation(Quaternion(-0.99, 0.07, 0.07, 0.01))
cam.getController().setSpeed(2000)
setNearFarZ(2, 400000)

# UI
uim = UiModule.createAndInitialize()

# DB
db = DBUtil(db_filename)

# Stations
station_rows = db.getStations()
station_data = db.getData('20170515090000')
size_scale = [0.3, 0.3]  # as in config.ini
stations = {}
for r in station_rows:
    s = Station(r, uim)
    s.updatePosition(ref_point, size_scale, data_height_scale)
    stations[s.id] = s
for d in station_data:
    stations[d[0]].updateData(d)
def __init__(self):
    '''Initialize the database connection info.'''
    path = 'data/userinfo.db'
    table = 'userinfo'
    self.db = DBUtil(path, table)
def main():
    dbutil = DBUtil(args.db)
    result = dbutil.get_nutrition_for_plant(1)
    pprint.pprint(result)
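# main() above references a module-level `args` object with a `db` attribute.
# A minimal argparse sketch that would provide it; the argument name matches
# the attribute used above, but the help text is an assumption.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('db', help='path to the plant-nutrition database file')
args = parser.parse_args()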
def get(self):
    db = DBUtil('Info.db', 'information')
    data = json.dumps(db.getUsers())
    db.close()
    self.write(data)
def setUp(self):
    print('Initializing before the test case runs...')
    self.dbutil = DBUtil('localhost', 'root', '', 'ops_db')
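# A matching tearDown keeps test cases isolated. This snippet does not show
# DBUtil's close method; other DBUtil variants in this collection expose
# close() or close_db(), so the call below is an assumption to adjust to the
# actual API.
def tearDown(self):
    print('Cleaning up after the test case...')
    self.dbutil.close()  # assumed close method name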
def __init__(self):
    self.config = ConfigUtil()
    self.dbUtil = DBUtil()
    self.dboption = DBOption()
    self.smsUtil = SMSUtil()
def __init__(self, interval=1):
    logging.info('initializing performance handler ...')
    self.interval = interval
    self.queue = Queue.Queue()
    self.loadcount = 0

    logging.info('Reading application config ...')
    conf = os.path.join(os.path.dirname(__file__), './config/application.conf')
    config = ConfigParser.ConfigParser()
    config.read(conf)
    host = config.get('DB', 'HOST')
    port = config.get('DB', 'PORT')
    user = config.get('DB', 'USER')
    password = config.get('DB', 'PASSWORD')
    readdb = config.get('DB', 'READDB')
    writedb = config.get('DB', 'WRITEDB')
    self.loopcount = int(config.get('PROCESSING', 'SAMPLE_LOOP_COUNT_IN_SEC'))
    self.waittime = int(config.get('PROCESSING', 'SAMPLE_WAIT_TIME_IN_MS'))
    self.aggregationtime = int(config.get('PROCESSING', 'SAMPLE_AGGREGATION_TIME_IN_SEC'))
    self.rampuptime = int(config.get('PROCESSING', 'RAMP_UP_TIME'))
    containertype = config.get('PROCESSING', 'CONTAINER_TYPE')
    action_model = config.get('PROCESSING', 'ACTION_MODEL')

    logging.info('Reading benchmark config ...')
    benchconf = os.path.join(os.path.dirname(__file__), './config/benchmark.conf')
    self.activebench = create_benchmark(benchconf, containertype)

    conf = os.path.join(os.path.dirname(__file__), './config/interference.info')
    interference_config = ConfigParser.ConfigParser()
    interference_config.read(conf)

    containermanager_port = config.get('CONTAINER_MANAGER', 'PORT')
    containermanager_ip = config.get('CONTAINER_MANAGER', 'HOST_NAME')
    managertype = config.get('CONTAINER_MANAGER', 'TYPE')
    self.container_manager = create_container_manager(
        managertype, containermanager_ip, containermanager_port)

    dointerference = config.get('PROCESSING', 'PERFORM_INTERFERENCE') == 'true'
    if dointerference:
        interference_wkload_file = os.path.join(os.path.dirname(__file__),
                                                './config/int_load.csv')
        interference = {}
        for key, value in interference_config.items('INTERFERENCE'):
            interference[key.upper()] = value
        self.loadInterferenceData(interference_wkload_file, interference)

    # Resource-allocation schedule: (time offset, (cores, memory)) pairs.
    self.reschange = []
    for key, value in interference_config.items('RESOURCE_ALLOC'):
        res = value.split(',')
        core = int(res[0])
        mem = int(res[1])
        self.reschange.append((int(key), (core, mem)))

    details = self.activebench.getTargetDetails()
    self.targethost = details[0]
    self.targetcontainer = details[2]
    loadfile = details[5]
    loadfile = os.path.join(os.path.dirname(__file__), loadfile)
    with open(loadfile, 'rb') as f:
        reader = csv.reader(f)
        self.load = list(reader)

    self.perfaction = PerfAction(self.container_manager, details[1], details[2],
                                 details[3], details[4], action_model)
    print 'cores: ' + str(self.perfaction.getCurrentCoreCount())

    self.perftime_map = {}
    for server in self.activebench.getPerfServers():
        self.perftime_map[server] = 0

    self.dbutil = DBUtil(host, int(port), user, password, readdb, writedb)

    thread = threading.Thread(target=self.run, args=())
    logging.info('Starting handler thread for benchmark ...')
    thread.daemon = True  # Daemonize thread
    thread.start()  # Start the execution