Ejemplo n.º 1
0
 def __init__(self, init_flag=False):
     """
     Open the userinfo SQLite database via DBUtil.

     :param init_flag: forwarded to DBUtil; presumably triggers schema
         initialization when True — TODO confirm against DBUtil.
     """
     path = 'data/userinfo.db'
     self.query_result = ""
     self.db = DBUtil(path, init_flag=init_flag)
Ejemplo n.º 2
0
    def get(self):
        """Dump all users to data.json, then render show.html.

        Fix: the original serialized twice (``json.dumps`` then
        ``json.dump``), so data.json held a JSON-encoded *string* instead
        of the user list itself.  Also closes the DB even if the file
        write fails.
        """
        db = DBUtil('Info.db', 'information')
        try:
            users = db.getUsers()
            with open('data.json', 'w+') as outfile:
                json.dump(users, outfile)
        finally:
            db.close()

        self.render('show.html')
    def delete_file_info(self, file_id):
        """Delete the FileInfo row whose FileID equals file_id.

        NOTE(review): the SQL is built by %-interpolation — safe only if
        file_id is a trusted internal GUID; confirm no user input reaches
        here.  exec_sql and close_db run on two separate DBUtil()
        instances — presumably DBUtil shares one underlying connection,
        TODO confirm.
        """
        str_sql = "delete from FileInfo where FileID = ('%s')" % (file_id)
        logger.debug("Delete FileInfo SQL:" + str_sql)

        try:
            DBUtil().exec_sql(str_sql)
        finally:
            DBUtil().close_db()
    def get_file_by_file_path(self, file_path):
        """Look up FileInfo rows by file path, case-sensitively
        (MySQL ``BINARY`` comparison).

        NOTE(review): SQL built by %-interpolation — injection risk if
        file_path can contain quotes; confirm callers pass trusted paths.
        """
        str_sql = "select * from FileInfo where BINARY FilePath = '%s'" % (file_path)
        logger.debug("Get FileInfo by file_path :" + str_sql)
        try:
            data = DBUtil().get_data(str_sql)
        finally:
            DBUtil().close_db()
        return data
    def save_file_info(self, file_name, file_path, file_md5):
        """Insert one FileInfo row with a freshly generated GUID FileID.

        NOTE(review): SQL built by %-interpolation — a quote in any
        argument breaks the statement (injection risk); exec_sql and
        close_db use two separate DBUtil() instances — presumably they
        share one connection, TODO confirm.
        """
        file_id = ComFun().get_guid()
        logger.debug("file_md5:" + file_md5)
        logger.debug("file_path:" + file_path)
        str_sql = "insert into FileInfo (FileID,FileName,FilePath,FileMD5) \
                   values ('%s', '%s', '%s', '%s')" \
            % (file_id, file_name, file_path, file_md5)

        logger.debug("Add FileInfo SQL:" + str_sql)
        try:
            DBUtil().exec_sql(str_sql)
        finally:
            DBUtil().close_db()
Ejemplo n.º 6
0
def add_songlist2mid(conn, songlistid, mid):
    """Register songlistid in minik `mid`'s dst_songlist sync column.

    If the minik row already has a dst_songlist, the id is appended;
    otherwise an initial list is rebuilt from every published songlist
    whose include_mset machsets contain this mid.  The deduplicated,
    sorted list is written back to t_runtime_songlist_sync.

    NOTE(review): the `conn` parameter is unused — every query goes
    through a fresh DBUtil(); confirm whether that is intentional.  SQL
    is built by string interpolation, so arguments must be trusted.
    """
    sql = "select * from t_runtime_songlist_sync where mid=" + str(mid)
    rows = DBUtil().get_data(sql)
    if rows:
        mid_row = rows[0]
        if mid_row["dst_songlist"]:
            # Existing JSON list: just append the new songlist id.
            dst_songlist = demjson.decode(mid_row["dst_songlist"])
            logger.debug(
                "minik[{}] dst_songlist exists, dst_songlist is {}".format(
                    mid, dst_songlist))
            dst_songlist.append(songlistid)
        else:
            # No list yet: derive it from the published songlists whose
            # machine sets include this mid.
            sql = "select * from t_publish_songlist"
            allpubsonglist = DBUtil().get_data(sql)
            dst_songlist = []
            if allpubsonglist:
                for psl in allpubsonglist:
                    psl_id = psl["songlistid"]
                    psl_imset = demjson.decode(psl["include_mset"])
                    for psl_imset_id in psl_imset:
                        sql = "select * from t_config_machset where id = '{}'".format(
                            psl_imset_id)
                        sql_r = DBUtil().get_data(sql)
                        if sql_r and sql_r[0]:
                            psl_imset_machset = demjson.decode(
                                sql_r[0]["machset"])
                            if mid in psl_imset_machset:
                                dst_songlist.append(psl_id)
                        else:
                            logger.error(
                                'can not found machset where id = {}'.format(
                                    psl_imset_id))
            else:
                logger.warn('can not found any songlist')
                return
        # Deduplicate, sort, and persist as JSON.
        songlist = list(set(dst_songlist))
        songlist.sort()
        s = demjson.encode(songlist)
        logger.debug("allsonglist for minik[{}] is {}".format(mid, s))
        sql = "update t_runtime_songlist_sync set dst_songlist='%s' where mid=%d" % (
            s, mid)
        DBUtil().exec_sql(sql)
        logger.debug("mid[%d] add songlist(%d) to sync\n" % (mid, songlistid))
    else:
        logger.warn(
            "mid[%d] add songlist(%d) to sync failed, mid no in sync table\n" %
            (mid, songlistid))
        return
    def get_file_info(self, **kw):
        """Fetch FileInfo rows by exactly one of: file_id, file_md5,
        file_path (checked in that priority order).

        :raises ValueError: if none of the supported keywords is given.
            (The original fell through and raised NameError on str_sql.)

        NOTE(review): SQL built by %-interpolation — injection risk if a
        value can contain quotes; confirm callers pass trusted values.
        """
        if "file_id" in kw:
            str_sql = "select * from FileInfo where FileID = '%s'" % (kw["file_id"])
        elif "file_md5" in kw:
            str_sql = "select * from FileInfo where FileMD5 = '%s'" % (kw["file_md5"])
        elif "file_path" in kw:
            str_sql = "select * from FileInfo where FilePath = '%s'" % (kw["file_path"])
        else:
            raise ValueError(
                "get_file_info needs one of: file_id, file_md5, file_path")

        logger.debug("Get FileInfo  :" + str_sql)

        try:
            data = DBUtil().get_data(str_sql)
        finally:
            DBUtil().close_db()

        return data
Ejemplo n.º 8
0
class MyTest(unittest.TestCase):
    """Integration tests for DBUtil against the local ops_db.vir_host table."""

    @classmethod
    def setUpClass(cls):
        # Runs once before the whole suite.
        print('所有用例执行前的初始...')

    @classmethod
    def tearDownClass(cls):
        # Runs once after the whole suite.
        print('所有用例执行后的清理...')

    def setUp(self):
        # Fresh DB handle before each test.
        print('当前用例执行前初始化...')
        self.dbutil = DBUtil('localhost','root','','ops_db')

    def tearDown(self):
        # Leave vir_host empty for the next test.
        print('当前用例执行后清理...')
        sql = 'delete from vir_host'
        params = ()
        self.dbutil.update(sql,params)

    def test_update(self):
        """Insert one row, then read it back by ip and check its type."""
        sql = 'insert into vir_host(ip,purpose,type,state,root_pass,depart,owner,email,phone,memo)  values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
        params = ('127.0.0.1',u'测试','test','active','123kissme',u'工业互联网','song','*****@*****.**','18638217761','haha')
        self.dbutil.update(sql,params)

        sql = 'select ip,purpose,type,state,root_pass,depart,owner,email,phone,memo from vir_host where ip = %s'
        # BUG FIX: ('127.0.0.1') is just a parenthesized string — a
        # one-element tuple needs the trailing comma.
        params = ('127.0.0.1',)
        result = self.dbutil.query(sql,params)

        self.assertEqual('test',result[0][2])

    def test_batchUpdate(self):
        """Insert two rows via batchUpdate and verify the row count."""
        sql1 = 'insert into vir_host(ip,purpose,type,state,root_pass,depart,owner,email,phone,memo)  values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
        params1 = ('127.0.0.1',u'测试1','test1','active','123kissme',u'工业互联网','song','*****@*****.**','18638217761','haha')
        sql2 = 'insert into vir_host(ip,purpose,type,state,root_pass,depart,owner,email,phone,memo)  values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
        params2 = ('127.0.0.2',u'测试2','test2','active','123kissme',u'工业互联网','song','*****@*****.**','18638217761','haha')

        sql_params = [
            {'sql': sql1, 'params': params1},
            {'sql': sql2, 'params': params2},
        ]

        self.dbutil.batchUpdate(sql_params)

        sql = 'select count(1) from vir_host '
        params = ()
        result = self.dbutil.query(sql,params)

        self.assertEqual(2,result[0][0])
Ejemplo n.º 9
0
    def post(self):
        """Insert one (name, age, city) record from the posted form
        fields, then redirect back to /insert."""
        name = self.get_argument('name')
        age = int(self.get_argument('age'))  # ValueError propagates on non-numeric input
        city = self.get_argument('city')

        db = DBUtil('Info.db', 'information')
        db.insert(name, age, city)
        db.close()
        
        self.redirect('/insert')
Ejemplo n.º 10
0
# Scene lighting: one white directional light with low ambient.
light1.setLightType(LightType.Directional)
light1.setLightDirection(Vector3(-1.0, -1.0, -1.0))
light1.setColor(Color(1.0, 1.0, 1.0, 1.0))
light1.setAmbient(Color(0.2, 0.2, 0.2, 1.0))
light1.setEnabled(True)
# Camera
cam = getDefaultCamera()
cam.setPosition(Vector3(46930.8, 7805.12, 65433.8))
cam.setOrientation(Quaternion(-0.99, 0.07, 0.07, 0.01))
cam.getController().setSpeed(2000)
setNearFarZ(2, 400000)  # wide depth range for the large-scale scene
# UI
uim = UiModule.createAndInitialize()

# DB
db = DBUtil(db_filename)

# Stations: one Station object per DB row, positioned relative to
# ref_point, then updated with the data snapshot for 2017-05-15 09:00:00.
station_rows = db.getStations()
station_data = db.getData('20170515090000')
size_scale = [0.3, 0.3] # as in config.ini
stations = {}
for r in station_rows:
    s = Station(r, uim)
    s.updatePosition(ref_point, size_scale, data_height_scale)
    stations[s.id] = s

# d[0] presumably is the station id — TODO confirm getData's row layout.
for d in station_data:
    stations[d[0]].updateData(d)

Ejemplo n.º 11
0
class UserInfoService(object):
    '''
    Data-access service for the userinfo SQLite database.

    Wraps DBUtil and provides save/update/query helpers for the
    userinfo, userflag, userread and tokens tables:
        1.save(data)
        2.save_flag(data)
        3.save_read_record(data)
        4.update(data)
        5.update_flag(data)
        6.update_all_flag(data)
        7.get_all()
        8.get_all_user_read()
        9.get_all_user_flag(read_flag, share_flag)
        12.get_register()
    '''

    def __init__(self):
        '''Open the userinfo database via DBUtil.'''
        path = 'data/userinfo.db'
        table = 'userinfo'
        self.db = DBUtil(path, table)

    def create_token_table(self):
        '''Create the `tokens` table if it does not already exist.'''
        userflag_sql = '''CREATE TABLE IF NOT EXISTS `tokens` (
				  `id` INTEGER PRIMARY KEY AUTOINCREMENT,
				  `member_id` int(11) NOT NULL,
				  `token` varchar(64) NOT NULL,
				   FOREIGN KEY(`member_id`) REFERENCES `userinfo`(`member_id`)
				)'''
        conn = self.db.get_conn()
        self.db.create_table(conn, userflag_sql)

    def save(self, data):
        '''
        Save one userinfo record.

        data = [(member_id (int), telephone (string),
                balance (string), coin (int), invite_code (string),
                teacher_id (int), device_code (string))]
        '''
        save_sql = '''INSERT INTO userinfo 
			(member_id, telephone, balance, coin, invite_code, teacher_id, device_code
			) 
			VALUES 
			(?, ?, ?, ?, ?, ?, ?)
		'''
        conn = self.db.get_conn()
        self.db.save(conn, save_sql, data)

    def save_token(self, data):
        '''
        Save one token record.

        args: data = [(member_id (int), token (str))]
        '''
        save_sql = '''INSERT INTO tokens
				(member_id, token) VALUES (?, ?)
			'''
        conn = self.db.get_conn()
        self.db.save(conn, save_sql, data)

    def init_all_user_token(self):
        '''Create the tokens table and insert a placeholder "token" row
        for every existing user.'''
        self.create_token_table()
        users = self.get_all()
        for user in users:
            data = [(user[0], "token")]
            self.save_token(data)
            print("insert one")

    def save_flag(self, data):
        '''
        Save one userflag record.

        args: data = [(member_id (int),)]
        '''
        save_sql = '''INSERT INTO userflag
				(member_id) VALUES (?)
			'''
        conn = self.db.get_conn()
        self.db.save(conn, save_sql, data)

    def save_read_record(self, data):
        '''
        Save one user read record.

        arg: data = [(member_id (int), read_time (string), read_count (int))]
        '''
        save_sql = '''INSERT INTO userread
				(member_id, read_time, read_count)
				VALUES
				(?, ?, ?)
			'''
        conn = self.db.get_conn()
        self.db.save(conn, save_sql, data)

    def update(self, data):
        '''
        Update balance and coin of one userinfo record.

        data = [(balance (string), coin (int), member_id (int))]
        '''
        update_sql = '''UPDATE userinfo 
					SET balance=?, coin=?
					WHERE member_id=?
				'''
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, data)

    def update_user_info(self, member_id, device_code, token):
        '''
        Update one user's device_code, then its token.

        args: member_id, device_code, token
        '''
        update_sql = '''UPDATE userinfo
				set device_code=? WHERE member_id=?'''
        data = [(device_code, member_id)]
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, data)

        self.update_token([(token, member_id)])

    def update_token(self, data):
        '''
        Update one user's token record.

        args: data = [(token (str), member_id (int))]
        '''
        update_sql = '''UPDATE tokens
				set token=? WHERE member_id=?'''
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, data)

    def update_flag(self, data):
        '''
        Update one userflag record.

        data = [(read_flag (int {0,1,2}), share_flag (int {0,1}), member_id (int))]
        '''
        update_sql = '''UPDATE userflag
					SET read_flag=?, share_flag=?
					WHERE member_id=?
				'''
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, data)

    def update_all_flag(self):
        '''Reset read_flag to 0 for every userflag row.'''
        update_sql = ''' UPDATE userflag
					SET read_flag=?
				'''
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, [(0, )])

    def delete(self, tel):
        '''Delete every user matching telephone `tel` from the userflag,
        userread and userinfo tables (tokens rows are kept).'''
        users = self.get_user_mobile(tel)
        for user in users:
            update_sql = ''' DELETE FROM userflag WHERE member_id=?'''
            conn = self.db.get_conn()
            self.db.update(conn, update_sql, [(user[0], )])
            update_sql = ''' DELETE FROM userread WHERE member_id=?'''
            conn = self.db.get_conn()
            self.db.update(conn, update_sql, [(user[0], )])
            update_sql = ''' DELETE FROM userinfo WHERE member_id=?'''
            conn = self.db.get_conn()
            self.db.update(conn, update_sql, [(user[0], )])

    def get_all(self):
        '''Return all userinfo rows.'''
        sql = '''SELECT * FROM userinfo'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql)
        return res

    def get_all_tokens(self):
        '''Return all token rows.

        NOTE(review): this getter first DELETEs every token row with
        id<67 — looks like a leftover one-off cleanup; confirm before
        relying on it.
        '''
        update_sql = ''' DELETE FROM tokens WHERE id<?'''
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, [(67, )])
        sql = '''SELECT * FROM tokens'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql)
        return res

    def get_all_user_read(self):
        '''Return all userread rows.'''
        sql = '''SELECT * FROM userread'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql)
        return res

    def get_all_user_flag(self, read_flag, share_flag=1):
        '''
        Return all userflag rows matching read_flag and share_flag.

        args: read_flag (0, 1, 2)
              share_flag (0, 1)
        '''
        sql = '''SELECT * FROM userflag WHERE read_flag=? AND share_flag=?'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql, (read_flag, share_flag))
        return res

    def get_one(self, member_id):
        '''
        Return one userinfo row by member_id, or None if absent.

        arg: member_id (int)
        '''
        sql = '''SELECT * FROM userinfo WHERE member_id=?'''
        conn = self.db.get_conn()
        # BUG FIX: the original passed the undefined name `uid` here,
        # raising NameError on every call.
        res = self.db.fetchone(conn, sql, member_id)

        if len(res) > 0:
            return res[0]
        else:
            return None

    def get_user_mobile(self, telephone):
        '''
        Return userinfo rows matching telephone, or [] if none.

        arg: telephone (string)
        '''
        sql = '''SELECT * FROM userinfo WHERE telephone=?'''
        conn = self.db.get_conn()
        res = self.db.fetchone(conn, sql, telephone)

        if len(res) > 0:
            return res
        else:
            return []

    def get_token(self, member_id):
        '''Return token rows for member_id, or [] if none.'''
        sql = '''SELECT * FROM tokens WHERE member_id=?'''
        conn = self.db.get_conn()
        res = self.db.fetchone(conn, sql, member_id)

        if len(res) > 0:
            return res
        else:
            return []

    def get_register(self):
        '''Return registered users (share_flag=0), or [] if none.'''
        sql = ''' SELECT * FROM userinfo ui, userflag uf
				WHERE ui.member_id = uf.member_id AND uf.share_flag=0
			'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql)
        if len(res) > 0:
            return res
        else:
            return []

    def get_one_user(self):
        '''Return one random user with read_flag=0 and share_flag=1, or
        None if there is none.'''

        sql = '''SELECT * 
			FROM userinfo ui, userflag uf
			WHERE ui.member_id = uf.member_id AND uf.read_flag = 0 AND uf.share_flag=1
		'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql)
        if len(res) > 0:
            return res[random.randint(0, len(res) - 1)]
        else:
            return None

    def get_all_already_read_user(self):
        '''Return users that finished reading (read_flag=2, share_flag=1).'''
        sql = '''SELECT * 
			FROM userinfo ui, userflag uf
			WHERE ui.member_id = uf.member_id AND uf.read_flag = 2 AND uf.share_flag=1
		'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql)
        return res

    def get_all_not_read_user(self):
        '''Return users that have not started reading (read_flag=0, share_flag=1).'''
        sql = '''SELECT * 
			FROM userinfo ui, userflag uf
			WHERE ui.member_id = uf.member_id AND uf.read_flag = 0 AND uf.share_flag=1
		'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql)
        return res

    def get_all_reading_user(self):
        '''Return users currently reading (read_flag=1, share_flag=1).'''
        sql = '''SELECT * 
			FROM userinfo ui, userflag uf
			WHERE ui.member_id = uf.member_id AND uf.read_flag = 1 AND uf.share_flag=1
		'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql)
        return res

    def get_user_read_count(self, member_id):
        '''Return the member's total read_count since local midnight
        (0 if no rows).'''
        # Truncate "now" to local midnight by formatting to %Y-%m-%d and
        # parsing it back, then compare against the stored read_time.
        time_ms = int(time.time())
        time_local = time.localtime(time_ms)
        time_format = "%Y-%m-%d"
        time_str = time.strftime(time_format, time_local)
        time_local = time.strptime(time_str, time_format)
        time_ms = int(time.mktime(time_local))
        sql = '''SELECT sum(read_count) FROM userread
				WHERE member_id = ? AND read_time > ?
			'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql, (member_id, time_ms))
        if res[0][0]:
            return res[0][0]
        else:
            return 0

    def get_user_coin(self, coin):
        '''Return users whose coin is at least `coin`.'''
        sql = '''SELECT * FROM userinfo
				WHERE coin >= ?'''
        conn = self.db.get_conn()
        # BUG FIX: the original passed the undefined name `score` here,
        # raising NameError on every call.
        res = self.db.fetchall(conn, sql, (coin, ))
        return res

    def get_user_balance(self, balance):
        '''Return users whose balance equals `balance`.'''
        sql = '''SELECT * FROM userinfo
				WHERE balance == ?'''
        conn = self.db.get_conn()
        res = self.db.fetchall(conn, sql, (balance, ))
        return res

    def get_time_str(self, time_ms, time_format="%Y-%m-%d"):
        '''Format a POSIX timestamp (seconds) as a local-time string.'''
        time_local = time.localtime(time_ms)
        time_str = time.strftime(time_format, time_local)
        return time_str
Ejemplo n.º 12
0
 def __init__(self):
     '''Open the userinfo SQLite database (table: userinfo) via DBUtil.'''
     path = 'data/userinfo.db'
     table = 'userinfo'
     self.db = DBUtil(path, table)
Ejemplo n.º 13
0
def main():
    """Open the database named by the module-level CLI `args.db` and
    pretty-print the nutrition data for plant id 1."""
    dbutil = DBUtil(args.db)

    result = dbutil.get_nutrition_for_plant(1)
    pprint.pprint(result)
Ejemplo n.º 14
0
class UserInfoService(object):
    """
    UserInfo Service: DBUtil-backed access to the userinfo, userflag and
    userread tables. Query results are also cached on self.query_result.
    """
    def __init__(self, init_flag=False):
        """
        Open the userinfo SQLite database.

        :param init_flag: forwarded to DBUtil; presumably triggers schema
            initialization when True — TODO confirm against DBUtil.
        """
        path = 'data/userinfo.db'
        self.query_result = ""
        self.db = DBUtil(path, init_flag=init_flag)

    def save(self, data):
        """
            save one userinfo record
            
            data = [( (uid (int), name (string), mobile (string),  
                    father (string), balance (string), coin (int), 
                    device_code (string), token (string), os (string),
                    brand (string), mac (string), android_id (string) )]
        """
        save_sql = """INSERT INTO userinfo 
            (uid , username, mobile, father, balance, coin, device_code, token, os, brand, mac, android_id) 
            VALUES 
            (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """
        conn = self.db.get_conn()
        self.db.save(conn, save_sql, data)

    def save_flag(self, data):
        """
            save one userflag record
            args: data = [(uid(int),)]
        """
        save_sql = """INSERT INTO userflag
                (uid) VALUES (?)
            """
        conn = self.db.get_conn()
        self.db.save(conn, save_sql, data)

    def save_read_record(self, data):
        """
            save one read record
            
            arg: data = [(uid(int), read_time(string), read_count(int))]
        """
        save_sql = """INSERT INTO userread
                (uid, read_time, read_count)
                VALUES
                (?, ?, ?)
            """
        conn = self.db.get_conn()
        self.db.save(conn, save_sql, data)

    def update(self, data):
        """
            update one userinfo record
            
            data = [
                (balance (string), coin (int), token (string), device_code(string), uid (int))
            ]
        """
        update_sql = """UPDATE userinfo 
                    SET balance=?, coin=?, token=?, device_code=?
                    WHERE uid=?
                """
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, data)

    def update_read_flag(self, data):
        """
            update one userflag record
            
            data = [(read_flag(int{0,1,2}), (uid(int))]
        """
        update_sql = """UPDATE userflag
                    SET read_flag=?
                    WHERE uid=?
                """
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, data)

    def clear_all_flag(self):
        """
            reset read_flag to 0 on every userflag record
        """
        update_sql = """ UPDATE userflag
                    SET read_flag=?
                """
        conn = self.db.get_conn()
        self.db.update(conn, update_sql, [(0, )])

    def get_all(self):
        """
            query all userinfo records
        """
        sql = """SELECT * FROM userinfo"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql)
        return self.query_result

    def get_all_user_read(self):
        """
            query all userread
        """
        sql = """SELECT * FROM userread"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql)
        return self.query_result

    def get_all_user_flag(self, read_flag):
        """
            query all userflag rows by read_flag
            
            args: read_flag(0, 1, 2)
            return: userflag row list
        """
        sql = """SELECT * FROM userflag WHERE read_flag=?"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql, (read_flag, ))
        return self.query_result

    def get_one(self, uid):
        """
            query one userinfo record by uid
            arg: uid (int)
            return one userinfo or None
        """
        sql = """SELECT * FROM userinfo WHERE uid=?"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql, (uid, ))

        if len(self.query_result) > 0:
            return self.query_result[0]
        else:
            return None

    def get_user_mobile(self, mobile):
        """
            query one userinfo record by mobile
            
            arg: mobile (string)
            :return one userinfo or None
        """
        sql = """SELECT * FROM userinfo WHERE mobile=?"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql, (mobile, ))

        if len(self.query_result) > 0:
            return self.query_result[0]
        else:
            return None

    def get_one_user(self):
        """
            query one random userinfo record where read_flag=0
            :return one userinfo or None
        """

        sql = """SELECT * FROM userinfo ui, userflag uf
            WHERE ui.uid = uf.uid AND uf.read_flag=0"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql)
        if len(self.query_result) > 0:
            return self.query_result[random.randint(0,
                                                    len(self.query_result) -
                                                    1)]
        else:
            return None

    def get_all_already_read_user(self):
        """
            query userinfo records where read_flag=2 (finished reading)
            :return  userinfo list
        """
        sql = """SELECT * 
            FROM userinfo ui, userflag uf
            WHERE ui.uid = uf.uid AND uf.read_flag=2"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql)
        return self.query_result

    def get_all_not_read_user(self):
        """
            query userinfo records where read_flag=0
            :return userinfo list
        """
        sql = """SELECT * FROM userinfo ui, userflag uf
            WHERE ui.uid = uf.uid AND uf.read_flag=0"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql)
        return self.query_result

    def get_all_reading_user(self):
        """
            query userinfo records where read_flag=1
            :return userinfo list 
        """
        sql = """SELECT * FROM userinfo ui, userflag uf
            WHERE ui.uid = uf.uid AND uf.read_flag = 1"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql)
        return self.query_result

    def get_user_read_count(self, uid):
        """
            query total read count by uid since local midnight
            :return read_count (int), 0 if no rows
        """
        # Truncate "now" to local midnight: format to %Y-%m-%d, parse it
        # back, and use that timestamp as the read_time lower bound.
        time_ms = int(time.time())
        time_local = time.localtime(time_ms)
        time_format = "%Y-%m-%d"
        time_str = time.strftime(time_format, time_local)
        time_local = time.strptime(time_str, time_format)
        time_ms = int(time.mktime(time_local))
        sql = """SELECT sum(read_count) FROM userread
                WHERE uid = ? AND read_time > ?
            """
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql, (uid, time_ms))
        if self.query_result[0][0]:
            return self.query_result[0][0]
        else:
            return 0

    def get_user_coin(self, coin):
        """
            query userinfo records where coin >= coin
        """
        sql = """SELECT * FROM userinfo
                WHERE coin >= ?"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql, (coin, ))
        return self.query_result

    def get_user_balance(self, balance):
        """
            query userinfo records where balance >= balance
            :return userinfo list 
        """
        sql = """SELECT * FROM userinfo
                WHERE balance >= ?"""
        conn = self.db.get_conn()
        self.query_result = self.db.fetchall(conn, sql, (balance, ))
        return self.query_result

    def get_time_str(self, time_ms, time_format="%Y-%m-%d"):
        """
            transfer the time_ms (POSIX seconds) to some format
            :return format_time string
        """
        time_local = time.localtime(time_ms)
        time_str = time.strftime(time_format, time_local)
        return time_str
Ejemplo n.º 15
0
 def execute_reports(self):
     # Delegate to DBUtil.create_report with this instance's configured
     # DB config, query, output file name and output directory.
     DBUtil.create_report(self.__db_config, self.__sql_query, self.__file_name, self.__dir_data_out)
Ejemplo n.º 16
0
class ETLMonitor(object):
    """Daily ETL monitor (Python 2): verifies that each job started only
    after all of its transitive dependencies finished, gathers per-status
    job counts, and texts the summary to the admin phone numbers."""

    def __init__(self):
        # Config, DB, DB-option and SMS helpers.
        self.config = ConfigUtil()
        self.dbUtil = DBUtil()
        self.dboption = DBOption()
        self.smsUtil = SMSUtil()

    def get_dependency(self, cursor, job_name, dep_jobs):
        """Recursively collect the transitive dependencies of job_name
        into the set dep_jobs (mutated in place and returned).

        NOTE(review): a cycle in t_etl_job_dependency would recurse
        forever — confirm the dependency data is acyclic.
        """
        dep_sql = "select job_name,dependency_job from t_etl_job_dependency where job_name = %s"
        cursor.execute(dep_sql, (job_name, ))
        deps = cursor.fetchall()
        for dep_job in deps:
            dep_jobs.add(dep_job["dependency_job"])
            self.get_dependency(cursor, dep_job["dependency_job"], dep_jobs)
        return dep_jobs

    def run_check(self, cursor, today, msg):
        """For every job that ran today, flag it as failed when it
        started before any of its dependencies finished; append failure
        stats to msg."""
        job_sql = "select job_name,last_start_time,last_end_time from t_etl_job where 1=1 "

        cursor.execute(job_sql + " and last_run_date=%s", (today, ))
        jobs = cursor.fetchall()

        count = 0
        failed = 0
        error = 0
        failed_job = []
        for job in jobs:
            job_name = job["job_name"]
            job_start_time = datetime.datetime.strptime(
                job["last_start_time"], "%Y-%m-%d %H:%M:%S")
            dep_jobs = set()
            self.get_dependency(cursor, job_name, dep_jobs)
            for dep_job in dep_jobs:
                cursor.execute(job_sql + " and job_name = %s", (dep_job, ))
                dep_job_detail = cursor.fetchone()
                # print dep_job_detail
                try:
                    dep_job_end_time = datetime.datetime.strptime(
                        dep_job_detail["last_end_time"], "%Y-%m-%d %H:%M:%S")
                    # Negative/zero duration: the job started before (or
                    # exactly when) its dependency ended — ordering bug.
                    duration = (job_start_time -
                                dep_job_end_time).total_seconds()
                    if duration <= 0:
                        failed += 1
                        print job_name, job_start_time, dep_job, dep_job_end_time, str(
                            duration)
                        failed_job.append(job_name)
                except Exception as e:
                    print traceback.format_exc()
                    print "job->", job
                    print "dep_job->", dep_job_detail
                    error += 1
            count += 1
        print "check:" + str(count) + " jobs failed:" + str(
            failed) + " exception:" + str(error)
        if len(failed_job) > 0 or failed > 0:
            msg.append("调度异常数:" + str(failed))
            msg.append("调度异常任务:" + str(",".join(failed_job)))

    def run_count(self, cursor, today, msg):
        """Append today's total job count and per-status counts to msg."""
        total_sql = 'select count(*) as job_count from t_etl_job where pending_time >= %s '
        cursor.execute(total_sql, (today, ))
        row = cursor.fetchone()
        msg.append("总的任务数:" + str(row['job_count']))
        sql = "select job_status,count(*) as job_count from t_etl_job where last_start_time >= %s group by job_status"
        cursor.execute(sql, (today, ))
        rows = cursor.fetchall()
        for row in rows:
            msg.append(str(row['job_status']) + ":" + str(row['job_count']))

    # today 共运行完成 xx job
    def run(self):
        """Entry point: run the counts and checks for today, then SMS the
        collected message lines to every admin phone."""
        today = DateUtil.get_now_fmt(None)
        msg = []
        connection = self.dbUtil.get_connection()
        cursor = connection.cursor(MySQLdb.cursors.DictCursor)

        self.run_count(cursor, today, msg)

        self.run_check(cursor, today, msg)

        connection.close()

        main_phones = self.dboption.get_main_man_by_role("admin")
        phones = set()
        for main_phone in main_phones:
            phones.add(main_phone['user_phone'])
        if not phones or len(phones) == 0:
            print("没有配置短信发送phone")
            return
        content = today + " 运行日志信息:\n" + str(",\n".join(msg))
        response = self.smsUtil.send(",".join(phones), content)
        print(response)
Ejemplo n.º 17
0
    def __init__(self, interval=1):
        """Build the performance handler (Python 2): read application,
        benchmark and interference configs, set up the container manager,
        load the workload trace, connect to the DB, and start the
        background run() thread."""

        logging.info('initializing performance handler ...')
        self.interval = interval
        self.queue = Queue.Queue()
        self.loadcount = 0

        logging.info('Reading application config ...')
        conf = os.path.join(os.path.dirname(__file__),
                            './config/application.conf')
        config = ConfigParser.ConfigParser()
        config.read(conf)

        # DB connection settings.
        host = config.get('DB', 'HOST')
        port = config.get('DB', 'PORT')
        user = config.get('DB', 'USER')
        password = config.get('DB', 'PASSWORD')
        readdb = config.get('DB', 'READDB')
        writedb = config.get('DB', 'WRITEDB')
        # Sampling/aggregation timing parameters.
        self.loopcount = int(
            config.get('PROCESSING', 'SAMPLE_LOOP_COUNT_IN_SEC'))
        self.waittime = int(config.get('PROCESSING', 'SAMPLE_WAIT_TIME_IN_MS'))
        self.aggregationtime = int(
            config.get('PROCESSING', 'SAMPLE_AGGREGATION_TIME_IN_SEC'))
        self.rampuptime = int(config.get('PROCESSING', 'RAMP_UP_TIME'))
        containertype = config.get('PROCESSING', 'CONTAINER_TYPE')
        action_model = config.get('PROCESSING', 'ACTION_MODEL')

        logging.info('Reading benchmark config ...')
        benchconf = os.path.join(os.path.dirname(__file__),
                                 './config/benchmark.conf')
        self.activebench = create_benchmark(benchconf, containertype)

        conf = os.path.join(os.path.dirname(__file__),
                            './config/interference.info')
        interference_config = ConfigParser.ConfigParser()
        interference_config.read(conf)

        containermanager_port = config.get('CONTAINER_MANAGER', 'PORT')
        containermanager_ip = config.get('CONTAINER_MANAGER', 'HOST_NAME')
        managertype = config.get('CONTAINER_MANAGER', 'TYPE')
        self.container_manager = create_container_manager(
            managertype, containermanager_ip, containermanager_port)

        # Optional background interference workload.
        dointerference = config.get('PROCESSING',
                                    'PERFORM_INTERFERENCE') == 'true'
        if dointerference:
            interference_wkload_file = os.path.join(os.path.dirname(__file__),
                                                    './config/int_load.csv')
            interference = {}
            for key, value in interference_config.items('INTERFERENCE'):
                interference[key.upper()] = value
            self.loadInterferenceData(interference_wkload_file, interference)

        # Scheduled (interval -> (cores, mem)) resource changes.
        # NOTE(review): this loop rebinds `interval`, clobbering the
        # constructor argument stored above — confirm that is intended.
        self.reschange = []
        for key, value in interference_config.items('RESOURCE_ALLOC'):
            interval = int(key)
            res = value.split(',')
            core = int(res[0])
            mem = int(res[1])
            self.reschange.append((int(key), (core, mem)))

        details = self.activebench.getTargetDetails()
        self.targethost = details[0]
        self.targetcontainer = details[2]
        loadfile = details[5]
        loadfile = os.path.join(os.path.dirname(__file__), loadfile)
        with open(loadfile, 'rb') as f:
            reader = csv.reader(f)
            self.load = list(reader)
        self.perfaction = PerfAction(self.container_manager, details[1],
                                     details[2], details[3], details[4],
                                     action_model)
        print 'cores: ' + str(self.perfaction.getCurrentCoreCount())

        # Last-sample timestamps per perf server.
        self.perftime_map = {}
        for server in self.activebench.getPerfServers():
            self.perftime_map[server] = 0

        self.dbutil = DBUtil(host, int(port), user, password, readdb, writedb)

        thread = threading.Thread(target=self.run, args=())
        logging.info('Starting handler thread for benchmark: ' + ' ' + ' ..')
        thread.daemon = True  # Daemonize thread
        thread.start()  # Start the execution
Ejemplo n.º 18
0
# CLI argument definitions for the insert/delete/query/update operations.
parser.add_argument('-whereValue', help = 'used in update')

parser.add_argument('-column', help = 'used in delete arg')
parser.add_argument('-value', help = 'used in delete arg')

parser.add_argument('-columnList', help = 'used in query arg (use wildcard in quotes)', nargs = '*');

parser.add_argument('-name', help = 'used in insert arg')
parser.add_argument('-age', type = int, help = 'used in insert arg')
parser.add_argument('-club', help = 'used in insert arg')

args = parser.parse_args()

# NOTE(review): `db` is only bound when -path is given; every operation
# below raises NameError without it — confirm -path is effectively required.
if args.path:
	p = args.path
	db = DBUtil(p)

if args.insert:
	db.insert(args.name, args.age, args.club)

if args.delete:
	db.delete(args.column, args.value)

if args.query:
	print(args.columnList)
	for i in db.query(args.columnList):
		print(i)

if args.update:
	db.update(args.setColumn, args.setValue, args.whereColumn, args.whereValue)
Ejemplo n.º 19
0
 def get(self):
     # Respond with all users serialized as a JSON string.
     # NOTE(review): db is never closed here — confirm DBUtil cleans up.
     db = DBUtil('Info.db', 'information')
     data = json.dumps(db.getUsers())
     self.write(data)
Ejemplo n.º 20
0
 def __init__(self):
     # Wire up config, DB, DB-option and SMS helper instances.
     self.config = ConfigUtil()
     self.dbUtil = DBUtil()
     self.dboption = DBOption()
     self.smsUtil = SMSUtil()
Ejemplo n.º 21
0
 def setUp(self):
     # Before each test: connect to the local ops_db as root.
     print('当前用例执行前初始化...')
     self.dbutil = DBUtil('localhost','root','','ops_db')
Ejemplo n.º 22
0
job_name,start_time,dep_job,start_time
"""


def get_dependency(cursor, job_name, dep_jobs):
    """Recursively collect every transitive dependency of *job_name*.

    Parameters:
        cursor: DB-API cursor producing dict rows (e.g. DictCursor).
        job_name: job whose dependency closure is being resolved.
        dep_jobs: set accumulating dependency job names; mutated in place.

    Returns the same *dep_jobs* set for convenience.

    Fix: previously every dependency was re-queried even if already seen,
    which did redundant work and, on a cyclic dependency graph, recursed
    forever. Already-visited jobs are now skipped.
    """
    dep_sql = "select job_name,dependency_job from t_etl_job_dependency where job_name = %s"
    cursor.execute(dep_sql, (job_name,))
    for row in cursor.fetchall():
        dep_name = row["dependency_job"]
        if dep_name not in dep_jobs:
            dep_jobs.add(dep_name)
            get_dependency(cursor, dep_name, dep_jobs)
    return dep_jobs

if __name__ == '__main__':

    # Open a DB connection with dict-style rows so columns can be accessed
    # by name below.
    dbUtil = DBUtil()
    connection = dbUtil.get_connection()
    cursor = connection.cursor(MySQLdb.cursors.DictCursor)

    # Restrict the scan to jobs whose last run was today.
    today = DateUtil.get_now_fmt()
    job_sql = "select job_name,last_start_time,last_end_time from t_etl_job where 1=1 "

    cursor.execute(job_sql + " and last_run_date=%s", (today,))
    jobs = cursor.fetchall()

    # Summary counters; incremented further down (continuation not visible
    # in this chunk).
    count = 0
    failed = 0
    error = 0
    for job in jobs:
        job_name = job["job_name"]
        # NOTE(review): assumes last_start_time is stored as a
        # 'YYYY-MM-DD HH:MM:SS' string — confirm against the table schema.
        job_start_time = datetime.datetime.strptime(job["last_start_time"], "%Y-%m-%d %H:%M:%S")
Ejemplo n.º 23
0
class PerfHandler(object):
    def loadInterferenceData(self, interference_wkload_file, interference):
        """Parse the interference workload CSV and launch the replay thread.

        The file's first line is a header and is skipped; each remaining
        line contributes an {'end_interval', 'count'} record (columns 0 and
        1). The parsed schedule is handed to run_interference on a
        background thread together with the container manager, the
        interference task map and this handler.
        """
        schedule = []
        with open(interference_wkload_file, 'rb') as infile:
            infile.readline()  # drop the CSV header row
            for raw_line in infile:
                fields = raw_line.split(',')
                schedule.append({
                    'end_interval': int(fields[0]),
                    'count': int(fields[1]),
                })
        worker = threading.Thread(target=run_interference,
                                  args=(schedule, self.container_manager,
                                        interference, self))
        worker.start()

    def __init__(self, interval=1):
        """Bootstrap the handler: read configs, build the benchmark and
        container manager, optionally start interference replay, and spawn
        the queue-processing worker thread.

        interval: stored on the instance; presumably a sampling interval in
        seconds — not used within this chunk, confirm elsewhere.
        """

        logging.info('initializing performance handler ...')
        self.interval = interval
        # Samples are produced by addData() and drained by the run() thread.
        self.queue = Queue.Queue()
        self.loadcount = 0

        logging.info('Reading application config ...')
        conf = os.path.join(os.path.dirname(__file__),
                            './config/application.conf')
        config = ConfigParser.ConfigParser()
        config.read(conf)

        # DB connection settings; separate read and write databases.
        host = config.get('DB', 'HOST')
        port = config.get('DB', 'PORT')
        user = config.get('DB', 'USER')
        password = config.get('DB', 'PASSWORD')
        readdb = config.get('DB', 'READDB')
        writedb = config.get('DB', 'WRITEDB')
        # Sampling knobs consumed by ensureDataSetWithinTimeBounds()/run().
        self.loopcount = int(
            config.get('PROCESSING', 'SAMPLE_LOOP_COUNT_IN_SEC'))
        self.waittime = int(config.get('PROCESSING', 'SAMPLE_WAIT_TIME_IN_MS'))
        self.aggregationtime = int(
            config.get('PROCESSING', 'SAMPLE_AGGREGATION_TIME_IN_SEC'))
        self.rampuptime = int(config.get('PROCESSING', 'RAMP_UP_TIME'))
        containertype = config.get('PROCESSING', 'CONTAINER_TYPE')
        action_model = config.get('PROCESSING', 'ACTION_MODEL')

        logging.info('Reading benchmark config ...')
        benchconf = os.path.join(os.path.dirname(__file__),
                                 './config/benchmark.conf')
        self.activebench = create_benchmark(benchconf, containertype)

        conf = os.path.join(os.path.dirname(__file__),
                            './config/interference.info')
        interference_config = ConfigParser.ConfigParser()
        interference_config.read(conf)

        containermanager_port = config.get('CONTAINER_MANAGER', 'PORT')
        containermanager_ip = config.get('CONTAINER_MANAGER', 'HOST_NAME')
        managertype = config.get('CONTAINER_MANAGER', 'TYPE')
        self.container_manager = create_container_manager(
            managertype, containermanager_ip, containermanager_port)

        # Optionally replay an interference workload alongside the benchmark.
        dointerference = config.get('PROCESSING',
                                    'PERFORM_INTERFERENCE') == 'true'
        if dointerference:
            interference_wkload_file = os.path.join(os.path.dirname(__file__),
                                                    './config/int_load.csv')
            interference = {}
            for key, value in interference_config.items('INTERFERENCE'):
                interference[key.upper()] = value
            self.loadInterferenceData(interference_wkload_file, interference)

        # Scheduled (interval -> (cores, mem)) resource reallocations,
        # applied later by checkAndPerformConfigChange().
        # NOTE(review): the loop variable below rebinds the local name
        # `interval`, shadowing the constructor parameter.
        self.reschange = []
        for key, value in interference_config.items('RESOURCE_ALLOC'):
            interval = int(key)
            res = value.split(',')
            core = int(res[0])
            mem = int(res[1])
            self.reschange.append((int(key), (core, mem)))

        details = self.activebench.getTargetDetails()
        self.targethost = details[0]
        self.targetcontainer = details[2]
        # details[5] is the load schedule CSV, read into self.load as rows.
        loadfile = details[5]
        loadfile = os.path.join(os.path.dirname(__file__), loadfile)
        with open(loadfile, 'rb') as f:
            reader = csv.reader(f)
            self.load = list(reader)
        self.perfaction = PerfAction(self.container_manager, details[1],
                                     details[2], details[3], details[4],
                                     action_model)
        print 'cores: ' + str(self.perfaction.getCurrentCoreCount())

        # Last seen perf-record timestamp per server (0 = none yet),
        # updated in ensureDataSetWithinTimeBounds().
        self.perftime_map = {}
        for server in self.activebench.getPerfServers():
            self.perftime_map[server] = 0

        self.dbutil = DBUtil(host, int(port), user, password, readdb, writedb)

        # Daemon worker drains self.queue for the lifetime of the process.
        thread = threading.Thread(target=self.run, args=())
        logging.info('Starting handler thread for benchmark: ' + ' ' + ' ..')
        thread.daemon = True  # Daemonize thread
        thread.start()  # Start the execution

    # adds the benchmark input to the processing queue
    def addData(self, perfdata):
        # Producer side: the run() worker thread consumes these samples.
        self.queue.put(perfdata)

    def evaluatePredictionModel(self):
        # Delegate model evaluation to PerfAction and return its result.
        return self.perfaction.evalModel()

    def plotChart(self):
        # Delegate result plotting to PerfAction.
        self.perfaction.plotResults()

    def exportChartData(self):
        # Delegate result export to PerfAction.
        self.perfaction.exportResults()

    def finishAndResetExperiment(self):
        """Evaluate and export the current model results, then replace the
        PerfAction with a fresh instance for the next run.
        """
        self.perfaction.evalModel()
        self.perfaction.exportResults()
        details = self.activebench.getTargetDetails()
        # NOTE(review): unlike __init__, this call omits the trailing
        # action_model argument — confirm PerfAction defaults it, otherwise
        # this raises a TypeError when invoked. The value is a local of
        # __init__ and is not stored on self.
        self.perfaction = PerfAction(self.container_manager, details[1],
                                     details[2], details[3], details[4])

    # waits till the results have arrived for the analysis duration
    def ensureDataSetWithinTimeBounds(self, benchmarktimestamp):
        """Poll the read DB until every tracked perf server has reported a
        record close enough to *benchmarktimestamp*, or give up after
        self.loopcount one-second polls.

        benchmarktimestamp: benchmark-side timestamp the server data must
        catch up to; compared against result['time'] / 1000000 — presumably
        both in milliseconds, confirm against the DB schema.
        """
        loop = self.loopcount
        while (loop > 0):

            items = self.dbutil.getLastRecordForHosts()
            istimeinbounds = True
            for key, gen in items:
                hostname = key[1]['host']
                # this may return any old saved server time diff
                # Only hosts registered in __init__'s perftime_map count.
                if self.perftime_map.get(hostname) is not None:
                    result = gen.next()  # Python 2 generator protocol
                    servertimestamp = result['time'] / 1000000
                    self.perftime_map[hostname] = result['time']
                    if benchmarktimestamp - servertimestamp > self.waittime:
                        logging.warning(
                            'time not within bounds, differ by {0} ms '.format(
                                (benchmarktimestamp - servertimestamp)))
                        istimeinbounds = False

            # All hosts in bounds: data is fresh enough to process.
            if istimeinbounds:
                return
            time.sleep(1)
            loop -= 1

    # aggregates data for latency and throughput
    def formatDataForModelProcessing(self, formatted_data):
        """Aggregate per-slot response/throughput samples into scalars.

        formatted_data carries parallel lists under 'resp', 'resp90' and
        'thru'; a '-' entry in resp/resp90 marks a slot without a latency
        sample. Returns (throughput, latency, weighted_latency, latency90),
        where the three latency values are -1.0 when no slot had a sample
        and weighted_latency is 0.0 when throughput sums to zero.
        """
        resp_list = formatted_data['resp']
        resp90_list = formatted_data['resp90']
        thru_list = formatted_data['thru']

        throughput = sum(thru_list)
        total_resp = 0.0
        total_resp90 = 0.0
        total_weighted = 0.0
        samples = 0
        for slot, resp_val in enumerate(resp_list):
            resp90_val = resp90_list[slot]
            if resp_val == '-' or resp90_val == '-':
                continue
            total_resp += resp_val
            # bad approximation: averaging per-slot p90s is not a true p90
            total_resp90 += resp90_val
            total_weighted += resp_val * thru_list[slot]
            samples += 1

        if samples == 0:
            return (throughput, -1.0, -1.0, -1.0)

        latency = round(total_resp / float(samples), 3)
        latency90 = round(total_resp90 / float(samples), 3)
        if throughput == 0:
            weighted_latency = 0.0
        else:
            weighted_latency = round(total_weighted / throughput, 3)
        return (throughput, latency, weighted_latency, latency90)

    def getUserCount(self, interval):
        """Return the user count in effect at *interval*.

        self.load holds (interval, users) CSV rows, assumed sorted by
        interval; the last row whose interval is <= the requested one wins.
        Returns 0.0 when the schedule starts after *interval*.
        """
        current = 0.0
        for schedule_row in self.load:
            if int(schedule_row[0]) > interval:
                break
            current = int(schedule_row[1])
        return current

    def checkAndPerformConfigChange(self, interval, last_reschange):
        for reschange_interval, resource in self.reschange:
            if interval >= reschange_interval and last_reschange < reschange_interval:
                print 'updating core count to ' + str(resource[0])
                self.container_manager.updateResources(self.targethost,
                                                       self.targetcontainer,
                                                       resource)
                last_reschange = reschange_interval
                return reschange_interval
        return last_reschange

    # run thread processes the queue
    def run(self):
        """Worker loop: drain benchmark samples from self.queue, join them
        with server-side perf records, feed the prediction model and persist
        the combined record. Runs forever on the daemon thread started in
        __init__; exceptions are logged and the loop continues.
        """
        corecount = 0
        last_interference = 0
        last_reschange = 0
        while True:
            try:
                # Blocks until the benchmark posts a sample via addData().
                data = self.queue.get()
                benchmarktimestamp = data['timestamp']
                benchname = data.get('name')
                #interference_tasks = data.get('interference')
                #interference_tasks_str = ';' + ';'.join(interference_tasks)
                run_grp = data.get('run_grp')
                #print run_grp
                result = None

                # timestamp start
                starttime = datetime.datetime.now()
                # Bounded wait until server perf data catches up with the
                # benchmark timestamp.
                self.ensureDataSetWithinTimeBounds(benchmarktimestamp)
                #timestamp processing after waiting
                intermediatetime = datetime.datetime.now()
                logging.info(
                    'waited for {0}s for the times to sync before processing '.
                    format((intermediatetime - starttime).total_seconds()))

                # query the database to get all records
                serverperf_data = self.activebench.getServerData(
                    self.dbutil.getReadClient(), self.perftime_map,
                    benchmarktimestamp - (self.aggregationtime * 1000))
                # aggregate and format the records for benchmark specific content
                #print serverperf_data
                formatted_data = self.activebench.formatData(
                    data, serverperf_data)
                logging.debug(formatted_data)
                runid = formatted_data['run_id']
                lastrunid = self.activebench.getRunId()

                # A changed run id marks the start of a new experiment run.
                if (lastrunid is not None) and (lastrunid != runid):
                    logging.info(
                        'This is a new experiment run, finish {0}'.format(
                            self.activebench.getRunId()))
                    #self.finishAndResetExperiment()

                interval = formatted_data['interval']
                users = float(self.getUserCount(interval))
                # Discard samples from the warm-up window.
                if interval <= self.rampuptime:
                    logging.info('ignore data set as still in warmup')
                    continue

                # Apply any scheduled resource reallocation for this interval.
                last_reschange = self.checkAndPerformConfigChange(
                    interval, last_reschange)
                corecount = self.container_manager.getCurrentCoreCount(
                    self.targethost, self.targetcontainer)

                # (throughput, latency, weighted_latency, latency90)
                perfvalues = self.formatDataForModelProcessing(formatted_data)
                throughput = perfvalues[0]
                latency = perfvalues[1]
                weighted_latency = perfvalues[2]
                latency90 = perfvalues[3]
                utilization = self.activebench.getUtilizationStats(
                    formatted_data,
                    (self.targethost, self.perfaction.getCurrentCoreCount()))
                logging.debug('utilization {0}'.format(utilization))
                if utilization is None:
                    logging.warning('Utilization vector is empty, ignore ..')
                    continue
                utilization_keys = utilization[0]
                #print str(utilization_keys)
                utilization_stats = utilization[1]
                dataprocessed = False

                # -1.0 means no valid latency samples were aggregated.
                if weighted_latency != -1.0:

                    # invoke model processing
                    logging.info('Invoking prediction model at interval: ' +
                                 str(interval))
                    #result = self.perfaction.invokeModel(users, latency90, weighted_latency, utilization_stats)
                    result = self.perfaction.invokeModel(
                        throughput, latency, weighted_latency,
                        utilization_stats)
                    corecount = result[0]
                    dataprocessed = True
                    logging.info(
                        'current core count after model processing: {0}'.
                        format(corecount))

                else:
                    logging.info('no latency to process, ignore')

                #timestamp processing at the end
                endtime = datetime.datetime.now()
                totaldiff = (endtime - starttime).total_seconds() * 1000
                processingdiff = (endtime -
                                  intermediatetime).total_seconds() * 1000
                logging.info(
                    ' processing time with wait: {0}ms and without wait: {1}ms. '
                    .format(totaldiff, processingdiff))

                # Flat record persisted for this sample.
                perfstoragedata = {}
                #perfstoragedata['interference'] = interference_tasks_str
                perfstoragedata['total_dur_ms'] = totaldiff
                perfstoragedata['process_dur_ms'] = processingdiff
                perfstoragedata['benchtime_ms'] = benchmarktimestamp
                perfstoragedata['cores'] = corecount
                perfstoragedata['load_count'] = self.loadcount
                perfstoragedata[
                    'cp_count'] = self.container_manager.getCheckpointedCount(
                    )

                perfstoragedata['users'] = users
                #perfstoragedata['users'] = formatted_data['requests']
                # changed the throughput data in processing, so swap for actual storage
                #perfstoragedata['users'] = throughput
                perfstoragedata['interference_change'] = last_interference
                perfstoragedata['interval'] = formatted_data['interval']
                # changed the throughput data in processing, so swap for actual storage
                perfstoragedata['throughput'] = formatted_data['thru'][0]
                if formatted_data.get('kernel') is None:
                    perfstoragedata['kernel'] = 'base'
                    # NOTE(review): this immediately overwrites 'kernel' set
                    # on the previous line; the else-branch sets 'intensity'
                    # here, so the key below was likely meant to be
                    # 'intensity' — confirm before changing.
                    perfstoragedata['kernel'] = '0.001'
                else:
                    perfstoragedata['kernel'] = formatted_data['kernel']
                    perfstoragedata['intensity'] = formatted_data['intensity']
                perfstoragedata['latency'] = latency
                perfstoragedata['latency90'] = latency90
                perfstoragedata['weighted_latency'] = weighted_latency
                for i in range(0, len(utilization_keys)):
                    perfstoragedata[utilization_keys[i]] = utilization_stats[i]

                if benchname is None:
                    benchname = self.activebench.getName()

                # Persist the raw (non-aggregated) record.
                self.dbutil.saveBenchmarkRecord(runid,
                                                run_grp,
                                                dataprocessed,
                                                perfstoragedata,
                                                benchname=benchname,
                                                resp=formatted_data['resp'],
                                                thru=formatted_data['thru'])
                #ops=formatted_data['operations'], resp=formatted_data['resp'], thru=formatted_data['thru'])

                # When the model returned smoothed values, store a second
                # record flagged isagg=True with those values swapped in.
                if result is not None and len(result) > 1:
                    perfstoragedata['throughput'] = result[3]
                    perfstoragedata['latency'] = result[1]
                    #perfstoragedata['latency90'] = result[4]
                    perfstoragedata['weighted_latency'] = result[2]
                    #for i in range(0, 4):
                    #for i in range(0, len(utilization_keys)):
                    #	perfstoragedata[utilization_keys[i]] = result[4][i]

                    # saving only 1 metrics used in prediction as of now
                    perfstoragedata[utilization_keys[4]] = result[4][0]
                    #perfstoragedata[utilization_keys[2]] = result[4][1]
                    self.dbutil.saveBenchmarkRecord(runid,
                                                    run_grp,
                                                    dataprocessed,
                                                    perfstoragedata,
                                                    benchname=benchname,
                                                    isagg=True)

                self.activebench.setRunId(runid)
            except Exception, e:
                # Python 2 except syntax; log the failure and keep the
                # worker thread alive for the next sample.
                logging.error(traceback.print_exc())
                logging.error(e)
                print e