def main(config_path="cfbench.json"):
    """Run one CFBench pass driven by a JSON config file.

    Loads the config, builds the inter-arrival-time (IAT) generator and the
    request generator from the factory names it declares, runs the generator,
    and records a summary row in the ``analysis`` MySQL table.
    """
    import os
    isSuccess, config_objs = read_config(os.path.join(os.getcwd(), config_path))
    if not isSuccess:
        print("read json %s except error" % (config_path))
        return

    db_conn, mcur = mysql_utils.connMySQL(host='10.65.7.151')
    mcur.execute('select max(respid) from responses')
    row = mcur.fetchone()
    # MAX() on an empty table yields a single NULL column; treat both a
    # missing row and a NULL value as "no previous responses".
    last_resp_id = row[0] if row is not None and row[0] is not None else 0

    iat_module, iat_name = configuration(config_objs['request_of_iat']['name'],
                                         config_objs['request_of_iat']['conf_tag'])
    mIATGenerator = getattr(iat_module, iat_name)(**config_objs['request_of_iat']['kwargs'])

    # Assemble the request-generator constructor kwargs from several
    # config sections; later update() calls win on key collisions.
    kwargs_context = {}
    FS_MODULE, FS_CLASS_NAME = configuration(config_objs['file_system']['name'],
                                             config_objs['file_system']['conf_tag'])
    kwargs_context.update({'fs_module': FS_MODULE, 'fs_class_name': FS_CLASS_NAME})
    kwargs_context.update(config_objs['request_generator']['kwargs'])
    kwargs_context.update({'IATGenerator': mIATGenerator})
    kwargs_context.update({'interval_get_number_of_active_thread':
                           config_objs['interval_get_number_of_active_thread']})
    kwargs_context.update(config_objs['tenant'])

    req_module, req_name = configuration(config_objs['request_generator']['name'],
                                         config_objs['request_generator']['conf_tag'])
    mRequestGenerator = getattr(req_module, req_name)(**kwargs_context)

    btime = time.time()
    mRequestGenerator.runGenerator()
    endtime = time.time()

    tenant = config_objs['tenant']
    gen_kwargs = config_objs['request_generator']['kwargs']
    # Parameterized query: let the DB driver escape every value instead of
    # splicing them into the SQL string with %-interpolation (the original
    # was vulnerable to quoting/injection bugs).
    mcur.execute(
        'insert into analysis(tid,disruptive,sleep_for_excess,quota,'
        'threads_over_quota,total_generate_time,requests_per_make,'
        'threads_of_pool,run_time,begintime,endtime,last_resp_id) '
        'values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
        (tenant['tenantid'], str(tenant['disruptive']),
         tenant['sleep_for_excess'], tenant['tenant_quota'],
         tenant['threads_over_quota'], gen_kwargs['total_generate_time'],
         gen_kwargs['request_per_make'], gen_kwargs['threads_of_pool'],
         str(endtime - btime), str(btime), str(endtime), last_resp_id))
    db_conn.commit()
    mysql_utils.closeConn(db_conn, mcur)
    return
def init():
    """Initialise module globals for the VM data collector.

    Reads ``node.conf``, sets up a rotating-file logger, and resolves the
    VM identifier (cloud metadata first, node name as fallback).
    Mutates globals: prometh_server, node_name, logger, vm_id.
    """
    global prometh_server
    global node_name
    global logger
    global vm_id
    # read configuration
    conf = configuration("node.conf")
    prometh_server = conf.ConfigSectionMap("Prometheus")['server_url']
    node_name = conf.ConfigSectionMap("vm_node")['node_name']
    logger = logging.getLogger('dataCollector')
    hdlr = RotatingFileHandler('dataCollector.log', maxBytes=10000, backupCount=1)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # The original set WARNING and then immediately INFO; only the final
    # level ever takes effect, so keep just that call.
    logger.setLevel(logging.INFO)
    vm_id = getMetaData()
    if vm_id is None:
        # No cloud metadata available — fall back to the configured name.
        vm_id = node_name
    print(vm_id)
    logger.info('VM Data Collector')
    logger.info('Promth P/W Server ' + prometh_server)
    logger.info('Monitoring Node ' + node_name)
def init():
    """Initialise module globals for the SP data collector.

    Reads ``/opt/Monitoring/node.conf`` (env vars NODE_NAME / PROM_SRV
    override the file), sets up a rotating-file logger, and builds the
    node identity from the VM UUID.
    Mutates globals: prometh_server, node_name, logger, vm_id.
    """
    global prometh_server
    global node_name
    global logger
    global vm_id
    # read configuration
    conf = configuration("/opt/Monitoring/node.conf")
    vm_section = conf.ConfigSectionMap("vm_node")
    # BUG FIX: the original used hasattr(dict, 'cadvisor'), which tests for
    # an *attribute* on the dict object and is always False for a key —
    # membership is the correct test. NOTE(review): both values are bound
    # to locals and never used; confirm whether they should be globals.
    if 'cadvisor' in vm_section:
        cadvisor = vm_section['cadvisor']
    node_name = os.getenv('NODE_NAME', vm_section['node_name'])
    prometh_server = os.getenv('PROM_SRV',
                               conf.ConfigSectionMap("Prometheus")['server_url'])
    if 'node_exporter' in vm_section:
        node_exporter = vm_section['node_exporter']
    logger = logging.getLogger('dataCollector')
    hdlr = RotatingFileHandler('dataCollector.log', maxBytes=10000, backupCount=1)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # Only the final setLevel call takes effect; drop the shadowed WARNING.
    logger.setLevel(logging.INFO)
    vm_id = getUUID()
    if vm_id is None:
        vm_id = node_name
    node_name += ":" + vm_id
    print(vm_id)
    logger.info('SP Data Collector')
    logger.info('Promth Server ' + prometh_server)
    logger.info('Monitoring Node ' + node_name)
def init():
    """Initialise module globals for the OpenDaylight data collector.

    Reads ``odc.conf``; the ODL_SRV / PROM_SRV / NODE_NAME / USR_CRED
    environment variables override the file values.
    Mutates globals: prometh_server, odl_server, node_name, user, logger.
    """
    global prometh_server
    global odl_server
    global node_name
    global user
    global logger
    # read configuration
    conf = configuration("odc.conf")
    odl_server = os.getenv('ODL_SRV', conf.ConfigSectionMap("vm_node")['odl_url'])
    prometh_server = os.getenv('PROM_SRV',
                               conf.ConfigSectionMap("Prometheus")['server_url'])
    # NOTE(review): node_name and user are read from the "Prometheus"
    # section here, while older variants read them from "ODL_server" —
    # confirm against the deployed odc.conf which section is correct.
    node_name = os.getenv('NODE_NAME', conf.ConfigSectionMap("Prometheus")['node_name'])
    user = os.getenv('USR_CRED', conf.ConfigSectionMap("Prometheus")['user'])
    logger = logging.getLogger('dataCollector')
    hdlr = logging.FileHandler('dataCollector.log', mode='w')
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # Only the final setLevel call takes effect; drop the shadowed WARNING.
    logger.setLevel(logging.INFO)
    logger.info('OpenDayLight Data Collector')
    logger.info('ODL Server ' + odl_server)
    logger.info('Promth Server ' + prometh_server)
    logger.info('Monitoring Node ' + node_name)
def init():
    """Initialise module globals for the SP data collector (multi-server).

    Reads ``/opt/Monitoring/node.conf`` (NODE_NAME / PROM_SRV env vars
    override), normalises prometh_server to a list, and builds the node
    identity from the VM UUID.
    Mutates globals: prometh_server, node_name, interval, logger, vm_id.
    """
    global prometh_server
    global node_name
    global interval
    global logger
    global vm_id
    # read configuration
    conf = configuration("/opt/Monitoring/node.conf")
    node_name = os.getenv('NODE_NAME', conf.ConfigSectionMap("vm_node")['node_name'])
    prometh_server = os.getenv('PROM_SRV',
                               conf.ConfigSectionMap("Prometheus")['server_url'])
    # The original pre-set interval=3 and then unconditionally overwrote it
    # from the config file; the dead initial assignment is dropped.
    # NOTE(review): the value stays a string (it is concatenated into the
    # log line below) — confirm callers expecting a number convert it.
    interval = conf.ConfigSectionMap("vm_node")['post_freq']
    # PROM_SRV may be a JSON list of servers or a single URL string;
    # normalise to a list either way.
    if is_json(prometh_server):
        prometh_server = json.loads(prometh_server)
    else:
        prometh_server = [prometh_server]
    logger = logging.getLogger('dataCollector')
    hdlr = RotatingFileHandler('dataCollector.log', maxBytes=10000, backupCount=1)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # Only the final setLevel call takes effect; drop the shadowed WARNING.
    logger.setLevel(logging.INFO)
    vm_id = getUUID()
    if vm_id is None:
        vm_id = node_name
    node_name += ":" + vm_id
    print(vm_id)
    logger.info('SP Data Collector')
    logger.info('Promth P/W Server ' + json.dumps(prometh_server))
    logger.info('Monitoring Node ' + node_name)
    logger.info('Monitoring time interval ' + interval)
def do_something():
    """Print the object listing of the account's bucket.

    Resolves credentials and target from configuration, builds an S3
    client, and — only when exactly one bucket is visible — prints the
    contents of that bucket.
    """
    settings, token, access_key, secret_key = configuration()
    target = _build_target(settings, token)
    buckets_info = request(target, token)
    client = _s3_client(buckets_info, access_key, secret_key)

    buckets = client.list_buckets().get('Buckets')
    # Anything other than a single visible bucket is ignored.
    if len(buckets) != 1:
        return
    only_bucket = buckets[0]
    listing = client.list_objects_v2(Bucket=only_bucket.get('Name'))
    print(listing.get('Contents'))
def init():
    """Initialise module globals for the SP data collector (multi-server).

    Reads ``/opt/Monitoring/node.conf`` (NODE_NAME / PROM_SRV env vars
    override), normalises prometh_server to a list, and builds the node
    identity from the VM UUID.
    Mutates globals: prometh_server, node_name, interval, logger, vm_id.
    """
    global prometh_server
    global node_name
    global interval
    global logger
    global vm_id
    # read configuration
    conf = configuration("/opt/Monitoring/node.conf")
    node_name = os.getenv('NODE_NAME', conf.ConfigSectionMap("vm_node")['node_name'])
    prometh_server = os.getenv(
        'PROM_SRV', conf.ConfigSectionMap("Prometheus")['server_url'])
    # The original pre-set interval=3 and then unconditionally overwrote it
    # from the config file; the dead initial assignment is dropped.
    interval = conf.ConfigSectionMap("vm_node")['post_freq']
    # PROM_SRV may be a JSON list of servers or a single URL string;
    # normalise to a list either way.
    if is_json(prometh_server):
        prometh_server = json.loads(prometh_server)
    else:
        prometh_server = [prometh_server]
    logger = logging.getLogger('dataCollector')
    hdlr = RotatingFileHandler('dataCollector.log', maxBytes=10000, backupCount=1)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # Only the final setLevel call takes effect; drop the shadowed WARNING.
    logger.setLevel(logging.INFO)
    vm_id = getUUID()
    if vm_id is None:
        vm_id = node_name
    node_name += ":" + vm_id
    print(vm_id)
    logger.info('SP Data Collector')
    logger.info('Promth P/W Server ' + json.dumps(prometh_server))
    logger.info('Monitoring Node ' + node_name)
    logger.info('Monitoring time interval ' + interval)
def init():
    """Initialise module globals for the SP data collector.

    Reads ``/opt/Monitoring/node.conf`` (NODE_NAME / PROM_SRV env vars
    override), sets up a rotating-file logger, and builds the node
    identity from the VM UUID.
    Mutates globals: prometh_server, node_name, logger, vm_id.
    """
    global prometh_server
    global node_name
    global logger
    global vm_id
    # read configuration
    conf = configuration("/opt/Monitoring/node.conf")
    vm_section = conf.ConfigSectionMap("vm_node")
    # BUG FIX: hasattr(dict, 'cadvisor') tests for an attribute on the
    # dict object and is always False for a key — membership is the
    # correct test. NOTE(review): both values are bound to locals and
    # never used; confirm whether they should be globals.
    if 'cadvisor' in vm_section:
        cadvisor = vm_section['cadvisor']
    node_name = os.getenv('NODE_NAME', vm_section['node_name'])
    prometh_server = os.getenv(
        'PROM_SRV', conf.ConfigSectionMap("Prometheus")['server_url'])
    if 'node_exporter' in vm_section:
        node_exporter = vm_section['node_exporter']
    logger = logging.getLogger('dataCollector')
    hdlr = RotatingFileHandler('dataCollector.log', maxBytes=10000, backupCount=1)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # Only the final setLevel call takes effect; drop the shadowed WARNING.
    logger.setLevel(logging.INFO)
    vm_id = getUUID()
    if vm_id is None:
        vm_id = node_name
    node_name += ":" + vm_id
    print(vm_id)
    logger.info('SP Data Collector')
    logger.info('Promth Server ' + prometh_server)
    logger.info('Monitoring Node ' + node_name)
def init():
    """Initialise module globals for the Openstack data collector.

    Reads ``odc.conf`` ("Openstack" section) and sets up a file logger.
    Mutates globals: keystone_url, node_name, tenants, logger, controller_ip.
    """
    global keystone_url
    global node_name
    global tenants
    global logger
    global controller_ip
    conf = configuration("odc.conf")
    openstack = conf.ConfigSectionMap("Openstack")
    keystone_url = openstack['keystone_url']
    node_name = openstack['node_name']
    # tenants is stored as a JSON document in the config file.
    tenants = json.loads(openstack['tenants'])
    controller_ip = openstack['controller_ip']
    logger = logging.getLogger('dataCollector')
    hdlr = logging.FileHandler('dataCollector.log', mode='w')
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # Only the final setLevel call takes effect; drop the shadowed WARNING.
    logger.setLevel(logging.INFO)
    logger.info('Openstack Data Collector')
    logger.info('keystone_url: ' + keystone_url)
    logger.info('node_name: ' + node_name)
    logger.info('tenants: ' + json.dumps(tenants))
def init():
    """Initialise module globals for the Openstack data collector.

    Reads ``odc.conf`` ("Openstack" section) and sets up a file logger.
    Mutates globals: keystone_url, node_name, tenants, logger, controller_ip.
    """
    global keystone_url
    global node_name
    global tenants
    global logger
    global controller_ip
    conf = configuration("odc.conf")
    section = conf.ConfigSectionMap("Openstack")
    keystone_url = section['keystone_url']
    node_name = section['node_name']
    # tenants is stored as a JSON document in the config file.
    tenants = json.loads(section['tenants'])
    controller_ip = section['controller_ip']
    logger = logging.getLogger('dataCollector')
    hdlr = logging.FileHandler('dataCollector.log', mode='w')
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # Only the final setLevel call takes effect; drop the shadowed WARNING.
    logger.setLevel(logging.INFO)
    logger.info('Openstack Data Collector')
    logger.info('keystone_url: ' + keystone_url)
    logger.info('node_name: ' + node_name)
    logger.info('tenants: ' + json.dumps(tenants))
def init():
    """Initialise module globals for the node data collector.

    Reads ``node.conf`` and sets up a rotating-file logger.
    Mutates globals: prometh_server, cadvisor, node_name, node_exporter,
    logger.
    """
    global prometh_server
    global cadvisor
    global node_name
    global node_exporter
    global logger
    # read configuration
    conf = configuration("node.conf")
    vm_section = conf.ConfigSectionMap("vm_node")
    cadvisor = vm_section['cadvisor']
    prometh_server = conf.ConfigSectionMap("Prometheus")['server_url']
    node_name = vm_section['node_name']
    node_exporter = vm_section['node_exporter']
    logger = logging.getLogger('dataCollector')
    hdlr = RotatingFileHandler('dataCollector.log', maxBytes=10000, backupCount=5)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    # Only the final setLevel call takes effect; drop the shadowed WARNING.
    logger.setLevel(logging.INFO)
    logger.info('Node Data Collector')
    logger.info('Promth Server ' + prometh_server)
    logger.info('Monitoring Node ' + node_name)
from configure import configuration

if __name__ == '__main__':
    # configuration() resolves a driver/distribution name to the loaded
    # module and the class name to instantiate inside it.
    mod, mod_name = configuration('pareto', 'iat_distribution_impl')
    # BUG FIX: the original called getattr(module, mod_name) but the tuple
    # element is bound to `mod`; `module` was undefined (NameError).
    pareto = getattr(mod, mod_name)()
    print(pareto)