def main():
    """Entry point for the Discovery service.

    Loads the MongoDB connection settings, constructs the Discovery actor and
    runs its discovery loop; restores the working directory to the
    ManagementEngine folder afterwards.
    """
    os.chdir('/home/%s/RESTfulSwarm/Discovery' % utl.get_username())
    with open('../DBInfo.json') as f:
        db_info = json.load(f)
    discovery = Discovery(db_info)
    # BUG FIX: `discovery.__logger` at module scope is not name-mangled, so it
    # looks up the literal attribute `__logger` and raises AttributeError when
    # the class defines `self.__logger` (stored as `_Discovery__logger`).
    # NOTE(review): ideally Discovery would expose a public logger — confirm.
    discovery._Discovery__logger.info('Initialized Discovery block.')
    discovery.discovery()
    os.chdir('/home/%s/RESTfulSwarm/ManagementEngine' % utl.get_username())
def main(session_id):
    """Run the bursty stress client for one benchmarking session.

    Loads the arrival-rate configuration (Poisson ``lambda``) from
    ``BurstyStressClientInfo.json`` and feeds jobs tagged with *session_id*
    to the front end. Failures are reported to stdout rather than crashing
    the launcher, and the working directory is restored afterwards.
    """
    os.chdir('/home/%s/RESTfulSwarm/Client' % utl.get_username())
    try:
        json_path = 'BurstyStressClientInfo.json'
        with open(json_path, 'r') as f:
            data = json.load(f)
        client = BurstyStressClient(lambda_=data['lambda'])
        client.feed_jobs(session_id)
    except Exception:
        # Best-effort boundary: print the traceback and keep going so the
        # final chdir below still runs.
        traceback.print_exc(file=sys.stdout)
    os.chdir('/home/%s/RESTfulSwarm/ManagementEngine' % utl.get_username())
def kill_worker():
    """Detach this node from the swarm and terminate running Worker processes.

    Only acts when containers are present: leaves the swarm, force-removes
    all containers, then kills every process named ``PROCNAME`` except the
    current one.
    """
    os.chdir('/home/%s/RESTfulSwarm/Worker' % utl.get_username())
    if len(dh.list_containers(docker_client)) != 0:
        # leave swarm
        dh.leave_swarm(docker_client)
        # clean containers
        # NOTE(review): shelling out with sudo here is fragile; consider the
        # docker SDK's containers.prune/remove API instead.
        os.system('sudo docker rm -f $(docker ps -aq)')
        # kill worker processes (skip ourselves). Processes can disappear or
        # become inaccessible between listing and inspection, so guard each
        # one instead of letting a single race abort the sweep.
        for proc in psutil.process_iter():
            try:
                if proc.name() == PROCNAME and proc.pid != os.getpid():
                    proc.kill()
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                continue
def main(frequency):
    """Start a Worker node and keep it alive indefinitely.

    Reads the Global Manager and Discovery addresses from
    ``../ActorsInfo.json``, starts the worker controller, joins the swarm,
    then blocks forever.

    :param frequency: reporting/polling frequency forwarded to Worker.
    """
    os.chdir('/home/%s/RESTfulSwarm/Worker' % utl.get_username())
    with open('../ActorsInfo.json') as f:
        data = json.load(f)
    gm_address = data['GM']['address']
    worker_address = utl.get_local_address()
    dis_address = data['DC']['address']
    worker = Worker(gm_address, worker_address, dis_address, frequency)
    worker.controller()
    worker.request_join_swarm()
    # Keep the process alive without busy-waiting: the original
    # `while True: pass` spun a CPU core at 100% doing nothing.
    while True:
        time.sleep(1)
def main():
    """Bootstrap the Front End REST service.

    Connects to MongoDB, opens a C/S messenger channel to the Job Manager,
    registers the Flask/Flasgger routes, then starts the HTTP server
    (``app.run`` blocks until shutdown).
    """
    os.chdir('/home/%s/RESTfulSwarm/FrontEnd' % utl.get_username())
    # `messenger` and `db` are module-level so the route closures below and
    # other module code can reach them. (The original also declared
    # `global db_address` but never assigned it — dead declaration, removed.)
    global messenger
    global db
    with open('../ActorsInfo.json') as f:
        data = json.load(f)
    with open('../DBInfo.json') as f:
        db_info = json.load(f)
    db_client = mg.get_client(usr=db_info['user'], pwd=db_info['pwd'],
                              db_name=db_info['db_name'],
                              address=db_info['address'],
                              port=SystemConstants.MONGODB_PORT)
    db = mg.get_db(db_client, SystemConstants.MONGODB_NAME)
    jm_address = data['JM']['address']
    messenger = Messenger(messenger_type='C/S', address=jm_address,
                          port=SystemConstants.JM_PORT)
    fe_address = utl.get_local_address()
    template = {
        "swagger": "2.0",
        "info": {
            "title": "RESTfulSwarm",
            "description": "An RESTful application for Docker Swarm.",
            "contact": {
                "responsibleDeveloper": "Zhuangwei Kang",
                "email": "*****@*****.**"
            },
            "version": "0.0.1"
        },
        "host": '%s:%s' % (fe_address, SystemConstants.FE_PORT),
        "basePath": "",
        "schemes": [
            "http",
        ]
    }
    # Registration has side effects on `app`; the instance itself is unused.
    Swagger(app, template=template)

    @app.route('/RESTfulSwarm/FE/request_new_job', methods=['POST'])
    @swag_from('./Flasgger/FrontEnd.yml', validation=True)
    def request_new_job():
        # Persist the job document in its own collection, then notify the
        # Job Manager. (Reading module globals needs no `global` statement.)
        job_data = request.get_json()
        job_data.update({'submit_time': time.time()})
        col_name = job_data['job_name']
        m_col = mg.get_col(db, col_name)
        mg.insert_doc(m_col, job_data)
        messenger.send(prompt='newJob', content=col_name)
        return 'OK', 200

    @app.route('/RESTfulSwarm/FE/switch_scheduler/<new_scheduler>', methods=['GET'])
    @swag_from('./Flasgger/SwitchScheduler.yml')
    def switch_scheduler(new_scheduler):
        # Notify Job Manager to switch scheduler
        messenger.send(prompt='SwitchScheduler', content=new_scheduler)
        return 'OK', 200

    os.chdir('/home/%s/RESTfulSwarm/ManagementEngine' % utl.get_username())
    app.run(host=fe_address, port=SystemConstants.FE_PORT, debug=False)
def main():
    """Bootstrap the Global Manager REST service.

    Resets /etc/exports, connects to MongoDB, configures the Docker client,
    starts a daemon thread that periodically prunes networks of finished
    jobs, then starts the HTTP server (``app.run`` blocks until shutdown).
    """
    # clear /etc/exports to avoid duplicated nfs client
    with open('/etc/exports', 'w') as f:
        f.write('')
    os.chdir('/home/%s/RESTfulSwarm/GlobalManager' % utl.get_username())
    # Module-level handles shared with the route handlers / helpers defined
    # elsewhere in this module. (The original also declared
    # `global db_address` but never assigned it — dead declaration, removed.)
    global db_client
    global db
    global worker_col
    global worker_resource_col
    global gm_address
    global dockerClient
    gm_address = utl.get_local_address()
    template = {
        "swagger": "2.0",
        "info": {
            "title": "RESTfulSwarm",
            "description": "An RESTful application for Docker Swarm.",
            "contact": {
                "responsibleDeveloper": "Zhuangwei Kang",
                "email": "*****@*****.**"
            },
            "version": "0.0.1"
        },
        "host": '%s:%s' % (gm_address, SystemConstants.GM_PORT),
        "basePath": "",
        "schemes": [
            "http",
        ]
    }
    # Registration has side effects on `app`; the instance itself is unused.
    Swagger(app, template=template)
    dockerClient = docker.set_client()
    # mongodb
    with open('../DBInfo.json') as f:
        db_info = json.load(f)
    db_client = mg.get_client(usr=db_info['user'], pwd=db_info['pwd'],
                              db_name=db_info['db_name'],
                              address=db_info['address'],
                              port=SystemConstants.MONGODB_PORT)
    db = mg.get_db(db_client, SystemConstants.MONGODB_NAME)
    worker_col = mg.get_col(db, SystemConstants.WorkersInfo)
    worker_resource_col = mg.get_col(db, SystemConstants.WorkersResourceInfo)

    def prune_nw():
        # Periodically remove Docker networks belonging to jobs whose status
        # is 'Down'. Iterates a copy of job_buffer so entries can be removed
        # safely while scanning.
        while True:
            networks = []
            for job in job_buffer[:]:
                job_info = mg.filter_col(mg.get_col(db, job), 'job_name', job)
                if job_info is not None and job_info['status'] == 'Down':
                    networks.append(job_info['job_info']['network']['name'])
                    job_buffer.remove(job)
            docker.rm_networks(dockerClient, networks)
            print('Remove networks:', networks)
            time.sleep(60)

    prune_nw_thr = threading.Thread(target=prune_nw, args=())
    prune_nw_thr.daemon = True  # don't block interpreter shutdown
    prune_nw_thr.start()
    os.chdir('/home/%s/RESTfulSwarm/ManagementEngine' % utl.get_username())
    app.run(host=gm_address, port=SystemConstants.GM_PORT, debug=False)