def check_connections():
    shelf = db_handler.get_db('state.db')
    if 'state' not in shelf:
        shelf['state'] = 0
    state = shelf['state']
    shelf.close()
    return 'current_connections:' + str(state)
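# db_handler is not shown in this module. A minimal sketch of what get_db is
# assumed to do: a thin wrapper around the standard-library shelve module that
# returns a persistent, dict-like store. The helper name below is hypothetical.
import shelve

def _get_db_sketch(name):
    # shelve.open creates the backing dbm file on first use and returns a
    # mutable, dict-like object whose values are pickled on assignment
    return shelve.open(name)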
def list_downstream(job):
    ''' Lists the downstream (sink) brokers of a Job '''
    shelf = db_handler.get_db('jobs.db')
    if job not in shelf:
        shelf.close()
        return {'message': 'Job not found', 'data': {}}, 404
    downstreams = shelf[job]['sink_broker']
    shelf.close()
    for downstream in downstreams:
        log.debug(downstream)
    return {'message': 'Success', 'data': downstreams}, 200
def stop_job(job):
    ''' Stop Job request to the SPE '''
    shelf = db_handler.get_db('jobs.db')
    if job not in shelf:
        shelf.close()
        return {'message': 'Job not found', 'data': {}}, 404
    host = 'http://' + shelf[job]['agent_address'] + ':' + spe_port
    spe_handler.delete_jar(host, shelf[job]['jarid'])
    spe_handler.stop_job(host, shelf[job]['jobid'])
    shelf.close()
    return {'message': 'Success'}, 200
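# spe_handler is defined elsewhere. A minimal sketch of the two calls above,
# assuming the SPE is Apache Flink and spe_handler wraps its REST API; the
# helper names are hypothetical, the endpoints are Flink's documented
# jar/job routes.

def _delete_jar_sketch(host, jarid):
    # removes an uploaded jar from the Flink cluster
    return requests.delete(host + '/jars/' + jarid)

def _stop_job_sketch(host, jobid):
    # cancels a running Flink job
    return requests.patch(host + '/jobs/' + jobid + '?mode=cancel')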
def delete_job(job):
    ''' Removes the Jar file locally and the Job info from the DB '''
    shelf = db_handler.get_db('jobs.db')
    if job not in shelf:
        shelf.close()
        return {'message': 'Job not found', 'data': {}}, 404
    if os.path.exists(shelf[job]['job_path']):
        os.remove(shelf[job]['job_path'])
    del shelf[job]  # removes only this Job's entry, not the whole DB
    shelf.close()
    return {'message': 'Success'}, 200
def update_downstream():
    ''' Downstream Agents update their upstream (source) address accordingly '''
    json_data = request.json
    updated_source_broker = json_data['update_source_broker']
    updated_topic = json_data['source_topic']
    shelf = db_handler.get_db('jobs.db')
    for job in shelf:
        entry = shelf[job]
        for i in range(len(entry['source_topic'])):
            if updated_topic == entry['source_topic'][i]:
                entry['source_broker'][i] = updated_source_broker
        # reassign the whole entry so the change persists even when the
        # shelf is opened without writeback
        shelf[job] = entry
    for job in shelf:
        log.debug(shelf[job]['source_broker'])
    shelf.close()
    return {'message': 'Success'}, 200
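# Illustration: a peer Agent triggers update_downstream with a request like the
# one below (this mirrors the call made in send_job; the addresses and topic
# are placeholder values):

def _example_update_downstream_request():
    json_data = {
        "update_source_broker": "10.0.0.2",   # address the job moved to
        "source_topic": "sensor/temperature"  # topic whose upstream changed
    }
    req = requests.get("http://10.0.0.3:5001/update_downstream", json=json_data)
    log.debug(req.text)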
def start_job(args, filename):
    ''' Prepares and saves the info from the request
        Tells the upstreams to start MQTT clients '''
    spe_addr = 'http://' + args['agent_address'] + ':' + spe_port
    full_path = job_path + '/' + filename
    entry_class = args['entry_class']
    job_name = args['job_name']
    broker = args['agent_address']
    source_topic = args['source_topic']
    sink_topic = args['sink_topic']
    # upload the Jar to the SPE and start the Job
    jarid = spe_handler.upload_jar(spe_addr, full_path)
    jobid = spe_handler.start_jar(spe_addr, jarid, entry_class, broker,
                                  source_topic, sink_topic, job_name)
    args['filename'] = filename
    args['jarid'] = jarid
    args['jobid'] = jobid
    args['job_path'] = full_path
    log.debug(jobid)
    # ask each upstream Agent to start an MQTT client feeding this Job
    upstreams = args['source_broker']
    for i in range(len(upstreams)):
        client_id = args['job_name'] + "_source_" + args['source_topic'][i]
        log.debug("starting " + client_id + " on " + args['source_broker'][i])
        json_data = {
            "client_id": client_id,
            "source_broker": args['source_broker'][i],
            "topic": args['source_topic'][i],
            "sink_broker": args['sink_broker'][i]
        }
        req = requests.get("http://" + args['source_broker'][i] +
                           ":5001/create_client", json=json_data)
        log.debug(req.text)
    shelf = db_handler.get_db('jobs.db')
    shelf[args['job_name']] = args
    shelf.close()
    return {'message': 'Job Started'}, 200
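# As above, spe_handler.upload_jar and spe_handler.start_jar are defined
# elsewhere. A hedged sketch of both, again assuming the SPE is Apache Flink:
# the jar is posted to /jars/upload and started via /jars/<id>/run. How the
# broker/topic arguments are encoded into programArgs is an assumption.

def _upload_jar_sketch(spe_addr, path):
    with open(path, 'rb') as jar:
        req = requests.post(spe_addr + '/jars/upload',
                            files={'jarfile': ('job.jar', jar,
                                               'application/x-java-archive')})
    # Flink returns the stored filename; its basename is the jar id
    return req.json()['filename'].split('/')[-1]

def _start_jar_sketch(spe_addr, jarid, entry_class, broker, source_topic,
                      sink_topic, job_name):
    program_args = ' '.join([broker] + source_topic + sink_topic + [job_name])
    req = requests.post(spe_addr + '/jars/' + jarid + '/run',
                        json={'entryClass': entry_class,
                              'programArgs': program_args})
    return req.json()['jobid']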
def syn_response():
    ''' Reserves a task slot on this node if one is available '''
    url = request.remote_addr
    base_url = 'http://' + url
    available_taskslots = int(
        metrics_handler.get_available_task_slots(base_url))
    # mutex.acquire()
    shelf = db_handler.get_db('state.db')
    if 'state' not in shelf:
        shelf['state'] = 0
    state = shelf['state']
    if available_taskslots - state > 0:
        state += 1
        shelf['state'] = state
        shelf.close()
        # mutex.release()
        return {'message': 'Success', 'data': state}, 200
    shelf.close()
    # mutex.release()
    return {'message': 'Failed', 'data': state}, 500
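# The commented-out mutex above hints at a race: two concurrent syn_response
# calls can both read state before either increments it, double-booking the
# last task slot. A minimal sketch of the guarded critical section, assuming a
# single-process server and a module-level lock (both names are hypothetical):
import threading

_state_lock = threading.Lock()

def _reserve_slot_sketch(available_taskslots):
    with _state_lock:  # serialize the read-modify-write of the shared counter
        shelf = db_handler.get_db('state.db')
        state = shelf.get('state', 0)
        if available_taskslots - state > 0:
            shelf['state'] = state + 1
            shelf.close()
            return True
        shelf.close()
        return False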
def send_job(url, job):
    ''' Stops the MQTT client on each upstream Agent
        Stops the Job on the SPE
        Stops the clients related to the migrating Job
        Prepares the request body with the Job info for the new node
        Sends the Job info and Jar to the new node
        (the new node asks its upstreams to update and create clients)
        Tells the downstream Agents to update their DB with the new sources
        Tells the new Agent to start the sink MQTT clients '''
    shelf = db_handler.get_db('jobs.db')
    if job not in shelf:
        shelf.close()
        return {'message': 'Job not found', 'data': {}}, 404
    # stop the source MQTT client on each upstream Agent
    upstreams = shelf[job]['source_broker']
    for i in range(len(upstreams)):
        client_id = shelf[job]['job_name'] + "_source_" + shelf[job][
            'source_topic'][i]
        log.debug("deleting " + client_id + " on " +
                  shelf[job]['source_broker'][i])
        req = requests.get("http://" + shelf[job]['source_broker'][i] +
                           ":5001/delete_client/" + client_id)
        log.debug(req.text)
    stop_job(job)  # stop request to the SPE
    # move the sink MQTT clients from this Agent to the new node
    downstreams = shelf[job]['sink_broker']
    clients = db_handler.get_db('clients.db')
    for i in range(len(downstreams)):
        job_topic = shelf[job]['sink_topic'][i]
        # iterating a shelf yields keys, so look each entry up by key
        for key in clients:
            client = clients[key]
            client_topic = client['topic']
            if client_topic == job_topic:
                client_id = client['client_id']
                log.debug("deleting " + client_id + " on " +
                          shelf[job]['agent_address'])
                req = requests.get("http://" + shelf[job]['agent_address'] +
                                   ":5001/delete_client/" + client_id)
                log.debug("starting " + client_id + " on " + url)
                json_data = {
                    "client_id": client_id,
                    "source_broker": url,
                    "topic": client_topic,
                    "sink_broker": shelf[job]['sink_broker'][i]
                }
                req = requests.get("http://" + url + ":5001/create_client",
                                   json=json_data)
                log.debug(req.text)
    clients.close()
    # send the Job info and the Jar file to the new node
    body = {
        'pipeline_name': shelf[job]['pipeline_name'],
        'job_name': shelf[job]['job_name'],
        'agent_address': url,
        'source_broker': shelf[job]['source_broker'],
        'sink_broker': shelf[job]['sink_broker'],
        'source_topic': shelf[job]['source_topic'],
        'sink_topic': shelf[job]['sink_topic'],
        'entry_class': shelf[job]['entry_class']
    }
    files = [
        ('jar', ('test.jar', open(shelf[job]['job_path'], 'rb'),
                 'application/octet-stream')),
        ('data', ('data', json.dumps(body), 'application/json')),
    ]
    req = requests.post("http://" + url + ":5001/upload", files=files)
    if req.status_code == 200:
        # tell each downstream Agent to point its sources at the new node
        downstreams = shelf[job]['sink_broker']
        for i in range(len(downstreams)):
            json_data = {
                "update_source_broker": url,
                "source_topic": shelf[job]['sink_topic'][i]
            }
            req = requests.get("http://" + shelf[job]['sink_broker'][i] +
                               ":5001/update_downstream", json=json_data)
            log.debug(req.text)
    shelf.close()
    delete_job(job)  # delete the Job from the local DB
    return {'message': 'Success'}, 200
def clear_connections():
    shelf = db_handler.get_db('state.db')
    shelf['state'] = 0
    shelf.close()
    return 'state cleared'
def list_upstream(job):
    ''' Lists the upstream (source) brokers of a Job '''
    shelf = db_handler.get_db('jobs.db')
    if job not in shelf:
        shelf.close()
        return {'message': 'Job not found', 'data': {}}, 404
    upstreams = shelf[job]['source_broker']
    shelf.close()
    return {'message': 'Success', 'data': upstreams}, 200