def booking():
    """Create a new Booking when the resort/slot/date is still free.

    On a valid submit, looks for an existing booking with the same
    resort, day/night slot and date; if none is found the booking is
    stored for the current user, otherwise the user is sent back with a
    "Date Already Booked" notice.  GET (or an invalid submit) renders
    the booking form.
    """
    form = BookingForm()
    if form.validate_on_submit():
        s_date = form.bdate.data
        # Round-trip the date through dd/mm/yyyy so the filter compares
        # against a midnight datetime, matching queries elsewhere.
        sr_date = s_date.strftime("%d/%m/%Y")
        bresort = form.bresort.data
        res = db.session().query(Booking.id).filter(
            Booking.bresort == bresort).filter(
                Booking.bdaynight == form.bdaynight.data).filter(
                    Booking.bdate >= datetime.strptime(sr_date, '%d/%m/%Y'),
                    Booking.bdate <= datetime.strptime(sr_date, '%d/%m/%Y')
                ).first()
        if res is None:  # fix: was `res == None`; use identity test for None
            booking = Booking(bname=form.bname.data,
                              baddress=form.baddress.data,
                              bcontact=form.bcontact.data,
                              bresort=form.bresort.data,
                              bref=form.bref.data,
                              bdate=form.bdate.data,
                              bdaynight=form.bdaynight.data,
                              brentcat=form.brentcat.data,
                              bcat=form.bcat.data,
                              bcatveg=form.bcatveg.data,
                              bgathconf=form.bgathconf.data,
                              bnote=form.bnote.data,
                              author=current_user)
            db.session().add(booking)
            db.session().commit()
            flash('New Booking sucessfully Added', 'success')
            return redirect('/dash_index')
        else:
            flash('Date Already Booked', 'info')
            return redirect('/booking')
    return render_template('booking.html', title='Register', form=form)
def delete_post(post_id):
    """Delete a post owned by the current user, then return home."""
    post = Post.query.get_or_404(post_id)
    # Only the author may remove their own post.
    if post.author != current_user:
        abort(403)
    session = db.session()
    session.delete(post)
    session.commit()
    flash('Your post has been deleted!', 'success')
    return redirect(url_for('home'))
def new_post():
    """Render the post-creation form and store a new post on submit."""
    form = PostForm()
    if not form.validate_on_submit():
        # GET request or invalid submission: show the form again.
        return render_template('create_post.html', title='New Post',
                               form=form, legend='New Post')
    created = Post(title=form.title.data,
                   content=form.content.data,
                   author=current_user)
    session = db.session()
    session.add(created)
    session.commit()
    flash('Your post has been created ', 'success')
    return redirect(url_for('main.home'))
def dash_index():
    """Dashboard: per-resort counts of upcoming bookings, split by bauth flag."""
    def _resort_counts(auth_flag):
        # Count future bookings per resort for the given authorisation flag.
        return db.session().query(
            Booking.bresort, Booking.bdate,
            func.count(Booking.bresort).label('count')).filter(
                Booking.bauth == auth_flag).filter(
                Booking.bdate >= datetime.now()).group_by(
                Booking.bresort).all()

    return render_template('dash_index.html',
                           res=_resort_counts('Yes'),
                           resno=_resort_counts('No'))
def remove_old_nodes():
    """
    Delete pinger and ponger registrations not updated in the last
    30 minutes (or with no update timestamp at all).

    Bug fix: the original filters used Python ``is None`` on column
    expressions, which is evaluated at expression-build time (always
    False) instead of emitting SQL ``IS NULL``; ``.is_(None)`` is the
    correct SQLAlchemy form.
    :return: None when this node is not a master
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: Remove_old_nodes called".format(current_f_name))
    if not pipong_is_master():
        return None
    since = datetime.now() - timedelta(minutes=30)
    s = db.session()
    pinger_t = db.session.query(models.RegisteredPingerNode).filter(
        or_(models.RegisteredPingerNode.last_updated_date.is_(None),
            models.RegisteredPingerNode.last_updated_date < since))
    logger.debug("{}: Old pingers: {}".format(current_f_name,
                                              pinger_t.count()))
    pinger_t.delete()
    ponger_t = db.session.query(models.RegisteredPongerNode).filter(
        or_(models.RegisteredPongerNode.last_updated_date.is_(None),
            models.RegisteredPongerNode.last_updated_date < since))
    logger.debug("{}: Old pongers: {}".format(current_f_name,
                                              ponger_t.count()))
    ponger_t.delete()
    s.commit()
def finish_old_iterations():
    """
    Mark master iterations older than 30 minutes (or with no creation
    timestamp) as FINISHED so they stop blocking new iterations.

    Bug fixes: ``created_date is None`` inside the SQLAlchemy filter
    evaluated in Python (always False) instead of emitting SQL
    ``IS NULL`` — replaced with ``.is_(None)``; the startup log message
    was copy-pasted from remove_old_nodes.
    :return: None when this node is not a master
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: Finish_old_iterations called".format(current_f_name))
    if not pipong_is_master():
        return None
    since = datetime.now() - timedelta(minutes=30)
    s = db.session()
    master_t = db.session.query(models.MasterIteration).filter(
        or_(models.MasterIteration.created_date.is_(None),
            models.MasterIteration.created_date < since))
    logger.debug("{}: Old iterations: {}".format(current_f_name,
                                                 master_t.count()))
    for e in master_t:
        e.status = "FINISHED"
    s.commit()
def update_post(post_id):
    """Let a post's author edit it; pre-fill the form on GET."""
    post = Post.query.get_or_404(post_id)
    if post.author != current_user:
        abort(403)
    form = PostForm()
    if form.validate_on_submit():
        post.title = form.title.data
        post.content = form.content.data
        db.session().commit()
        flash('Your post has been updated!', 'success')
        return redirect(url_for('posts.post', post_id=post_id))
    if request.method == 'GET':
        # Seed the form with the current post contents.
        form.title.data = post.title
        form.content.data = post.content
    return render_template('create_post.html', title='Update Post',
                           form=form, legend='Update Post')
def remove_old_nodes(self):
    """
    Seed one stale pinger and one stale ponger, then verify that the
    cleanup task removes both of them.
    :return:
    """
    stale = dict(address='127.0.0.1',
                 api_protocol='http://',
                 api_port='5000',
                 created_date='2004-10-19 10:23:54',
                 last_updated_date='')
    with self.app.app_context():
        session = db.session()
        session.add(models.RegisteredPingerNode(**stale))
        session.add(models.RegisteredPongerNode(**stale))
        session.commit()
    tasks.master_tasks.remove_old_nodes()
    with self.app.app_context():
        remaining_pingers = db.session.query(
            models.RegisteredPingerNode).count()
        remaining_pongers = db.session.query(
            models.RegisteredPongerNode).count()
        assert remaining_pingers <= 0
        assert remaining_pongers <= 0
def sindexmonth(bresort):
    """Monthly counts of upcoming authorised bookings for one resort."""
    monthly = db.session().query(
        Booking.bresort,
        Booking.bdate,
        func.count(Booking.bdate).label('count'),
    ).filter(
        Booking.bauth == 'Yes',
        Booking.bresort == bresort,
        Booking.bdate >= datetime.now(),
    ).group_by(func.strftime('%m-%Y', Booking.bdate)).all()
    return render_template('sindexmonth.html', res=monthly, bresort=bresort)
def edit_data(id):
    """Update an existing Booking from the posted form fields."""
    booking = Booking.query.filter_by(id=id).first()
    # Copy each editable form value onto the booking record verbatim.
    for field in ('bname', 'baddress', 'bcontact', 'bresort', 'bref',
                  'bdate', 'bdaynight', 'brentcat', 'bcat', 'bcatveg',
                  'bgathconf', 'bnote'):
        setattr(booking, field, request.form[field])
    db.session().commit()
    flash('Booking Record has been updated!', 'success')
    return redirect('/dash_index')
def add_ponger_localhost(self):
    """Register a ponger node at localhost:5003 for use in tests."""
    with self.app.app_context():
        session = db.session()
        node = models.RegisteredPongerNode(address='localhost',
                                           api_port='5003',
                                           api_protocol='http://')
        session.add(node)
        session.commit()
def sindexdate():
    """Per-date counts of upcoming authorised bookings for one resort."""
    bdate = request.args['bdate']
    bresort = request.args['bresort']
    per_date = db.session().query(
        Booking.bresort,
        Booking.bdate,
        func.count(Booking.bdate).label('count'),
    ).filter(
        Booking.bauth == 'Yes',
        Booking.bresort == bresort,
        Booking.bdate >= datetime.now(),
    ).group_by(Booking.bdate).all()
    return render_template('sindexdate.html', res=per_date, bresort=bresort)
def test_dublin_tracert(self):
    """
    Execute a dublin-traceroute run against 8.8.8.8 through the API and
    check that the stored result contains flow data.
    :return:
    """
    # Start a pinger session targeting 8.8.8.8 via the REST API.
    rv = self.client.post('/api/v1.0/start_session',
                          data=json.dumps({
                              'hosts': {
                                  "8.8.8.8": {
                                      "api_port": 5000,
                                      "api_protocol": "http://",
                                  }
                              },
                              'tracert_qty': 1,
                              'master_iteration_id': 1
                          }),
                          follow_redirects=True,
                          headers=self.auth_header,
                          content_type='application/json')
    assert b'success' in rv.data
    json_data = json.loads(str(rv.data, 'utf-8'))
    dst_port = get_local_free_port()
    src_port = get_local_free_port()
    with self.app.app_context():
        s = db.session()
        # Create the ponger port and a PENDING tracert row the task will
        # pick up and fill in.
        ponger_port_t = models.PongerPort(ponger_id=1,
                                          dst_port=dst_port,
                                          src_port_min=src_port,
                                          src_port_max=src_port + 1)
        s.add(ponger_port_t)
        s.flush()
        tracert_t = models.Tracert(
            pinger_iteration_id=json_data['ping_iteration_id'],
            status='PENDING',
            ponger_port_id=ponger_port_t.id,
        )
        s.add(tracert_t)
        s.flush()
        tracert_id = tracert_t.id
        # Run the traceroute synchronously, then re-read the row to get
        # the stored result.
        tasks.pinger_tasks.do_dublin_tracert(
            json_data['ping_iteration_id'])
        tracert_t = s.query(
            models.Tracert).filter_by(id=tracert_id).first()
        tracert_result = tracert_t.result
        s.commit()
        s.close_all()
    assert "flows" in tracert_result
def sindexdateone():
    """List authorised bookings for one resort on one specific date."""
    bdate = request.args['bdate']
    bresort = request.args['bresort']
    day = datetime.strptime(bdate, '%d/%m/%Y')
    rows = db.session().query(
        Booking.id, Booking.bname, Booking.bdaynight, Booking.brentcat,
        Booking.bgathconf, Booking.bresort, Booking.bdate,
    ).filter(
        Booking.bauth == 'Yes',
        Booking.bresort == bresort,
        Booking.bdate >= day,
        Booking.bdate <= day,
    ).all()
    return render_template('sindexdateone.html', res=rows, bresort=bresort)
def start_iperf_server():
    """
    This method is to be executed by a pinger.  The ponger reserves a
    port to be used exclusively by the requesting pinger and starts an
    iperf server on it.

    Bug fix: when the reserved port range was exhausted the original
    code raised IndexError on ``available_ports[0]`` (an HTTP 500);
    it now returns a failure response instead.
    :return: JSON with the allocated port, or a failure message
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: start_iperf_server".format(current_f_name))
    if not pipong_is_ponger():
        return jsonify({
            'result': 'failure',
            'msg': 'this server is not a ponger'
        })
    ip_addr = request.remote_addr
    pingerp_t = db.session.query(
        models.AllocatedPingerPort).filter_by(address=ip_addr).first()
    if not pingerp_t:
        # First request from this host: allocate the lowest free port in
        # the configured reserved range.
        all_t = db.session.query(models.AllocatedPingerPort).all()
        all_ports = [row.port for row in all_t]
        possible_ports = list(
            range(app.config['RESERVED_PORT_RANGE_MIN'],
                  app.config['RESERVED_PORT_RANGE_MAX']))
        available_ports = sorted(set(possible_ports) - set(all_ports))
        if not available_ports:
            logger.error("{}: No free ports left for host:{}".format(
                current_f_name, ip_addr))
            return jsonify({
                'result': 'failure',
                'msg': 'no free ports available'
            })
        port = available_ports[0]
        logger.debug("{}: For host:{} new selected port generated:{}".format(
            current_f_name, ip_addr, port))
        s = db.session()
        pingp_t = models.AllocatedPingerPort(address=ip_addr, port=port)
        s.add(pingp_t)
        s.commit()
    else:
        # Re-use the port previously allocated to this host.
        port = pingerp_t.port
        logger.debug("{}: For host:{} selected port:{}".format(
            current_f_name, ip_addr, port))
    result = tasks.ponger_tasks.create_iperf_server.delay(port)
    creation_status = result.get()
    if not creation_status:
        return jsonify({
            'result': 'failure',
            'msg': 'cannot start iperf server'
        })
    return jsonify({'result': 'success', 'port': port})
def UpdateBooking(id):
    """Edit an existing booking, provided its resort/slot/date is not
    taken by another record; the edited booking reverts to bauth='No'."""
    entries = Booking.query.order_by(Booking.id.asc())
    entry = Booking.query.get(id)
    form = UpdateBookingForm(obj=entry)
    if form.validate_on_submit():
        chosen = form.bdate.data.strftime("%d/%m/%Y")
        resort = form.bresort.data
        # Bookings matching the requested resort/slot/date.
        clash = db.session().query(
            Booking.id, Booking.bauth).filter(
            Booking.bresort == resort).filter(
            Booking.bdaynight == form.bdaynight.data).filter(
            Booking.bdate >= datetime.strptime(chosen, '%d/%m/%Y'),
            Booking.bdate <= datetime.strptime(chosen, '%d/%m/%Y'))
        if clash.count() == 1:
            # Exactly one match (this record itself): safe to update.
            target = Booking.query.filter_by(id=id).first()
            for field in ('bname', 'baddress', 'bcontact', 'bresort',
                          'bref', 'bdaynight', 'brentcat', 'bcat',
                          'bcatveg', 'bgathconf', 'bnote'):
                setattr(target, field, request.form[field])
            target.bdate = form.bdate.data
            # Edited bookings need re-authorisation.
            target.bauth = 'No'
            db.session().commit()
            flash('Your account has been updated!', 'success')
            return redirect('/dash_index')
        else:
            flash('Date Already Booked ....', 'info')
            return redirect('/booking')
    return render_template('editbooking.html', entries=entries, form=form)
def check_master_iteration_done(master_iteration_id):
    """
    Check whether every pinger of the given master iteration has sent
    its results; if so, mark the iteration FINISHED.

    Bug fix: ``logger.warn`` is a deprecated alias — replaced with
    ``logger.warning``.
    :param master_iteration_id: id of the models.MasterIteration row
    :return: dict with 'is_finished' and 'percentage'; when the
             iteration exists it also contains 'count' and 'total'
    """
    current_f_name = inspect.currentframe().f_code.co_name
    is_finished = False
    master_it = db.session.query(
        models.MasterIteration).filter_by(id=master_iteration_id).first()
    if master_it is None:
        logger.error("{}: No MasterIteration found with id: {}".format(
            current_f_name, master_iteration_id))
        return {'is_finished': is_finished, 'percentage': 0.0}
    count = 0
    pinger_size = len(master_it.master_iteration_pinger)
    for master_pinger_it in master_it.master_iteration_pinger:
        if master_pinger_it.status == "FINISHED":
            count += 1
    if count >= pinger_size:
        # All pingers reported: persist the FINISHED state.
        s = db.session()
        is_finished = True
        master_it.status = 'FINISHED'
        s.commit()
    if count > pinger_size:
        # Should not happen; clamp so the percentage stays <= 100.
        logger.warning("{}: count > pinger_size {}>{}".format(
            current_f_name, count, pinger_size))
        count = pinger_size
    percent = 0
    if pinger_size > 0:
        percent = (count / float(pinger_size)) * 100
    return {
        'is_finished': is_finished,
        'percentage': percent,
        'count': count,
        'total': pinger_size
    }
def populateDb(self):
    """
    Insert the base status-type rows into the database.
    :return:
    """
    pinger_states = ('CREATED', 'RUNNING', 'RUNNING_TRACEROUTE',
                     'RUNNING_IPERF', 'RUNNING_FINISHING', 'FINISHED',
                     'ERROR')
    task_states = ('PENDING', 'STARTED', 'SUCCESS', 'FAILURE', 'RETRY',
                   'REVOKED')
    with self.app.app_context():
        session = db.session()
        for state in pinger_states:
            session.add(models.PingerIterationStatusType(type_id=state))
        for state in task_states:
            session.add(models.TaskStatusType(type_id=state))
        session.commit()
def register_pinger():
    """
    Register (or refresh) a pinger node on this master node.
    :return: JSON result
    """
    current_f_name = inspect.currentframe().f_code.co_name
    if not pipong_is_master():
        return jsonify({
            'result': 'failure',
            'msg': 'this server is not a master'
        })
    data = request.get_json()
    ip_addr = request.remote_addr
    api_port = data['api_port']
    api_protocol = data['api_protocol']
    registrered_t = db.session.query(models.RegisteredPingerNode).filter_by(
        address=ip_addr, api_port=api_port).first()
    s = db.session()
    if registrered_t:
        # Already known: just refresh the heartbeat timestamp.
        registrered_t.last_updated_date = datetime.now()
    else:
        s.add(models.RegisteredPingerNode(address=ip_addr,
                                          api_port=api_port,
                                          api_protocol=api_protocol))
        logger.debug(
            "{}: Registering ping: host:{} api_port:{} api_protocol:{}".format(
                current_f_name, ip_addr, api_port, api_protocol))
    s.commit()
    return jsonify({'result': 'success'})
def test_finish_old_iterations(self):
    """
    Check finishing old iterations that are probably hanging: create an
    iteration, backdate it past the cutoff, run the task and verify it
    was moved to FINISHED.
    :return:
    """
    # Register a pinger so create_iteration has a node to schedule.
    rv = self.client.post('/api/v1.0/master/register_pinger',
                          data=json.dumps(
                              dict(api_port='1234', api_protocol='http://')),
                          follow_redirects=True,
                          headers=self.auth_header,
                          content_type='application/json')
    assert b'success' in rv.data
    with self.app.app_context():
        tasks.master_tasks.create_iteration()
        master_count = db.session.query(models.MasterIteration).order_by(
            desc(models.MasterIteration.created_date)).count()
        assert master_count == 1
        master_it_q = db.session.query(models.MasterIteration).order_by(
            desc(models.MasterIteration.created_date)).first()
        s = db.session()
        # Backdate the iteration so it is older than the 30-minute cutoff.
        master_it_q.created_date = "2000-11-16 17:30:00"
        s.commit()
        assert master_it_q.status == "CREATED"
        tasks.master_tasks.finish_old_iterations()
        master_it_q = db.session.query(models.MasterIteration).order_by(
            desc(models.MasterIteration.created_date)).first()
        assert master_it_q.status == "FINISHED"
def perform_pipong_iteration_3(result, pinger_iteration_id):
    """
    Third iteration of the discovery and monitor.
    Gather the iperf/tracert results, compile them into a JSON payload
    and send them to the master node.

    Bug fixes: ``flow_name is not ""`` used identity comparison with a
    literal (a SyntaxWarning with unreliable semantics) — replaced with
    ``!=``; a log line dereferenced ``tracert_t.ponger_port_id`` before
    the ``if tracert_t:`` None check, raising AttributeError whenever no
    successful tracert existed for an iperf row.

    :param result: previous result (logged only)
    :param pinger_iteration_id: the iteration id from the db
    :return: True on success, None on failure
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: Perform_pipong_iteration_3".format(current_f_name))
    logger.info("{}: Input:{} pinger_iteration_id:{}".format(
        current_f_name, result, pinger_iteration_id))
    iter_t = db.session.query(
        models.PingerIteration).filter_by(id=pinger_iteration_id).first()
    if iter_t is None:
        logger.error("{}: Iteration not found with ID: {}".format(
            current_f_name, pinger_iteration_id))
        return
    master_remote_id = iter_t.remote_id
    s = db.session()
    iter_t.status = "RUNNING_FINISHING"
    s.commit()
    iteration_result = []
    iperf_t = db.session.query(models.Iperf).filter_by(
        pinger_iteration_id=pinger_iteration_id, status='SUCCESS')
    for iperf in iperf_t:
        tracert_t = db.session.query(models.Tracert).filter_by(
            pinger_iteration_id=pinger_iteration_id,
            ponger_port_id=iperf.ponger_port_id,
            status='SUCCESS').first()
        iperf_res = json.loads(iperf.result)
        if tracert_t:
            logger.info(
                "{}: tracert_t.ponger_port_id:{} iperf.ponger_port_id:{}".
                format(current_f_name, tracert_t.ponger_port_id,
                       iperf.ponger_port_id))
            result_dict = {
                "ponger_address": str(iperf.ponger_port.ponger.address),
                "src_port": int(iperf.src_port),
                "dst_port": int(iperf.ponger_port.dst_port)
            }
            try:
                tracert_res = json.loads(tracert_t.result)
                tracert_path = []
                src_ip = tracert_res['flows'][str(
                    iperf.src_port)][0]['sent']['ip']['src']
                for flow in tracert_res['flows'][str(iperf.src_port)]:
                    flow_name = flow['name']
                    if flow_name != "":
                        ip_addr_node = flow['received']['ip']['src']
                        # Keep intermediate hops only: drop our own
                        # address and the target ponger's address.
                        if ip_addr_node != src_ip and ip_addr_node != str(
                                iperf.ponger_port.ponger.address):
                            tracert_path.append(ip_addr_node)
                result_dict["path"] = tracert_path
                result_dict["seconds"] = iperf_res['end']['sum']['seconds']
                result_dict["bytes"] = iperf_res['end']['sum']['bytes']
                result_dict["bits_per_second"] = iperf_res['end']['sum'][
                    'bits_per_second']
                result_dict["lost_percent"] = iperf_res['end']['sum'][
                    'lost_percent']
                iteration_result.append(result_dict)
            except Exception as e:
                # Best effort: skip hosts whose results cannot be parsed.
                logger.error(
                    "{}: Error obtaining data from iperf iteration:{} "
                    "lost_percent for this host:{} result_dict:{}".format(
                        current_f_name, str(e),
                        str(iperf.ponger_port.ponger.address),
                        str(result_dict)))
    master_host = app.config['MASTER_SERVER']
    master_port = app.config['MASTER_PORT']
    http_user = app.config['HTTP_AUTH_USER']
    http_pass = app.config['HTTP_AUTH_PASS']
    try:
        post_url = ("http://{}:{}/api/v1.0/master/"
                    "register_pinger_result".format(master_host, master_port))
        iter_t.status = "FINISHED"
        s.commit()
        try:
            post_data = {
                "master_remote_id": master_remote_id,
                "local_port": app.config['API_PORT'],
                "result": iteration_result
            }
            req = requests.post(post_url,
                                auth=requestHTTPAuth(http_user, http_pass),
                                json=post_data,
                                timeout=10)
            logger.info("{}: Sent pinger result response:{} data:{}".format(
                current_f_name, req.text, post_data))
            return True
        except Exception as e:
            logger.error("{}: Error registering pinger in master: {}".format(
                current_f_name, str(e)))
            return None
    except SoftTimeLimitExceeded:
        logger.error("{}: Error SoftTimeLimitExceeded".format(current_f_name))
        return None
def start_session():
    """
    Receive a json with the configuration of a new iteration.
    The json follows this configuration:
    {
        "hosts": {
            "127.0.0.1": {
                "api_port": 5003,
                "api_protocol": "http://",
            }
        },
        "tracert_qty": 20,
        "master_iteration_id": "myremoteid02"
    }

    Bug fix: the ``except`` branch built a failure ``jsonify`` response
    but never returned it, so errors were answered with the success
    payload; it is now returned.
    :return: JSON result with the new ping_iteration_id on success
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info('{}: Start_session called'.format(current_f_name))
    if not pipong_is_pinger():
        return jsonify({
            'result': 'failure',
            'msg': 'this server is not a pinger'
        })
    response = {'result': 'success'}
    data = request.get_json()
    logger.info(data)
    try:
        host_list = data['hosts']
        remote_id = data['master_iteration_id']
        tracert_qty = data['tracert_qty']
        ip_addr = request.remote_addr
        exists = db.session.query(
            db.session.query(models.PingerIteration).filter_by(
                remote_id=str(remote_id)).exists()).scalar()
        if not exists:
            s = db.session()
            iter_t = models.PingerIteration(status="CREATED",
                                            remote_id=str(remote_id),
                                            remote_address=ip_addr,
                                            tracert_qty=tracert_qty)
            s.add(iter_t)
            s.flush()
            # One Ponger row per host in the request.
            for k, v in host_list.items():
                api_port = v['api_port']
                api_protocol = v['api_protocol']
                ponger_t = models.Ponger(address=k,
                                         pinger_iteration_id=iter_t.id,
                                         api_port=api_port,
                                         api_protocol=api_protocol)
                s.add(ponger_t)
                s.flush()
            s.commit()
            logger.info('{}: New pinger iteration ID:{}'.format(
                current_f_name, iter_t.id))
            tasks.pinger_tasks.perform_pipong_iteration_1.apply_async(
                args=[iter_t.id], kwargs={})
            response['ping_iteration_id'] = iter_t.id
        else:
            logger.error(
                '{}: Remote id already registered'.format(current_f_name))
            return jsonify({
                'result': 'failure',
                'msg': 'remote id already registered'
            })
        logger.info('{}: port_list:{} ip_addr:{} exists:{}'.format(
            current_f_name, host_list, ip_addr, exists))
    except Exception:
        exception_log = traceback.format_exc()
        logger.debug('{}: e:{}'.format(current_f_name, exception_log))
        return jsonify({'result': 'failure', 'msg': exception_log})
    return jsonify(response)
def analyse_iteration(master_iteration_id):
    """
    Analyse an iteration; the results are added to the
    'master_iteration_result' table.

    This uses a simple approach to detect problematic nodes:
    - the lost_percent using iperf must be 0 for healthy paths
    - if a path has a lost_percent > 0 then it is a problematic path
    - for every pinger result the addresses of the hops are grouped by
      their network address
    - for every network address on a pinger result a packet_loss result
      is stored
    - then a voting mechanism is performed: if more than half of the
      packet loss values for a single hop have the same value (generally
      0), that value is used as the hop's packet loss; if there is no
      majority value, the arithmetic mean of those packet loss values is
      used instead
      * this voting mechanism helps to remove noise in the readings
    - finally the results of all pingers are joined, and for every hop
      there is a list of values from every pinger whose paths touched
      that hop
    - the mean of those measurements is calculated
    - the scores are classified using a 25% outlier percentile metric
    - the outliers are added to the database
    :return: list of problematic network addresses
    """
    current_f_name = inspect.currentframe().f_code.co_name
    network_segmentation = app.config['DEFAULT_NETWORK_SEGMENTATION']
    logger.debug("{}: Analyse_iteration called".format(current_f_name))
    pinger_iteration_t = db.session.query(
        models.MasterIterationPinger).filter_by(
            master_iteration_id=master_iteration_id, status="FINISHED")
    logger.debug("{}: Found: {} results".format(current_f_name,
                                                pinger_iteration_t.count()))
    s = db.session()
    node_data = []
    edges = set()
    for p_iter in pinger_iteration_t:
        try:
            # Results are stored as a Python-literal string; parse safely.
            json_data = ast.literal_eval(p_iter.result)
        except Exception as e:
            logger.error(
                "{}: Error loading data. Master pinger {} result:{} error:{}".
                format(current_f_name, p_iter.id, p_iter.result, str(e)))
            continue
        node_data_local = {}
        for ping_result in json_data:
            src = ping_result['pinger_address']
            dst = ping_result['ponger_address']
            packet_loss = ping_result['lost_percent']
            path = ping_result['path']
            logger.debug(
                "{}: Testing path:{} src:{} dst:{} lost_percent:{}".format(
                    current_f_name, path, src, dst, packet_loss))
            for i in range(len(path)):
                hop = path[i]
                # skip unknown hops reported as "?"
                if hop == "?":
                    continue
                a1, n1, bn1 = get_network(hop, netmask=network_segmentation)
                if i < len(path) - 1:
                    # record the edge between consecutive network hops
                    hop2 = path[i + 1]
                    a2, n2, bn2 = get_network(hop2,
                                              netmask=network_segmentation)
                    edges.add((bn1, bn2))
                if bn1 not in node_data_local.keys():
                    node_data_local[bn1] = {'samples': [packet_loss]}
                else:
                    node_data_local[bn1] = {
                        'samples':
                        node_data_local[bn1]['samples'] + [packet_loss]
                    }
        # calculate loss by voting (if there is a majority value) or by mean
        for k, v in node_data_local.items():
            # count the number of samples
            samples = v['samples']
            max_e = max(samples, key=samples.count)
            count_max_e = samples.count(max_e)
            if count_max_e > len(samples) / 2:
                v['loss'] = max_e
                logger.debug(
                    "{}: k:{} loss:{} using max value:{} count:{}".format(
                        current_f_name, k, v['loss'], max_e, count_max_e))
            else:
                # no majority: fall back to the arithmetic mean
                v['loss'] = sum(samples) / len(samples)
                logger.debug("{}: k:{} loss:{} using mean:{}".format(
                    current_f_name, k, v['loss'], samples))
            pass
        node_data.append(node_data_local)
    # after filtering the information locally for every trace
    # now the traces are joined and the information shared
    node_data_final = {}
    for nd in node_data:
        for k, v in nd.items():
            if k in node_data_final.keys():
                node_data_final[k] = {
                    'samples': node_data_final[k]['samples'] + [v['loss']]
                }
            else:
                node_data_final[k] = {'samples': [v['loss']]}
    for _k, v in node_data_final.items():
        v['mean'] = sum(v['samples']) / len(v['samples'])
    # worst (highest mean loss) first
    sorted_by_value = sorted(node_data_final.items(),
                             key=lambda kv: kv[1]['mean'],
                             reverse=True)
    logger.debug("{}: Node score:{}".format(current_f_name, sorted_by_value))
    problematic_nodes = []
    if len(sorted_by_value) > 0:
        values = [e[1]['mean'] for e in sorted_by_value]
        outliers_index = get_outliers(values)
        logger.debug("{}: outliers_index:{}".format(current_f_name,
                                                    outliers_index))
        for i in outliers_index:
            k = sorted_by_value[i][0]
            score = sorted_by_value[i][1]['mean']
            logger.debug("{}: problematic host:{} score:{}".format(
                current_f_name, k, score))
            s.add(
                models.MasterIterationResult(
                    master_iteration_id=master_iteration_id,
                    problematic_host=k,
                    score=score))
            problematic_nodes.append(k)
        s.commit()
    # generate the graph with the probabilities
    G = nx.DiGraph()
    for k, v in node_data_final.items():
        G.add_node(k, mean=v['mean'])
    G.add_edges_from([(k[0], k[1]) for k in list(edges)])
    g_json = json_graph.node_link_data(G)
    logger.debug("{}: Json graph:{}".format(current_f_name, g_json))
    master_it = db.session.query(
        models.MasterIteration).filter_by(id=master_iteration_id).first()
    if master_it:
        master_it.json_graph = json.dumps(g_json)
        s.commit()
    return problematic_nodes
def register_pinger_result():
    """
    Register a pinger's results in this master node.
    The results come in JSON format and relate to the iperf session
    performed on the pinger node.
    :return: JSON result
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: called".format(current_f_name))
    if not pipong_is_master():
        logger.info("{}: pipong_is_master:{}".format(current_f_name,
                                                     pipong_is_master()))
        return jsonify({
            'result': 'failure',
            'msg': 'this server is not a master'
        })
    data = request.get_json()
    ip_addr = request.remote_addr
    master_iteration_id = data['master_remote_id']
    pinger_port = data['local_port']
    pinger_result = data['result']
    # The sender must be a pinger previously registered with this master.
    registrered_t = db.session.query(models.RegisteredPingerNode).filter_by(
        address=ip_addr, api_port=pinger_port).first()
    if not registrered_t:
        logger.error(
            "{}: Error, the pinger node was not registered {}:{}".format(
                current_f_name, ip_addr, pinger_port))
        return jsonify({
            'result': 'failure',
            'msg': 'the pinger node was not registered'
        })
    pinger_iteration_t = db.session.query(
        models.MasterIterationPinger).filter_by(
            master_iteration_id=master_iteration_id,
            registered_pinger_id=registrered_t.id).first()
    if not pinger_iteration_t:
        logger.error("{}: Error, the master pinger iteration was not found. "
                     "Master iter:{} registered pinger:{}".format(
                         current_f_name, master_iteration_id,
                         registrered_t.id))
        return jsonify({
            'result': 'failure',
            'msg': 'the master pinger iteration was not found'
        })
    if pinger_iteration_t.status == "FINISHED":
        # Reject duplicate submissions for an already-finished iteration.
        logger.error("{}: Error, the pinger iteration was finished. "
                     "Pinger iteration:{} status:{}".format(
                         current_f_name, pinger_iteration_t.id,
                         pinger_iteration_t.status))
        return jsonify({
            'result': 'failure',
            'msg': ' the master pinger iteration is already finished'
        })
    s = db.session()
    # Tag every result entry with the reporting pinger's address.
    for e in pinger_result:
        e['pinger_address'] = ip_addr
    pinger_iteration_t.result = str(pinger_result)
    pinger_iteration_t.status = "FINISHED"
    s.commit()
    logger.info(
        "{}: Pinger result registrered. Pinger address:{} result: {}".format(
            current_f_name, ip_addr, str(pinger_result)))
    res = tasks.master_tasks.check_master_iteration_done(master_iteration_id)
    logger.debug(
        "{}: check_master_iteration_done: {}".format(
            current_f_name, res))
    if res['is_finished']:
        # big info message on the logs for easy visualization
        logger.info("{}: ################################".format(
            current_f_name))
        logger.info("{}: # ITERATION id:{} FINISHED".format(
            current_f_name, master_iteration_id))
        logger.info("{}: ################################".format(
            current_f_name))
        # analyse last iteration results
        tasks.master_tasks.analyse_iteration.apply_async(
            args=[master_iteration_id], kwargs={})
    return jsonify({'result': 'success'})
def wrapper(*args, **kwargs):
    # Disable expire-on-commit so ORM objects remain usable after a
    # commit, then delegate to the wrapped function.
    session = db.session()
    session.expire_on_commit = False
    return func(*args, **kwargs)
def create_iteration():
    """
    Create a new iteration in a master node.
    This runs at intervals on the master server; it refuses to start
    while the previous iteration is unfinished, then creates a new
    master iteration and starts a session on every registered pinger
    that has at least one ponger to query.
    :return: None when not master or when the previous iteration is
             still running
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.debug("{}: Create_iteration called".format(current_f_name))
    if not pipong_is_master():
        return None
    ponger_t = db.session.query(models.RegisteredPongerNode).all()
    logger.debug("{}: Ponger_t: {}".format(current_f_name, ponger_t))
    # Build the host map sent to each pinger: address -> API coordinates.
    ponger_list = {}
    for row in ponger_t:
        ponger_list[row.address] = {
            'api_port': row.api_port,
            'api_protocol': row.api_protocol
        }
    logger.debug("{}: Ponger list: {}".format(current_f_name, ponger_list))
    http_user = app.config['HTTP_AUTH_USER']
    http_pass = app.config['HTTP_AUTH_PASS']
    tracert_qty = app.config['MASTER_TRACERT_QTY']
    s = db.session()
    # get last iteration
    previous_master_iter = db.session.query(models.MasterIteration).order_by(
        desc(models.MasterIteration.created_date)).limit(1).first()
    if previous_master_iter:
        logger.debug("{}: Previous_master_iter: {} status:{}".format(
            current_f_name, previous_master_iter.id,
            previous_master_iter.status))
        if previous_master_iter.status != 'FINISHED':
            logger.error(
                "{}: Cannot start a new iteration while the previous one (id:{}) is not FINISHED (status:{})"
                .format(current_f_name, previous_master_iter.id,
                        previous_master_iter.status))
            return None
    # create a new master iteration
    master_ite_t = models.MasterIteration()
    s.add(master_ite_t)
    s.flush()
    s.commit()
    # start the pinger sessions
    pinger_t = db.session.query(models.RegisteredPingerNode).all()
    for pinger in pinger_t:
        plist = dict(ponger_list)
        # A pinger never queries itself.
        if pinger.address in plist.keys():
            del plist[pinger.address]
        if len(plist.keys()) > 0:
            s.add(
                models.MasterIterationPinger(
                    master_iteration_id=master_ite_t.id,
                    registered_pinger_id=pinger.id,
                    status="RUNNING"))
            s.commit()
        else:
            # don't call any pinger that has no pongers to query
            continue
        post_url = "http://{}:{}/api/v1.0/start_session".format(
            pinger.address, pinger.api_port)
        post_json = {
            "hosts": plist,
            "tracert_qty": tracert_qty,
            "master_iteration_id": master_ite_t.id
        }
        try:
            logger.debug("post url: {} json:{}".format(post_url, post_json))
            requests.post(post_url,
                          auth=requestHTTPAuth(http_user, http_pass),
                          json=post_json,
                          timeout=5)
        except Exception as e:
            # Best effort: a dead pinger must not abort the whole round.
            logger.error(
                "{}: Error calling create session on pinger {}:{} {}".format(
                    current_f_name, pinger.address, pinger.api_port, str(e)))
    logger.debug("{}: Create_iteration finished".format(current_f_name))
def Delete(id):
    """Remove the Booking with the given id and return to the dashboard."""
    query = Booking.query.filter_by(id=id)
    query.delete()
    db.session().commit()
    flash('Booking sucessfully Deleted', 'success')
    return redirect('/dash_index')
def perform_pipong_iteration_2(result, pinger_iteration_id):
    """ Second iteration of the discovery and monitor

    With the tracert information find the unique paths and use the source
    ports that produced them to create multiple iperf sessions (one per
    unique path), then fan the iperf client tasks out with a celery chord.

    :param result: previous result (chord callback payload; only logged here)
    :param pinger_iteration_id: the iteration id from the db
    :return: None
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: Perform_pipong_iteration_2".format(current_f_name))
    logger.info("{}: Input:{} pinger_iteration_id:{}".format(
        current_f_name, result, pinger_iteration_id))
    s = db.session()
    ponger_t = db.session.query(
        models.Ponger).filter_by(pinger_iteration_id=pinger_iteration_id)
    for pong in ponger_t:
        logger.info("{}: Iterating for pong id:{} {}".format(
            current_f_name, pong.id, pong.address))
        # Collect one tracert result per port registered for this ponger.
        tracert_t = []
        for pport in pong.ponger_port:
            tracert_t.append(
                db.session.query(models.Tracert).filter_by(
                    pinger_iteration_id=pinger_iteration_id,
                    ponger_port_id=pport.id).first())
        paths_port = []
        for row in tracert_t:
            try:
                logger.debug("{}: Task tracert id:{} status:{}".format(
                    current_f_name, row.id, row.status))
                json_res = json.loads(row.result)
                for src_port in json_res['flows']:
                    local_path = []
                    for flow in json_res['flows'][src_port]:
                        flow_name = flow['name']
                        # BUG FIX: original used `is not ""` (identity
                        # comparison with a literal, SyntaxWarning on
                        # CPython >= 3.8); equality is what is meant here.
                        if flow_name != "":
                            ip_addr_node = flow['received']['ip']['src']
                            local_path.append(ip_addr_node)
                        if flow['is_last'] or (len(local_path) > 0 and
                                               local_path[-1] == pong.address):
                            if len(local_path) > 0:
                                if local_path[-1] == pong.address:
                                    # delete the last element that contains
                                    # the target
                                    del local_path[-1]
                                paths_port.append({
                                    'ponger_port_id': row.ponger_port.id,
                                    'src_port': flow['sent']['udp']['sport'],
                                    'dst_port': flow['sent']['udp']['dport'],
                                    'path': local_path,
                                })
            except Exception as e:
                logger.error("{}: Error loading data from json result. "
                             "Tracert id: {} result:{} error:{}".format(
                                 current_f_name, row.id, row.result, str(e)))
                continue
        # Deduplicate the hop lists, then keep the first port record that
        # produced each unique path.
        unique_paths = [
            list(x) for x in set(tuple(x['path']) for x in paths_port)
        ]
        unique_path_port = []
        for path in unique_paths:
            for data in paths_port:
                data_path = data['path']
                if path == data_path:
                    unique_path_port.append(data)
                    break
        logger.info("{}: Unique_path_port for ponger: {} {}".format(
            current_f_name, pong.address, unique_path_port))
        for path_port in unique_path_port:
            logger.debug(
                "{}: Creating iperf pinger_iteration_id:{} ponger_port_id:{} ".
                format(current_f_name, pinger_iteration_id,
                       path_port['ponger_port_id']))
            iperf_n_t = models.Iperf(
                pinger_iteration_id=pinger_iteration_id,
                status='PENDING',
                ponger_port_id=path_port['ponger_port_id'],
                src_port=path_port['src_port'])
            s.add(iperf_n_t)
            s.flush()
            s.commit()
    # Fan out one iperf client task per ponger of this iteration.
    task_list = []
    ponger_t = db.session.query(
        models.Ponger).filter_by(pinger_iteration_id=pinger_iteration_id)
    for pong in ponger_t:
        logger.debug("{}: Task creating iperf tasks ponger_id:{}".format(
            current_f_name, pong.id))
        task_list.append(do_iperf3_client.s(pong.id))
    iter_t = db.session.query(
        models.PingerIteration).filter_by(id=pinger_iteration_id).first()
    if iter_t:
        iter_t.status = "RUNNING_IPERF"
        s.commit()
    # When all iperf tasks finish, the chord callback moves to step 3.
    chord(task_list)(perform_pipong_iteration_3.s(pinger_iteration_id))
def perform_pipong_iteration_1(pinger_iteration_id):
    """ First iteration of the discovery and monitor

    Asks every ponger of the iteration to start an iperf server, records
    the returned port plus a source-port range as a PongerPort row, creates
    all tracert configurations on the DB, and performs the tracert tasks
    asynchronously. When all tasks are finished a celery chord callback
    invokes the second iteration step.

    :param pinger_iteration_id: the iteration id from the db
    :return: None (returns early when the iteration is missing or not in
        CREATED status)
    """
    current_f_name = inspect.currentframe().f_code.co_name
    logger.info("{}: Perform_pipong_iteration_1".format(current_f_name))
    iter_t = db.session.query(
        models.PingerIteration).filter_by(id=pinger_iteration_id).first()
    # Guard: the iteration row must exist and still be in CREATED state.
    if iter_t is None:
        logger.error("{}: Iteration not found with ID: {}".format(
            current_f_name, pinger_iteration_id))
        return
    if iter_t.status != "CREATED":
        logger.error(
            "{}: Iteration ID:{} is not with in CREATED status: {}".format(
                current_f_name, pinger_iteration_id, iter_t.status))
        return
    s = db.session()
    # Mark RUNNING immediately so a concurrent invocation bails out above.
    iter_t.status = "RUNNING"
    s.flush()
    s.commit()
    # Each ponger gets a disjoint source-port range starting here.
    src_port_start = 40000
    for ponger in iter_t.ponger:
        api_port = ""
        if ponger.api_port != "":
            api_port = ":" + str(ponger.api_port)
        # NOTE(review): api_protocol presumably includes the "://" suffix
        # (e.g. "http://") since nothing is inserted before the address —
        # confirm against the RegisteredPongerNode data.
        post_url = "{}{}{}/api/v1.0/iperf/server".format(
            ponger.api_protocol, ponger.address, api_port)
        logger.debug("{}: post_url: {}".format(current_f_name, post_url))
        # Best-effort: a ponger that fails to start its server is logged
        # and skipped; the loop continues with the remaining pongers.
        try:
            req_res = requests.post(post_url,
                                    auth=requestHTTPAuth(
                                        app.config['HTTP_AUTH_USER'],
                                        app.config['HTTP_AUTH_PASS']),
                                    timeout=10)
        except Exception as e:
            logger.error("{}: Error requesting servers: {}".format(
                current_f_name, str(e)))
            continue
        if req_res.status_code != 200:
            logger.error(
                "{}: Error creating servers: {} returned status: {}".format(
                    current_f_name, post_url, req_res.status_code))
            continue
        json_data = req_res.json()
        # The ponger must answer {"result": "success", "port": <iperf port>}.
        if 'port' not in json_data or 'result' not in json_data or \
                json_data['result'] != 'success':
            logger.error("{}: Json data invalid: {}".format(
                current_f_name, json_data))
            continue
        logger.debug("{}: Host:{}{}{} Json data: {}".format(
            current_f_name, ponger.api_protocol, ponger.address, api_port,
            json_data))
        dst_port = json_data['port']
        # register the tracerts
        # Reserve tracert_qty source ports for this ponger, then advance the
        # window so ranges never overlap between pongers.
        src_port_end = src_port_start + iter_t.tracert_qty
        ponger_port_t = models.PongerPort(ponger_id=ponger.id,
                                          dst_port=dst_port,
                                          src_port_max=src_port_end,
                                          src_port_min=src_port_start)
        s.add(ponger_port_t)
        s.flush()
        src_port_start = src_port_end + 1
        logger.debug(
            "{}: Creating tracert pinger_iteration_id:{} ponger_port_id:{} ".
            format(current_f_name, pinger_iteration_id, ponger_port_t.id))
        tracert_t = models.Tracert(pinger_iteration_id=pinger_iteration_id,
                                   status='PENDING',
                                   ponger_port_id=ponger_port_t.id)
        s.add(tracert_t)
        s.flush()
    # Build one async tracert task per PENDING Tracert row just created.
    task_list = []
    tracert_qt = db.session.query(models.Tracert).filter_by(
        pinger_iteration_id=pinger_iteration_id, status='PENDING')
    for row in tracert_qt:
        logger.debug("{}: Task creating tracert tasks tracert_id:{}".format(
            current_f_name, row.id))
        task_list.append(do_dublin_tracert.s(row.id))
    iter_t.status = "RUNNING_TRACEROUTE"
    s.flush()
    s.commit()
    # run async tasks with callback
    chord(task_list)(perform_pipong_iteration_2.s(pinger_iteration_id))
def un_auth(id):
    """Mark a booking as not authorised and return to the dashboard.

    :param id: primary key of the ``Booking`` row to update
    :return: redirect response to the dashboard index page
    """
    # first_or_404 replaces first(): the original raised AttributeError
    # (HTTP 500) on a missing id; now the client gets a clean 404 instead,
    # matching the get_or_404 usage elsewhere in this file.
    booking = Booking.query.filter_by(id=id).first_or_404()
    booking.bauth = 'No'
    db.session().commit()
    return redirect('/dash_index')