def probe_cache(cid, fpath='probe_cache/'):
    """Return metadata for RIPE Atlas probe `cid`, using an on-disk cache.

    Probes are sharded into JSON files of 1000 ids each
    (`pc<cid/1000>.json`).  On a cache miss the probe is fetched from the
    Atlas API, the shard is rewritten, and the id is appended to the
    global cached-probe list on disk.

    :param cid: integer probe id
    :param fpath: directory holding the cache shards
    :returns: dict with keys 'country', 'asn', 'prefix', 'coords'
    :raises Exception: if the probe is neither cached nor fetchable
    """
    global cached_probes
    global fcached_probes
    # NOTE: integer division under Python 2 — shard index for this id.
    fnum = cid / 1000
    fpath = format_dirpath(fpath)
    fname = fpath+'pc'+str(fnum)+'.json'
    ret = None
    scid = str(cid)
    # Start from the existing shard (if any) so a miss does not discard it.
    pc = dict()
    if isfile(fname):
        with open(fname, 'r+') as f:
            pc = json.load(f)
        if scid in pc:
            ret = pc[scid]
    if ret is None:
        # BUG FIX: `pc` was previously reset to an empty dict here, so
        # rewriting the shard below clobbered every other cached probe in it.
        try:
            tmp = Probe(id=cid)
            pc[scid] = {'country': tmp.country_code,
                        'asn': tmp.asn_v4,
                        'prefix': tmp.prefix_v4,
                        'coords': tmp.geometry['coordinates']}
            ret = pc[scid]
        except Exception as e:
            # Best-effort: log the failure; the final check below raises.
            print(e)
            print('failed to get '+scid)
        if scid in pc:
            with open(fname, 'w+') as f:
                json.dump(pc, f)
            # BUG FIX: add the id *before* dumping, so the on-disk list
            # actually includes the probe fetched in this call.
            cached_probes.add(cid)
            with open(fcached_probes, 'w+') as f:
                json.dump(list(cached_probes), f)
    if ret is None:
        raise Exception('failed to get client info')
    else:
        return ret
# Packet-capture uploader preamble: resolve the project directory layout,
# read collector host/port and cycle time from the CLI, and load the
# port -> location mapping produced by an earlier setup step.
import socket
import sys
import struct
import json
from helpers import mydir
from helpers import format_dirpath
from helpers import get_myip
from helpers import fix_ownership
import threading
from daemon2x import daemon
import time
import os
import urllib2

# Directory layout rooted one level above this script.
topdir = format_dirpath(mydir() + "../")
datadir = format_dirpath(topdir + "data/")
statedir = format_dirpath(topdir + "state/")
packetsdir = format_dirpath(datadir + "packets/")

# CLI: argv[2] = cycle time (seconds), argv[3] = data host, argv[4] = data port.
# NOTE(review): argv[1] is not read here — presumably consumed by a daemon
# start/stop wrapper; confirm against the launcher.
datahost = sys.argv[3]
dataport = sys.argv[4]
cycle_time = float(sys.argv[2])
# Wall-clock deadline for the first dump cycle.
dump_time = time.time() + cycle_time
myip = get_myip()

# Mapping from listening port to location label, written by setup tooling.
loc_port_map = dict()
with open(statedir + "port_loc_mapping.json", "r+") as f:
    loc_port_map = json.load(f)
#Packet sniffer in python #For Linux - Sniffs all incoming and outgoing packets :) #Silver Moon ([email protected]) from collections import defaultdict import SocketServer, SimpleHTTPServer, BaseHTTPServer import sys from helpers import mydir from helpers import format_dirpath from helpers import get_myip from helpers import fix_ownership from daemon2x import daemon import os topdir = format_dirpath(mydir() + "../") statedir = format_dirpath(topdir + "state/") class req_handler(SimpleHTTPServer.SimpleHTTPRequestHandler): pass class ThreadingHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer): pass class mydaemon(daemon): def __init__(self, inport,
import meas_handler ################################################################## # LOGGING ################################################################## import logging import logging.config logging.config.fileConfig('logging.conf', disable_existing_loggers=False) # create logger logger = logging.getLogger(__name__) logger.debug(__name__ + "logger loaded") ################### SET UP FILE I/O ###################### topdir = format_dirpath(mydir() + "../") supportdir = format_dirpath(topdir + "support_files/") hardataf = format_dirpath(topdir + 'data/parse_hars/') + 'getlists.json' label = 'short_query_experiment' datadir = format_dirpath(topdir + "data/" + label) platform = "ripe_atlas" ################### load domain list ###################### with open(hardataf, 'r+') as f: hardata = json.load(f) sites = list(hardata.keys()) # number of sites that include dom site_count = defaultdict(int) for site in sites: for dom in hardata[site]['gets']:
#!/usr/bin/env python import sys, os, time, atexit from signal import SIGTERM from helpers import mydir from helpers import format_dirpath topdir = format_dirpath(mydir()+"../") statedir = format_dirpath(topdir+"state/daemon/") class daemon: """ A generic daemon class. Usage: subclass the Daemon class and override the run() method """ def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'): self.stdin = stdin self.stdout = stdout self.stderr = stderr self.pidfile = pidfile def daemonize(self): """ do the UNIX double-fork magic, see Stevens' "Advanced Programming in the UNIX Environment" for details (ISBN 0201563177) http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16 """ try: pid = os.fork() if pid > 0: # exit first parent
# Crawler setup: load the first 10k Alexa domains and start a Firefox
# session via the wanderful browser manager.
from wanderful import browser
from helpers import format_dirpath, mydir, listfiles
import csv
from bs4 import BeautifulSoup
import pyautogui as pag
from time import sleep
import signal
import unicodedata


class ToutEx(Exception):
    """Raised by the SIGALRM handler to abort an operation on timeout."""
    pass


def touthand(signum, frame):
    """Signal handler: convert an alarm signal into a ToutEx exception."""
    # BUG FIX: previously `raise toutex` — an undefined lowercase name —
    # which produced a NameError instead of the intended timeout exception.
    raise ToutEx()


topdir = format_dirpath(mydir()+"../")
supportdir = format_dirpath(topdir+"support_files/")
hardir = "/home/marc/Downloads/"

# Read the top-1M CSV (rank, domain) and keep the first 10,000 domains.
doms = list()
with open(supportdir+"top-1m.csv", 'r+') as f:
    reader = csv.reader(f)
    for line in reader:
        doms.append(line[1])
        if len(doms) > 9999:
            break

ff = browser.firefox_manager(headless=False)
def save_packet_data(post):
    """Persist a POSTed packet payload to a per-client JSON file.

    `post` is a dict carrying at least 'c_ip' (the client IP, used to pick
    the output directory) and 'data' (the JSON-serializable payload).
    """
    # One directory per client IP; file named by the capture timestamp.
    node_dir = format_dirpath(packetsdir + "ip" + post['c_ip'])
    out_path = node_dir + "t" + str(time.time()) + ".json"
    with open(out_path, 'w+') as out:
        json.dump(post['data'], out)
# LOGGING ################################################################## import logging import logging.config logging.config.fileConfig('logging.conf', disable_existing_loggers=False) # create logger logger = logging.getLogger(__name__) logger.debug(__name__+"logger loaded") ################################################################## # GLOBALS ################################################################## topdir = format_dirpath(mydir()+"../") supportdir = format_dirpath(topdir+"support_files/") hardir = format_dirpath(topdir+'data/har0') outdir = format_dirpath(topdir+'data/parse_hars/') ################################################################## # CODE ################################################################## harfiles = listfiles(hardir) getlists = dict() getcounts = list() filters = defaultdict(int) for ind, hf in enumerate(harfiles): try: