def trackEvents(start_datetime, end_datetime, events):
    alert_events = []
    track_list = tracklist.TrackList()
    track_list.refreshTrackList(end_datetime)
    for event in events:
        # Avoid web access from NAT (Horizontal Scan)
        if iplists.allInternal([event[1]]) and event[4] in ["80", "443"] and \
                event[6] == "TXFT" and event[5] < 10000:
            continue
        if event[5] > rules[event[6]]["suprathreshold"]:
            track_list.deleteTrackListItem(event)
            alert_events.append(event)
        else:
            # Avoid normal DNS query (maybe NAT)
            if event[6] == "TXTT" and event[4] == "53":
                continue
            start_time, event = track_list.updateTrackList(end_datetime, event)
            if event is not None:
                output_track = output.Output(start_time, end_datetime)
                #output_track.writeAnomalyFile([event])
                output_track.writeAnomalyDB([event])
    # Output
    output_alert = output.Output(start_datetime, end_datetime)
    #output_alert.writeAnomalyFile(alert_events)
    output_alert.writeAnomalyDB(alert_events)
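# For reference, the event tuple layout assumed by trackEvents above, inferred
# only from the indices it reads (a sketch, not confirmed by the source):
# event[1] is the source IP, event[4] the destination port as a string,
# event[5] the measured value checked against the rule threshold, and
# event[6] the rule key (e.g. "TXFT", "TXTT"). Other positions are unknown
# and filled with placeholders here.
example_event = (None, "10.0.0.7", None, None, "80", 12000, "TXFT")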
def monitorBlacklistIP(start_datetime, end_datetime):
    # Set up the filter rules (blacklist IP)
    # Well Known Ports (0-1023)
    # Registered Ports (1024-49151)
    # Dynamic or Private Ports (49152-65535)
    # We treat ports 0-10000 as system-reserved ports
    blackConn_list = []
    nf = nflow.NetFlow()
    plain = nf.readLog(start_datetime, end_datetime, minute_interval,
                       options=["dst ip in [%s] and src port > 10000"
                                % ",".join(black_list)],
                       mode="csv")
    for e in nf.parseLogLine(plain, mode="csv"):
        blackConn_list.append({
            'src_ip': e['sa'],
            'dst_ip': e['da'],
            'src_port': e['sp'],
            'dst_port': e['dp'],
            'date': e['te']
        })
    # Output to DB
    output_handler = output.Output(start_datetime, end_datetime)
    output_handler.writeBlackList(blackConn_list)
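# For clarity, the nfdump-style filter built above expands as follows;
# the blacklist addresses here are made up for illustration.
example_black_list = ["203.0.113.5", "198.51.100.9"]
example_filter = "dst ip in [%s] and src port > 10000" % ",".join(example_black_list)
# -> "dst ip in [203.0.113.5,198.51.100.9] and src port > 10000"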
def parcours(self):
    pos = 0
    pos = self.version.parcours(pos, self)
    pos = self.inputCount.parcours(pos, self)
    for i in range(self.inputCount.decimal):
        self.input = input.Input()
        pos = self.input.txid.parcours(pos, self)
        pos = self.input.vout.parcours(pos, self)
        d = pos
        pos = self.input.scriptSigSize.parcours(pos, self)
        self.input.scriptSig.length = int(self.raw[d:pos], 16)
        pos = self.input.scriptSig.parcours(pos, self)
        pos = self.input.sequence.parcours(pos, self)
        self.inputs.append(self.input)
    if pos != len(self.raw):
        pos = self.outputCount.parcours(pos, self)
        for i in range(self.outputCount.decimal):
            self.output = output.Output()
            pos = self.output.value.parcours(pos, self)
            d = pos
            pos = self.output.scriptPubKeySize.parcours(pos, self)
            self.output.scriptPubKey.length = int(self.raw[d:pos], 16)
            pos = self.output.scriptPubKey.parcours(pos, self)
            self.outputs.append(self.output)
    if pos != len(self.raw):
        pos = self.locktime.parcours(pos, self)
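# The d/pos bookkeeping in parcours captures each size field as a hex
# substring and decodes it with int(..., 16); a standalone sketch of that
# idiom with made-up values:
raw = "6a" + "00" * 106  # size byte 0x6a (106) followed by a dummy payload
d, pos = 0, 2            # cursor before and after the one-byte size field
assert int(raw[d:pos], 16) == 106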
def __init__(self):
    # Instantiate the manager, downloader, parser, and output writer
    self.manager = manager.Manager()
    self.download = download.Download()
    self.parser = parser.Parser()
    self.output = output.Output()
def getDconnPort(start_datetime, end_datetime, minute_interval):
    DCONN = []
    ignore_port = []
    ignore_port.extend(ACK)
    outputer = output.Output(start_datetime, end_datetime)
    nf = nflow.NetFlow()
    # Get the set of dst ports where (srcip, dstip, dstport) is over the threshold
    plain = nf.readLog(start_datetime, end_datetime, minute_interval,
                       options=["-A", "srcip,dstip,dstport", "-s",
                                "record/flows", "-n", "20", "-N"],
                       mode="fmt:%sa,%da,%dp,%fl")
    for line in nf.parseLogLine(plain, mode="fmt:%sa,%da,%dp,%fl"):
        if int(line["fl"]) > 1000:
            ignore_port.append(int(line["dp"]))
    # Get the (dstip, dstport) pairs, excluding well-known ports and the ports found above
    plain = nf.readLog(start_datetime, end_datetime, minute_interval,
                       options=["-A", "dstip,dstport", "-s", "record", "-n",
                                "20", "-N", "not port in %s" % ignore_port],
                       mode="fmt:%da,%dp,%fl")
    for line in nf.parseLogLine(plain, mode="fmt:%da,%dp,%fl"):
        if int(line["fl"]) > 1000:
            DCONN.append(int(line["dp"]))
            outputer.writeDconn(line["da"], line["dp"], line["fl"])
    return DCONN
def input(self, s):
    '''Handle user input and return output.
    @type s: str
    @rtype: Output'''
    s = self.cleanInput(s)
    o = output.Output()
    if s == 'pdb' and self.debug:
        self.debugger()
        o.write('Debugger closed', False)
        return o
    s = lib.Sentence(s.encode())
    self.actor.input(s, o)
    if self.transcribe:
        if not hasattr(self, 'transcription'):
            import datetime
            self.transcription = open(
                "%s-%s.txt" % (self.name,
                               datetime.datetime.now().strftime(r"%Y-%m-%d %H:%M")),
                'w')
        self.transcription.write(self.actor.state.request + ' ' + s.s + '\n')
        self.transcription.write('\n'.join(o.lines) + '\n')
    return o
def __init__(self, vis, dt, writecsv, disturbanceSize):
    if vis == "mpl":
        import visualizer.visMatplotlib
        self.visualizer = visualizer.visMatplotlib.VisMatplotlib()
    elif vis == "vispy":
        import visualizer.visVispy
        self.visualizer = visualizer.visVispy.VisVispy()
    elif vis == "null":
        import visualizer.visNull
        self.visualizer = visualizer.visNull.VisNull()
    else:
        raise Exception("Unknown visualization backend: {}".format(vis))
    self.t = 0.0
    self.dt = dt
    # Since our integration/animation ticks are always the fixed duration
    # dt, any call to sleep() with a non-multiple of dt will have some
    # "leftover" time. Keep track of it here and add extra ticks in future.
    self.sleepResidual = 0.0
    self.crazyflies = []
    self.disturbanceSize = disturbanceSize
    if writecsv:
        import output
        self.output = output.Output()
    else:
        self.output = None
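# A sketch of the sleep-residual accounting described in the comment above:
# a sleep() request that is not a multiple of dt leaves leftover time that
# gets credited to future ticks. The numbers are hypothetical and this is
# not the class's actual implementation.
dt = 0.1
sleepResidual = 0.0
requested = 0.25  # sleep request, not a multiple of dt
ticks, sleepResidual = divmod(requested + sleepResidual, dt)
print(int(ticks), round(sleepResidual, 10))  # 2 full ticks, 0.05 s carried over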
def test_build_non_anki_note_html(self):
    out = output.Output()
    long_text = " ".join(["a" for _ in range(5001)])
    html = out._build_non_anki_note_html(long_text)
    self.assertIsNotNone(html)
    self.assertTrue(len(html) > 0)
    self.assertIn("Text was cut", html)
def __init__(self, dcm_id, topo, node, dip, inflows, pid, silent=False):
    """Creates a new Dcm object.

    Args:
        dcm_id: Unique numerical ID of a Dcm.
        topo: The Topology to which the Dcm belongs.
        node: The name of the Bridge running on the same host as this Dcm,
            or the last Bridge before this host.
        dip: This Dcm's IP.
        inflows: List of TCP flows coming into this Dcm, each a tuple
            (dport, sip, sport).
        pid: This Dcm's process ID.
        silent: Suppresses the output, if True.

    Returns:
        An instance of the Dcm class.
    """
    self.__o = output.Output(silent)
    self.__id = dcm_id
    self.__topo = topo
    # TODO: add validation for node naming
    self.__node = node
    self.__dip = dip
    self.__inflows = inflows
    self.__pid = pid
def test_most_common_words(self):
    out = output.Output()
    lorem_ipsum = (
        "a testcase is created by subclassing unittest.TestCase. The three "
        "individual tests are defined with methods whose names start with "
        "the letters test. This naming convention informs the test runner "
        "about which methods represent tests. The crux of each test is a "
        "call to assertEqual() to check for an expected result; "
        "assertTrue() or assertFalse() to verify a condition; or "
        "assertRaises() to verify that a specific exception gets raised. "
        "These methods are used instead of the assert statement so the "
        "test runner can accumulate all test results and produce a report. "
        "The setUp() and tearDown() methods allow you to define "
        "instructions that will be executed before and after each test "
        "method. They are covered in more detail in the section Organizing "
        "test code. The final block shows a simple way to run the tests. "
        "unittest.main() provides a command-line interface to the test "
        "script. When run from the command line, the above script produces "
        "an output that looks like this:")
    # The method should return non-empty HTML for regular text and for all
    # degenerate inputs alike.
    for text in (lorem_ipsum, None, "", " ", "a a a", "\n"):
        words_html = out._mostCommonWords(text)
        self.assertIsNotNone(words_html)
        self.assertTrue(len(words_html) > 0)
def __init__(self, tx):
    self.version = version.Version(tx)
    self.input_count = input_count.InputCount(tx)
    if self.input_count.value == 0:
        # An input count of zero is the segwit marker; the real input
        # count follows the marker/flag bytes.
        self.segwit = segwit.Segwit(tx)
        self.input_count = input_count.InputCount(tx)
    else:
        self.segwit = None
    self.inputs = []
    for i in range(self.input_count.value):
        self.inputs.append(input.Input(tx))
    self.output_count = output_count.OutputCount(tx)
    self.outputs = []
    for i in range(self.output_count.value):
        self.outputs.append(output.Output(tx))
    if self.segwit is not None:
        # Note: the witness list is re-created for every input, so only
        # the last input's witnesses are retained.
        for i in range(self.input_count.value):
            self.witness_count = witness_count.WitnessCount(tx)
            self.witnesses = []
            if self.witness_count.value != 0:
                for i in range(self.witness_count.value):
                    self.witnesses.append(witness.Witness(tx))
            else:
                self.witnesses.append(None)
    self.locktime = locktime.Locktime(tx)
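# A hypothetical usage sketch; the enclosing class name "Transaction" is an
# assumption (the source shows only the constructor), as is the idea that
# each component class consumes bytes from the shared tx object:
# parsed = Transaction(tx)
# print(len(parsed.inputs), len(parsed.outputs), parsed.segwit is not None)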
def __init__(self, file, output_path, timedelta=None):
    self.file = file
    self.output_path = output_path
    self.output = output.Output(output_path, "|")
    self.parser_account = client.AccountParser(timedelta)
    self.parser_account_info = client.PagesFacebook(timedelta)
    self.parser_keywords = client.GraphParser(timedelta)
def main(): """主程序""" file = sys.argv[2] #从命令行中获取参数文件名 command = sys.argv[1] file_name = file if command == '-x': Window() op.Output(command, file_name)
def setUp(self):
    self.node_list = []
    self.edge_list = []
    self.gv_input = gvInput.GvInput(self.node_list, self.edge_list,
                                    "../assets/pallete.xml")
    self.gv_input.get_data_from_blivet()
    for n in self.node_list:
        n.prepare()
    self.output = output.Output(self.node_list, self.edge_list)
def __init__(self, name, rest_client, ip=None, port=16640, isDpdk=True, silent=True):
    """Creates a new OvsdbClient object.

    Args:
        name: Name of this OVSDB node. Must follow the convention defined
            in the of module.
        rest_client: RestClient object to access OpenDaylight's northbound
            REST interface.
        ip: IP address of this OVSDB server.
        port: TCP port of this OVSDB server.
        isDpdk: If True, DPDK acceleration will be used for the bridges to
            be created with this server.
        silent: Suppresses output, if True.

    Returns:
        An instance of the OvsdbClient class.
    """
    self.__o = output.Output(silent)
    self.__r = rest_client
    self.__id = name
    self.__ovsdb_id = 'ovsdb://' + name
    self.__ip = ip
    self.__port = port
    self.__ovsdb_oper_path = None
    self.__ovsdb_oper_id = None
    self.__node = {'network-topology:node': [{'node-id': self.__ovsdb_id}]}
    self.__bridge_path = None
    self.__bridge_id = None
    self.__of_id = None
    self.__ovsdb_path = \
        'network-topology:network-topology/topology/ovsdb:1/node/' + \
        self.__ovsdb_id.replace('/', '%2F')
    # Check if a node of this name already exists in the datastore.
    try:
        self.__node['network-topology:node'] = self.__r.conf_ds_get(self.__ovsdb_path)['node']
        self.__ip = self.__node['network-topology:node'][0]['ovsdb:connection-info']['remote-ip']
        self.__port = self.__node['network-topology:node'][0]['ovsdb:connection-info']['remote-port']
    except (rest.RestError, KeyError):
        pass
    # Check if the connection is already active.
    if self.__ip:
        self.__node['network-topology:node'][0]['connection-info'] = {
            'ovsdb:remote-port': port,
            'ovsdb:remote-ip': ip
        }
    # Initialize DPDK configuration, if requested.
    if isDpdk:
        self.__node['network-topology:node'][0]['ovsdb:openvswitch-other-configs'] = [
            {'ovsdb:other-config-key': 'n-dpdk-rxqs',
             'ovsdb:other-config-value': 8},
            {'ovsdb:other-config-key': 'pmd-cpu-mask',
             'ovsdb:other-config-value': '0xfffe'}]
    self.__retries = 10
    self.__sleep_time = 10
def main():
    if debug: print("\nMODULE TANFIRE")
    if debug: print("FUNCTION main")
    stats = {
        'computers_total': 0, 'computers_hashes': 0, 'total': 0,
        'excluded': 0, 'unique': 0, 'wf_cache': 0, 'wf_new': 0,
        'wf_uploaded': 0, 'vt_cache': 0, 'vt_new': 0, 'vt_uploaded': 0,
        'malware': 0
    }
    hashes_list = []
    hashes_unique = {}
    wf_hashes = {}
    wf_stats = {}
    vt_hashes = {}
    vt_stats = {}

    # Connect to Tanium and import the list of new hashes in the environment.
    user, password = Credentials()
    tanium_handler = Tanium_Connect(user, password)
    hashes_list, hashes_unique, stats = Import_Index(tanium_handler, stats)
    print('computers total: ' + str(stats['computers_total']))
    print('computers hashes: ' + str(stats['computers_hashes']))
    print('hashes total: ' + str(stats['total']))
    print('hashes excluded: ' + str(stats['excluded']))
    print('hashes unique: ' + str(stats['unique']))

    # Check the dictionary of unique hashes against the WildFire cache, then
    # directly, and upload if necessary.
    if config.get('config', 'wildfire') == 'yes':
        wf_hashes, wf_stats = wildfire.WildFire(hashes_list, hashes_unique, tanium_handler)
        stats.update(wf_stats)

    # Check the dictionary of unique hashes against the VirusTotal cache, then
    # directly if necessary.
    if config.get('config', 'virustotal') == 'yes':
        vt_hashes, vt_stats = virustotal.VirusTotal(hashes_list, hashes_unique)
        stats.update(vt_stats)

    # Update the list of hashes with the results of the WildFire and VirusTotal checks.
    hashes_list = Check(hashes_list, wf_hashes, vt_hashes)

    # Output results.
    output.Output(hashes_list, stats)
    if debug:
        print("\n----------------------------------END----------------------------------------------\n\n\n")
def __init__(self):
    object.__init__(self)
    self.headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'
    }
    self.outputer = output.Output()
    self.session = requests.Session()
    self.lister = list.List(self.session)
    self.session.headers.update(self.headers)
def spamDetect(start_datetime, end_datetime):
    output_handler = output.Output(start_datetime)
    nf = nflow.NetFlow()
    plain = nf.readLog(start_datetime, end_datetime, 1,
                       options=["-N", "-s", "srcip/flows", "dst port 25"])
    # Flag sources with more than 1000 flows to port 25 (SMTP) in the window.
    for line in nf.parseLogLine(plain):
        if int(line["fl"]) > 1000:
            output_handler.writeSpam(line["val"], int(line["fl"]))
def getIntro(self):
    '''Create a new output object and write the game intro to it.
    @rtype: Output'''
    o = output.Output()
    try:
        self.writeIntro(o)
    except OutputClosed:
        pass
    return o
def sigterm_handler(signum, frame):
    '''Called when the program is terminated with "systemctl stop ..." or
    with "shutdown -r now". See shdn.py for the shutdown/reboot button,
    which runs either "shutdown -r now" or "shutdown -h now" and sets a
    nice color for the status LED.
    '''
    logger.info("program received SIGTERM (signum={})".format(signum))
    # Do not switch off the status LED; it is set in shdn.py.
    # led3c.set_led(led3c.OFF)
    o = output.Output()
    o.all_off()
    import os
    os._exit(0)
def __init__(self):
    self.logger = logging.getLogger(conf.TSGRAIN_LOGGER)
    led3c.set_led(led3c.GREEN)
    ticks.start()
    self.state = MAN_STATE
    self.out = output.Output()
    self.mc = manctrl.ManCtrl(ticks=ticks)  # singleton
    self.pb = pbutton.PButtons()
    self.pb.subscribe(self.mc.pb_pressed)
    self.ac = autoctrl.AutoCtrl()
    self.pb.subscribe(self.ac.autooff_btn_callback)
    self.ac.register_auto_on_hdl(self.auto_on_hdl)
    self.ac.register_auto_off_hdl(self.auto_off_hdl)
    self.logger.info("Controller instance created")
def aflowDetect(start_datetime, end_datetime):
    output_handler = output.Output(start_datetime)
    nf = nflow.NetFlow()
    plain = nf.readLog(start_datetime, end_datetime, 1,
                       options=["-N", "-A", "proto,srcip,dstip,dstport",
                                "-s", "record/bytes", "-L", "5G",
                                "inet and proto udp and not net 10.0.0.0/8"
                                " and not ip 140.116.49.6 and"
                                " not port in [3389]"],
                       mode="fmt:%sa,%da,%dp,%byt,%fl")
    for line in nf.parseLogLine(plain, mode="fmt:%sa,%da,%dp,%byt,%fl"):
        if int(line["fl"]) > 5:
            output_handler.writeAflow(start_datetime, line["sa"], line["da"],
                                      line["dp"], line["byt"], line["fl"])
def create_graph(self, graph_abs_path, graph_name):
    """Create a visualized graph using data extracted from blivet.

    :param str graph_abs_path: absolute path to the directory where the
        graph should be created
    :param str graph_name: filename of the graph that is being created
    """
    if not os.path.exists(graph_abs_path):
        os.mkdir(graph_abs_path)
    gv_input = gvInput.GvInput(self.node_list, self.edge_list,
                               self.palletePath, self.blivet)
    gv_input.get_data_from_blivet()
    self.prepare_nodes()
    out = output.Output(self.node_list, self.edge_list)
    out.create_svg(graph_abs_path, graph_name)
def __init__(self, vis, dt, writecsv):
    if vis == "mpl":
        import visualizer.visMatplotlib
        self.visualizer = visualizer.visMatplotlib.VisMatplotlib()
    elif vis == "vispy":
        import visualizer.visVispy
        self.visualizer = visualizer.visVispy.VisVispy()
    else:
        raise Exception("Unknown visualization backend: {}".format(vis))
    self.t = 0.0
    self.dt = dt
    self.crazyflies = []
    if writecsv:
        import output
        self.output = output.Output()
    else:
        self.output = None
def __init__(self, name, rest_client, silent=False):
    """Creates a new Topology object based on the current state of the
    datastore.

    Args:
        name: Name of the topology.
        rest_client: RestClient object to access OpenDaylight's northbound
            REST interface.
        silent: Suppresses output, if True.

    Returns:
        An instance of the Topology class.
    """
    self.__o = output.Output(silent)
    self.__r = rest_client
    self.__id = name
    self.__bridges = {}
    self.__update()
def monitorUnusedIP(start_datetime, end_datetime):
    monIP_list = []
    nf = nflow.NetFlow()
    plain = nf.readLog(start_datetime, end_datetime, minute_interval,
                       options=["dst ip in [%s]" % ",".join(unused_list)],
                       mode="csv")
    for e in nf.parseLogLine(plain, mode="csv"):
        monIP_list.append({
            'source': e['sa'],
            'srcport': e['sp'],
            'target': e['da'],
            'dstport': e['dp'],
            'date': e['te']
        })
    # Output to DB
    output_handler = output.Output(start_datetime, end_datetime)
    output_handler.writeMonIPLog(monIP_list)
def playing(self):
    print("Game start")
    game = ff.Game()
    player1 = ff.Player(game, [])
    player2 = ff.Player(game, [])
    player_list = [player1, player2]
    out = op.Output(game)
    print("First Hand")
    print(game.evaluate_hand())
    while game.get_current_number() != 999:
        for player in player_list:
            player.turn()
            out.every_turn()
            if game.get_current_number() >= 999:
                break
    print(game.HistoryOfNumber)
    print(game.TurnsOfDoubt)
def scanDetect(start_datetime, end_datetime):
    """We only detect scans on the following ports: 22, 23, 139, 445, 3389."""
    port_statistics = {}
    output_handler = output.Output(start_datetime)
    nf = nflow.NetFlow()
    plain = nf.readLog(start_datetime, end_datetime, 1,
                       options=["-N", "-s", "record/flows", "-n", "0",
                                "-A", "srcip,dstip,dstport",
                                "inet and not net 10.0.0.0/8 and "
                                " port in [22,23,139,445,3389]"],
                       mode="fmt:%sa,%dp")
    for line in nf.parseLogLine(plain, mode="fmt:%sa,%dp"):
        # Count occurrences of each (source IP, destination port) pair; each
        # aggregated record corresponds to one distinct destination host.
        item = (line["sa"], line["dp"])
        port_statistics[item] = port_statistics.get(item, 0) + 1
    for tuples, count in port_statistics.items():
        if count > 300:
            output_handler.writeScan(tuples[0], tuples[1], count)
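# The counting loop above is the classic dict-counter idiom; an equivalent
# standalone version using collections.Counter, with made-up records:
from collections import Counter
records = [("192.0.2.1", "22"), ("192.0.2.1", "22"), ("192.0.2.9", "3389")]
for (sa, dp), count in Counter(records).items():
    if count > 300:
        print("possible scan:", sa, dp, count)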
def main():
    init_state = np.zeros((20, 20), dtype=np.bool_)
    g = grid.Grid(init_state)
    # blinker
    structures.blinker(g, (3, 4), True)
    # toad
    structures.toad(g, (8, 6), True)
    # glider
    structures.glider(g, (3, 10), False, False)

    out = output.Output(g, 800, 600, 100)
    out.render_frame(g)

    def callback():
        out.render_frame(g)

    current_frame = 0
    while not out.query_closed():
        # wait
        time.sleep(1. / 3)
        out.update_fps(current_frame)
        # process grid updates, render
        g.update_tick(callback)
        current_frame += 1
def plots(start_datetime, end_datetime):
    nf = nflow.NetFlow()
    iplists = iplist.IPList()
    src_plain = nf.readLog(
        start_datetime, end_datetime, 1,
        options=["-a", "-A", "srcip4/16", iplists.getNetList("src")])
    src = nf.parseSummary(src_plain)
    srcflows, srcbytes, srcpackets = src['flows'], src['bytes'], src['packets']
    dst_plain = nf.readLog(
        start_datetime, end_datetime, 1,
        options=["-a", "-A", "dstip4/16", iplists.getNetList("dst")])
    dst = nf.parseSummary(dst_plain)
    dstflows, dstbytes, dstpackets = dst['flows'], dst['bytes'], dst['packets']
    outputer = output.Output(end_datetime=end_datetime)
    outputer.writePlot(srcbytes, srcflows, srcpackets, "src")
    outputer.writePlot(dstbytes, dstflows, dstpackets, "dst")