def route(self, dest, verbose=None):
    if dest in self.cache:
        return self.cache[dest]
    if verbose is None:
        verbose = conf.verb
    # Transform "192.168.*.1-5" to one IP of the set
    dst = dest.split("/")[0]
    dst = dst.replace("*", "0")
    while 1:
        l = dst.find("-")
        if l < 0:
            break
        m = (dst[l:] + ".").find(".")
        dst = dst[:l] + dst[l + m:]

    dst = atol(dst)
    pathes = []
    for d, m, gw, i, a in self.routes:
        aa = atol(a)
        if aa == dst:
            pathes.append((0xffffffffL, (LOOPBACK_NAME, a, "0.0.0.0")))
        if (dst & m) == (d & m):
            pathes.append((m, (i, a, gw)))
    if not pathes:
        if verbose:
            warning("No route found (no default route?)")
        return LOOPBACK_NAME, "0.0.0.0", "0.0.0.0"  # XXX linux specific!
    # Choose the more specific route (greatest netmask).
    # XXX: we don't care about metrics
    pathes.sort()
    ret = pathes[-1][1]
    self.cache[dest] = ret
    return ret
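# Usage sketch (not part of the original listing): in Scapy this routing table is normally
# reached through conf.route, and route() returns an (iface, source_ip, gateway) triple for
# a destination. The address 192.0.2.1 below is just a placeholder.
from scapy.all import conf

iface, src, gw = conf.route.route("192.0.2.1")
print "via %s from %s gw %s" % (iface, src, gw)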
def get_if_bcast(self, iff):
    for net, msk, gw, iface, addr in self.routes:
        if (iff == iface and net != 0L):
            bcast = atol(addr) | (~msk & 0xffffffffL)  # FIXME: check error in atol()
            return ltoa(bcast)
    warning("No broadcast address found for iface %s\n" % iff)
def __init__(self, filename):
    RawPcapReader.__init__(self, filename)
    try:
        self.LLcls = conf.l2types[self.linktype]
    except KeyError:
        warning("PcapReader: unknown LL type [%i]/[%#x]. Using Raw packets" % (self.linktype, self.linktype))
        self.LLcls = conf.raw_layer
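# Usage sketch (assuming the standard Scapy API): PcapReader/rdpcap use the linktype lookup
# done in __init__ above to decode each frame with the right link-layer class; unknown
# linktypes fall back to Raw. "capture.pcap" is a placeholder filename.
from scapy.all import PcapReader, rdpcap

pkts = rdpcap("capture.pcap")        # read the whole file at once
reader = PcapReader("capture.pcap")
first = reader.read_packet()         # or consume the capture packet by packet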
def route(self, dest, verbose=None):
    if type(dest) is list and dest:
        dest = dest[0]
    if dest in self.cache:
        return self.cache[dest]
    if verbose is None:
        verbose = conf.verb
    # Transform "192.168.*.1-5" to one IP of the set
    dst = dest.split("/")[0]
    dst = dst.replace("*", "0")
    while 1:
        l = dst.find("-")
        if l < 0:
            break
        m = (dst[l:] + ".").find(".")
        dst = dst[:l] + dst[l + m:]

    dst = atol(dst)
    pathes = []
    for d, m, gw, i, a in self.routes:
        aa = atol(a)
        if aa == dst:
            pathes.append((0xffffffffL, (LOOPBACK_NAME, a, "0.0.0.0")))
        if (dst & m) == (d & m):
            pathes.append((m, (i, a, gw)))
    if not pathes:
        if verbose:
            warning("No route found (no default route?)")
        return LOOPBACK_NAME, "0.0.0.0", "0.0.0.0"  # XXX linux specific!
    # Choose the more specific route (greatest netmask).
    # XXX: we don't care about metrics
    pathes.sort()
    ret = pathes[-1][1]
    self.cache[dest] = ret
    return ret
def __init__(self, buff):
    try:
        import psyco
        psyco.full()
    except ImportError:
        warning("module psyco not found")
    self.__buff = buff
    self.__idx = 0
def delt(self, *args, **kargs):
    """delt(host|net, gw|dev)"""
    self.invalidate_cache()
    route = self.make_route(*args, **kargs)
    try:
        i = self.routes.index(route)
        del(self.routes[i])
    except ValueError:
        warning("no matching route found")
def do_graph(graph, prog=None, format=None, target=None, type=None, string=None, options=None):
    """do_graph(graph, prog=conf.prog.dot, format="svg",
         target="| conf.prog.display", options=None, [string=1]):
    string: if not None, simply return the graph string
    graph: GraphViz graph description
    format: output type (svg, ps, gif, jpg, etc.), passed to dot's "-T" option
    target: filename or redirect. Defaults pipe to Imagemagick's display program
    prog: which graphviz program to use
    options: options to be passed to prog"""
    if format is None:
        if WINDOWS:
            format = "png"  # use common format to make sure a viewer is installed
        else:
            format = "svg"
    if string:
        return graph
    if type is not None:
        format = type
    if prog is None:
        prog = conf.prog.dot
    start_viewer = False
    if target is None:
        if WINDOWS:
            tempfile = os.tempnam("", "scapy") + "." + format
            target = "> %s" % tempfile
            start_viewer = True
        else:
            target = "| %s" % conf.prog.display
    if format is not None:
        format = "-T %s" % format
    cmd = "%s %s %s %s" % (prog, options or "", format or "", target)
    w, r = os.popen2(cmd)
    w.write(graph)
    w.close()
    if start_viewer:
        # Workaround for file not found error: We wait until tempfile is written.
        waiting_start = time.time()
        while not os.path.exists(tempfile):
            time.sleep(0.1)
            if time.time() - waiting_start > 3:
                warning("Temporary file '%s' could not be written. Graphic will not be displayed." % tempfile)
                break
        else:
            if conf.prog.display == conf.prog._default:
                os.startfile(tempfile)
            else:
                subprocess.Popen([conf.prog.display, tempfile])
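# Usage sketch (assumption: Scapy 2.x layout, where do_graph lives in scapy.utils and is
# also invoked indirectly by PacketList.conversations()). With string=1 the function simply
# returns the DOT source, so this snippet works even without Graphviz installed.
from scapy.utils import do_graph

dot_src = do_graph("digraph G { A -> B; }", string=1)
print dot_src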
def _write_header(self, pkt):
    if self.linktype == None:
        if type(pkt) is list or type(pkt) is tuple or isinstance(pkt, BasePacketList):
            pkt = pkt[0]
        try:
            self.linktype = conf.l2types[pkt.__class__]
        except KeyError:
            warning("PcapWriter: unknown LL type for %s. Using type 1 (Ethernet)" % pkt.__class__.__name__)
            self.linktype = 1
    RawPcapWriter._write_header(self, pkt)
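# Usage sketch (assuming the standard Scapy API): _write_header above is what picks the pcap
# linktype from the first packet's class when PcapWriter/wrpcap write a capture to disk.
# "out.pcap" is a placeholder filename.
from scapy.all import Ether, IP, wrpcap

wrpcap("out.pcap", [Ether() / IP(dst="192.0.2.1")])  # linktype 1 (Ethernet) inferred from Ether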
def __main__():
    print sys.argv
    if len(sys.argv) > 1:
        files = []
        for p in sys.argv[1].split(":"):
            files.extend(get_classes(p))
        a = androguard.Androguard(files)
        try:
            a.do(sys.argv[2])
        except Exception, e:
            warning("!!!! Androguard failed !!!!")
            traceback.print_exc()
def main():
    if len(sys.argv) != 2:
        print_usage()

    # import config and error first to setup basic framework logging settings
    import config
    import error
    if not config.init(sys.argv[1]):
        error.warning("No Config file found. Using defaults")
    error.init()

    # Import everything else now that config and error are setup:
    import data
    import parser
    import ruleParser
    import teu

    error.message("\n\n**** TASK PARSE TREE ****")
    t_list = parser.parseTaskList()
    if error.isError():
        error.writeMsgs()
        sys.exit(1)
    parser.printTaskList(t_list)

    error.message("\n\n**** RULES LIST ****")
    r_list = ruleParser.getRules()
    if error.isError():
        error.writeMsgs()
        sys.exit(1)
    parser.printTaskList(r_list)

    # setup shared info for rules:
    # common = None
    common = data.Data()
    common.shared.putVar("USER_LIB_PATH", config.getUserLibPath())
    common.shared.putVar("COMMON_LIB_PATH", config.getCommonLibPath())
    common.shared.putVar("TASK_LIST", t_list)

    # enforce rules
    error.message("\nRunning Rules:")
    teu.runTaskList(r_list, common)
    if error.isError():
        error.writeMsgs()
        sys.exit(1)
    error.message("All 'FATAL' Rules Passed; Your job is ready to run!\n")
    error.writeMsgs()
def import_hexcap():
    p = ""
    try:
        while 1:
            l = raw_input().strip()
            try:
                p += re_extract_hexcap.match(l).groups()[2]
            except:
                warning("Parsing error during hexcap")
                continue
    except EOFError:
        pass
    p = p.replace(" ", "")
    return p.decode("hex")
def run(self, coord_x, coord_y):
    try:
        import gmpy
        coord_x = [gmpy.mpz(i) for i in coord_x]
        coord_y = [gmpy.mpz(i) for i in coord_y]
    except ImportError:
        warning("module gmpy not found")

    try:
        import psyco
        psyco.bind(self._run)
        psyco.bind(self.NevilleAlgorithm)
        psyco.bind(self.interpolate)
    except ImportError:
        warning("module psyco not found")

    return self._run(coord_x, coord_y)
def route(self, dest, verbose=None, iface_hint=None):
    # iface_hint: if several routes share the same dest and mask (multicast specific),
    # prefer the route on iface_hint -- by chenzongze 2013.10.17
    if type(dest) is list and dest:
        dest = dest[0]
    if iface_hint:
        cache_hash = dest + iface_hint
    else:
        cache_hash = dest
    if cache_hash in self.cache:
        return self.cache[cache_hash]
    if verbose is None:
        verbose = conf.verb
    # Transform "192.168.*.1-5" to one IP of the set
    dst = dest.split("/")[0]
    dst = dst.replace("*", "0")
    while 1:
        l = dst.find("-")
        if l < 0:
            break
        m = (dst[l:] + ".").find(".")
        dst = dst[:l] + dst[l + m:]

    dst = atol(dst)
    pathes = []
    MAX_METRIC = 0xffffffffL
    # take metric into consideration -- by chenzongze 2012.07.24
    for d, m, gw, i, a, metric in self.routes:
        metric = MAX_METRIC - int(metric)
        aa = atol(a)
        if aa == dst:
            pathes.append((0xffffffffL, (MAX_METRIC, LOOPBACK_NAME, a, "0.0.0.0")))
        if (dst & m) == (d & m):
            pathes.append((m, (metric, i, a, gw)))
    if not pathes:
        if verbose:
            warning("No route found (no default route?)")
        return LOOPBACK_NAME, "0.0.0.0", "0.0.0.0"  # XXX linux specific!
    # Choose the most specific route (greatest netmask); for equal netmasks the
    # encoded metric breaks the tie.
    pathes.sort()
    ret = pathes[-1][1][1:]  # omit the metric again
    if iface_hint:
        # among routes as specific as the best one, prefer the hinted interface
        last_metric = pathes[-1][0]  # netmask of the most specific route
        for i in range(-1, -len(pathes) - 1, -1):
            if last_metric == pathes[i][0]:
                if pathes[i][1][1] == iface_hint:
                    ret = pathes[i][1][1:]
            else:
                break
    self.cache[cache_hash] = ret
    return ret
def Exit(msg):
    warning("Error : " + msg)
    raise Exception("oops")
def getRules():
    rule_list = []
    task_ptr = None

    # There are two rules files:
    #  1) user rules file   - any rules specific to that job
    #  2) common rules file - rules common to all jobs
    for file_name in [config.getUserRulesFile(), config.getCommonRulesFile()]:
        if not os.path.isfile(file_name):
            continue
        file = open(file_name)
        lines = file.readlines()
        file.close()

        # Get the names of all the rules and import them into a list.
        for x in range(len(lines)):
            line = lines[x]
            name, tmp, tmp1, tmp2, fatal = parser.parseLine(line)
            if name == None:
                continue

            # We don't need to check the lib paths because that was
            # already done in the parsing stage
            u_py_file, u_task_ptr = parser.findTask(name, config.getUserLibPath())
            c_py_file, c_task_ptr = parser.findTask(name, config.getCommonLibPath())
            error.sysDebug("GetRules: user: " + str(u_py_file) + ", " + str(u_task_ptr))
            error.sysDebug("GetRules: common: " + str(c_py_file) + ", " + str(c_task_ptr))

            # did we find a py file?
            if not u_py_file and not c_py_file:
                error.error("GetRules: rule: " + name + " Does not exist. See Debug output for traceback")
                continue

            # choose a user rule if one exists in multiple places
            if u_py_file == c_py_file and u_task_ptr and c_task_ptr:
                error.warning("Overriding common task with user task: " + name + " from: " + u_py_file)
                py_file = u_py_file
                task_ptr = u_task_ptr
            elif u_py_file:
                py_file = u_py_file
                task_ptr = u_task_ptr
            else:
                py_file = c_py_file
                task_ptr = c_task_ptr

            if not task_ptr:
                error.error("Can't find rule: " + str(name))
            else:
                # Setup rule task
                task_item = task.Task(name, py_file, file_name, x + 1, None, task_ptr, fatal)
                rule_list.append(task_item)
                error.sysDebug("getRules: adding rule:" + name)

    return rule_list
def mainProcess(str1, str2, order, name_log):
    process_failed = False
    renew_working_dir()
    # --- get the std command.
    std_order = processFuncs.separate_num_and_letter(order)
    command = classCommands.makeCommand(std_order)
    # --- Get the file list:
    flt = getFileList()
    if not command.whether_rename():
        # --- this command will not rename the files
        if command.Help.value:
            command.print_help()
    else:
        # --- this command will rename the files.
        if command.cancel.value:
            if name_log and name_log[0].length >= 2:
                for i in range(len(name_log)):
                    cur_name = name_log[i].back()
                    dot_pos = cur_name.rfind('.')
                    suffix = cur_name[dot_pos:] if dot_pos > 0 else ''
                    tmp_name = 'Begin_bcigiuipgva3cvGHVGHjsbv__{Num:0>6d}__avhav_End'.format(Num=i) + suffix
                    os.rename(cur_name, tmp_name)
                for i in range(len(name_log)):
                    pre_name = name_log[i].lists[name_log[i].length - 2]
                    dot_pos = pre_name.rfind('.')
                    suffix = pre_name[dot_pos:] if dot_pos > 0 else ''
                    cur_name = 'Begin_bcigiuipgva3cvGHVGHjsbv__{Num:0>6d}__avhav_End'.format(Num=i) + suffix
                    os.rename(cur_name, pre_name)
                    name_log[i].pop()
            else:
                error.warning(418)
                process_failed = True
        else:
            flt = processFuncs.sort_by_command(flt, command)
            for i in range(len(flt)):
                begins, ends = processFuncs.getBegs_and_ends(flt[i].mainName, str1, command)
                if begins:
                    real_str2 = processFuncs.real_str2(i + 1, str2, command, len(flt))
                    new_main_name = processFuncs.replace(flt[i].mainName, real_str2, begins, ends)
                    flt[i].rename(new_main_name)
            # --- Check whether any of the new names is empty.
            if validity.have_empty_names_in(flt):
                error.warning(419)
                process_failed = True
                return command.Quit and not command.Help
            # --- Check whether the new names are all the same as the old ones.
            all_same = True
            for f in flt:
                if f.oriFullName != f.fullName:
                    all_same = False
            if all_same:
                error.warning(420)
                process_failed = True
                return command.Quit and not command.Help
            # --- Check whether the new names conflict with old names during or after renaming.
            if not validity.conflict_while_rename(flt):
                for i in range(len(flt)):
                    os.rename(flt[i].oriFullName, flt[i].fullName)
                log.update(name_log, flt)
            else:
                error.warning(validity.conflict_while_rename(flt))
                process_failed = True
    if not process_failed:
        error.warning(0)
    return command.Quit and not command.Help
def literal(self, e):
    if '\\' in e.value:
        warning("'\\' is not path separator in bakefiles (use '/')", pos=e.pos)
    return super(_SplitIntoPathVisitor, self).literal(e)
def sndrcv(pks, pkt, timeout=None, inter=0, verbose=None, chainCC=0, retry=0, multi=0):
    if not isinstance(pkt, Gen):
        pkt = SetGen(pkt)

    if verbose is None:
        verbose = conf.verb
    debug.recv = plist.PacketList([], "Unanswered")
    debug.sent = plist.PacketList([], "Sent")
    debug.match = plist.SndRcvList([])
    nbrecv = 0
    ans = []
    # do it here to fix random fields, so that parent and child have the same
    all_stimuli = tobesent = [p for p in pkt]
    notans = len(tobesent)

    hsent = {}
    for i in tobesent:
        h = i.hashret()
        if h in hsent:
            hsent[h].append(i)
        else:
            hsent[h] = [i]
    if retry < 0:
        retry = -retry
        autostop = retry
    else:
        autostop = 0

    while retry >= 0:
        found = 0

        if timeout < 0:
            timeout = None

        rdpipe, wrpipe = os.pipe()
        rdpipe = os.fdopen(rdpipe)
        wrpipe = os.fdopen(wrpipe, "w")

        pid = 1
        try:
            pid = os.fork()
            if pid == 0:
                try:
                    sys.stdin.close()
                    rdpipe.close()
                    try:
                        i = 0
                        if verbose:
                            print "Begin emission:"
                        for p in tobesent:
                            pks.send(p)
                            i += 1
                            time.sleep(inter)
                        if verbose:
                            print "Finished to send %i packets." % i
                    except SystemExit:
                        pass
                    except KeyboardInterrupt:
                        pass
                    except:
                        log_runtime.exception("--- Error in child %i" % os.getpid())
                        log_runtime.info("--- Error in child %i" % os.getpid())
                finally:
                    try:
                        os.setpgrp()  # Change process group to avoid ctrl-C
                        sent_times = [p.sent_time for p in all_stimuli if p.sent_time]
                        cPickle.dump((conf.netcache, sent_times), wrpipe)
                        wrpipe.close()
                    except:
                        pass
            elif pid < 0:
                log_runtime.error("fork error")
            else:
                wrpipe.close()
                stoptime = 0
                remaintime = None
                inmask = [rdpipe, pks]
                try:
                    try:
                        while 1:
                            if stoptime:
                                remaintime = stoptime - time.time()
                                if remaintime <= 0:
                                    break
                            r = None
                            if arch.FREEBSD or arch.DARWIN:
                                inp, out, err = select(inmask, [], [], 0.05)
                                if len(inp) == 0 or pks in inp:
                                    r = pks.nonblock_recv()
                            else:
                                inp, out, err = select(inmask, [], [], remaintime)
                                if len(inp) == 0:
                                    break
                                if pks in inp:
                                    r = pks.recv(MTU)
                            if rdpipe in inp:
                                if timeout:
                                    stoptime = time.time() + timeout
                                del(inmask[inmask.index(rdpipe)])
                            if r is None:
                                continue
                            ok = 0
                            h = r.hashret()
                            if h in hsent:
                                hlst = hsent[h]
                                for i in range(len(hlst)):
                                    if r.answers(hlst[i]):
                                        ans.append((hlst[i], r))
                                        if verbose > 1:
                                            os.write(1, "*")
                                        ok = 1
                                        if not multi:
                                            del(hlst[i])
                                            notans -= 1
                                        else:
                                            if not hasattr(hlst[i], '_answered'):
                                                notans -= 1
                                            hlst[i]._answered = 1
                                        break
                            if notans == 0 and not multi:
                                break
                            if not ok:
                                if verbose > 1:
                                    os.write(1, ".")
                                nbrecv += 1
                                if conf.debug_match:
                                    debug.recv.append(r)
                    except KeyboardInterrupt:
                        if chainCC:
                            raise
                finally:
                    try:
                        nc, sent_times = cPickle.load(rdpipe)
                    except EOFError:
                        warning("Child died unexpectedly. Packets may have not been sent %i" % os.getpid())
                    else:
                        conf.netcache.update(nc)
                        for p, t in zip(all_stimuli, sent_times):
                            p.sent_time = t
                    os.waitpid(pid, 0)
        finally:
            if pid == 0:
                os._exit(0)

        remain = reduce(list.__add__, hsent.values(), [])
        if multi:
            remain = filter(lambda p: not hasattr(p, '_answered'), remain)
        if autostop and len(remain) > 0 and len(remain) != len(tobesent):
            retry = autostop
        tobesent = remain
        if len(tobesent) == 0:
            break
        retry -= 1

    if conf.debug_match:
        debug.sent = plist.PacketList(remain[:], "Sent")
        debug.match = plist.SndRcvList(ans[:])

    # clean the ans list to delete the field _answered
    if (multi):
        for s, r in ans:
            if hasattr(s, '_answered'):
                del(s._answered)

    if verbose:
        print "\nReceived %i packets, got %i answers, remaining %i packets" % (nbrecv + len(ans), len(ans), notans)
    return plist.SndRcvList(ans), plist.PacketList(remain, "Unanswered")
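# Usage sketch (assuming the standard Scapy API): sndrcv() is normally driven through the
# sr()/sr1() wrappers, which open the socket and forward the timeout/retry/multi options to
# it. The destination below is a placeholder, and sending requires root privileges.
from scapy.all import sr, IP, ICMP

ans, unans = sr(IP(dst="192.0.2.1") / ICMP(), timeout=2, verbose=0)
ans.summary()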
def parseFile(file, file_name):
    lines = file.readlines()
    task_list = []

    # for each line in the file
    for x in range(len(lines)):
        line = lines[x]
        name, fork, num, type, fatal = parseLine(line)
        if name == None:
            continue

        # Detect type by trying to find it as a task first:
        #   if it exists:
        #     if it's a task list, then parse it
        #     if it's a real task, set it up
        # check common and user paths; we look in user lib first, then common lib
        common_lib_path = config.getCommonLibPath()
        user_lib_path = config.getUserLibPath()
        if not validPaths([user_lib_path]) and not validPaths([common_lib_path]):
            error.fatalError("Neither the common lib path: " + common_lib_path +
                             ", or the user path: " + user_lib_path + ", exists")
        elif not validPaths([user_lib_path]):
            error.warning("User library path: " + user_lib_path + " does not exist")
        elif not validPaths([common_lib_path]):
            error.warning("Common library path: " + common_lib_path + " does not exist")

        # look for a task and try to import it
        u_py_file, u_task_ptr = findTask(name, user_lib_path)
        c_py_file, c_task_ptr = findTask(name, common_lib_path)
        error.sysDebug("parseFile: user: " + str(u_py_file) + ", " + str(u_task_ptr))
        error.sysDebug("parseFile: common: " + str(c_py_file) + ", " + str(c_task_ptr))

        # did we find a py file?
        task_ptr = None
        if not u_py_file and not c_py_file:
            error.error("Task: " + name + " Does not exist")
            continue
        else:
            # choose a user task if one exists in multiple places
            if u_py_file == c_py_file and u_task_ptr and c_task_ptr:
                error.warning("Overriding common task with user task: " + name + " from: " + u_py_file)
                py_file = u_py_file
                task_ptr = u_task_ptr
            elif u_py_file:
                py_file = u_py_file
                task_ptr = u_task_ptr
            else:
                py_file = c_py_file
                task_ptr = c_task_ptr

        # is this a list?
        if not task_ptr:
            task_ptr = parse(py_file)

        # Setup task
        error.sysDebug("parseFile: task_ptr: " + str(task_ptr))
        error.sysDebug("parseFile: " + name + " : fork= " + str(fork))
        if fork:
            fork_obj = task.ForkObj(fork, num, type)
        else:
            fork_obj = None
        task_item = task.Task(name, py_file, file_name, x + 1, fork_obj, task_ptr, fatal)
        task_list.append(task_item)
        error.sysDebug("parseFile: adding task:" + name)

    return task_list
def main(argv):
    # Handle OPTIONS
    try:
        opts, args = getopt.getopt(argv[1:], "ht:i:e:")
    except getopt.GetoptError:
        print "Invalid option!"
        usage()

    # Default args
    tag = None
    id = None
    notificationEmail = None

    for opt, arg in opts:
        if (opt == "-h"):
            usage()
        elif (opt == "-t"):
            tag = arg
        elif (opt == "-i"):
            id = arg
        elif (opt == "-e"):
            notificationEmail = arg

    if tag == None:
        print "A TAG must be specified to run!"
        usage()
    if id == None:
        print "An ID must be specified to run!"
        usage()

    # handle VARIABLES
    variables, job_dir = parseArgs(args)
    if job_dir == None:
        print "No JOB_DIR given!"
        usage()
    if variables == None:
        print "Invalid option or variable declaration!"
        usage()

    # Finish setting up framework
    import config, error
    if not config.init(job_dir):
        error.warning("No Config file found. Using defaults")
    error.init()
    import data, parser, ruleParser, teu

    # Register our signalHandler now.
    # We need a signal handler at this point so that we can exit gracefully.
    signal.signal(signal.SIGTERM, signalHandler)

    # include our command line options
    if notificationEmail != None:
        error.addEmailAddr(notificationEmail)
    variables.append(('TAG', tag))
    variables.append(('ID', id))

    # Parse the task lists
    error.message("Parsing the task lists...")
    taskList = parser.parseTaskList()
    if error.isError():
        error.fatalError("Error parsing the task lists!")
    error.message("task lists parsed OK!")

    # Parse Rules
    error.message("Parsing the rule lists...")
    ruleList = ruleParser.getRules()
    if error.isError():
        error.fatalError("Error parsing the rules list!")
    error.message("rule lists parsed OK!")

    # enforce the rules
    rulesObj = data.Data()
    rulesObj.shared.putVar("USER_LIB_PATH", config.getUserLibPath())
    rulesObj.shared.putVar("COMMON_LIB_PATH", config.getCommonLibPath())
    rulesObj.shared.putVar("TASK_LIST", taskList)
    error.message("Enforcing the Rules...")
    teu.runTaskList(ruleList, rulesObj)
    if error.isError():
        error.fatalError("Your job did not pass a FATAL rule")
    error.message("All FATAL rules passed!")

    # Start the job (run the tasks)
    error.message("Running the tasks...")
    dataObj = data.Data()
    for var in variables:
        name = var[0]
        value = var[1]
        dataObj.shared.putVar(name, value)
    teu.runTaskList(taskList, dataObj)
    if error.isError():
        error.message("Some tasks failed during execution!")
        error.message("Your job may not have completed successfully!")
        error.message("Trying to exit gracefully...")
        error.gracefulExit(
            "Framework Job: " + job_dir + "-" + tag + " completed with errors",
            "Framework Job: " + job_dir + "-" + tag + " completed, however some tasks failed during execution. Please check logs as your job may not have been successful.")
    error.message("All tasks completed successfully!")
    error.gracefulExit(
        "Job: " + job_dir + "-" + tag + " completed successfully",
        "The Task Execution Framework has completed Job: " + job_dir + "-" + tag)
# -*- coding:utf-8 -*-
# --- Written by Corona
# --- Finished on 2020-08-06

import error, procedure, validity, classCommands, processFuncs

name_log = []
count = 1
procedure.display(0)
while True:
    procedure.display(count)
    count = count + 1
    # --- get the original operation order: str1\str2[\command]
    raw = input('Please input your operation:')
    errCode = validity.notValid(raw)
    if errCode:
        error.warning(errCode)
        continue
    str1, str2, order = procedure.splitRaw(raw)
    if procedure.mainProcess(str1, str2, order, name_log):
        # --- mainProcess returns True if the program needs to quit (\q).
        break