def execute(self):
    """Distributed-scan every VFS path matching a glob.

    self.args[0] is a path glob; self.args[1:] are scanner-name globs.
    Queues a distributed scan job per matching inode, waits for them to
    complete, then yields a completion message.
    """
    if len(self.args) < 2:
        yield self.help()
        return

    ## Try to glob the inode list.  BUG FIX: fnmatch.translate()
    ## produces Python-only regex syntax (a trailing \Z(?ms)) which
    ## MySQL's rlike cannot parse — use DB.glob2re() (as the sibling
    ## inode-table variant of this command already does) to build a
    ## MySQL-compatible pattern from the glob.
    dbh = DB.DBO(self.environment._CASE)
    dbh.execute(
        "select inode_id from vfs where !isnull(inode_id) and path rlike %r",
        DB.glob2re(self.args[0]))

    ## This is a cookie used to identify our requests so that we
    ## can check they have been done later.  Integral, consistent with
    ## the other scan commands' jobs-table cookies.
    cookie = int(time.time())

    ## Expand each scanner glob against the registered scanner names,
    ## then pull in any scanners those depend on:
    scanners = []
    for pattern in self.args[1:]:
        scanners.extend(fnmatch.filter(Registry.SCANNERS.scanners, pattern))
    scanners = ScannerUtils.fill_in_dependancies(scanners)

    ## NOTE: the old unused pdbh/mass_insert_start('jobs') handle was
    ## removed — scan_inode_distributed() does the job queuing itself.
    for row in dbh:
        Scanner.scan_inode_distributed(dbh.case, row['inode_id'],
                                       scanners, cookie=cookie)

    self.wait_for_scan(cookie)
    yield "Scanning complete"
def execute(self):
    """Queue a "Scan" job for every inode matching a glob pattern.

    self.args[0] is an inode glob; self.args[1:] are scanner-name
    globs.  Jobs are bulk-inserted into the jobs table and, when the
    shell is interactive, we block until the workers finish.
    """
    if len(self.args) < 2:
        yield self.help()
        return

    ## Try to glob the inode list:
    case_dbh = DB.DBO(self.environment._CASE)
    case_dbh.execute("select inode from inode where inode rlike %r",
                     DB.glob2re(self.args[0]))

    jobs_dbh = DB.DBO()
    jobs_dbh.mass_insert_start("jobs")

    ## This is a cookie used to identify our requests so that we
    ## can check they have been done later.
    cookie = int(time.time())

    ## Expand each scanner glob and fill in dependent scanners:
    requested = []
    for pattern in self.args[1:]:
        requested.extend(fnmatch.filter(Registry.SCANNERS.scanners, pattern))
    requested = ScannerUtils.fill_in_dependancies(requested)
    scanner_list = ",".join(requested)

    for row in case_dbh:
        inode = row["inode"]
        jobs_dbh.mass_insert(command="Scan",
                             arg1=self.environment._CASE,
                             arg2=inode,
                             arg3=scanner_list,
                             cookie=cookie)

    jobs_dbh.mass_insert_commit()

    ## Wait for the scanners to finish:
    if self.environment.interactive:
        self.wait_for_scan(cookie)

    yield "Scanning complete"
def execute(self):
    """Distributed-scan every VFS path matching a glob.

    self.args[0] is a path glob; self.args[1:] are scanner-name globs.
    Queues a distributed scan job per matching inode, waits for
    completion, then yields a status message.
    """
    if len(self.args) < 2:
        yield self.help()
        return

    ## Try to glob the inode list.  BUG FIX: fnmatch.translate()
    ## emits Python-only regex syntax (trailing \Z(?ms)) that MySQL's
    ## rlike cannot parse — DB.glob2re() builds a MySQL-compatible
    ## pattern from the glob instead.
    dbh = DB.DBO(self.environment._CASE)
    dbh.execute(
        "select inode_id from vfs where !isnull(inode_id) and path rlike %r",
        DB.glob2re(self.args[0]))

    ## This is a cookie used to identify our requests so that we
    ## can check they have been done later.  Made integral for
    ## consistency with the jobs-table cookies elsewhere.
    cookie = int(time.time())

    ## Expand scanner-name globs and pull in their dependancies:
    scanners = []
    for pattern in self.args[1:]:
        scanners.extend(fnmatch.filter(Registry.SCANNERS.scanners, pattern))
    scanners = ScannerUtils.fill_in_dependancies(scanners)

    ## The previously unused pdbh/mass_insert_start('jobs') handle was
    ## dead code (never written to or committed) — queuing happens in
    ## scan_inode_distributed().
    for row in dbh:
        Scanner.scan_inode_distributed(dbh.case, row['inode_id'],
                                       scanners, cookie=cookie)

    self.wait_for_scan(cookie)
    yield "Scanning complete"
def calculate_scanners(self, query):
    """Work out which scanners are required.

    Fills in dependancies and considers scanner groups.  Returns an
    array of scanner names.
    """
    ## Merge every scan group drawer's defaults with what the user
    ## actually asked for:
    merged = FlagFramework.query_type(())
    for group_cls in ScannerUtils.scan_groups_gen():
        group_cls.Drawer().add_defaults(merged, query)

    ## Collect enabled scanners — parameters named scan_<name> whose
    ## value is "on":
    prefix = "scan_"
    offset = len(prefix)
    requested = [key[offset:] for key, value in merged
                 if key[:offset] == prefix and value == "on"]

    ## Now pull in any scanners which are needed
    return ScannerUtils.fill_in_dependancies(requested)
def calculate_scanners(self, query):
    """Calculate the scanners required, filling in dependancies and
    considering scanner groups.  Returns an array of scanner names.
    """
    ## The scanners that users asked for:
    defaults = FlagFramework.query_type(())
    for group in ScannerUtils.scan_groups_gen():
        group.Drawer().add_defaults(defaults, query)

    marker = 'scan_'
    width = len(marker)
    names = []
    for key, value in defaults:
        ## Only switched-on scan_<name> parameters count:
        if key[:width] != marker:
            continue
        if value != 'on':
            continue
        names.append(key[width:])

    ## Now pull in any scanners which are needed
    return ScannerUtils.fill_in_dependancies(names)
def execute(self):
    """Force a scan of a single inode with the requested scanners.

    self.args[0] is the inode; self.args[1:] are scanner-name globs.
    """
    if len(self.args) < 2:
        yield self.help()
        return

    case = self.environment._CASE

    ## Expand each scanner glob against the registered scanner names,
    ## then add any scanners they depend on:
    selected = []
    for pattern in self.args[1:]:
        selected.extend(fnmatch.filter(Registry.SCANNERS.scanners, pattern))
    selected = ScannerUtils.fill_in_dependancies(selected)

    Scanner.scan_inode(case, self.args[0], selected,
                       force=True, cookie=time.time())
def form(self, query, result):
    """Render the configuration form for scanning a single inode.

    Draws one sub-form per scanner group (deduplicated by group name)
    plus a final confirmation checkbox.
    """
    try:
        ## Draw the form for each scan group:
        result.text(DB.expand("Scanning Inode %s", (query['inode'])))
        seen_groups = []
        for cls in ScannerUtils.scan_groups_gen():
            try:
                drawer = cls.Drawer()
                ## Each group is only drawn once:
                if drawer.group not in seen_groups:
                    seen_groups.append(drawer.group)
                    drawer.form(query, result)
            except RuntimeError:
                pass
        result.checkbox('Click here when finished', 'final', 'ok')
    except KeyError:
        return result
def execute(self):
    """Force-scan one inode using the scanners named by glob in args."""
    if len(self.args) < 2:
        yield self.help()
        return

    ## Every registered scanner whose name matches one of the globs:
    matched = [name
               for glob_pattern in self.args[1:]
               for name in fnmatch.filter(Registry.SCANNERS.scanners,
                                          glob_pattern)]

    Scanner.scan_inode(self.environment._CASE,
                       self.args[0],
                       ScannerUtils.fill_in_dependancies(matched),
                       force=True,
                       cookie=time.time())
def form(self, query, result):
    """Draw per-scan-group configuration forms for one inode."""
    try:
        result.text(DB.expand("Scanning Inode %s", (query["inode"])))
        rendered = []
        for group_cls in ScannerUtils.scan_groups_gen():
            ## A drawer that fails to construct or render is skipped:
            try:
                drawer = group_cls.Drawer()
                if drawer.group in rendered:
                    continue
                rendered.append(drawer.group)
                drawer.form(query, result)
            except RuntimeError:
                pass
        result.checkbox("Click here when finished", "final", "ok")
    except KeyError:
        return result
def form(self, query, result):
    """Draw the case selector, an optional path-glob field, and the
    per-scan-group configuration forms."""
    try:
        result.case_selector()
        ## The path field only makes sense for a real case, not the
        ## central flag database:
        if query['case'] != config.FLAGDB:
            result.textfield('Scan files (glob or path)', 'path', size=50)

        ## Draw the form for each scan group:
        drawn = []
        for cls in ScannerUtils.scan_groups_gen():
            try:
                drawer = cls.Drawer()
                if drawer.group not in drawn:
                    drawn.append(drawer.group)
                    drawer.form(query, result)
            except RuntimeError:
                pass
        result.checkbox('Click here when finished', 'final', 'ok')
    except KeyError:
        return result
def form(self, query, result):
    """Render the scan-configuration form: case selector, path glob
    (for non-flag databases) and one sub-form per scanner group."""
    try:
        result.case_selector()
        if query["case"] != config.FLAGDB:
            result.textfield("Scan files (glob or path)", "path", size=50)

        ## Draw the form for each scan group, once per group:
        rendered = []
        for group_cls in ScannerUtils.scan_groups_gen():
            try:
                drawer = group_cls.Drawer()
                if drawer.group in rendered:
                    continue
                rendered.append(drawer.group)
                drawer.form(query, result)
            except RuntimeError:
                pass
        result.checkbox("Click here when finished", "final", "ok")
    except KeyError:
        return result
def __init__(self, flag, ui=None):
    """Register scan-group and per-scanner parameters on this report."""
    Reports.report.__init__(self, flag, ui)

    ## Copy so the parameter additions below stay local to this report:
    self.parameters = self.parameters.copy()

    ## Work out what scan groups are available and require they be
    ## in the parameters:
    seen = []
    for cls in ScannerUtils.scan_groups_gen():
        drawer = cls.Drawer()
        ## NOTE(review): membership is tested on drawer.group while the
        ## list stores get_group_name() values — confirm these agree,
        ## otherwise the dedup never fires.
        if drawer.group in seen:
            continue
        group_name = drawer.get_group_name()
        seen.append(group_name)

        ## Add the scan group to our parameters - this will ensure
        ## that type checking is done on it:
        self.parameters[group_name] = 'onoff'

        ## Adjust this reports parameters list. This is required to
        ## ensure that caching works correctly (caching must include
        ## all the individual scanners so they are sensitive to
        ## changes in sub group tuning)
        for param_name, param_type in drawer.get_parameters():
            self.parameters[param_name] = param_type
def execute(self):
    """Queue a "Scan" job for every inode matching a glob pattern.

    self.args[0] is an inode glob; self.args[1:] are scanner-name
    globs.  Jobs are bulk-inserted into the jobs table; when the shell
    is interactive we block until the workers finish.
    """
    if len(self.args) < 2:
        yield self.help()
        return

    ## Try to glob the inode list.  BUG FIX: fnmatch.translate()
    ## produces Python-only regex syntax (a trailing \Z(?ms)) which
    ## MySQL's rlike cannot parse — use DB.glob2re() (as the newer
    ## variant of this command does) to build a MySQL-compatible
    ## pattern from the glob.
    dbh = DB.DBO(self.environment._CASE)
    dbh.execute("select inode from inode where inode rlike %r",
                DB.glob2re(self.args[0]))

    pdbh = DB.DBO()
    pdbh.mass_insert_start('jobs')

    ## This is a cookie used to identify our requests so that we
    ## can check they have been done later.
    cookie = int(time.time())

    ## Expand each scanner glob and fill in dependent scanners:
    scanners = []
    for pattern in self.args[1:]:
        scanners.extend(fnmatch.filter(Registry.SCANNERS.scanners, pattern))
    scanners = ScannerUtils.fill_in_dependancies(scanners)

    for row in dbh:
        pdbh.mass_insert(
            command='Scan',
            arg1=self.environment._CASE,
            arg2=row['inode'],
            arg3=','.join(scanners),
            cookie=cookie,
        )
    pdbh.mass_insert_commit()

    ## Wait for the scanners to finish:
    if self.environment.interactive:
        self.wait_for_scan(cookie)
    yield "Scanning complete"
def __init__(self, flag, ui=None):
    """Declare the scan-group on/off switches and every scanner's own
    tuning parameters so report caching tracks them."""
    Reports.report.__init__(self, flag, ui)
    self.parameters = self.parameters.copy()

    known_groups = []
    for group_cls in ScannerUtils.scan_groups_gen():
        drawer = group_cls.Drawer()
        ## NOTE(review): the dedup check reads drawer.group but the
        ## stored entries come from get_group_name() — verify the two
        ## values match, or duplicates are never detected.
        if drawer.group in known_groups:
            continue
        name = drawer.get_group_name()
        known_groups.append(name)

        ## Declaring the group parameter forces type checking on it:
        self.parameters[name] = "onoff"

        ## Include each scanner's own parameters so that caching is
        ## sensitive to changes in sub-group tuning:
        for key, param_type in drawer.get_parameters():
            self.parameters[key] = param_type
config.add_option("single", default=False, action='store_true', help = "Single shot (exit once done)") config.parse_options(True) try: directory = config.args[0] except IndexError: print "You must specify a directory to monitor" sys.exit(-1) if not config.case: print "You must specify a case to load into" sys.exit(-1) scanners = ScannerUtils.fill_in_dependancies(config.scanners.split(',')) count = 0 processed_length = 0 def load_file(urn, processor, pcap_dispatch): """ Loads the urn into the processor """ ## Get a unique id for the urn dbfs = FileSystem.DBFS(config.case) fd = dbfs.open(urn=urn) id = len(pcap_dispatch)+1 pcap_dispatch[id] = urn try: input_file = pypcap.PyPCAP(fd, file_id = id) except IOError,e:
config.parse_options(True) try: directory = config.args[0] output_file = config.args[1] except IndexError: print "You must specify both a directory to monitor and an output file" sys.exit(-1) if not config.case: print "You must specify a case to load into" sys.exit(-1) scanners = config.scanners.split(',') scanners = ScannerUtils.fill_in_dependancies(scanners) print scanners output_fd = None def create_output_file(): global output_fd, output_file print "Will read from %s and write to %s. Will use these scanners: %s" % ( directory, output_file, scanners) ## Check if the file is already there: filename = config.UPLOADDIR + '/' + output_file if output_file != '-': try:
config.parse_options(True) try: directory = config.args[0] output_file = config.args[1] except IndexError: print "You must specify both a directory to monitor and an output file" sys.exit(-1) if not config.case: print "You must specify a case to load into" sys.exit(-1) scanners = config.scanners.split(',') scanners = ScannerUtils.fill_in_dependancies(scanners) print scanners output_fd = None def create_output_file(): global output_fd, output_file print "Will read from %s and write to %s. Will use these scanners: %s" % (directory, output_file, scanners) ## Check if the file is already there: filename = config.UPLOADDIR + '/' + output_file if output_file != '-': try: os.stat(filename) ## Yep its there: