def search_next_text_region(self, query, result): """ searches for the next text region and updates query['period_number'] """ ## Open all the disks filenames = query.getarray('filename') fds = [ IO.open_URL(f) for f in filenames ] if query.get('ismissing',False): fds.append(ParityFD(copy.deepcopy(filenames))) period_number = int(query.get('period_number',0)) + 1 blocksize = FlagFramework.calculate_offset_suffix(query['blocksize']) period = FlagFramework.calculate_offset_suffix(query['period']) p=0 while 1: offset = blocksize * (p + period_number * period) for fd in fds: fd.seek(offset) ## We classify a text region as one with 20 chars at ## the start of the period data = fd.read(20) if not data: result.heading("Error") result.para("Unable to read data from %r" % fd) return m = self.text_re.match(data) if m: period_number = period_number + p / period query.set('period_number',period_number) result.refresh(0, query, 'parent') return p += 1
def drop_table(case, name):
    """ Drops the log table tablename

    Looks up which preset created the table, lets that preset's log
    driver drop its storage, then removes the registration row and
    resets the loader reports that referenced the table.
    """
    if not name:
        return

    dbh = DB.DBO(case)
    pyflaglog.log(pyflaglog.DEBUG, "Dropping log table %s in case %s" % (name, case))

    dbh.execute("select * from log_tables where table_name = %r limit 1" , name)
    record = dbh.fetch()
    if not record:
        ## Table not registered - nothing to drop
        return

    ## The preset identifies the driver that owns this table; ask it
    ## to remove its storage.
    driver = load_preset(case, record['preset'])
    driver.drop(name)

    ## De-register the table
    dbh.delete("log_tables", where=DB.expand("table_name = %r ", name))

    ## Make sure that the reports get all reset
    FlagFramework.reset_all(family='Load Data', report="Load Preset Log File", table = name, case=case)
def right(path, result):
    """ Populate and render a temporary table of the regular files
    directly under *path* (relative to the upload directory).

    Each row records (filename, mtime, size); subdirectories and
    unreadable entries are skipped.
    """
    case = self.defaults.get('case',None)
    dbh = DB.DBO(case)
    tablename = dbh.get_temp()
    dbh.execute("""create table %s ( `filename` varchar(250) NOT NULL default '.', `timestamp` timestamp NOT NULL, `size` bigint(11) not null )""", tablename)

    ## populate the table:
    full_path = FlagFramework.sane_join(config.UPLOADDIR, path)
    dbh.mass_insert_start(tablename)

    ## List all the files in the directory:
    try:
        for d in os.listdir(full_path):
            filename = FlagFramework.sane_join(path, d)
            full_filename = FlagFramework.sane_join(config.UPLOADDIR, filename)
            try:
                if not os.path.isdir(full_filename):
                    s = os.stat(full_filename)
                    dbh.mass_insert(filename = filename,
                                    _timestamp = "from_unixtime(%d)" % s.st_mtime,
                                    size = s.st_size)
            except OSError:
                ## File vanished or is unreadable - skip it
                pass
    except OSError:
        ## The directory itself is missing/unreadable - show an empty table
        pass

    ## BUG FIX: previously the commit only ran when listdir succeeded,
    ## so a listing error left buffered rows unflushed.  Always commit.
    dbh.mass_insert_commit()
def navbar(self,query=None,next=None,previous=None,pageno=None):
    """ Returns the HTML for the navigation bar.

    next/previous are page targets (or None/negative when there is no
    such page); pageno is shown in the caption.
    """
    if query==None: query=FlagFramework.query_type(())
    if not query.has_key('family'):
        query['family']=''

    if next:
        #Make a link
        q=query.clone()
        q.FillQueryTarget(next)
        next = '<a href="f?%s"><img height=25 src="/images/forward.png" border="0"></a>' % (str(q))
    else:
        next = '<img src="/images/arrow_right_grey.gif" height=25 border="0">'

    ## NOTE(review): in Python 2, None < 0 is True, so previous=None
    ## also takes the greyed-out branch here - confirm this is the
    ## intended behaviour before porting to Python 3.
    if previous<0:
        previous = '<img src="/images/arrow_left_grey.gif" height=25 border="0">'
    else:
        q=query.clone()
        q.FillQueryTarget(previous)
        previous = '<a href="f?%s"><img height=25 src="/images/back.png" border="0"></a>' % (str(q))

    ## NOTE(review): the 'stop' key is built but never referenced by the
    ## template below, and the template contains a dangling unclosed
    ## <td valign="bottom"> - looks like leftovers from a removed button.
    bar = {'family': Theme.propegate(query,FlagFramework.query_type()),'back': previous,'case': query['case'],'pageno': pageno,'next': next,'reset': str(query)+'&reset=1','stop': str(query)+'&stop=1'}

    toolbar = '''<table><tr> <td valign="bottom"><a href="%(family)s"><img height=25 src="/images/home_grey.png" border="0"></a></td><td valign="bottom">%(back)s</td><td>%(case)s - page %(pageno)s</td><td valign="bottom">%(next)s</td> <td valign="bottom"> <td valign="bottom"><a href="flag?%(reset)s"><img height=25 src="/images/reset_grey.png" border="0"></a></td></tr></table>''' % bar

    return toolbar
def start_workers():
    """ Fork config.WORKERS worker child processes.

    A pipe pair (keepalive/write_keepalive) is created once; each
    worker keeps the read end, so when the master exits and the write
    end is closed by the OS, workers notice immediately and exit.  The
    master then posts the startup event and installs a SIGCHLD handler
    so dead children are restarted.
    """
    print "%s: starting workers" % os.getpid()
    global job_pipe, keepalive, write_keepalive
    ## NOTE(review): children is populated but not used after the loop
    ## in this view - presumably the SIGCHLD handler tracks pids
    ## elsewhere; confirm before removing.
    children = []

    ## These pipes control the worker. If the master exits, the pipes
    ## will be closed which will notify the worker immediately. It
    ## will then exit.
    if not keepalive:
        keepalive, write_keepalive = os.pipe()

    ## Start up as many children as needed
    for i in range(config.WORKERS):
        pid = os.fork()
        if pid:
            children.append(pid)
        else:
            ## Child process: drop the write end so only the master
            ## holds it open
            os.close(write_keepalive)
            ## Initialise the worker
            worker_run(keepalive)
            sys.exit(0)

    ## The process which called this function is a master
    FlagFramework.post_event("startup")

    ## The master is responsible for ensuring its child is running -
    ## if the child quits, we restart it.
    signal.signal(signal.SIGCHLD, handler)
def search_next_text_region(self, query, result): """ searches for the next text region and updates query['period_number'] """ ## Open all the disks filenames = query.getarray('filename') fds = [IO.open_URL(f) for f in filenames] period_number = int(query.get('period_number', 0)) + 1 blocksize = FlagFramework.calculate_offset_suffix(query['blocksize']) period = FlagFramework.calculate_offset_suffix(query['period']) p = 0 while 1: offset = blocksize * (p + period_number * period) for fd in fds: fd.seek(offset) ## We classify a text region as one with 20 chars at ## the start of the period data = fd.read(20) if not data: result.heading("Error") result.para("Unable to read data from %r" % fd) return m = self.text_re.match(data) if m: period_number = period_number + p / period query.set('period_number', period_number) result.refresh(0, query, 'parent') return p += 1
def run(self, *args, **kwargs):
    """ Fire the periodic housekeeping event, then re-arm the timer.

    The re-schedule happens in a finally clause so a failing event
    handler cannot stop future housekeeping runs.
    """
    message = "Running Housekeeping tasks on %s" % time.ctime()
    pyflaglog.log(pyflaglog.VERBOSE_DEBUG, message)
    try:
        FlagFramework.post_event('periodic', None)
    finally:
        ## Always schedule the next run, even if the event raised
        self.schedule()
def display(self,query, result):
    """ Reset the selected scanners on query['path'] and jump back to
    the filesystem browser.

    All the work is delegated to pyflash's scanner_reset_path command;
    the ScanFS reports are then reset so a subsequent re-scan of the
    same path actually runs.
    """
    ## BUG FIX: removed a large block of commented-out dead code that
    ## used to instantiate the scanners in-process - pyflash does all
    ## the work now.
    scanner_names = self.calculate_scanners(query)

    ## Use pyflash to do all the work
    env = pyflagsh.environment(case=query['case'])
    pyflagsh.shell_execv(env=env, command="scanner_reset_path",
                         argv=[query['path'], scanner_names])

    ## Reset the ScanFS reports from the database
    FlagFramework.reset_all(family = query['family'], report="ScanFS", case=query['case'])
    FlagFramework.reset_all(family = query['family'], report="Scan Filesystem", case=query['case'])

    ## Browse the filesystem instantly
    result.refresh(0, FlagFramework.query_type((),case=query['case'], family='Disk Forensics', report='BrowseFS', open_tree = query['path']) )
def pane_cb(path, result):
    """ Render the right-hand pane for the tree node at *path*.

    Uses id, dissected_packet and get_node from the enclosing scope.
    Shows a hex dump of the whole packet with the selected field's
    byte range highlighted.
    """
    branch = FlagFramework.splitpath(path)
    node = get_node(branch)
    result.heading("Packet %s" % id)
    data = dissected_packet.serialise()
    h = FlagFramework.HexDump(data, result)
    try:
        ## Branch node: it knows its own name and byte range
        result.text("%s" % node.get_name(), font='bold')
        result.text('', style='black', font='normal')
        start, length = node.get_range()
    except AttributeError:
        ## Leaf value: render it raw, then ask the parent node for the
        ## byte range of this field
        result.text("%s\n" % node, style='red', wrap='full', font='typewriter', sanitise='full')
        result.text('', style='black', font='normal')
        node = get_node(branch[:-1])
        start, length = node.get_range(branch[-1])
    h.dump(highlight=[ [start, length, 'highlight'], ])
    return
def display(self, query, result):
    """ Confirm that the filesystem image was loaded and immediately
    redirect to the BrowseFS report for the case. """
    heading = "Uploaded FS Image from IO Source %s to case %s" % (
        query["iosource"], query["case"])
    result.heading(heading)

    browse = FlagFramework.query_type((), case=query["case"],
                                      family="Disk Forensics",
                                      report="BrowseFS")
    result.link("Analyse this data", browse)

    ## Auto-redirect to the browser view
    result.refresh(0, FlagFramework.query_type((), case=query["case"],
                                               family="Disk Forensics",
                                               report="BrowseFS"))
def display(self, query, result):
    """ Remove the case named in query['remove_case'] and report the
    deletion.  DB errors are deliberately ignored (best effort). """
    case = query['remove_case']
    try:
        FlagFramework.delete_case(case)
    except DB.DBError:
        ## The case may be partially created/already gone
        pass
    result.heading("Deleted case")
    result.para("Case %s has been deleted" % case)
    return result
def error_popup(self,e):
    """ Draw the text in an error message box

    @arg e: The exception object to print
    """
    ui = GTKUI.GTKUI(server=main, ftoolbar=main.ftoolbar)
    ## Render the traceback into the UI, then show it in a dialog
    FlagFramework.get_traceback(e, ui)
    self.create_window(ui.display(), gtk.STOCK_DIALOG_ERROR)
def display(self,query,result):
    """ Delete the case given by query['remove_case'] and confirm it.

    A DB error (e.g. the case never fully existed) is swallowed - the
    outcome is the same from the user's point of view.
    """
    name = query['remove_case']
    try:
        FlagFramework.delete_case(name)
    except DB.DBError:
        pass

    result.heading("Deleted case")
    result.para("Case %s has been deleted" % name)
    return result
def error_popup(self, e):
    """ Draw the text in an error message box

    @arg e: The exception object to print
    """
    traceback_ui = GTKUI.GTKUI(server=main, ftoolbar=main.ftoolbar)
    FlagFramework.get_traceback(e, traceback_ui)
    ## Pop the rendered traceback up in an error dialog
    self.create_window(traceback_ui.display(), gtk.STOCK_DIALOG_ERROR)
def test01types(self):
    """ Exercise the type-browsing reports through the GUI test
    harness for the test case. """
    for family, report in (('Disk Forensics', 'Browse Types'),
                           ('Network Forensics', 'Browse HTTP Requests')):
        q = FlagFramework.query_type(family=family, report=report,
                                     case=self.test_case)
        self.gui_test(q)
def add_inodes(path, root_item):
    """ Recursively create VFS inodes for every item in the PST tree.

    Each PST item under *root_item* becomes an inode below *path*
    (slashes in item names are replaced to keep paths sane); its
    attachments, if any, become child inodes.  Every new inode is run
    through the scanner factories.
    """
    for item in pst_file.listitems(root_item):
        properties = item.properties()
        item_inode = "%s|P%s" % (self.fd.inode, item.get_id())
        new_path = FlagFramework.normpath(
            "%s/%s" % (path, item.__str__().replace('/', '_')))

        ## This is a little optimization - we save the
        ## cache copy of the property list so the File
        ## driver does not need to do anything:
        property_data = format_properties(properties)

        ## These are the inode properties:
        args = dict(size=len(property_data))

        ## BUG FIX: these were bare excepts, which also hid unrelated
        ## errors (and even KeyboardInterrupt).  Only a missing
        ## timestamp key should be ignored.
        try:
            args['_ctime'] = properties.get(
                'create_date', properties['arrival_date'])
        except KeyError:
            pass

        try:
            args['_mtime'] = properties.get(
                'modify_date', properties['sent_date'])
        except KeyError:
            pass

        self.ddfs.VFSCreate(None, item_inode, new_path, **args)

        ## Make sure we can scan it:
        fd = self.ddfs.open(inode=item_inode)
        Scanner.scanfile(self.ddfs, fd, self.factories)

        ## If its an email we create VFS nodes for its
        ## attachments:
        try:
            for i in range(len(properties['_attachments'])):
                att = properties['_attachments'][i]
                attachment_path = FlagFramework.normpath(
                    "%s/%s" % (new_path, att['filename1'].replace('/', '_')))

                args['size'] = len(att['body'])

                attach_inode = "%s:%s" % (item_inode, i)
                self.ddfs.VFSCreate(None, attach_inode, attachment_path,
                                    **args)

                ## Make sure we scan it:
                fd = self.ddfs.open(inode=attach_inode)
                Scanner.scanfile(self.ddfs, fd, self.factories)
        except KeyError:
            ## No _attachments property - not an email
            pass

        ## Recursively add the next inode:
        add_inodes(new_path, item)
def create(self, name,case, query):
    """ Construct a RAIDFD over the IO sources named in the query.

    offset and blocksize accept size suffixes (e.g. '32k'); period
    defaults to 3 stripes.
    """
    offset = FlagFramework.calculate_offset_suffix(query.get('offset','0'))
    filenames = self.glob_filenames(query.getarray('filename'))

    ## Open the io sources here
    fds = []
    for filename in filenames:
        fds.append(IO.open_URL(filename))

    blocksize = FlagFramework.calculate_offset_suffix(query.get('blocksize','32k'))
    period = int(query.get('period',3))

    return RAIDFD(fds, blocksize, query['map'], offset, period)
def tree(self, tree_cb=None, pane_cb=None, branch=('/'), layout="horizontal"):
    """ A Text tree implementation

    tree_cb(path) must yield (key, value, type) tuples for the
    children of path; pane_cb(path, ui) renders the right-hand pane
    for the currently open branch (taken from query['open_tree']).
    """
    query = self.defaults
    try:
        ## Get the right part:
        branch = FlagFramework.splitpath(query['open_tree'])
    except KeyError:
        branch = ['']

    #Start building the tree using the branch.
    def draw_branch(depth, tree_array):
        #We search through all the items until we find the one
        #that matches the branch for this depth, then recurse into
        #it.
        ## NOTE(review): branch_array is computed but never used
        branch_array = branch[:depth]
        path = FlagFramework.joinpath(branch[:depth])
        for k, v, t in tree_cb(path):
            if not k: continue
            if not t: continue
            tree_array.append((depth, k, v, t))
            try:
                if k == branch[depth]:
                    #Recurse into the next level in the tree
                    draw_branch(depth + 1, tree_array)
            except IndexError:
                ## We are already deeper than the open branch
                pass

    tree_array = []

    #The first item in the tree is the first one provided in branch
    if not branch[0]:
        tree_array.append((0, '/', '/', 'branch'))
    else:
        tree_array.append((0, branch[0], branch[0], 'branch'))

    #Build the tree_array
    draw_branch(1, tree_array)

    ## Left pane: indented text rendering, '+' marks branches
    left = self.__class__(self)
    for depth, k, v, t in tree_array:
        icon = '-'
        if t == "branch":
            icon = '+'
        left.text(" " * depth + icon + v.__str__() + "\r\n")

    ## Right pane: delegated to the caller's pane_cb
    right = self.__class__(self)
    path = FlagFramework.joinpath(branch)
    pane_cb(path, right)

    self.row(left, right)
def display(self,query,result):
    """ Draw a graph of the transitive knowledge links whose
    descriptions the user selected, collapsing each endpoint up to its
    root node of query['type'].

    NOTE(review): 'conditions' below is assembled by string
    concatenation from user-supplied 'deductions' values and then
    interpolated into SQL - potential SQL injection; should be
    rewritten with parameterized queries / DB.expand.
    """
    dbh = self.DBO(query['case'])
    graph = GraphViz(query['prog'],result)

    ##What conditions did the user ask to see?
    conditions = "description='%s'" % "' or description='".join(query.getarray('deductions'))

    ## If the user didnt ask to see disconnected nodes, we create a
    ## temporary knowledge table, else we use the original table
    if query.has_key('show_disc'):
        knowledge = 'knowledge'
    else:
        knowledge = dbh.get_temp()
        ## This gives us those nodes that appear in transitive links
        ## meeting the conditions
        dbh.execute("create table %s select * from knowledge as a where a.link='transitive' and (%s)",(knowledge,conditions))

    def find_root_node(name,type):
        """ Follows node named by name up the kb tree to find the node denoted by type

        @arg name: Name of node to start searching from
        @arg type: When a node of this type is found it is returned.
        @return: A node of the given type which is up the tree from the named node
        """
        dbh2 = self.DBO(query['case'])
        while 1:
            dbh2.execute('select type from knowledge_node where name = %r',name)
            rs = dbh2.fetch()
            if rs['type'] == type:
                return name
            ## Climb one level: follow the non-link parent edge
            dbh2.execute('select pname from knowledge where name = %r and link="no"' ,(name))
            rs = dbh2.fetch()
            if not rs:
                ## Reached the top without finding the requested type
                return None
            name = rs['pname']

    ## We follow each node up the tree to reach the root as defined by
    ## query['type']
    dbh.execute('select a.name,a.pname,description from %s as a,knowledge_node as b where a.name=b.name and a.link="transitive"',knowledge)
    for row in dbh:
        from_node = find_root_node(row['pname'],query['type'])
        to_node = find_root_node(row['name'],query['type'])

        ## Each root node links to its DisplayObject report
        new_query = FlagFramework.query_type((), family=query['family'], report='DisplayObject', object_name=from_node, case=query['case'] )
        graph.node(from_node,label=from_node,URL="f?%s" % new_query)

        new_query = FlagFramework.query_type((), family=query['family'], report='DisplayObject', object_name=to_node, case=query['case'] )
        graph.node(to_node,label=to_node,URL="f?%s" % new_query)

        graph.edge(from_node,to_node,label=row['description'])

    graph.draw()
def add_inodes(path, root_item):
    """ Walk the PST item tree rooted at *root_item*, creating a VFS
    inode (and scanner pass) for every item and every attachment.

    Item names have '/' replaced with '_' so they form valid VFS
    paths.
    """
    for item in pst_file.listitems(root_item):
        properties = item.properties()
        item_inode = "%s|P%s" % (self.fd.inode, item.get_id())
        new_path = FlagFramework.normpath(
            "%s/%s" % (path, item.__str__().replace('/','_')) )

        ## This is a little optimization - we save the
        ## cache copy of the property list so the File
        ## driver does not need to do anything:
        property_data = format_properties(properties)

        ## These are the inode properties:
        args = dict(size = len(property_data))

        ## BUG FIX: narrowed from bare excepts - only the case where
        ## neither timestamp property exists should be ignored; other
        ## errors must surface.
        try:
            args['_ctime'] = properties.get('create_date',
                                            properties['arrival_date'])
        except KeyError:
            pass

        try:
            args['_mtime'] = properties.get('modify_date',
                                            properties['sent_date'])
        except KeyError:
            pass

        self.ddfs.VFSCreate(None, item_inode, new_path, **args)

        ## Make sure we can scan it:
        fd = self.ddfs.open(inode = item_inode)
        Scanner.scanfile(self.ddfs, fd, self.factories)

        ## If its an email we create VFS nodes for its
        ## attachments:
        try:
            for i in range(len(properties['_attachments'])):
                att = properties['_attachments'][i]
                attachment_path = FlagFramework.normpath(
                    "%s/%s" % (new_path, att['filename1'].replace('/','_')))

                args['size'] = len(att['body'])

                attach_inode = "%s:%s" % (item_inode,i)
                self.ddfs.VFSCreate(None, attach_inode, attachment_path,
                                    **args)

                ## Make sure we scan it:
                fd = self.ddfs.open(inode = attach_inode)
                Scanner.scanfile(self.ddfs, fd, self.factories)
        except KeyError:
            ## No _attachments property - nothing more to do
            pass

        ## Recursively add the next inode:
        add_inodes(new_path, item)
def execute(self):
    """ Ensure the default flag database exists (creating it through
    the administrative 'mysql' connection if needed), then fire the
    init_default_db event.

    Yields a single completion message.
    """
    try:
        dbh = DB.DBO()
    except Exception:
        ## BUG FIX: this was a bare except, which also traps
        ## KeyboardInterrupt/SystemExit.  Connecting failed, so create
        ## the database and reconnect.
        dbh = DB.DBO('mysql')
        dbh.execute("create database `%s`" % config.FLAGDB)
        dbh = DB.DBO()

    FlagFramework.post_event("init_default_db", None)
    yield "Done"
def execute(self): try: case=self.args[0] dbh = DB.DBO(case) FlagFramework.delete_case(case) yield "Deleted case %r" %(case) except Exception,e: ## Should we just return here or report an error? return raise RuntimeError("Unable to delete case %s (%s)" %(case,e))
def display(self,query,result): ## Try to delete the old cases: try: dbh = DB.DBO() dbh.execute("select * from meta where property='flag_db'") for row in dbh: pyflaglog.log(pyflaglog.INFO, "Deleting case %s due to an upgrade" % row['value']) FlagFramework.delete_case(row['value']) except DB.DBError,e: pass
def display(self,query,result):
    """ Invalidate this report's cache for the inode, then jump to the
    ViewFile report for it. """
    ## Without the reset, re-scanning the same directory has no
    ## effect (the cached report would be replayed).
    FlagFramework.reset_all(family = query['family'],
                            report = query['report'],
                            case = query['case'],
                            path = query['inode'])

    viewer = FlagFramework.query_type(case = query['case'],
                                      family = 'Disk Forensics',
                                      report = 'ViewFile',
                                      inode = query['inode'])

    ## Browse the filesystem instantly
    result.refresh(0, viewer)
def readlink(self, path): try: result = self.fs.readlink(path) if not result: raise FuseError("Cannot read symbolic link %s" % path, 2) return result except FuseError: raise except Exception,e: print "%r: %s" % (e,e) print FlagFramework.get_bt_string(e)
def readdir(self, path, offset): try: path = os.path.normpath("%s/%s" % (self.root, path)) if not path.endswith('/'): path=path+'/' for e in self.fs.ls(path=path): if not e: continue yield fuse.Direntry(e.encode("utf8")) except Exception,e: print "%r: %s" % (e,e) print FlagFramework.get_bt_string(e)
def readdir(self, path, offset): try: path = os.path.normpath("%s/%s" % (self.root, path)) if not path.endswith('/'): path = path + '/' for e in self.fs.ls(path=path): if not e: continue yield fuse.Direntry(e.encode("utf8")) except Exception, e: print "%r: %s" % (e, e) print FlagFramework.get_bt_string(e)
def scan(self, fd, scanners, type, mime, cookie, scores=None, **args): if 'Filesystem' in type: print "Will load %s" % fd.urn.value fs = sk.skfs(fd) for root, dirs, files in fs.walk('/', unalloc=True, inodes=True): for d, dirname in dirs: self.create_map(fd, fs, d, FlagFramework.sane_join(root[1], dirname)) for f, filename in files: self.create_map(fd, fs, f, FlagFramework.sane_join(root[1], filename))
def readlink(self, path): try: result = self.fs.readlink(path) if not result: raise FuseError("Cannot read symbolic link %s" % path, 2) return result except FuseError: raise except Exception, e: print "%r: %s" % (e, e) print FlagFramework.get_bt_string(e)
def tree(self,tree_cb = None, pane_cb=None, branch = ('/'), layout="horizontal"):
    """ A Text tree implementation

    tree_cb(path) must yield (key, value, type) tuples describing the
    children of path; pane_cb(path, ui) draws the right-hand pane for
    the branch currently open (from query['open_tree']).
    """
    query = self.defaults
    try:
        ## Get the right part:
        branch=FlagFramework.splitpath(query['open_tree'])
    except KeyError:
        branch=['']

    #Start building the tree using the branch.
    def draw_branch(depth,tree_array):
        #We search through all the items until we find the one
        #that matches the branch for this depth, then recurse into
        #it.
        ## NOTE(review): branch_array is assigned but never used
        branch_array=branch[:depth]
        path = FlagFramework.joinpath(branch[:depth])
        for k,v,t in tree_cb(path):
            if not k: continue
            if not t: continue
            tree_array.append((depth,k,v,t))
            try:
                if k == branch[depth]:
                    #Recurse into the next level in the tree
                    draw_branch(depth+1,tree_array)
            except IndexError:
                ## Deeper than the open branch - stop recursing
                pass

    tree_array = []

    #The first item in the tree is the first one provided in branch
    if not branch[0]:
        tree_array.append((0,'/','/','branch'))
    else:
        tree_array.append((0,branch[0],branch[0],'branch'))

    #Build the tree_array
    draw_branch(1,tree_array)

    ## Left pane: depth-indented text, '+' marks branches
    left = self.__class__(self)
    for depth,k,v,t in tree_array:
        icon = '-'
        if t=="branch":
            icon = '+'
        left.text(" "*depth + icon + v.__str__() + "\r\n")

    ## Right pane is delegated to the caller
    right = self.__class__(self)
    path = FlagFramework.joinpath(branch)
    pane_cb(path, right)

    self.row(left, right)
def display(self, query, result):
    """ Clear the cached report for this inode so a re-scan actually
    runs, then redirect straight to ViewFile. """
    FlagFramework.reset_all(family=query["family"],
                            report=query["report"],
                            case=query["case"],
                            path=query["inode"])

    ## Jump to the file viewer immediately
    target = FlagFramework.query_type(case=query["case"],
                                      family="Disk Forensics",
                                      report="ViewFile",
                                      inode=query["inode"])
    result.refresh(0, target)
def create_output_file():
    """ Open (or create) the pcap output file and register it as an IO
    source for the case.

    If the file exists we append, holding an exclusive flock so only
    one writer runs; otherwise a new file gets a pcap global header.
    output_file == "-" disables file output entirely.
    """
    global output_fd, output_file
    print "Will read from %s and write to %s. Will use these scanners: %s" % (directory, output_file, scanners)

    ## Check if the file is already there:
    filename = config.UPLOADDIR + "/" + output_file
    if output_file != "-":
        try:
            os.stat(filename)
            ## Yep its there:
            output_fd = open(filename, "a")
            output_fd.seek(0, os.SEEK_END)
            ## NOTE(review): offset is computed here but is a local -
            ## it is not in the global statement above; confirm whether
            ## it is used elsewhere.
            offset = output_fd.tell()

            ## There can be only one:
            try:
                fcntl.flock(output_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except IOError, e:
                print "Highlander Error: %s" % e
                sys.exit(1)
        except OSError:
            output_fd = open(filename, "w")

            ## This is a hardcoded header for the output file: a
            ## little-endian libpcap global header (magic 0xa1b2c3d4,
            ## version 2.4, snaplen 65535, linktype 1 = Ethernet)
            header = "\xd4\xc3\xb2\xa1\x02\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00"
            offset = len(header)

            ## Write the file header on
            output_fd.write(header)
            output_fd.flush()
    else:
        output_fd = None
        offset = 0

    ## Make a new IO source for the output:
    try:
        pyflagsh.shell_execv(
            command="execute",
            argv=[
                "Load Data.Load IO Data Source",
                "case=%s" % config.case,
                "iosource=%s" % config.iosource,
                "subsys=Standard",
                "filename=%s" % (output_file),
                "offset=0",
            ],
        )
    except Reports.ReportError:
        ## The source may already be registered - just log the trace
        FlagFramework.print_bt_string()
def create_output_file():
    """ Prepare the pcap output file (append with an exclusive lock if
    it exists, else create with a fresh pcap header) and register it
    as an IO source.  output_file == '-' means no file output. """
    global output_fd, output_file
    print "Will read from %s and write to %s. Will use these scanners: %s" % (
        directory, output_file, scanners)

    ## Check if the file is already there:
    filename = config.UPLOADDIR + '/' + output_file
    if output_file != '-':
        try:
            os.stat(filename)
            ## Yep its there:
            output_fd = open(filename, 'a')
            output_fd.seek(0, os.SEEK_END)
            ## NOTE(review): offset is local (not in the global list) -
            ## confirm it is actually consumed somewhere.
            offset = output_fd.tell()

            ## There can be only one:
            try:
                fcntl.flock(output_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except IOError, e:
                print "Highlander Error: %s" % e
                sys.exit(1)
        except OSError:
            output_fd = open(filename, 'w')

            ## This is a hardcoded header for the output file: a
            ## little-endian libpcap global header (magic 0xa1b2c3d4,
            ## version 2.4, snaplen 65535, linktype 1 = Ethernet)
            header = '\xd4\xc3\xb2\xa1\x02\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00'
            offset = len(header)

            ## Write the file header on
            output_fd.write(header)
            output_fd.flush()
    else:
        output_fd = None
        offset = 0

    ## Make a new IO source for the output:
    try:
        pyflagsh.shell_execv(command="execute", argv=[
            "Load Data.Load IO Data Source",
            'case=%s' % config.case,
            "iosource=%s" % config.iosource,
            "subsys=Standard",
            "filename=%s" % (output_file),
            "offset=0",
        ])
    except Reports.ReportError:
        ## Source probably already registered - log the backtrace only
        FlagFramework.print_bt_string()
def getattr(self, path):
    """ FUSE stat callback: stat *path* via the wrapped fs.

    Missing paths raise FuseError(errno=2); an empty result is mapped
    to a generic directory stat (mode 040755).

    NOTE(review): the outer try has no except/finally visible in this
    chunk - the handler presumably follows outside this view; confirm
    the file is not truncated here.
    """
    print "Get attr"
    try:
        path = os.path.normpath("%s/%s" % (self.root, path))
        try:
            result = self.fs.lstat(path=path)
        except RuntimeError,e:
            print "Error: %s" % e
            print FlagFramework.get_bt_string(e)
            raise FuseError("%s Not found" % path, 2)

        if not result:
            ## 16877 == 040755: synthesise a plausible directory stat
            return os.stat_result((16877, 1L, 1, 1, 0, 0, 4096L, 0, 0, 0))

        return result
def getattr(self, path):
    """ FUSE stat callback: lstat *path* through the wrapped fs.

    RuntimeError from the fs layer becomes FuseError(errno=2); a falsy
    result is replaced by a synthetic directory stat (mode 040755).

    NOTE(review): the outer try lacks a visible except/finally in this
    chunk - its handler is presumably beyond this view; verify.
    """
    print "Get attr"
    try:
        path = os.path.normpath("%s/%s" % (self.root, path))
        try:
            result = self.fs.lstat(path=path)
        except RuntimeError, e:
            print "Error: %s" % e
            print FlagFramework.get_bt_string(e)
            raise FuseError("%s Not found" % path, 2)

        if not result:
            ## 16877 == 040755 - a generic directory entry
            return os.stat_result((16877, 1L, 1, 1, 0, 0, 4096L, 0, 0, 0))

        return result
def prepare(self):
    """ Build a (report, query) pair from the current shell args.

    args[0] must be 'family.report'; remaining args are key=value
    pairs.  Environment variables are folded in as defaults for keys
    not already set.

    Raises RuntimeError on a malformed report name or argument.
    """
    args=self.args
    query=FlagFramework.query_type(())
    try:
        query['family'],query['report']=args[0].split('.')
    except ValueError:
        ## BUG FIX: narrowed from a bare except - only a malformed
        ## 'family.report' string should yield this message.
        raise RuntimeError("Unable to parse %s as a family.report" % args[0])

    report = Registry.REPORTS.dispatch(query['family'],query['report'])

    ## Include the key=value arguments in the query:
    ## (dead commented-out code removed here)
    for arg in args[1:]:
        try:
            query[arg[:arg.index('=')]]=arg[arg.index('=')+1:]
        except ValueError:
            raise RuntimeError("Argument should be of the form key=value, got %s" % arg)

    ## Include environment variables in the query:
    for arg in dir(self.environment):
        if not arg.startswith('_') and not query.has_key(arg):
            try:
                query[arg]=self.environment.__dict__[arg]
            except KeyError:
                pass

    return report,query
def __init__(self, case, fd, inode): File.__init__(self, case, fd, inode) # strategy: must determine basepath from parent, get our path # from db and then return the file: ## Note this _must_ work because we can only ever be called on ## a mounted iosource - it is an error otherwise: basepath = fd.io.directory self.case = case dbh = DB.DBO(case) dbh.check_index("file", "inode") dbh.execute("select path,name from file where inode=%r limit 1", (inode)) row = dbh.fetch() path = row["path"] mount_point = fd.io.mount_point ## Prune the path down to the mount point: if path[: len(mount_point)] != mount_point: raise RuntimeError(DB.expand("Something went wrong - %s should be mounted on %s", (path, mount_point))) path = path[len(mount_point) :] path = basepath + "/" + path + "/" + row["name"] if not path.startswith(posixpath.normpath(config.UPLOADDIR)): path = FlagFramework.sane_join(config.UPLOADDIR, path) if os.path.isdir(path): self.fd = StringIO.StringIO("") else: self.fd = open(path, "r") s = os.stat(path) self.size = s.st_size
def stats(self, query,result, merge = None):
    """ Show statistics about the file

    Renders a two-column table: the filename (linked back to the
    filesystem browser) followed by one row per istat entry.  *merge*
    allows the caller to inject extra istat entries.
    """
    fsfd = DBFS(query['case'])
    istat = fsfd.istat(inode_id=query['inode_id'])

    left = result.__class__(result)

    ## Filename row links back into BrowseFS at this urn
    link = result.__class__(result)
    link.link(self.urn,
              FlagFramework.query_type((),family="Disk Forensics",
                  report='BrowseFS',
                  open_tree=self.urn, case=query['case'])
              )
    left.row("Filename:",'',link)

    if merge:
        istat.update(merge)

    try:
        for k,values in istat.items():
            for v in values:
                left.row('%s:' % k,'',v, align='left')
    except AttributeError:
        ## NOTE(review): presumably istat may not be a dict of lists -
        ## detail rows are silently skipped in that case; confirm.
        pass

    left.end_table()
    result.start_table(width="100%")
    result.row(left,valign='top',align="left")
def geoip_display_hook(self, value, row, result):
    """ Render whois and GeoIP details for the IP *value*, wrapped in
    a popup link to the LookupIP report. """
    ## We try to show a whois if possible
    whois_id = lookup_whois(value)

    outer = result.__class__(result)
    details = result.__class__(result)

    if config.WHOIS_DISPLAY:
        identify_network(whois_id, value, details)

    ## Older configurations may not define the GEOIP options at all -
    ## the attribute access itself raises AttributeError then.
    try:
        if config.GEOIP_DISPLAY:
            geoip_resolve(value, details)
    except AttributeError:
        pass

    try:
        if config.EXTENDED_GEOIP_DISPLAY:
            geoip_resolve_extended(value, details)
    except AttributeError:
        pass

    outer.link(details,
               target=FlagFramework.query_type(family="Log Analysis",
                                               report="LookupIP",
                                               address=value),
               pane='popup')

    result.start_table()
    result.row(outer)
    result.end_table()
def create_map(self, fd, fs, skfs_inode, path):
    """ Build an AFF4 map object for one sleuthkit inode.

    The map records, block by block, where the file's data lives in
    the source image (fd.urn), tags it alloc/deleted, and links it
    into the VFS under *path*.
    """
    block_size = fs.block_size

    ## "0-0-0" is the null inode - nothing to map
    if str(skfs_inode) == "0-0-0":
        return 1

    if skfs_inode.alloc:
        status = 'alloc'
    else:
        status = 'deleted'

    ## Add the map under the path
    skfd = fs.open(inode=skfs_inode)
    ## seek to the end to learn the file size
    skfd.seek(0, 2)
    size = skfd.tell()

    map = CacheManager.AFF4_MANAGER.create_cache_map(
        fd.case,
        "%s/__inodes__/%s" % (fd.urn.parser.query, skfs_inode),
        size=size, target=fd.urn,
        status=status)

    ## One map entry per filesystem block
    for block in skfd.blocks():
        map.write_from(fd.urn, block * block_size, block_size)

    ## update the size of the map
    map.size.set(size)

    CacheManager.AFF4_MANAGER.create_link(
        fd.case, map.urn,
        FlagFramework.sane_join(fd.urn.parser.query, path))

    map.close()
def geoip_display_hook(self, value, row, result):
    """ Show whois/GeoIP information for the IP address *value* inside
    a table cell that pops up the LookupIP report. """
    whois_id = lookup_whois(value)

    container = result.__class__(result)
    details = result.__class__(result)

    if config.WHOIS_DISPLAY:
        identify_network(whois_id, value, details)

    ## The GEOIP config options may be absent on older installs - the
    ## attribute lookup raises AttributeError, which we ignore.
    for flag, renderer in (('GEOIP_DISPLAY', geoip_resolve),
                           ('EXTENDED_GEOIP_DISPLAY', geoip_resolve_extended)):
        try:
            if getattr(config, flag):
                renderer(value, details)
        except AttributeError:
            pass

    container.link(details,
                   target=FlagFramework.query_type(family="Log Analysis",
                                                   report="LookupIP",
                                                   address=value),
                   pane='popup')

    result.start_table()
    result.row(container)
    result.end_table()
def parse(self,args):
    """ This method parses the args storing the option args in
    self.opts and non-option args in self.args. Note that we expect to
    have self.optlist as the getopt string for this command. """
    parsed_opts, leftover = getopt.gnu_getopt(args, self.optlist, self.long_opts)
    self.args = leftover
    self.opts = FlagFramework.query_type(parsed_opts)
def make_menu_javascript(self, query): """ Creates the javascript required to generate the menu """ ## Find all families: module_list = Registry.REPORTS.get_families() Theme.order_families(module_list) menus = [] menus_titles = [] result = '' for k in module_list: submenu_text = '' ## Add the reports in the family: report_list = Registry.REPORTS.family[k] for r in report_list: if r.hidden: continue submenu_text += '''<div dojoType="MenuItem2" caption="%s" onClick="update_container('main','%s');"></div>\n''' % ( r.name, FlagFramework.query_type((), family=k, report=r.name)) if len(submenu_text) > 0: menus.append( '<div dojoType="PopupMenu2" widgetId="%s" toggle="wipe">%s</div>\n' % (k, submenu_text)) menus_titles.append( '<div dojoType="MenuBarItem2" caption="%s" submenuId="%s"></div>\n' % (k, k)) return result + ''' %s <div dojoType="MenuBar2" id="menubar" layoutAlign="top">%s</div> <div dojoType="ToolbarContainer" layoutAlign="top" id="ToolbarContainer" bindArgs="preventCache:false;"> <div dojoType="Toolbar" id="toolbar"></div></div> ''' % ('\n'.join(menus), '\n'.join(menus_titles))
def form(self, query, result):
    """ Present the wizard for the automatic IIS log loader. """
    result.para(
        'NOTICE: This loader attempts to load IIS log files completely automatically by determining field names and types from the header comments, if this loader fails, please use the "Simple" loader'
    )

    def test(query, result):
        ## Parse the query and show a preview import so the user can
        ## verify the result before committing.
        self.parse(query)
        result.text(
            "The following is the result of importing the first few lines from the log file into the database.\nPlease check that the importation was successfull before continuing."
        )
        self.display_test_log(result)
        return True

    step_names = (
        "Step 1: Select Log File",
        "Step 2: View test result",
        "Step 3: Save Preset",
        "Step 4: End",
    )
    step_callbacks = (
        LogFile.get_file,
        test,
        FlagFramework.Curry(LogFile.save_preset, log=self),
        LogFile.end,
    )
    result.wizard(names=step_names, callbacks=step_callbacks)
def longls(self,path='/', dirs = None):
    """ Return the file-table rows for *path*.

    If path is a directory its direct children are listed, otherwise
    just the named file.  dirs=1 restricts to directories, dirs=0 to
    regular files, None lists both.  Each dent is augmented with its
    inode row when one exists.
    """
    dbh=DB.DBO(self.case)
    if self.isdir(path):
        ## If we are listing a directory, we list the files inside the directory
        if not path.endswith('/'):
            path=path+'/'

        where = DB.expand(" path=%r " ,path)
    else:
        ## We are listing the exact file specified:
        where = DB.expand(" path=%r and name=%r", (
            FlagFramework.normpath(posixpath.dirname(path)+'/'),
            posixpath.basename(path)))

    ## mode is interpolated directly, but only ever one of two fixed
    ## literals below; 'where' was escaped through DB.expand above.
    mode =''
    if(dirs == 1):
        mode=" and file.mode like 'd%'"
    elif(dirs == 0):
        mode=" and file.mode like 'r%'"

    dbh.execute("select * from file where %s %s", (where, mode))
    result = [dent for dent in dbh]

    ## Merge in the inode row for each dent that has one
    for dent in result:
        if dent['inode']:
            dbh.execute("select * from inode where inode = %r", dent['inode'])
            data = dbh.fetch()
            if data:
                dent.update(data)

    return result
class LibMagic(Magic.Magic):
    """ A Generic handler which uses libmagic """
    ## The two libmagic handles (descriptions and MIME types) are
    ## cached at class level so the magic databases load once per
    ## process, not once per instance.
    magic = None
    mimemagic = None

    def __init__(self):
        ## Lazily initialise the shared handles on first construction
        if not LibMagic.magic:
            LibMagic.magic = magic.open(magic.MAGIC_CONTINUE)
            if magic.load(LibMagic.magic, config.MAGICFILE) < 0:
                raise IOError("Could not open magic file %s" % config.MAGICFILE)

        if not LibMagic.mimemagic:
            LibMagic.mimemagic=magic.open(magic.MAGIC_MIME | magic.MAGIC_CONTINUE)
            if magic.load(LibMagic.mimemagic,config.MAGICFILE) < 0:
                raise IOError("Could not open magic file %s" % config.MAGICFILE)

    def score(self, data, case, inode_id):
        """ Classify *data*, storing the description in self.type and
        the MIME type in self.mime; returns a fixed score of 20. """
        ## The Magic library expects a byte string and does not look
        ## at encoding at all. We need to provide it a utf8 encoded
        ## string.
        data = FlagFramework.smart_str(data, errors='ignore')
        self.type = magic.buffer(LibMagic.magic, data)
        self.mime = magic.buffer(LibMagic.mimemagic, data)

        return 20
def prepare(self):
    """ Returns a report, query all ready from the current args

    args[0] must be of the form 'family.report'; later args are
    key=value pairs.  Environment variables fill in missing keys.

    Raises RuntimeError for a malformed report name or argument.
    """
    args = self.args
    query = FlagFramework.query_type(())
    try:
        query['family'], query['report'] = args[0].split('.')
    except ValueError:
        ## BUG FIX: this was a bare except; only the malformed
        ## 'family.report' case should produce this message.
        raise RuntimeError("Unable to parse %s as a family.report" % args[0])

    report = Registry.REPORTS.dispatch(query['family'], query['report'])

    ## Include the key=value arguments (dead commented-out code removed):
    for arg in args[1:]:
        try:
            query[arg[:arg.index('=')]] = arg[arg.index('=') + 1:]
        except ValueError:
            raise RuntimeError(
                "Argument should be of the form key=value, got %s" % arg)

    ## Include environment variables in the query:
    for arg in dir(self.environment):
        if not arg.startswith('_') and not query.has_key(arg):
            try:
                query[arg] = self.environment.__dict__[arg]
            except KeyError:
                pass

    return report, query
def parse(self, args):
    """ Parse command line arguments for this command.

    Option arguments are stored in self.opts (as a query_type) and
    non-option arguments in self.args. self.optlist and
    self.long_opts supply the getopt specification for this command.
    """
    parsed_opts, self.args = getopt.gnu_getopt(args, self.optlist,
                                               self.long_opts)
    self.opts = FlagFramework.query_type(parsed_opts)
def CaseTableTests(self, tablename): ## Apply each column's test filters: t = Registry.CASE_TABLES.dispatch(tablename)() result = HTMLUI.HTMLUI(initial=True) dbh = DB.DBO(self.test_case) ## For each column run all its test cases: elements = [c for c in t.bind_columns(self.test_case)] ## Create a renderer: r = UI.TableRenderer(elements=elements, table=tablename) for c in elements: for operator, arg, e in c.tests: try: ## Get the SQL: r.filter_str = "'%s' %s '%s'" % (c.name, operator, arg) query = FlagFramework.query_type(direction=1) sql = r._make_sql(query) print "%s: Testing %s: %s" % (tablename, c.__class__, r.filter_str) dbh.execute(sql + " limit 1") dbh.fetch() except Exception: if not e: raise continue if e: raise Exception( "Expected an exception but did not receive one on filter string %s. SQL was %s" % (r.filter_str, sql))
def form(self, query, result):
    """ Render the multi-step wizard used to configure a log preset. """

    def preview_raw_lines(query, result):
        ## Step 2: show the first few unprocessed lines of the file
        ## and let the user configure the column types.
        self.parse(query)
        result.start_table()
        result.row("Unprocessed text from file", colspan=5)
        sample = []
        for count, line in enumerate(self.read_record()):
            sample.append(line)
            ## Four sample lines are enough for a preview:
            if count >= 3:
                break
        result.row('\n'.join(sample), bgcolor='lightgray')
        result.end_table()
        self.draw_type_selector(result)

    def preview_import(query, result):
        ## Step 3: do a trial import and show the result.
        self.parse(query)
        result.text(
            "The following is the result of importing the first few lines from the log file into the database.\nPlease check that the importation was successfull before continuing."
        )
        self.display_test_log(result)
        return True

    result.wizard(
        names=["Step 1: Select Log File",
               "Step 2: Configure Columns",
               "Step 3: View test result",
               "Step 4: Save Preset",
               "Step 5: End"],
        callbacks=[LogFile.get_file,
                   preview_raw_lines,
                   preview_import,
                   FlagFramework.Curry(LogFile.save_preset, log=self),
                   LogFile.end])
def plain_display_hook(self, value, row, result):
    """ Render a clickable offset linking back into the disk image.

    value is "offset,inode". The offset is a virtual address which is
    translated to a physical image offset for the ViewFile link.
    """
    offset_str, inode = value.split(",")
    virtual_offset = int(offset_str)

    ## The target Inode is the inode we are operating on - drop the
    ## final component to get the underlying image inode:
    target_inode = "|".join(inode.split("|")[:-1])

    ## Note that the offset is in virtual address space; we want to
    ## send the user back to the image (physical address space) so we
    ## convert here:
    vol = get_vol_object(self.case, target_inode[1:])
    physical_offset = vol.addr_space.vtop(virtual_offset)

    target = FlagFramework.query_type(family="Disk Forensics",
                                      report="ViewFile",
                                      offset=physical_offset,
                                      inode=target_inode,
                                      case=self.case,
                                      memory=target_inode[1:],
                                      mode="HexDump")
    result.link("0x%08X" % virtual_offset, target=target, pane='new')
def longls(self, path='/', dirs=None):
    """ Return directory entries for path with inode data merged in.

    A path naming a directory lists its children; otherwise the single
    named file is listed. dirs=1 keeps only directories, dirs=0 only
    regular files, None keeps both.
    """
    dbh = DB.DBO(self.case)

    if self.isdir(path):
        ## Directory: normalise to a trailing slash before matching.
        path = path if path.endswith('/') else path + '/'
        condition = DB.expand(" path=%r ", path)
    else:
        ## Exact file: match on parent path plus basename.
        parent = FlagFramework.normpath(posixpath.dirname(path) + '/')
        condition = DB.expand(" path=%r and name=%r",
                              (parent, posixpath.basename(path)))

    ## Map the dirs flag onto a mode filter clause:
    mode_filter = {1: " and file.mode like 'd%'",
                   0: " and file.mode like 'r%'"}.get(dirs, '')

    dbh.execute("select * from file where %s %s", (condition, mode_filter))
    rows = list(dbh)

    ## Merge each entry's inode record into the result row:
    for dent in rows:
        if dent['inode']:
            dbh.execute("select * from inode where inode = %r",
                        dent['inode'])
            extra = dbh.fetch()
            if extra:
                dent.update(extra)

    return rows
def display(self, value, row, result):
    """ Render thumbnails for all attachments of a webmail message.

    value is the webmail message's inode_id; each attachment is shown
    as a thumbnail linking to its ViewFile summary in a new pane.
    """
    dbh = DB.DBO(self.case)
    fsfd = FileSystem.DBFS(self.case)
    dbh.execute("select file.inode_id as inode_id, name from file, webmail_attachments where webmail_attachments.inode_id = %r and file.inode_id = webmail_attachments.attachment", value)
    ## Loop variable renamed: the original shadowed the `row` parameter.
    for attachment in dbh:
        tmp = result.__class__(result)
        try:
            fd = fsfd.open(inode_id=attachment['inode_id'])
            image = Graph.Thumbnailer(fd, 100)
        except IOError:
            ## BUG FIX: the original fell through with `pass` and then
            ## used `image`, which was unbound on the first failure
            ## (NameError) or stale from a previous iteration. Skip
            ## attachments we cannot open instead.
            continue

        if image.height > 0:
            tmp.image(image, width=image.width, height=image.height)
        else:
            tmp.image(image, width=image.width)

        ## Truncate long attachment names for display; the full name
        ## is still available in the tooltip.
        name = attachment['name']
        if len(name) > 20:
            name = name[:20] + " ..."
        tmp.para(name)

        link = result.__class__(result)
        link.link(tmp,
                  tooltip=attachment['name'],
                  pane='new',
                  target=FlagFramework.query_type(family="Disk Forensics",
                                                  report="ViewFile",
                                                  case=self.case,
                                                  mode='Summary',
                                                  inode_id=attachment['inode_id']))
        result.row(link)
def display(self, value, row, result):
    """ Render thumbnails for all attachments of a webmail message.

    value is the webmail message's inode_id; each attachment is shown
    as a thumbnail linking to its ViewFile summary in a new pane.
    """
    dbh = DB.DBO(self.case)
    fsfd = FileSystem.DBFS(self.case)
    dbh.execute(
        "select file.inode_id as inode_id, name from file, webmail_attachments where webmail_attachments.inode_id = %r and file.inode_id = webmail_attachments.attachment",
        value)
    ## Loop variable renamed: the original shadowed the `row` parameter.
    for attachment in dbh:
        tmp = result.__class__(result)
        try:
            fd = fsfd.open(inode_id=attachment['inode_id'])
            image = Graph.Thumbnailer(fd, 100)
        except IOError:
            ## BUG FIX: the original fell through with `pass` and then
            ## used `image`, which was unbound on the first failure
            ## (NameError) or stale from a previous iteration. Skip
            ## attachments we cannot open instead.
            continue

        if image.height > 0:
            tmp.image(image, width=image.width, height=image.height)
        else:
            tmp.image(image, width=image.width)

        ## Truncate long attachment names for display; the full name
        ## is still available in the tooltip.
        name = attachment['name']
        if len(name) > 20:
            name = name[:20] + " ..."
        tmp.para(name)

        link = result.__class__(result)
        link.link(tmp,
                  tooltip=attachment['name'],
                  pane='new',
                  target=FlagFramework.query_type(
                      family="Disk Forensics",
                      report="ViewFile",
                      case=self.case,
                      mode='Summary',
                      inode_id=attachment['inode_id']))
        result.row(link)