def get_url(inode_id, case):
    try:
        store = URL_STORE.get(case)
    except KeyError:
        store = Store.Store()
        URL_STORE.put(store, key=case)

    ## Now try to retrieve the URL:
    try:
        url = store.get(inode_id)
    except KeyError:
        url = ''
        dbh = DB.DBO(case)
        dbh.execute("select url from http where inode_id=%r limit 1", inode_id)
        row = dbh.fetch()
        if not row:
            dbh.execute("select url from http_sundry where id=%r limit 1",
                        inode_id)
            row = dbh.fetch()

        if row:
            url = row['url']
        else:
            ## It's not in the http table, maybe it's in the VFS:
            dbh.execute("select concat(path,name) as path from vfs "
                        "where inode_id = %r limit 1", inode_id)
            row = dbh.fetch()
            if row:
                url = row['path']

        store.put(url, key=inode_id)

    return url
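
## The following is an illustrative sketch, not PyFlag code: it shows the
## Store idiom used by get_url() above (and throughout these modules).
## Store.get() raises KeyError on a miss, and Store.put(obj, key=...) caches
## the freshly computed value. EXPENSIVE_CACHE, expensive_lookup and
## cached_lookup are made-up names for this example.
EXPENSIVE_CACHE = Store.Store()

def expensive_lookup(key):
    ## Stand-in for something costly (a DB query, parsing a file, ...):
    return key.upper()

def cached_lookup(key):
    try:
        ## Fast path - the value is already cached:
        return EXPENSIVE_CACHE.get(key)
    except KeyError:
        ## Slow path - compute it once and cache it for next time:
        value = expensive_lookup(key)
        EXPENSIVE_CACHE.put(value, key=key)
        return value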
def worker_run(keepalive=None):
    """ The main loop of the worker """
    ## It is an error to fork with db connections
    ## established... they can not be shared:
    if DB.db_connections > 0:
        ## We try to fix it by making the child get new
        ## handlers. Note that the child still needs to hold the
        ## handles or they will get closed on the parent as well
        ## - this seems like a hack
        DB.DBO.DBH_old = DB.DBO.DBH
        DB.DBO.DBH = Store.Store(max_size=10)
        DB.db_connections = 0

    ## These are all the methods we support
    jobs = []
    my_pid = os.getpid()

    ## This is the last broadcast message we handled. We will
    ## only handle broadcasts newer than this.
    broadcast_id = 0
    try:
        dbh = DB.DBO()
        dbh.execute("select max(id) as max from jobs")
        row = dbh.fetch()
        broadcast_id = row['max'] or 0
    except:
        pass

    while 1:
        ## Ping the parent
        try:
            if keepalive:
                os.write(keepalive, "Checking")
        except Exception, e:
            print e
            pyflaglog.log(pyflaglog.WARNING, "Our nanny died - quitting")
            os._exit(1)

        ## Check for memory usage
        check_mem(os._exit, 0)

        ## Check for new tasks:
        if not jobs:
            try:
                r = win32event.WaitForMultipleObjects(
                    [SyncEvent, TerminateEvent], False, 10000)
                if r == 1:
                    ## TerminateEvent signaled
                    sys.exit(0)
            except (NameError, AttributeError), e:
                time.sleep(config.JOB_QUEUE_POLL)
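
## Hedged sketch, not PyFlag code: one way a parent process could supply the
## keepalive descriptor that worker_run() pings above. The parent keeps the
## read end of a pipe; if the parent dies, the child's os.write() fails and
## the worker exits. spawn_worker is a made-up name.
def spawn_worker():
    r, w = os.pipe()
    pid = os.fork()
    if pid == 0:
        ## Child: note the fork should happen before any DB handles are
        ## opened, or they must be reset as worker_run() does above.
        os.close(r)
        worker_run(keepalive=w)
        os._exit(0)

    ## Parent: keep the read end open for the lifetime of the worker.
    os.close(w)
    return pid, r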
                              'on','off'],['Enabled','Disabled'] )

            result.end_table()
            result.end_form()

        right=result.__class__(result)
        right.popup(configure_cb,"Configure %s" % self.group,icon="spanner.png")
        left.row(right,self.description)
        result.const_selector(left, scan_group_name,
                              ['on','off'],['Enabled','Disabled'])

## This is a global store for factories:
import pyflag.Store as Store

factories = Store.Store()

def get_factories(case,scanners):
    """ Scanner factories are obtained from the Store or created as required.

    Scanners is a list in the form case:scanner
    """
    ## Ensure dependencies are satisfied
    scanners = ScannerUtils.fill_in_dependancies(scanners)

    ## First prepare the required factories:
    result = []
    for scanner in scanners:
        key = DB.expand("%s:%s", (case,scanner))
        try:
            f=factories.get(key)
        except KeyError:
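
## Hedged usage sketch (the case and scanner names below are made up): a
## caller passes the case and the scanner names selected for it, and gets
## back the prepared factory instances held in the factories Store.
selected_factories = get_factories("demo_case", ["TypeScan", "ZipScan"])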
    def populate_tasks(self):
        ## Maybe it's not populated yet
        task_start_vaddr = self.symtable.lookup('init_tasks')
        print process_list
        task_list = process_list(self.addr_space,
                                 self.theProfile.abstract_types,
                                 self.symtable, self.theProfile)
        for task in task_list:
            self.tasks[task.pid] = task

import pyflag.Store as Store

VOLATILITY_CACHE = Store.Store(max_size=3)

def get_vol_object(case, iosource_name):
    key = "%s:%s" % (case, iosource_name)
    try:
        return VOLATILITY_CACHE.get(key)
    except KeyError:
        dbh = DB.DBO(case)
        dbh.execute("select * from filesystems where iosource = %r and "
                    "property='profile' limit 1", iosource_name)
        row = dbh.fetch()
        profile = row['value']
        dbh.execute(
import pyflag.IO as IO
import pyflag.FlagFramework as FlagFramework
from pyflag.FlagFramework import normpath
import pyflag.Registry as Registry
import pyflag.pyflaglog as pyflaglog
import time, re
import math
import bisect
import zipfile
import StringIO
import pyflag.Scanner as Scanner
import pyflag.Graph as Graph
import pyflag.Store as Store
import pyflag.CacheManager as CacheManager

FSCache = Store.Store()

class FileSystem:
    """ This is the base class for accessing file systems in PyFlag.

    This class is abstract and is here purely for documentation purposes.

    @cvar name: The name of the filesystem to show in the loadfs dialog
    """
    ## This is the cookie which will be used to identify scanning jobs
    ## from this FS:
    cookie = 0

    def __init__(self, case, query=None):
        """ Constructor for creating a new filesystem object

        @arg case: Case to use
    a.seek(0)
    result = a.read()
    a.close()
    return result

def print_bt_string():
    print get_bt_string()

def get_traceback(e, result):
    result.heading("%s: %s" % (sys.exc_info()[0], sys.exc_info()[1]))
    result.text(get_bt_string(e))

STORE = Store.Store(1000)

class FlagException(Exception):
    """ Generic Flag Exception """
    pass

class query_type:
    """ A generic wrapper for holding CGI parameters.

    This is almost like a dictionary, except that there are methods
    provided to give access to CGI arrays obtained by repeated use of
    the same key multiple times.

    @note: This property necessitates the sometimes unintuitive way of
    resetting a parameter by initially deleting it. For example, to
    change the 'report' parameter in query you must do:

    >>> del query['report']
    >>> query['report'] = 'newvalue'
                          self.inode, "T%s" % i, namelist[i],
                          size=tar.getmember(namelist[i]).size,
                          _mtime=tar.getmember(namelist[i]).mtime,
                          uid=tar.getmember(namelist[i]).uid,
                          gid=tar.getmember(namelist[i]).gid,
                          mode=oct(tar.getmember(namelist[i]).mode),
                          )

            new_inode="%s|T%s" % (self.inode,i)
            inodes.append(new_inode)

        for inode in inodes:
            ## Scan the new file using the scanner train:
            fd=self.ddfs.open(inode=inode)
            Scanner.scanfile(self.ddfs,fd,self.factories)

ZIPCACHE = Store.Store(max_size=5)

## These are the corresponding VFS modules:
class ZipFile(File):
    """ A file like object to read files from within zip files.

    We essentially decompress the file on the disk because the file
    may be exceptionally large.
    """
    specifier = 'Z'

    def __init__(self, case, fd, inode):
        File.__init__(self, case, fd, inode)

        ## Make sure our parent is cached:
        self.fd.cache()
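
## Hedged sketch, not PyFlag's implementation: what a small cache like
## ZIPCACHE above is useful for - keeping a handful of parsed
## zipfile.ZipFile objects around so that reading several members of the
## same archive does not re-parse the central directory every time.
## open_zip_member and its arguments are made-up names.
import zipfile

def open_zip_member(fd, inode, member_name):
    try:
        z = ZIPCACHE.get(inode)
    except KeyError:
        z = zipfile.ZipFile(fd)
        ZIPCACHE.put(z, key=inode)

    return z.open(member_name)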
    dbh = DB.DBO()
    dbh.delete("whois_cache", where="1", _fast=True)

## This is not needed because we use DB caching anyway:
##config.add_option("GEOIP_MEMORY_CACHE", default=True,
##                  help="Should the GEOIP database(s) (if found) be loaded into memory? Will result in better performance but will use more memory")

## NYI - Currently the PRECACHE IP METADATA does this
#config.add_option("SEARCHABLE_WHOIS", default=True, help="Should the WHOIS data be preloaded so you can search on it (makes things slower)")
#config.add_option("SEARCHABLE_ORG
#config.add_option("SEARCHABLE_ISP

## A cache of whois addresses - This really does not need to be
## invalidated as the data should never change
WHOIS_CACHE = Store.Store()

## Try for the GeoIP City Stuff....
def load_geofile(name, type):
    filename = os.path.join(config.GEOIPDIR, name)
    if not os.access(filename, os.R_OK):
        raise IOError("%s not found" % filename)

    return GeoIP(filename, type)

try:
    from geoip import GeoIP, GEOIP_CITY_EDITION_REV1, GEOIP_ORG_EDITION, GEOIP_ISP_EDITION
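
## Hedged sketch of how load_geofile() above is typically consumed: GeoIP
## support is optional, so a missing database or missing geoip bindings
## simply disables the lookup rather than aborting. The variable name and
## the database filename here are assumptions.
try:
    gi_city_resolver = load_geofile("GeoLiteCity.dat", GEOIP_CITY_EDITION_REV1)
except (IOError, NameError):
    ## Either the .dat file is unreadable or the geoip import above failed.
    gi_city_resolver = None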
def IODrawForm(query, result, subsys='subsys'):
    """ Draws the correct form on the result depending on the
    query['subsys']. Returns True if all parameters are filled in,
    False otherwise.
    """
    io = subsystems[query[subsys]]
    io = io(query, result, subsys)
    return io.form(query, result)

import pyflag.DB as DB

## This caches the io subsys
IO_Cache = Store.Store()

class FileHandler:
    """ This is a base class for handling files.

    PyFlag needs to access different kinds of files all the time. It's
    convenient to have a single function which can be used to access
    files regardless of the method. This is provided by implementors
    of this class.

    The open factory function below takes a URL to access the file in
    the form:

    method:/name/path
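
## Hedged sketch (not the real PyFlag open() factory) of how a
## "method:/name/path" URL from the docstring above could be dispatched to a
## handler; the handlers mapping and its callables are assumptions made for
## illustration only.
import re

def open_url(url, handlers):
    m = re.match(r"(\w+):(.*)", url)
    if not m:
        raise IOError("Unsupported URL: %s" % url)

    method, path = m.group(1), m.group(2)
    try:
        open_cb = handlers[method]
    except KeyError:
        raise IOError("No handler registered for method %r" % method)

    return open_cb(path)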
class SanitizingTag2(SanitizingTag):
    """ A more restrictive sanitiser which removes e.g. body tags etc """
    allowable_tags = [ 'b', 'i', 'a', 'img', 'em', 'br', 'strong', 'tt',
                       'li', 'ol', 'ul', 'p', 'table', 'td', 'tr', 'h1',
                       'h2', 'h3', 'pre', 'form', 'html', 'pre', 'body',
                       'sup', 'input', 'label', 'option', 'select', 'div',
                       'span', 'nobr', 'u', 'textarea', 'center', 'small' ]

    forbidden_tag = ['script', 'style', 'meta', 'head']

import pyflag.Store as Store

URL_STORE = Store.Store()

def get_url(inode_id, case):
    try:
        store = URL_STORE.get(case)
    except KeyError:
        store = Store.Store()
        URL_STORE.put(store, key=case)

    ## Now try to retrieve the URL:
    try:
        url = store.get(inode_id)
    except KeyError:
        url = ''
        dbh = DB.DBO(case)