def search_next_text_region(self, query, result): """ searches for the next text region and updates query['period_number'] """ ## Open all the disks filenames = query.getarray('filename') fds = [IO.open_URL(f) for f in filenames] if query.get('ismissing', False): fds.append(ParityFD(copy.deepcopy(filenames))) period_number = int(query.get('period_number', 0)) + 1 blocksize = FlagFramework.calculate_offset_suffix(query['blocksize']) period = FlagFramework.calculate_offset_suffix(query['period']) p = 0 while 1: offset = blocksize * (p + period_number * period) for fd in fds: fd.seek(offset) ## We classify a text region as one with 20 chars at ## the start of the period data = fd.read(20) if not data: result.heading("Error") result.para("Unable to read data from %r" % fd) return m = self.text_re.match(data) if m: period_number = period_number + p / period query.set('period_number', period_number) result.refresh(0, query, 'parent') return p += 1
def __init__(self, filenames, offset):
    """Open every file in *filenames* and initialise the offsetted file.

    filenames -- a list of filenames (or URLs understood by
                 IO.open_URL).  A single string is accepted as a
                 convenience and treated as a one-element list.
    offset    -- the byte offset passed through to OffsettedFDFile.
    """
    # isinstance() also matches str subclasses, unlike the original
    # type(filenames) == str comparison.
    if isinstance(filenames, str):
        filenames = [filenames]
    fds = [IO.open_URL(filename) for filename in filenames]
    OffsettedFDFile.__init__(self, fds, offset)
def search_next_text_region(self, query, result): """ searches for the next text region and updates query['period_number'] """ ## Open all the disks filenames = query.getarray('filename') fds = [ IO.open_URL(f) for f in filenames ] if query.get('ismissing',False): fds.append(ParityFD(copy.deepcopy(filenames))) period_number = int(query.get('period_number',0)) + 1 blocksize = FlagFramework.calculate_offset_suffix(query['blocksize']) period = FlagFramework.calculate_offset_suffix(query['period']) p=0 while 1: offset = blocksize * (p + period_number * period) for fd in fds: fd.seek(offset) ## We classify a text region as one with 20 chars at ## the start of the period data = fd.read(20) if not data: result.heading("Error") result.para("Unable to read data from %r" % fd) return m = self.text_re.match(data) if m: period_number = period_number + p / period query.set('period_number',period_number) result.refresh(0, query, 'parent') return p += 1
def create(self, name, case, query):
    """Construct a RAIDFD IO source from the supplied query.

    The 'offset' and 'blocksize' query values may carry size suffixes
    (e.g. '32k') and are decoded by calculate_offset_suffix.  When the
    'ismissing' flag is set, an XOR parity reconstruction of the absent
    disk is added to the set of file descriptors.
    """
    offset = FlagFramework.calculate_offset_suffix(query.get('offset', '0'))
    filenames = self.glob_filenames(query.getarray('filename'))

    ## Open the io sources here
    fds = [IO.open_URL(fn) for fn in filenames]
    if query.get('ismissing', False):
        fds.append(ParityFD(copy.deepcopy(filenames)))

    blocksize = FlagFramework.calculate_offset_suffix(query.get('blocksize', '32k'))
    period = int(query.get('period', 3))
    return RAIDFD(fds, blocksize, query['map'], offset, period)
def tree_cb(path):
    """Tree-widget callback: yield a branch node for each subkey of *path*.

    Opens the registry hive named by query['file'] (query is captured
    from the enclosing scope) and yields (name, name, 'branch') tuples
    for every key under *path*.
    """
    fd = IO.open_URL(query['file'])
    b = Buffer(fd=fd)
    header = RegFile.RegF(b)
    key = header.get_key(path)
    for k in key.keys():
        try:
            name = k['key_name'].get_value()
        # Narrowed from a bare except: the best-effort fallback stays,
        # but KeyboardInterrupt/SystemExit are no longer swallowed.
        except Exception:
            name = None
        yield (name, name, 'branch')
def read(self, length):
    """Read *length* bytes of XOR parity computed across all member files.

    Each file in self.filenames is reopened, seeked to the current read
    pointer, and the byte-wise XOR of their contents is returned --
    reconstructing the content of a missing RAID member from the
    surviving disks.

    Note: zip() truncates to the shortest chunk, so a short read on any
    member shortens the result (same behaviour as the original).
    """
    fds = [IO.open_URL(f) for f in self.filenames]
    for fd in fds:
        fd.seek(self.readptr)
    # Accumulate the XOR as a list of ints.  None marks "not seeded yet";
    # the original compared with `data == False`, which also matches 0
    # and [] -- `is None` is an unambiguous sentinel.
    acc = None
    for fd in fds:
        chunk = fd.read(length)
        if acc is None:
            acc = [ord(c) for c in chunk]
        else:
            acc = [x ^ ord(c) for x, c in zip(acc, chunk)]
    self.readptr += length
    return ''.join(chr(x) for x in acc)
def read(self, length):
    """Return *length* bytes of byte-wise XOR parity over all member files.

    Every file in self.filenames is reopened on each call, seeked to the
    current read pointer, and XOR-folded together.  zip() truncates to
    the shortest read, so a short member file shortens the result.
    """
    fds = [IO.open_URL(f) for f in self.filenames]
    for fd in fds:
        fd.seek(self.readptr)
    data = False
    for fd in fds:
        if data == False:
            # First disk seeds the accumulator with its raw byte values.
            data = map(lambda x: ord(x), fd.read(length))
        else:
            # XOR each subsequent disk into the accumulator.
            data = map(lambda (x, y): x ^ y,
                       zip(data, map(lambda z: ord(z), fd.read(length))))
    self.readptr += length
    return ''.join(map(lambda x: chr(x), data))
def details(query, result):
    """Render the full contents of the registry key at *path*.

    *path* is captured from the enclosing scope.  The key header is
    printed first, then each value under the key, best-effort (values
    that fail to render are skipped).
    """
    fd = IO.open_URL(query['file'])
    b = Buffer(fd=fd)
    header = RegFile.RegF(b)
    key = header.get_key(path)
    result.heading("Key %s" % path)
    result.text("%s" % key, font='typewriter', wrap='full')
    for v in key.values():
        try:
            name = "%s" % v['keyname']
            result.heading("%s" % name)
            result.text("%s" % v, font='typewriter', wrap='full')
        # Narrowed from a bare except: keep best-effort rendering but do
        # not swallow KeyboardInterrupt/SystemExit.
        except Exception:
            pass
def map_popup(query, result):
    """Popup for editing the RAID block map.

    Renders a form over the configured disks; on submission the
    'position_N' form fields are reassembled into the dotted 'map'
    query variable and validated by instantiating a RAIDFD.
    """
    result.decoration = 'naked'
    try:
        map = str(query['map']).split('.')
    except KeyError:
        map = []
    result.start_form(query)
    ## Open all the disks
    filenames = query.getarray('filename')
    fds = [IO.open_URL(f) for f in filenames]
    if query.get('ismissing', False):
        # Add the parity reconstruction and a label for the absent disk.
        fds += [ParityFD(copy.deepcopy(filenames))]
        filenames += ('Missing Disk', )
    # One child UI per disk column.
    uis = [result.__class__(result) for f in filenames]
    period_number = int(query.get('period_number', 0))
    blocksize = FlagFramework.calculate_offset_suffix(query['blocksize'])
    period = FlagFramework.calculate_offset_suffix(query['period'])
    logical_period_size = period * (len(fds) - 1)
    ## Let the user know our disk offset
    result.para("Disk Offset is %s (%s periods)" %
                (blocksize * (period_number * period), period_number))
    if query.has_key("__submit__"):
        ## Build the new map variable
        query.clear('map')
        map = []
        for x in range(period * len(fds)):
            map.append(query['position_%s' % x])
            query.clear('position_%s' % x)
        query.set('map', '.'.join(map))
        ## Now check that this map is valid by instantiating a
        ## RAIDFD (we will raise if anything is wrong:
        try:
            RAIDFD(fds, blocksize, query['map'], 0, period)
            result.refresh(0, query, pane='parent')
        except Exception, e:
            result.heading("Error with map")
            result.para("%s" % e)
    # NOTE(review): placement of this return relative to the submit
    # branch is ambiguous in the original layout -- confirm.
    return result
def map_popup(query,result):
    """RAID map editing popup.

    Draws a form for the disk layout; when submitted, the per-position
    form fields are joined into the dotted 'map' query variable, which
    is validated by constructing a RAIDFD before refreshing the parent.
    """
    result.decoration = 'naked'
    try:
        map = str(query['map']).split('.')
    except KeyError:
        map = []
    result.start_form(query)
    ## Open all the disks
    filenames = query.getarray('filename')
    fds = [ IO.open_URL(f) for f in filenames ]
    if query.get('ismissing',False):
        # Parity-reconstructed stand-in for the missing disk, plus label.
        fds += [ParityFD(copy.deepcopy(filenames))]
        filenames += ('Missing Disk',)
    # A child UI per disk column.
    uis = [ result.__class__(result) for f in filenames ]
    period_number = int(query.get('period_number',0))
    blocksize = FlagFramework.calculate_offset_suffix(query['blocksize'])
    period = FlagFramework.calculate_offset_suffix(query['period'])
    logical_period_size = period * (len(fds)-1)
    ## Let the user know our disk offset
    result.para("Disk Offset is %s (%s periods)" %
                (blocksize * (period_number * period), period_number))
    if query.has_key("__submit__"):
        ## Build the new map variable
        query.clear('map')
        map = []
        for x in range(period * len(fds)):
            map.append(query['position_%s' % x])
            query.clear('position_%s' % x)
        query.set('map', '.'.join(map))
        ## Now check that this map is valid by instantiating a
        ## RAIDFD (we will raise if anything is wrong:
        try:
            RAIDFD(fds, blocksize, query['map'], 0, period)
            result.refresh(0,query,pane='parent')
        except Exception,e:
            result.heading("Error with map")
            result.para("%s" % e)
    # NOTE(review): exact nesting of this return in the original layout
    # is ambiguous -- confirm against upstream source.
    return result
def pane_cb(path, result):
    """Pane callback: render the registry key at *path* as a value table.

    Shows the key's write timestamp, a toolbar button that opens the
    full details view, and one row per value under the key.
    """
    fd = IO.open_URL(query['file'])
    b = Buffer(fd=fd)
    header = RegFile.RegF(b)
    key = header.get_key(path)
    result.text("Timestamp: %s" % key['WriteTS'], style='red')
    result.start_table(**{'class': 'GeneralTable'})

    ## We don't want to reference the keys because we
    ## will leak memory while the callback remains stored.
    def details(query, result):
        # Re-open the hive rather than closing over `key` (see note above).
        fd = IO.open_URL(query['file'])
        b = Buffer(fd=fd)
        header = RegFile.RegF(b)
        key = header.get_key(path)
        result.heading("Key %s" % path)
        result.text("%s" % key, font='typewriter', wrap='full')
        for v in key.values():
            try:
                name = "%s" % v['keyname']
                result.heading("%s" % name)
                result.text("%s" % v, font='typewriter', wrap='full')
            except:
                pass

    result.toolbar(cb=details, text="Examine Details", icon="examine.png")
    result.row('Type', 'Length', 'Name', 'Value', **{'class': 'hilight'})
    for v in key.values():
        try:
            t = "%s" % v['data']['val_type']
            length = "%s" % v['data']['len_data']
            name = "%s" % v['keyname']
            data = "%s" % v['data']
            # Truncate the rendered data to the first 100 characters.
            data = RAW(data[:100])
            result.row(t, length, name, data)
        except Exception, e:
            print e
            pass
def read_record(self, ignore_comment=True):
    """ Generates records.

    This can handle multiple files as provided in the constructor.
    Lines are read in 1024-byte chunks and yielded one at a time;
    blank lines are skipped, and lines starting with '#' are skipped
    when ignore_comment is true.

    Raises IOError when self.datafile has not been set.
    """
    # Raw string so \s is a regex escape, not a (deprecated) str escape.
    blank = re.compile(r"^\s*$")
    if self.datafile is None:
        raise IOError("Datafile is not set!!!")

    for file in self.datafile:
        ## open the file as a url:
        fd = IO.open_URL(file)
        buffer = ''
        while 1:
            # Top the buffer up before trying to split a line off.
            if len(buffer) < 1024:
                data = fd.read(1024)
                buffer = buffer + data
            # str.split always returns at least one element, so the
            # original `if len(tmp) == 0: break` was unreachable.
            tmp = buffer.split("\n", 1)
            line = tmp[0]
            try:
                buffer = tmp[1]
            except IndexError:
                # No newline in the buffer yet -- read more.  At EOF we
                # stop here.  NOTE(review): an unterminated final line
                # is dropped, as in the original -- confirm intended.
                data = fd.read(1024)
                if len(data) == 0:
                    break
                buffer = line + data
                continue
            if blank.match(line) or not line:
                continue
            if line.startswith('#') and ignore_comment:
                continue
            else:
                yield line
def read_record(self, ignore_comment = True):
    """ Generates records.

    This can handle multiple files as provided in the
    constructor.  Yields one line at a time, skipping blank lines and
    (when ignore_comment is true) lines beginning with '#'.

    Raises IOError when self.datafile has not been set.
    """
    blank = re.compile("^\s*$")
    if self.datafile==None:
        raise IOError("Datafile is not set!!!")
    for file in self.datafile:
        ## open the file as a url:
        fd = IO.open_URL(file)
        buffer = ''
        while 1:
            # Top the buffer up before splitting a line off.
            if len(buffer) < 1024:
                data = fd.read(1024)
                buffer = buffer + data
            tmp = buffer.split("\n",1)
            # NOTE(review): str.split always returns at least one
            # element, so this break looks unreachable.
            if len(tmp) == 0:
                break
            line = tmp[0]
            try:
                buffer = tmp[1]
            except:
                # No newline yet -- read more; EOF ends the file.
                # NOTE(review): an unterminated final line is dropped
                # here -- confirm this is intended.
                data = fd.read(1024)
                if len(data) == 0:
                    break
                buffer = line + data
                continue
            if blank.match(line) or not line:
                continue
            if line.startswith('#') and ignore_comment:
                continue
            else:
                yield line
def pane_cb(path, result):
    """Render the registry key at *path* into a detail pane.

    Displays the write timestamp, a toolbar link to a full details
    callback, and a table with one row per value of the key.
    """
    fd = IO.open_URL(query['file'])
    b = Buffer(fd = fd)
    header = RegFile.RegF(b)
    key = header.get_key(path)
    result.text("Timestamp: %s" % key['WriteTS'], style='red')
    result.start_table(**{'class':'GeneralTable'})

    ## We don't want to reference the keys because we
    ## will leak memory while the callback remains stored.
    def details(query,result):
        # Re-open the hive instead of closing over `key` (see note above).
        fd = IO.open_URL(query['file'])
        b = Buffer(fd = fd)
        header = RegFile.RegF(b)
        key = header.get_key(path)
        result.heading("Key %s" % path)
        result.text("%s" % key, font='typewriter', wrap='full')
        for v in key.values():
            try:
                name = "%s"% v['keyname']
                result.heading("%s" % name)
                result.text("%s" % v, font='typewriter', wrap='full')
            except:
                pass

    result.toolbar(cb = details, text = "Examine Details", icon = "examine.png")
    result.row('Type','Length','Name','Value', **{'class':'hilight'})
    for v in key.values():
        try:
            t = "%s" % v['data']['val_type']
            length = "%s" % v['data']['len_data']
            name = "%s"% v['keyname']
            # Only the first 100 characters of the rendered data shown.
            data = "%s" % v['data']
            data = RAW(data[:100])
            result.row(t,length,name,data)
        except Exception,e:
            print e
            pass
def __init__(self, filenames, offset):
    """Open each of *filenames* via IO.open_URL and delegate to
    OffsettedFDFile with the resulting file descriptors and *offset*.

    A single string is accepted as a convenience and wrapped in a list.
    """
    if type(filenames)==str:
        filenames = [ filenames,]
    fds = [ IO.open_URL(filename) for filename in filenames ]
    OffsettedFDFile.__init__(self, fds, offset)
m = self.text_re.match(data) if m: period_number = period_number + p / period query.set('period_number', period_number) result.refresh(0, query, 'parent') return p += 1 def create(self, name, case, query): offset = FlagFramework.calculate_offset_suffix(query.get( 'offset', '0')) filenames = self.glob_filenames(query.getarray('filename')) ## Open the io sources here fds = [IO.open_URL(f) for f in filenames] if query.get('ismissing', False): fds += [ParityFD(copy.deepcopy(filenames))] blocksize = FlagFramework.calculate_offset_suffix( query.get('blocksize', '32k')) period = int(query.get('period', 3)) return RAIDFD(fds, blocksize, query['map'], offset, period) import pyflag.tests import pyflag.pyflagsh as pyflagsh class RaidTest(pyflag.tests.ScannerTest): """ Test the RAID IOSource loader """ test_case = "PyFlagTestCase"
def get_fields(self):
    """Yield one dict per Windows event-log record in self.datafile.

    Each record's format string is resolved through the
    EventMessageSources/EventMessages tables; when the lookup fails a
    diagnostic message embedding the raw parameters is used instead.
    Iteration over a file stops on the first IOError (end of data).

    Raises IOError when self.datafile has not been set.
    """
    if self.datafile==None:
        raise IOError("Datafile is not set!!!")
    print "Datafile %s" % (self.datafile,)
    for file in self.datafile:
        ## open the file as a url:
        fd = IO.open_URL(file)
        dbh = DB.DBO()
        buffer = Buffer(fd=fd)
        header = EVTLog.Header(buffer)
        # Skip past the file header to the first event record.
        buffer = buffer[header.size():]
        while 1:
            try:
                event = EVTLog.Event(buffer)
                source = event['Source'].get_value()
                machine = event['Machine'].get_value()
                ## Find the filename for this source:
                dbh.execute("select filename from EventMessageSources where source=%r", source)
                row=dbh.fetch()
                if row:
                    dbh.execute("select message from EventMessages where filename=%r and message_id=%r", (row['filename'], event['EventID'].get_value()))
                    row = dbh.fetch()
                    if row:
                        message=EVTLog.format_message(row['message'],event['Strings'])
                    ## Message not found
                    else:
                        message="Unable to find message format string (Maybe file was not loaded with --mode=dll?). Parameters are: %s" % event['Strings']
                ## Filename not found for this source:
                else:
                    message="Unable to locate file for source %s. Maybe you need to run EventLogTool with the --reg flag on the SYSTEM registry hive? Parameters are: %s " % (source,event['Strings'])
                # Advance the buffer past this record.
                buffer=buffer[event.size():]
                result = dict(
                    _time= "from_unixtime('%s')" % event['TimeGenerated'].get_value(),
                    message= message,
                    event = event['EventID'].get_value(),
                    Source = event['Source'].get_value(),
                    record = event['RecordNumber'].get_value(),
                    )
                # Up to three parameter strings, best-effort.
                try:
                    result['arg1'] = event['Strings'][0].get_value()
                except: pass
                try:
                    result['arg2'] = event['Strings'][1].get_value()
                except: pass
                try:
                    result['arg3'] = event['Strings'][2].get_value()
                except: pass
                yield result
            except IOError:
                break
def get_fields(self):
    """Generate one result dict per event record in self.datafile.

    Message format strings are looked up in the EventMessageSources /
    EventMessages tables; a descriptive fallback message (including the
    raw event parameters) is produced when either lookup fails.  The
    per-file loop terminates on the first IOError.

    Raises IOError when self.datafile has not been set.
    """
    if self.datafile == None:
        raise IOError("Datafile is not set!!!")
    print "Datafile %s" % (self.datafile, )
    for file in self.datafile:
        ## open the file as a url:
        fd = IO.open_URL(file)
        dbh = DB.DBO()
        buffer = Buffer(fd=fd)
        header = EVTLog.Header(buffer)
        # Step over the header to the first record.
        buffer = buffer[header.size():]
        while 1:
            try:
                event = EVTLog.Event(buffer)
                source = event['Source'].get_value()
                machine = event['Machine'].get_value()
                ## Find the filename for this source:
                dbh.execute(
                    "select filename from EventMessageSources where source=%r",
                    source)
                row = dbh.fetch()
                if row:
                    dbh.execute(
                        "select message from EventMessages where filename=%r and message_id=%r",
                        (row['filename'], event['EventID'].get_value()))
                    row = dbh.fetch()
                    if row:
                        message = EVTLog.format_message(
                            row['message'], event['Strings'])
                    ## Message not found
                    else:
                        message = "Unable to find message format string (Maybe file was not loaded with --mode=dll?). Parameters are: %s" % event[
                            'Strings']
                ## Filename not found for this source:
                else:
                    message = "Unable to locate file for source %s. Maybe you need to run EventLogTool with the --reg flag on the SYSTEM registry hive? Parameters are: %s " % (
                        source, event['Strings'])
                # Advance past the record just parsed.
                buffer = buffer[event.size():]
                result = dict(
                    _time="from_unixtime('%s')" % event['TimeGenerated'].get_value(),
                    message=message,
                    event=event['EventID'].get_value(),
                    Source=event['Source'].get_value(),
                    record=event['RecordNumber'].get_value(),
                )
                # Copy up to three event parameter strings, best-effort.
                try:
                    result['arg1'] = event['Strings'][0].get_value()
                except:
                    pass
                try:
                    result['arg2'] = event['Strings'][1].get_value()
                except:
                    pass
                try:
                    result['arg3'] = event['Strings'][2].get_value()
                except:
                    pass
                yield result
            except IOError:
                break