def start(self, args):
    """Compile a filter expression, run it from root_node and expose the
    matching nodes; optionally bookmark the results under /Bookmarks.

    args keys: "filter_name", "expression", "root_node" (required),
    "verbose", "recursive", "save_result" (optional flags).
    """
    self.nodes = []
    self.nodescount = 1
    self.oldcur = 0
    fname = args["filter_name"].value()
    expression = args["expression"].value()
    root_node = args["root_node"].value()
    # Flags are presence-only: has_key() already yields the boolean we need.
    self.verbose = args.has_key("verbose")
    recursive = args.has_key("recursive")
    f = Filter(fname)
    f.connection(self)
    try:
        f.compile(expression)
    except RuntimeError:
        self.res["error"] = Variant("provided expression is not valid")
        # Bug fix: previously fell through and still called f.process()
        # with a filter whose expression failed to compile.
        return
    f.process(root_node, recursive)
    self.res["total of matching nodes"] = Variant(len(self.nodes))
    if args.has_key("save_result"):
        # Create the /Bookmarks container on first use.
        si_node = self.vfs.getnode("/Bookmarks")
        if si_node is None:
            root = self.vfs.getnode("/")
            si_node = Node("Bookmarks", 0, root)
            si_node.__disown__()  # ownership transferred to the C++ VFS
        fnode = Node(fname, 0, si_node)
        fnode.__disown__()
        # One VLink per match, named after the original node.
        for node in self.nodes:
            vl = VLink(node, fnode, node.name())
            vl.__disown__()
def attributes(self, node):
    """Return a VMap of the node's computed hashes plus, when the node
    belongs to registered hash sets, "known bad"/"known good" set names."""
    result = VMap()
    hinfos = self.calculatedHash[long(node.this)]
    digests = self.getHashes(node)
    for algo in digests:
        result[str(algo)] = Variant(str(digests[algo]))
    if len(hinfos.hsets):
        # Partition the node's hash sets by their knownGood flag.
        knownBad = []
        knownGood = []
        for setId in hinfos.hsets:
            hset = self.parent.hashSets.get(setId)
            target = knownGood if hset.knownGood else knownBad
            target.append(hset)
        if len(knownBad):
            badList = VList()
            for badSet in knownBad:
                badList.append(Variant(badSet.name))
            result["known bad"] = badList
        if len(knownGood):
            goodList = VList()
            for goodSet in knownGood:
                goodList.append(Variant(goodSet.name))
            result["known good"] = goodList
    return result
def attributesTypes(values, types):
    """Convert a raw (values, types) pair into the matching DFF attribute
    object (DateTime/VMap/VList/...), recursing for containers.

    `types` is compared against type objects/classes; `values` is the raw
    payload whose expected shape depends on `types` (e.g. a (values, types)
    tuple per entry for dict/list — TODO confirm against callers).
    """
    if (types == DateTime):
        val = DateTime(values)
        # SWIG object handed to C++ side: Python must not delete it.
        val.thisown = False
    elif (types == MS64DateTime):
        val = MS64DateTime(values)
        val.thisown = False
    elif (types == DosDateTime):
        # NOTE(review): unlike the two cases above, `values` is unpacked
        # here — presumably a (date, time) pair; verify against callers.
        val = DosDateTime(*values)
        val.thisown = False
    elif (types == int) or (types == long):
        if type(values) == str:
            #XXX strange ?
            # Coerce a stray string payload to 0 rather than raising.
            values = 0
        val = types(values)
    elif (types == dict):
        # Each dict value is itself a (values, types) pair — recurse.
        val = VMap()
        for k, v in values.iteritems():
            vval = Variant(attributesTypes(*v))
            val[k] = vval
    elif (types == list):
        # Each list element is a (values, types) pair — recurse.
        val = VList()
        for v in values:
            vval = Variant(attributesTypes(*v))
            val.append(vval)
    elif (types == str):
        if type(values) == unicode:
            val = values.encode("UTF-8", "replace")
        else:
            val = str(values)
    elif (types == VLink):
        #return node is already created
        val = values
    else:
        # Fallback: treat `types` as a callable constructor.
        val = types(values)
    return val
def _attributes(self):
    """Expose the clipboard format identifiers as node attributes.

    NOTE(review): "Clibboard" is misspelled but kept verbatim — the keys
    are user-visible attribute names and may be matched elsewhere.
    """
    attrs = VMap()
    attrs['Clibboard Format'] = Variant(self.cbFormat)
    attrs['Clibboard Data Format'] = Variant(self.cbDataFormat)
    return attrs
def __init__(self, vfile):
    """Build a Variant from a length-prefixed string read from vfile:
    a 4-byte little-endian size followed by `size` bytes of payload."""
    size = unpack('I', vfile.read(4))[0]
    data = vfile.read(size)
    # NOTE(review): `> 1` discards 1-byte payloads as well as empty ones —
    # possibly intended as `> 0`; confirm against the on-disk format.
    if len(data) > 1:
        Variant.__init__(self, str(data))
    else:
        Variant.__init__(self, None)
def attributes(self, node):
    """Extract EXIF tags from an image node into a VMap.

    Tags listed in self.dateTimeTags are parsed into vtime values (three
    datetime layouts are tried in turn); on parse failure the raw string
    is stored.  Other tags are stored as-is, tuples becoming VLists.
    """
    attr = VMap()
    vfile = node.open()
    try:
        img = Image.open(vfile)
        # _getexif() can return None for images without EXIF data; the
        # original crashed on .items() in that case.
        info = img._getexif() or {}
    finally:
        # Bug fix: the file handle leaked when Image.open/_getexif raised.
        vfile.close()
    for tag, values in info.items():
        decoded = str(TAGS.get(tag, tag))
        if tag in self.dateTimeTags:
            try:
                # Try the EXIF layout first, then ISO-ish, then ctime-like.
                try:
                    dt = strptime(values, "%Y:%m:%d %H:%M:%S")
                except ValueError:
                    try:
                        # Strip a trailing "+HH:MM"-style offset.
                        dt = strptime(values[:-6], "%Y-%m-%dT%H:%M:%S")
                    except ValueError:
                        dt = strptime(values.rstrip(' '), "%a %b %d %H:%M:%S")
                vt = vtime(dt.tm_year, dt.tm_mon, dt.tm_mday, dt.tm_hour, dt.tm_min, dt.tm_sec, 0)
                vt.thisown = False
                attr[decoded] = Variant(vt)
            except Exception:
                # Unparseable timestamp: keep the raw text.
                attr[decoded] = Variant(str(values))
        else:
            if isinstance(values, tuple):
                vl = VList()
                for value in values:
                    vl.push_back(Variant(value))
                attr[decoded] = vl
            else:
                attr[decoded] = Variant(values)
    return attr
def _attributes(self):
    """Report the relocation tables (offset -> write address, both hex)
    and the count of reallocated blocks."""
    attrs = VMap()
    tableMap = VMap()
    for offset in self.tables.map:
        tableMap[hex(offset)] = Variant(hex(self.tables.map[offset].write))
    attrs["tables"] = Variant(tableMap)
    attrs["reallocated blocks"] = Variant(self.aalloc)
    return attrs
def _attributes(self):
    """List the absolute paths of the concatenated source files, in order.

    NOTE(review): "concatanated" is misspelled but kept — it is a
    user-visible attribute key.
    """
    attr = VMap()
    vlist = VList()
    # Bug fix: dropped the unused counter `i = 1` from the original.
    for f in self.files:
        vlist.append(Variant(f.value().absolute()))
    attr["concatanated files (ordered)"] = Variant(vlist)
    return attr
def __notifyFileProgress(self, node, percent):
    """Emit an Extract.FileProgress event carrying [node, percent]."""
    payload = VList()
    payload.append(Variant(node))
    payload.append(Variant(int(percent)))
    evt = event()
    evt.thisown = False  # event ownership passes to the notifier
    evt.type = Extract.FileProgress
    evt.value = RCVariant(Variant(payload))
    self.notify(evt)
def __notifyRename(self, src, dst):
    """Emit an Extract.RenameOccured event carrying [src, dst]."""
    payload = VList()
    payload.append(Variant(src))
    payload.append(Variant(dst))
    evt = event()
    evt.thisown = False  # event ownership passes to the notifier
    evt.type = Extract.RenameOccured
    evt.value = RCVariant(Variant(payload))
    self.notify(evt)
def __notifyFailure(self, src, ftype, tb):
    """Emit a failure event of type `ftype` carrying [src, str(tb)]."""
    payload = VList()
    payload.append(Variant(src))
    payload.append(Variant(str(tb)))
    evt = event()
    evt.thisown = False  # event ownership passes to the notifier
    evt.type = ftype
    evt.value = RCVariant(Variant(payload))
    self.notify(evt)
def __init__(self, vfile):
    """Build a Variant from an 8-byte little-endian value that may be
    either a Windows FILETIME or a plain relative quantity.

    MS did not differentiate absolute and relative time (time/datetime),
    so use an ugly trick here: any value at least as large as the gap
    between the Unix and Windows epochs is treated as a datetime.
    """
    raw = unpack('Q', vfile.read(8))[0]
    if raw < 116444736000000000:
        Variant.__init__(self, raw)
    else:
        vt = vtime(raw, TIME_MS_64)
        vt.thisown = False  # ownership handed to the C++ side
        Variant.__init__(self, vt)
def notifyWrite(self, eventType, value): e = event() e.thisown = False e.type = eventType try: e.value = RCVariant(Variant(value)) except Exception as error: print 'report.fragments.notifyWrite ', error, value e.value = RCVariant(Variant("")) self.notify(e)
def __attributes(self):
    """Report the module's virtual base address, plus its physical base
    (from the loader entry) when the base address is valid."""
    attrs = VMap()
    if self._baseaddr == -1:
        # -1 marks an untranslatable virtual address.
        attrs["Virtual Base address (not valid addr)"] = Variant(self._boffset)
    else:
        attrs["Virtual Base address"] = Variant(self._boffset)
        attrs["Physical Base address"] = Variant(self._ldr_entry.obj_offset)
    return attrs
def __attributes(self):
    """Report the virtual base address, plus its physical translation
    through the address space when the base address is valid."""
    attrs = VMap()
    if self._baseaddr == -1:
        # -1 marks an untranslatable virtual address.
        attrs["Virtual Base address (not valid addr)"] = Variant(self._boffset)
    else:
        attrs["Virtual Base address"] = Variant(self._boffset)
        attrs["Physical Base address"] = Variant(self._aspace.translate(self._boffset))
    return attrs
def __setConnections(self, attrs):
    """Populate attrs["Connections"] with one VMap per network connection
    recorded for this process (keyed "Connection 1", "Connection 2", ...).

    Does nothing when no connections were collected for the PID.
    """
    if self._fsobj.connections.has_key(long(self.eproc.UniqueProcessId)):
        conns = VMap()
        count = 0
        for conn_obj in self._fsobj.connections[long(
                self.eproc.UniqueProcessId)]:
            count += 1
            conn = VMap()
            conn["Local IP address"] = Variant(str(conn_obj.localAddr))
            conn["Local port"] = Variant(int(conn_obj.localPort))
            # Optional fields below may be None depending on how the
            # connection was recovered.
            if conn_obj.proto is not None:
                conn["Protocol"] = Variant(int(conn_obj.proto))
                conn["Protocol type"] = Variant(conn_obj.type)
            if conn_obj.ctime is not None:
                create_datetime = conn_obj.ctime.as_windows_timestamp()
                vt = MS64DateTime(create_datetime)
                # SWIG object handed to the C++ side: Python must not free it.
                vt.thisown = False
                conn["Create time"] = Variant(vt)
            if conn_obj.remoteAddr is not None:
                conn["Remote IP address"] = Variant(
                    str(conn_obj.remoteAddr))
                conn["Remote port"] = Variant(int(conn_obj.remotePort))
            if conn_obj.state is not None:
                conn["State"] = Variant(str(conn_obj.state))
            conns["Connection " + str(count)] = Variant(conn)
        attrs["Connections"] = conns
def setResults(self):
    """Publish the hashing run's counters into self.res."""
    self.res["hashed files"] = Variant(len(self.attributeHash.calculatedHash))
    self.res["known good files"] = Variant(self.knownGoodFiles)
    self.res["known bad files"] = Variant(self.knownBadFiles)
    self.res["skipped files"] = Variant(self.skippedFiles)
    self.res["Errors"] = Variant(self.errorFiles)
def __split(self, root, chunksize):
    """Split self.origin under `root` into chunks of `chunksize` bytes,
    plus one truncated trailing chunk when the size is not a multiple.

    Records "complete chunks" / "truncated chunk (size)" in self.res
    under the key "<chunksize> bytes split".
    """
    nodesize = self.origin.size()
    chunks = nodesize / chunksize  # integer division: number of full chunks
    vmap = VMap()
    vmap["complete chunks"] = Variant(chunks)
    # Presumably SplitNode attaches itself to `root` on construction —
    # the created objects are not otherwise referenced; verify in SplitNode.
    for idx in xrange(0, chunks*chunksize, chunksize):
        snode = SplitNode(self, root, self.origin, idx, chunksize)
    lastchunk = nodesize % chunksize
    if lastchunk != 0:
        # Remaining tail smaller than chunksize.
        snode = SplitNode(self, root, self.origin, self.origin.size() - lastchunk, lastchunk)
        vmap["truncated chunk (size)"] = Variant(lastchunk)
    self.res[str(chunksize) + " bytes split"] = Variant(vmap)
def __init__(self, node, largs, mfsobj = None):
    """Parse an OLE compound document node and extract its content.

    `largs` is a collection of option flags checked by membership:
    'no-extraction', 'no-text', 'no-pictures', 'no-root_metadata'.
    Word text/pictures and PowerPoint pictures are extracted as child
    nodes; property-set streams become extra attributes.

    Raises Exception when the compound document header cannot be parsed.
    """
    self.node = node
    self.attr = {}
    self.extraAttr = []
    try :
        self.cdh = CompoundDocumentHeader(node, mfsobj)
        self.cdh.parseDocument(not 'no-extraction' in largs)
    except :
        #error()
        raise Exception("Can't parse document")
    streams = self.cdh.streams()
    for stream in streams:
        if stream.objectType == "StreamObject":
            # Any failure on one stream is swallowed so the remaining
            # streams are still processed.
            try:
                if stream.objectName == "WordDocument":
                    if not 'no-extraction' in largs:
                        wd = WordDocument(stream)
                        if not 'no-text' in largs:
                            wd.createTextNodes()
                        if not 'no-pictures' in largs:
                            wd.createPictureNodes()
                elif stream.objectName == "Pictures":
                    # PowerPoint picture container stream.
                    if not ('no-pictures' in largs or 'no-extraction' in largs):
                        ppt = PPT(stream)
                        ppt.createPictureNodes()
                else:
                    # Treat any other stream as a property set and pull out
                    # the sections matching known Office document CLSIDs.
                    propertySet = PropertySetStream(stream, OfficeDocumentSectionCLSID.keys())
                    for clsid in OfficeDocumentSectionCLSID.iterkeys():
                        section = propertySet.sectionCLSID(clsid)
                        if section:
                            (sectionName, sectionIDS) = OfficeDocumentSectionCLSID[clsid]
                            mattr = VMap()
                            for k, v in sectionIDS.iteritems():
                                Property = section.PropertyList.propertyID(k)
                                if Property and Property.Variant.Value:
                                    p = section.PropertyList.propertyID(k).Variant.Value
                                    if p and isinstance(p, Variant):
                                        #Thumbnail is type node
                                        if v == 'Total editing time':
                                            #special case see msoshared.py
                                            # Stored as 100ns ticks; render as a duration string.
                                            p = Variant(str(datetime.timedelta(seconds=(p.value()/10000000))))
                                        else:
                                            p = Variant(p)
                                        mattr[v] = p
                            stream.setExtraAttributes((sectionName, mattr,))
                            if not 'no-root_metadata' in largs:
                                self.extraAttr.append((sectionName, stream.parent().name(), mattr,))
            #except RuntimeError, e:
            #pass
            except :
                #error()
                pass
    # NOTE(review): drops only the last loop variable binding — presumably
    # to release the stream when extraction ran; confirm intent.
    if not 'no-extraction' in largs:
        del stream
def _setLoadedModules(self, attrs):
    """Populate attrs["Loaded modules"] with one {name: {Base, Size, ...}}
    VMap per module loaded by the process."""
    modules = VList()
    for mod in self.eproc.get_load_modules():
        # Fall back to 'N/A' when the DLL name is empty.
        name = str(mod.FullDllName) or 'N/A'
        details = VMap()
        details["Base"] = Variant(long(mod.DllBase))
        details["Size"] = Variant(long(mod.SizeOfImage))
        if name != "N/A":
            self._setImportedFunctions(name, details)
        entry = VMap()
        entry[name] = Variant(details)
        modules.append(entry)
    attrs["Loaded modules"] = Variant(modules)
def _setTimestamp(self, obj, attrs):
    """Record the process state plus its exit or creation timestamp."""
    if obj.ExitTime:
        attrs["State"] = Variant("Exited")
        stamp = MS64DateTime(obj.ExitTime.as_windows_timestamp())
        stamp.thisown = False  # ownership handed to the C++ side
        attrs["Exit time"] = Variant(stamp)
    else:
        attrs["State"] = Variant("Running")
        stamp = MS64DateTime(obj.CreateTime.as_windows_timestamp())
        stamp.thisown = False  # ownership handed to the C++ side
        attrs["Create time"] = Variant(stamp)
def _setFileAttributes(self, handle, files):
    """Describe a process file handle: map it back to a node in the disk
    image when possible and record its access/sharing flags.

    Results are stored in `files` keyed by the device path.
    """
    file_attrs = VMap()
    fileobj = handle.dereference_as("_FILE_OBJECT")
    fnamedev = fileobj.file_name_with_device()
    # Bug fix: removed a leftover `if True:` wrapper around this body.
    # NOTE(review): the device prefix and overlay path below are
    # hard-coded for one specific analysis image.
    if fnamedev.find("\\Device\\HarddiskVolume1") != -1:
        fnamedev_overlay = fnamedev.replace(
            "\\Device\\HarddiskVolume1",
            "WinXpPro/WinXpPro.vmdk/Baselink/VirtualHDD/Partitions/Partition 1/NTFS"
        ).replace("\\", "/")
        node = self.v.getnode(fnamedev_overlay)
        if node:
            file_attrs["HardDriveImage"] = Variant(node)
        else:
            file_attrs["HardDriveImage"] = Variant("Not found")
    # Access and sharing flags exposed as booleans.
    file_attrs["WriteAccess"] = Variant(fileobj.WriteAccess > 0, typeId.Bool)
    file_attrs["ReadAccess"] = Variant(fileobj.ReadAccess > 0, typeId.Bool)
    file_attrs["DeleteAccess"] = Variant(fileobj.DeleteAccess > 0, typeId.Bool)
    file_attrs["SharedRead"] = Variant(fileobj.SharedRead > 0, typeId.Bool)
    file_attrs["SharedWrite"] = Variant(fileobj.SharedWrite > 0, typeId.Bool)
    file_attrs["SharedDelete"] = Variant(fileobj.SharedDelete > 0, typeId.Bool)
    files[fnamedev] = file_attrs
def _attributes(self):
    """Expose the Windows event-log record fields as node attributes."""
    attr = VMap()
    attr.thisown = False  # map handed to the C++ side
    record = self.evt_record
    try:
        strings = Variant(record.getStrings(), typeId.String)
        strings.thisown = False
        attr["Log strings"] = strings
    except RuntimeError:
        # Record has no message strings; skip the attribute.
        pass
    attr["Source name"] = Variant(record.sourceName())
    attr["Computer name"] = Variant(record.computerName())
    attr["Event type"] = Variant(record.eventType())
    attr["Time generated"] = Variant(record.getTimeGenerated())
    attr["Time written"] = Variant(record.getTimeWritten())
    return attr
def _attributes(self):
    """Expose the record's timestamp as a "modified" attribute."""
    stamp = MS64DateTime(self.timestamp)
    stamp.thisown = False  # ownership handed to the C++ side
    attr = VMap()
    attr["modified"] = Variant(stamp)
    return attr
def __setResults(self):
    """Publish the hashing run's counters into self.res under the lock.

    Best-effort: any failure while writing the results is swallowed so
    callers are never disturbed, but the lock is always released.
    """
    self.__lock.acquire()
    try:
        self.res["hashed files"] = Variant(self.attributeHash.count())
        self.res["known good files"] = Variant(self.__knownGoodFiles)
        self.res["known bad files"] = Variant(self.__knownBadFiles)
        self.res["skipped files"] = Variant(self.__skippedFiles)
        self.res["Errors"] = Variant(self.__errorFiles)
    except Exception:
        # Deliberate best-effort swallow (was a bare `except:`); narrowed
        # so KeyboardInterrupt/SystemExit are no longer hidden.
        pass
    finally:
        # Bug fix: release moved to `finally` so the lock cannot leak.
        self.__lock.release()
def attributes(self, node):
    """Return the node's skin-detection percentage when one was computed;
    otherwise an empty VMap."""
    amap = VMap()
    try:
        percent = self.skinnodes[node.uid()]
        amap["skin percent"] = Variant(percent)
    except Exception:
        # Node was not analysed: no attribute to report.
        pass
    return amap
def _attributes(self):
    """Expose the record's 64-bit MS timestamp as a "modified" attribute."""
    stamp = vtime(self.timestamp, TIME_MS_64)
    stamp.thisown = False  # ownership handed to the C++ side
    attr = VMap()
    attr["modified"] = Variant(stamp)
    return attr
def start(self, args):
    """Evaluate the user-supplied "expression" argument and store the
    result in self.res["result"].

    Raises envError when no expression was provided.
    """
    try:
        expr = args["expression"].value()
    except IndexError:
        raise envError("modules evalexp need an expression to evaluate")
    # SECURITY: eval() of an arbitrary user expression — acceptable only
    # because this module exists precisely to evaluate operator input;
    # never feed it untrusted data.
    buff = eval(expr)
    self.res["result"] = Variant(buff)
def start(self, args):
    """Validate the split parameters and split the "file" node once per
    requested chunk size.

    args: "file" (node), optional "start-offset" (int, default 0),
    "chunk-sizes" (list of sizes).  Validation failures are accumulated
    into self.res["error"]; valid chunk sizes each get their own tree.
    """
    self.origin = args["file"].value()
    if args.has_key("start-offset"):
        self.soffset = args["start-offset"].value()
    else:
        self.soffset = 0
    self.chunklist = args["chunk-sizes"].value()
    nodesize = self.origin.size()
    err = ""
    # Offset sanity checks; any failure aborts all splitting below.
    if self.soffset < 0:
        err += "start offset (" + str(self.soffset) + ") must be equal or greater to 0\n"
    if self.soffset >= nodesize:
        err += "start offset (" + str(self.soffset) + ") must be lesser than size of provided node\n"
    if not err:
        for vchunksize in self.chunklist:
            cerr = ""
            chunksize = vchunksize.value()
            # Per-chunk-size validation; an invalid size only skips that size.
            if chunksize <= 0:
                cerr += "size of chunk (" + str(chunksize) + " bytes) must be positive\n"
            if chunksize >= nodesize:
                cerr += "size of chunk (" + str(chunksize) + " bytes) must be lesser than size of provided node\n"
            if not cerr:
                root = Node(self.origin.name() + " splitted by " + str(chunksize), 0, None, self)
                # Ownership transferred to the C++ VFS.
                root.__disown__()
                self.__split(root, chunksize)
                self.registerTree(self.origin, root)
            else:
                err += cerr
    if err:
        self.res["error"] = Variant(err)
def __notifyOverallProgress(self):
    """Build a human-readable summary of the extraction counters and emit
    it as an Extract.OverallProgress event.

    Error/omission lines are shown only when their count is non-zero;
    all lines require a non-zero total to avoid division by zero.
    """
    def progress_line(label, done, total):
        # One "label: done/total (xx.xx%)" line of the summary.
        percent = (float(done) * 100) / total
        return label + ": " + str(done) + "/" + str(total) + " (" + str(round(percent, 2)) + "%)\n"

    # Decomposition: the six copy-pasted format expressions of the
    # original are now one helper; emitted text is byte-identical
    # ("ommited" misspelling included — it is part of the message).
    buff = ""
    if self.total_files > 0:
        buff += progress_line("extracted file(s)", self.extracted_files, self.total_files)
    if self.total_folders > 0:
        buff += progress_line("extracted folder(s)", self.extracted_folders, self.total_folders)
    if self.files_errors > 0 and self.total_files > 0:
        buff += progress_line("file(s) error", self.files_errors, self.total_files)
    if self.folders_errors > 0 and self.total_folders > 0:
        buff += progress_line("folder(s) error", self.folders_errors, self.total_folders)
    if self.ommited_files > 0 and self.total_files > 0:
        buff += progress_line("ommited file(s)", self.ommited_files, self.total_files)
    if self.ommited_folders > 0 and self.total_folders > 0:
        buff += progress_line("ommited folder(s)", self.ommited_folders, self.total_folders)
    e = event()
    e.type = Extract.OverallProgress
    e.value = RCVariant(Variant(buff))
    e.thisown = False  # event ownership passes to the notifier
    self.notify(e)
def start(self, args):
    """Extract the selected nodes to a local directory.

    args: 'files' (nodes to extract), 'syspath' (destination directory),
    optional flags 'recursive', 'preserve', 'overwrite'.  Counters and
    bookkeeping lists are reset before each run.
    """
    # Reset per-run counters and bookkeeping.
    self.total_files = 0
    self.total_folders = 0
    self.extracted_files = 0
    self.extracted_folders = 0
    self.files_errors = 0
    self.folders_errors = 0
    self.ommited_files = 0
    self.ommited_folders = 0
    self.__failed_files = []
    self.__failed_folders = []
    self.__renamed = {}
    try:
        self.nodes = args['files'].value()
        self.syspath = args['syspath'].value().path
        if not os.path.isdir(self.syspath):
            self.res["errors"] = Variant(self.syspath + " is not a valid directory")
            return
        # Optional flags default to False when absent.
        if args.has_key('recursive'):
            self.recursive = args["recursive"].value()
        else:
            self.recursive = False
        if args.has_key('preserve'):
            self.preserve = args["preserve"].value()
        else:
            self.preserve = False
        if args.has_key('overwrite'):
            self.overwrite = args["overwrite"].value()
        else:
            self.overwrite = False
        self.__extract()
        self.__createReport()
    except KeyError:
        # Required argument missing: silently abort (best-effort module).
        pass
def __init__(self, vfile):
    """Build a Variant from a length-prefixed UTF-16 string read from
    vfile: 4-byte little-endian size, then the raw UTF-16 payload,
    re-encoded as UTF-8."""
    length = unpack('I', vfile.read(4))[0]
    raw = vfile.read(length)
    Variant.__init__(self, unicode(raw, 'UTF-16').encode('UTF-8'))
def __init__(self, vfile):
    """Build a Variant from a 2-byte little-endian unsigned short."""
    value = unpack('H', vfile.read(2))[0]
    Variant.__init__(self, value)