def dbupdate(cls, src, pdestdir):
    '''
    When given a directory of updates (CABs/MSUs/ZIPs) and no extraction,
    only set up update files to be added to the DB. Destination is where
    patched files are.
    '''
    extlogger = logging.getLogger("BAM.Pools.ExWkr")

    logmsg = "[EXMGR][DBUP] starting on " + str(src)
    extlogger.log(logging.DEBUG, logmsg)

    # initialize deliverables
    deliverables = None
    newpath = ''

    # "PSFX" indicates that this cab is one of the new update formats that
    # MS started using for v1809 and forward; this type of update cannot be
    # handled yet, so skip it.
    if "PSFX" in src or "psfx" in src:
        return deliverables

    hashes = getfilehashes(src)
    if hashes is None:
        return hashes

    if not (validatecab(str(src)) or ispe(str(src)) or validatezip(str(src))):
        logmsg = "[EXMGR][DBUP] invalid cab/pe/zip"
        extlogger.log(logging.DEBUG, logmsg)
        return deliverables

    newname = src.split("\\")[-1].lstrip()
    newpath = pdestdir + "\\" + newname

    if ".exe" in newname:
        newpath = newpath.split(".exe")[0]
    elif ".cab" in newname:
        newpath = newpath.split(".cab")[0]
    elif ".zip" in newname:
        newpath = newpath.split(".zip")[0]

    deliverables = ((newpath, []), hashes[0], hashes[1])

    # No need to locate nested CABs/MSUs as long as the parent update file
    # is found. Revisit if needed.
    logmsg = "[EXMGR][DBUP] Extraction (DB update only) task completed for " + src
    extlogger.log(logging.DEBUG, logmsg)

    # Send the job to the next manager (the DB will be updated eventually)
    return deliverables

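# Hedged usage sketch (paths and hashes are hypothetical): for
# src = "C:\\updates\\windows10-kb123.cab" and pdestdir = "C:\\patched",
# dbupdate returns a deliverables tuple of the form
#     (("C:\\patched\\windows10-kb123", []), "<sha256>", "<sha1>")
# i.e. (destination path, nested-job list) plus the update's file hashes,
# the same shape extracttask below produces for the next manager.
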
def analyzepesignature(file):
    global _pcertlogger

    _pcertlogger.log(
        logging.DEBUG,
        "[PCERT] Working on " + file +
        " for certificate information and signature verification")

    pscmdpath = os.environ['systemdrive'] + \
        '\\Windows\\system32\\WindowsPowerShell\\v1.0\\powershell.exe'
    args = pscmdpath + \
        ' -nologo -noprofile -executionpolicy bypass -Command ' + \
        '".\\post\\ps_dgsverify.ps1 -binarypath \'' + file + '\'"'

    vfile = Path(pscmdpath)
    if not vfile.exists():
        _pcertlogger.log(
            logging.DEBUG,
            "[PCERT] Provided PS path (" + pscmdpath +
            ") does not exist. Skipping " + file)
        return
    elif not vfile.is_file():
        _pcertlogger.log(
            logging.DEBUG,
            "[PCERT] Provided PS path (" + pscmdpath +
            ") is not a file. Skipping " + file)
        return

    hashes = getfilehashes(file)
    if hashes is None:
        _pcertlogger.log(logging.DEBUG,
                         "[PCERT] Error getting hashes for " + file)
        return

    _pcertlogger.log(logging.DEBUG, "[PCERT] Starting: " + args)

    try:
        with subprocess.Popen(args, shell=False,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE) as dgsverify:
            rawout, dummy = dgsverify.communicate()

            pecerts = None
            try:
                pecerts = json.loads(rawout)
            except json.decoder.JSONDecodeError as error:
                _pcertlogger.log(logging.DEBUG,
                                 "[PCERT] JSON Error: " + error.msg)
                return

            try:
                strtime = str(time())
                isosbinary = False
                if pecerts["IsOSBinary"] == "True":
                    isosbinary = True

                dbcursor = DBCONN2.cursor()
                dbcursor.execute("BEGIN TRANSACTION")
                dbcursor.execute(
                    "INSERT INTO " + "DigiSignFiles" +
                    " VALUES (" + "?," * 24 + "?)",
                    # FileName, SHA256, SHA1, Status, StatusMessage
                    (
                        file, hashes[0], hashes[1], pecerts["Status"],
                        pecerts["StatusMessage"],
                        # SignatureType, IsOSBinary, SignerCertificateName
                        int(pecerts["SignatureType"]), int(isosbinary),
                        pecerts["SignerCertificateName"],
                        # SignerCertificateFriendlyName, SignerCertificateIssuer
                        pecerts["SignerCertificateFriendlyName"],
                        pecerts["SignerCertificateIssuer"],
                        # SignerCertificateSerialNumber
                        pecerts["SignerCertificateSerialNumber"],
                        # SignerCertificateNotBefore, SignerCertificateNotAfter
                        pecerts["SignerCertificateNotBefore"],
                        pecerts["SignerCertificateNotAfter"],
                        # SignerCertificateThumbprint, TimeStamperCertificateSubject
                        pecerts["SignerCertificateThumbprint"],
                        pecerts["TimeStamperCertificateSubject"],
                        # TimeStamperCertificateFriendlyName, TimeStamperCertificateIssuer
                        pecerts["TimeStamperCertificateFriendlyName"],
                        pecerts["TimeStamperCertificateIssuer"],
                        # TimeStamperCertificateSerialNumber, TimeStamperCertificateNotBefore
                        pecerts["TimeStamperCertificateSerialNumber"],
                        pecerts["TimeStamperCertificateNotBefore"],
                        # TimeStamperCertificateNotAfter, TimeStamperCertificateThumbprint
                        pecerts["TimeStamperCertificateNotAfter"],
                        pecerts["TimeStamperCertificateThumbprint"],
                        # NumberOfCertsInSignerChain, NumberOfCertsInTimeStampChain
                        int(pecerts["NumberOfCertsInSignerChain"]),
                        int(pecerts["NumberOfCertsInTimeStampChain"]),
                        # PsObjData, time
                        pecerts["PsObjData"], strtime))
                dbcursor.execute("END TRANSACTION")
                dbcursor.close()
            except sqlite3.Error as error:
                _pcertlogger.log(logging.DEBUG, (
                    "[PCERT] INSERT Certificate/Digital Signature "
                    "Information error: " + error.args[0]))
    except subprocess.CalledProcessError as error:
        logmsg = (
            "[PCERT] {-} Skipping insertion into DB. PowerShell command "
            "failed with error: " + str(error.returncode) +
            ". Command: " + args)
        _pcertlogger.log(logging.DEBUG, logmsg)
    except FileNotFoundError as error:
        logmsg = (
            "[PCERT] {-} Skipping insertion into DB. powershell.exe not found")
        _pcertlogger.log(logging.DEBUG, logmsg)

    _pcertlogger.log(logging.DEBUG, "[PCERT] " + file + " Completed")

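# Design note (hedged, not the author's code): passing a single command
# string to subprocess.Popen works on Windows, but an argument list avoids
# the manual quoting done above. A minimal sketch assuming the same
# ps_dgsverify.ps1 script:
#
#     argv = [pscmdpath, '-nologo', '-noprofile',
#             '-executionpolicy', 'bypass', '-Command',
#             ".\\post\\ps_dgsverify.ps1 -binarypath '" + file + "'"]
#     with subprocess.Popen(argv, stdout=subprocess.PIPE,
#                           stderr=subprocess.PIPE) as dgsverify:
#         rawout, _ = dgsverify.communicate()
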
def findbannedapis(file):
    global _pbanlogger

    _pbanlogger.log(
        logging.DEBUG,
        "[PBAN] Working on " + file + " for Banned APIs verification")

    bannedapis = getbannedapis()
    if bannedapis is None:
        logmsg = "[PBAN] {-} Skipping Banned Analysis."
        _pbanlogger.log(logging.DEBUG, logmsg)
        return

    basename = os.path.basename(file)

    pe_file = None
    try:
        pe_file = pefile.PE(file)
    except pefile.PEFormatError as peerror:
        # the exception class lives on the pefile module, not on the
        # (still-None) pe_file instance
        logmsg = (
            "[PBAN] {-} Skipping DB insertion. Issue with handling PE file: " +
            str(peerror.value))
        _pbanlogger.log(logging.DEBUG, logmsg)
        return

    hashes = getfilehashes(file)
    if hashes is None:
        _pbanlogger.log(logging.DEBUG,
                        "[PBAN] Error getting hashes for " + file)
        return

    dbcursor = DBCONN2.cursor()
    dbcursor.execute("BEGIN TRANSACTION")

    if hasattr(pe_file, 'DIRECTORY_ENTRY_IMPORT'):
        for module in pe_file.DIRECTORY_ENTRY_IMPORT:
            for importm in module.imports:
                if importm.name is not None and \
                        importm.name.decode('ascii') in bannedapis:
                    mname = module.dll.decode('ascii')
                    fn = importm.name.decode('ascii')
                    try:
                        dbcursor.execute(
                            "INSERT INTO " + "BannedApiFiles" +
                            " VALUES (" + "?," * 5 + "?)",
                            # FileName, SHA256, SHA1, ModuleName, BannedApiUsed
                            (
                                basename, hashes[0], hashes[1], mname, fn,
                                # timestamp
                                str(time())))
                    except sqlite3.Error as error:
                        _pbanlogger.log(logging.DEBUG, (
                            "[PBAN] INSERT BannedApiFiles error "
                            "(incomplete): " + error.args[0]))

    dbcursor.execute("END TRANSACTION")
    _pbanlogger.log(logging.DEBUG, ("[PBAN] Completed " + file))
    dbcursor.close()

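# Minimal standalone sketch (hedged) of the import walk findbannedapis
# relies on; the path is illustrative and any PE with an import table works:
#
#     import pefile
#     pe = pefile.PE("C:\\Windows\\System32\\notepad.exe", fast_load=True)
#     pe.parse_data_directories(
#         directories=[pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT']])
#     for module in getattr(pe, 'DIRECTORY_ENTRY_IMPORT', []):
#         for imp in module.imports:
#             if imp.name is not None:
#                 print(module.dll.decode('ascii'), imp.name.decode('ascii'))
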
def writesymbol(file, symchkerr, symchkout, sha256, sha1, infolist,
                exdest, dbname=globs.SYMBOLFILESDBNAME, conn=globs.DBCONN):
    '''
    The fields read from symchk.exe correspond to the _IMAGEHLP_MODULE64
    structure documented on MSDN:
    https://docs.microsoft.com/en-us/windows/desktop/api/dbghelp/ns-dbghelp-_imagehlp_module64
    '''
    global _wdblogger

    basename = os.path.basename(file)
    dbcursor = conn.cursor()

    ignored = False
    symcontains = 'UNKNOWN'
    public = False
    private = False
    ignoredreason = 'None'
    hashes = ("", "")

    # "Line nubmers:" is spelled the way symchk's verbose output spells it
    symchkarr = {
        "Struct size:": '',
        "Base:": 0,
        "Image size:": 0,
        "Date:": 0,
        "Checksum:": 0,
        "NumSyms:": '',
        "SymType:": '',
        "ModName:": '',
        "ImageName:": '',
        "LoadedImage:": '',
        "PDB:": '',
        "CV:": '',
        "CV DWORD:": 0,
        "CV Data:": '',
        "PDB Sig:": '',
        "PDB7 Sig:": '',
        "Age:": 0,
        "PDB Matched:": '',
        "DBG Matched:": '',
        "Line nubmers:": '',
        "Global syms:": '',
        "Type Info:": '',
        "SymbolCheckVersion": 0,
        "Result": 0,
        "DbgFilename": '',
        "DbgTimeDateStamp": 0,
        "DbgSizeOfImage": 0,
        "DbgChecksum": 0,
        "PdbFilename": '',
        "PdbSignature": '',
        "PdbDbiAge": 0
    }

    for line in symchkerr:
        try:
            if re.search("^DBGHELP: " + basename.split('.')[0] + " - ", line):
                symcontains = line.split("- ")[1]
                if re.search("public", symcontains):
                    public = True
                elif re.search("private", symcontains):
                    private = True
        except IndexError as ierror:
            _wdblogger.log(
                logging.DEBUG,
                "[WSUS_DB] {-} Parsing symchk output DBGHELP: " +
                str(ierror) + " on " + file)
            continue

        for field in symchkarr:
            result = parseline(field, line)
            if result is not None:
                symchkarr[field] = result

    if re.search(" IGNORED -", symchkout[-5]):
        ignored = True
        ignoredreason = symchkout[-5].split(" - ")[1]

    if symchkarr["SymType:"] == "SymNone":
        source = ''
    else:
        source = symchkerr[-1]

    logmsg = "[WSUS_DB] Inserting new file " + str(
        file) + " and hash (SHA1/" + sha1 + ") into symbolDB"
    _wdblogger.log(logging.DEBUG, logmsg)

    symbolobtained = int(False)
    # update PatchedFile table (symbol obtained and ignored status)
    if int(public) != 0 or int(private) != 0:
        symbolobtained = int(True)
        dbcursor.execute(("UPDATE " + globs.PATCHEDFILESDBNAME +
                          " SET SymbolObtained = " +
                          "{} WHERE SHA256 = '{}' AND Signature = '{}'"
                          ).format(symbolobtained, sha256,
                                   infolist['signature']))
        dbcursor.execute(("UPDATE " + globs.PATCHEDFILESDBNAME +
                          " SET SymbolPath = '{}' WHERE " +
                          "SHA256 = '{}' AND Signature = '{}'"
                          ).format(symchkarr["PDB:"], sha256,
                                   infolist['signature']))

        base = os.path.basename(exdest)
        uindex = 0
        for index, x in enumerate(symchkarr["ImageName:"].split("\\")):
            if x == base:
                uindex = index + 1
        updateid = symchkarr["ImageName:"].split("\\")[uindex]

        dbcursor.execute(("UPDATE " + globs.PATCHEDFILESDBNAME +
                          " SET UpdateId = '{}' WHERE " +
                          "SHA256 = '{}' AND Signature = '{}'"
                          ).format(updateid, sha256, infolist['signature']))

        symchkarr["PDB:"] = symchkarr["PDB:"].strip('"')
        hashes = getfilehashes(symchkarr["PDB:"])
        if hashes is None:
            # fall back to empty hashes if the PDB could not be read
            hashes = ("", "")

    if ignored:
        dbcursor.execute(
            "UPDATE " + globs.PATCHEDFILESDBNAME +
            " SET Ignored = {} WHERE SHA256 = '{}' AND Signature = '{}'"
            .format(int(ignored), sha256, infolist['signature']))

    dbcursor.execute(
        "INSERT INTO " + dbname + " VALUES (" + "?," * 42 + "?)",
        # FileName, Architecture, Signature, SHA256
        (
            basename, infolist['arch'], infolist['signature'], hashes[0],
            # SHA1, PublicSymbol, PrivateSymbol
            hashes[1], int(public), int(private),
            # SymbolContains, structSize, base, imagesize, symDate
            symcontains, symchkarr["Struct size:"], symchkarr["Base:"],
            symchkarr["Image size:"], symchkarr["Date:"],
            # checksum (int), numsyms (int), symtype, modname, imagename
            symchkarr["Checksum:"], symchkarr["NumSyms:"],
            symchkarr["SymType:"], symchkarr["ModName:"],
            symchkarr["ImageName:"],
            # loadedimage, pdb, CV, CVDWORD, CVData
            symchkarr["LoadedImage:"], symchkarr["PDB:"], symchkarr["CV:"],
            symchkarr["CV DWORD:"], symchkarr["CV Data:"],
            # PDBSig, PDB7Sig, Age, PDBMatched, DBGMatched
            symchkarr["PDB Sig:"], symchkarr["PDB7 Sig:"], symchkarr["Age:"],
            symchkarr["PDB Matched:"], symchkarr["DBG Matched:"],
            # LineNumbers, GlobalSyms, TypeInfo, SymbolCheckVersionUsed,
            # DbgFilename
            symchkarr["Line nubmers:"], symchkarr["Global syms:"],
            symchkarr["Type Info:"], symchkarr["SymbolCheckVersion"],
            symchkarr['DbgFilename'],
            # DbgTimeDateStamp, DbgSizeOfImage
            symchkarr['DbgTimeDateStamp'], symchkarr["DbgSizeOfImage"],
            # DbgChecksum, PdbFilename, PdbSignature, PdbDbiAge
            symchkarr["DbgChecksum"], symchkarr["PdbFilename"],
            symchkarr["PdbSignature"], symchkarr["PdbDbiAge"],
            # Source, Result, Ignored, IgnoredReason
            source, symchkarr["Result"], int(ignored), ignoredreason,
            # SymbolObtained
            symbolobtained))

    dbcursor.close()
    return True

def binskimanalysis(file, sympath):
    global _bsklogger

    _bsklogger.log(
        logging.DEBUG,
        "[PBSK] Working on " + file + " with symservr (" + sympath + ")")

    vsympath = Path(sympath)
    vfile = Path(file)
    if not vsympath.exists():
        _bsklogger.log(
            logging.DEBUG,
            "[PBSK] Provided symbol path (" + sympath +
            ") does not exist. Skipping " + file)
        return
    elif not vfile.is_file():
        _bsklogger.log(
            logging.DEBUG,
            "[PBSK] Provided file (" + file + ") does not exist. Skipping.")
        return

    strtime = str(time())
    basename = os.path.basename(file)
    bskjson = "_" + basename + "_" + strtime + "_binskim.json"
    args = (".\\tools\\x64\\binskim\\binskim.exe analyze \"" + file +
            "\" --verbose --sympath \"Cache*" + sympath +
            "\" -o \"" + bskjson + "\" -p -f")

    _bsklogger.log(logging.DEBUG, "[PBSK] Starting: " + args)

    try:
        with subprocess.Popen(args, shell=False,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE) as pbinskim:
            dummy = pbinskim.communicate()
    except subprocess.CalledProcessError as error:
        logmsg = ("[PBSK] {-} BinSkim failed with error: " +
                  str(error.returncode) + ". File: " + file)
        _bsklogger.log(logging.DEBUG, logmsg)
        return
    except FileNotFoundError as error:
        logmsg = "[PBSK] {-} BinSkim.exe not found"
        _bsklogger.log(logging.DEBUG, logmsg)
        return

    hashes = getfilehashes(file)
    if hashes is None:
        _bsklogger.log(logging.DEBUG, "[PBSK] Error getting hashes for " + file)
        return

    count = 0
    dbcursor = DBCONN2.cursor()
    dbcursor.execute("BEGIN TRANSACTION")
    try:
        with open(bskjson) as data_file:
            data = None
            try:
                data = json.load(data_file)
            except json.decoder.JSONDecodeError as error:
                _bsklogger.log(logging.DEBUG,
                               ("[PBSK] JSON error: " + error.msg))
                dbcursor.execute("END TRANSACTION")
                dbcursor.close()
                return

            for entry in data["runs"][0]["results"]:
                if entry["ruleId"][:3] != "BA3":  # ignore ELF rules
                    msg = constructSarifMsg(entry["ruleId"],
                                            entry["message"]["messageId"],
                                            entry["message"]["arguments"],
                                            data)
                    try:
                        dbcursor.execute(
                            "INSERT INTO " + "BinSkimFiles" +
                            " VALUES (" + "?," * 7 + "?)",
                            # FileName, SHA256, SHA1, RuleId, Result
                            (
                                basename, hashes[0], hashes[1],
                                entry["ruleId"], entry["level"],
                                # MessageId, Message
                                entry["message"]["messageId"], msg,
                                # time
                                strtime))
                        count = count + 1
                    except sqlite3.Error as error:
                        _bsklogger.log(
                            logging.DEBUG,
                            ("[PBSK] INSERT Rules error (incomplete): " +
                             error.args[0]))

            for entry in data["runs"][0]["invocations"]:
                if "configurationNotifications" not in entry:
                    continue
                for ec in entry["configurationNotifications"]:
                    if ec["ruleId"][:3] != "BA3":  # ignore ELF rules
                        try:
                            dbcursor.execute(
                                "INSERT INTO " + "BinSkimFiles" +
                                " VALUES (" + "?," * 7 + "?)",
                                # FileName, SHA256, SHA1, RuleId, Result
                                (
                                    basename, hashes[0], hashes[1],
                                    ec["ruleId"], ec["id"],
                                    # MessageId, Message
                                    "", ec["message"]["text"],
                                    # time
                                    strtime))
                            count = count + 1
                        except sqlite3.Error as error:
                            _bsklogger.log(logging.DEBUG, (
                                "[PBSK] INSERT ConfigurationNotifications "
                                "error (incomplete): " + error.args[0]))
    except FileNotFoundError as error:
        logmsg = ("[PBSK] {-} Skipping insertion into DB. " + bskjson +
                  " not found.")
        _bsklogger.log(logging.DEBUG, logmsg)

    dbcursor.execute("END TRANSACTION")
    rmfile(bskjson)
    _bsklogger.log(logging.DEBUG,
                   ("[PBSK] " + str(count) + " rules were applied on " + file))
    dbcursor.close()

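# Minimal sketch (hedged) of the SARIF v1 layout this parser assumes;
# "example_binskim.json" is hypothetical:
#
#     import json
#     with open("example_binskim.json") as fh:
#         run = json.load(fh)["runs"][0]
#     for result in run["results"]:               # rule hits
#         print(result["ruleId"], result["level"])
#     for inv in run["invocations"]:              # rules that could not run
#         for note in inv.get("configurationNotifications", []):
#             print(note["ruleId"], note["message"]["text"])
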
def cleantask(cls, jobfile, updateid):
    '''
    Task to clean up a folder before submitting jobs for symbol search.
    If the item is removed during cleaning, None is returned; otherwise
    the item is returned.
    '''
    clnlogger = logging.getLogger("BAM.Pools.ClnWkr")
    results = None

    logmsg = "[CLNMGR] Starting on " + str(jobfile)
    clnlogger.log(logging.DEBUG, logmsg)

    if ispe(jobfile):
        # check db to see if job already exists:
        hashes = getfilehashes(jobfile)
        if hashes is None:
            return hashes

        if wsuse_db.dbentryexistwithsymbols(
                globs.DBCONN.cursor(), globs.PATCHEDFILESDBNAME,
                hashes[0], hashes[1]):
            # if the PE is already in the db with symbols obtained, do not
            # retask the job to the symbol manager; return None instead
            return results

        logmsg = "[CLNMGR] continuing forward with " + str(jobfile)
        clnlogger.log(logging.DEBUG, logmsg)

        # getting to this point means the item is not in the db; may need to
        # come up with a case where the db needs to update the item though
        infolist = {
            'OriginalFilename': '',
            'FileDescription': '',
            'ProductName': '',
            'Comments': '',
            'CompanyName': '',
            'FileVersion': '',
            'ProductVersion': '',
            'IsDebug': '',
            'IsPatched': '',
            'IsPreReleased': '',
            'IsPrivateBuild': '',
            'IsSpecialBuild': '',
            'Language': '',
            'PrivateBuild': '',
            'SpecialBuild': ''
        }

        try:
            unpefile = pefile.PE(jobfile, fast_load=True)
        except pefile.PEFormatError as peerror:
            logmsg = "[CLNMGR] skipping " + str(jobfile) + \
                " due to exception: " + peerror.value
            clnlogger.log(logging.ERROR, logmsg)
            return results

        infolist['fileext'], infolist['stype'] = pebinarytype(unpefile)
        infolist['arch'] = getpearch(unpefile)
        infolist['age'] = getpeage(unpefile)
        infolist['strippedpe'] = ispedbgstripped(unpefile)
        infolist['builtwithdbginfo'] = ispebuiltwithdebug(unpefile)

        direntries = [
            pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_DEBUG'],
            pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_RESOURCE']
        ]
        unpefile.parse_data_directories(directories=direntries)

        infolist['pdbfilename'] = getpepdbfilename(unpefile)
        infolist['signature'] = getpesigwoage(unpefile)

        # a PE only has 1 VERSIONINFO, but multiple language strings.
        # More information on the different properties can be found at
        # https://msdn.microsoft.com/en-us/library/windows/desktop/aa381058
        # https://msdn.microsoft.com/en-us/library/windows/desktop/aa381049
        versionfields = (
            "OriginalFilename", "FileDescription", "ProductName",
            "Comments", "CompanyName", "FileVersion", "ProductVersion",
            "IsDebug", "IsPatched", "IsPreReleased", "IsPrivateBuild",
            "IsSpecialBuild", "PrivateBuild", "SpecialBuild")
        if getattr(unpefile, "VS_VERSIONINFO", None) is not None and \
                getattr(unpefile, "FileInfo", None) is not None:
            for fileinfoentries in unpefile.FileInfo:
                for fileinfoentry in fileinfoentries:
                    if getattr(fileinfoentry, "StringTable", None) is None:
                        continue
                    for strtable in fileinfoentry.StringTable:
                        # Currently only handling unicode en-us
                        if strtable.LangID[:4] == b'0409' or \
                                (strtable.LangID[:4] == b'0000' and
                                 (strtable.LangID[4:] == b'04b0' or
                                  strtable.LangID[4:] == b'04B0')):
                            infolist["Language"] = \
                                strtable.LangID.decode("utf-8")
                            for field, value in strtable.entries.items():
                                dfield = field.decode('utf-8')
                                dvalue = value.decode('utf-8')
                                if dfield in versionfields:
                                    infolist[dfield] = dvalue

        # Get the OS this PE is designed towards.
        # Microsoft PE files distributed via Microsoft's Updates typically
        # use the ProductVersion file property to indicate the OS a specific
        # PE file is built towards.
        # If this is a Microsoft binary, the ProductVersion is typically the
        # OS version it was built towards; for other products this is not
        # necessarily true.
        if infolist['ProductName'].find("Operating System") != -1:
            infolist['osver'] = "NT" + infolist['ProductVersion']
        else:
            infolist['osver'] = "UNKNOWN"

        unpefile.close()
        results = ((str(jobfile), updateid), hashes[0], hashes[1], infolist)
    else:
        # if jobfile is not a PE, check whether it is a cab; if not, remove it
        if not validatecab(str(jobfile)):
            logmsg = "[CLNMGR] cleantask: Removing " + str(jobfile)
            clnlogger.log(logging.DEBUG, logmsg)
            rmfile(jobfile)
            logmsg = "[CLNMGR] " + str(jobfile) + " removed, not PE or cab file"
            clnlogger.log(logging.DEBUG, logmsg)
        else:
            logmsg = "[CLNMGR] " + str(jobfile) + " is nested cab, skipping"
            clnlogger.log(logging.DEBUG, logmsg)
        return results

    logmsg = "[CLNMGR] completed one cleantask for " + str(jobfile)
    clnlogger.log(logging.DEBUG, logmsg)
    return results

def extracttask(cls, src, pdir, dst):
    '''
    Task for workers to extract the contents of a .cab file and return the
    directory of results for use by the cleaner.
    '''
    extlogger = logging.getLogger("BAM.Pools.ExWkr")

    hashes = getfilehashes(src)
    if hashes is None:
        return hashes

    entryexists = False
    if cls.verifyentry(src, hashes[0], hashes[1], extlogger):
        entryexists = True

    logmsg = "[EXMGR] started on " + str(src) + \
        " extracting files to " + str(dst)
    extlogger.log(logging.DEBUG, logmsg)

    # initialize deliverables
    deliverables = None

    # "PSFX" indicates that this cab is one of the new update formats that
    # MS started using for v1809 and forward; this type of update cannot be
    # handled yet, so skip it.
    if "PSFX" in src or "psfx" in src:
        return deliverables

    newname = src.split("\\")[-1].lstrip()

    # If the file being worked on is a PE file, see if it can be opened
    # with 7z.exe and whether it contains PE files. Otherwise, skip to
    # other update files.
    if ispe(src):
        logmsg = "[EXMGR] extracting PE file (" + src + ")..."
        extlogger.log(logging.DEBUG, logmsg)

        newdir = (dst + "\\" + newname).split(".exe")[0]
        try:
            os.mkdir(newdir)
        except FileExistsError:
            pass
        except OSError as oserror:
            logmsg = "[EXMGR] OSError creating new directory... skipping " + \
                "extraction for (" + src + "). Error: " + str(oserror)
            extlogger.log(logging.ERROR, logmsg)
            return deliverables

        if not entryexists and \
                cls.perform7zextract(src, newdir, extlogger) is None:
            return deliverables

        deliverables = ((newdir, []), hashes[0], hashes[1])

        # if nothing was extracted, remove the (empty) directory to clean up
        try:
            os.rmdir(newdir)
        except OSError:
            pass
    else:
        if not validatecab(str(src)):
            logmsg = "[EXMGR] {-} invalid file: " + src
            extlogger.log(logging.ERROR, logmsg)
            return None

        # make a new directory to hold the extracted files
        newdir = ""
        # if dst is already part of src, this must be a nested cab file
        if dst in src:
            newdir = str(os.path.dirname(src))
        # otherwise the cab is brand new and a new directory is created in dst
        else:
            if ".cab" in newname:
                newdir = (dst + "\\" + newname).split(".cab")[0]
            elif ".msu" in newname:
                newdir = (dst + "\\" + newname).split(".msu")[0]
            try:
                os.mkdir(newdir)
            except FileExistsError:
                pass
            except OSError as oserror:
                logmsg = "[EXMGR] OSError creating new directory... " + \
                    "skipping extraction for (" + src + "). Error: " + \
                    str(oserror)
                extlogger.log(logging.ERROR, logmsg)
                return deliverables

        if not entryexists:
            # extract .dll, .exe and .sys first
            cls.performcabextract("*.dll", src, newdir, extlogger)
            cls.performcabextract("*.exe", src, newdir, extlogger)
            cls.performcabextract("*.sys", src, newdir, extlogger)

        deliverables = ((newdir, []), hashes[0], hashes[1])

        # search through the rest of the .cab for nested cabs or msus to
        # extract again
        if not entryexists:
            listing = cls.performcablisting(src, extlogger)
            if listing is None:
                return deliverables

            stroutput = listing.decode("ascii").split("\r\n")

            # "psfx" indicates that this cab is one of the new update
            # formats that MS started using for v1809 and forward; this
            # type of update cannot be handled yet, so skip it.
            if "psfx" in stroutput[3] or "PSFX" in stroutput[4]:
                return deliverables

            for line in stroutput:
                if line.endswith(".cab") or line.endswith(".msu"):
                    # expand that line only to start another thread on it
                    potentialfile = line.split(":")[-1].lstrip()

                    # make a new directory to store the nested cab.
                    # nested cabs with the same name may exist; keep contents
                    # under the newly created extracted directory for the
                    # update
                    parentdir = src.split("\\")[-1][0:-4]
                    ncabdir = str(dst) + "\\" + str(parentdir) + "\\" + \
                        str(potentialfile)[0:-4]
                    if not os.path.exists(ncabdir):
                        try:
                            os.mkdir(ncabdir)
                            ncabdir = Path(ncabdir).resolve()
                        except OSError as error:
                            logmsg = "[EXMGR] {-} unable to make nested " + \
                                "cab directory: " + str(error)
                            extlogger.log(logging.ERROR, logmsg)
                            break

                    logmsg = "[EXMGR] beginning extraction of nested cab: " + \
                        str(src)
                    extlogger.log(logging.DEBUG, logmsg)

                    extractstdout = cls.performcabextract(
                        potentialfile, src, str(ncabdir), extlogger)
                    if extractstdout is not None:
                        # Case where nested cabs exist alongside a
                        # .manifest file
                        newpath = None
                        for root, dummy, cabs in os.walk(ncabdir):
                            for cab in cabs:
                                if str(cab) == potentialfile:
                                    newpath = Path(
                                        os.path.join(root, cab)).resolve()
                                    break

                        if newpath is None:
                            continue

                        # if the file is not a cab/msu, remove it since
                        # that's all we're interested in at this point
                        if not validatecab(str(newpath)):
                            logmsg = "[EXMGR] {-} extracttask: " + \
                                str(newpath) + " extracted from " + \
                                str(src) + " is not a valid cab"
                            extlogger.log(logging.ERROR, logmsg)
                            logmsg = "[EXMGR] extracttask: Removing " + \
                                str(newpath)
                            extlogger.log(logging.ERROR, logmsg)
                            rmfile(newpath)
                            continue

                        logmsg = "[EXMGR] Creating " + str(newpath) + \
                            " for new thread..."
                        extlogger.log(logging.DEBUG, logmsg)

                        # return the new location of the extracted cab for
                        # addition to the job queue
                        deliverables[0][1].append(str(newpath))

    logmsg = "[EXMGR] Extraction task completed for " + src
    extlogger.log(logging.DEBUG, logmsg)
    return deliverables
