Example #1
def dump(tocPath, outPath):
    toc = dbo.readToc(tocPath)
    if not (toc.getSubEntry("bundles") or toc.get("chunks")):
        return  #there's nothing to extract (the sb might not even exist)

    sbPath = tocPath[:-3] + "sb"
    sb = open(sbPath, "rb")

    chunkPathToc = os.path.join(outPath, "chunks")
    bundlePath = os.path.join(outPath, "bundles")
    ebxPath = os.path.join(bundlePath, "ebx")
    resPath = os.path.join(bundlePath, "res")
    chunkPath = os.path.join(bundlePath, "chunks")

    if not toc.get("das"):
        raise Exception("Non-DAS superbundle found in NFS: Edge.")

    bundles = toc.getSubEntry("bundles")  #names offsets sizes (list sizes should be same)
    offsets = bundles.get("offsets")

    for offset in offsets:
        sb.seek(offset.content)
        bundle = dbo.DbObject(sb)

        for entry in bundle.get("ebx", list()):  #name sha1 size originalSize
            path = os.path.join(ebxPath, entry.get("name") + ".ebx")
            if payload.casBundlePayload(entry, path, entry.get("originalSize")):
                ebx.addEbxGuid(path, ebxPath)

        for entry in bundle.get("res", list()):  #name sha1 size originalSize resRid resType resMeta
            path = os.path.join(resPath, entry.get("name") + ".res")
            payload.casBundlePayload(entry, path, entry.get("originalSize"))

        for entry in bundle.get("chunks", list(
        )):  #id sha1 size logicalOffset logicalSize chunkMeta::meta
            path = os.path.join(chunkPath, entry.get("id").format() + ".chunk")
            payload.casBundlePayload(
                entry, path,
                entry.get("logicalOffset") + entry.get("logicalSize"))

    #Deal with the chunks which are defined directly in the toc.
    #These chunks do NOT know their originalSize.
    for entry in toc.get("chunks"):  #id sha1
        targetPath = os.path.join(chunkPathToc, entry.get("id").format() + ".chunk")
        payload.casChunkPayload(entry, targetPath)

    sb.close()
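
The dump function above takes the path of one superbundle toc and an output folder, so a driver script only has to walk the data directory and call dump on every toc it finds. A minimal sketch, assuming the dump(tocPath, outPath) signature from Example #1; dataDir, targetDir and the mirrored output layout are assumptions for illustration:

import os

def dumpAll(dataDir, targetDir):
    #Walk the superbundle tree and dump every toc found, mirroring the
    #relative superbundle path under targetDir (hypothetical layout).
    for root, dirs, files in os.walk(dataDir):
        for fname in files:
            if not fname.lower().endswith(".toc"):
                continue
            tocPath = os.path.join(root, fname)
            relPath = os.path.relpath(tocPath, dataDir)[:-4]  #strip ".toc"
            dump(tocPath, os.path.join(targetDir, relPath))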
Example #2
def dump(tocPath,baseTocPath,outPath):
    """Take the filename of a toc and dump all files to the targetFolder."""

    #Depending on how you look at it, there can be up to 2*(3*3+1)=20 different cases:
    #    The toc has a cas flag which means all assets are stored in the cas archives. => 2 options
    #        Each bundle has either a delta or base flag, or no flag at all. => 3 options
    #            Each file in the bundle is one of three types: ebx/res/chunks => 3 options
    #        The toc itself contains chunks. => 1 option
    #
    #Simplify things by ignoring base bundles (they just state that the unpatched bundle is used),
    #which is alright, as the user needs to dump the unpatched files anyway.
    #
    #Additionally, add some common fields to the ebx/res/chunks entries so they can be treated the same.
    #=> 6 cases.

    toc=dbo.readToc(tocPath)
    if not (toc.get("bundles") or toc.get("chunks")): return #there's nothing to extract (the sb might not even exist)

    sbPath=tocPath[:-3]+"sb"
    sb=open(sbPath,"rb")

    chunkPathToc=os.path.join(outPath,"chunks")
    bundlePath=os.path.join(outPath,"bundles")
    ebxPath=os.path.join(bundlePath,"ebx")
    resPath=os.path.join(bundlePath,"res")
    chunkPath=os.path.join(bundlePath,"chunks")

    ###read the bundle depending on the four types (+cas+delta, +cas-delta, -cas+delta, -cas-delta) and choose the right function to write the payload
    if toc.get("cas"):
        for tocEntry in toc.get("bundles"): #id offset size, size is redundant
            if tocEntry.get("base"): continue #Patched bundle. However, use the unpatched bundle because no file was patched at all.

            sb.seek(tocEntry.get("offset"))
            bundle=dbo.DbObject(sb)
                    
            #pick the right function
            if tocEntry.get("delta"):
                writePayload=payload.casPatchedBundlePayload
            else:
                writePayload=payload.casBundlePayload

            for entry in bundle.get("ebx",list()): #name sha1 size originalSize
                path=os.path.join(ebxPath,entry.get("name")+".ebx")
                if writePayload(entry,path,False):
                    ebx.addEbxGuid(path,ebxPath)

            for entry in bundle.get("res",list()): #name sha1 size originalSize resRid resType resMeta
                path=os.path.join(resPath,entry.get("name")+".res")
                writePayload(entry,path,False)

            for entry in bundle.get("chunks",list()): #id sha1 size logicalOffset logicalSize chunkMeta::h32 chunkMeta::meta
                path=os.path.join(chunkPath,entry.get("id").format()+".chunk")
                writePayload(entry,path,True)

        #Deal with the chunks which are defined directly in the toc.
        #These chunks do NOT know their originalSize.
        for entry in toc.get("chunks"): #id sha1
            targetPath=os.path.join(chunkPathToc,entry.get("id").format()+".chunk")
            payload.casChunkPayload(entry,targetPath)
    else:
        for tocEntry in toc.get("bundles"): #id offset size, size is redundant
            if tocEntry.get("base"): continue #Patched bundle. However, use the unpatched bundle because no file was patched at all.

            sb.seek(tocEntry.get("offset"))

            if tocEntry.get("delta"):
                #The sb currently points at the delta file.
                #Read the unpatched toc of the same name to get the base bundle.
                baseToc=dbo.readToc(baseTocPath)
                for baseTocEntry in baseToc.get("bundles"):
                    if baseTocEntry.get("id").lower() == tocEntry.get("id").lower():
                        break
                else: #if no base bundle with this name has been found:
                    pass #use the last base bundle. This is okay because it is actually not used at all (the delta uses instructionType 3 only).
                    
                basePath=baseTocPath[:-3]+"sb"
                base=open(basePath,"rb")
                base.seek(baseTocEntry.get("offset"))
                bundle=noncas.patchedBundle(base, sb) #create a patched bundle using base and delta
                base.close()
                writePayload=payload.noncasPatchedBundlePayload
                sourcePath=[basePath,sbPath] #base, delta
            else:
                bundle=noncas.unpatchedBundle(sb)
                writePayload=payload.noncasBundlePayload
                sourcePath=sbPath

            for entry in bundle.ebx:
                path=os.path.join(ebxPath,entry.name+".ebx")
                if writePayload(entry,path,sourcePath):
                    ebx.addEbxGuid(path,ebxPath)

            for entry in bundle.res:
                path=os.path.join(resPath,entry.name+".res")
                writePayload(entry,path,sourcePath)

            for entry in bundle.chunks:
                path=os.path.join(chunkPath,entry.id.format()+".chunk")
                writePayload(entry,path,sourcePath)

        #Deal with the chunks which are defined directly in the toc.
        #These chunks do NOT know their originalSize.
        for entry in toc.get("chunks"): #id offset size
            targetPath=os.path.join(chunkPathToc,entry.get("id").format()+".chunk")
            payload.noncasChunkPayload(entry,targetPath,sbPath)

    sb.close()
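
The base-bundle lookup above leans on Python's for/else construct: the else branch runs only when the loop completes without hitting break, i.e. when no bundle with a matching id was found. A self-contained sketch of the same idiom (the names are invented for illustration):

def findBundle(bundles, bundleId):
    for entry in bundles:
        if entry["id"].lower() == bundleId.lower():
            break
    else:
        return None  #the loop never hit break => no match
    return entry

print(findBundle([{"id": "Win32/Levels"}], "win32/levels"))  #case-insensitive match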
Example #3
def dump(tocPath, outPath, baseTocPath=None, commonDatPath=None):
    toc = dbo.readToc(tocPath)
    if not (toc.get("bundles") or toc.get("chunks")):
        return  #there's nothing to extract (the sb might not even exist)

    sbPath = tocPath[:-3] + "sb"
    sb = openSbFile(sbPath)

    chunkPathToc = os.path.join(outPath, "chunks")
    bundlePath = os.path.join(outPath, "bundles")
    ebxPath = os.path.join(bundlePath, "ebx")
    dbxPath = os.path.join(bundlePath, "dbx")
    resPath = os.path.join(bundlePath, "res")
    chunkPath = os.path.join(bundlePath, "chunks")

    if toc.get("cas"):
        #deal with cas bundles => ebx, dbx, res, chunks.
        for tocEntry in toc.get("bundles"):  #id offset size, size is redundant
            sb.seek(tocEntry.get("offset"))
            bundle = dbo.DbObject(sb)

            for entry in bundle.get("ebx",
                                    list()):  #name sha1 size originalSize
                path = os.path.join(ebxPath, entry.get("name") + ".ebx")
                if casBundlePayload(entry, path, False):
                    ebx.addEbxGuid(path, ebxPath)

            for entry in bundle.get("dbx",
                                    list()):  #name sha1 size originalSize
                if entry.get(
                        "idata"
                ):  #dbx appear only idata if at all, they are probably deprecated and were not meant to be shipped at all.
                    path = os.path.join(dbxPath, entry.get("name") + ".dbx")
                    out = open2(path, "wb")
                    if entry.get("size") == entry.get("originalSize"):
                        out.write(entry.get("idata"))
                    else:
                        out.write(zlibIdata(entry.get("idata")))
                    out.close()

            for entry in bundle.get("res", list()):  #name sha1 size originalSize resType resMeta
                path = os.path.join(resPath, entry.get("name") + ".res")
                casBundlePayload(entry, path, False)

            for entry in bundle.get("chunks", list()):  #id sha1 size chunkMeta::h32 chunkMeta::meta
                path = os.path.join(chunkPath, entry.get("id").format() + ".chunk")
                casBundlePayload(entry, path, True)

        #deal with cas chunks defined in the toc.
        for entry in toc.get("chunks"):  #id sha1
            path = os.path.join(chunkPathToc, entry.get("id").format() + ".chunk")
            casChunkPayload(entry, path)

    else:
        #deal with noncas bundles
        for tocEntry in toc.get("bundles"):  #id offset size, size is redundant
            if tocEntry.get("base"):
                continue  #Patched noncas bundle. However, use the unpatched bundle because no file was patched at all.

            sb.seek(tocEntry.get("offset"))

            if tocEntry.get("delta"):
                #Patched noncas bundle. Here goes the hilarious part. Take the patched data and glue parts from the unpatched data in between.
                #When that is done (in memory of course) the result is a new valid bundle file that can be read like an unpatched one.

                deltaSize, deltaMagic, padding = unpack(">IIQ", sb.read(16))

                class Delta:
                    def __init__(self, sb):
                        self.size, self.typ, self.offset = unpack(">IiQ", sb.read(16))

                deltas = list()
                for deltaEntry in range(deltaSize // 16):
                    deltas.append(Delta(sb))

                bundleStream = io.BytesIO()  #here be the new bundle data
                unpatchedSb = openSbFile(baseTocPath[:-3] + "sb")
                commonDat = open(commonDatPath, "rb") if commonDatPath and os.path.isfile(commonDatPath) else None

                for delta in deltas:
                    if delta.typ == 1:
                        unpatchedSb.seek(delta.offset)
                        bundleStream.write(unpatchedSb.read(delta.size))
                    elif delta.typ == 0:
                        bundleStream.write(sb.read(delta.size))
                    elif delta.typ == -1:
                        if not commonDat:
                            raise Exception(
                                "Found delta type -1 without common.dat present."
                            )
                        commonDat.seek(delta.offset)
                        bundleStream.write(commonDat.read(delta.size))
                    else:
                        raise Exception(
                            "Unknown delta type %d in patched bundle at 0x%08x"
                            % (delta.typ, tocEntry.get("offset")))

                unpatchedSb.close()
                if commonDat: commonDat.close()
                bundleStream.seek(0)

                bundle = noncas.Bundle(bundleStream)
                sb2 = bundleStream
            else:
                bundle = noncas.Bundle(sb)
                sb2 = sb

            for entry in bundle.ebxEntries:
                path = os.path.join(ebxPath, entry.name + ".ebx")
                if noncasBundlePayload(sb2, entry, path):
                    ebx.addEbxGuid(path, ebxPath)

            for entry in bundle.resEntries:
                originalSize = entry.originalSize
                path = os.path.join(resPath, entry.name + ".res")
                noncasBundlePayload(sb2, entry, path)

            for entry in bundle.chunkEntries:
                path = os.path.join(chunkPath, entry.id.format() + ".chunk")
                noncasBundlePayload(sb2, entry, path)

        #deal with noncas chunks defined in the toc
        for entry in toc.get("chunks"):  #id offset size
            path = os.path.join(chunkPathToc, entry.get("id").format() + ".chunk")
            noncasChunkPayload(sb, entry, path)

    #Clean up.
    sb.close()
    clearTempFiles()
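
The patched noncas branch in Example #3 rebuilds a valid bundle in memory by interleaving three sources according to the 16-byte delta instructions: type 1 copies from the unpatched superbundle, type 0 copies new bytes from the delta itself, and type -1 copies from common.dat. The same logic, pulled out into a standalone helper (the function name and stream parameters are hypothetical):

import io
from struct import unpack

def mergeDeltaBundle(deltaStream, baseStream, commonDat=None):
    #deltaStream must already be positioned at the start of the delta block.
    deltaSize, deltaMagic, padding = unpack(">IIQ", deltaStream.read(16))
    #Read all instructions first; type-0 payload follows them in the delta stream.
    instructions = [unpack(">IiQ", deltaStream.read(16)) for _ in range(deltaSize // 16)]
    out = io.BytesIO()
    for size, typ, offset in instructions:
        if typ == 1:     #take bytes from the unpatched superbundle
            baseStream.seek(offset)
            out.write(baseStream.read(size))
        elif typ == 0:   #take new bytes straight from the delta
            out.write(deltaStream.read(size))
        elif typ == -1:  #take bytes from common.dat
            if commonDat is None:
                raise Exception("Delta type -1 requires common.dat.")
            commonDat.seek(offset)
            out.write(commonDat.read(size))
        else:
            raise Exception("Unknown delta type %d" % typ)
    out.seek(0)
    return out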
Example #4
                print("Reading %s..." % localPath)
                readCat(fname)

                #Check if there's a patched version.
                patchedName=os.path.join(patchDir,localPath)
                if os.path.isfile(patchedName):
                    print("Reading patched %s..." % os.path.relpath(patchedName,patchDir))
                    readCat(patchedName)

#make the paths absolute and normalize the slashes
gameDirectory=os.path.normpath(gameDirectory)
targetDirectory=os.path.normpath(targetDirectory) #it's an absolute path already
payload.zstdInit()

#Load layout.toc
tocLayout=dbo.readToc(os.path.join(gameDirectory,"Data","layout.toc"))

if not tocLayout.getSubObject("installManifest"):
    if not os.path.isfile(os.path.join(gameDirectory,"Data","das.dal")):
        #Old layout similar to Frostbite 2 with a single cas.cat.
        #Can also be non-cas.
        dataDir=os.path.join(gameDirectory,"Data")
        updateDir=os.path.join(gameDirectory,"Update")
        patchDir=os.path.join(updateDir,"Patch","Data")

        readCat=cas.readCat1

        catPath=os.path.join(dataDir,"cas.cat") #Seems to always be in the same place
        if os.path.isfile(catPath):
            print("Reading cat entries...")
            readCat(catPath)
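
Example #4 stops after the setup: layout.toc (or the presence of das.dal) decides which layout the game uses, and the cat files are then read so that cas bundle entries, which reference their payload only by sha1, can be resolved to a concrete cas archive, offset and size. A hedged sketch of what such a lookup could look like; readCat would fill the table, and the names below (catDict, registerCatEntry, readPayloadBySha1) are not taken from the examples above:

catDict = dict()  #sha1 (bytes) -> (casPath, offset, size)

def registerCatEntry(sha1, casPath, offset, size):
    #Patched cat files are read after the unpatched ones (as in Example #4),
    #so a patched entry simply overwrites its unpatched counterpart.
    catDict[sha1] = (casPath, offset, size)

def readPayloadBySha1(sha1):
    casPath, offset, size = catDict[sha1]
    with open(casPath, "rb") as cas:
        cas.seek(offset)
        return cas.read(size)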