def createManifest(self):
    """
    Write the OVF manifest (.mf) file for this image.

    The manifest contains one "SHA1(<name>)= <hexdigest>" line for the
    OVF descriptor and one for the disk image.  Side effects: sets
    self.manifestFileName and self.manifestPath, and writes the file at
    self.manifestPath inside self.workingDir.
    """
    sha1Line = 'SHA1(%s)= %s\n'
    self.manifestFileName = self.sanitizedImageName + '.' + constants.MF_EXTENSION
    self.manifestPath = os.path.join(self.workingDir, self.manifestFileName)
    # Compute the digests before opening the output so a digest failure
    # doesn't leave a truncated manifest behind.
    ovfSha1 = sha1helper.sha1FileBin(self.ovfPath).encode('hex')
    diskSha1 = sha1helper.sha1FileBin(self.diskFilePath).encode('hex')
    mfFile = open(self.manifestPath, 'w')
    try:
        # Fix: the original leaked the file handle if a write raised.
        mfFile.write(sha1Line % (self.ovfFileName, ovfSha1))
        mfFile.write(sha1Line % (self.diskFileName, diskSha1))
    finally:
        mfFile.close()
def fileChanged(self, path):
    """
    check to see if the file has changed

    @param path: the path to check
    @return: FILE_MISSING, FILE_CHANGED, FILE_UNCHANGED, FILE_NEW
    @rtype: int
    """
    newPath = util.joinPaths(self.macros.destdir, path)
    if not util.exists(newPath):
        return self.FILE_MISSING
    # Imported here to avoid a module-level import cycle with recipe code.
    from conary.build.recipe import RECIPE_TYPE_CAPSULE
    if self.recipe._recipeType == RECIPE_TYPE_CAPSULE:
        if not os.path.isfile(newPath):
            # for capsules we get everything but contents from
            # the capsule header
            return self.FILE_UNCHANGED
        # For derived capsule recipes we use the exploder to provide the old
        # sha1. For regular capsule recipes we use the capsuleFileSha1s map
        # to provide the old sha1.
        oldSha1 = None
        if os.path.islink(newPath):
            # for symlinks, the "sha1" being compared is the link target
            oldSha1 = os.readlink(newPath)
        elif hasattr(self.recipe, 'exploder'):
            oldf = self.recipe.exploder.fileObjMap.get(path, None)
            if oldf and oldf.hasContents:
                oldSha1 = oldf.contents.sha1()
        else:
            capPaths = self.recipe._getCapsulePathsForFile(path)
            if not capPaths:
                return self.FILE_NEW
            oldSha1 = self.recipe.capsuleFileSha1s[capPaths[0]][path]
        # Fix: compare against None explicitly rather than by truthiness,
        # so a legitimately falsy recorded value cannot be mistaken for
        # "no previous record".
        if oldSha1 is not None:
            if os.path.islink(newPath):
                newSha1 = os.readlink(newPath)
            else:
                newSha1 = sha1helper.sha1FileBin(newPath)
            if oldSha1 == newSha1:
                return self.FILE_UNCHANGED
            return self.FILE_CHANGED
        return self.FILE_NEW
    oldMtime = self.recipe._derivedFiles.get(path, None)
    if os.path.islink(newPath):
        # symlinks are special, we compare the target of the link
        # instead of the mtime
        newMtime = os.readlink(newPath)
    else:
        newMtime = os.lstat(newPath).st_mtime
    # Fix: a recorded mtime of 0 (the epoch) is falsy but still a valid
    # previous value; the original truthiness test misreported such files
    # as FILE_NEW instead of comparing them.
    if oldMtime is not None:
        if oldMtime == newMtime:
            return self.FILE_UNCHANGED
        return self.FILE_CHANGED
    return self.FILE_NEW
def fileChanged(self, path):
    """
    check to see if the file has changed

    @param path: the path to check
    @return: FILE_MISSING, FILE_CHANGED, FILE_UNCHANGED, FILE_NEW
    @rtype: int
    """
    newPath = util.joinPaths(self.macros.destdir, path)
    if not util.exists(newPath):
        return self.FILE_MISSING
    # Imported here to avoid a module-level import cycle with recipe code.
    from conary.build.recipe import RECIPE_TYPE_CAPSULE
    if self.recipe._recipeType == RECIPE_TYPE_CAPSULE:
        if not os.path.isfile(newPath):
            # for capsules we get everything but contents from
            # the capsule header
            return self.FILE_UNCHANGED
        # For derived capsule recipes we use the exploder to provide the old
        # sha1. For regular capsule recipes we use the capsuleFileSha1s map
        # to provide the old sha1.
        oldSha1 = None
        if os.path.islink(newPath):
            # for symlinks, the "sha1" being compared is the link target
            oldSha1 = os.readlink(newPath)
        elif hasattr(self.recipe, 'exploder'):
            oldf = self.recipe.exploder.fileObjMap.get(path, None)
            if oldf and oldf.hasContents:
                oldSha1 = oldf.contents.sha1()
        else:
            capPaths = self.recipe._getCapsulePathsForFile(path)
            if not capPaths:
                return self.FILE_NEW
            oldSha1 = self.recipe.capsuleFileSha1s[capPaths[0]][path]
        # Fix: compare against None explicitly rather than by truthiness,
        # so a legitimately falsy recorded value cannot be mistaken for
        # "no previous record".
        if oldSha1 is not None:
            if os.path.islink(newPath):
                newSha1 = os.readlink(newPath)
            else:
                newSha1 = sha1helper.sha1FileBin(newPath)
            if oldSha1 == newSha1:
                return self.FILE_UNCHANGED
            return self.FILE_CHANGED
        return self.FILE_NEW
    oldMtime = self.recipe._derivedFiles.get(path, None)
    if os.path.islink(newPath):
        # symlinks are special, we compare the target of the link
        # instead of the mtime
        newMtime = os.readlink(newPath)
    else:
        newMtime = os.lstat(newPath).st_mtime
    # Fix: a recorded mtime of 0 (the epoch) is falsy but still a valid
    # previous value; the original truthiness test misreported such files
    # as FILE_NEW instead of comparing them.
    if oldMtime is not None:
        if oldMtime == newMtime:
            return self.FILE_UNCHANGED
        return self.FILE_CHANGED
    return self.FILE_NEW
def updateFileContents(self, path, realPath):
    """
    Refresh the recorded contents information (size and sha1 digest)
    for *path*, reading the actual data from the on-disk file at
    *realPath*.
    """
    fileObj = self.pathMap[path]
    # Only entries with exactly one path component qualify (not payload).
    assert(len(self.pathComponentMap[path]) == 1)
    assert(fileObj.hasContents and isinstance(fileObj, files.RegularFile))
    newSha1 = sha1helper.sha1FileBin(realPath)
    newSize = os.lstat(realPath).st_size
    fileObj.contents.size.set(newSize)
    fileObj.contents.sha1.set(newSha1)
def main(): parser = optparse.OptionParser( usage="usage: %prog http://mirror.host/downloads image.file image=trove[flavor]") parser.add_option('-n', '--name', help="Base name of the file. Default is to use the input file name.") parser.add_option('-m', '--metadata', action='append', default=[], metavar="KEY=VALUE", help="Attach arbitrary metadata to the image") options, args = parser.parse_args() if len(args) != 3: parser.error("Expected 3 arguments") baseurl, filepath, trovespec = args sha1 = sha1helper.sha1FileBin(filepath).encode('hex') fileobj = open(filepath, 'rb') st = os.fstat(fileobj.fileno()) mtime = datetime.datetime.utcfromtimestamp(st.st_mtime) trove = findTrove(trovespec) doc = { 'file_sha1': sha1, 'file_basename': options.name or os.path.basename(filepath), 'file_size': st.st_size, 'file_modified': str(mtime) + ' UTC', 'trove_name': str(trove.name), 'trove_version': str(trove.version), 'trove_flavor': str(trove.flavor), 'trove_timestamp': trove.version.trailingRevision().timeStamp, 'metadata': {}, } for meta in options.metadata: key, value = meta.split('=', 1) doc['metadata'][key] = value doc = json.dumps(doc) o = Opener() print 'Creating image resource' o.open(baseurl + '/add', data=doc, method='POST', headers=[('Content-Type', 'application/json')]) print 'Uploading image file' req = o.newRequest(baseurl + '/put/' + sha1, method='PUT', headers=[('Content-Type', 'application/octet-stream')]) req.setData(fileobj, st.st_size) o.open(req)
def testTbList(self):
    """
    Verify tblist structure: each file in the tree contributes two
    consecutive entries -- an old-format 3-field line followed by a
    new-format 4-field line.  The first three fields must agree between
    the two, and the new line's extra fourth field must be the file's
    SHA1 digest rendered as a hex string.
    """
    ts, baseDir = self.mktree()
    for i, fn in enumerate(ts.files):
        tbIndex = i * 2
        oldEntry = ts.tblist[tbIndex].split()
        newEntry = ts.tblist[tbIndex + 1].split()
        # failUnlessEqual is a deprecated unittest alias; use assertEqual.
        self.assertEqual(len(oldEntry), 3)
        self.assertEqual(len(newEntry), 4)
        self.assertEqual(oldEntry[0], fn)
        self.assertEqual(oldEntry[0], newEntry[0])
        self.assertEqual(oldEntry[1], newEntry[1])
        self.assertEqual(oldEntry[2], newEntry[2])
        binSha1 = sha1helper.sha1FileBin(os.path.join(baseDir, fn))
        sha1 = sha1helper.sha1ToString(binSha1)
        self.assertEqual(newEntry[3], sha1)
def cacheFilePath(self, cachePrefix, url):
    """
    Return a local cache path holding the contents for *url*, fetching
    from the repository when the cached copy is missing or its sha1
    digest does not match the one recorded in self.nameMap.

    @param cachePrefix: prefix used by getCachePath to build the location
    @param url: request object; url.filePath() keys both self.nameMap and
        the in-memory self.cacheMap
    @return: path to the (possibly freshly written) cached file
    """
    cachePath = self.getCachePath(cachePrefix, url)
    util.mkdirChain(os.path.dirname(cachePath))

    if url.filePath() in self.cacheMap:
        # don't check sha1 twice
        return self.cacheMap[url.filePath()]
    (troveName, troveVersion, pathId, troveFile, fileId,
     troveFileVersion, sha1, mode) = self.nameMap[url.filePath()]
    sha1Cached = None
    cachedMode = None
    if os.path.exists(cachePath):
        sha1Cached = sha1helper.sha1FileBin(cachePath)
    if sha1Cached != sha1:
        # Either nothing is cached yet (sha1Cached is None) or the cached
        # copy is stale; fetch fresh contents from the repository.
        if sha1Cached:
            log.info('%s sha1 %s != %s; fetching new...', url.filePath(),
                     sha1helper.sha1ToString(sha1),
                     sha1helper.sha1ToString(sha1Cached))
        else:
            log.info('%s not yet cached, fetching...', url.filePath())
        if self.quiet:
            csCallback = None
        else:
            csCallback = ChangesetCallback()
        f = self.repos.getFileContents([(fileId, troveFileVersion)],
                                       callback=csCallback)[0].get()
        # AtomicFile writes to a temp file and renames on commit, so a
        # failed fetch never leaves a partial file at cachePath.
        outF = util.AtomicFile(cachePath, chmod=0644)
        util.copyfileobj(f, outF)
        outF.commit()
        fileObj = self.repos.getFileVersion(pathId, fileId, troveFileVersion)
        fileObj.chmod(cachePath)
        cachedMode = os.stat(cachePath).st_mode & 0777

    # NOTE(review): on the cache-hit path cachedMode stays None, so this
    # chmod is re-applied every time a matching cached file is reused.
    if mode != cachedMode:
        os.chmod(cachePath, mode)
    self.cacheMap[url.filePath()] = cachePath
    return cachePath
def cacheFilePath(self, cachePrefix, url):
    """
    Return a local cache path holding the contents for *url*, fetching
    from the repository when the cached copy is missing or its sha1
    digest does not match the one recorded in self.nameMap.

    @param cachePrefix: prefix used by getCachePath to build the location
    @param url: request object; url.filePath() keys both self.nameMap and
        the in-memory self.cacheMap
    @return: path to the (possibly freshly written) cached file
    """
    cachePath = self.getCachePath(cachePrefix, url)
    util.mkdirChain(os.path.dirname(cachePath))

    if url.filePath() in self.cacheMap:
        # don't check sha1 twice
        return self.cacheMap[url.filePath()]
    (troveName, troveVersion, pathId, troveFile, fileId,
     troveFileVersion, sha1, mode) = self.nameMap[url.filePath()]
    sha1Cached = None
    cachedMode = None
    if os.path.exists(cachePath):
        sha1Cached = sha1helper.sha1FileBin(cachePath)
    if sha1Cached != sha1:
        # Either nothing is cached yet (sha1Cached is None) or the cached
        # copy is stale; fetch fresh contents from the repository.
        if sha1Cached:
            log.info('%s sha1 %s != %s; fetching new...', url.filePath(),
                     sha1helper.sha1ToString(sha1),
                     sha1helper.sha1ToString(sha1Cached))
        else:
            log.info('%s not yet cached, fetching...', url.filePath())
        if self.quiet:
            csCallback = None
        else:
            csCallback = ChangesetCallback()
        f = self.repos.getFileContents(
                [(fileId, troveFileVersion)], callback=csCallback)[0].get()
        # AtomicFile writes to a temp file and renames on commit, so a
        # failed fetch never leaves a partial file at cachePath.
        outF = util.AtomicFile(cachePath, chmod=0644)
        util.copyfileobj(f, outF)
        outF.commit()
        fileObj = self.repos.getFileVersion(
                pathId, fileId, troveFileVersion)
        fileObj.chmod(cachePath)
        cachedMode = os.stat(cachePath).st_mode & 0777

    # NOTE(review): on the cache-hit path cachedMode stays None, so this
    # chmod is re-applied every time a matching cached file is reused.
    if mode != cachedMode:
        os.chmod(cachePath, mode)
    self.cacheMap[url.filePath()] = cachePath
    return cachePath
def FileFromFilesystem(path, pathId, possibleMatch = None, inodeInfo = False,
                       assumeRoot=False, statBuf=None, sha1FailOk=False):
    """
    Build a file object describing the filesystem entry at *path*.

    @param path: filesystem path to examine (lstat'ed, so symlinks are
        not followed)
    @param pathId: path id to store in the returned file object
    @param possibleMatch: previously built file object; if the new entry
        matches it closely enough, possibleMatch itself is returned so
        that fileids stay stable
    @param inodeInfo: if True, return (fileObj, nlink, (rdev, inode))
        instead of just the file object
    @param assumeRoot: report owner/group as 'root' without consulting
        the uid/gid caches
    @param statBuf: optional pre-computed stat result used instead of
        os.lstat(path)
    @param sha1FailOk: if True, fall back to the empty sha1 when the
        contents cannot be read; otherwise the OSError propagates
    @raise FilesError: if the entry's type is unsupported
    """
    if statBuf:
        s = statBuf
    else:
        s = os.lstat(path)

    global userCache, groupCache, _havePrelink

    if assumeRoot:
        owner = 'root'
        group = 'root'
    elif isinstance(s.st_uid, basestring):
        # Already stringified -- some capsule code will fabricate a stat result
        # from e.g. a RPM header
        owner = s.st_uid
        group = s.st_gid
    else:
        # + is not a valid char in user/group names; if the uid is not mapped
        # to a user, prepend it with + and store it as a string
        try:
            owner = userCache.lookupId('/', s.st_uid)
        except KeyError:
            owner = '+%d' % s.st_uid
        try:
            group = groupCache.lookupId('/', s.st_gid)
        except KeyError:
            group = '+%d' % s.st_gid

    needsSha1 = 0
    inode = InodeStream(s.st_mode & 07777, s.st_mtime, owner, group)

    # Pick the file object class from the entry's type bits.
    if (stat.S_ISREG(s.st_mode)):
        f = RegularFile(pathId)
        # only regular files carry contents that need digesting below
        needsSha1 = 1
    elif (stat.S_ISLNK(s.st_mode)):
        f = SymbolicLink(pathId)
        # fabricated stat results may carry the target directly as .linkto
        if hasattr(s, 'linkto'):
            f.target.set(s.linkto)
        else:
            f.target.set(os.readlink(path))
    elif (stat.S_ISDIR(s.st_mode)):
        f = Directory(pathId)
    elif (stat.S_ISSOCK(s.st_mode)):
        f = Socket(pathId)
    elif (stat.S_ISFIFO(s.st_mode)):
        f = NamedPipe(pathId)
    elif (stat.S_ISBLK(s.st_mode)):
        f = BlockDevice(pathId)
        # splits st_rdev as 8-bit major / 8-bit minor
        f.devt.major.set(s.st_rdev >> 8)
        f.devt.minor.set(s.st_rdev & 0xff)
    elif (stat.S_ISCHR(s.st_mode)):
        f = CharacterDevice(pathId)
        f.devt.major.set(s.st_rdev >> 8)
        f.devt.minor.set(s.st_rdev & 0xff)
    else:
        raise FilesError("unsupported file type for %s" % path)

    f.inode = inode
    f.flags = FlagsStream(0)

    # assume we have a match if the FileMode and object type match
    if possibleMatch and (possibleMatch.__class__ == f.__class__) \
            and f.inode == possibleMatch.inode \
            and f.inode.mtime() == possibleMatch.inode.mtime() \
            and (not s.st_size or
                 (possibleMatch.hasContents and
                  s.st_size == possibleMatch.contents.size())):
        f.flags.set(possibleMatch.flags())
        return possibleMatch
    elif (possibleMatch and (isinstance(f, RegularFile) and
            isinstance(possibleMatch, RegularFile))
            and (f.inode.isExecutable())
            and f.inode.mtime() == possibleMatch.inode.mtime()
            and f.inode.owner == possibleMatch.inode.owner
            and f.inode.group == possibleMatch.inode.group
            and f.inode.perms == possibleMatch.inode.perms):
        # executable RegularFiles match even if there sizes are different
        # as long as everything else is the same; this is to stop size
        # changes from prelink from changing fileids
        return possibleMatch

    if needsSha1:
        f.contents = RegularFileStream()

        undoPrelink = False
        # _havePrelink: None means "not yet checked", False means "prelink
        # binary unavailable" -- hence the != False guard.
        if _havePrelink != False and f.inode.isExecutable():
            try:
                from conary.lib import elf
                if elf.prelinked(path):
                    undoPrelink = True
            except:
                # best-effort: if elf inspection is unavailable or fails,
                # just digest the file as-is
                pass
        if undoPrelink and _havePrelink is None:
            _havePrelink = bool(os.access(PRELINK_CMD[0], os.X_OK))
        if undoPrelink and _havePrelink:
            # Digest the un-prelinked contents streamed from `prelink -y`
            # so the recorded size/sha1 ignore prelink modifications.
            prelink = subprocess.Popen(
                    PRELINK_CMD + ("-y", path),
                    stdout = subprocess.PIPE,
                    close_fds = True,
                    shell = False)
            d = digestlib.sha1()
            content = prelink.stdout.read()
            size = 0
            while content:
                d.update(content)
                size += len(content)
                content = prelink.stdout.read()
            prelink.wait()
            f.contents.size.set(size)
            sha1 = d.digest()
        else:
            try:
                sha1 = sha1helper.sha1FileBin(path)
            except OSError:
                if sha1FailOk:
                    sha1 = sha1helper.sha1Empty
                else:
                    raise
            f.contents.size.set(s.st_size)
        f.contents.sha1.set(sha1)

    if inodeInfo:
        return (f, s.st_nlink, (s.st_rdev, s.st_ino))

    return f
def checkSha1(self, fileName, sum):
    """
    Assert that the SHA1 digest of *fileName* equals the expected hex
    string *sum*.
    """
    digest = sha1helper.sha1FileBin(fileName)
    assert(sha1helper.sha1ToString(digest) == sum)