def testCopyFile(self):
    tmp = os.path.join(self.tmpdir, "t")
    copyfile(__file__, tmp)
    self.assertEquals(sha1sum(__file__), sha1sum(tmp))
    self.assertEquals(os.stat(__file__).st_mode, os.stat(tmp).st_mode)
    self.assertEquals(
        int(os.stat(__file__).st_mtime),
        int(os.stat(tmp).st_mtime))
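# The test above exercises copyfile() preserving content, mode and mtime.
# Below is a minimal sketch (an assumption, not this project's actual helper)
# of a copyfile() with the `copymode` keyword used elsewhere in this code;
# shutil.copystat carries over the permission bits and timestamps that the
# assertions check.
import shutil


def copyfile_sketch(src, dst, copymode=True):
    # Copy the bytes first
    shutil.copyfile(src, dst)
    if copymode:
        # Copy permission bits and atime/mtime from the source
        shutil.copystat(src, dst)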
def rememberFile(self, hsh, filename):
    """Remember this file in our cache.

    `filename` will be copied into our cache, identified by `hsh`.

    Usually `hsh` is the sha1sum of the unsigned file, and `filename`
    points to the signed file.
    """
    dstfile = os.path.join(self.cacheDir, hsh, os.path.basename(filename))
    dstdir = os.path.dirname(dstfile)
    if not os.path.exists(dstdir):
        os.makedirs(dstdir, 0755)
    # Copying files isn't atomic, so copy to a temporary file first, and
    # then rename to the final destination when we're done copying
    copyfile(filename, dstfile + ".tmp")
    os.rename(dstfile + ".tmp", dstfile)
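# Hedged sketch of the cache-lookup counterpart to rememberFile(). The real
# class provides a getFile() method (used by signPackage() below); this only
# illustrates the layout rememberFile() writes, <cacheDir>/<hsh>/<basename>.
# The exact behaviour of the project's getFile() is assumed, not confirmed.
import os


def getFile_sketch(self, hsh, filename):
    """Return the cached copy for `hsh` and basename(filename), or None."""
    cached = os.path.join(self.cacheDir, hsh, os.path.basename(filename))
    if os.path.exists(cached):
        return cached
    return None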
if len(args) != 4:
    parser.error("Incorrect number of arguments")

format_, inputfile, destfile, filename = args
tmpfile = destfile + ".tmp"

passphrase = sys.stdin.read().strip()
if passphrase == '':
    passphrase = None

if format_ == "signcode":
    if not options.signcode_keydir:
        parser.error("keydir required when format is signcode")
    copyfile(inputfile, tmpfile)
    if shouldSign(filename):
        signfile(tmpfile, options.signcode_keydir, options.fake,
                 passphrase, timestamp=options.signcode_timestamp)
    else:
        parser.error("Invalid file for signing: %s" % filename)
        sys.exit(1)
elif format_ == "osslsigncode":
    safe_unlink(tmpfile)
    if not options.signcode_keydir:
        parser.error("keydir required when format is osslsigncode")
    if shouldSign(filename):
        osslsigncode_signfile(inputfile,
def remote_signfile(options, urls, filename, fmt, token, dest=None):
    filehash = sha1sum(filename)
    if dest is None:
        dest = filename

    if fmt == 'gpg':
        dest += '.asc'

    parent_dir = os.path.dirname(os.path.abspath(dest))
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)

    # Check the cache
    cached_fn = None
    if options.cachedir:
        log.debug("%s: checking cache", filehash)
        cached_fn = os.path.join(options.cachedir, fmt, filehash)
        if os.path.exists(cached_fn):
            log.info("%s: exists in the cache; copying to %s", filehash, dest)
            cached_fp = open(cached_fn, 'rb')
            tmpfile = dest + '.tmp'
            fp = open(tmpfile, 'wb')
            hsh = hashlib.new('sha1')
            while True:
                data = cached_fp.read(1024 ** 2)
                if not data:
                    break
                hsh.update(data)
                fp.write(data)
            fp.close()
            newhash = hsh.hexdigest()
            if os.path.exists(dest):
                os.unlink(dest)
            os.rename(tmpfile, dest)
            log.info("%s: OK", filehash)
            # See if we should re-sign NSS
            if options.nsscmd and filehash != newhash and os.path.exists(os.path.splitext(filename)[0] + ".chk"):
                cmd = '%s "%s"' % (options.nsscmd, dest)
                log.info("Regenerating .chk file")
                log.debug("Running %s", cmd)
                check_call(cmd, shell=True)
            return True

    errors = 0
    pendings = 0
    max_errors = 20
    max_pending_tries = 300
    while True:
        if pendings >= max_pending_tries:
            log.error("%s: giving up after %i tries", filehash, pendings)
            return False
        if errors >= max_errors:
            log.error("%s: giving up after %i tries", filehash, errors)
            return False
        # Try to get a previously signed copy of this file
        try:
            url = urls[0]
            log.info("%s: processing %s on %s", filehash, filename, url)
            req = getfile(url, filehash, fmt)
            headers = req.info()
            responsehash = headers['X-SHA1-Digest']
            tmpfile = dest + '.tmp'
            fp = open(tmpfile, 'wb')
            while True:
                data = req.read(1024 ** 2)
                if not data:
                    break
                fp.write(data)
            fp.close()
            newhash = sha1sum(tmpfile)
            if newhash != responsehash:
                log.warn(
                    "%s: hash mismatch; trying to download again", filehash)
                os.unlink(tmpfile)
                errors += 1
                continue
            if os.path.exists(dest):
                os.unlink(dest)
            os.rename(tmpfile, dest)
            log.info("%s: OK", filehash)
            # See if we should re-sign NSS
            if options.nsscmd and filehash != responsehash and os.path.exists(os.path.splitext(filename)[0] + ".chk"):
                cmd = '%s "%s"' % (options.nsscmd, dest)
                log.info("Regenerating .chk file")
                log.debug("Running %s", cmd)
                check_call(cmd, shell=True)
            # Possibly write to our cache
            if cached_fn:
                cached_dir = os.path.dirname(cached_fn)
                if not os.path.exists(cached_dir):
                    log.debug("Creating %s", cached_dir)
                    os.makedirs(cached_dir)
                log.info("Copying %s to cache %s", dest, cached_fn)
                copyfile(dest, cached_fn)
            break
        except urllib2.HTTPError, e:
            try:
                if 'X-Pending' in e.headers:
                    log.debug("%s: pending; try again in a bit", filehash)
                    time.sleep(1)
                    pendings += 1
                    continue
            except:
                raise
            errors += 1
            # That didn't work...so let's upload it
            log.info("%s: uploading for signing", filehash)
            req = None
            try:
                try:
                    nonce = open(options.noncefile, 'rb').read()
                except IOError:
                    nonce = ""
                req = uploadfile(url, filename, fmt, token, nonce=nonce)
                nonce = req.info()['X-Nonce']
                open(options.noncefile, 'wb').write(nonce)
            except urllib2.HTTPError, e:
                # python2.5 doesn't think 202 is ok...but really it is!
                if 'X-Nonce' in e.headers:
                    log.debug("updating nonce")
                    nonce = e.headers['X-Nonce']
                    open(options.noncefile, 'wb').write(nonce)
                if e.code != 202:
                    log.info("%s: error uploading file for signing: %s %s",
                             filehash, e.code, e.msg)
                    urls.pop(0)
                    urls.append(url)
            except (urllib2.URLError, socket.error, httplib.BadStatusLine):
                # Try again in a little while
                log.info("%s: connection error; trying again soon", filehash)
                # Move the current url to the back
                urls.pop(0)
                urls.append(url)
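# Hedged sketch of the sha1sum() helper relied on throughout: stream the file
# through hashlib in 1MB chunks and return the hex digest, mirroring the
# manual hashing loop in the cache branch above. The helper's actual
# implementation in this codebase may differ.
import hashlib


def sha1sum_sketch(path):
    h = hashlib.new('sha1')
    fp = open(path, 'rb')
    while True:
        data = fp.read(1024 ** 2)
        if not data:
            break
        h.update(data)
    fp.close()
    return h.hexdigest()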
if len(args) != 4:
    parser.error("Incorrect number of arguments")

format_, inputfile, destfile, filename = args
tmpfile = destfile + ".tmp"

passphrase = sys.stdin.read().strip()
if passphrase == "":
    passphrase = None

if format_ == "signcode":
    if not options.signcode_keydir:
        parser.error("keydir required when format is signcode")
    copyfile(inputfile, tmpfile)
    if shouldSign(filename):
        signfile(tmpfile, options.signcode_keydir, options.fake,
                 passphrase, timestamp=options.signcode_timestamp)
    else:
        parser.error("Invalid file for signing: %s" % filename)
        sys.exit(1)
elif format_ == "osslsigncode":
    safe_unlink(tmpfile)
    if not options.signcode_keydir:
        parser.error("keydir required when format is osslsigncode")
    if shouldSign(filename):
        osslsigncode_signfile(
            inputfile, tmpfile, options.signcode_keydir, options.fake,
def signPackage(self, pkgfile, dstdir, remember=False, compressed=False):
    """Sign `pkgfile`, putting the results into `dstdir`.

    If `remember` is True, then cache the newly signed files into our
    cache.

    If `compressed` is True, then the contents of pkgfile are bz2
    compressed (e.g. in a mar file), and should be decompressed before
    signing.
    """
    log.info("Processing %s", pkgfile)
    basename = os.path.basename(pkgfile)
    dstfile = convertPath(pkgfile, dstdir)

    # Keep track of our output in a list here, and we can output everything
    # when we're done. This is to avoid interleaving the output from
    # multiple processes.
    logs = []
    logs.append("Repacking %s to %s" % (pkgfile, dstfile))

    parentdir = os.path.dirname(dstfile)
    if not os.path.exists(parentdir):
        os.makedirs(parentdir, 0755)

    nFiles = 0
    cacheHits = 0
    nSigned = 0
    tmpdir = tempfile.mkdtemp()
    try:
        # Unpack it
        logs.append("Unpacking %s to %s" % (pkgfile, tmpdir))
        unpackfile(pkgfile, tmpdir)

        # Swap in files we have already signed
        for f in findfiles(tmpdir):
            # We don't need to do anything to files we're not going to sign
            if not shouldSign(f):
                continue

            h = sha1sum(f)
            basename = os.path.basename(f)
            nFiles += 1
            chk = getChkFile(f)

            # Look in the cache for another file with the same original
            # hash
            cachedFile = self.getFile(h, f)
            if cachedFile:
                cacheHits += 1
                assert os.path.basename(cachedFile) == basename
                logs.append("Copying %s from %s" % (basename, cachedFile))
                # Preserve the original file's mode; don't use the cached
                # mode. We usually process installer .exe's first, and 7z
                # doesn't preserve the file mode, so the cached copies of the
                # files are mode 0666. In the mar files, executables have mode
                # 0777, so we want to preserve that.
                copyfile(cachedFile, f, copymode=False)
                if chk:
                    # If there's a .chk file for this file, copy that out of
                    # cache. It's an error if this file doesn't exist in cache
                    cachedChk = self.getFile(h, chk)
                    logs.append("Copying %s from %s" %
                                (os.path.basename(cachedChk), cachedChk))
                    copyfile(cachedChk, chk, copymode=False)
            else:
                # We need to sign this file
                # If this file is compressed, check if we have a cached copy
                # that is uncompressed
                if compressed:
                    bunzip2(f)
                    h2 = sha1sum(f)
                    cachedFile = self.getFile(h2, f)
                    if cachedFile:
                        # We have a cached copy of this file that is
                        # uncompressed. So copy it into our dstdir, and
                        # recompress it, and save it for future use.
                        cacheHits += 1
                        assert os.path.basename(cachedFile) == basename
                        logs.append("Copying %s from uncompressed %s" %
                                    (basename, cachedFile))
                        # See note above about not copying the file's mode
                        copyfile(cachedFile, f, copymode=False)
                        bzip2(f)
                        if chk:
                            # If there's a .chk file for this file, copy that
                            # out of cache. It's an error if this file doesn't
                            # exist in cache
                            cachedChk = self.getFile(h2, chk)
                            logs.append("Copying %s from %s" %
                                        (os.path.basename(cachedChk), cachedChk))
                            copyfile(cachedChk, chk, copymode=False)
                            bzip2(chk)
                        if remember:
                            logs.append("Caching compressed %s as %s" % (f, h))
                            self.rememberFile(h, f)
                            # Remember any regenerated chk files
                            if chk:
                                logs.append("Caching %s as %s" % (chk, h))
                                self.rememberFile(h, chk)
                        continue

                nSigned += 1
                logs.append("Signing %s" % f)
                signfile(f, self.keydir, self.fake)
                if compressed:
                    bzip2(f)
                    # If we have a chk file, compress that too
                    if chk:
                        bzip2(chk)
                if remember:
                    logs.append("Caching %s as %s" % (f, h))
                    self.rememberFile(h, f)
                    # Remember any regenerated chk files
                    if chk:
                        logs.append("Caching %s as %s" % (chk, h))
                        self.rememberFile(h, chk)

        # Repack it
        logs.append("Packing %s" % dstfile)
        packfile(dstfile, tmpdir)

        # Sign installer
        if dstfile.endswith('.exe') and not self.unsignedInstallers:
            logs.append("Signing %s" % dstfile)
            signfile(dstfile, self.keydir, self.fake)

        return nFiles, cacheHits, nSigned
    except:
        log.exception("Error signing %s", pkgfile)
        return False
    finally:
        # Clean up after ourselves, and output our logs
        shutil.rmtree(tmpdir)
        log.info("\n ".join(logs))
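# Hedged sketch of the bzip2()/bunzip2() helpers that signPackage() calls when
# `compressed` is True. It assumes they (de)compress a file in place, which is
# how members of a mar file are stored; reading the whole file into memory is
# a simplification for illustration, not the project's actual helpers.
import bz2


def bzip2_sketch(path):
    # Compress the file's contents in place
    data = open(path, 'rb').read()
    open(path, 'wb').write(bz2.compress(data))


def bunzip2_sketch(path):
    # Decompress the file's contents in place
    data = open(path, 'rb').read()
    open(path, 'wb').write(bz2.decompress(data))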
def remote_signfile(options, urls, filename, fmt, token, dest=None):
    filehash = sha1sum(filename)
    if dest is None:
        dest = filename

    if fmt == 'gpg':
        dest += '.asc'
    elif fmt in ('widevine', 'widevine_blessed'):
        dest += '.sig'

    parent_dir = os.path.dirname(os.path.abspath(dest))
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)

    # Check the cache
    cached_fn = None
    if options.cachedir:
        log.debug("%s: checking cache", filehash)
        cached_fn = os.path.join(options.cachedir, fmt, filehash)
        if os.path.exists(cached_fn):
            log.info("%s: exists in the cache; copying to %s", filehash, dest)
            cached_fp = open(cached_fn, 'rb')
            tmpfile = dest + '.tmp'
            fp = open(tmpfile, 'wb')
            hsh = hashlib.new('sha1')
            while True:
                data = cached_fp.read(1024 ** 2)
                if not data:
                    break
                hsh.update(data)
                fp.write(data)
            fp.close()
            newhash = hsh.hexdigest()
            if os.path.exists(dest):
                os.unlink(dest)
            os.rename(tmpfile, dest)
            log.info("%s: OK", filehash)
            # See if we should re-sign NSS
            if options.nsscmd and filehash != newhash and os.path.exists(os.path.splitext(filename)[0] + ".chk"):
                cmd = '%s "%s"' % (options.nsscmd, dest)
                log.info("Regenerating .chk file")
                log.debug("Running %s", cmd)
                check_call(cmd, shell=True)
            return True

    errors = 0
    pendings = 0
    max_errors = 5
    # It takes the server ~60s to respond to an attempt to get a signed file.
    # We want to give up after about 5 minutes, so 300 / 60 = 5 tries.
    max_pending_tries = 5
    while True:
        if pendings >= max_pending_tries:
            log.error("%s: giving up after %i tries", filehash, pendings)
            # If we've given up on the current server, try a different one!
            urls.pop(0)
            urls.append(url)
            errors += 1
            # Pendings needs to be reset to give the next server a fair shake.
            pendings = 0
        if errors >= max_errors:
            log.error("%s: giving up after %i tries", filehash, errors)
            return False
        # Try to get a previously signed copy of this file
        try:
            url = urls[0]
            log.info("%s: processing %s on %s", filehash, filename, url)
            req = getfile(url, filehash, fmt)
            headers = req.info()
            responsehash = headers['X-SHA1-Digest']
            tmpfile = dest + '.tmp'
            fp = open(tmpfile, 'wb')
            while True:
                data = req.read(1024 ** 2)
                if not data:
                    break
                fp.write(data)
            fp.close()
            newhash = sha1sum(tmpfile)
            if newhash != responsehash:
                log.warn(
                    "%s: hash mismatch; trying to download again", filehash)
                os.unlink(tmpfile)
                errors += 1
                continue
            if os.path.exists(dest):
                os.unlink(dest)
            os.rename(tmpfile, dest)
            log.info("%s: OK", filehash)
            # See if we should re-sign NSS
            if options.nsscmd and filehash != responsehash and os.path.exists(os.path.splitext(filename)[0] + ".chk"):
                cmd = '%s "%s"' % (options.nsscmd, dest)
                log.info("Regenerating .chk file")
                log.debug("Running %s", cmd)
                check_call(cmd, shell=True)
            # Possibly write to our cache
            if cached_fn:
                cached_dir = os.path.dirname(cached_fn)
                if not os.path.exists(cached_dir):
                    log.debug("Creating %s", cached_dir)
                    os.makedirs(cached_dir)
                log.info("Copying %s to cache %s", dest, cached_fn)
                copyfile(dest, cached_fn)
            break
        except urllib2.HTTPError, e:
            try:
                if 'X-Pending' in e.headers:
                    log.debug("%s: pending; try again in a bit", filehash)
                    time.sleep(15)
                    pendings += 1
                    continue
            except:
                raise
            errors += 1
            # That didn't work...so let's upload it
            log.info("%s: uploading for signing", filehash)
            req = None
            try:
                try:
                    nonce = open(options.noncefile, 'rb').read()
                except IOError:
                    nonce = ""
                req = uploadfile(url, filename, fmt, token, nonce=nonce)
                nonce = req.info()['X-Nonce']
                open(options.noncefile, 'wb').write(nonce)
            except urllib2.HTTPError, e:
                # python2.5 doesn't think 202 is ok...but really it is!
                if 'X-Nonce' in e.headers:
                    log.debug("updating nonce")
                    nonce = e.headers['X-Nonce']
                    open(options.noncefile, 'wb').write(nonce)
                if e.code != 202:
                    log.exception("%s: error uploading file for signing: %s %s",
                                  filehash, e.code, e.msg)
                    urls.pop(0)
                    urls.append(url)
            except (urllib2.URLError, socket.error, httplib.BadStatusLine):
                # Try again in a little while
                log.exception("%s: connection error; trying again soon", filehash)
                # Move the current url to the back
                urls.pop(0)
                urls.append(url)