def cleanup(self):
    """Periodic maintenance pass.

    Logs cache statistics, deletes unsigned files that are corrupt (name
    does not match their sha1) or older than ``self.max_file_age``, deletes
    signed files that have lost their unsigned counterpart, and expires
    old tokens.
    """
    log.info("Stats: %i hits; %i misses; %i uploads", self.hits, self.misses, self.uploads)
    log.debug("Pending: %s", self.pending)
    # Find files in unsigned that have bad hashes and delete them
    log.debug("Cleaning up...")
    now = time.time()
    for f in os.listdir(self.unsigned_dir):
        unsigned = os.path.join(self.unsigned_dir, f)
        # ".fn" files are filename records, not content files, so their
        # names are not sha1 digests.
        if not f.endswith(".fn") and sha1sum(unsigned) != f:
            log.info("Deleting %s with bad hash", unsigned)
            safe_unlink(unsigned)
            continue
        # Clean up old files
        if os.path.getmtime(unsigned) < now - self.max_file_age:
            log.info("Deleting %s (too old)", unsigned)
            safe_unlink(unsigned)
            continue
    # Find files in signed that don't have corresponding files in unsigned
    # and delete them
    for format_ in os.listdir(self.signed_dir):
        for f in os.listdir(os.path.join(self.signed_dir, format_)):
            signed = os.path.join(self.signed_dir, format_, f)
            # Don't delete logs
            if signed.endswith(".out"):
                continue
            unsigned = os.path.join(self.unsigned_dir, f)
            if not os.path.exists(unsigned):
                log.info("Deleting %s with no unsigned file", signed)
                safe_unlink(signed)
    # Clean out self.tokens and self.nonces.
    # Bug fix: iterate over a snapshot — self.delete_token() mutates
    # self.tokens, and mutating a dict while iterating it raises
    # RuntimeError on Python 3.
    now = time.time()
    for token, token_data in list(self.tokens.items()):
        info = unpack_token_data(token_data)
        if info['valid_to'] < now:
            log.debug("Deleting expired token %s", token)
            self.delete_token(token)
def cleanup(self):
    """Run one maintenance sweep: stats logging, stale/corrupt file pruning,
    and token expiry."""
    log.info("Stats: %i hits; %i misses; %i uploads", self.hits, self.misses, self.uploads)
    log.debug("Pending: %s", self.pending)
    # Find files in unsigned that have bad hashes and delete them
    log.debug("Cleaning up...")
    now = time.time()
    for f in os.listdir(self.unsigned_dir):
        unsigned = os.path.join(self.unsigned_dir, f)
        # ".fn" entries are filename records, so they are exempt from the
        # name-must-equal-sha1 rule.
        if not f.endswith(".fn") and sha1sum(unsigned) != f:
            log.info("Deleting %s with bad hash", unsigned)
            safe_unlink(unsigned)
            continue
        # Clean up old files
        if os.path.getmtime(unsigned) < now - self.max_file_age:
            log.info("Deleting %s (too old)", unsigned)
            safe_unlink(unsigned)
            continue
    # Find files in signed that don't have corresponding files in unsigned
    # and delete them
    for format_ in os.listdir(self.signed_dir):
        for f in os.listdir(os.path.join(self.signed_dir, format_)):
            signed = os.path.join(self.signed_dir, format_, f)
            # Don't delete logs
            if signed.endswith(".out"):
                continue
            unsigned = os.path.join(self.unsigned_dir, f)
            if not os.path.exists(unsigned):
                log.info("Deleting %s with no unsigned file", signed)
                safe_unlink(signed)
    # Clean out self.tokens and self.nonces.
    # Bug fix: take a snapshot of the items before iterating, because
    # delete_token() removes entries from self.tokens and mutating a dict
    # during iteration raises RuntimeError on Python 3.
    now = time.time()
    for token, token_data in list(self.tokens.items()):
        info = unpack_token_data(token_data)
        if info["valid_to"] < now:
            log.debug("Deleting expired token %s", token)
            self.delete_token(token)
format_, inputfile, destfile, filename = args
tmpfile = destfile + ".tmp"
# The passphrase arrives on stdin; an empty string means "no passphrase".
passphrase = sys.stdin.read().strip()
if not passphrase:
    passphrase = None
if format_ == "signcode":
    if not options.signcode_keydir:
        parser.error("keydir required when format is signcode")
    # Bug fix: validate the filename *before* copying, so a rejected file
    # doesn't leave a stray .tmp file behind.
    if not shouldSign(filename):
        parser.error("Invalid file for signing: %s" % filename)
        sys.exit(1)  # unreachable (parser.error exits); kept as a safety net
    copyfile(inputfile, tmpfile)
    signfile(tmpfile, options.signcode_keydir, options.fake, passphrase)
elif format_ == "gpg":
    if not options.gpg_homedir:
        parser.error("gpgdir required when format is gpg")
    safe_unlink(tmpfile)
    gpg_signfile(inputfile, tmpfile, options.gpg_homedir, options.fake, passphrase)
elif format_ == "mar":
    if not options.mar_cmd:
        parser.error("mar_cmd is required when format is mar")
    safe_unlink(tmpfile)
    mar_signfile(inputfile, tmpfile, options.mar_cmd, options.fake, passphrase)
# Atomically move the finished signature into place.
os.rename(tmpfile, destfile)
def handle_upload(self, environ, start_response, values, rest, next_nonce):
    """Accept an uploaded file for signing.

    Responds 202 if the file is already signed, already pending, or was
    accepted and queued; 403 for a disallowed filename; 400 for a too-small
    or hash-mismatched upload; 500 if reading the upload fails.
    Returns the (empty) WSGI response body.
    """
    format_ = rest[0]
    assert format_ in self.formats
    filehash = values["sha1"]
    filename = values["filename"]
    log.info("Request to %s sign %s (%s) from %s", format_, filename,
             filehash, environ["REMOTE_ADDR"])
    fn = os.path.join(self.unsigned_dir, filehash)
    headers = [("X-Nonce", next_nonce)]
    if os.path.exists(fn):
        # Validate the file
        mydigest = sha1sum(fn)
        if mydigest != filehash:
            log.warning("%s is corrupt; deleting (%s != %s)", fn, mydigest, filehash)
            safe_unlink(fn)
        # NOTE(review): this checks signed_dir/<hash>, while other code uses
        # signed_dir/<format>/<hash> — confirm this path is intentional.
        elif os.path.exists(os.path.join(self.signed_dir, filehash)):
            # Everything looks ok
            log.info("File already exists")
            start_response("202 File already exists", headers)
            return ""
        elif (filehash, format_) in self.pending:
            log.info("File is pending")
            start_response("202 File is pending", headers)
            return ""
        log.info("Not pending or already signed, re-queue")
    # Validate filename
    if not any(exp.match(filename) for exp in self.allowed_filenames):
        log.info("%s forbidden due to invalid filename: %s", environ["REMOTE_ADDR"], filename)
        start_response("403 Unacceptable filename", headers)
        return ""
    fd, tmpname = tempfile.mkstemp(dir=self.unsigned_dir)
    try:
        h = hashlib.new("sha1")
        s = 0
        # Stream the upload to disk in 1MB chunks, hashing as we go.
        with os.fdopen(fd, "wb") as fp:
            while True:
                data = values["filedata"].file.read(1024 ** 2)
                if not data:
                    break
                s += len(data)
                h.update(data)
                fp.write(data)
    except Exception:
        # Bug fix: the original logged the error and unlinked the temp file
        # but then *fell through*, using the partial state and trying to
        # rename the just-unlinked file. Fail the request instead.
        log.exception("Error downloading data")
        os.unlink(tmpname)
        start_response("500 Error reading upload", headers)
        return ""
    if s < self.min_filesize:
        os.unlink(tmpname)
        start_response("400 File too small", headers)
        return ""
    if h.hexdigest() != filehash:
        os.unlink(tmpname)
        # log.warn is a deprecated alias; use log.warning
        log.warning("Hash mismatch. Bad upload?")
        start_response("400 Hash mismatch", headers)
        return ""
    # Good to go! Rename the temporary filename to the real filename
    self.save_filename(filehash, filename)
    os.rename(tmpname, fn)
    self.submit_file(filehash, filename, format_)
    start_response("202 Accepted", headers)
    self.uploads += 1
    return ""
def do_GET(self, environ, start_response):
    """
    GET /sign/<format>/<hash>

    Streams the signed file back if it exists. On a miss, replies 404 —
    with X-Pending set if the job is queued, or after re-queueing the job
    when we still have the unsigned file and remember its filename.
    """
    try:
        _, magic, format_, filehash = environ["PATH_INFO"].split("/")
        assert magic == "sign"
        assert format_ in self.formats
    except (KeyError, ValueError, AssertionError):
        log.debug("bad request: %s", environ["PATH_INFO"])
        start_response("400 Bad Request", [])
        yield ""
        return
    filehash = os.path.basename(environ["PATH_INFO"])
    # Pre-bind fn so the IOError handler below can't hit a NameError if
    # something raises before it is assigned.
    fn = None
    try:
        pending = self.pending.get((filehash, format_))
        if pending:
            log.debug("Waiting for pending job")
            # Wait up to a minute for this to finish
            pending.wait(timeout=60)
            log.debug("Pending job finished!")
        fn = self.get_path(filehash, format_)
        filename = self.get_filename(filehash)
        if filename:
            log.debug("Looking for %s (%s)", fn, filename)
        else:
            log.debug("Looking for %s", fn)
        checksum = sha1sum(fn)
        # Bug fix: WSGI requires header values to be strings;
        # os.path.getsize returns an int.
        headers = [
            ("X-SHA1-Digest", checksum),
            ("Content-Length", str(os.path.getsize(fn))),
        ]
        fp = open(fn, "rb")
        # Touch the file so the age-based cleanup sees it as recently used.
        os.utime(fn, None)
        log.debug("%s is OK", fn)
        start_response("200 OK", headers)
        # Bug fix: close the file even if the client disconnects mid-stream
        # (the original leaked the handle).
        try:
            while True:
                data = fp.read(1024 ** 2)
                if not data:
                    break
                yield data
        finally:
            fp.close()
        self.hits += 1
    except IOError:
        log.debug("%s is missing", fn)
        headers = []
        fn = os.path.join(self.unsigned_dir, filehash)
        if (filehash, format_) in self.pending:
            log.info("File is pending, come back soon!")
            log.debug("Pending: %s", self.pending)
            headers.append(("X-Pending", "True"))
        # Maybe we have the file, but not for this format
        # If so, queue it up and return a pending response
        # This prevents the client from having to upload the file again
        elif os.path.exists(fn):
            log.debug("GET for file we already have, but not for the right format")
            # Validate the file
            myhash = sha1sum(fn)
            if myhash != filehash:
                log.warning("%s is corrupt; deleting (%s != %s)", fn, filehash, myhash)
                safe_unlink(fn)
            else:
                filename = self.get_filename(filehash)
                if filename:
                    self.submit_file(filehash, filename, format_)
                    log.info("File is pending, come back soon!")
                    headers.append(("X-Pending", "True"))
                else:
                    log.debug("I don't remember the filename; re-submit please!")
        else:
            self.misses += 1
        start_response("404 Not Found", headers)
        yield ""
def _worker(self):
    """Main worker loop: pop signing jobs off the queue and run the
    signing script on each.

    Exits when the queue is empty, when a falsy poison item is received,
    or after ``max_jobs`` iterations so the worker process gets recycled.
    """
    # How many jobs to process before exiting
    max_jobs = 10
    jobs = 0
    while True:
        # Event to signal when we're done. Also pre-bind the names the
        # exception handler uses, so a failure before they're assigned
        # can't raise NameError inside the handler (a bug in the original).
        e = None
        item = None
        outputfile = None
        logfile = None
        try:
            jobs += 1
            # Fall on our sword if we're too old
            if jobs >= max_jobs:
                break
            try:
                item = self.queue.get(block=False)
                if not item:
                    break
            except queue.Empty:
                log.debug("no items, exiting")
                break
            filehash, filename, format_, e = item
            log.info("Signing %s (%s - %s)", filename, format_, filehash)
            inputfile = os.path.join(self.inputdir, filehash)
            outputdir = os.path.join(self.outputdir, format_)
            outputfile = os.path.join(outputdir, filehash)
            logfile = outputfile + ".out"
            if not os.path.exists(outputdir):
                os.makedirs(outputdir)
            retval = run_signscript(
                self.signcmd, inputfile, outputfile, filename, format_,
                self.passphrases.get(format_)
            )
            if retval != 0:
                if os.path.exists(logfile):
                    # Close the log file promptly instead of leaking it
                    with open(logfile) as log_fp:
                        logoutput = log_fp.read()
                else:
                    logoutput = None
                log.warning("Signing failed %s (%s - %s)", filename, format_, filehash)
                log.warning("Signing log: %s", logoutput)
                safe_unlink(outputfile)
                self.app.messages.put(("errors", item, "signing script returned non-zero"))
                continue
            # Copy our signed result into unsigned and signed so if
            # somebody wants to get this file signed again, they get the
            # same results.
            outputhash = sha1sum(outputfile)
            log.debug("Copying result to %s", outputhash)
            copied_input = os.path.join(self.inputdir, outputhash)
            if not os.path.exists(copied_input):
                safe_copyfile(outputfile, copied_input)
            copied_output = os.path.join(self.outputdir, format_, outputhash)
            if not os.path.exists(copied_output):
                safe_copyfile(outputfile, copied_output)
            self.app.messages.put(("done", item, outputhash))
        except Exception:
            # Inconceivable! Something went wrong!
            # Remove our output, it might be corrupted
            if outputfile:
                safe_unlink(outputfile)
            if logfile and os.path.exists(logfile):
                with open(logfile) as log_fp:
                    logoutput = log_fp.read()
            else:
                logoutput = None
            log.exception("Exception signing file %s; output: %s ", item, logoutput)
            self.app.messages.put(("errors", item, "worker hit an exception while signing"))
        finally:
            if e:
                e.set()
    log.debug("Worker exiting")
logfile = os.path.abspath(options.logfile)
daemon_ctx = daemon.DaemonContext(
    # We do our own signal handling in run()
    signal_map={},
    working_directory=curdir,
)
daemon_ctx.open()
# gevent needs to be reinitialized after the hardcore forking action
gevent.reinit()
# Bug fix: use a context manager so the pidfile handle is closed promptly
# instead of being leaked until GC.
with open(pidfile, "w") as pid_fp:
    pid_fp.write(str(os.getpid()))
# Set up logging again! createDaemon has closed all our open file
# handles
setup_logging(options)
try:
    run(args[0], passphrases)
except BaseException:
    # BaseException so KeyboardInterrupt/SystemExit are also logged;
    # always re-raised so shutdown semantics are unchanged.
    log.exception("error running server")
    raise
finally:
    try:
        if options.daemonize:
            daemon_ctx.close()
            safe_unlink(pidfile)
        log.info("exiting")
    except Exception:
        log.exception("error shutting down")
tmpfile = destfile + ".tmp"
# The passphrase is supplied on stdin; an empty string means none.
passphrase = sys.stdin.read().strip()
if not passphrase:
    passphrase = None
if format_ == "signcode":
    if not options.signcode_keydir:
        parser.error("keydir required when format is signcode")
    # Bug fix: check shouldSign() before copying so a rejected file
    # doesn't leave a stray .tmp file on disk.
    if not shouldSign(filename):
        parser.error("Invalid file for signing: %s" % filename)
        sys.exit(1)  # unreachable (parser.error exits); defensive fallback
    copyfile(inputfile, tmpfile)
    signfile(tmpfile, options.signcode_keydir, options.fake, passphrase)
elif format_ == "gpg":
    if not options.gpg_homedir:
        parser.error("gpgdir required when format is gpg")
    safe_unlink(tmpfile)
    gpg_signfile(inputfile, tmpfile, options.gpg_homedir, options.fake, passphrase)
elif format_ == "mar":
    if not options.mar_cmd:
        parser.error("mar_cmd is required when format is mar")
    safe_unlink(tmpfile)
    mar_signfile(inputfile, tmpfile, options.mar_cmd, options.fake, passphrase)
# Atomically publish the result.
os.rename(tmpfile, destfile)
def handle_upload(self, environ, start_response, values, rest, next_nonce):
    """Accept an uploaded file for signing and queue it.

    Response codes: 202 (already signed / pending / accepted), 403 (bad
    filename), 400 (too small or hash mismatch), 500 (read failure).
    Returns the empty WSGI body.
    """
    format_ = rest[0]
    assert format_ in self.formats
    filehash = values['sha1']
    filename = values['filename']
    log.info("Request to %s sign %s (%s) from %s", format_, filename,
             filehash, environ['REMOTE_ADDR'])
    fn = os.path.join(self.unsigned_dir, filehash)
    headers = [('X-Nonce', next_nonce)]
    if os.path.exists(fn):
        # Validate the file
        mydigest = sha1sum(fn)
        if mydigest != filehash:
            log.warning("%s is corrupt; deleting (%s != %s)", fn, mydigest, filehash)
            safe_unlink(fn)
        # NOTE(review): this path omits the format_ subdirectory used
        # elsewhere for signed files — confirm it is intentional.
        elif os.path.exists(os.path.join(self.signed_dir, filehash)):
            # Everything looks ok
            log.info("File already exists")
            start_response("202 File already exists", headers)
            return ""
        elif (filehash, format_) in self.pending:
            log.info("File is pending")
            start_response("202 File is pending", headers)
            return ""
        log.info("Not pending or already signed, re-queue")
    # Validate filename
    if not any(exp.match(filename) for exp in self.allowed_filenames):
        log.info("%s forbidden due to invalid filename: %s", environ['REMOTE_ADDR'], filename)
        start_response("403 Unacceptable filename", headers)
        return ""
    fd, tmpname = tempfile.mkstemp(dir=self.unsigned_dir)
    try:
        h = hashlib.new('sha1')
        s = 0
        # Stream upload to disk in 1MB chunks, hashing as we go.
        with os.fdopen(fd, 'wb') as fp:
            while True:
                data = values['filedata'].file.read(1024 ** 2)
                if not data:
                    break
                s += len(data)
                h.update(data)
                fp.write(data)
    except Exception:
        # Bug fix: the original fell through after this handler, using the
        # partial state and renaming the just-unlinked temp file. Abort the
        # request instead.
        log.exception("Error downloading data")
        os.unlink(tmpname)
        start_response("500 Error reading upload", headers)
        return ""
    if s < self.min_filesize:
        os.unlink(tmpname)
        start_response("400 File too small", headers)
        return ""
    if h.hexdigest() != filehash:
        os.unlink(tmpname)
        # log.warn is a deprecated alias for log.warning
        log.warning("Hash mismatch. Bad upload?")
        start_response("400 Hash mismatch", headers)
        return ""
    # Good to go! Rename the temporary filename to the real filename
    self.save_filename(filehash, filename)
    os.rename(tmpname, fn)
    self.submit_file(filehash, filename, format_)
    start_response("202 Accepted", headers)
    self.uploads += 1
    return ""
def do_GET(self, environ, start_response):
    """
    GET /sign/<format>/<hash>

    Streams the signed file if present; otherwise replies 404, setting
    X-Pending when a job is queued or can be re-queued from the unsigned
    copy on disk.
    """
    try:
        _, magic, format_, filehash = environ['PATH_INFO'].split('/')
        assert magic == 'sign'
        assert format_ in self.formats
    except (KeyError, ValueError, AssertionError):
        log.debug("bad request: %s", environ['PATH_INFO'])
        start_response("400 Bad Request", [])
        yield ""
        return
    filehash = os.path.basename(environ['PATH_INFO'])
    # Pre-bind fn so the IOError handler can't NameError if an exception
    # is raised before the assignment below.
    fn = None
    try:
        pending = self.pending.get((filehash, format_))
        if pending:
            log.debug("Waiting for pending job")
            # Wait up to a minute for this to finish
            pending.wait(timeout=60)
            log.debug("Pending job finished!")
        fn = self.get_path(filehash, format_)
        filename = self.get_filename(filehash)
        if filename:
            log.debug("Looking for %s (%s)", fn, filename)
        else:
            log.debug("Looking for %s", fn)
        checksum = sha1sum(fn)
        # Bug fix: WSGI header values must be strings, not ints.
        headers = [
            ('X-SHA1-Digest', checksum),
            ('Content-Length', str(os.path.getsize(fn))),
        ]
        fp = open(fn, 'rb')
        # Refresh mtime so age-based cleanup keeps recently-served files.
        os.utime(fn, None)
        log.debug("%s is OK", fn)
        start_response("200 OK", headers)
        # Bug fix: make sure the handle is closed even if the consumer
        # abandons the generator mid-stream.
        try:
            while True:
                data = fp.read(1024 ** 2)
                if not data:
                    break
                yield data
        finally:
            fp.close()
        self.hits += 1
    except IOError:
        log.debug("%s is missing", fn)
        headers = []
        fn = os.path.join(self.unsigned_dir, filehash)
        if (filehash, format_) in self.pending:
            log.info("File is pending, come back soon!")
            log.debug("Pending: %s", self.pending)
            headers.append(('X-Pending', 'True'))
        # Maybe we have the file, but not for this format
        # If so, queue it up and return a pending response
        # This prevents the client from having to upload the file again
        elif os.path.exists(fn):
            log.debug(
                "GET for file we already have, but not for the right format"
            )
            # Validate the file
            myhash = sha1sum(fn)
            if myhash != filehash:
                log.warning("%s is corrupt; deleting (%s != %s)", fn, filehash, myhash)
                safe_unlink(fn)
            else:
                filename = self.get_filename(filehash)
                if filename:
                    self.submit_file(filehash, filename, format_)
                    log.info("File is pending, come back soon!")
                    headers.append(('X-Pending', 'True'))
                else:
                    log.debug(
                        "I don't remember the filename; re-submit please!")
        else:
            self.misses += 1
        start_response("404 Not Found", headers)
        yield ""
def _worker(self):
    """Main worker process: pop items off the queue and sign them.

    Stops on an empty queue, a falsy poison item, or after ``max_jobs``
    iterations (so the worker process is recycled).
    """
    # How many jobs to process before exiting
    max_jobs = 10
    jobs = 0
    while True:
        # Event to signal when we're done. The other names are pre-bound
        # so the exception handler below can't raise NameError when a
        # failure happens before they are assigned (bug in the original).
        e = None
        item = None
        outputfile = None
        logfile = None
        try:
            jobs += 1
            # Fall on our sword if we're too old
            if jobs >= max_jobs:
                break
            try:
                item = self.queue.get(block=False)
                if not item:
                    break
            except queue.Empty:
                log.debug("no items, exiting")
                break
            filehash, filename, format_, e = item
            log.info("Signing %s (%s - %s)", filename, format_, filehash)
            inputfile = os.path.join(self.inputdir, filehash)
            outputdir = os.path.join(self.outputdir, format_)
            outputfile = os.path.join(outputdir, filehash)
            logfile = outputfile + ".out"
            if not os.path.exists(outputdir):
                os.makedirs(outputdir)
            retval = run_signscript(self.signcmd, inputfile, outputfile,
                                    filename, format_,
                                    self.passphrases.get(format_))
            if retval != 0:
                if os.path.exists(logfile):
                    # with-block closes the log promptly (original leaked it)
                    with open(logfile) as log_fp:
                        logoutput = log_fp.read()
                else:
                    logoutput = None
                log.warning("Signing failed %s (%s - %s)", filename, format_, filehash)
                log.warning("Signing log: %s", logoutput)
                safe_unlink(outputfile)
                self.app.messages.put(
                    ('errors', item, 'signing script returned non-zero'))
                continue
            # Copy our signed result into unsigned and signed so if
            # somebody wants to get this file signed again, they get the
            # same results.
            outputhash = sha1sum(outputfile)
            log.debug("Copying result to %s", outputhash)
            copied_input = os.path.join(self.inputdir, outputhash)
            if not os.path.exists(copied_input):
                safe_copyfile(outputfile, copied_input)
            copied_output = os.path.join(self.outputdir, format_, outputhash)
            if not os.path.exists(copied_output):
                safe_copyfile(outputfile, copied_output)
            self.app.messages.put(('done', item, outputhash))
        except Exception:
            # Inconceivable! Something went wrong!
            # Remove our output, it might be corrupted
            if outputfile:
                safe_unlink(outputfile)
            if logfile and os.path.exists(logfile):
                with open(logfile) as log_fp:
                    logoutput = log_fp.read()
            else:
                logoutput = None
            log.exception("Exception signing file %s; output: %s ", item, logoutput)
            self.app.messages.put(
                ('errors', item, 'worker hit an exception while signing'))
        finally:
            if e:
                e.set()
    log.debug("Worker exiting")
logfile = os.path.abspath(options.logfile)
daemon_ctx = daemon.DaemonContext(
    # We do our own signal handling in run()
    signal_map={},
    working_directory=curdir,
)
daemon_ctx.open()
# gevent needs to be reinitialized after the hardcore forking action
gevent.reinit()
# Bug fix: close the pidfile handle deterministically rather than
# relying on GC to close the anonymous file object.
with open(pidfile, 'w') as pid_fp:
    pid_fp.write(str(os.getpid()))
# Set up logging again! createDaemon has closed all our open file
# handles
setup_logging(options)
try:
    run(args[0], passphrases)
except BaseException:
    # BaseException keeps KeyboardInterrupt/SystemExit logged, as the
    # original bare except did; re-raise preserves shutdown behavior.
    log.exception("error running server")
    raise
finally:
    try:
        if options.daemonize:
            daemon_ctx.close()
            safe_unlink(pidfile)
        log.info("exiting")
    except Exception:
        log.exception("error shutting down")