def init(dest, bare=False):
    """Initializes an empty repository at dest. If dest exists and isn't
    empty, it will be removed. If `bare` is True, then a bare repo will be
    created."""
    if not os.path.isdir(dest):
        log.info("removing %s", dest)
        safe_unlink(dest)
    else:
        for f in os.listdir(dest):
            f = os.path.join(dest, f)
            log.info("removing %s", f)
            if os.path.isdir(f):
                remove_path(f)
            else:
                safe_unlink(f)

    # Git will freak out if it tries to create intermediate directories for
    # dest, and then they exist. We can hit this when pulling in multiple
    # repos in parallel to shared repo paths that contain common parent
    # directories. Let's create all the directories first.
    try:
        os.makedirs(dest)
    except OSError as e:
        if e.errno == 20:
            # errno 20 is ENOTDIR: one of the parents of dest isn't a
            # directory
            raise
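
# A minimal sketch of the same makedirs guard using the named errno constant
# instead of the magic number 20; assumes only the standard library, and
# ensure_dir is a hypothetical helper name.
import errno
import os

def ensure_dir(dest):
    try:
        os.makedirs(dest)
    except OSError as e:
        if e.errno == errno.ENOTDIR:
            # one of the parents of dest isn't a directory
            raise
        # an already-existing directory (EEXIST) is fine here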

def cleanup(self):
    log.info("Stats: %i hits; %i misses; %i uploads", self.hits,
             self.misses, self.uploads)
    log.debug("Pending: %s", self.pending)
    log.debug("Cleaning up...")

    # Find files in unsigned that are too old and delete them
    now = time.time()
    for f in os.listdir(self.unsigned_dir):
        unsigned = os.path.join(self.unsigned_dir, f)
        if os.path.getmtime(unsigned) < now - self.max_file_age:
            log.info("Deleting %s (too old)", unsigned)
            safe_unlink(unsigned)

    # Find files in signed that don't have corresponding files in unsigned
    # and delete them
    for format_ in os.listdir(self.signed_dir):
        for f in os.listdir(os.path.join(self.signed_dir, format_)):
            signed = os.path.join(self.signed_dir, format_, f)
            unsigned = os.path.join(self.unsigned_dir, f)
            if not os.path.exists(unsigned):
                log.info("Deleting %s with no unsigned file", signed)
                safe_unlink(signed)

    # Clean out self.tokens and self.nonces
    now = time.time()
    for token, token_data in self.tokens.items():
        info = unpack_token_data(token_data)
        if info['valid_to'] < now:
            log.debug("Deleting expired token %s", token)
            self.delete_token(token)

def handle_upload(self, environ, start_response, values, rest, next_nonce):
    format_ = rest[0]
    assert format_ in self.formats
    filehash = values['sha1']
    filename = values['filename']
    log.info("Request to %s sign %s (%s) from %s", format_, filename,
             filehash, environ['REMOTE_ADDR'])
    fn = os.path.join(self.unsigned_dir, filehash)
    headers = [('X-Nonce', next_nonce)]
    if os.path.exists(fn):
        # Validate the file
        mydigest = sha1sum(fn)
        if mydigest != filehash:
            log.warning("%s is corrupt; deleting (%s != %s)",
                        fn, mydigest, filehash)
            safe_unlink(fn)
        elif os.path.exists(os.path.join(self.signed_dir, filehash)):
            # Everything looks ok
            log.info("File already exists")
            start_response("202 File already exists", headers)
            return ""
        elif (filehash, format_) in self.pending:
            log.info("File is pending")
            start_response("202 File is pending", headers)
            return ""
        log.info("Not pending or already signed, re-queue")

    # Validate filename
    if not any(exp.match(filename) for exp in self.allowed_filenames):
        log.info("%s forbidden due to invalid filename: %s",
                 environ['REMOTE_ADDR'], filename)
        start_response("403 Unacceptable filename", headers)
        return ""

    tmpname = None
    try:
        fd, tmpname = tempfile.mkstemp(dir=self.unsigned_dir)
        fp = os.fdopen(fd, 'wb')
        h = hashlib.new('sha1')
        s = 0
        while True:
            data = values['filedata'].file.read(1024 ** 2)
            if not data:
                break
            s += len(data)
            h.update(data)
            fp.write(data)
        fp.close()
    except:
        log.exception("Error downloading data")
        if tmpname and os.path.exists(tmpname):
            os.unlink(tmpname)
        # Don't fall through with a partial upload
        start_response("500 Internal Server Error", headers)
        return ""

    if s < self.min_filesize:
        if os.path.exists(tmpname):
            os.unlink(tmpname)
        start_response("400 File too small", headers)
        return ""

    if self.max_filesize[format_] and s > self.max_filesize[format_]:
        if os.path.exists(tmpname):
            os.unlink(tmpname)
        start_response("400 File too large", headers)
        return ""

    if h.hexdigest() != filehash:
        if os.path.exists(tmpname):
            os.unlink(tmpname)
        log.warning("Hash mismatch. Bad upload?")
        start_response("400 Hash mismatch", headers)
        return ""

    # Good to go! Rename the temporary file to the real filename
    self.save_filename(filehash, filename)
    os.rename(tmpname, fn)
    self.submit_file(filehash, filename, format_)
    start_response("202 Accepted", headers)
    self.uploads += 1
    return ""

def _worker(self):
    # Main worker process
    # We pop items off the queue and process them

    # How many jobs to process before exiting
    max_jobs = 10
    jobs = 0
    while True:
        # Event to signal when we're done
        e = None
        outputfile = logfile = None
        try:
            jobs += 1
            # Fall on our sword if we're too old
            if jobs >= max_jobs:
                break

            try:
                item = self.queue.get(block=False)
                if not item:
                    break
            except queue.Empty:
                log.debug("no items, exiting")
                break

            filehash, filename, format_, e = item
            log.info("Signing %s (%s - %s)", filename, format_, filehash)

            inputfile = os.path.join(self.inputdir, filehash)
            outputfile = os.path.join(self.outputdir, format_, filehash)
            logfile = outputfile + ".out"
            if not os.path.exists(os.path.join(self.outputdir, format_)):
                os.makedirs(os.path.join(self.outputdir, format_))

            retval = run_signscript(self.signcmd, inputfile, outputfile,
                                    filename, format_,
                                    self.passphrases.get(format_))

            if retval != 0:
                if os.path.exists(logfile):
                    logoutput = open(logfile).read()
                else:
                    logoutput = None
                log.warning("Signing failed %s (%s - %s)", filename,
                            format_, filehash)
                log.warning("Signing log: %s", logoutput)
                safe_unlink(outputfile)
                self.app.messages.put(
                    ('errors', item, 'signing script returned non-zero'))
                continue

            # Copy our signed result into unsigned and signed so if
            # somebody wants to get this file signed again, they get the
            # same results.
            outputhash = sha1sum(outputfile)
            log.debug("Copying result to %s", outputhash)
            copied_input = os.path.join(self.inputdir, outputhash)
            if not os.path.exists(copied_input):
                safe_copyfile(outputfile, copied_input)
            copied_output = os.path.join(self.outputdir, format_, outputhash)
            if not os.path.exists(copied_output):
                safe_copyfile(outputfile, copied_output)
            self.app.messages.put(('done', item, outputhash))
        except:
            # Inconceivable! Something went wrong!
            # Remove our output, it might be corrupted
            if outputfile:
                safe_unlink(outputfile)
            if logfile and os.path.exists(logfile):
                logoutput = open(logfile).read()
            else:
                logoutput = None
            log.exception("Exception signing file %s; output: %s",
                          item, logoutput)
            self.app.messages.put(
                ('errors', item, 'worker hit an exception while signing'))
        finally:
            if e:
                e.set()
    log.debug("Worker exiting")

passphrase = sys.stdin.read().strip()
if passphrase == "":
    passphrase = None

if format_ == "signcode":
    if not options.signcode_keydir:
        parser.error("keydir required when format is signcode")
    copyfile(inputfile, tmpfile)
    if shouldSign(filename):
        signfile(tmpfile, options.signcode_keydir, options.fake,
                 passphrase, timestamp=options.signcode_timestamp)
    else:
        parser.error("Invalid file for signing: %s" % filename)
        sys.exit(1)
elif format_ == "osslsigncode":
    safe_unlink(tmpfile)
    if not options.signcode_keydir:
        parser.error("keydir required when format is osslsigncode")
    if shouldSign(filename):
        osslsigncode_signfile(
            inputfile, tmpfile, options.signcode_keydir, options.fake,
            passphrase, timestamp=options.signcode_timestamp,
        )
    else:
        parser.error("Invalid file for signing: %s" % filename)
        sys.exit(1)
elif format_ == "sha2signcode":

def git(repo, dest, refname=None, revision=None, update_dest=True,
        shareBase=DefaultShareBase, mirrors=None, clean_dest=False):
    """Makes sure that `dest` has `revision` or `refname` checked out from
    `repo`.

    Do what it takes to make that happen, including possibly clobbering
    dest. If `mirrors` is set, will try and use the mirrors before `repo`.
    """
    if shareBase is DefaultShareBase:
        shareBase = os.environ.get("GIT_SHARE_BASE_DIR", None)

    if shareBase is not None:
        repo_name = get_repo_name(repo)
        share_dir = os.path.join(shareBase, repo_name)
    else:
        share_dir = None

    if share_dir is not None and not is_git_repo(share_dir):
        log.info("creating bare repo %s", share_dir)
        try:
            init(share_dir, bare=True)
            os.utime(share_dir, None)
        except Exception:
            log.warning("couldn't create shared repo %s; disabling sharing",
                        share_dir, exc_info=True)
            shareBase = None
            share_dir = None

    dest = os.path.abspath(dest)
    log.info("Checking dest %s", dest)
    if not is_git_repo(dest):
        if os.path.exists(dest):
            log.warning("%s doesn't appear to be a valid git directory; "
                        "clobbering", dest)
            remove_path(dest)
        if share_dir is not None:
            # Initialize the repo and set up the share
            init(dest)
            set_share(dest, share_dir)
        else:
            # Otherwise clone into dest
            clone(repo, dest, refname=refname, mirrors=mirrors,
                  update_dest=False)

    # Make sure our share is pointing to the right place
    if share_dir is not None:
        lock_file = os.path.join(get_git_dir(share_dir), "index.lock")
        if os.path.exists(lock_file):
            log.info("removing %s", lock_file)
            safe_unlink(lock_file)
        set_share(dest, share_dir)

    # If we're supposed to be updating to a revision, check if we
    # have that revision already. If so, then there's no need to
    # fetch anything.
    do_fetch = False
    if revision is None:
        # we don't have a revision specified, so pull in everything
        do_fetch = True
    elif has_ref(dest, revision):
        # revision is actually a ref name, so we need to run fetch
        # to make sure we update the ref
        do_fetch = True
    elif not has_revision(dest, revision):
        # we don't have this revision, so need to fetch it
        do_fetch = True

    if do_fetch:
        if share_dir:
            # Fetch our refs into our share
            try:
                # TODO: Handle fetching refnames like refs/tags/XXXX
                if refname is None:
                    fetch(repo, share_dir, mirrors=mirrors)
                else:
                    fetch(repo, share_dir, mirrors=mirrors, refname=refname)
            except subprocess.CalledProcessError:
                # Something went wrong!
                # Clobber share_dir and re-raise
                log.info("error fetching into %s - clobbering", share_dir)
                remove_path(share_dir)
                raise

            try:
                if refname is None:
                    fetch(share_dir, dest, fetch_remote="origin")
                else:
                    fetch(share_dir, dest, fetch_remote="origin",
                          refname=refname)
            except subprocess.CalledProcessError:
                log.info("clobbering %s", share_dir)
                remove_path(share_dir)
                log.info("error fetching into %s - clobbering", dest)
                remove_path(dest)
                raise
        else:
            try:
                fetch(repo, dest, mirrors=mirrors, refname=refname)
            except Exception:
                log.info("error fetching into %s - clobbering", dest)
                remove_path(dest)
                raise

    # Set our remote
    set_remote(dest, 'origin', repo)

    if update_dest:
        log.info("Updating local copy refname: %s; revision: %s",
                 refname, revision)
        # Sometimes refname is passed in as a revision
        if revision:
            if not has_revision(dest, revision) and \
                    has_ref(dest, 'origin/%s' % revision):
                log.info("Using %s as ref name instead of revision",
                         revision)
                refname = revision
                revision = None
        rev = update(dest, refname=refname, revision=revision)
        if clean_dest:
            clean(dest)
        return rev

    if clean_dest:
        clean(dest)
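
# A hypothetical usage sketch of the function above; the repository URL,
# working directory, revision, and share base below are made up for
# illustration.
rev = git("https://example.com/project.git", "/builds/project",
          revision="abcdef0", shareBase="/builds/git-shared")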

def do_GET(self, environ, start_response):
    """
    GET /sign/<format>/<hash>
    """
    try:
        _, magic, format_, filehash = environ['PATH_INFO'].split('/')
        assert magic == 'sign'
        assert format_ in self.formats
    except:
        log.debug("bad request: %s", environ['PATH_INFO'])
        start_response("400 Bad Request", [])
        yield ""
        return

    filehash = os.path.basename(environ['PATH_INFO'])
    try:
        pending = self.pending.get((filehash, format_))
        if pending:
            log.debug("Waiting for pending job")
            # Wait up to a minute for this to finish
            pending.wait(timeout=60)
            log.debug("Pending job finished!")

        fn = self.get_path(filehash, format_)
        filename = self.get_filename(filehash)
        if filename:
            log.debug("Looking for %s (%s)", fn, filename)
        else:
            log.debug("Looking for %s", fn)
        checksum = sha1sum(fn)
        headers = [
            ('X-SHA1-Digest', checksum),
            ('Content-Length', str(os.path.getsize(fn))),
        ]
        fp = open(fn, 'rb')
        os.utime(fn, None)
        log.debug("%s is OK", fn)
        start_response("200 OK", headers)
        while True:
            data = fp.read(1024 ** 2)
            if not data:
                break
            yield data
        self.hits += 1
    except IOError:
        log.debug("%s is missing", fn)
        headers = []
        fn = os.path.join(self.unsigned_dir, filehash)
        if (filehash, format_) in self.pending:
            log.info("File is pending, come back soon!")
            log.debug("Pending: %s", self.pending)
            headers.append(('X-Pending', 'True'))
        # Maybe we have the file, but not for this format
        # If so, queue it up and return a pending response
        # This prevents the client from having to upload the file again
        elif os.path.exists(fn):
            log.debug("GET for file we already have, "
                      "but not for the right format")
            # Validate the file
            myhash = sha1sum(fn)
            if myhash != filehash:
                log.warning("%s is corrupt; deleting (%s != %s)",
                            fn, filehash, myhash)
                safe_unlink(fn)
            else:
                filename = self.get_filename(filehash)
                if filename:
                    self.submit_file(filehash, filename, format_)
                    log.info("File is pending, come back soon!")
                    headers.append(('X-Pending', 'True'))
                else:
                    log.debug("I don't remember the filename; "
                              "re-submit please!")
        else:
            self.misses += 1

        start_response("404 Not Found", headers)
        yield ""
    except:
        log.exception("ISE")
        start_response("500 Internal Server Error", headers)
        yield ""
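
# A minimal client-side sketch, not part of the server above, showing how a
# caller might consume the responses do_GET produces: 200 streams the signed
# file, while 404 with X-Pending: True means the job is queued and the client
# should retry later. The base_url value and helper name are assumptions.
import urllib2

def poll_signed(base_url, format_, filehash):
    try:
        resp = urllib2.urlopen("%s/sign/%s/%s" % (base_url, format_,
                                                  filehash))
        # the X-SHA1-Digest response header can be checked against a local
        # sha1 of the body
        return resp.read()
    except urllib2.HTTPError as e:
        if e.code == 404 and e.info().getheader('X-Pending') == 'True':
            return None  # still pending; retry later
        raise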
if format_ == "signcode": if not options.signcode_keydir: parser.error("keydir required when format is signcode") copyfile(inputfile, tmpfile) if shouldSign(filename): signfile(tmpfile, options.signcode_keydir, options.fake, passphrase, timestamp=options.signcode_timestamp) else: parser.error("Invalid file for signing: %s" % filename) sys.exit(1) elif format_ == "osslsigncode": safe_unlink(tmpfile) if not options.signcode_keydir: parser.error("keydir required when format is osslsigncode") if shouldSign(filename): osslsigncode_signfile(inputfile, tmpfile, options.signcode_keydir, options.fake, passphrase, timestamp=options.signcode_timestamp) else: parser.error("Invalid file for signing: %s" % filename) sys.exit(1) elif format_ in ("sha2signcode", "sha2signcodestub"): safe_unlink(tmpfile) # add zipfile support

    working_directory=curdir,
    umask=0o077,
)
daemon_ctx.open()

# gevent needs to be reinitialized after the hardcore forking action
gevent.reinit()
open(pidfile, 'w').write(str(os.getpid()))

# Set up logging again! createDaemon has closed all our open file
# handles
setup_logging(logfile=options.logfile, loglevel=options.loglevel,
              log_maxsize=options.log_maxsize,
              log_maxfiles=options.log_maxfiles,
              )

try:
    run(args[0], passphrases)
except:
    log.exception("error running server")
    raise
finally:
    try:
        if options.daemonize:
            daemon_ctx.close()
            safe_unlink(pidfile)
        log.info("exiting")
    except:
        log.exception("error shutting down")