def mbd_check(self, request):
    # Basic function checks
    self._mbd_schroot_run(["--info"])

    # Note: When a schroot command comes back too fast, 'modern desktops' might still be busy
    # scrutinizing the new devices schroot created, making schroot fail when closing
    # them with something like
    # '/var/lib/schroot/mount/mini-buildd-wheezy-amd64-aaba77f3-4cba-423e-b34f-2b2bbb9789e1: device is busy.'
    # making this fail _and_ leave schroot cruft around.
    # Wtf! Hence we now just skip this ls test for now.
    # self._mbd_schroot_run(["--directory=/", "--", "/bin/ls"])

    # Backend checks
    MsgLog(LOG, request).info("{c}: Running backend check.".format(c=self))
    self.mbd_get_backend().mbd_backend_check(request)

    # "apt update/upgrade" check
    for args, fatal in [(["update"], True),
                        (["--ignore-missing", "dist-upgrade"], True),
                        (["--purge", "autoremove"], False),
                        (["clean"], True)]:
        try:
            MsgLog(LOG, request).info("=> Running: apt-get {args}:".format(args=" ".join(args)))
            MsgLog(LOG, request).log_text(
                self._mbd_schroot_run(["--directory=/", "--", "/usr/bin/apt-get", "-q", "-o APT::Install-Recommends=false", "--yes"] + args,
                                      namespace="source"))
        except Exception:
            MsgLog(LOG, request).warn("'apt-get {args}' not supported in this chroot.".format(args=" ".join(args)))
            if fatal:
                raise
def mbd_get_matching_release(self, request, source, gnupg):
    url = "{u}/dists/{d}/Release".format(u=self.url, d=source.codename)
    with tempfile.NamedTemporaryFile() as release_file:
        MsgLog(LOG, request).debug("Downloading '{u}' to '{t}'".format(u=url, t=release_file.name))
        try:
            release_file.write(urllib2.urlopen(url).read())
        except urllib2.HTTPError as e:
            if e.code == 404:
                MsgLog(LOG, request).debug("{a}: '404 Not Found' on '{u}'".format(a=self, u=url))
                # Not for us
                return None
            raise
        release_file.flush()
        release_file.seek(0)
        release = debian.deb822.Release(release_file)

        # Check release file fields
        if not source.mbd_is_matching_release(request, release):
            return None

        # Check signature
        with tempfile.NamedTemporaryFile() as signature:
            MsgLog(LOG, request).debug("Downloading '{u}.gpg' to '{t}'".format(u=url, t=signature.name))
            signature.write(urllib2.urlopen(url + ".gpg").read())
            signature.flush()
            gnupg.verify(signature.name, release_file.name)

        # Ok, this is for this source
        return release
def mbd_remove(self, request):
    mini_buildd.misc.call_sequence(self.mbd_get_sequence(), rollback_only=True, run_as_root=True)
    shutil.rmtree(self.mbd_get_path(),
                  onerror=lambda f, p, e: MsgLog(LOG, request).warn("{c}: Failure removing data dir '{p}' (ignoring): {e}".format(c=self, p=self.mbd_get_path(), e=e)))
    MsgLog(LOG, request).info("{c}: Removed from system.".format(c=self))
def mbd_set_changed(self, request):
    if self.mbd_is_active():
        self.status = self.STATUS_PREPARED
        MsgLog(LOG, request).warn("Deactivated due to changes: {o}".format(o=self))
    self.last_checked = self.CHECK_CHANGED
    MsgLog(LOG, request).warn("Marked as changed: {o}".format(o=self))
def mbd_check(cls, request, obj, force=False, needs_activation=False):
    if obj.mbd_is_prepared() and not obj.mbd_is_changed():
        try:
            # Also run for all status dependencies
            cls._mbd_run_dependencies(request, obj, cls.mbd_check,
                                      force=force,
                                      needs_activation=obj.mbd_is_active() or obj.last_checked == obj.CHECK_REACTIVATE)

            if force or obj.mbd_needs_check():
                last_checked = obj.last_checked

                obj.mbd_check(request)

                # Handle special flags
                reactivated = False
                if obj.last_checked == obj.CHECK_REACTIVATE:
                    obj.status = StatusModel.STATUS_ACTIVE
                    reactivated = True
                    MsgLog(LOG, request).info("Auto-reactivated: {o}".format(o=obj))

                # Finish up
                obj.last_checked = datetime.datetime.now()
                obj.save()

                # Run activation hook if reactivated
                if reactivated:
                    cls._mbd_on_activation(request, obj)

                MsgLog(LOG, request).info("Checked ({f}, last={c}): {o}".format(f="forced" if force else "scheduled", c=last_checked, o=obj))
            else:
                MsgLog(LOG, request).info("Needs no check: {o}".format(o=obj))

            if needs_activation and not obj.mbd_is_active():
                raise Exception("Not active, but a (to-be-)active item depends on it. Activate this first: {o}".format(o=obj))
        except Exception:
            # Check failed: auto-deactivate and re-raise the exception
            obj.last_checked = max(obj.last_checked, obj.CHECK_FAILED)
            if obj.mbd_is_active():
                obj.status, obj.last_checked = obj.STATUS_PREPARED, obj.CHECK_REACTIVATE
                MsgLog(LOG, request).error("Automatically deactivated: {o}".format(o=obj))
            obj.save()
            raise
    else:
        raise Exception("Can't check removed or changed object (run 'prepare' first): {o}".format(o=obj))
def mbd_remove(cls, request, obj):
    if obj.mbd_is_prepared():
        obj.mbd_remove(request)
        obj.status, obj.last_checked = obj.STATUS_REMOVED, obj.CHECK_NONE
        obj.save()
        MsgLog(LOG, request).info("Removed: {o}".format(o=obj))
    else:
        MsgLog(LOG, request).info("Already removed: {o}".format(o=obj))
def mbd_is_matching_release(self, request, release):
    "Check that this release file matches us."
    for key, value in self.mbd_release_file_values().items():
        # Check identity: origin, codename
        MsgLog(LOG, request).debug("Checking '{k}: {v}'".format(k=key, v=value))
        if value != release[key]:
            MsgLog(LOG, request).debug("Release[{k}] field mismatch: '{rv}', expected '{v}'.".format(k=key, rv=release[key], v=value))
            return False
    return True
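# Illustration (hypothetical field values, not taken from this codebase): if
# mbd_release_file_values() returned {"Origin": "Debian", "Codename": "wheezy"},
# a downloaded Release file would only match when it carries exactly those
# fields, i.e. a stanza starting like:
#
#   Origin: Debian
#   Codename: wheezy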
def mbd_prepare(self, request):
    url = "http://{h}/mini_buildd/api?command=getkey&output=plain".format(h=self.http)
    MsgLog(LOG, request).info("Downloading '{u}'...".format(u=url))

    # We prepare the GPG data from downloaded key data, so key_id _must_ be empty (see super(mbd_prepare))
    self.key_id = ""
    self.key = urllib2.urlopen(url).read()
    if self.key:
        MsgLog(LOG, request).warn("Downloaded remote key integrated: Please check key manually before activation!")
    else:
        raise Exception("Empty remote key from '{u}' -- maybe the remote is not prepared yet?".format(u=url))

    super(Remote, self).mbd_prepare(request)
def mbd_deactivate(cls, request, obj):
    obj.status = min(obj.STATUS_PREPARED, obj.status)
    if obj.last_checked == obj.CHECK_REACTIVATE:
        obj.last_checked = obj.CHECK_FAILED
    obj.save()
    cls._mbd_on_deactivation(request, obj)
    MsgLog(LOG, request).info("Deactivated: {o}".format(o=obj))
def mbd_prepare(self, request):
    self._mbd_gnupg.prepare()
    self._mbd_gnupg_long_id = self._mbd_gnupg.get_first_sec_key().key_id
    self._mbd_gnupg_fingerprint = self._mbd_gnupg.get_first_sec_key_fingerprint().user_id
    MsgLog(LOG, request).info("Daemon GnuPG key generated: {i}: {f}".format(i=self._mbd_gnupg_long_id, f=self._mbd_gnupg_fingerprint))
def mbd_prepare(cls, request, obj):
    if not obj.mbd_is_prepared():
        # Fresh prepare
        cls._mbd_run_dependencies(request, obj, cls.mbd_prepare)
        obj.mbd_prepare(request)
        obj.status, obj.last_checked = obj.STATUS_PREPARED, obj.CHECK_NONE
        obj.save()
        MsgLog(LOG, request).info("Prepared: {o}".format(o=obj))
    elif obj.mbd_is_changed():
        # Update data on change
        cls._mbd_run_dependencies(request, obj, cls.mbd_prepare)
        obj.mbd_sync(request)
        obj.status, obj.last_checked = obj.STATUS_PREPARED, obj.CHECK_NONE
        obj.save()
        MsgLog(LOG, request).info("Synced: {o}".format(o=obj))
    else:
        MsgLog(LOG, request).info("Already prepared: {o}".format(o=obj))
def mbd_activate(cls, request, obj):
    if obj.mbd_is_prepared() and obj.mbd_is_checked():
        cls._mbd_run_dependencies(request, obj, cls.mbd_activate)
        obj.status = obj.STATUS_ACTIVE
        obj.save()
        cls._mbd_on_activation(request, obj)
        MsgLog(LOG, request).info("Activated: {o}".format(o=obj))
    elif obj.mbd_is_prepared() and obj.last_checked in (obj.CHECK_FAILED, obj.CHECK_NONE):
        obj.last_checked = obj.CHECK_REACTIVATE
        obj.save()
        MsgLog(LOG, request).info("Will auto-activate when check succeeds: {o}".format(o=obj))
    elif obj.mbd_is_active():
        MsgLog(LOG, request).info("Already active: {o}".format(o=obj))
    else:
        raise Exception("Prepare and check first: {o}".format(o=obj))
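# State sketch as implied by the mbd_prepare()/mbd_check()/mbd_activate()/
# mbd_deactivate() classmethods above. The relative ordering of the special
# CHECK_* marker values is an assumption derived from the min()/max()
# comparisons used in this module:
#
#   status:       STATUS_REMOVED < STATUS_PREPARED < STATUS_ACTIVE
#   last_checked: special markers (CHECK_NONE, CHECK_CHANGED, CHECK_FAILED,
#                 CHECK_REACTIVATE) < any real check timestamp
#
#   prepare:     REMOVED -> PREPARED (last_checked=CHECK_NONE)
#   activate:    PREPARED+checked -> ACTIVE; unchecked -> CHECK_REACTIVATE
#   check fail:  ACTIVE -> PREPARED (last_checked=CHECK_REACTIVATE)
#   check ok:    last_checked=now; CHECK_REACTIVATE auto-restores ACTIVE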
def mbd_check(self, request): "Rescan all archives, and check that there is at least one working." msglog = MsgLog(LOG, request) self.archives = [] with contextlib.closing(mini_buildd.gnupg.TmpGnuPG()) as gpg: for k in self.apt_keys.all(): gpg.add_pub_key(k.key) for archive in Archive.objects.all(): try: # Check and update the ping value archive.mbd_ping(request) # Get release if this archive serves us, else exception release = archive.mbd_get_matching_release(request, self, gpg) if release: # Implicitely save ping value for this archive self.archives.add(archive) self.description = release["Description"] # Set codeversion self.codeversion = "" if self.codeversion_override: self.codeversion = self.codeversion_override msglog.info("{o}: Codeversion override active: {r}".format(o=self, r=self.codeversion_override)) else: self.codeversion = mini_buildd.misc.guess_codeversion(release) self.codeversion_override = self.codeversion msglog.info("{o}: Codeversion guessed as: {r}".format(o=self, r=self.codeversion)) # Set architectures and components (may be auto-added) if release.get("Architectures"): for a in release["Architectures"].split(" "): new_arch, _created = Architecture.mbd_get_or_create(msglog, name=a) self.architectures.add(new_arch) if release.get("Components"): for c in release["Components"].split(" "): new_component, _created = Component.mbd_get_or_create(msglog, name=c) self.components.add(new_component) msglog.info("{o}: Added archive: {a}".format(o=self, a=archive)) else: msglog.debug("{a}: Not hosting {s}".format(a=archive, s=self)) except Exception as e: mini_buildd.setup.log_exception(msglog, "Error checking {a} for {s} (check Archive or Source)".format(a=archive, s=self), e) # Check that at least one archive can be found self.mbd_get_archive()
def mbd_prepare(self, request):
    if not self.apt_keys.all():
        raise Exception("{s}: Please add apt keys to this source.".format(s=self))
    if self.mbd_get_extra_option("Origin"):
        raise Exception("{s}: You may not override 'Origin', just use the origin field.".format(s=self))
    MsgLog(LOG, request).info("{s} with pin: {p}".format(s=self, p=self.mbd_get_apt_pin()))
def mbd_action(cls, request, queryset, action, **kwargs):
    """
    Try to run the action on each object in the queryset; emit an error
    message on failure, but don't fail ourselves.
    """
    for o in queryset:
        try:
            getattr(cls, "mbd_" + action)(request, o, **kwargs)
        except Exception as e:
            mini_buildd.setup.log_exception(MsgLog(LOG, request), "{a} failed: {o}".format(a=action, o=o), e)
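# A minimal usage sketch (hypothetical, not part of this module; assumes
# mbd_action() is reachable via the model class -- adjust if it actually
# lives on an admin class): run a forced "check" on all selected objects;
# per-object failures are logged via log_exception() instead of aborting
# the loop.
def _example_admin_check(modeladmin, request, queryset):
    queryset.model.mbd_action(request, queryset, "check", force=True)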
def mbd_prepare(self, request): """ .. note:: SUDO WORKAROUND for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=608840 Includes '--include=sudo' and all handling of 'sudoers_workaround_file' below. .. seealso:: :py:class:`~mini_buildd.builder.Build` """ mini_buildd.misc.mkdirs(os.path.join(self.mbd_get_path(), mini_buildd.setup.CHROOT_LIBDIR)) # Set personality self.personality = "" if self.personality_override: self.personality = self.personality_override else: try: self.personality = self.PERSONALITIES[self.architecture.name] except: self.personality = "linux" mini_buildd.misc.ConfFile( self.mbd_get_sudoers_workaround_file(), """\ {u} ALL=(ALL) ALL {u} ALL=NOPASSWD: ALL """.format(u=os.getenv("USER"))).save() mini_buildd.misc.ConfFile( self.mbd_get_schroot_conf_file(), """\ [{n}] description=Mini-Buildd chroot {n} setup.fstab=mini-buildd/fstab groups=sbuild users=mini-buildd root-groups=sbuild root-users=mini-buildd source-root-users=mini-buildd personality={p} # Backend specific config {b} """.format(n=self.mbd_get_name(), p=self.personality, b=self.mbd_get_backend().mbd_get_schroot_conf())).save() # Gen keyring file to use with debootstrap with contextlib.closing(mini_buildd.gnupg.TmpGnuPG()) as gpg: for k in self.source.apt_keys.all(): gpg.add_pub_key(k.key) gpg.export(self.mbd_get_keyring_file()) mini_buildd.misc.call_sequence(self.mbd_get_sequence(), run_as_root=True) MsgLog(LOG, request).info("{c}: Prepared on system for schroot.".format(c=self))
def _mbd_run_dependencies(cls, request, obj, func, **kwargs):
    """
    Run the action for all dependencies. For models with LETHAL_DEPENDENCIES
    set to False, don't fail: run on all dependencies and merely warn.

    The practical use case is the Daemon model only, where we want to run all
    checks on all dependencies, but not fail ourselves.
    """
    for o in obj.mbd_get_dependencies():
        try:
            func(request, o, **kwargs)
        except Exception as e:
            if obj.LETHAL_DEPENDENCIES:
                raise
            else:
                MsgLog(LOG, request).warn("Check on '{o}' failed: {e}".format(o=o, e=e))
def error(request, code, meaning, description, api_cmd=None):
    # Note: Adding api_cmd if applicable; this will enable automated api links even on error pages.
    MsgLog(LOG, request).error("{code} {meaning}: {description}".format(code=code, meaning=meaning, description=description))

    output = request.GET.get("output", "html")
    if output[:7] == "referer":
        response = django.shortcuts.redirect(_referer(request, output))
    else:
        response = django.shortcuts.render(request,
                                           "mini_buildd/error.html",
                                           {"code": code,
                                            "meaning": meaning,
                                            "description": description,
                                            "api_cmd": api_cmd},
                                           status=code)
    _add_api_messages(response, api_cmd, ["E: {d}".format(d=description)])
    return response
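# The code-specific helpers used by api() below (error400_bad_request,
# error401_unauthorized, error405_method_not_allowed) are presumably thin
# wrappers around error() -- a sketch under that assumption, with the HTTP
# reason phrases taken from RFC 2616:
def error400_bad_request(request, description, api_cmd=None):
    return error(request, 400, "Bad Request", description, api_cmd)


def error401_unauthorized(request, description, api_cmd=None):
    return error(request, 401, "Unauthorized", description, api_cmd)


def error405_method_not_allowed(request, description, api_cmd=None):
    return error(request, 405, "Method Not Allowed", description, api_cmd)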
def mbd_ping(self, request): "Ping and update the ping value." try: t0 = datetime.datetime.now() # Append dists to URL for ping check: Archive may be # just fine, but not allow to access to base URL # (like ourselves ;). Any archive _must_ have dists/ anyway. try: urllib2.urlopen("{u}/dists/".format(u=self.url)) except urllib2.HTTPError as e: # Allow HTTP 4xx client errors through; these might be valid use cases like: # 404 Usage Information: apt-cacher-ng if not 400 <= e.code <= 499: raise delta = datetime.datetime.now() - t0 self.ping = mini_buildd.misc.timedelta_total_seconds(delta) * (10** 3) self.save() MsgLog(LOG, request).debug("{s}: Ping!".format(s=self)) except Exception as e: self.ping = -1.0 self.save() raise Exception("{s}: Does not ping: {e}".format(s=self, e=e))
def mbd_check(self, request): "Rescan all archives, and check that there is at least one working." msglog = MsgLog(LOG, request) self.archives = [] with contextlib.closing(mini_buildd.gnupg.TmpGnuPG()) as gpg: for k in self.apt_keys.all(): gpg.add_pub_key(k.key) for archive in Archive.objects.all(): try: # Check and update the ping value archive.mbd_ping(request) # Get release if this archive serves us, else exception release = archive.mbd_get_matching_release( request, self, gpg) if release: # Implicitely save ping value for this archive self.archives.add(archive) self.description = release["Description"] # Set codeversion self.codeversion = "" if self.codeversion_override: self.codeversion = self.codeversion_override msglog.info( "{o}: Codeversion override active: {r}".format( o=self, r=self.codeversion_override)) else: self.codeversion = mini_buildd.misc.guess_codeversion( release) self.codeversion_override = self.codeversion msglog.info( "{o}: Codeversion guessed as: {r}".format( o=self, r=self.codeversion)) # Set architectures and components (may be auto-added) if release.get("Architectures"): for a in release["Architectures"].split(" "): new_arch, _created = Architecture.mbd_get_or_create( msglog, name=a) self.architectures.add(new_arch) if release.get("Components"): for c in release["Components"].split(" "): new_component, _created = Component.mbd_get_or_create( msglog, name=c) self.components.add(new_component) msglog.info("{o}: Added archive: {a}".format( o=self, a=archive)) else: msglog.debug("{a}: Not hosting {s}".format(a=archive, s=self)) except Exception as e: mini_buildd.setup.log_exception( msglog, "Error checking {a} for {s} (check Archive or Source)". format(a=archive, s=self), e) # Check that at least one archive can be found self.mbd_get_archive()
def mbd_sync(cls, request):
    MsgLog(LOG, request).warn("The GnuPG key will never be updated automatically. Explicitly run remove+prepare to achieve this.")
def mbd_remove(self, request):
    self._mbd_gnupg.remove()
    MsgLog(LOG, request).info("Daemon GnuPG key removed.")
def mbd_check(self, request): "Just warn in case there are no repos and no chroots." if not self.mbd_get_daemon().get_active_repositories( ) and not self.mbd_get_daemon().get_active_chroots(): MsgLog(LOG, request).warn("No active chroot or repository.")
def mbd_remove(self, request):
    super(Remote, self).mbd_remove(request)
    self.mbd_set_pickled_data(mini_buildd.api.Status({}))
    MsgLog(LOG, request).info("Remote key and state removed.")
def mbd_backend_check(self, request): "Subclasses may implement this to do extra backend checks." MsgLog(LOG, request).info("{c}: No backend check implemented.".format(c=self))
def _mbd_on_change(cls, request, obj):
    "Notify the daemon keyring to update itself."
    if obj.mbd_get_daemon().keyrings:
        MsgLog(LOG, request).info("Scheduling keyrings update...")
        obj.mbd_get_daemon().keyrings.set_needs_update()
def mbd_backend_check(self, request): MsgLog(LOG, request).info("{c}: Running file system check...".format(c=self)) mini_buildd.misc.call(["/sbin/fsck", "-a", "-t{t}".format(t=self.filesystem), self.mbd_get_lvm_device()], run_as_root=True)
def api(request):
    api_cmd = None
    try:
        if request.method != 'GET':
            return error400_bad_request(request, "API: Allows GET requests only")

        # Call API index if called with no argument
        if not request.GET:
            return django.shortcuts.render_to_response("mini_buildd/api_index.html",
                                                       {"COMMANDS": mini_buildd.api.COMMANDS_DEFAULTS,
                                                        "COMMAND_GROUP": mini_buildd.api.COMMAND_GROUP},
                                                       django.template.RequestContext(request))

        # Get API class from 'command' parameter
        command = request.GET.get("command", None)
        if command not in mini_buildd.api.COMMANDS_DICT:
            return error400_bad_request(request, "API: Unknown command '{c}'".format(c=command))
        api_cls = mini_buildd.api.COMMANDS_DICT[command]

        # Authentication
        def chk_login():
            return request.user.is_authenticated() and request.user.is_active

        if (api_cls.AUTH == api_cls.LOGIN) and not chk_login():
            return error401_unauthorized(request, "API: '{c}': Needs user login".format(c=command))

        if (api_cls.AUTH == api_cls.STAFF) and not (chk_login() and request.user.is_staff):
            return error401_unauthorized(request, "API: '{c}': Needs staff user login".format(c=command))

        if (api_cls.AUTH == api_cls.ADMIN) and not (chk_login() and request.user.is_superuser):
            return error401_unauthorized(request, "API: '{c}': Needs superuser login".format(c=command))

        # Generate command object
        api_cmd = api_cls(request.GET, request, msglog=MsgLog(LOG, request))

        # HTML output by default
        output = request.GET.get("output", "html")

        # Check if we need a running daemon
        if api_cls.NEEDS_RUNNING_DAEMON and not mini_buildd.daemon.get().is_running():
            return error405_method_not_allowed(request, "API: '{c}': Needs running daemon".format(c=command))

        # Check confirmable calls
        if api_cls.CONFIRM and request.GET.get("confirm", None) != command:
            if output == "html" or output == "referer":
                return django.shortcuts.render_to_response("mini_buildd/api_confirm.html",
                                                           {"api_cmd": api_cmd,
                                                            "referer": _referer(request, output)},
                                                           django.template.RequestContext(request))
            else:
                return error401_unauthorized(request, "API: '{c}': Needs to be confirmed".format(c=command))

        # Show api command name and user calling it
        api_cmd.msglog.info("API call '{c}' by user '{u}'".format(c=command, u=request.user))

        # Run API call (dep-injection via daemon object)
        api_cmd.run(mini_buildd.daemon.get())

        # Generate API call output
        response = None
        if output == "html":
            response = django.shortcuts.render_to_response(["mini_buildd/api_{c}.html".format(c=command),
                                                            "mini_buildd/api_default.html"],
                                                           {"api_cmd": api_cmd,
                                                            "repositories": mini_buildd.models.repository.Repository.mbd_get_prepared()},
                                                           django.template.RequestContext(request))

        elif output == "plain":
            response = django.http.HttpResponse(api_cmd.__unicode__().encode(mini_buildd.setup.CHAR_ENCODING),
                                                content_type="text/plain; charset={charset}".format(charset=mini_buildd.setup.CHAR_ENCODING))

        elif output == "python":
            response = django.http.HttpResponse(pickle.dumps(api_cmd, pickle.HIGHEST_PROTOCOL),
                                                content_type="application/python-pickle")

        elif output[:7] == "referer":
            # Add all plain result lines as info messages on redirect
            for l in api_cmd.__unicode__().splitlines():
                api_cmd.msglog.info("Result: {l}".format(l=l))
            response = django.shortcuts.redirect(_referer(request, output))
        else:
            response = django.http.HttpResponseBadRequest("<h1>Unknown output type '{o}'</h1>".format(o=output))

        # Add all user messages as custom HTTP headers
        _add_api_messages(response, api_cmd)

        return response
    except Exception as e:
        # This might as well be just an internal error; in case of no bug in the code, 405 fits better though.
        # ['wontfix' unless we refactor to diversified exception classes]
        mini_buildd.setup.log_exception(LOG, "API call error", e)
        return error405_method_not_allowed(request, "API call error: {e}".format(e=e), api_cmd=api_cmd)
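# Example API calls as implied by the parameter handling above (host and port
# are hypothetical; "getkey" is the command Remote.mbd_prepare() uses, and a
# "status" command is presumed from the mini_buildd.api.Status references):
#
#   http://localhost:8066/mini_buildd/api                               -> HTML command index
#   http://localhost:8066/mini_buildd/api?command=getkey&output=plain
#   http://localhost:8066/mini_buildd/api?command=status&output=python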