def execute(self, args):
    if len(args) != 1:
        print "usage: elbe db del_project <project_dir>"
        return

    db = ElbeDB()
    db.del_project(args[0])
def execute(self, args): if len(args) != 2: print "usage: elbe db del_version <project_dir> <version>" return db = ElbeDB() db.del_version( args[0], args[1] )
def execute(self, args):
    if len(args) != 2:
        print "usage: elbe db set_xml <project_dir> <xml>"
        return

    db = ElbeDB()
    db.set_xml(args[0], args[1])
def __init__(self, basepath):
    self.basepath = basepath            # Base path for new projects
    self.db = ElbeDB()                  # Database of projects and users
    self.worker = AsyncWorker(self.db)
    self.userid2project = {}            # (userid, ElbeProject) map of open projects
    self.builddir2userid = {}           # (builddir, userid) map of open projects
    self.lock = Lock()                  # Lock protecting our data
def execute(self, args): if len(args) != 2: print("usage: elbe db set_xml <project_dir> <xml>") return db = ElbeDB() db.set_xml(args[0], args[1])
def execute(self, args): if len(args) != 1: print("usage: elbe db del_project <project_dir>") return db = ElbeDB() db.del_project(args[0])
def execute(self, args): if len(args) != 2: print("usage: elbe db set_project_version <project_dir> <version>") return db = ElbeDB() db.set_project_version(args[0], args[1])
def execute(self, args): if len(args) != 2: print("usage: elbe db del_version <project_dir> <version>") return db = ElbeDB() db.del_version(args[0], args[1])
def execute(self, args): if len(args) != 2: print "usage: elbe db set_project_version <project_dir> <version>" return db = ElbeDB() db.set_project_version(args[0], args[1])
def execute(self, args): oparser = OptionParser( usage="usage: %prog db add_user [options] <username>") oparser.add_option("--fullname", dest="fullname") oparser.add_option("--password", dest="password") oparser.add_option("--email", dest="email") oparser.add_option("--admin", dest="admin", default=False, action="store_true") (opt, arg) = oparser.parse_args(args) if len(arg) != 1: print("wrong number of arguments") oparser.print_help() return if not opt.password: password = getpass('Password for the new user: ') else: password = opt.password db = ElbeDB() db.add_user(arg[0], opt.fullname, password, opt.email, opt.admin)
def execute(self, args): if len(args) != 2: print "usage: elbe db print_version_xml <project_dir> <version>" return db = ElbeDB() filename = db.get_version_xml( args[0], args[1] ) with open( filename ) as f: copyfileobj( f, sys.stdout )
def execute(self, args): if len(args) != 2: print("usage: elbe db print_version_xml <project_dir> <version>") return db = ElbeDB() filename = db.get_version_xml(args[0], args[1]) with open(filename) as f: copyfileobj(f, sys.stdout)
def execute(self, args): if len (args) != 1: print "usage: elbe db get_files <project_dir>" return db = ElbeDB() files = db.get_project_files (args[0]) for f in files: if f.description: print "%-40s %s" % (f.name, f.description) else: print f.name
def execute(self, args): if len(args) != 1: print("usage: elbe db get_files <project_dir>") return db = ElbeDB() files = db.get_project_files(args[0]) for f in files: if f.description: print("%-40s %s" % (f.name, f.description)) else: print(f.name)
def execute(self, args): oparser = OptionParser (usage="usage: %prog db save_version <project_dir>") oparser.add_option ("--description", dest="description") (opt, arg) = oparser.parse_args (args) if len(arg) != 1: print "wrong number of arguments" oparser.print_help() return db = ElbeDB() db.save_version( arg[0], opt.description )
def execute(self, args): oparser = OptionParser (usage="usage: %prog db init [options]") oparser.add_option ("--name", dest="name", default="root") oparser.add_option ("--fullname", dest="fullname", default="Admin") oparser.add_option ("--password", dest="password", default="foo") oparser.add_option ("--email", dest="email", default="root@localhost") oparser.add_option ("--noadmin", dest="admin", default=True, action="store_false") (opt, arg) = oparser.parse_args (args) ElbeDB.init_db (opt.name, opt.fullname, opt.password, opt.email, opt.admin)
def execute(self, args): if len(args) != 1: print("usage: elbe db list_versions <project_dir>") return db = ElbeDB() versions = db.list_project_versions(args[0]) for v in versions: if v.description: print("%s: %s" % (v.version, v.description)) else: print(v.version)
def execute(self, args): if len(args) != 1: print "usage: elbe db list_versions <project_dir>" return db = ElbeDB() versions = db.list_project_versions(args[0]) for v in versions: if v.description: print v.version + ": " + v.description else: print v.version
def execute(self, args): if len(args) != 1: print "usage: elbe db list_versions <project_dir>" return db = ElbeDB() versions = db.list_project_versions (args[0]) for v in versions: if v.description: print v.version + ": " + v.description else: print v.version
def execute(self, args): oparser = OptionParser ( usage="usage: %prog db create_project [options] <project_dir>" ) oparser.add_option( "--user", dest="user", help="user name of the designated project owner" ) (opt, arg) = oparser.parse_args(args) if len (arg) != 1: oparser.print_help() return db = ElbeDB() owner_id = db.get_user_id( opt.user ) db.create_project (arg[0], owner_id)
def execute(self, args): oparser = OptionParser ( usage="usage: %prog db reset_project [options] <project_dir>") oparser.add_option ("--clean", dest="clean", default=False, action="store_true") (opt, arg) = oparser.parse_args (args) if len(arg) != 1: print "wrong number of arguments" oparser.print_help() return db = ElbeDB() db.reset_project (arg[0], opt.clean)
def execute(self, args): oparser = OptionParser( usage="usage: %prog db del_user [options] <userid>") oparser.add_option("--delete-projects", dest="delete_projects", default=False, action="store_true") oparser.add_option("--quiet", dest="quiet", default=False, action="store_true") (opt, arg) = oparser.parse_args(args) if len(arg) != 1: print("usage: elbe db del_user <userid>") return try: userid = int(arg[0]) except ValueError as E: print("userid must be an integer - %s" % E) return db = ElbeDB() projects = db.del_user(userid) if projects: if not opt.quiet: if opt.delete_projects: print("removing projects owned by the deleted user:"******"keeping projects owned by the deleted user:"******"%s: %s [%s] %s" % (p.builddir, p.name, p.version, p.edit)) if opt.delete_projects: try: db.del_project(p.builddir) except ElbeDBError as e: print(" ==> %s " % str(e))
def execute(self, args): oparser = OptionParser( usage="usage: %prog db del_user [options] <userid>") oparser.add_option("--delete-projects", dest="delete_projects", default=False, action="store_true") oparser.add_option("--quiet", dest="quiet", default=False, action="store_true") (opt, arg) = oparser.parse_args(args) if len(arg) != 1: print "usage: elbe db del_user <userid>" return try: userid = int(arg[0]) except: print "userid must be an integer" return db = ElbeDB() projects = db.del_user(userid) if projects: if not opt.quiet: if opt.delete_projects: print "removing projects owned by the deleted user:"******"keeping projects owned by the deleted user:"******":", p.name, "[", p.version, "]", p.edit if opt.delete_projects: try: db.del_project(p.builddir) except ElbeDBError as e: print " ==> ", e
def execute(self, args): oparser = OptionParser( usage="usage: %prog db del_user [options] <userid>") oparser.add_option("--delete-projects", dest="delete_projects", default=False, action="store_true") oparser.add_option("--quiet", dest="quiet", default=False, action="store_true") (opt, arg) = oparser.parse_args(args) if len(arg) != 1: print("usage: elbe db del_user <userid>") return try: userid = int(arg[0]) except BaseException: print("userid must be an integer") return db = ElbeDB() projects = db.del_user(userid) if projects: if not opt.quiet: if opt.delete_projects: print("removing projects owned by the deleted user:"******"keeping projects owned by the deleted user:"******"%s: %s [%s] %s" % (p.builddir, p.name, p.version, p.edit)) if opt.delete_projects: try: db.del_project(p.builddir) except ElbeDBError as e: print(" ==> %s " % str(e))
def execute(self, args): oparser = OptionParser (usage="usage: %prog db add_user [options] username") oparser.add_option ("--fullname", dest="fullname") oparser.add_option ("--password", dest="password") oparser.add_option ("--email", dest="email") oparser.add_option ("--admin", dest="admin", default=False, action="store_true") (opt, arg) = oparser.parse_args (args) if len(arg) != 1: print "wrong number of arguments" oparser.print_help() return if not opt.password: password = getpass('Password for the new user: ') else: password = opt.password db = ElbeDB() db.add_user( arg[0], opt.fullname, password, opt.email, opt.admin )
def execute(self, args): if len(args) != 1: print "usage: elbe db build <project_dir>" return db = ElbeDB() db.set_busy(args[0], [ "empty_project", "needs_build", "has_changes", "build_done", "build_failed" ]) try: ep = db.load_project(args[0]) ep.build(skip_debootstrap=True) db.update_project_files(ep) except Exception as e: db.update_project_files(ep) db.reset_busy(args[0], "build_failed") print e return db.reset_busy(args[0], "build_done")
def execute(self, _args):
    db = ElbeDB()
    users = db.list_users()
    for u in users:
        print("%s: %s <%s>" % (u.name, u.fullname, u.email))
def execute(self, _args):
    db = ElbeDB()
    projects = db.list_projects()
    for p in projects:
        print("%s: %s [%s] %s" % (p.builddir, p.name, p.version, p.edit))
def execute(self, args):
    db = ElbeDB()
    users = db.list_users()
    for u in users:
        print u.name + ":", u.fullname, "<" + u.email + ">"
def execute(self, args):
    db = ElbeDB()
    projects = db.list_projects()
    for p in projects:
        print p.builddir + ":", p.name, "[", p.version, "]", p.edit
def execute(self, args): if len(args) != 1: print("usage: elbe db build <project_dir>") return db = ElbeDB() db.set_busy(args[0], [ "empty_project", "needs_build", "has_changes", "build_done", "build_failed" ]) try: ep = db.load_project(args[0]) ep.build() db.update_project_files(ep) # pylint: disable=broad-except except Exception as e: db.update_project_files(ep) db.reset_busy(args[0], "build_failed") print(str(e)) return db.reset_busy(args[0], "build_done")
def execute(self, args): if len (args) != 1: print "usage: elbe db build <project_dir>" return db = ElbeDB() db.set_busy( args[0], [ "empty_project", "needs_build", "has_changes", "build_done", "build_failed" ] ) try: ep = db.load_project( args[0] ) ep.build( skip_debootstrap = True ) db.update_project_files( ep ) except Exception as e: db.update_project_files( ep ) db.reset_busy( args[0], "build_failed" ) print e return db.reset_busy( args[0], "build_done" )
def run_command(argv):
    oparser = OptionParser(usage="usage: %prog buildchroot [options] <xmlfile>")
    oparser.add_option("-t", "--target", dest="target",
                       help="directoryname of target")
    oparser.add_option("-o", "--output", dest="output",
                       help="name of logfile")
    oparser.add_option("-n", "--name", dest="name",
                       help="name of the project (included in the report)")
    oparser.add_option("--skip-pbuild", action="store_true",
                       dest="skip_pbuild", default=False,
                       help="skip building packages from <pbuilder> list")
    oparser.add_option("--build-bin", action="store_true",
                       dest="build_bin", default=False,
                       help="Build Binary Repository CDROM, for exact Reproduction")
    oparser.add_option("--build-sources", action="store_true",
                       dest="build_sources", default=False,
                       help="Build Source CD")
    oparser.add_option("--proxy", dest="proxy",
                       help="Override the http proxy")
    oparser.add_option("--debug", action="store_true",
                       dest="debug", default=False,
                       help="Enable various features to debug the build")
    oparser.add_option("--buildtype", dest="buildtype",
                       help="Override the buildtype")
    oparser.add_option("--cdrom-size", action="store", dest="cdrom_size",
                       default=CDROM_SIZE, help="ISO CD size in MB")
    oparser.add_option("--skip-validation", action="store_true",
                       dest="skip_validation", default=False,
                       help="Skip xml schema validation")
    oparser.add_option("--skip-debootstrap", action="store_true",
                       dest="skip_debootstrap", default=False,
                       help="Skip debootstrap")
    oparser.add_option("--skip-pkglist", action="store_true",
                       dest="skip_pkglist", default=False,
                       help="ignore changes of the package list")
    oparser.add_option("--skip-cdrom", action="store_true",
                       dest="skip_cdrom", default=False,
                       help="(now obsolete) Skip cdrom iso generation")

    (opt, args) = oparser.parse_args(argv)

    if len(args) != 1:
        print "wrong number of arguments"
        oparser.print_help()
        sys.exit(20)

    if not opt.target:
        print "No target specified"
        sys.exit(20)

    if opt.skip_cdrom:
        print "WARNING: Skip CDROMS is now the default, use --build-bin to build binary CDROM"

    try:
        project = ElbeProject(opt.target, args[0], opt.output, opt.name,
                              opt.buildtype, opt.skip_validation)
    except ValidationError as e:
        print str(e)
        print "xml validation failed. Bailing out"
        sys.exit(20)

    try:
        project.build(opt.skip_debootstrap, opt.build_bin,
                      opt.build_sources, opt.cdrom_size, opt.debug,
                      opt.skip_pkglist, opt.skip_pbuild)
    except CommandError as ce:
        print "command in project build failed:", ce.cmd
        sys.exit(20)

    try:
        db = ElbeDB()
        db.save_project(project)
    except OperationalError:
        print "failed to save project in database"
        sys.exit(20)
class ProjectManager(object): def __init__(self, basepath): self.basepath = basepath # Base path for new projects self.db = ElbeDB() # Database of projects and users self.worker = AsyncWorker(self.db) self.userid2project = {} # (userid, ElbeProject) map of open projects self.builddir2userid = {} # (builddir, userid) map of open projects self.lock = Lock() # Lock protecting our data def stop(self): self.worker.stop() def create_project(self, userid, xml_file): subdir = str(uuid4()) builddir = path.join(self.basepath, subdir) with self.lock: # Try to close old project, if any self._close_current_project(userid) self.db.create_project(builddir, owner_id=userid) try: self.db.set_xml(builddir, xml_file) except: # Delete the project, if we cannot assign an XML file self.db.del_project(builddir) raise # Open the new project logpath = path.join(builddir, "log.txt") ep = self.db.load_project(builddir, logpath) self.userid2project[userid] = ep self.builddir2userid[builddir] = userid return builddir def open_project(self, userid, builddir): self._check_project_permission(userid, builddir) with self.lock: if builddir in self.builddir2userid: if self.builddir2userid[builddir] == userid: # Same project selected again by the same user, don't do # anything return else: # Already opened by a different user raise AlreadyOpen( builddir, self.db.get_username(self.builddir2userid[builddir])) # Try to close the old project of the user, if any self._close_current_project(userid) # Load project from the database logpath = path.join(builddir, "log.txt") ep = self.db.load_project(builddir, logpath) # Add project to our dictionaries self.userid2project[userid] = ep self.builddir2userid[builddir] = userid def close_current_project(self, userid): with self.lock: self._close_current_project(userid) def del_project(self, userid, builddir): self._check_project_permission(userid, builddir) with self.lock: # Does anyone have the project opened right now? if builddir in self.builddir2userid: if self.builddir2userid[builddir] == userid: # If the calling user has opened it, then close it and # proceed if closed sucessfully. 
self._close_current_project(userid) else: # TODO: Admin should be allowed to delete projects # that are currently opened by other users raise AlreadyOpen( builddir, self.db.get_username(self.builddir2userid[builddir])) self.db.del_project(builddir) def get_current_project_data(self, userid): with self.lock: builddir = self._get_current_project(userid).builddir return self.db.get_project_data(builddir) def get_current_project_files(self, userid): with self.lock: builddir = self._get_current_project(userid).builddir return self.db.get_project_files(builddir) def open_current_project_file(self, userid, filename, mode='r'): with self.lock: builddir = self._get_current_project(userid, allow_busy=False).builddir pfd = self.db.get_project_file(builddir, filename) return OpenProjectFile(pfd, mode) def set_current_project_private_data(self, userid, private_data): with self.lock: ep = self._get_current_project(userid) ep.private_data = private_data def get_current_project_private_data(self, userid): private_data = None with self.lock: ep = self._get_current_project(userid) private_data = ep.private_data return private_data def set_current_project_xml(self, userid, xml_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.db.set_xml(ep.builddir, xml_file) def set_current_project_upload_cdrom(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) ep.xml.set_cdrom_mirror( path.join(ep.builddir, 'uploaded_cdrom.iso')) ep.sync_xml_to_disk() # Make db reload the xml file self.db.set_xml(ep.builddir, None) def set_current_project_postbuild(self, userid, postbuild_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) f = self.db.set_postbuild(ep.builddir, postbuild_file) ep.postbuild_file = f def set_current_project_savesh(self, userid, savesh_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) f = self.db.set_savesh(ep.builddir, savesh_file) ep.savesh_file = f def set_current_project_presh(self, userid, presh_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) f = self.db.set_presh(ep.builddir, presh_file) ep.presh_file = f def set_current_project_postsh(self, userid, postsh_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) f = self.db.set_postsh(ep.builddir, postsh_file) ep.postsh_file = f def set_current_project_version(self, userid, new_version): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.db.set_project_version(ep.builddir, new_version) ep.xml.node("/project/version").set_text(new_version) def list_current_project_versions(self, userid): with self.lock: ep = self._get_current_project(userid) return self.db.list_project_versions(ep.builddir) def save_current_project_version(self, userid, description=None): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(SaveVersionJob(ep, description)) def checkout_project_version(self, userid, version): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(CheckoutVersionJob(ep, version)) def set_current_project_version_description(self, userid, version, description): with self.lock: ep = self._get_current_project(userid) self.db.set_version_description(ep.builddir, version, description) def del_current_project_version(self, userid, version): with self.lock: ep = self._get_current_project(userid, allow_busy=False) name = ep.xml.text("project/name") self.db.del_version(ep.builddir, 
version) # Delete corresponding package archive, if existing pkgarchive = get_versioned_filename(name, version, ".pkgarchive") pkgarchive_path = path.join(ep.builddir, pkgarchive) try: rmtree(pkgarchive_path) except OSError as e: if e.errno != errno.ENOENT: raise def build_current_project(self, userid, build_bin, build_src): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(BuildJob(ep, build_bin, build_src)) def build_pbuilder(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(CreatePbuilderJob(ep)) def build_current_pdebuild(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) if not path.isdir(path.join(ep.builddir, "pbuilder")): raise InvalidState( 'No pbuilder exists: run "elbe pbuilder create --project %s" first' % ep.builddir) self.worker.enqueue(PdebuildJob(ep)) def build_sysroot(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(BuildSysrootJob(ep)) def build_update_package(self, userid, base_version): with self.lock: c = self._get_current_project_apt_cache(userid) if c.get_changes(): raise InvalidState( "project %s has uncommited package changes, " "please commit them first") ep = self._get_current_project(userid) self.worker.enqueue(GenUpdateJob(ep, base_version)) def apt_upd_upgr(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(APTUpdUpgrJob(ep)) def apt_update(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(APTUpdateJob(ep)) def apt_commit(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(APTCommitJob(ep)) def apt_clear(self, userid): with self.lock: c = self._get_current_project_apt_cache(userid) c.clear() def apt_mark_install(self, userid, pkgname, version): with self.lock: c = self._get_current_project_apt_cache(userid) c.mark_install(pkgname, version) ep = self._get_current_project(userid) pkgs = ep.xml.get_target_packages() if not pkgname in pkgs: pkgs.append(pkgname) ep.xml.set_target_packages(pkgs) def apt_mark_upgrade(self, userid, pkgname, version): with self.lock: c = self._get_current_project_apt_cache(userid) c.mark_upgrade(pkgname, version) def apt_mark_delete(self, userid, pkgname, version): with self.lock: c = self._get_current_project_apt_cache(userid) ep = self._get_current_project(userid) pkgs = ep.xml.get_target_packages() if pkgname in pkgs: pkgs.remove(pkgname) c.mark_delete(pkgname, version) ep.xml.set_target_packages(pkgs) debootstrap_pkgs = [] for p in ep.xml.xml.node("debootstrappkgs"): debootstrap_pkgs.append(p.et.text) # temporary disabled because of # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=776057 # the functions cleans up to much # c.cleanup(debootstrap_pkgs + pkgs) def get_debootstrap_pkgs(self, userid): with self.lock: ep = self._get_current_project(userid) debootstrap_pkgs = [] for p in ep.xml.xml.node("debootstrappkgs"): debootstrap_pkgs.append(p.et.text) return debootstrap_pkgs def apt_mark_keep(self, userid, pkgname, version): with self.lock: c = self._get_current_project_apt_cache(userid) c.mark_keep(pkgname, version) ep = self._get_current_project(userid) pkgs = ep.xml.get_target_packages() if not pkgname in pkgs: pkgs.append(pkgname) ep.xml.set_target_packages(pkgs) def apt_get_target_packages(self, userid): with self.lock: ep = self._get_current_project(userid) 
return ep.xml.get_target_packages() def apt_upgrade(self, userid, dist_upgrade=False): with self.lock: c = self._get_current_project_apt_cache(userid) c.upgrade(dist_upgrade) def apt_get_changes(self, userid): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_changes() def apt_get_marked_install(self, userid, section='all'): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_marked_install(section=section) def apt_get_installed(self, userid, section='all'): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_installed_pkgs(section=section) def apt_get_upgradeable(self, userid, section='all'): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_upgradeable(section=section) def apt_get_pkglist(self, userid, section='all'): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_pkglist(section) def apt_get_pkg(self, userid, term): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_pkg(term) def apt_get_pkgs(self, userid, term): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_pkgs(term) def apt_get_sections(self, userid): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_sections() def read_current_project_log(self, userid): with self.lock: ep = self._get_current_project(userid) logpath = path.join(ep.builddir, "log.txt") f = open(logpath, "r") try: data = f.read() finally: f.close() return data def current_project_has_changes(self, userid): with self.lock: builddir = self._get_current_project(userid).builddir return self.db.has_changes(builddir) def current_project_is_busy(self, userid): with self.lock: builddir = self._get_current_project(userid).builddir return self.db.is_busy(builddir) def _get_current_project(self, userid, allow_busy=True): # Must be called with self.lock held if not userid in self.userid2project: raise NoOpenProject() ep = self.userid2project[userid] if not allow_busy: if self.db.is_busy(ep.builddir): raise InvalidState("project %s is busy" % ep.builddir) return ep def _close_current_project(self, userid): # Must be called with self.lock held if userid in self.userid2project: builddir = self.userid2project[userid].builddir if self.db.is_busy(builddir): raise InvalidState( "project in directory %s of user %s is currently busy and cannot be closed" % (builddir, self.db.get_username(userid))) del self.builddir2userid[builddir] del self.userid2project[userid] def _check_project_permission(self, userid, builddir): if self.db.is_admin(userid): # Admin may access all projects return if self.db.get_owner_id(builddir) != userid: # Project of another user, deny access raise PermissionDenied(builddir) # User is owner, so allow it def _get_current_project_apt_cache(self, userid): # Must be called with self.lock held ep = self._get_current_project(userid, allow_busy=False) if not ep.has_full_buildenv(): raise InvalidState( "project in directory %s does not have a functional " "build environment" % ep.builddir) return ep.get_rpcaptcache()
class ProjectManager: # pylint: disable=too-many-public-methods def __init__(self, basepath): self.basepath = basepath # Base path for new projects self.db = ElbeDB() # Database of projects and users self.worker = AsyncWorker(self.db) # (userid, ElbeProject) map of open projects self.userid2project = {} self.builddir2userid = {} # (builddir, userid) map of open projects self.lock = Lock() # Lock protecting our data def stop(self): self.worker.stop() def new_project(self, userid): subdir = str(uuid4()) builddir = path.join(self.basepath, subdir) self.db.create_project(builddir, owner_id=userid) return builddir def create_project(self, userid, xml_file, url_validation=ValidationMode.CHECK_ALL): subdir = str(uuid4()) builddir = path.join(self.basepath, subdir) with self.lock: # Try to close old project, if any self._close_current_project(userid) self.db.create_project(builddir, owner_id=userid) try: self.db.set_xml(builddir, xml_file) except BaseException: # Delete the project, if we cannot assign an XML file self.db.del_project(builddir) raise # Open the new project ep = self.db.load_project(builddir, url_validation=url_validation) self.userid2project[userid] = ep self.builddir2userid[builddir] = userid return builddir def open_project(self, userid, builddir, url_validation=ValidationMode.CHECK_ALL): self._check_project_permission(userid, builddir) with self.lock: if builddir in self.builddir2userid: if self.builddir2userid[builddir] == userid: # Same project selected again by the same user, don't do # anything return # Already opened by a different user raise AlreadyOpen( builddir, self.db.get_username(self.builddir2userid[builddir])) # Try to close the old project of the user, if any self._close_current_project(userid) # Load project from the database ep = self.db.load_project(builddir, url_validation=url_validation) # Add project to our dictionaries self.userid2project[userid] = ep self.builddir2userid[builddir] = userid def close_current_project(self, userid): with self.lock: self._close_current_project(userid) def del_project(self, userid, builddir): self._check_project_permission(userid, builddir) with self.lock: # Does anyone have the project opened right now? if builddir in self.builddir2userid: if self.builddir2userid[builddir] == userid: # If the calling user has opened it, then close it and # proceed if closed sucessfully. 
self._close_current_project(userid) else: # TODO: Admin should be allowed to delete projects # that are currently opened by other users raise AlreadyOpen( builddir, self.db.get_username(self.builddir2userid[builddir])) self.db.del_project(builddir) def get_current_project_data(self, userid): with self.lock: builddir = self._get_current_project(userid).builddir return self.db.get_project_data(builddir) def get_current_project_files(self, userid): with self.lock: builddir = self._get_current_project(userid).builddir return self.db.get_project_files(builddir) def open_current_project_file(self, userid, filename, mode='r'): with self.lock: builddir = self._get_current_project(userid, allow_busy=False).builddir pfd = self.db.get_project_file(builddir, filename) return OpenProjectFile(pfd, mode) def set_current_project_private_data(self, userid, private_data): with self.lock: ep = self._get_current_project(userid) ep.private_data = private_data def get_current_project_private_data(self, userid): private_data = None with self.lock: ep = self._get_current_project(userid) private_data = ep.private_data return private_data def set_current_project_xml(self, userid, xml_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.db.set_xml(ep.builddir, xml_file) def set_current_project_upload_cdrom(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) ep.xml.set_cdrom_mirror( path.join(ep.builddir, 'uploaded_cdrom.iso')) ep.sync_xml_to_disk() # Make db reload the xml file self.db.set_xml(ep.builddir, None) def set_current_project_postbuild(self, userid, postbuild_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) f = self.db.set_postbuild(ep.builddir, postbuild_file) ep.postbuild_file = f def set_current_project_savesh(self, userid, savesh_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) f = self.db.set_savesh(ep.builddir, savesh_file) ep.savesh_file = f def set_current_project_presh(self, userid, presh_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) f = self.db.set_presh(ep.builddir, presh_file) ep.presh_file = f def set_current_project_postsh(self, userid, postsh_file): with self.lock: ep = self._get_current_project(userid, allow_busy=False) f = self.db.set_postsh(ep.builddir, postsh_file) ep.postsh_file = f def set_current_project_version(self, userid, new_version): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.db.set_project_version(ep.builddir, new_version) ep.xml.node("/project/version").set_text(new_version) def list_current_project_versions(self, userid): with self.lock: ep = self._get_current_project(userid) return self.db.list_project_versions(ep.builddir) def save_current_project_version(self, userid, description=None): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(SaveVersionJob(ep, description)) def checkout_project_version(self, userid, version): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(CheckoutVersionJob(ep, version)) def set_current_project_version_description(self, userid, version, description): with self.lock: ep = self._get_current_project(userid) self.db.set_version_description(ep.builddir, version, description) def del_current_project_version(self, userid, version): with self.lock: ep = self._get_current_project(userid, allow_busy=False) name = ep.xml.text("project/name") self.db.del_version(ep.builddir, 
version) # Delete corresponding package archive, if existing pkgarchive = get_versioned_filename(name, version, ".pkgarchive") pkgarchive_path = path.join(ep.builddir, pkgarchive) try: rmtree(pkgarchive_path) except OSError as e: if e.errno != errno.ENOENT: raise def build_current_project(self, userid, build_bin, build_src, skip_pbuilder): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue( BuildJob(ep, build_bin, build_src, skip_pbuilder)) def update_pbuilder(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(UpdatePbuilderJob(ep)) def build_pbuilder(self, userid, cross, noccache, ccachesize): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue( CreatePbuilderJob(ep, ccachesize, cross, noccache)) def build_current_pdebuild(self, userid, cpuset, profile, cross): with self.lock: ep = self._get_current_project(userid, allow_busy=False) if (not path.isdir(path.join(ep.builddir, "pbuilder")) and not path.isdir(path.join(ep.builddir, "pbuilder_cross"))): raise InvalidState('No pbuilder exists: run "elbe pbuilder ' 'create --project %s" first' % ep.builddir) self.worker.enqueue(PdebuildJob(ep, cpuset, profile, cross)) def set_orig_fname(self, userid, fname): with self.lock: ep = self._get_current_project(userid, allow_busy=False) if (not path.isdir(path.join(ep.builddir, "pbuilder")) and not path.isdir(path.join(ep.builddir, "pbuilder_cross"))): raise InvalidState('No pbuilder exists: run "elbe pbuilder ' 'create --project %s" first' % ep.builddir) ep.orig_fname = fname ep.orig_files.append(fname) def get_orig_fname(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) if (not path.isdir(path.join(ep.builddir, "pbuilder")) and not path.isdir(path.join(ep.builddir, "pbuilder_cross"))): raise InvalidState('No pbuilder exists: run "elbe pbuilder ' 'create --project %s" first' % ep.builddir) return ep.orig_fname def build_chroot_tarball(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(BuildChrootTarJob(ep)) def build_sysroot(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(BuildSysrootJob(ep)) def build_sdk(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(BuildSDKJob(ep)) def build_cdroms(self, userid, build_bin, build_src): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(BuildCDROMsJob(ep, build_bin, build_src)) def build_update_package(self, userid, base_version): with self.lock: c = self._get_current_project_apt_cache(userid) if c.get_changes(): raise InvalidState( "project %s has uncommited package changes, " "please commit them first") ep = self._get_current_project(userid) self.worker.enqueue(GenUpdateJob(ep, base_version)) def apt_upd_upgr(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(APTUpdUpgrJob(ep)) def apt_update(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(APTUpdateJob(ep)) def apt_commit(self, userid): with self.lock: ep = self._get_current_project(userid, allow_busy=False) self.worker.enqueue(APTCommitJob(ep)) def apt_clear(self, userid): with self.lock: c = self._get_current_project_apt_cache(userid) c.clear() def apt_mark_install(self, userid, pkgname, version): with 
self.lock: c = self._get_current_project_apt_cache(userid) c.mark_install(pkgname, version) ep = self._get_current_project(userid) pkgs = ep.xml.get_target_packages() if pkgname not in pkgs: pkgs.append(pkgname) ep.xml.set_target_packages(pkgs) def apt_mark_upgrade(self, userid, pkgname, version): with self.lock: c = self._get_current_project_apt_cache(userid) c.mark_upgrade(pkgname, version) def get_debootstrap_pkgs(self, userid): with self.lock: ep = self._get_current_project(userid) debootstrap_pkgs = [] for p in ep.xml.xml.node("debootstrappkgs"): debootstrap_pkgs.append(p.et.text) return debootstrap_pkgs def apt_mark_keep(self, userid, pkgname, version): with self.lock: c = self._get_current_project_apt_cache(userid) c.mark_keep(pkgname, version) ep = self._get_current_project(userid) pkgs = ep.xml.get_target_packages() if pkgname not in pkgs: pkgs.append(pkgname) ep.xml.set_target_packages(pkgs) def apt_get_target_packages(self, userid): with self.lock: ep = self._get_current_project(userid) return ep.xml.get_target_packages() def apt_upgrade(self, userid, dist_upgrade=False): with self.lock: c = self._get_current_project_apt_cache(userid) c.upgrade(dist_upgrade) def apt_get_changes(self, userid): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_changes() def apt_get_marked_install(self, userid, section='all'): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_marked_install(section=section) def apt_get_installed(self, userid, section='all'): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_installed_pkgs(section=section) def apt_get_upgradeable(self, userid, section='all'): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_upgradeable(section=section) def apt_get_pkglist(self, userid, section='all'): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_pkglist(section) def apt_get_pkg(self, userid, term): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_pkg(term) def apt_get_pkgs(self, userid, term): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_pkgs(term) def apt_get_sections(self, userid): with self.lock: c = self._get_current_project_apt_cache(userid) return c.get_sections() def read_current_project_log(self, userid): with self.lock: ep = self._get_current_project(userid) logpath = path.join(ep.builddir, "log.txt") f = open(logpath, "r") try: data = f.read() finally: f.close() return data def rm_log(self, userid): ep = self._get_current_project(userid) with open(os.path.join(ep.builddir, 'log.txt'), 'wb', 0): pass def add_deb_package(self, userid, filename): ep = self._get_current_project(userid) t = os.path.splitext(filename)[1] # filetype of uploaded file pkg_name = filename.split('_')[0] if t == '.dsc': ep.repo.includedsc(os.path.join(ep.builddir, filename), force=True) elif t == '.deb': ep.repo.includedeb(os.path.join(ep.builddir, filename), pkgname=pkg_name, force=True) elif t == '.changes': ep.repo.include(os.path.join(ep.builddir, filename), force=True) ep.repo.finalize() def current_project_has_changes(self, userid): with self.lock: builddir = self._get_current_project(userid).builddir return self.db.has_changes(builddir) def current_project_is_busy(self, userid): with self.lock: ep = self._get_current_project(userid) msg = read_loggingQ(ep.builddir) return self.db.is_busy(ep.builddir), msg def _get_current_project(self, userid, allow_busy=True): # Must be called with self.lock held 
if userid not in self.userid2project: raise NoOpenProject() ep = self.userid2project[userid] if not allow_busy: if self.db.is_busy(ep.builddir): raise InvalidState("project %s is busy" % ep.builddir) return ep def _close_current_project(self, userid): # Must be called with self.lock held if userid in self.userid2project: builddir = self.userid2project[userid].builddir if self.db.is_busy(builddir): raise InvalidState("project in directory %s of user %s is " "currently busy and cannot be closed" % (builddir, self.db.get_username(userid))) del self.builddir2userid[builddir] del self.userid2project[userid] def _check_project_permission(self, userid, builddir): if self.db.is_admin(userid): # Admin may access all projects return if self.db.get_owner_id(builddir) != userid: # Project of another user, deny access raise PermissionDenied(builddir) # User is owner, so allow it def _get_current_project_apt_cache(self, userid): # Must be called with self.lock held ep = self._get_current_project(userid, allow_busy=False) if not ep.has_full_buildenv(): raise InvalidState( "project in directory %s does not have a functional " "build environment" % ep.builddir) return ep.get_rpcaptcache()
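# A minimal usage sketch for the ProjectManager class above, written
# against the newer method signatures (create_project with optional
# url_validation, build_current_project with skip_pbuilder).  The import
# path follows elbe's package layout; the base path, user id and XML file
# are illustrative, and a configured elbe installation with an
# initialized project database is assumed.
from elbepack.projectmanager import ProjectManager

pm = ProjectManager("/var/cache/elbe")
builddir = pm.create_project(userid=1, xml_file="/tmp/example.xml")
pm.build_current_project(userid=1, build_bin=False, build_src=False,
                         skip_pbuilder=True)
print(pm.current_project_is_busy(userid=1))
pm.close_current_project(userid=1)
pm.stop()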
def run_command(argv):
    oparser = OptionParser(usage="usage: %prog buildchroot [options] <xmlfile>")
    oparser.add_option("-t", "--target", dest="target",
                       help="directoryname of target")
    oparser.add_option("-o", "--output", dest="output",
                       help="name of logfile")
    oparser.add_option("-n", "--name", dest="name",
                       help="name of the project (included in the report)")
    oparser.add_option("--build-bin", action="store_true",
                       dest="build_bin", default=False,
                       help="Build Binary Repository CDROM, for exact Reproduction")
    oparser.add_option("--build-sources", action="store_true",
                       dest="build_sources", default=False,
                       help="Build Source CD")
    oparser.add_option("--proxy", dest="proxy",
                       help="Override the http proxy")
    oparser.add_option("--debug", action="store_true",
                       dest="debug", default=False,
                       help="Enable various features to debug the build")
    oparser.add_option("--buildtype", dest="buildtype",
                       help="Override the buildtype")
    oparser.add_option("--cdrom-size", action="store", dest="cdrom_size",
                       default=CDROM_SIZE, help="ISO CD size in MB")
    oparser.add_option("--skip-validation", action="store_true",
                       dest="skip_validation", default=False,
                       help="Skip xml schema validation")
    oparser.add_option("--skip-debootstrap", action="store_true",
                       dest="skip_debootstrap", default=False,
                       help="Skip debootstrap")
    oparser.add_option("--skip-pkglist", action="store_true",
                       dest="skip_pkglist", default=False,
                       help="ignore changes of the package list")
    oparser.add_option("--skip-cdrom", action="store_true",
                       dest="skip_cdrom", default=False,
                       help="(now obsolete) Skip cdrom iso generation")

    (opt, args) = oparser.parse_args(argv)

    if len(args) != 1:
        print "wrong number of arguments"
        oparser.print_help()
        sys.exit(20)

    if not opt.target:
        print "No target specified"
        sys.exit(20)

    if opt.skip_cdrom:
        print "WARNING: Skip CDROMS is now the default, use --build-bin to build binary CDROM"

    try:
        project = ElbeProject(opt.target, args[0], opt.output, opt.name,
                              opt.buildtype, opt.skip_validation)
    except ValidationError as e:
        print str(e)
        print "xml validation failed. Bailing out"
        sys.exit(20)

    try:
        project.build(opt.skip_debootstrap, opt.build_bin,
                      opt.build_sources, opt.cdrom_size, opt.debug,
                      opt.skip_pkglist)
    except CommandError as ce:
        print "command in project build failed:", ce.cmd
        sys.exit(20)

    try:
        db = ElbeDB()
        db.save_project(project)
    except OperationalError:
        print "failed to save project in database"
        sys.exit(20)
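# A hypothetical direct invocation of the buildchroot entry point above,
# roughly equivalent to running "elbe buildchroot -t <dir> <xmlfile>".
# The module path follows elbe's command layout; the target directory and
# XML file are illustrative, and the call assumes a build host on which
# elbe and its dependencies are installed.
from elbepack.commands.buildchroot import run_command

run_command(["--target", "/var/cache/elbe/demo-build",
             "--name", "demo",
             "/tmp/example.xml"])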