Example #1
def discover_network_types(dbapi_con, connection_record):  # pylint: disable=W0613
    config = Config()
    if not config.has_option("broker", "default_network_type"):  # pragma: no cover
        raise InternalError("The default_network_type option is missing from "
                            "the [broker] section in the configuration.")

    default_type = config.get("broker", "default_network_type")
    default_section = "network_" + default_type
    if not config.has_section(default_section):  # pragma: no cover
        raise InternalError("The default network type is %s, but there's no "
                            "section named [%s] in the configuration." %
                            (default_type, default_section))

    nettypes = {}

    # This function should be called only once, but you never know...
    if Network.network_type_map:
        return

    for section in config.sections():
        if not section.startswith("network_"):
            continue
        name = section[8:]
        nettypes[name] = NetworkProperties(config, name)
        LOGGER.info("Configured network type %s" % name)

    Network.network_type_map = nettypes
    Network.default_network_props = nettypes[default_type]
Example #2
    def __init__(self, dbobj=None, logger=LOGGER):
        self.config = Config()
        self.dbobj = dbobj
        self.logger = logger

        if self.template_type is None:
            raise InternalError("Plenary class %s did not set the template "
                                "type" % self.__class__.__name__)

        # Object templates live under the branch-specific build directory.
        # Everything else lives under the common plenary directory.
        if self.template_type == "object":
            if not dbobj or not hasattr(dbobj, "branch"):
                raise InternalError("Plenaries meant to be compiled need a DB "
                                    "object that has a branch; got: %r" %
                                    dbobj)
            self.dir = "%s/domains/%s/profiles" % (self.config.get(
                "broker", "builddir"), dbobj.branch.name)
        else:
            self.dir = self.config.get("broker", "plenarydir")

        self.loadpath = None
        self.plenary_template = None
        self.plenary_core = None

        self.new_content = None
        # The following attributes are for stash/restore_stash
        self.old_content = None
        self.old_mtime = None
        self.stashed = False
        self.removed = False
        self.changed = False
Example #3
    def onEnter(self, dbcluster):
        dbdecommissioned = HostLifecycle.get_unique(object_session(dbcluster),
                                                    "decommissioned",
                                                    compel=True)

        config = Config()
        archetype = dbcluster.personality.archetype
        section = "archetype_" + archetype.name
        opt = "allow_cascaded_deco"

        if dbcluster.hosts and (not config.has_option(section, opt)
                                or not config.getboolean(section, opt)):
            raise ArgumentError("Cannot change state to {0}, as {1}'s "
                                "archetype is {2}.".format(
                                    dbdecommissioned.name, dbcluster,
                                    archetype.name))

        if dbcluster.machines:
            raise ArgumentError("Cannot change state to {0}, as {1} has "
                                "{2} VM(s).".format(dbdecommissioned.name,
                                                    dbcluster,
                                                    len(dbcluster.machines)))

        for dbhost in dbcluster.hosts:
            dbhost.status.transition(dbhost, dbdecommissioned)
Example #4
    def __init__(self, logger=LOGGER):
        config = Config()
        self.logger = logger
        self.dsdb = config.get("broker", "dsdb")
        self.dsdb_use_testdb = config.getboolean("broker", "dsdb_use_testdb")
        self.location_sync = config.getboolean("broker", "dsdb_location_sync")
        self.actions = []
        self.rollback_list = []
Example #5
class ObjectFormatter(object):
    """This class and its subclasses are meant to do the real work of
        formatting individual objects.  The standard instance methods
        do the heavy lifting, which the static methods allow for
        delegation when needed.

        The instance methods (format_*) provide default implementations,
        but it is expected that they will be overridden to provide more
        useful information.
     """

    loaded_protocols = {}
    """The loaded_protocols dict will store the modules that are being
    loaded for each requested protocol. Rather than trying to import one
    each time, the dict can be checked and value returned."""
    config = Config()
    protodir = config.get("protocols", "directory")

    handlers = {}
    """ The handlers dictionary should have an entry for every subclass.
        Typically this will be defined immediately after defining the
        subclass.

    """

    mako_dir = os.path.join(config.get("broker", "srcdir"), "lib", "python2.6",
                            "aquilon", "worker", "formats", "mako")
    # Be careful about using the module_directory and cache!
    # Not using module_directory so that we don't have to worry about stale
    # files hanging around on upgrade.  Race conditions in writing the files
    # might also be an issue when we switch to multi-process.
    # Not using cache because it only has the lifetime of the template, and
    # because we do not have the beaker module installed.
    lookup_raw = TemplateLookup(
        directories=[os.path.join(mako_dir, "raw")],
        imports=['from string import rstrip',
                 'from aquilon.worker.formats.formatters import shift'],
        default_filters=['unicode', 'rstrip'])
    lookup_html = TemplateLookup(directories=[os.path.join(mako_dir, "html")])

    def __init__(self):
        if hasattr(self, "protocol"):
            if self.protocol not in self.loaded_protocols:
                try:
                    self.loaded_protocols[self.protocol] = __import__(
                        self.protocol)
                except ImportError, e:  # pragma: no cover
                    self.loaded_protocols[self.protocol] = False
                    error = "path %s protocol: %s error: %s" % (
                        self.protodir, self.protocol, e)
                    raise ProtocolError(error)
            else:  # pragma: no cover
                if self.loaded_protocols[self.protocol] is False:
                    error = ("path %s protocol: %s error: previous import "
                             "attempt was unsuccessful" %
                             (self.protodir, self.protocol))
                    raise ProtocolError(error)
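
A minimal sketch of the registration pattern the handlers docstring above describes: define a subclass, override a format_* method, then register an instance keyed by the class it formats. The HostFormatter name, the Host key and the fqdn attribute are illustrative assumptions, not taken from the example.

class HostFormatter(ObjectFormatter):
    # Override the default format_raw() with something more useful for Host
    # objects (host.fqdn is assumed to exist on the formatted object).
    def format_raw(self, host, indent=""):
        return indent + "Host: %s" % host.fqdn

# Typically placed immediately after the subclass definition: register an
# instance in the shared handlers dict, keyed by the formatted class
# (an assumed convention for this sketch).
ObjectFormatter.handlers[Host] = HostFormatter()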
Example #6
    def __init__(self, twistd=None, configfile=None):
        self.twistd = twistd or self.default_twistd
        self.configfile = configfile or self.default_configfile
        self.config = Config(configfile=self.configfile)
        self.pidfile = os.path.join(self.config.get("broker", "rundir"),
                                    "aqd.pid")
        self.logfile = self.config.get("broker", "logfile")
        self.coverage = os.path.join(self.config.get("broker", "logdir"),
                                     "aqd.coverage")
Example #7
    def testclonetemplateking(self):
        config = Config()
        source = config.get("unittest", "template_base")
        dest = config.get("broker", "kingdir")
        p = Popen(("/bin/rm", "-rf", dest), stdout=1, stderr=2)
        rc = p.wait()
        self.assertEqual(
            rc, 0, "Failed to clear old template-king directory '%s'" % dest)
        env = {}
        env["PATH"] = "%s:%s" % (config.get(
            "broker", "git_path"), os.environ.get("PATH", ""))
        p = Popen(("git", "clone", "--bare", source, dest),
                  env=env,
                  stdout=PIPE,
                  stderr=PIPE)
        (out, err) = p.communicate()
        # Ignore out/err unless we get a non-zero return code, then log it.
        self.assertEqual(
            p.returncode, 0,
            "Non-zero return code for clone of template-king, "
            "STDOUT:\n@@@\n'%s'\n@@@\nSTDERR:\n@@@\n'%s'\n@@@\n" % (out, err))
        # This value can be used to test against a different branch/commit
        # than the current 'prod'.
        new_prod = None
        if config.has_option("unittest", "template_alternate_prod"):
            new_prod = config.get("unittest", "template_alternate_prod")

        if new_prod:
            for domain in ['prod', 'ny-prod']:
                p = Popen(("git", "push", ".", '+%s:%s' % (new_prod, domain)),
                          env=env,
                          cwd=dest,
                          stdout=PIPE,
                          stderr=PIPE)
                (out, err) = p.communicate()
                # Ignore out/err unless we get a non-zero return code, then log it.
                self.assertEqual(
                    p.returncode, 0,
                    "Non-zero return code while setting alternate "
                    "'%s' branch locally to '%s':"
                    "\nSTDOUT:\n@@@\n'%s'\n@@@\n"
                    "\nSTDERR:\n@@@\n'%s'\n@@@\n" %
                    (domain, new_prod, out, err))

        # Set the default branch
        p = Popen(("git", "symbolic-ref", "HEAD", "refs/heads/prod"),
                  env=env,
                  cwd=dest,
                  stdout=PIPE,
                  stderr=PIPE)
        (out, err) = p.communicate()
        self.assertEqual(
            p.returncode, 0, "Non-zero return code while setting HEAD "
            "to refs/heads/prod:"
            "\nSTDOUT:\n@@@\n'%s'\n@@@\n"
            "\nSTDERR:\n@@@\n'%s'\n@@@\n" % (out, err))
        return
Example #8
    def setUp(self):
        self.config = Config()
        self.net = DummyNetworks()

        # Need to import protocol buffers after we have the config
        # object all squared away and we can set the sys.path
        # variable appropriately.
        # It would be simpler just to change sys.path in runtests.py,
        # but this allows for each test to be run individually (without
        # the runtests.py wrapper).
        protodir = self.config.get("protocols", "directory")
        if protodir not in sys.path:
            sys.path.append(protodir)
        for m in [
                'aqdsystems_pb2', 'aqdnetworks_pb2', 'aqdservices_pb2',
                'aqddnsdomains_pb2', 'aqdlocations_pb2', 'aqdaudit_pb2',
                'aqdparamdefinitions_pb2', 'aqdparameters_pb2'
        ]:
            globals()[m] = __import__(m)

        self.user = self.config.get("broker", "user")
        self.sandboxdir = os.path.join(
            self.config.get("broker", "templatesdir"), self.user)
        self.template_extension = self.config.get("panc", "template_extension")

        # This method is cumbersome.  Should probably develop something
        # like unittest.conf.defaults.
        if self.config.has_option("unittest", "scratchdir"):
            self.scratchdir = self.config.get("unittest", "scratchdir")
            if not os.path.exists(self.scratchdir):
                os.makedirs(self.scratchdir)
        if self.config.has_option("unittest", "aurora_with_node"):
            self.aurora_with_node = self.config.get("unittest",
                                                    "aurora_with_node")
        else:
            self.aurora_with_node = "oyidb1622"
        if self.config.has_option("unittest", "aurora_without_node"):
            self.aurora_without_node = self.config.get("unittest",
                                                       "aurora_without_node")
        else:
            self.aurora_without_node = "pissp1"
        self.gzip_profiles = self.config.getboolean("panc", "gzip_output")
        self.profile_suffix = ".xml.gz" if self.gzip_profiles else ".xml"

        dsdb_coverage_dir = os.path.join(
            self.config.get("unittest", "scratchdir"), "dsdb_coverage")
        for name in [
                DSDB_EXPECT_SUCCESS_FILE, DSDB_EXPECT_FAILURE_FILE,
                DSDB_ISSUED_CMDS_FILE, DSDB_EXPECT_FAILURE_ERROR
        ]:
            path = os.path.join(dsdb_coverage_dir, name)
            try:
                os.remove(path)
            except OSError:
                pass
Example #9
def cache_storage_data(only=None):
    """
    Scan a storeng-style data file, checking each line as we go

    Storeng-style data files are blocks of data. Each block starts
    with a comment describing the fields for all subsequent lines. A
    block can start at any time. Fields are separated by '|'.
    This function will invoke the function after parsing every data
    line. The function will be called with a dict of the fields. If the
    function returns True, then we stop scanning the file, else we continue
    on until there is nothing left to parse.

    dbshare can be a Share
    """

    config = Config()
    sharedata = {}
    found_header = False
    header_idx = {}
    with open(config.get("broker", "sharedata")) as datafile:
        for line in datafile:
            if line[0] == '#':
                # A header line
                found_header = True
                hdr = line[1:].rstrip().split('|')

                header_idx = {}
                for idx, name in enumerate(hdr):
                    header_idx[name] = idx

                # Silently discard lines that don't have all the required info
                for k in ["objtype", "pshare", "server", "dg"]:
                    if k not in header_idx:
                        found_header = False
            elif not found_header:
                # We haven't found the right header line
                continue
            else:
                fields = line.rstrip().split('|')
                if len(fields) != len(header_idx):  # Silently ignore invalid lines
                    continue
                if fields[header_idx["objtype"]] != "pshare":
                    continue

                sharedata[fields[header_idx["pshare"]]] = ShareInfo(
                    server=fields[header_idx["server"]],
                    mount="/vol/%s/%s" % (fields[header_idx["dg"]],
                                          fields[header_idx["pshare"]])
                )

                # Take a shortcut if we need just a single entry
                if only and only == fields[header_idx["pshare"]]:
                    break

        return sharedata
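
The docstring above describes the storeng-style layout: a '#' header line names the '|'-separated fields and data lines follow until the next header. A hypothetical fragment with invented share, server and disk-group names, plus the result it would produce:

# Hypothetical contents of the [broker] sharedata file; all values invented.
SAMPLE_SHAREDATA = """\
#objtype|pshare|server|dg
pshare|example_share|filer1|dg01
pshare|another_share|filer2|dg02
"""
# cache_storage_data() would then return, for example:
#   {"example_share": ShareInfo(server="filer1",
#                               mount="/vol/dg01/example_share"), ...}
# and cache_storage_data(only="example_share") would stop after that entry.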
Example #10
    def _snapshot_db(self, test):
        # If there was an error, and we're using SQLite, create a snapshot
        # TODO: create a git-managed snapshot of the plenaries/profiles as well
        config = Config()
        dsn = config.get("database", "dsn")
        if dsn.startswith("sqlite:///"):

            dbfile = dsn[10:]
            target = dbfile + ".%s:%s" % (test.__class__.__name__,
                                          test._testMethodName)
            call(["/bin/cp", "-a", dbfile, target])
Example #11
    def outputdirs(self):
        """Returns a list of directories that should exist before compiling"""
        config = Config()
        dirs = []
        dirs.append(config.get("broker", "profilesdir"))
        # The regression tests occasionally have issues with panc
        # auto-creating this directory - not sure why.
        if self.domain.clusters:
            dirs.append(os.path.join(config.get("broker", "quattordir"),
                                     "build", "xml", self.domain.name,
                                     "clusters"))
        return dirs
Example #12
    def __init__(self, dbhost, logger=LOGGER):
        if not isinstance(dbhost, Host):
            raise InternalError("PlenaryHost called with %s instead of Host" %
                                dbhost.__class__.__name__)
        PlenaryCollection.__init__(self, logger=logger)
        self.dbobj = dbhost
        self.config = Config()
        if self.config.getboolean("broker", "namespaced_host_profiles"):
            self.plenaries.append(PlenaryNamespacedHost(dbhost))
        if self.config.getboolean("broker", "flat_host_profiles"):
            self.plenaries.append(PlenaryToplevelHost(dbhost))
        self.plenaries.append(PlenaryHostData(dbhost))
Example #13
    def testdisabletemplatetests(self):
        config = Config()
        kingdir = config.get("broker", "kingdir")
        rundir = config.get("broker", "rundir")
        env = {}
        env["PATH"] = "%s:%s" % (config.get(
            "broker", "git_path"), os.environ.get("PATH", ""))

        tempdir = mkdtemp(prefix="fixup", dir=rundir)

        p = Popen(("git", "clone", "--shared", kingdir, "template-king",
                   "--branch", "prod"),
                  cwd=tempdir,
                  env=env,
                  stdout=PIPE,
                  stderr=PIPE)
        out, err = p.communicate()
        self.assertEqual(p.returncode, 0, "Failed to clone template-king")

        repodir = os.path.join(tempdir, "template-king")
        makefile = os.path.join(repodir, "Makefile")
        if os.path.exists(os.path.join(repodir, "t", "Makefile")):
            p = Popen(("git", "rm", "-f", os.path.join("t", "Makefile")),
                      cwd=repodir,
                      env=env,
                      stdout=PIPE,
                      stderr=PIPE)
            out, err = p.communicate()
            self.assertEqual(p.returncode, 0, "Failed to remove t/Makefile")

            p = Popen(("git", "commit", "-m", "Removed t/Makefile"),
                      cwd=repodir,
                      env=env,
                      stdout=PIPE,
                      stderr=PIPE)
            out, err = p.communicate()
            self.assertEqual(p.returncode, 0,
                             "Failed to commit removal of t/Makefile")

            for branch in ['prod', 'ny-prod']:
                p = Popen(("git", "push", "origin", "prod:%s" % branch),
                          cwd=repodir,
                          env=env,
                          stdout=PIPE,
                          stderr=PIPE)
                out, err = p.communicate()
                self.assertEqual(
                    p.returncode, 0, "Failed to push to %s, "
                    "STDOUT:\n@@@\n'%s'\n@@@\nSTDERR:\n@@@\n'%s'\n@@@\n" %
                    (branch, out, err))
        p = Popen(("rm", "-rf", tempdir))
        p.communicate()
Example #14
def sync_domain(dbdomain, logger=LOGGER, locked=False):
    """Update templates on disk to match contents of branch in template-king.

    If this domain is tracking another, first update the branch in
    template-king with the latest from the tracking branch.  Also save
    the current (previous) commit as a potential rollback point.

    """
    config = Config()
    session = object_session(dbdomain)
    kingdir = config.get("broker", "kingdir")
    domaindir = os.path.join(config.get("broker", "domainsdir"), dbdomain.name)
    git_env = {
        "PATH":
        "%s:%s" %
        (config.get("broker", "git_path"), os.environ.get("PATH", ""))
    }
    if dbdomain.tracked_branch:
        # Might need to revisit if using this helper from rollback...
        run_command([
            "git", "push", ".",
            "%s:%s" % (dbdomain.tracked_branch.name, dbdomain.name)
        ],
                    path=kingdir,
                    env=git_env,
                    logger=logger)
    run_command(["git", "fetch", "--prune"],
                path=domaindir,
                env=git_env,
                logger=logger)
    if dbdomain.tracked_branch:
        out = run_command(["git", "rev-list", "-n", "1", "HEAD"],
                          path=domaindir,
                          env=git_env,
                          logger=logger)
        rollback_commit = out.strip()
    try:
        if not locked:
            key = CompileKey(domain=dbdomain.name, logger=logger)
            lock_queue.acquire(key)
        run_command(["git", "reset", "--hard",
                     "origin/%s" % dbdomain.name],
                    path=domaindir,
                    env=git_env,
                    logger=logger)
    finally:
        if not locked:
            lock_queue.release(key)
    if dbdomain.tracked_branch:
        dbdomain.rollback_commit = rollback_commit
        session.add(dbdomain)
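
A hedged usage sketch of the helper above; the Domain.get_unique() lookup and the domain name are assumptions for illustration, not taken from the example.

# Hypothetical caller: refresh the on-disk templates of one domain.
dbdomain = Domain.get_unique(session, "deploy-qa", compel=True)  # assumed lookup
sync_domain(dbdomain, logger=logger)
# If the domain tracks another branch, dbdomain.rollback_commit now records
# the previous commit as a potential rollback point.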
Example #15
    def testrebuild(self):
        env = {}
        for (key, value) in os.environ.items():
            env[key] = value
        env["AQDCONF"] = Config().baseconfig

        cmd = ['./build_db.py', '--delete', '--populate', 'data/unittest.dump']

        _DIR = os.path.dirname(os.path.realpath(__file__))
        p = Popen(cmd, stdout=1, stderr=2, env=env, cwd=_DIR)
        (out, err) = p.communicate()

        self.assertEqual(
            p.returncode, 0, "Database rebuild failed with returncode %s:\n"
            "STDOUT:\n%s\nSTDERR:\n%s\n" % (p.returncode, out, err))
Example #16
    def __init__(self, network=None, network_type=None, **kw):
        # pylint: disable=W0621
        if not isinstance(network, IPv4Network):
            raise InternalError("Expected an IPv4Network, got: %s" %
                                type(network))

        if not network_type:
            config = Config()
            network_type = config.get("broker", "default_network_type")

        self._network = network
        self._props = self.network_type_map.get(self.network_type,
                                                self.default_network_props)

        super(Network, self).__init__(ip=network.network,
                                      cidr=network.prefixlen,
                                      network_type=network_type, **kw)
Example #17
    def compile(self, session, only=None, locked=False,
                panc_debug_include=None, panc_debug_exclude=None,
                cleandeps=False):
        """The build directories are checked and constructed
        if necessary, so no prior setup is required.  The compile may
        take some time (current rate is 10 hosts per second, with a
        couple of seconds of constant overhead), and the possibility
        of blocking on the compile lock.

        If the 'only' parameter is provided, then it should be a
        list or set containing the profiles that need to be compiled.

        May raise ArgumentError exception, else returns the standard
        output (as a string) of the compile
        """

        config = Config()

        if self.domain.branch_type == 'sandbox':
            if not self.author:
                raise InternalError("Missing required author to compile "
                                    "sandbox %s" % self.domain.name)
            sandboxdir = os.path.join(config.get("broker", "templatesdir"),
                                      self.author.name, self.domain.name)
            if not os.path.exists(sandboxdir):
                raise ArgumentError("Sandbox directory '%s' does not exist." %
                                    sandboxdir)
            if not self.sandbox_has_latest(config, sandboxdir):
                self.logger.warn("Sandbox %s/%s does not contain the "
                                 "latest changes from the prod domain.  If "
                                 "there are failures try "
                                 "`git fetch && git merge origin/prod`" %
                                 (self.author.name, self.domain.name))

        self.logger.info("preparing domain %s for compile" % self.domain.name)

        # Ensure that the compile directory is in a good state.
        outputdir = config.get("broker", "profilesdir")

        for d in self.directories() + self.outputdirs():
            if not os.path.exists(d):
                try:
                    self.logger.info("creating %s" % d)
                    os.makedirs(d)
                except OSError, e:
                    raise ArgumentError("Failed to mkdir %s: %s" % (d, e))
Example #18
def main():
    from aquilon.config import Config

    config = Config()
    if config.has_option("database", "module"):
        ms.modulecmd.load(config.get("database", "module"))

    db = DbFactory()
    Base.metadata.bind = db.engine

    session = db.Session()

    add_interfaces(session)
    add_addresses(session)

    session.rollback()
    raise Exception("Replace the rollback() in the code with commit() when "
                    "ready to go, and disable this exception")
Example #19
def main():
    parser = argparse.ArgumentParser(description="Parse AQD configuration")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        help="parse the given config file instead of the default")
    parser.add_argument("--get", metavar="SECTION.NAME", action="store",
                        help="get the value of the specified configuration key")
    parser.add_argument("--list", action="store_true",
                        help="list all defined configuration options and their values")

    opts = parser.parse_args()

    config = Config(configfile=opts.config)

    if opts.get:
        get_option(config, opts.get)
    elif opts.list:
        list_all(config)
    else:
        raise SystemExit("Please specify an action.")
Example #20
    def teststart(self):
        # FIXME: Either remove any old pidfiles, or ignore it as a warning
        # from stderr... or IMHO (daqscott) if pid files exist and are knc or
        # python processes, kill -9 the pids and delete the files (with a
        # warning message it tickles you)

        config = Config()
        twistd = os.path.join(config.get("broker", "srcdir"), "lib",
                              "python2.6", "aquilon", "unittest_patches.py")
        pidfile = os.path.join(config.get("broker", "rundir"), "aqd.pid")
        logfile = config.get("broker", "logfile")

        # Specify twistd and options...
        args = [
            sys.executable, twistd, "--pidfile", pidfile, "--logfile", logfile
        ]

        if config.has_option("unittest", "profile"):
            if config.getboolean("unittest", "profile"):
                args.append("--profile")
                args.append(
                    os.path.join(config.get("broker", "logdir"),
                                 "aqd.profile"))
                args.append("--profiler=cProfile")
                args.append("--savestats")

        # And then aqd and options...
        args.extend(["aqd", "--config", config.baseconfig])

        if config.has_option("unittest", "coverage"):
            if config.getboolean("unittest", "coverage"):
                args.append("--coveragedir")
                dir = os.path.join(config.get("broker", "logdir"), "coverage")
                args.append(dir)

                coveragerc = os.path.join(config.get("broker", "srcdir"),
                                          "tests", "coverage.rc")
                args.append("--coveragerc")
                args.append(coveragerc)

        p = Popen(args)
        self.assertEqual(p.wait(), 0)
Example #21
    def directories(self):
        """Return a list of directories required for compiling this domain"""
        config = Config()
        dirs = []

        if self.domain.branch_type == 'domain':
            dirs.append(os.path.join(config.get("broker", "domainsdir"),
                                     self.domain.name))

        dirs.append(os.path.join(config.get("broker", "quattordir"),
                                 "cfg",
                                 "domains",
                                 self.domain.name))

        dirs.append(os.path.join(config.get("broker", "quattordir"),
                                 "build",
                                 "xml",
                                 self.domain.name))

        return dirs
Example #22
    def testcloneswrep(self):
        config = Config()
        source = config.get("unittest", "swrep_repository")
        dest = os.path.join(config.get("broker", "swrepdir"), "repository")
        p = Popen(("/bin/rm", "-rf", dest), stdout=1, stderr=2)
        rc = p.wait()
        self.assertEqual(rc, 0,
                         "Failed to clear old swrep directory '%s'" % dest)
        env = {}
        env["PATH"] = "%s:%s" % (config.get(
            "broker", "git_path"), os.environ.get("PATH", ""))
        p = Popen(("git", "clone", source, dest),
                  env=env,
                  stdout=PIPE,
                  stderr=PIPE)
        (out, err) = p.communicate()
        # Ignore out/err unless we get a non-zero return code, then log it.
        self.assertEqual(
            p.returncode, 0, "Non-zero return code for clone of swrep, "
            "STDOUT:\n@@@\n'%s'\n@@@\nSTDERR:\n@@@\n'%s'\n@@@\n" % (out, err))
        return
Example #23
def main():
    parser = argparse.ArgumentParser(description="Compile templates")
    parser.add_argument("-c",
                        "--config",
                        dest="config",
                        action="store",
                        help="location of the config file",
                        default=os.path.join(SRCDIR, "etc",
                                             "aqd.conf.defaults"))
    parser.add_argument("--basedir",
                        action="store",
                        required=True,
                        help="base directory")
    parser.add_argument("--domain",
                        action="store",
                        required=True,
                        help="domain name to compile")
    parser.add_argument("--compress_output",
                        action="store_true",
                        help="compress the generated profiles")
    parser.add_argument("--panc_jar",
                        action="store",
                        help="location of panc.jar")
    parser.add_argument("--templates",
                        action="store",
                        required=True,
                        help="location of the domain templates")
    parser.add_argument("--swrep",
                        action="store",
                        help="location of the swrep templates")
    parser.add_argument("--batch_size",
                        action="store",
                        type=int,
                        help="compiler batch size")

    options = parser.parse_args()
    config = Config(configfile=options.config)

    return run_domain_compile(options, config)
Example #24
def write_file(filename, content, mode=None, logger=LOGGER, compress=None):
    """Atomically write content into the specified filename.

    The content is written into a temp file in the same directory as
    filename, and then swapped into place with rename.  This assumes
    that both the file and the directory can be written to by the
    broker.  The same directory was used instead of a temporary
    directory because atomic swaps are generally only available when
    the source and the target are on the same filesystem.

    If mode is set, change permissions on the file (newly created or
    pre-existing) to the new mode.  If unset and the file exists, the
    current permissions will be kept.  If unset and the file is new,
    the default is 0644.

    This method may raise OSError if any of the OS-related methods
    (creating the temp file, writing to it, correcting permissions,
    swapping into place) fail.  The method will attempt to remove
    the temp file if it had been created.

    If the compress keyword is passed, the content is compressed in
    memory before writing.  The only compression currently supported
    is gzip.

    """
    if compress == 'gzip':
        config = Config()
        buffer = StringIO()
        compress = config.getint('broker', 'gzip_level')
        zipper = gzip.GzipFile(filename, 'wb', compress, buffer)
        zipper.write(content)
        zipper.close()
        content = buffer.getvalue()
    if mode is None:
        try:
            old_mode = os.stat(filename).st_mode
        except OSError, e:
            old_mode = 0644
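
As a complement to the write_file() docstring above, a minimal standalone sketch of the temp-file-plus-rename pattern it describes; a generic illustration under the stated assumptions, not the library's implementation.

import os
from tempfile import mkstemp


def atomic_write(filename, content):
    """Generic sketch: write next to the target, then rename into place."""
    # Creating the temp file in the same directory keeps the final rename on
    # one filesystem, which is what makes the swap atomic.
    dirname = os.path.dirname(os.path.abspath(filename))
    fd, tmpname = mkstemp(prefix=".write.", dir=dirname)
    try:
        os.write(fd, content)  # content is assumed to be a byte string
        os.close(fd)
        os.rename(tmpname, filename)
    except OSError:
        try:
            os.close(fd)
        except OSError:
            pass  # already closed above
        if os.path.exists(tmpname):
            os.remove(tmpname)
        raise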
Example #25
def run_git(args,
            env=None,
            path=".",
            logger=LOGGER,
            loglevel=logging.INFO,
            filterre=None):
    config = Config()
    if env:
        git_env = env.copy()
    else:
        git_env = {}
    env_path = git_env.get("PATH", os.environ.get("PATH", ""))
    git_env["PATH"] = "%s:%s" % (config.get("broker", "git_path"), env_path)

    for name in [
            "git_author_name", "git_author_email", "git_committer_name",
            "git_committer_email"
    ]:
        if not config.has_option("broker", name):
            continue
        value = config.get("broker", name)
        git_env[name.upper()] = value

    if isinstance(args, list):
        git_args = args[:]
        if git_args[0] != "git":
            git_args.insert(0, "git")
    else:
        git_args = ["git", args]

    return run_command(git_args,
                       env=git_env,
                       path=path,
                       logger=logger,
                       loglevel=loglevel,
                       filterre=filterre)
Example #26
    def teststop(self):
        config = Config()
        pidfile = os.path.join(config.get("broker", "rundir"), "aqd.pid")
        self.assert_(os.path.exists(pidfile))
        f = file(pidfile)
        pid = f.readline()
        self.assertNotEqual(pid, "")
        f.close()
        pid = int(pid)
        os.kill(pid, signal.SIGTERM)

        # Wait for the broker to shut down. E.g. generating code coverage may
        # take some time.
        i = 0
        while i < 180:
            i += 1
            try:
                os.kill(pid, 0)
            except OSError:
                break
            sleep(1)

        # Verify that the broker is down
        self.failUnlessRaises(OSError, os.kill, pid, 0)
Example #27
import logging

from aquilon.config import Config

from aquilon.exceptions_ import AquilonError, IncompleteError
from aquilon.aqdb.model import Base, Resource, ResourceGroup, Cluster, Host
from aquilon.aqdb.db_factory import DbFactory
from aquilon.worker.templates.base import PlenaryCollection
from aquilon.worker.templates.resource import PlenaryResource
from aquilon.worker.templates.cluster import PlenaryCluster
from aquilon.worker.templates.host import PlenaryHost
from aquilon.worker.locks import CompileKey

db = DbFactory()
Base.metadata.bind = db.engine

session = db.Session()
config = Config()


def main():
    logging.basicConfig(level=logging.DEBUG)

    query = session.query(Resource)

    old_paths = []

    with CompileKey():
        for res in query.all():
            PlenaryResource(res).write(locked=True)

            holder = res.holder.holder_object
            if isinstance(holder, ResourceGroup):
Example #28
    def __init__(self, dbsandbox, dbauthor):
        self.dbsandbox = dbsandbox
        self.dbauthor = dbauthor
        config = Config()
        templatesdir = config.get('broker', 'templatesdir')
        self.path = os.path.join(templatesdir, dbauthor.name, dbsandbox.name)
Example #29
opts = parser.parse_args()

if not os.path.exists(opts.config):
    print >> sys.stderr, "configfile %s does not exist" % opts.config
    sys.exit(1)

if os.environ.get("AQDCONF") and (os.path.realpath(opts.config)
        != os.path.realpath(os.environ["AQDCONF"])):
    force_yes("""Will ignore AQDCONF variable value:
%s
and use
%s
instead.""" % (os.environ["AQDCONF"], opts.config))

config = Config(configfile=opts.config)
if not config.has_section("unittest"):
    config.add_section("unittest")
if not config.has_option("unittest", "srcdir"):
    config.set("unittest", "srcdir", SRCDIR)
if opts.coverage:
    config.set("unittest", "coverage", "True")
if opts.profile:
    config.set("unittest", "profile", "True")

hostname = config.get("unittest", "hostname")
if hostname.find(".") < 0:
    print >> sys.stderr, """
Some regression tests depend on the config value for hostname to be
fully qualified.  Please set the config value manually since the default
on this system (%s) is a short name.
Example #30
                    '-o',
                    dest='dir',
                    default='.',
                    help='directory to put generated files')
parser.add_argument('--prefix',
                    '-p',
                    dest='prefix',
                    default='aqdb_schema',
                    help='basename of files to generate')
opts = parser.parse_args()

if not os.path.exists(opts.dir):
    os.makedirs(opts.dir)

from aquilon.config import Config
config = Config(configfile=os.path.join(_ETCDIR, 'aqd.conf.mem'))

from aquilon.aqdb.db_factory import DbFactory
from aquilon.aqdb.model import Base
db = DbFactory()
Base.metadata.bind = db.engine
Base.metadata.create_all()

import ms.modulecmd
ms.modulecmd.load("fsf/graphviz/2.6")

from aquilon.aqdb.utils import schema2dot
schema2dot.write_schema_png(db.meta,
                            os.path.join(opts.dir, "%s.png" % opts.prefix))
schema2dot.write_schema_dot(db.meta,
                            os.path.join(opts.dir, "%s.dot" % opts.prefix))