Example #1
def prepare():
    if not os.path.isdir(magic_consts.FUEL_CACHE):
        os.makedirs(magic_consts.FUEL_CACHE)
    subprocess.call(["yum", "-y", "install"] + magic_consts.PACKAGES)
    # From patch_all_containers
    apply_patches()
    docker.run_in_container("nailgun", ["pkill", "-f", "wsgi"])
    patch_initramfs()
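
Every snippet in this listing relies on a small docker.run_in_container helper whose implementation is not shown. The sketch below is a minimal reconstruction, assuming plain "docker exec" semantics, of the call shape the examples depend on: a container name, an argument list, optional Popen-style keyword arguments such as stdout=subprocess.PIPE, and an (stdout, stderr) return pair.

# A minimal sketch of the run_in_container helper assumed by these examples;
# NOT the project's actual implementation.
import subprocess


def run_in_container(container, cmd, **popen_kwargs):
    # "docker exec" is assumed here; the real helper may first resolve the
    # short name ("nailgun", "postgres", ...) to the full container name.
    process = subprocess.Popen(["docker", "exec", container] + list(cmd),
                               **popen_kwargs)
    stdout, stderr = process.communicate()
    return stdout, stderr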
Example #2
def install_octane_nailgun():
    octane_nailgun = os.path.join(magic_consts.CWD, '..', 'octane_nailgun')
    subprocess.call(["python", "setup.py", "bdist_wheel"], cwd=octane_nailgun)
    wheel = glob.glob(os.path.join(octane_nailgun, 'dist', '*.whl'))[0]
    subprocess.call(["dockerctl", "copy", wheel, "nailgun:/root/"])
    docker.run_in_container("nailgun", ["pip", "install", "-U",
                                        "/root/" + os.path.basename(wheel)])
    docker.run_in_container("nailgun", ["pkill", "-f", "wsgi"])
Example #3
def patch_initramfs():
    backup = magic_consts.BOOTSTRAP_INITRAMFS + '.bkup'
    chroot = tempfile.mkdtemp()
    try:
        os.rename(magic_consts.BOOTSTRAP_INITRAMFS, backup)
        subprocess.call("gunzip -c {0} | cpio -id".format(backup),
                        shell=True, cwd=chroot)
        patch_fuel_agent(chroot)
        with open(magic_consts.BOOTSTRAP_INITRAMFS, "wb") as f:
            subprocess.call("find | grep -v '^\.$' | cpio --format newc -o"
                            " | gzip -c", shell=True, stdout=f, cwd=chroot)
        docker.run_in_container("cobbler", ["cobbler", "sync"])
    finally:
        shutil.rmtree(chroot)
Example #4
 def backup(self):
     with open("/etc/fuel/astute.yaml", "r") as current:
         current_yaml = yaml.load(current)
         ipaddr = current_yaml["ADMIN_NETWORK"]["ipaddress"]
     results, _ = docker.run_in_container(
         "postgres",
         [
             "sudo",
             "-u",
             "postgres",
             "psql",
             self.db,
             "--tuples-only",
             "-c",
             self.sql
         ],
         stdout=subprocess.PIPE)
     results = results.strip()
     if not results:
         return
     rows = results.split("\n")
     already_backuped = set()
     for line in rows:
         data = json.loads(line)
         for value in self._get_values_list(data):
             if ipaddr in value['uri']:
                 path = urlparse.urlsplit(value['uri']).path
                 dir_name = path.lstrip("/").split('/', 1)[0]
                 if dir_name in already_backuped:
                     continue
                 already_backuped.add(dir_name)
                 path = os.path.join(self.path, dir_name)
                 self.archive.add(path, os.path.join(self.name, dir_name))
Example #5
 def backup(self):
     with open("/etc/fuel/astute.yaml", "r") as current:
         current_yaml = yaml.load(current)
         ipaddr = current_yaml["ADMIN_NETWORK"]["ipaddress"]
     results, _ = docker.run_in_container(
         "postgres",
         ["sudo", "-u", "postgres", "psql", self.db, "--tuples-only", "-c",
          self.sql],
         stdout=subprocess.PIPE)
     results = results.strip()
     if not results:
         return
     rows = results.split("\n")
     already_backuped = set()
     for line in rows:
         data = json.loads(line)
         for value in self._get_values_list(data):
             if ipaddr in value['uri']:
                 path = urlparse.urlsplit(value['uri']).path
                 dir_name = path.lstrip("/").split('/', 1)[0]
                 if dir_name in already_backuped:
                     continue
                 already_backuped.add(dir_name)
                 path = os.path.join(self.path, dir_name)
                 self.archive.add(path, os.path.join(self.name, dir_name))
Example #6
 def restore(self):
     dump = self.archive.extractfile(self.filename)
     subprocess.call(
         ["systemctl", "stop", "docker-{0}.service".format(self.db)])
     docker.stop_container(self.db)
     docker.run_in_container(
         "postgres",
         ["sudo", "-u", "postgres", "dropdb", "--if-exists", self.db],
     )
     with docker.in_container("postgres",
                              ["sudo", "-u", "postgres", "psql"],
                              stdin=subprocess.PIPE) as process:
         process.stdin.write(dump.read())
     docker.start_container(self.db)
     docker.wait_for_container(self.db)
     subprocess.call(
         ["systemctl", "start", "docker-{0}.service".format(self.db)])
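
The restore above also streams a SQL dump into psql through docker.in_container, a context-manager variant of the same helper. Below is a minimal sketch under the same "docker exec" assumption, not the project's actual code.

# Hypothetical sketch of docker.in_container: yield a Popen handle for the
# exec'd command, then close stdin and wait for it on exit.
import contextlib
import subprocess


@contextlib.contextmanager
def in_container(container, cmd, **popen_kwargs):
    process = subprocess.Popen(["docker", "exec", "-i", container] + list(cmd),
                               **popen_kwargs)
    try:
        yield process
    finally:
        if process.stdin is not None:
            process.stdin.close()
        process.wait()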
Example #7
 def _run_sql_in_container(self, sql):
     sql_run_prams = [
         "sudo", "-u", "postgres", "psql", "nailgun", "--tuples-only", "-c"]
     results, _ = docker.run_in_container(
         "postgres",
         sql_run_prams + [sql],
         stdout=subprocess.PIPE)
     return results.strip().splitlines()
Example #8
 def restore(self):
     dump = self.archive.extractfile(self.filename)
     subprocess.call([
         "systemctl", "stop", "docker-{0}.service".format(self.db)
     ])
     docker.stop_container(self.db)
     docker.run_in_container(
         "postgres",
         ["sudo", "-u", "postgres", "dropdb", "--if-exists", self.db],
     )
     with docker.in_container("postgres",
                              ["sudo", "-u", "postgres", "psql"],
                              stdin=subprocess.PIPE) as process:
         process.stdin.write(dump.read())
     subprocess.call([
         "systemctl", "start", "docker-{0}.service".format(self.db)
     ])
     docker.start_container(self.db)
Example #9
    def _post_restore_action(self):
        data, _ = docker.run_in_container(
            "nailgun", ["cat", magic_consts.OPENSTACK_FIXTURES],
            stdout=subprocess.PIPE)
        fixtures = yaml.load(data)
        base_release_fields = fixtures[0]['fields']
        for fixture in fixtures[1:]:
            release = helpers.merge_dicts(base_release_fields,
                                          fixture['fields'])
            self.__post_data_to_nailgun("/api/v1/releases/", release,
                                        self.context.user,
                                        self.context.password)
        subprocess.call([
            "fuel", "release", "--sync-deployment-tasks", "--dir",
            "/etc/puppet/", "--user", self.context.user, "--password",
            self.context.password
        ])
        sql_run_prams = [
            "sudo", "-u", "postgres", "psql", "nailgun", "--tuples-only", "-c"
        ]
        results, _ = docker.run_in_container(
            "postgres",
            sql_run_prams + ["select id, generated from attributes;"],
            stdout=subprocess.PIPE)
        results = results.strip()
        values = []
        sql = 'update attributes as a set generated = b.generated ' \
            'from (values {0}) as b(id, generated) where a.id = b.id;'

        for line in results.split("\n"):
            c_id, c_data = line.split("|", 1)
            data = json.loads(c_data)
            data["deployed_before"] = {"value": True}
            values.append((c_id, json.dumps(data)))

        if values:
            sql = sql.format(','.join(
                ["({0}, '{1}')".format(*v) for v in values]))
            docker.run_in_container("postgres",
                                    sql_run_prams + [sql],
                                    stdout=subprocess.PIPE)
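
For illustration, the snippet below (id and JSON payload invented) shows what the formatted update statement assembled above looks like for a single attributes row:

# Hypothetical single-row example of the generated SQL; the id (42) and the
# payload are invented for illustration.
example_values = "(42, '{\"deployed_before\": {\"value\": true}}')"
example_sql = (
    'update attributes as a set generated = b.generated '
    'from (values {0}) as b(id, generated) '
    'where a.id = b.id;'.format(example_values)
)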
Example #10
 def _create_links_on_remote_logs(self):
     domain = helpers.get_astute_dict()["DNS_DOMAIN"]
     dirname = "/var/log/docker-logs/remote/"
     with fuel_client.set_auth_context(self.context):
         pairs = [(n.data["meta"]["system"]["fqdn"], n.data["ip"])
                  for n in node.Node.get_all()]
     docker.run_in_container("rsyslog", ["service", "rsyslog", "stop"])
     try:
         for fqdn, ip_addr in pairs:
             if not fqdn.endswith(domain):
                 continue
             ip_addr_path = os.path.join(dirname, ip_addr)
             fqdn_path = os.path.join(dirname, fqdn)
             if os.path.islink(ip_addr_path):
                 continue
             if os.path.isdir(ip_addr_path):
                 os.rename(ip_addr_path, fqdn_path)
             else:
                 os.mkdir(fqdn_path)
             os.symlink(fqdn, ip_addr_path)
     finally:
         docker.run_in_container("rsyslog", ["service", "rsyslog", "start"])
Example #11
def archivate_container_cmd_output(archive, container, cmd, filename):
    """Archive the output of a command run in a container.

    Save the output of a command running in the given container to the
    archive under the given name.
    :param archive: tar archive open for writing or appending
    :param container: container name
    :param cmd: sequence of program arguments
    :param filename: name under which the output is stored in the archive
    """
    info = tarfile.TarInfo(filename)
    dump = io.BytesIO()
    data, _ = docker.run_in_container(container, cmd, stdout=subprocess.PIPE)
    info.size = len(data)
    dump.write(data)
    dump.seek(0)
    archive.addfile(info, dump)
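
A hypothetical call to the helper above, mirroring how the backup() methods later in this listing use it; the archive path and captured file are invented for illustration.

# Hypothetical usage: capture one file from the "nailgun" container into a
# tar archive under "nailgun/<filename>".
import tarfile

with tarfile.open("/tmp/container-backup.tar", "w") as archive:
    archivate_container_cmd_output(
        archive, "nailgun",
        ["cat", "/etc/nailgun/settings.yaml"],  # invented path
        "nailgun/settings.yaml")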
Example #12
def run_psql_in_container(sql, db):
    results, _ = docker.run_in_container(
        "postgres",
        [
            "sudo",
            "-u",
            "postgres",
            "psql",
            db,
            "--tuples-only",
            "--no-align",
            "-c",
            sql,
        ],
        stdout=subprocess.PIPE)
    return results.strip().splitlines()
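
A hypothetical call to the function above: with --tuples-only and --no-align, psql emits bare pipe-separated rows, one per line, so each line can be split on "|", much as the restore actions in this listing do.

# Hypothetical usage; the query matches the one used by the restore actions
# in this listing.
for line in run_psql_in_container("select id, generated from attributes;",
                                  "nailgun"):
    attr_id, generated_json = line.split("|", 1)
    # ... use attr_id / generated_json ...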
Example #13
 def backup(self):
     assert self.container
     assert self.backup_directory
     stdout, _ = docker.run_in_container(
         self.container, ["find", self.backup_directory, "-type", "f"],
         stdout=subprocess.PIPE)
     filenames = stdout.strip().split()
     for filename in filenames:
         filename = filename[len(self.backup_directory):].lstrip("\/")
         if filename in self.banned_files:
             continue
         if self.allowed_files is not None \
                 and filename not in self.allowed_files:
             continue
         path = os.path.join(self.backup_directory, filename)
         archivate.archivate_container_cmd_output(
             self.archive, self.container, ["cat", path],
             "{0}/{1}".format(self.container, filename))
Example #14
 def post_restore_action(self, context):
     data, _ = docker.run_in_container(
         "nailgun",
         ["cat", "/usr/share/fuel-openstack-metadata/openstack.yaml"],
         stdout=subprocess.PIPE)
     fixtures = yaml.load(data)
     base_release_fields = fixtures[0]['fields']
     for fixture in fixtures[1:]:
         release = helpers.merge_dicts(
             base_release_fields, fixture['fields'])
         self.__post_data_to_nailgun(
             "/api/v1/releases/", release, context.password)
     subprocess.call([
         "fuel",
         "release",
         "--sync-deployment-tasks",
         "--dir",
         "/etc/puppet/",
     ])
Example #15
    def _post_restore_action(self):
        data, _ = docker.run_in_container(
            "nailgun",
            ["cat", magic_consts.OPENSTACK_FIXTURES],
            stdout=subprocess.PIPE)
        fixtures = yaml.load(data)
        base_release_fields = fixtures[0]['fields']
        for fixture in fixtures[1:]:
            release = helpers.merge_dicts(
                base_release_fields, fixture['fields'])
            self.__post_data_to_nailgun(
                "/api/v1/releases/",
                release,
                self.context.user,
                self.context.password)
        subprocess.call(
            [
                "fuel",
                "release",
                "--sync-deployment-tasks",
                "--dir",
                "/etc/puppet/",
            ],
            env=self.context.get_credentials_env())

        values = []
        for line in self._run_sql_in_container(
                "select id, generated from attributes;"):
            c_id, c_data = line.split("|", 1)
            data = json.loads(c_data)
            data["deployed_before"] = {"value": True}
            values.append("({0}, '{1}')".format(c_id, json.dumps(data)))

        if values:
            self._run_sql_in_container(
                'update attributes as a set generated = b.generated '
                'from (values {0}) as b(id, generated) '
                'where a.id = b.id;'.format(','.join(values))
            )
        self._create_links_on_remote_logs()
Example #16
 def backup(self):
     assert self.container
     assert self.backup_directory
     stdout, _ = docker.run_in_container(
         self.container,
         ["find", self.backup_directory, "-type", "f"],
         stdout=subprocess.PIPE)
     filenames = stdout.strip().split()
     for filename in filenames:
         filename = filename[len(self.backup_directory):].lstrip("\/")
         if filename in self.banned_files:
             continue
         if self.allowed_files is not None \
                 and filename not in self.allowed_files:
             continue
         path = os.path.join(self.backup_directory, filename)
         archivate.archivate_container_cmd_output(
             self.archive,
             self.container,
             ["cat", path],
             "{0}/{1}".format(self.container, filename)
         )
Example #17
def update_node_partition_info(node_id):
    fname = 'update_node_partition_info.py'
    command = ['python', os.path.join('/tmp', fname), str(node_id)]
    docker.run_in_container('nailgun', command)
Example #18
def update_partition_generator():
    fname = 'update_release_partition_info.py'
    command = ['python', os.path.join('/tmp', fname)]
    docker.run_in_container('nailgun', command)
Example #19
def revert_prepare():
    apply_patches(revert=True)
    docker.run_in_container("nailgun", ["pkill", "-f", "wsgi"])
    revert_initramfs()
Example #20
def patch_initramfs():
    with archivate.update_cpio(magic_consts.BOOTSTRAP_INITRAMFS) as chroot:
        patch_fuel_agent(chroot)
    docker.run_in_container("cobbler", ["cobbler", "sync"])
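
The archivate.update_cpio context manager is not shown in this listing; the sketch below reconstructs what it plausibly does from the manual gunzip/cpio/gzip sequence in Example #3, and is an assumption rather than the project's actual code.

# Hypothetical sketch of update_cpio: unpack a gzipped cpio image into a
# scratch directory, let the caller patch it, then repack it over the
# original image (unlike Example #3, no .bkup copy of the image is kept).
import contextlib
import shutil
import subprocess
import tempfile


@contextlib.contextmanager
def update_cpio(img_path):
    chroot = tempfile.mkdtemp()
    try:
        subprocess.call("gunzip -c {0} | cpio -id".format(img_path),
                        shell=True, cwd=chroot)
        yield chroot
        with open(img_path, "wb") as f:
            subprocess.call("find | grep -v '^\\.$' | cpio --format newc -o"
                            " | gzip -c", shell=True, stdout=f, cwd=chroot)
    finally:
        shutil.rmtree(chroot)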