import os
import random
import sys
import time

import sfconfig.arch
import sfconfig.groupvars
import sfconfig.inventory
import sfconfig.upgrade
import sfconfig.utils

# The helpers below (execute, save_file, yaml_load, yaml_dump) are assumed to
# be provided by sfconfig.utils; "bdir", "saml_idp_file" and "usage()" are
# module-level names defined elsewhere in the original module.
from sfconfig.utils import execute
from sfconfig.utils import save_file
from sfconfig.utils import yaml_dump
from sfconfig.utils import yaml_load


def extract_backup(backup):
    if not os.path.isdir(bdir):
        os.makedirs(bdir, 0o700)

    # Extract backup file
    print("Extracting the archive %s in %s" % (backup, bdir))
    execute(["tar", "-xpf", backup, "-C", bdir])
    print("Archive extracted")
def get_or_generate_ssh_key(self, args, name):
    priv = "%s/ssh_keys/%s" % (args.lib, name)
    pub = "%s/ssh_keys/%s.pub" % (args.lib, name)
    comment = '%s@%s' % (name, args.sfconfig["fqdn"])

    if not os.path.isfile(priv):
        execute(["ssh-keygen", "-t", "rsa", "-N", "", "-f", priv, "-q",
                 '-C', comment])
    args.glue[name] = open(priv).read()
    args.glue["%s_pub" % name] = open(pub).read()
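
# Illustrative sketch only (not part of the original code path): how a caller
# holding a component instance might request a dedicated service key. The
# function and key names are hypothetical; get_or_generate_ssh_key() exposes
# the resulting key pair through args.glue for the Ansible group vars.
def example_prepare_service_key(component, args):
    # Creates <args.lib>/ssh_keys/example_service_rsa(.pub) if missing and
    # sets args.glue["example_service_rsa"] / ["example_service_rsa_pub"].
    component.get_or_generate_ssh_key(args, "example_service_rsa")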
def bootstrap_backup():
    # Copy software-factory conf
    execute([
        "rsync", "-a", "--exclude", "arch.yaml",
        "%s/install-server/etc/software-factory/" % bdir,
        "/etc/software-factory/"
    ])

    # Copy bootstrap data
    execute([
        "rsync", "-a", "--exclude", "arch.yaml",
        "%s/install-server/var/lib/software-factory/" % bdir,
        "/var/lib/software-factory/"
    ])

    print("Bootstrap data prepared from the backup. Done.")
def get_or_generate_CA(self, args):
    args.ca_file = "%s/certs/localCA.pem" % args.lib
    args.ca_key_file = "%s/certs/localCAkey.pem" % args.lib
    args.ca_srl_file = "%s/certs/localCA.srl" % args.lib

    if not os.path.isfile(args.ca_file):
        # Generate a random OU subject to be able to trust multiple sf CA
        ou = ''.join(random.choice('0123456789abcdef') for n in range(6))
        execute([
            "openssl", "req", "-nodes", "-days", "3650", "-new", "-x509",
            "-subj", "/C=FR/O=SoftwareFactory/OU=%s" % ou,
            "-keyout", args.ca_key_file, "-out", args.ca_file
        ])

    if not os.path.isfile(args.ca_srl_file):
        open(args.ca_srl_file, "w").write("00\n")

    args.glue["localCA_pem"] = open(args.ca_file).read()
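
# Illustrative sketch only (not part of the original code path): inspect the
# locally generated CA with standard openssl flags. The helper name is
# hypothetical; args.ca_file is set by get_or_generate_CA() above.
def example_show_local_ca(args):
    # Print the subject (including the random OU) and expiry date of the CA.
    execute(["openssl", "x509", "-in", args.ca_file, "-noout",
             "-subject", "-enddate"])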
def configure(self, args, host):
    priv_file = "%s/certs/cauth_privkey.pem" % args.lib
    pub_file = "%s/certs/cauth_pubkey.pem" % args.lib
    if not os.path.isfile(priv_file):
        execute(["openssl", "genrsa", "-out", priv_file, "1024"])
    if not os.path.isfile(pub_file):
        execute(["openssl", "rsa", "-in", priv_file, "-out", pub_file,
                 "-pubout"])
    args.glue["cauth_privkey"] = open(priv_file).read()
    args.glue["cauth_pubkey"] = open(pub_file).read()

    self.add_mysql_database(args, "cauth")

    if self.idp_metadata_uri:
        if self.idp_metadata_uri.lower().startswith('http'):
            args.glue["idp_md_url"] = self.idp_metadata_uri
        else:
            args.glue["idp_md_file"] = self.idp_metadata_uri

    if args.sfconfig["authentication"].get("differentiate_usernames"):
        args.glue["cauth_username_collision_strategy"] = "DIFFERENTIATE"
    else:
        args.glue["cauth_username_collision_strategy"] = "FORBID"

    # Check if secret hash needs to be generated:
    update_secrets = False
    previous_vars = yaml_load("%s/group_vars/all.yaml" % args.ansible_root)
    if not args.secrets.get('cauth_admin_password_hash') or \
       previous_vars.get("authentication", {}).get("admin_password") != \
       args.sfconfig["authentication"]["admin_password"]:
        update_secrets = True
        args.secrets["cauth_admin_password_hash"] = self.hash_password(
            args.sfconfig["authentication"]["admin_password"])
    if not args.secrets.get('sf_service_user_password_hash') or \
       previous_vars.get('sf_service_user_password') != \
       args.secrets['sf_service_user_password']:
        update_secrets = True
        args.secrets["sf_service_user_password_hash"] = self.hash_password(
            args.secrets["sf_service_user_password"])
    if update_secrets and not args.skip_setup:
        yaml_dump(args.secrets, open("%s/secrets.yaml" % args.lib, "w"))

    args.glue.update(args.secrets)
def get_or_generate_cert(self, args, name, common_name):
    cert_cnf = "%s/certs/%s.cnf" % (args.lib, name)
    cert_key = "%s/certs/%s.key" % (args.lib, name)
    cert_req = "%s/certs/%s.req" % (args.lib, name)
    cert_crt = "%s/certs/%s.crt" % (args.lib, name)

    def xunlink(filename):
        if os.path.isfile(filename):
            os.unlink(filename)

    if os.path.isfile(cert_cnf) and \
       open(cert_cnf).read().find("DNS.1 = %s\n" % common_name) == -1:
        # if FQDN changed, remove all certificates
        for fn in [cert_cnf, cert_req, cert_crt]:
            xunlink(fn)

    # Then manage certificate request
    if not os.path.isfile(cert_cnf):
        open(cert_cnf, "w").write("""[req]
req_extensions = v3_req
distinguished_name = req_distinguished_name
[ req_distinguished_name ]
commonName_default = %s
[ v3_req ]
subjectAltName=@alt_names
[alt_names]
DNS.1 = %s
""" % (common_name, common_name))

    if not os.path.isfile(cert_key):
        if os.path.isfile(cert_req):
            xunlink(cert_req)
        execute(["openssl", "genrsa", "-out", cert_key, "2048"])

    if not os.path.isfile(cert_req):
        if os.path.isfile(cert_crt):
            xunlink(cert_crt)
        execute([
            "openssl", "req", "-new", "-subj",
            "/C=FR/O=SoftwareFactory/CN=%s" % args.sfconfig["fqdn"],
            "-extensions", "v3_req", "-config", cert_cnf,
            "-key", cert_key, "-out", cert_req
        ])

    if not os.path.isfile(cert_crt):
        execute([
            "openssl", "x509", "-req", "-days", "3650", "-sha256",
            "-extensions", "v3_req", "-extfile", cert_cnf,
            "-CA", args.ca_file, "-CAkey", args.ca_key_file,
            "-CAserial", args.ca_srl_file,
            "-in", cert_req, "-out", cert_crt
        ])

    args.glue["%s_crt" % name] = open(cert_crt).read()
    args.glue["%s_key" % name] = open(cert_key).read()
    args.glue["%s_chain" % name] = args.glue["%s_crt" % name]
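
# Illustrative sketch only (not part of the original code path): check that a
# certificate produced by get_or_generate_cert() chains back to the local CA
# created by get_or_generate_CA(). "openssl verify -CAfile" is a standard
# command; the helper name is hypothetical and follows the same
# "<args.lib>/certs/<name>.crt" layout used above.
def example_verify_cert(args, name):
    cert_crt = "%s/certs/%s.crt" % (args.lib, name)
    execute(["openssl", "verify", "-CAfile", args.ca_file, cert_crt])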
def copy_backup(backup):
    if backup != bdir:
        print("Copying the tree %s in %s" % (backup, bdir))
        execute(["rsync", "-a", "--delete",
                 "%s/" % backup.rstrip('/'), bdir])
        print("Tree copied")
def main():
    components = sfconfig.utils.load_components()
    args = usage(components)

    # Ensure environment is UTF-8
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if not args.skip_apply:
        execute(["logger", "sfconfig: started %s" % sys.argv[1:]])
    print("[%s] Running sfconfig" % time.ctime())

    # Create required directories
    allyaml = "%s/group_vars/all.yaml" % args.ansible_root
    for dirname in (args.ansible_root,
                    "%s/group_vars" % args.ansible_root,
                    "%s/facts" % args.ansible_root,
                    "%s/ara" % args.ansible_root,
                    args.lib,
                    "%s/ssh_keys" % args.lib,
                    "%s/certs" % args.lib):
        if not os.path.isdir(dirname):
            os.makedirs(dirname, 0o700)

    if args.recover:
        if os.path.isfile(args.recover):
            extract_backup(args.recover)
        elif os.path.isdir(args.recover):
            copy_backup(args.recover)
        else:
            print('Backup archive or directory was not found')
            sys.exit(1)
        bootstrap_backup()

    args.sfconfig = yaml_load(args.config)
    args.sfarch = yaml_load(args.arch)
    args.secrets = yaml_load("%s/secrets.yaml" % args.lib)

    args.glue = {
        'sf_tasks_dir': "%s/ansible/tasks" % args.share,
        'sf_templates_dir': "%s/templates" % args.share,
        'sf_playbooks_dir': "%s" % args.ansible_root,
        'logservers': [],
        'executor_hosts': [],
    }

    # Make sure the yaml files are updated
    sfconfig.upgrade.update_sfconfig(args)
    sfconfig.upgrade.update_arch(args)

    # Save arch if needed
    if args.save_arch:
        save_file(args.sfarch, args.arch)

    # Parse components options
    for component in components.values():
        component.argparse(args)

    # Prepare components
    for host in args.sfarch["inventory"]:
        # TODO: do not force $fqdn as host domain name
        if "hostname" not in host:
            host["hostname"] = "%s.%s" % (host["name"],
                                          args.sfconfig["fqdn"])
        for role in host["roles"]:
            # Set component_host variable by default
            args.glue["%s_host" % role.replace('-', '_')] = host["hostname"]
            if role not in components:
                continue
            components[role].prepare(args)

    # Process the arch and render playbooks
    sfconfig.arch.process(args)
    sfconfig.inventory.generate(args)

    # Check if fqdn should be updated
    args.glue["update_fqdn"] = False
    if os.path.isfile("/var/lib/software-factory/.version") and \
       os.path.isfile(allyaml):
        previous_args = yaml_load(allyaml)
        if args.sfconfig['fqdn'] != previous_args['fqdn']:
            args.glue["update_fqdn"] = True

    # Generate group vars
    sfconfig.groupvars.load(args)
    for host in args.sfarch["inventory"]:
        for role in host["roles"]:
            if role not in components:
                continue
            if not args.skip_setup:
                components[role].configure(args, host)

    # Set rdo_release_url as global vars to be usable by sf-base and
    # sf-upgrade
    args.glue["rdo_release_url"] = args.defaults["rdo_release_url"]

    # Save config if needed
    if args.save_sfconfig:
        save_file(args.sfconfig, args.config)

    # Generate group vars
    with open(allyaml, "w") as allvars_file:
        # Add legacy content
        args.glue.update(yaml_load(args.config))
        if os.path.isfile(args.extra):
            args.glue.update(yaml_load(args.extra))
        args.glue.update(args.sfarch)
        yaml_dump(args.glue, allvars_file)

    # Validate role settings
    for host in args.sfarch["inventory"]:
        for role in host["roles"]:
            if role not in components:
                continue
            components[role].validate(args, host)

    sfconfig.inventory.run(args)

    if not args.skip_apply:
        execute(["logger", "sfconfig.py: ended"])
        if not args.disable and not args.erase:
            print("""%s: SUCCESS

Access dashboard: https://%s
Login with admin user, get the admin password by running:

  awk '/admin_password/ {print $2}' /etc/software-factory/sfconfig.yaml
""" % (args.sfconfig['fqdn'], args.sfconfig['fqdn']))
            if (not args.sfconfig['authentication']['SAML2']['disabled'] and
                    not os.path.isfile(saml_idp_file)):
                print("""
Service Provider metadata is available at /etc/httpd/saml2/mellon_metadata.xml
Once you have the Identity Provider metadata, run:

  sfconfig --set-idp-metadata <path/to/metadata.xml>
""")
        try:
            notification = open(
                "/var/lib/software-factory/ansible/notification.txt").read()
            if notification:
                print(notification)
        except IOError:
            pass