Example #1
import os
from os import path as osp

import base, envfiles, install_config, migration_base

def initial_install(dip_home_path, base_port):
    base._sanity_check()

    dip_home = base.DipHome(dip_home_path)
    node_type = 'api'

    print "[+] Performing initial install"
    base.create_apinode_folders(dip_home)
    migration_base.write_version(dip_home)

    print "[+] Writing default install config file"
    install_config.initConfig(dip_home, base_port, node_type, "auto")

    print "[+] Writing default env file"
    envfiles.write_envdefault(dip_home)

    base.link_apinode_binaries(dip_home, os.environ["DKUINSTALLDIR"])
    base.generate_supervisor_key(dip_home)

    base.json_dumpf(
        osp.join(dip_home.path, "config", "server.json"), {
            "remappedConnections": {},
            "auditLog": {
                "logAuthFailures": True,
                "logQueries": True
            }
        })
    base.json_dumpf(osp.join(dip_home.path, "config", "adminkeys.json"),
                    {"keys": []})
    base.json_dumpf(osp.join(dip_home.path, "loaded-data-mapping.json"), {})
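The json_dumpf helper used above is not shown in these examples; a
minimal sketch of what it presumably does, inferred from its call
sites (a hypothetical stand-in, not the actual base module):

import json

def json_dumpf(path, obj):
    # Hypothetical stand-in for base.json_dumpf: serialize obj as
    # JSON into the file at path.
    with open(path, "w") as f:
        json.dump(obj, f, indent=2)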
Example #2
import os

import base, envfiles, install_config, migration_base

def initial_install(dip_home_path, base_port, node_type, install_size):
    base._sanity_check()

    dip_home = base.DipHome(dip_home_path)

    print "[+] Performing initial install"

    migration_base.write_version(dip_home)

    print "[+] Writing default install config file"
    install_config.initConfig(dip_home, base_port, node_type, install_size)

    print "[+] Writing default env file"
    envfiles.write_envdefault(dip_home)

    base.link_dss_binaries(dip_home, os.environ["DKUINSTALLDIR"])
    base.generate_supervisor_key(dip_home)
    base.create_dss_folders(dip_home)
    base.ensure_required_dss_files(dip_home)
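A hypothetical invocation for a design node (the path and port values
are illustrative only; "auto" matches the install_size passed in
Example #1):

initial_install("/home/dataiku/dss_data", 11200, "design", "auto")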
Example #3
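    # Excerpt from generate_nginx_config (entry point shown below):
    # the elided lines above presumably load the nginx template into
    # result, read the install config, and collect tmpl_vars.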
    static_sec_headers = ""
    api_sec_headers = ""

    xfo = config.getOption("server", "x-frame-options")
    if xfo:
        static_sec_headers += '      add_header "X-Frame-Options" %s;\n' % xfo
    csp = config.getOption("server", "content-security-policy")
    if csp:
        static_sec_headers += '      add_header "Content-Security-Policy" "%s";\n' % csp
    xxss = config.getOption("server", "X-XSS-Protection")
    if xxss:
        static_sec_headers += '      add_header "X-XSS-Protection" "%s";\n' % xxss

    xcto = config.getOption("server", "x-content-type-options")
    if xcto:
        static_sec_headers += '      add_header "X-Content-Type-Options" "%s";\n' % xcto
        api_sec_headers += '      add_header "X-Content-Type-Options" "%s";\n' % xcto

    result = result.replace("{{STATIC_SECURITY_HEADERS_DIRECTIVES}}", static_sec_headers)
    result = result.replace("{{API_SECURITY_HEADERS_DIRECTIVES}}", api_sec_headers)

    for var in tmpl_vars:
        result = result.replace("{{%s}}" % var, quote(os.environ[var]))
    return result

#
# Prints the nginx configuration on standard output
#
if __name__ == "__main__":
    print generate_nginx_config(base.DipHome(os.environ["DIP_HOME"]))
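The getOption accessor used throughout these examples is not shown; a
plausible sketch, assuming InstallConfig wraps a standard ConfigParser
over install.ini (the method body is an assumption):

def getOption(self, section, option, default=None):
    # Hypothetical: return the raw string value, or default when the
    # section/option pair is absent.
    if self.config.has_option(section, option):
        return self.config.get(section, option)
    return default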
Example #4
import base, os

# Prints DSS version on standard output

if __name__ == "__main__":
    dh = base.DipHome(os.environ["DIP_HOME"])
    print(dh.get_dss_version())
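get_dss_version is not shown in these examples; a plausible sketch,
assuming the version is recorded in the dss-version.json file that
Example #5 backs up (the key name is an assumption):

import json
from os import path as osp

def get_dss_version(self):
    # Hypothetical: read the version written at install/migration time.
    with open(osp.join(self.path, "dss-version.json")) as f:
        return json.load(f)["product_version"]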
Example #5
import os
from os import path as osp
import sys

import base, envfiles
import migration_base, migration_backup

def migrate(dip_home_path):
    base._sanity_check()

    dip_home = base.DipHome(dip_home_path)

    backup = migration_backup.BackupData(dip_home_path)
    if backup.load():
        print "****************************************************"
        print "* PREVIOUS DSS MIGRATION WAS ABORTED, ROLLING BACK *"
        print "****************************************************"
        backup.restore()
        print "Restore complete, removing marker file"
        backup.delete()

    assert not backup.load()

    try:
        # First, migrate the configuration before upgrading the binary links
        current_version = dip_home.get_conf_version()
        node_type = migration_base.get_node_type(dip_home)
        assert node_type == 'design' or node_type == 'automation'
        migrations = migration_base.VERSIONS_MIGRATION_LIBRARY.select_migrations(
            current_version)
        if migrations:

            print "Executing the following migrations"
            for migration in migrations:
                migration.execute(dip_home, node_type, simulate=True)
            if os.getenv("DKU_MIGRATE_YES") is None:
                print "Continue? (Y/[N])",
                sys.stdout.flush()
                if "y" != raw_input().strip().lower():
                    print "Aborting!"
                    sys.exit(1)

            backup.backup_file("dss-version.json")
            backup.backup_file("bin/env-default.sh")
            for fname in [
                    "install.properties", "install.ini", "bin/env-spark.sh"
            ]:
                if osp.isfile(osp.join(dip_home.path, fname)):
                    backup.backup_file(fname)

            # not needed for the pre-4.0 layout, the bugs we know and which require the backup are for 4.0+
            if os.getenv("DKU_MIGRATE_NOBACKUP") is None:
                if osp.isdir(osp.join(dip_home.path, "databases")):
                    backup.backup_dir("databases")

            if os.getenv("DKU_MIGRATE_NOBACKUP") is None:
                print "Backing up your config ..."
                backup.backup_dir("config")

            backup.save()

            for migration in migrations:
                migration.execute(dip_home, node_type)

        # Write the final version
        migration_base.write_version(dip_home)
        # Update env-default
        envfiles.write_envdefault(dip_home)

        # Update the binary links
        base.link_dss_binaries(dip_home, os.environ["DKUINSTALLDIR"])
        base.generate_supervisor_key(dip_home)
        base.ensure_required_dss_files(dip_home)

        # We can now run "Post-upgrade" migrations (for java migrations needing the new binaries)
        if migrations is not None:
            print "Executing post-migrations"
            for migration in migrations:
                migration.post_execute(dip_home, node_type)

            #raise Exception("boom")

            print "Migration done, removing marker"
            backup.delete()

    except Exception, e:
        print "******************************************"
        print "* MIGRATION FAILED"
        print "******************************************"
        print "* Attempting to rollback"
        backup.restore()
        print "Restore complete, removing marker file"
        backup.delete()
        raise e, None, sys.exc_info()[2]
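The three-argument raise on the last line is the Python 2 idiom for
re-raising the caught exception after the rollback while preserving
the original traceback; the Python 3 equivalent at the same point
would simply be:

        # Bare raise inside the except block re-raises with the
        # original traceback intact (Python 3 spelling).
        raise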
Example #6
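# Excerpt: tail of the env-default.sh template string returned by one
# of the builders used in write_envdefault below; the elided lines
# above fill in the rest of the template.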
export DKU_DKU_JAVA_LIBRARY_PATH="%s"

""" % (install_dir, node_type, base_port,
    base_port, base_port + 1, base_port + 2, base_port + 3,
    javabin, javaOpts,
    backendJavaOpts, fekJavaOpts, hproxyJavaOpts, jekJavaOpts, dkuJavaOpts,
    backendJavaLibraryPath, fekJavaLibraryPath, hproxyJavaLibraryPath, jekJavaLibraryPath, dkuJavaLibraryPath)


def write_envdefault(dip_home):
    installConfig = dip_home.get_install_config()
    node_type = installConfig.getNodeType()
    if node_type == 'design' or node_type == "automation":
        content = get_designnode_envdefault(os.environ["DKUINSTALLDIR"], os.environ["DKUJAVABIN"], installConfig)
    elif node_type == 'api':
        content = get_apinode_envdefault(os.environ["DKUINSTALLDIR"], os.environ["DKUJAVABIN"], installConfig)
    else:
        raise Exception("node type not supported: " % node_type)
    with open(osp.join(dip_home.path, "bin", "env-default.sh"), "w") as f:
        f.write(content)

#
# Regenerates the env-default file in $DIP_HOME
# Usage: envfiles.py -save
#
if __name__ == "__main__":
    if len(sys.argv) == 2 and sys.argv[1] == '-save':
        write_envdefault(base.DipHome(os.environ["DIP_HOME"]))
    else:
        print >>sys.stderr, "Usage: %s -save" % sys.argv[0]
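Equivalently, the regeneration can be triggered programmatically
(assuming DIP_HOME, DKUINSTALLDIR and DKUJAVABIN are set in the
environment, as the code above requires):

import os
import base, envfiles

envfiles.write_envdefault(base.DipHome(os.environ["DIP_HOME"]))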
Example #7
import sys

import base
import migration_base, migrate_dss, migrate_apinode

# Automatically migrates the proper node type

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print >> sys.stderr, "Bad usage"
        sys.exit(1)

    dh = base.DipHome(sys.argv[1])
    node_type = migration_base.get_node_type(dh)

    if node_type == "design" or node_type == "automation":
        migrate_dss.migrate(sys.argv[1])
    elif node_type == "api":
        migrate_apinode.migrate(sys.argv[1])
    else:
        raise Exception("Don't know how to migrate %s" % node_type)
Example #8
#
# Main entry point: helper program to get/set entries
#
if __name__ == "__main__":

    def usage():
        print >> sys.stderr, """Usage:
        install_config.py -get (nodetype | server.port | SECTION OPTION)
        install_config.py -set SECTION OPTION VALUE
        """
        sys.exit(1)

    if len(sys.argv) < 2:
        usage()

    dipHome = base.DipHome(os.environ['DIP_HOME'])
    config = dipHome.get_install_config()

    if sys.argv[1] == "-get" and len(sys.argv) == 3:
        if sys.argv[2] == 'nodetype':
            print config.getNodeType()

        elif sys.argv[2] == 'server.port':
            print config.getServerPort()

        else:
            usage()

    elif sys.argv[1] == "-get" and len(sys.argv) == 4:
        print config.getOption(sys.argv[2], sys.argv[3])
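    elif sys.argv[1] == "-set" and len(sys.argv) == 5:
        # Hypothetical completion of the -set branch advertised in the
        # usage text; setOption and save are assumed method names, not
        # confirmed by these examples.
        config.setOption(sys.argv[2], sys.argv[3], sys.argv[4])
        config.save()

    else:
        usage()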
Example #9
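    # Excerpt from generate_collectd_config (entry point shown below);
    # the elided lines above presumably load the collectd template and
    # install config, build tmpl_vars, and import subprocess.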
                                                  "graphite_server").split(":")
    collectd_interval = config.getIntOption("collectd", "interval", 300)
    collectd_prefix = config.getOption("collectd", "prefix")
    result = template.replace("{{CARBON_HOSTNAME}}", carbon_server)
    result = result.replace("{{CARBON_PORT}}", carbon_port)
    result = result.replace("{{COLLECTD_PREFIX}}", collectd_prefix)
    result = result.replace("{{COLLECTD_INTERVAL}}", str(collectd_interval))
    result = result.replace("{{ESCAPED_DIP_HOME}}",
                            os.environ["DIP_HOME"].replace(".", "\."))

    # Get datadir device
    df_stdout, df_stderr = subprocess.Popen(["df", os.environ["DIP_HOME"]],
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE,
                                            shell=False).communicate()
    datadir_device = df_stdout.strip().split("\n")[-1].split(" ")[0]
    datadir_disk = datadir_device.split("/")[-1]
    result = result.replace("{{DATADIR_DEVICE}}", datadir_device)
    result = result.replace("{{DATADIR_DISK}}", datadir_disk)

    for var in tmpl_vars:
        result = result.replace("{{%s}}" % var, os.environ[var])
    return result


#
# Prints the collectd configuration on standard output
#
if __name__ == "__main__":
    print(generate_collectd_config(base.DipHome(os.environ["DIP_HOME"])))
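For reference, a worked example of the df parsing above (values are
illustrative):

# Given df output such as:
#   Filesystem     1K-blocks     Used Available Use% Mounted on
#   /dev/sda1      103079200 52428800  45350400  54% /home
# the last line's first field yields:
#   datadir_device = "/dev/sda1"
#   datadir_disk   = "sda1"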
Example #10
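# Excerpt: defaultConfig and the add* helpers referenced below are
# defined elsewhere in the same module.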
import os
import sys

import base

def generate_supervisor_config(dipHome):
    installConfig = dipHome.get_install_config()
    config = defaultConfig(dipHome, installConfig)

    nodeType = installConfig.getNodeType()
    if nodeType == 'design' or nodeType == "automation":
        addBackend(config, installConfig)
        addIPython(config, installConfig)
        addNginx(config, installConfig)
    elif nodeType == 'api':
        addAPIMain(config, installConfig)
        addNginx(config, installConfig)
    else:
        raise Exception("Node type not supported: %s" % nodeType)

    if installConfig.getBoolOption("collectd", "enabled", False):
        addCollectd(config, installConfig)

    if installConfig.getOption('supervisord', 'kill_supervisord_if_child_dies_agressive') is not None:
        config.add_section("eventlistener:supervisord-watchdog")
        config.set("eventlistener:supervisord-watchdog", "events", "PROCESS_STATE_FATAL")
        config.set("eventlistener:supervisord-watchdog", "command", "'%(ENV_DKUINSTALLDIR)s/scripts/_kill-supervisord-if-child-dies-agressive.py'")
        config.set("eventlistener:supervisord-watchdog", 'stderr_logfile', '%(ENV_DIP_HOME)s/run/supervisord-watchdog.log')
    return config

#
# Prints the supervisor configuration on standard output
#
if __name__ == "__main__":
    generate_supervisor_config(base.DipHome(os.environ["DIP_HOME"])).write(sys.stdout)
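When the watchdog option is set, the eventlistener section added above
serializes (via ConfigParser.write) to:

[eventlistener:supervisord-watchdog]
events = PROCESS_STATE_FATAL
command = '%(ENV_DKUINSTALLDIR)s/scripts/_kill-supervisord-if-child-dies-agressive.py'
stderr_logfile = %(ENV_DIP_HOME)s/run/supervisord-watchdog.log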