import json
import os
import os.path as osp
import sys

import base
import envfiles
import install_config
import migration_backup
import migration_base


def initial_install(dip_home_path, base_port):
    # Fresh install of an API node data directory
    base._sanity_check()
    dip_home = base.DipHome(dip_home_path)
    node_type = 'api'

    print "[+] Performing initial install"
    base.create_apinode_folders(dip_home)
    migration_base.write_version(dip_home)

    print "[+] Writing default install config file"
    install_config.initConfig(dip_home, base_port, node_type, "auto")

    print "[+] Writing default env file"
    envfiles.write_envdefault(dip_home)

    base.link_apinode_binaries(dip_home, os.environ["DKUINSTALLDIR"])
    base.generate_supervisor_key(dip_home)

    base.json_dumpf(
        osp.join(dip_home.path, "config", "server.json"),
        {
            "remappedConnections": {},
            "auditLog": {
                "logAuthFailures": True,
                "logQueries": True
            }
        })
    base.json_dumpf(osp.join(dip_home.path, "config", "adminkeys.json"), {"keys": []})
    base.json_dumpf(osp.join(dip_home.path, "loaded-data-mapping.json"), {})
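
# Example (illustrative only): a fresh API-node install might be driven as
# below, assuming DKUINSTALLDIR points at an unpacked DSS kit and the target
# data directory is empty. Both paths and the port are hypothetical values.
#
#   os.environ["DKUINSTALLDIR"] = "/opt/dataiku-dss-kit"
#   initial_install("/home/dataiku/apinode_data", 12000)
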
def migrate(import_path, current_version):
    base._sanity_check()
    migrations = migration_base.VERSIONS_MIGRATION_LIBRARY.select_migrations(current_version)
    print "Selected %s" % migrations

    project_paths = migration_base.ProjectPaths(
        config=osp.join(import_path, "project_config"),
        managed_fs_datasets=osp.join(import_path, "managed_datasets"),
        managed_folders=osp.join(import_path, "managed_folders"),
        analysis_data=osp.join(import_path, "analysis-data"),
        saved_models=osp.join(import_path, "saved_models"),
        jupyter_notebooks=osp.join(import_path, "ipython_notebooks"),
        jupyter_exports=osp.join(import_path, "jupyter_exports"))

    if migrations:
        print "Executing the following PROJECT-SPECIFIC migrations"
        for migration in migrations:
            print migration
            for operation in migration.operations:
                if isinstance(operation, migration_base.ProjectLocalMigrationOperation):
                    print " - ", operation

        # No validation nor backup for project migrations
        for migration in migrations:
            print migration
            for operation in migration.operations:
                if isinstance(operation, migration_base.ProjectLocalMigrationOperation):
                    print " - ", operation
                    operation.execute(project_paths)

        # Collect additions to the export manifest
        additions = {}
        for migration in migrations:
            print migration
            for operation in migration.operations:
                if isinstance(operation, migration_base.ProjectLocalMigrationOperation):
                    print " - ", operation
                    operation.get_manifest_additions(additions, project_paths)

        if len(additions) > 0:
            with open(osp.join(import_path, "export-manifest-additions.json"), "w") as f:
                json.dump(additions, f)
    else:
        print "NO MIGRATION for %s" % current_version
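
# Example (illustrative only): migrating an exported project unpacked under a
# scratch directory. The path and version string below are hypothetical; the
# version must be one known to VERSIONS_MIGRATION_LIBRARY.
#
#   migrate("/tmp/project-export", "4.0.0")
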
def initial_install(dip_home_path, base_port, node_type, install_size):
    # Fresh install of a DSS data directory
    base._sanity_check()
    dip_home = base.DipHome(dip_home_path)

    print "[+] Performing initial install"
    migration_base.write_version(dip_home)

    print "[+] Writing default install config file"
    install_config.initConfig(dip_home, base_port, node_type, install_size)

    print "[+] Writing default env file"
    envfiles.write_envdefault(dip_home)

    base.link_dss_binaries(dip_home, os.environ["DKUINSTALLDIR"])
    base.generate_supervisor_key(dip_home)
    base.create_dss_folders(dip_home)
    base.ensure_required_dss_files(dip_home)
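
# Example (illustrative only): a fresh design-node install, again assuming
# DKUINSTALLDIR is exported in the environment. The path and port are
# hypothetical; "auto" sizing mirrors the default used by the API-node
# installer above.
#
#   initial_install("/home/dataiku/dss_data", 11200, "design", "auto")
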
def migrate(dip_home_path):
    base._sanity_check()
    dip_home = base.DipHome(dip_home_path)

    backup = migration_backup.BackupData(dip_home_path)
    if backup.load():
        print "****************************************************"
        print "* PREVIOUS DSS MIGRATION WAS ABORTED, ROLLING BACK *"
        print "****************************************************"
        backup.restore()
        print "Restore complete, removing marker file"
        backup.delete()
    assert not backup.load()

    try:
        # First, migrate the configuration before upgrading the binary links
        current_version = dip_home.get_conf_version()
        node_type = migration_base.get_node_type(dip_home)
        assert node_type == 'design' or node_type == 'automation'

        migrations = migration_base.VERSIONS_MIGRATION_LIBRARY.select_migrations(current_version)

        if migrations:
            print "Executing the following migrations"
            for migration in migrations:
                migration.execute(dip_home, node_type, simulate=True)

            if os.getenv("DKU_MIGRATE_YES") is None:
                print "Continue? (Y/[N])",
                sys.stdout.flush()
                if "y" != raw_input().strip().lower():
                    print "Aborting!"
                    sys.exit(1)

            backup.backup_file("dss-version.json")
            backup.backup_file("bin/env-default.sh")
            for fname in ["install.properties", "install.ini", "bin/env-spark.sh"]:
                if osp.isfile(osp.join(dip_home.path, fname)):
                    backup.backup_file(fname)

            # Not needed for the pre-4.0 layout: the known bugs that require
            # this backup only affect 4.0+
            if os.getenv("DKU_MIGRATE_NOBACKUP") is None:
                if osp.isdir(osp.join(dip_home.path, "databases")):
                    backup.backup_dir("databases")

            if os.getenv("DKU_MIGRATE_NOBACKUP") is None:
                print "Backing up your config ..."
                backup.backup_dir("config")

            backup.save()

            for migration in migrations:
                migration.execute(dip_home, node_type)

        # Write the final version
        migration_base.write_version(dip_home)

        # Update env-default
        envfiles.write_envdefault(dip_home)

        # Update the binary links
        base.link_dss_binaries(dip_home, os.environ["DKUINSTALLDIR"])
        base.generate_supervisor_key(dip_home)
        base.ensure_required_dss_files(dip_home)

        # We can now run "post-upgrade" migrations (for java migrations needing the new binaries)
        if migrations is not None:
            print "Executing post-migrations"
            for migration in migrations:
                migration.post_execute(dip_home, node_type)

        print "Migration done, removing marker"
        backup.delete()
    except Exception, e:
        print "******************************************"
        print "* MIGRATION FAILED"
        print "******************************************"
        print "* Attempting to rollback"
        backup.restore()
        print "Restore complete, removing marker file"
        backup.delete()
        raise e, None, sys.exc_info()[2]
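
# Example (illustrative only): unattended upgrade of an existing data
# directory. Setting DKU_MIGRATE_YES skips the interactive confirmation and
# DKU_MIGRATE_NOBACKUP skips the config/databases backup (both checked above);
# the path is hypothetical.
#
#   os.environ["DKU_MIGRATE_YES"] = "1"
#   migrate("/home/dataiku/dss_data")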