def main():
    """Package the Hillview binaries and deploy them to a Greenplum cluster.

    Takes a single CLI argument: the JSON cluster configuration file.
    Builds the binary package locally, uploads and unpacks it on the web
    server, installs the Greenplum JDBC driver, and generates the remote
    hillview.json configuration.
    """
    parser = ArgumentParser()
    parser.add_argument("config", help="json cluster configuration file")
    args = parser.parse_args()
    config = get_config(parser, args)
    execute_command("./package-binaries.sh")
    web = config.get_webserver()
    web.copy_file_to_remote("../hillview-bin.zip", ".", "")
    web.run_remote_shell_command("unzip -o hillview-bin.zip")
    web.copy_file_to_remote("config-greenplum.json", "bin", "")
    web.run_remote_shell_command(
        "cd bin; ./upload-data.py -d . -s move-greenplum.sh config-greenplum.json")
    web.run_remote_shell_command("cd bin; ./redeploy.sh -s config-greenplum.json")
    # Install the Greenplum JDBC driver into Tomcat's lib folder.
    web.copy_file_to_remote(
        "../repository/PROGRESS_DATADIRECT_JDBC_DRIVER_PIVOTAL_GREENPLUM_5.1.4.000275.jar",
        config.service_folder + "/" + config.tomcat + "/lib", "")
    # Generate configuration file.
    # BUG FIX: open in text mode ("r"), not binary ("rb"), for json.load,
    # and do not shadow the builtin `dict`.
    with open("greenplum.json", "r") as f:
        settings = json.load(f)
    settings["greenplumMoveScript"] = config.service_folder + "/move-greenplum.sh"
    settings["hideDemoMenu"] = "true"
    settings["enableSaveAs"] = "true"
    tmp = tempfile.NamedTemporaryFile(mode="w", delete=False)
    try:
        tmp.write(json.dumps(settings))
        tmp.close()
        web.copy_file_to_remote(tmp.name, config.service_folder + "/hillview.json", "")
    finally:
        # BUG FIX: remove the temporary file even if the remote copy fails.
        os.remove(tmp.name)
def main():
    """Upload files in round-robin fashion to the worker machines of a cluster.

    Requires the -d/--directory destination; files listed after the options
    are distributed round-robin, while -s/--common files go to every machine.
    """
    parser = ArgumentParser(
        epilog="The argument in the list are uploaded in round-robin "
               "to the worker machines in the cluster")
    parser.add_argument("config", help="json cluster configuration file")
    parser.add_argument("-d", "--directory",
                        help="destination folder where output is written"
                             " (if relative it is with respect to config.service_folder)")
    parser.add_argument("-L", "--symlinks",
                        help="Follow symlinks instead of ignoring them",
                        action="store_true")
    parser.add_argument("--common", "-s",
                        help="File that is loaded to all machines",
                        action="append")
    parser.add_argument("files", help="Files to copy", nargs=REMAINDER)
    args = parser.parse_args()
    config = get_config(parser, args)
    folder = args.directory
    if folder is None:
        logger.error("Directory argument is mandatory")
        parser.print_help()
        exit(1)
    if args.symlinks:
        copyOptions = "-L"
    else:
        copyOptions = ""
    if not os.path.isabs(folder):
        folder = os.path.join(config.service_folder, folder)
        message = "Folder is relative, using " + folder
        logger.info(message)
    # BUG FIX: with action="append", args.common is None (not an empty list)
    # when -s/--common is never supplied; iterating None raises TypeError.
    if args.common:
        for c in args.common:
            copy_everywhere(config, c, folder, copyOptions)
    if args.files:
        copy_files(config, folder, args.files, copyOptions)
    else:
        logger.info("No files to upload to the machines provided in a Hillview configuration")
    logger.info("Done.")
def main():
    """Verify that the web server and all workers of a cluster are reachable."""
    cli = ArgumentParser()
    cli.add_argument("config", help="json cluster configuration file")
    opts = cli.parse_args()
    cluster = get_config(cli, opts)
    check_webserver(cluster)
    check_workers(cluster)
def main():
    """Check the web server, then every aggregator and every worker in turn."""
    cli = ArgumentParser()
    cli.add_argument("config", help="json cluster configuration file")
    opts = cli.parse_args()
    cfg = get_config(cli, opts)
    check_webserver(cfg)
    cfg.run_on_all_aggregators(lambda remote: check_aggregator(cfg, remote))
    cfg.run_on_all_workers(lambda remote: check_worker(cfg, remote))
def main():
    """Run a shell command on every machine described by the configuration."""
    cli = ArgumentParser()
    cli.add_argument("config", help="json cluster configuration file")
    cli.add_argument("command", help="command to run", nargs=REMAINDER)
    opts = cli.parse_args()
    cfg = get_config(cli, opts)
    # The remaining CLI words form the single command string to execute.
    execute_command_on_all(cfg, " ".join(opts.command), False)
def main():
    """Download files matching a filename pattern from the cluster machines."""
    cli = ArgumentParser()
    cli.add_argument("config", help="json cluster configuration file")
    cli.add_argument("pattern", help="Filename pattern to download")
    opts = cli.parse_args()
    cfg = get_config(cli, opts)
    copy_files(cfg, opts.pattern)
    logger.info("Done.")
def main():
    """Delete a folder on every machine of the cluster.

    A relative folder is interpreted with respect to config.service_folder.
    """
    cli = ArgumentParser()
    cli.add_argument("config", help="json cluster configuration file")
    cli.add_argument("folder", help="Folder to delete from all machines")
    opts = cli.parse_args()
    cfg = get_config(cli, opts)
    target = opts.folder
    if not os.path.isabs(target):
        target = os.path.join(cfg.service_folder, target)
    delete_folder(cfg, target)
inventory.set(DATANODE, worker.host) inventory.add_section(DEFAULT_VARS) inventory.set(DEFAULT_VARS, "ansible_user", config.get_user()) inventory.set(DEFAULT_VARS, "hadoop_version", HADOOP_VERSION) inventory.write(file) file.flush() def get_deployment_dir(): """ Assumes there is a deployment folder in the project root that contains the needed ansible files. :return: The absolute path to the deployment folder. """ project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) return os.path.join(project_root, "deployment") if __name__ == "__main__": parser = ArgumentParser() parser.add_argument("config", help="json cluster configuration file") args = parser.parse_args() config = get_config(parser, args) with NamedTemporaryFile(mode="w") as inventory_file: write_inventory_file(config, inventory_file) ansible_runner.run(project_dir=get_deployment_dir(), inventory=inventory_file.name, playbook="install-hdfs.yml")