def test_compose_command(self):
    """Verify compose_command builds the expected submission command string."""
    # Set up the config parser from the template configuration and schema.
    config = ArgoConfig(CONF_TEMPLATE, CONF_SCHEMA)
    parser = argparse.ArgumentParser()
    parser.add_argument('--tenant')
    parser.add_argument('--sudo', action='store_true')
    args = parser.parse_args(['--tenant', 'TENANTA', '--sudo'])
    # Build the command once (compose_command returns a tuple; the command
    # itself is the first element).  The original called compose_command
    # twice and leaked a Python-2 `print` debug statement into the test.
    cmd_command = compose_command(config, args)[0]
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(expected_result, cmd_to_string(cmd_command))
def main(args=None):
    """Entry point: load configuration, build the job command and submit it."""
    # Resolve the configuration file locations and initialise logging.
    paths = get_config_paths(args.config)
    get_log_conf(paths['log'])

    # Load the main configuration together with its validation schema.
    config = ArgoConfig(paths["main"], paths["schema"])

    # Bail out early when the requested tenant has no configuration section.
    if not config.has("TENANTS:" + args.tenant):
        log.info("Tenant: " + args.tenant + " doesn't exist.")
        sys.exit(1)

    # The date may carry a time component ("YYYY-MM-DDTHH:MM:SSZ");
    # keep only the date part before splitting into numeric fields.
    date_part = args.date.split("T")[0]
    year, month, day = (int(piece) for piece in date_part.split("-"))

    # Map each argument name to the hdfs path command associated with it.
    hdfs_commands = compose_hdfs_commands(year, month, day, args, config)

    cmd_command, job_namespace = compose_command(config, args, hdfs_commands)

    log.info("Getting ready to submit job")
    log.info(cmd_to_string(cmd_command) + "\n")

    # Hand the assembled command over to the flink cluster.
    flink_job_submit(config, cmd_command, job_namespace)
def test_compose_command(self):
    """Verify compose_command builds the expected batch command string."""
    config = ArgoConfig(CONF_TEMPLATE, CONF_SCHEMA)

    parser = argparse.ArgumentParser()
    parser.add_argument('--tenant')
    parser.add_argument('--date')
    parser.add_argument('--report')
    parser.add_argument('--sudo', action='store_true')
    parser.add_argument('--method')
    args = parser.parse_args([
        '--tenant', 'TENANTA', '--date', '2018-02-11', '--report', 'Critical',
        '--method', 'upsert', '--sudo'
    ])

    # Expected hdfs base paths derived from the template configuration.
    hdfs_metric = ("hdfs://hdfs_test_host:hdfs_test_port/user/"
                   "hdfs_test_user/argo/tenants/TENANTA/mdata")
    hdfs_sync = ("hdfs://hdfs_test_host:hdfs_test_port/user/"
                 "hdfs_test_user/argo/tenants/TENANTA/sync")

    # One dict literal instead of eight item-by-item assignments.
    test_hdfs_commands = {
        "--pdata": hdfs_metric + "/2018-02-10",
        "--mdata": hdfs_metric + "/2018-02-11",
        "--conf": hdfs_sync + "/TENANTA_Critical_cfg.json",
        "--mps": hdfs_sync + "/Critical/metric_profile_2018-02-11.avro",
        "--ops": hdfs_sync + "/TENANTA_ops.json",
        "--apr": hdfs_sync + "/TENANTA_Critical_ap.json",
        "--egp": hdfs_sync + "/Critical/group_endpoints_2018-02-11.avro",
        "--ggp": hdfs_sync + "/Critical/group_groups_2018-02-11.avro",
    }

    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(
        expected_result,
        cmd_to_string(compose_command(config, args, test_hdfs_commands)))
def main(args=None):
    """Entry point: load configuration, compose the job command and submit it."""
    # Locate configuration files and set up logging.
    paths = get_config_paths(args.config)
    get_log_conf(paths['log'])

    # Load the main configuration and its validation schema.
    config = ArgoConfig(paths["main"], paths["schema"])

    # Refuse to run for a tenant that has no configuration section.
    if not config.has("TENANTS:" + args.tenant):
        log.info("Tenant: " + args.tenant + " doesn't exist.")
        sys.exit(1)

    cmd_command, job_namespace = compose_command(config, args)

    log.info("Getting ready to submit job")
    log.info(cmd_to_string(cmd_command) + "\n")

    # Submit the composed command to the flink cluster.
    flink_job_submit(config, cmd_command, job_namespace)
def main(args=None):
    """Entry point: refresh profiles, compose and submit the batch flink job."""
    # Resolve configuration file locations and initialise logging.
    conf_paths = get_config_paths(args.config)
    get_log_conf(conf_paths['log'])

    # Load the main configuration together with its validation schema.
    config = ArgoConfig(conf_paths["main"], conf_paths["schema"])

    # Fix: tolerate full ISO timestamps ("YYYY-MM-DDTHH:MM:SSZ") by keeping
    # only the date part before splitting into numeric fields, consistent
    # with the other entry points in this project.  Splitting only on "-"
    # crashed with ValueError on e.g. int("05T00:00:00Z").
    year, month, day = [int(x) for x in args.date.split("T")[0].split("-")]

    # Refuse to run for a tenant that has no configuration section.
    if not config.has("TENANTS:" + args.tenant):
        log.info("Tenant: " + args.tenant + " doesn't exist.")
        sys.exit(1)

    # Check for and upload any pending recomputations for this run.
    upload_recomputations(args.tenant, args.report, args.date, config)

    # Optional call to check for (and fetch) updated profile definitions.
    if args.profile_check:
        profile_mgr = ArgoProfileManager(config)
        profile_type_checklist = ["operations", "aggregations", "reports",
                                  "thresholds"]
        for profile_type in profile_type_checklist:
            profile_mgr.profile_update_check(args.tenant, args.report,
                                             profile_type)

    # Map each argument name to the hdfs path command associated with it.
    hdfs_commands = compose_hdfs_commands(year, month, day, args, config)

    cmd_command = compose_command(config, args, hdfs_commands)

    log.info("Getting ready to submit job")
    log.info(cmd_to_string(cmd_command) + "\n")

    # Submit the composed command to the flink cluster.
    flink_job_submit(config, cmd_command)
def test_compose_command(self):
    """Verify compose_command builds the expected streaming command string."""
    config = ArgoConfig(CONF_TEMPLATE, CONF_SCHEMA)

    parser = argparse.ArgumentParser()
    parser.add_argument('--tenant')
    parser.add_argument('--date')
    parser.add_argument('--report')
    parser.add_argument('--sudo', action='store_true')
    parser.add_argument('--timeout')
    args = parser.parse_args(
        ['--tenant', 'TENANTA', '--date', '2018-03-05T00:00:00Z',
         '--report', 'Critical', '--timeout', '500', '--sudo'])

    # Expected hdfs sync base path derived from the template configuration.
    hdfs_sync = ("hdfs://hdfs_test_host:hdfs_test_port/user/"
                 "hdfs_test_user/argo/tenants/TENANTA/sync")

    # One dict literal instead of four item-by-item assignments.
    test_hdfs_commands = {
        "--sync.mps": hdfs_sync + "/Critical/metric_profile_2018-03-01.avro",
        "--sync.ops": hdfs_sync + "/TENANTA_ops.json",
        "--sync.apr": hdfs_sync + "/TENANTA_Critical_ap.json",
        "--sync.egp": hdfs_sync + "/Critical/group_endpoints_2018-03-01.avro",
    }

    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual(
        expected_result,
        cmd_to_string(compose_command(config, args, test_hdfs_commands)[0]))