def main(args=None):
    """Assemble and submit a flink batch job for the tenant/date in *args*."""
    # Resolve the paths of the main/schema/log configuration files
    conf_paths = get_config_paths(args.config)

    # Initialize logging from the dedicated log configuration
    get_log_conf(conf_paths['log'])

    # Load the main configuration, validated against its schema
    config = ArgoConfig(conf_paths["main"], conf_paths["schema"])

    # Bail out early when the requested tenant is not configured
    if not config.has("TENANTS:" + args.tenant):
        log.info("Tenant: " + args.tenant + " doesn't exist.")
        sys.exit(1)

    # args.date is an ISO-like timestamp; keep only the date part
    date_fields = args.date.split("T")[0].split("-")
    year, month, day = (int(field) for field in date_fields)

    # Map each command-line flag to its corresponding hdfs path
    hdfs_commands = compose_hdfs_commands(year, month, day, args, config)

    cmd_command, job_namespace = compose_command(config, args, hdfs_commands)

    log.info("Getting ready to submit job")
    log.info(cmd_to_string(cmd_command) + "\n")

    # Hand the assembled command over to the flink job submitter
    flink_job_submit(config, cmd_command, job_namespace)
def test_compose_command(self):
    """Check that compose_command builds the expected submission command
    for a batch run with explicit hdfs argument paths.
    """
    config = ArgoConfig(CONF_TEMPLATE, CONF_SCHEMA)

    # Build an args namespace identical to what the CLI would produce
    parser = argparse.ArgumentParser()
    parser.add_argument('--tenant')
    parser.add_argument('--date')
    parser.add_argument('--report')
    parser.add_argument('--sudo', action='store_true')
    parser.add_argument('--method')
    args = parser.parse_args([
        '--tenant', 'TENANTA', '--date', '2018-02-11', '--report', 'Critical',
        '--method', 'upsert', '--sudo'
    ])

    hdfs_metric = "hdfs://hdfs_test_host:hdfs_test_port/user/hdfs_test_user/argo/tenants/TENANTA/mdata"
    hdfs_sync = "hdfs://hdfs_test_host:hdfs_test_port/user/hdfs_test_user/argo/tenants/TENANTA/sync"

    # Expected hdfs paths keyed by the flag each one is passed under
    test_hdfs_commands = dict()
    test_hdfs_commands["--pdata"] = hdfs_metric + "/2018-02-10"
    test_hdfs_commands["--mdata"] = hdfs_metric + "/2018-02-11"
    test_hdfs_commands["--conf"] = hdfs_sync + "/TENANTA_Critical_cfg.json"
    test_hdfs_commands[
        "--mps"] = hdfs_sync + "/Critical/" + "metric_profile_2018-02-11.avro"
    test_hdfs_commands["--ops"] = hdfs_sync + "/TENANTA_ops.json"
    test_hdfs_commands["--apr"] = hdfs_sync + "/TENANTA_Critical_ap.json"
    test_hdfs_commands[
        "--egp"] = hdfs_sync + "/Critical/group_endpoints_2018-02-11.avro"
    test_hdfs_commands[
        "--ggp"] = hdfs_sync + "/Critical/group_groups_2018-02-11.avro"

    # assertEqual replaces the deprecated assertEquals alias
    # (removed in Python 3.12)
    self.assertEqual(
        expected_result,
        cmd_to_string(compose_command(config, args, test_hdfs_commands)))
def test_compose_command(self):
    """Check that compose_command builds the expected command when only
    tenant and sudo arguments are given.
    """
    # set up the config parser
    config = ArgoConfig(CONF_TEMPLATE, CONF_SCHEMA)

    parser = argparse.ArgumentParser()
    parser.add_argument('--tenant')
    parser.add_argument('--sudo', action='store_true')
    args = parser.parse_args(['--tenant', 'TENANTA', '--sudo'])

    # compose_command returns (command, namespace); only the command
    # string is compared here.
    # assertEqual replaces the deprecated assertEquals alias
    # (removed in Python 3.12)
    self.assertEqual(expected_result,
                     cmd_to_string(compose_command(config, args)[0]))
def main(args=None):
    """Compose and submit a flink job for the tenant given in *args*."""
    # Locate the main/schema/log configuration files
    conf_paths = get_config_paths(args.config)

    # Configure logging from the designated log config file
    get_log_conf(conf_paths['log'])

    # Load the main configuration, validated against its schema
    config = ArgoConfig(conf_paths["main"], conf_paths["schema"])

    # Refuse to continue for an unknown tenant
    tenant_section = "TENANTS:" + args.tenant
    if not config.has(tenant_section):
        log.info("Tenant: " + args.tenant + " doesn't exist.")
        sys.exit(1)

    cmd_command, job_namespace = compose_command(config, args)

    log.info("Getting ready to submit job")
    log.info(cmd_to_string(cmd_command) + "\n")

    # Submit the assembled command to flink
    flink_job_submit(config, cmd_command, job_namespace)
def main(args=None):
    """Prepare profiles/recomputations and submit an A/R flink batch job."""
    # Resolve configuration file locations
    conf_paths = get_config_paths(args.config)

    # Initialize logging from the log configuration
    get_log_conf(conf_paths['log'])

    # Load the main configuration against its schema
    config = ArgoConfig(conf_paths["main"], conf_paths["schema"])

    # args.date is expected as YYYY-MM-DD
    year, month, day = (int(part) for part in args.date.split("-"))

    # Unknown tenants are rejected up front
    if not config.has("TENANTS:"+args.tenant):
        log.info("Tenant: "+args.tenant+" doesn't exist.")
        sys.exit(1)

    # check and upload recomputations
    upload_recomputations(args.tenant, args.report, args.date, config)

    # Optionally refresh all profile types before composing the job
    if args.profile_check:
        profile_mgr = ArgoProfileManager(config)
        for profile_type in ("operations", "aggregations", "reports",
                             "thresholds"):
            profile_mgr.profile_update_check(args.tenant, args.report,
                                             profile_type)

    # Map each command-line flag to the hdfs path associated with it
    hdfs_commands = compose_hdfs_commands(year, month, day, args, config)

    cmd_command = compose_command(config, args, hdfs_commands)

    log.info("Getting ready to submit job")
    log.info(cmd_to_string(cmd_command)+"\n")

    # Submit the assembled command
    flink_job_submit(config, cmd_command)
def test_compose_command(self):
    """Check that compose_command builds the expected streaming command
    for a tenant/report/date with explicit sync hdfs paths.
    """
    config = ArgoConfig(CONF_TEMPLATE, CONF_SCHEMA)

    # Build an args namespace identical to what the CLI would produce
    parser = argparse.ArgumentParser()
    parser.add_argument('--tenant')
    parser.add_argument('--date')
    parser.add_argument('--report')
    parser.add_argument('--sudo', action='store_true')
    parser.add_argument('--timeout')
    args = parser.parse_args(
        ['--tenant', 'TENANTA', '--date', '2018-03-05T00:00:00Z',
         '--report', 'Critical', '--timeout', '500', '--sudo'])

    hdfs_sync = "hdfs://hdfs_test_host:hdfs_test_port/user/hdfs_test_user/argo/tenants/TENANTA/sync"

    # Expected sync-file hdfs paths keyed by their command-line flag
    test_hdfs_commands = dict()
    test_hdfs_commands["--sync.mps"] = hdfs_sync+"/Critical/"+"metric_profile_2018-03-01.avro"
    test_hdfs_commands["--sync.ops"] = hdfs_sync+"/TENANTA_ops.json"
    test_hdfs_commands["--sync.apr"] = hdfs_sync+"/TENANTA_Critical_ap.json"
    test_hdfs_commands["--sync.egp"] = hdfs_sync+"/Critical/group_endpoints_2018-03-01.avro"

    # assertEqual replaces the deprecated assertEquals alias
    # (removed in Python 3.12)
    self.assertEqual(
        expected_result,
        cmd_to_string(compose_command(config, args, test_hdfs_commands)[0]))
def main(args):
    """Run a full argo-engine update: sync tenant profiles, reconcile AMS
    definitions, refresh the configuration file and regenerate the crontab.
    """
    # Warn (but continue) when an explicit config path was given but missing;
    # get_config_paths falls back to its defaults in that case.
    if args.config is not None and not os.path.isfile(args.config):
        log.info(args.config + " file not found")

    # Resolve configuration file locations
    conf_paths = get_config_paths(args.config)

    # Initialize logging from the log configuration
    get_log_conf(conf_paths['log'])

    # Load the main configuration against its schema
    config = ArgoConfig(conf_paths["main"], conf_paths["schema"])

    # BUGFIX: message previously read "update stated." — typo for "started."
    # (pairs with the closing "Argo-engine update finished." message)
    log.info("Argo-engine update started.")

    # If backup-conf was selected, keep a timestamped copy of the current file
    if args.backup:
        date_postfix = datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
        backup = args.config + "." + date_postfix
        shutil.copyfile(args.config, backup)
        log.info("backed-up current configuration to: " + backup)

    # Pull tenant definitions from the API into the local configuration
    argo_profiles = ArgoProfileManager(config)
    argo_profiles.upload_tenants_cfg()
    config.save_as(conf_paths["main"])

    # Reload config and profile manager so they reflect the saved changes
    config = ArgoConfig(conf_paths["main"], conf_paths["schema"])
    argo_profiles = ArgoProfileManager(config)

    tenants = config.get("API", "tenants")
    profile_type_checklist = ["operations", "aggregations", "reports",
                              "thresholds", "recomputations"]

    # Refresh every profile type for every report of every tenant
    for tenant in tenants:
        reports = config.get("TENANTS:" + tenant, "reports")
        for report in reports:
            for profile_type in profile_type_checklist:
                argo_profiles.profile_update_check(tenant, report,
                                                   profile_type)

    # Reconcile each tenant's project/topic/user definitions on AMS
    ams_token = config.get("AMS", "access_token")
    ams_host = config.get("AMS", "endpoint").hostname
    log.info("ams api used {}".format(ams_host))
    ams = ArgoAmsClient(ams_host, ams_token)

    for tenant in tenants:
        ams.check_project_exists(tenant)
        missing = ams.check_tenant(tenant)
        if is_tenant_complete(missing):
            log.info("Tenant {} definition on AMS is complete!".format(tenant))
        else:
            ams.fill_missing(tenant, missing)
        # Update tenant configuration with the AMS details
        ams.update_tenant_configuration(tenant, config)

    # Save changes to the designated configuration file
    config.save_as(config.conf_path)

    # Check tenant statuses and upload them to the argo web api.
    # NOTE(review): api_endpoint/api_token appear unused below — presumably
    # check_tenants reads them from config itself; confirm before removing.
    api_endpoint = config.get("API", "endpoint").netloc
    api_token = config.get("API", "access_token")
    statuses = check_tenants(tenants, get_today(), 3, config)

    # Regenerate the crontab from the tenants whose reports are healthy
    cron_body = ""
    for status in statuses:
        cron_body = cron_body + gen_tenant_all(config, status["tenant"],
                                               tenant_ok_reports(status))
    update_cron_tab(cron_body)
    log.info("Argo-engine update finished.")