def check_config():
    """Validate config.json and verify API connectivity to each cluster.

    Reads ``config.json`` from the current working directory, then attempts
    an API connection (via ``ApiToCsv.get_cluster_status``) to every entry
    in ``config["clusters"]``.

    Returns:
        int: 0 on success,
             101 if config.json is missing or cannot be parsed,
             102 if any cluster API connection fails.
    """
    try:
        print("Verifying config.json")
        with open('config.json', 'r') as config_file:
            config = json.load(config_file)
        print("Successfully read and parsed json")
    # Was a bare `except:` that printed only the exception *class*
    # (sys.exc_info()[0]); catch Exception so SystemExit/KeyboardInterrupt
    # still propagate, and print the exception itself for a useful message.
    except Exception as err:
        print("********** config.json failure **********")
        print(err)
        return 101

    print("Validating Qumulo cluster API connections")
    for cluster in config["clusters"]:
        print("Attempting to connect to: %s with %s login" % (
            cluster["hostname"], cluster["api_username"]))
        try:
            apicsv = ApiToCsv(cluster["hostname"],
                              cluster["api_username"],
                              cluster["api_password"],
                              cluster["csv_data_path"])
            # A successful status call proves credentials and reachability.
            apicsv.get_cluster_status("cluster_status")
            print("API connection successful")
        except Exception as err:
            print("********** Qumulo Cluster API connection failure **********")
            print(err)
            return 102
    return 0
# NOTE(review): this is a near byte-for-byte duplicate of the check_config
# defined immediately above; because Python binds the name to the *last*
# definition executed, the earlier copy is dead code. TODO: confirm against
# version control and delete one of the two copies.
def check_config(): try: print "Verifying config.json" with open('config.json', 'r') as config_file: config = json.load(config_file) print "Successfully read and parsed json" except: print "********** config.json failure **********" e = sys.exc_info()[0] print e return 101 print "Validating Qumulo cluster API connections" for cluster in config["clusters"]: print "Attempting to connect to: %s with %s login" % (cluster["hostname"], cluster["api_username"]) try: apicsv = ApiToCsv(cluster["hostname"], cluster["api_username"], cluster["api_password"], cluster["csv_data_path"]) apicsv.get_cluster_status("cluster_status") print "API connection successful" except: print "********** Qumulo Cluster API connection failure **********" e = sys.exc_info()[0] print e return 102 return 0
# NOTE(review): garbled/truncated fragment — it begins mid-way through a
# parser.add_argument(...) call (the opening of the call is not visible here)
# and appears to duplicate the body of the __main__ block below. It cannot be
# parsed as-is. TODO: confirm against version control and remove this copy.
'API data type(s) to pull (separate multiple by commas). Valid values:\ndashstats\ncluster_status\nsampled_files_by_capacity\nsampled_files_by_file\niops_by_path\ncapacity_by_path\napi_call_log' ) parser.add_argument('--timestamp', default=time.strftime('%Y-%m-%d %H:%M:%S')) args = parser.parse_args() config = get_config() if args.op == "verify_config": return_val = check_config() if return_val != 0: sys.exit(return_val) elif args.op == "api_pull": for cluster in config["clusters"]: # initialize Api to CSV. apicsv = ApiToCsv(cluster["hostname"], cluster["api_username"], cluster["api_password"], cluster["csv_data_path"]) # set the timestamp for writign to CSVs where the API doesn't provide a timettamp apicsv.set_timestamp(args.timestamp) # loop through each API call operation for api_call in args.api_data.split(','): apicsv.get_data(api_call) # log the api call times to a csv upon completion of all work. apicsv.get_data("api_call_log") elif args.op == "aggregate_data": for cluster in config["clusters"]: aggregate_data(cluster)
if __name__ == '__main__':
    # Command-line entry point: parse arguments, then dispatch on --op.
    parser = argparse.ArgumentParser(
        description='Bring data from Qumulo Rest API to a CSV')
    parser.add_argument(
        '--op', required=True,
        help='Operation for application. Valid values: server or api_pull or aggregate_data')
    parser.add_argument(
        '--api_data', required=False,
        help='API data type(s) to pull (separate multiple by commas). Valid values:\ndashstats\ncluster_status\nsampled_files_by_capacity\nsampled_files_by_file\niops_by_path\ncapacity_by_path\napi_call_log')
    # Default timestamp is fixed at process start so every cluster/API call
    # in one run shares the same value.
    parser.add_argument('--timestamp', default=time.strftime('%Y-%m-%d %H:%M:%S'))
    args = parser.parse_args()
    config = get_config()

    if args.op == "verify_config":
        return_val = check_config()
        if return_val != 0:
            sys.exit(return_val)
    elif args.op == "api_pull":
        # --api_data is optional at parse time but required for this op;
        # fail with a clear usage error instead of crashing with
        # AttributeError on None.split(',').
        if not args.api_data:
            parser.error("--api_data is required when --op api_pull")
        for cluster in config["clusters"]:
            # Initialize the API-to-CSV helper for this cluster.
            apicsv = ApiToCsv(cluster["hostname"],
                              cluster["api_username"],
                              cluster["api_password"],
                              cluster["csv_data_path"])
            # Set the timestamp for writing to CSVs where the API doesn't
            # provide a timestamp.
            apicsv.set_timestamp(args.timestamp)
            # Loop through each requested API call operation.
            for api_call in args.api_data.split(','):
                apicsv.get_data(api_call)
            # Log the API call times to a CSV upon completion of all work.
            apicsv.get_data("api_call_log")
    elif args.op == "aggregate_data":
        for cluster in config["clusters"]:
            aggregate_data(cluster)