def remove_bucket(args):
    """
    Used to remove a bucket from IRIS DynaFed.

    Refreshes local copy of files first to make sure config is up to date from
    remote copy on bucket. Then deletes the remote config and pushes updates
    to the bucket.

    :param args: Namespace object containing all arguments given from command
        line, if any
    :returns: 0 if success, non-zero if not. Various numbers are returned
        which will allow for the correct error response to be displayed on the
        web UI (1 = invalid config, 2 = bucket not configured, 3 = key
        validation failed, 4 = sync failure)
    """
    if sync.get() != 0:
        return 4

    args.suppress_verify_output = True
    if verify(args) != 0:
        # restore stdout
        sys.stdout = sys.__stdout__
        print("Config file not valid, please use the verify function to debug")
        return 1

    if does_bucket_exist(args) != 0:
        return 2

    # Potential issue: we need to validate the bucket keys are correct, and
    # the check below is how we do it. However, if the bucket no longer exists
    # in Echo, the check prevents the stale entry from being removed from
    # DynaFed.
    # Potential solution: if we are an admin, bypass this keys check. This
    # means a non-admin user cannot remove a bucket entry that doesn't exist.
    # Not sure how to get around this while keeping the key validation in
    # place. TL;DR not a massive issue but still annoying.
    if hasattr(args, 'admin_operation') and hasattr(args, 'groups'):
        admin_operation = args.admin_operation and "dynafed/admins" in args.groups
        # Admins bypass validation so stale entries can still be removed.
        if not admin_operation and _bucket_validation_failed(args):
            return 3
    elif _bucket_validation_failed(args):
        return 3

    remove_bucket_from_config_file(args)
    remove_bucket_from_json(args)

    if sync.put() != 0:
        return 4
    if sync.get() != 0:
        return 4
    return 0


def _bucket_validation_failed(args):
    """
    Return True when the bucket cannot be matched in Echo AND the supplied
    keys do not match the bucket, i.e. validation genuinely failed.

    update_bucket_cors failing means the bucket could not be matched in Echo;
    the bucket may no longer exist, so fall back to the key check before
    rejecting the removal.
    """
    return update_bucket_cors(args) != 0 and do_keys_match_bucket(args) != 0
def get_blacklist():
    """
    Return the list of blacklisted bucket names.

    Refreshes the local copy of files from the remote first so the blacklist
    is as up to date as possible. A missing blacklist file means nothing is
    blacklisted, so an empty list is returned.

    :returns: list of bucket names currently blacklisted
    """
    # Best-effort refresh; a missing file is handled below either way.
    sync.get()
    try:
        with open(BLACKLIST_FILE, "r") as f:
            blacklist = json.load(f)
    except FileNotFoundError:
        return []
    # .get guards against a well-formed JSON file that lacks the "buckets"
    # key (the original indexing would raise KeyError here).
    return blacklist.get("buckets", [])
def add_to_blacklist(args):
    """
    Add a bucket to the DynaFed blacklist. Admin-only operation.

    :param args: Namespace with ``bucket``, ``admin_operation`` and ``groups``
    :returns: 0 on success, 1 if an argument is missing or the caller is not
        an admin, 2 if synchronisation fails, 3 if the bucket is already
        blacklisted
    """
    # All three arguments must be supplied.
    if args.bucket is None or args.admin_operation is None or args.groups is None:
        return 1

    # Only members of dynafed/admins may blacklist buckets.
    is_admin = args.admin_operation and "dynafed/admins" in args.groups
    if not is_admin:
        return 1

    if sync.get() != 0:
        return 2

    # Start from an empty blacklist when no file exists yet.
    try:
        with open(BLACKLIST_FILE, "r") as f:
            blacklist = json.load(f)
    except FileNotFoundError:
        blacklist = {"buckets": []}

    if args.bucket in blacklist["buckets"]:
        return 3

    blacklist["buckets"].append(args.bucket)
    with open(BLACKLIST_FILE, "w") as f:
        json.dump(blacklist, f, indent=4)

    if sync.put() != 0:
        return 2
    return 0
def import_bucket(args):
    """
    Used to import a new bucket into IRIS DynaFed.

    Refreshes the local copy of files first so config is up to date with the
    remote copy on the bucket, then creates the necessary config and pushes it
    to the bucket.

    :param args: Namespace object containing all arguments given from command
        line, if any
    :returns: 0 if success, non-zero if not. Various numbers are returned
        which will allow for the correct error response to be displayed on the
        web UI (1 = invalid config, 2 = already imported, 3 = not found in
        Echo, 4 = sync failure, 5 = blacklisted)
    """
    if sync.get() != 0:
        return 4

    # Blacklisted buckets may never be imported.
    if args.bucket in blacklisting.get_blacklist():
        return 5

    # check config file is valid first
    args.suppress_verify_output = True
    if verify(args) != 0:
        # restore stdout
        sys.stdout = sys.__stdout__
        print(
            "OIDC config file not valid, please use the verify function to debug"
        )
        return 1

    # An existing entry means this bucket was already imported.
    if does_bucket_exist(args) == 0:
        return 2

    # Validate bucket exists in Echo (and update CORS)
    if update_bucket_cors(args) != 0:
        return 3

    create_bucket_config(args)
    add_bucket_to_json(args)

    if sync.put() != 0:
        return 4
    if sync.get() != 0:
        return 4
    return 0
def get_oidc_auth():
    """
    Ensure a local OIDC auth file exists, creating a blank one from the
    BLANK_OIDC_AUTH template when no file is present.
    """
    # Best-effort refresh of local files from the remote copy; a failure is
    # reported but does not abort.
    if sync.get() != 0:
        print("Synchronisation of files failed.")
    if not os.path.isfile(DEFAULT_AUTH_FILE_LOCATION):
        print("No remote oidc_auth.json file found. Creating new from template.")
        with open(DEFAULT_AUTH_FILE_LOCATION, 'w') as file:
            json.dump(BLANK_OIDC_AUTH, file, indent=4)
    # NOTE(review): the visible chunk ends here with no return statement; the
    # function name suggests it goes on to load and return the auth data —
    # confirm against the full file.