def find_source_by_database(engine, server, database_obj):
    # The source tells us if the database is enabled/disabled, virtual,
    # vdb/dSource, or is a staging database.
    source_obj = source.get_all(server, database=database_obj.reference)

    # We'll just do a little sanity check here to ensure we only have a
    # 1:1 result.
    if len(source_obj) == 0:
        print_error(engine["hostname"] + ": Did not find a source for " +
                    database_obj.name + ". Exiting")
        sys.exit(1)
    elif len(source_obj) > 1:
        print_error(engine["hostname"] + ": More than one source returned for " +
                    database_obj.name + ". Exiting")
        print_error(source_obj)
        sys.exit(1)
    return source_obj
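# Since source.get_all returns a list, find_source_by_database hands back a
# one-element list rather than the source itself, so callers index [0].
# A hypothetical call site (illustrative names only), mirroring how
# main_workflow below consumes the result:
#
#     source_obj = find_source_by_database(engine, server, database_obj)
#     if source_obj[0].virtual and not source_obj[0].staging:
#         print_debug(engine["hostname"] + ": " + database_obj.name + " is a VDB")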
def main_workflow(engine):
    """
    This function is where the main workflow resides.
    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.
    """
    # Pull out the values from the dictionary for this engine
    engine_address = engine["ip_address"]
    engine_username = engine["username"]
    engine_password = engine["password"]

    # Establish these variables as empty for use later
    databases = []
    environment_obj = None
    source_objs = None
    jobs = {}

    # Set up the connection to the Delphix Engine
    server = serversess(engine_address, engine_username, engine_password)

    # If an environment/server was specified
    if host_name:
        print_debug(engine["hostname"] + ": Getting environment for " + host_name)
        # Get the environment object by the hostname
        environment_obj = find_obj_by_name(engine, server, environment, host_name)

        if environment_obj is not None:
            # Get all the sources running on the server
            env_source_objs = source.get_all(
                server, environment=environment_obj.reference)

            # If the server doesn't have any objects, exit.
            if env_source_objs is None:
                print_error(host_name + " does not have any objects. Exiting")
                sys.exit(1)

            # If we are only filtering by the server, then put those objects
            # in the main list for processing
            if not (arguments["--group"] and database_name):
                source_objs = env_source_objs
                all_dbs = database.get_all(
                    server, no_js_container_data_source=False)
                databases = []
                for source_obj in source_objs:
                    if not source_obj.staging and source_obj.virtual:
                        database_obj = database.get(server, source_obj.container)
                        if database_obj in all_dbs:
                            databases.append(database_obj)
        else:
            print_error(engine["hostname"] + ": No environment found for " +
                        host_name + ". Exiting")
            sys.exit(1)

    # If we specified a specific database by name....
    if arguments["--name"]:
        # Get the database object from the name
        database_obj = find_database_by_name_and_group_name(
            engine, server, arguments["--group"], arguments["--name"])
        if database_obj:
            databases.append(database_obj)
    # Else if we specified a group to filter by....
    elif arguments["--group"]:
        print_debug(engine["hostname"] + ": Getting databases in group " +
                    arguments["--group"])
        # Get all the database objects in a group.
        databases = find_all_databases_by_group_name(
            engine, server, arguments["--group"])
    # Else, if we said all VDBs....
    elif arguments["--all_dbs"] and not arguments["--host"]:
        # Grab all databases
        databases = database.get_all(server, no_js_container_data_source=False)
    elif arguments["--object_type"] and not arguments["--host"]:
        databases = database.get_all(server)

    if not databases:
        print_error("No databases found with the criteria specified")
        return

    # Reset the running job count before we begin
    i = 0
    with job_mode(server):
        # While there are still running jobs or databases still to process....
        while len(jobs) > 0 or len(databases) > 0:
            # While there are databases still to process and we are still
            # under the max simultaneous jobs threshold (if specified)
            while len(databases) > 0 and (arguments["--parallel"] is None or
                                          i < int(arguments["--parallel"])):
                # Give us the next database in the list, and remove it
                # from the list
                database_obj = databases.pop()
                # Get the source of the database. The source tells us if
                # the database is enabled/disabled, virtual, vdb/dSource,
                # or is a staging database.
                source_obj = find_source_by_database(engine, server, database_obj)

                # If we applied the environment/server filter AND group
                # filter, find the intersecting matches
                if environment_obj is not None and arguments["--group"]:
                    match = False
                    for env_source_obj in env_source_objs:
                        if source_obj[0].reference == env_source_obj.reference:
                            match = True
                            break
                    if not match:
                        print_error(engine["hostname"] + ": " +
                                    database_obj.name + " does not exist on " +
                                    host_name + ". Exiting")
                        return

                # Delete the database
                delete_job = delete_database(
                    engine, server, jobs, source_obj[0], database_obj,
                    arguments["--object_type"])
                # If delete_job has any value, then we know that a job
                # was initiated.
                if delete_job:
                    # Increment the running job count
                    i += 1
                # Check to see if we are running at max parallel processes,
                # and report if so.
                if (arguments["--parallel"] is not None and
                        i >= int(arguments["--parallel"])):
                    print_info(engine["hostname"] + ": Max jobs reached (" +
                               str(i) + ")")

            # Reset the running jobs counter, as we are about to update the
            # count from the jobs report.
            i = update_jobs_dictionary(engine, server, jobs)
            print_info(engine["hostname"] + ": " + str(i) + " jobs running. " +
                       str(len(databases)) + " jobs waiting to run")

            # If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments["--poll"]))
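# The docstring above references a @run_async decorator that is not defined
# in this section. A minimal sketch under the assumption of a simple
# thread-per-call recipe (the script's real decorator may differ): each
# decorated call starts its own thread and returns it so callers can join().
from functools import wraps
from threading import Thread


def run_async(func):
    # Start func in its own thread and hand the Thread back to the caller,
    # so each Delphix Engine gets its own worker.
    @wraps(func)
    def async_func(*args, **kwargs):
        func_thread = Thread(target=func, args=args, kwargs=kwargs)
        func_thread.start()
        return func_thread

    return async_func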
def main_workflow(engine):
    """
    This function is where the main workflow resides.
    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.
    """
    # Pull out the values from the dictionary for this engine
    engine_address = engine["ip_address"]
    engine_username = engine["username"]
    engine_password = engine["password"]

    # Establish these variables as empty for use later
    databases = []
    environment_obj = None
    source_objs = None
    jobs = {}

    # Set up the connection to the Delphix Engine
    server = serversess(engine_address, engine_username, engine_password)

    # If an environment/server was specified
    if host_name:
        print_debug(engine["hostname"] + ": Getting environment for " + host_name)
        # Get the environment object by the hostname
        environment_obj = find_obj_by_name(engine, server, environment, host_name)

        if environment_obj is not None:
            # Get all the sources running on the server
            env_source_objs = source.get_all(
                server, environment=environment_obj.reference)

            # If the server doesn't have any objects, exit.
            if env_source_objs is None:
                print_error(host_name + " does not have any objects. Exiting")
                sys.exit(1)

            # If we are only filtering by the server, then put those objects
            # in the main list for processing
            if not (arguments["--group"] and database_name):
                source_objs = env_source_objs
                all_dbs = database.get_all(
                    server, no_js_container_data_source=False)
                databases = []
                for source_obj in source_objs:
                    if not source_obj.staging and source_obj.virtual:
                        database_obj = database.get(server, source_obj.container)
                        if database_obj in all_dbs:
                            databases.append(database_obj)
        else:
            print_error(engine["hostname"] + ": No environment found for " +
                        host_name + ". Exiting")
            sys.exit(1)

    # If we specified a specific database by name....
    if arguments["--name"]:
        # Get the database object from the name
        database_obj = find_database_by_name_and_group_name(
            engine, server, arguments["--group"], arguments["--name"])
        if database_obj:
            databases.append(database_obj)
    # Else if we specified a group to filter by....
    elif arguments["--group"]:
        print_debug(engine["hostname"] + ": Getting databases in group " +
                    arguments["--group"])
        # Get all the database objects in a group.
        databases = find_all_databases_by_group_name(
            engine, server, arguments["--group"])
    # Else, if we said all VDBs....
    elif arguments["--all_dbs"] and not arguments["--host"]:
        # Grab all databases
        databases = database.get_all(server, no_js_container_data_source=False)
    elif arguments["--object_type"] and not arguments["--host"]:
        databases = database.get_all(server)

    if not databases:
        print_error("No databases found with the criteria specified")
        return

    # Reset the running job count before we begin
    i = 0
    with job_mode(server):
        # While there are still running jobs or databases still to process....
        while len(jobs) > 0 or len(databases) > 0:
            # While there are databases still to process and we are still
            # under the max simultaneous jobs threshold (if specified)
            while len(databases) > 0 and (arguments["--parallel"] is None or
                                          i < int(arguments["--parallel"])):
                # Give us the next database in the list, and remove it
                # from the list
                database_obj = databases.pop()
                # Get the source of the database. The source tells us if
                # the database is enabled/disabled, virtual, vdb/dSource,
                # or is a staging database.
                source_obj = find_source_by_database(engine, server, database_obj)

                # If we applied the environment/server filter AND group
                # filter, find the intersecting matches
                if environment_obj is not None and arguments["--group"]:
                    match = False
                    for env_source_obj in env_source_objs:
                        if source_obj[0].reference == env_source_obj.reference:
                            match = True
                            break
                    if not match:
                        print_error(engine["hostname"] + ": " +
                                    database_obj.name + " does not exist on " +
                                    host_name + ". Exiting")
                        return

                # Snapshot the database
                snapshot_job = snapshot_database(
                    engine, server, jobs, source_obj[0], database_obj,
                    arguments["--object_type"])
                # If snapshot_job has any value, then we know that a job
                # was initiated.
                if snapshot_job:
                    # Increment the running job count
                    i += 1
                # Check to see if we are running at max parallel processes,
                # and report if so.
                if (arguments["--parallel"] is not None and
                        i >= int(arguments["--parallel"])):
                    print_info(engine["hostname"] + ": Max jobs reached (" +
                               str(i) + ")")

            # Reset the running jobs counter, as we are about to update the
            # count from the jobs report.
            i = update_jobs_dictionary(engine, server, jobs)
            print_info(engine["hostname"] + ": " + str(i) + " jobs running. " +
                       str(len(databases)) + " jobs waiting to run")

            # If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments["--poll"]))
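# Hypothetical fan-out over engines: with main_workflow decorated by
# @run_async, each call returns a Thread that can be joined. "dlpx_engines"
# is an assumed dict of engine configs, each entry carrying the ip_address,
# username, password, and hostname keys that main_workflow reads.
#
#     threads = [main_workflow(engine) for engine in dlpx_engines.values()]
#     for each in threads:
#         each.join()  # wait for every engine's workflow to finish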