Code Example #1
def find_source_by_dbname(engine, f_class, obj_name, active_branch=False):
    """
    Function to find sources by database name and object class, and return object's 
    reference as a string
    engine: A Delphix engine session object
    f_class: The objects class. I.E. database or timeflow.
    obj_name: The name of the database object in Delphix
    active_branch: Default = False. If true, return list containing
                   the object's reference and active_branch. Otherwise, return 
                   the reference.
    """

    return_list = []

    try:
        all_objs = f_class.get_all(engine)
    except AttributeError as e:
        raise DlpxException('Could not find reference for object class '
                            '{}.\n'.format(e))
    for obj in all_objs:
        
        if obj.name == obj_name:
            print_debug('object: {}\n\n'.format(obj))
            print_debug(obj.name)
            print_debug(obj.reference)
            source_obj = source.get_all(engine, database=obj.reference)
            print_debug('source: {}\n\n'.format(source_obj))
            return source_obj[0]

    #If the object isn't found, raise an exception.
    raise DlpxException('{} was not found on engine {}.\n'.format(
        obj_name, engine.address))
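
A minimal usage sketch for the helper above. The engine address, credentials, and VDB name are hypothetical, and it assumes the serversess() session helper and the delphixpy database web module that the later examples use:

# Hypothetical usage: the address, credentials, and 'my_vdb' are placeholders;
# serversess(), database, print_debug, print_error, and DlpxException are the
# same helpers used throughout these examples.
server = serversess('192.0.2.10', 'delphix_admin', 'password')
try:
    vdb_source = find_source_by_dbname(server, database, 'my_vdb')
    print_debug('Source reference: {}'.format(vdb_source.reference))
except DlpxException as err:
    print_error(str(err))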
Code Example #2
def find_source_by_database(engine, server, database_obj):
    #The source tells us if the database is enabled/disabled, virtual,
    # vdb/dSource, or is a staging database.
    source_obj = source.get_all(server, database=database_obj.reference)

    #We'll just do a little sanity check here to ensure we only have a 1:1 
    # result.
    if len(source_obj) == 0:
        raise DlpxException('%s: Did not find a source for %s. Exiting.\n' % 
                            (engine['hostname'], database_obj.name))

    elif len(source_obj) > 1:
        raise DlpxException('%s: More than one source returned for %s. '
                            'Exiting.\n' % (engine['hostname'],
                                            database_obj.name))
    return source_obj
Code Example #3
def find_source_by_database(engine, server, database_obj):
    #The source tells us if the database is enabled/disabled, virtual,
    # vdb/dSource, or is a staging database.
    source_obj = source.get_all(server, database=database_obj.reference)

    #We'll just do a little sanity check here to ensure we only have a 1:1
    # result.
    if len(source_obj) == 0:
        raise DlpxException('%s: Did not find a source for %s. Exiting.\n' %
                            (engine['hostname'], database_obj.name))

    elif len(source_obj) > 1:
        raise DlpxException(
            '%s: More than one source returned for %s. '
            'Exiting.\n' %
            (engine['hostname'], database_obj.name))
    return source_obj
Code Example #4
def find_source_by_database(engine, server, database_obj):
    # The source tells us if the database is enabled/disabled, virtual,
    # vdb/dSource, or is a staging database.
    source_obj = source.get_all(server, database=database_obj.reference)

    # We'll just do a little sanity check here to ensure we only have a
    # 1:1 result.
    if len(source_obj) == 0:
        print_error(engine["hostname"] + ": Did not find a source for " +
                    database_obj.name + ". Exiting")
        sys.exit(1)

    elif len(source_obj) > 1:
        print_error(engine["hostname"] +
                    ": More than one source returned for " +
                    database_obj.name + ". Exiting")
        print_error(source_obj)
        sys.exit(1)

    return source_obj
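
A hedged sketch of how this helper is typically called from a workflow like the ones below; the engine dictionary values and the VDB name are placeholders, and serversess()/find_obj_by_name() are the helpers shown in the main_workflow examples:

# Illustrative only: the dictionary keys and helper calls mirror main_workflow;
# the addresses, credentials, and 'my_vdb' are made-up values.
engine = {'hostname': 'myengine', 'ip_address': '192.0.2.10',
          'username': 'delphix_admin', 'password': 'password'}
server = serversess(engine['ip_address'], engine['username'], engine['password'])
database_obj = find_obj_by_name(engine, server, database, 'my_vdb')
source_obj = find_source_by_database(engine, server, database_obj)
# source.get_all() returns a list, so the single matching source is source_obj[0].
print_debug('virtual: {}, staging: {}'.format(source_obj[0].virtual,
                                              source_obj[0].staging))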
Code Example #5
def find_source_by_database(engine, server, database_obj):
    #The source tells us if the database is enabled/disabled, virtual,
    # vdb/dSource, or is a staging database.
    source_obj = source.get_all(server, database=database_obj.reference)

    #We'll just do a little sanity check here to ensure we only have a 
    # 1:1 result.
    if len(source_obj) == 0:
        print_error(engine["hostname"] + ": Did not find a source for " + 
                    database_obj.name + ". Exiting")
        sys.exit(1)

    elif len(source_obj) > 1:
        print_error(engine["hostname"] + 
                    ": More than one source returned for " + 
                    database_obj.name + ". Exiting")
        print_error(source_obj)
        sys.exit(1)

    return source_obj
Code Example #6
def main_workflow(engine):
    """
    This function is where we create our main workflow.
    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix Engines
    simultaneously.
    """

    # Pull out the values from the dictionary for this engine
    engine_address = engine["ip_address"]
    engine_username = engine["username"]
    engine_password = engine["password"]
    # Establish these variables as empty for use later
    databases = []
    environment_obj = None
    source_objs = None
    jobs = {}

    # Setup the connection to the Delphix Engine
    server = serversess(engine_address, engine_username, engine_password)

    # If an environment/server was specified
    if host_name:
        print_debug(engine["hostname"] + ": Getting environment for " +
                    host_name)
        # Get the environment object by the hostname
        environment_obj = find_obj_by_name(engine, server, environment,
                                           host_name)

        if environment_obj is not None:
            # Get all the sources running on the server
            env_source_objs = source.get_all(
                server, environment=environment_obj.reference)

            # If the server doesn't have any objects, exit.
            if env_source_objs is None:
                print_error(host_name + " does not have any objects. Exiting")
                sys.exit(1)

            # If we are only filtering by the server, then put those objects in
            # the main list for processing
            if not (arguments["--group_name"] and database_name):
                source_objs = env_source_objs
                all_dbs = database.get_all(server,
                                           no_js_container_data_source=True)
                databases = []
                for source_obj in source_objs:
                    if not source_obj.staging and source_obj.virtual:

                        database_obj = database.get(server,
                                                    source_obj.container)

                        if database_obj in all_dbs:
                            databases.append(database_obj)
        else:
            print_error(engine["hostname"] + ":No environment found for " +
                        host_name + ". Exiting")
            sys.exit(1)

    # If we specified a specific database by name....
    if arguments["--name"]:
        # Get the database object from the name

        database_obj = find_obj_by_name(engine, server, database,
                                        arguments["--name"])
        if database_obj:
            databases.append(database_obj)

    # Else if we specified a group to filter by....
    elif arguments["--group_name"]:
        print_debug(engine["hostname"] + ":Getting databases in group " +
                    arguments["--group_name"])
        # Get all the database objects in a group.
        databases = find_all_databases_by_group_name(engine, server,
                                                     arguments["--group_name"])

    # Else if we specified a dSource to filter by....
    elif arguments["--dsource"]:
        print_debug(engine["hostname"] + ":Getting databases for dSource" +
                    arguments["--dsource"])

        # Get all the database objects in a group.
        databases = find_all_databases_by_dsource_name(engine, server,
                                                       arguments["--dsource"])

    # Else, if we said all vdbs ...
    elif arguments["--all_vdbs"] and not arguments["--host"]:
        print_debug(engine["hostname"] + ":Getting all VDBs ")

        # Grab all databases, but filter out the databases that are in JetStream
        # containers, because we can't refresh those this way.
        databases = database.get_all(server, no_js_container_data_source=True)

    elif arguments["--list_timeflows"]:
        list_timeflows(server)

    elif arguments["--list_snapshots"]:
        list_snapshots(server)

    # reset the running job count before we begin
    i = 0
    with job_mode(server):
        # While there are still running jobs or databases still to process....

        while len(jobs) > 0 or len(databases) > 0:

            # While there are databases still to process and we are still under
            # the max simultaneous jobs threshold (if specified)
            while len(databases) > 0 and (arguments["--parallel"] == None
                                          or i < int(arguments["--parallel"])):

                # Give us the next database in the list, and then remove it
                database_obj = databases.pop()
                # Get the source of the database.
                source_obj = find_source_by_database(engine, server,
                                                     database_obj)

                # If we applied the environment/server filter AND group filter,
                # find the intersecting matches
                if environment_obj != None and (arguments["--group_name"]):
                    match = False

                    for env_source_obj in env_source_objs:
                        if source_obj[0].reference in env_source_obj.reference:
                            match = True
                            break
                    if not match:
                        print_error(engine["hostname"] + ": " +
                                    database_obj.name + " does not exist on " +
                                    host_name + ". Exiting")
                        return

                # Refresh the database
                refresh_job = refresh_database(engine, server, jobs,
                                               source_obj[0], database_obj)
                # If refresh_job has any value, then we know that a job was
                # initiated.

                if refresh_job:
                    # increment the running job count
                    i += 1
            # Check to see if we are running at max parallel processes, and
            # report if so.
            if arguments["--parallel"] != None and i >= int(
                    arguments["--parallel"]):

                print_info(engine["hostname"] + ": Max jobs reached (" +
                           str(i) + ")")

            i = update_jobs_dictionary(engine, server, jobs)
            print_info(engine["hostname"] + ": " + str(i) + " jobs running. " +
                       str(len(databases)) + " jobs waiting to run")

            # If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments["--poll"]))
Code Example #7
def main_workflow(engine):
    """
    This function is where we create our main workflow.
    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix Engines
    simultaneously.
    """

    #Pull out the values from the dictionary for this engine
    engine_address = engine["ip_address"]
    engine_username = engine["username"]
    engine_password = engine["password"]
    #Establish these variables as empty for use later
    databases = []
    environment_obj = None
    source_objs = None
    jobs = {}
    

    #Setup the connection to the Delphix Engine
    server = serversess(engine_address, engine_username, engine_password)

    #If an environment/server was specified
    if host_name:
        print_debug(engine["hostname"] + ": Getting environment for " + 
                    host_name)
        #Get the environment object by the hostname
        environment_obj = find_obj_by_name(engine, server, environment, 
                                           host_name)

        if environment_obj is not None:
            #Get all the sources running on the server
            env_source_objs = source.get_all(server, 
                                     environment=environment_obj.reference)

            #If the server doesn't have any objects, exit.
            if env_source_objs is None:
                print_error(host_name + " does not have any objects. Exiting")
                sys.exit(1)

            #If we are only filtering by the server, then put those objects in 
            # the main list for processing
            if not(arguments['--group_name'] and database_name):
                source_objs = env_source_objs
                all_dbs = database.get_all(server, 
                                           no_js_container_data_source=True)
                databases = []
                for source_obj in source_objs:
                    if not source_obj.staging and source_obj.virtual:

                        database_obj = database.get(server, 
                                                    source_obj.container)

                        if database_obj in all_dbs:
                            databases.append(database_obj)
        else:
            print_error(engine["hostname"] + ":No environment found for " + 
                        host_name + ". Exiting")
            sys.exit(1)

    #If we specified a specific database by name....
    if arguments['--name']:
        #Get the database object from the name

        database_obj = find_obj_by_name(engine, server, database,
                                        arguments['--name'])
        if database_obj:
            databases.append(database_obj)

    #Else if we specified a group to filter by....
    elif arguments['--group_name']:
        print_debug(engine["hostname"] + ":Getting databases in group " + 
                    arguments['--group_name'])
        #Get all the database objects in a group.
        databases = find_all_databases_by_group_name(engine, server, 
                                                     arguments['--group_name'])

    #Else if we specified a dSource to filter by....
    elif arguments['--dsource']:
        print_debug(engine["hostname"] + ":Getting databases for dSource" + 
                    arguments['--dsource'])

        #Get all the database objects in a group.
        databases = find_all_databases_by_dsource_name(engine, server, 
                                                       arguments['--dsource'])

    #Else, if we said all vdbs ...
    elif arguments['--all_vdbs'] and not arguments['--host']:
        print_debug(engine['hostname'] + ':Getting all VDBs ')

        #Grab all databases, but filter out the databases that are in JetStream
        #containers, because we can't refresh those this way.
        databases = database.get_all(server, no_js_container_data_source=True)

    elif arguments['--list_timeflows']:
        list_timeflows(server)

    elif arguments['--list_snapshots']:
        list_snapshots(server)

    #reset the running job count before we begin
    i = 0
    with job_mode(server):
        #While there are still running jobs or databases still to process....

        while (len(jobs) > 0 or len(databases) > 0):

            #While there are databases still to process and we are still under 
            #the max simultaneous jobs threshold (if specified)
            while len(databases) > 0 and (arguments['--parallel'] is None or
                                          i < int(arguments['--parallel'])):

                #Give us the next database in the list, and then remove it
                database_obj = databases.pop()
                #Get the source of the database.
                source_obj = find_source_by_database(engine, server, 
                                                     database_obj)

                #If we applied the environment/server filter AND group filter,
                # find the intersecting matches
                if environment_obj is not None and arguments['--group_name']:
                    match = False

                    for env_source_obj in env_source_objs:
                        if source_obj[0].reference in env_source_obj.reference:
                            match = True
                            break
                    if not match:
                        print_error(engine["hostname"] + ": " + 
                                    database_obj.name + " does not exist on " +
                                    host_name + ". Exiting")
                        return

                #Refresh the database
                refresh_job = refresh_database(engine, server, jobs, 
                                               source_obj[0], database_obj)
                #If refresh_job has any value, then we know that a job was 
                # initiated.

                if refresh_job:
                    #increment the running job count
                    i += 1
            #Check to see if we are running at max parallel processes, and 
            # report if so.
            if (arguments['--parallel'] is not None and
                    i >= int(arguments['--parallel'])):

                print_info(engine["hostname"] + ": Max jobs reached (" + 
                           str(i) + ")")

            i = update_jobs_dictionary(engine, server, jobs)
            print_info(engine["hostname"] + ": " + str(i) + " jobs running. " +
                       str(len(databases)) + " jobs waiting to run")

            #If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments['--poll']))