Example #1
0
def main_workflow(engine):
    """
    Run the requested group operation (add/delete/list) against one engine
    and poll its jobs to completion.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary describing one engine (ip_address, username,
                   password, hostname)
    """
    try:
        # Set up the connection to the Delphix Engine
        dx_session_obj.serversess(engine["ip_address"], engine["username"],
                                  engine["password"])

    except DlpxException as e:
        print_exception("\nERROR: Engine {} encountered an error while"
                        "{}:\n{}\n".format(engine["hostname"],
                                           arguments["--target"], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dx_session_obj.job_mode(single_thread):
            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments["--add"]:
                        add_group(arguments["--group_name"])
                    elif arguments["--delete"]:
                        delete_group(arguments["--group_name"])
                    elif arguments["--list"]:
                        list_groups()
                    thingstodo.pop()
                # Inspect all outstanding jobs. Iterate over a snapshot of
                # the keys: deleting entries during direct dict iteration
                # raises RuntimeError on Python 3.
                i = 0
                for j in list(dx_session_obj.jobs.keys()):
                    job_obj = job.get(dx_session_obj.server_session,
                                      dx_session_obj.jobs[j])
                    print_debug(job_obj)
                    print_info("{}: Group: {}".format(engine["hostname"],
                                                      job_obj.job_state))
                    if job_obj.job_state in [
                            "CANCELED", "COMPLETED", "FAILED"
                    ]:
                        # Terminal state: remove the job from the
                        # running jobs list.
                        del dx_session_obj.jobs[j]
                    elif job_obj.job_state == "RUNNING":
                        # Exact comparison (the old substring test matched
                        # any fragment of "RUNNING"): count running jobs.
                        i += 1
                # Report once per polling pass, not once per job.
                print_info("{}: {:d} jobs running.".format(
                    engine["hostname"], i))
                # If we have running jobs, pause before repeating the
                # checks.
                if len(dx_session_obj.jobs) > 0:
                    sleep(float(arguments["--poll"]))

    except (HttpError, RequestError, JobError, DlpxException) as e:
        print_exception("ERROR: Could not complete group "
                        "operation: {}".format(e))
Example #2
0
def main_workflow(engine):
    """
    Run the requested user operation (add/update/delete/list) against one
    engine and poll its jobs to completion.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary describing one engine (ip_address, username,
                   password, hostname)
    """
    try:
        # Set up the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        print_exception('\nERROR: Engine {} encountered an error while'
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dx_session_obj.job_mode(single_thread):
            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments['--add']:
                        add_user(arguments['--user_name'],
                                 arguments['--password'],
                                 arguments['--email'],
                                 arguments['--jsonly'])
                    elif arguments['--update']:
                        update_user(arguments['--user_name'],
                                    arguments['--password'],
                                    arguments['--email'],
                                    arguments['--jsonly'])
                    elif arguments['--delete']:
                        delete_user(arguments['--user_name'])
                    elif arguments['--list']:
                        list_users()
                    thingstodo.pop()
                # Inspect all outstanding jobs. Iterate over a snapshot of
                # the keys: deleting entries during direct dict iteration
                # raises RuntimeError on Python 3.
                i = 0
                for j in list(dx_session_obj.jobs.keys()):
                    job_obj = job.get(dx_session_obj.server_session,
                                      dx_session_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: User: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
                        # Terminal state: remove the job from the
                        # running jobs list.
                        del dx_session_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # Exact comparison (the old substring test matched
                        # any fragment of "RUNNING"): count running jobs.
                        i += 1
                # Report once per polling pass, not once per job.
                print_info('{}: {:d} jobs running.'.format(
                           engine['hostname'], i))
                # If we have running jobs, pause before repeating the
                # checks.
                if len(dx_session_obj.jobs) > 0:
                    sleep(float(arguments['--poll']))

    except (HttpError, RequestError, JobError, DlpxException) as e:
        print_exception('ERROR: Could not complete user '
                        'operation: {}'.format(e))
def main_workflow(engine, dlpx_obj):
    """
    This function is where we create our main workflow: rewind the VDB and
    poll the engine's jobs to completion.

    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Set up the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                            engine['password'])
    except DlpxException as e:
        print_exception('ERROR: Engine {} encountered an error while'
                        'rewinding {}:\n{}\n'.format(engine['hostname'],
                                                     arguments['--target'], e))
        # Without a session nothing below can work; bail out like the
        # other workflows do instead of continuing with a dead session.
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    rewind_database(dlpx_obj, arguments['--vdb'],
                                    arguments['--timestamp'],
                                    arguments['--timestamp_type'])
                    thingstodo.pop()

                # Inspect all outstanding jobs. Iterate over a snapshot of
                # the keys: deleting entries during direct dict iteration
                # raises RuntimeError on Python 3.
                i = 0
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Refresh of {}: {}'.format(
                        engine['hostname'], arguments['--vdb'],
                        job_obj.job_state))
                    if job_obj.job_state in ['CANCELED', 'COMPLETED',
                                             'FAILED']:
                        # Terminal state: remove the job from the
                        # running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # Exact comparison (the old substring test matched
                        # any fragment of "RUNNING"): count running jobs.
                        i += 1
                # Report once per polling pass, not once per job.
                print_info('{}: {:d} jobs running.'.format(
                    engine['hostname'], i))
                # If we have running jobs, pause before repeating the
                # checks.
                if len(dlpx_obj.jobs) > 0:
                    sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('Error in dx_rewind_vdb: {}\n{}'.format(
            engine['hostname'], e))
        sys.exit(1)
def main_workflow(engine):
    """
    Run the ASE database password update against one engine and poll its
    jobs to completion.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary describing one engine (ip_address, username,
                   password, hostname)
    """
    # Local job-reference map for this engine's run.
    jobs = {}

    try:
        # Set up the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        print_exception('\nERROR: Engine %s encountered an error while'
                        '%s:\n%s\n' %
                        (engine['hostname'], arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    # Reset the running job count before we begin.
    i = 0
    with dx_session_obj.job_mode(single_thread):
        while len(jobs) > 0 or len(thingstodo) > 0:
            if len(thingstodo) > 0:
                if arguments['--pw']:
                    update_ase_db_pw()

                # elif OPERATION:
                #     method_call

                thingstodo.pop()

            # Inspect all outstanding jobs. Iterate over a snapshot of the
            # keys: deleting entries during direct dict iteration raises
            # RuntimeError on Python 3.
            i = 0
            for j in list(jobs.keys()):
                job_obj = job.get(dx_session_obj.server_session, jobs[j])
                print_debug(job_obj)
                print_info(engine["hostname"] + ": VDB Operations: " +
                           job_obj.job_state)

                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    # Terminal state: remove the job from the running
                    # jobs list.
                    del jobs[j]
                else:
                    # Any non-terminal state counts as still running.
                    i += 1

            print_info(engine["hostname"] + ": " + str(i) + " jobs running. ")
            # If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments['--poll']))
def main_workflow(engine):
    """
    Find missing archive logs on one engine and poll its jobs to
    completion.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary describing one engine (ip_address, username,
                   password, hostname)
    """
    # Local job-reference map for this engine's run.
    jobs = {}

    try:
        # Set up the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        print_exception('\nERROR: Engine {} encountered an error while'
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    # Reset the running job count before we begin.
    i = 0
    with dx_session_obj.job_mode(single_thread):
        while len(jobs) > 0 or len(thingstodo) > 0:
            if len(thingstodo) > 0:

                # if OPERATION:
                find_missing_archivelogs(engine['hostname'])

                thingstodo.pop()

            # Inspect all outstanding jobs. Iterate over a snapshot of the
            # keys: deleting entries during direct dict iteration raises
            # RuntimeError on Python 3.
            i = 0
            for j in list(jobs.keys()):
                job_obj = job.get(dx_session_obj.server_session, jobs[j])
                print_debug(job_obj)
                print_info('{}: VDB Operations:{}\n'.format(engine['hostname'],
                           job_obj.job_state))

                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    # Terminal state: remove the job from the running
                    # jobs list.
                    del jobs[j]
                else:
                    # Any non-terminal state counts as still running.
                    i += 1

            print_info(engine["hostname"] + ": " + str(i) + " jobs running. ")
            # If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments['--poll']))
Example #6
0
def main_workflow(engine):
    """
    List jobs on one engine and poll the engine's jobs to completion.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary describing one engine (ip_address, username,
                   password, hostname)
    """
    jobs = {}

    try:
        # Set up the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        print_exception('\nERROR: Engine {} encountered an error while'
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    with dx_session_obj.job_mode(single_thread):
        while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
            if len(thingstodo) > 0:

                if arguments['--list']:
                    list_jobs()
                thingstodo.pop()

            # Inspect all outstanding jobs. Iterate over a snapshot of the
            # keys: deleting entries during direct dict iteration raises
            # RuntimeError on Python 3.
            i = 0
            for j in list(dx_session_obj.jobs.keys()):
                job_obj = job.get(dx_session_obj.server_session,
                                  dx_session_obj.jobs[j])
                print_debug(job_obj)
                print_info('{}: Operations: {}'.format(engine['hostname'],
                                                       job_obj.job_state))
                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    # Terminal state: remove the job from the running
                    # jobs list.
                    del dx_session_obj.jobs[j]
                elif job_obj.job_state == 'RUNNING':
                    # Exact comparison (the old substring test matched any
                    # fragment of "RUNNING"): count running jobs.
                    i += 1

            # Report once per polling pass, not once per job.
            print_info('{}: {:d} jobs running.'.format(
                engine['hostname'], i))

            # If we have running jobs, pause before repeating the checks.
            if len(dx_session_obj.jobs) > 0:
                sleep(float(arguments['--poll']))
def main_workflow(engine):
    """
    Refresh the VDB on one engine and poll the engine's jobs to completion.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary describing one engine (ip_address, username,
                   password, hostname)
    """
    jobs = {}

    try:
        # Set up the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        print_exception('\nERROR: Engine {} encountered an error while'
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    with dx_session_obj.job_mode(single_thread):
        while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
            if len(thingstodo) > 0:
                refresh_database(arguments['--vdb'],
                                 arguments['--timestamp'],
                                 arguments['--timestamp_type'])
                thingstodo.pop()

            # Inspect all outstanding jobs. Iterate over a snapshot of the
            # keys: deleting entries during direct dict iteration raises
            # RuntimeError on Python 3.
            i = 0
            for j in list(dx_session_obj.jobs.keys()):
                job_obj = job.get(dx_session_obj.server_session,
                                  dx_session_obj.jobs[j])
                print_debug(job_obj)
                print_info('{}: Operations: {}'.format(engine['hostname'],
                                                       job_obj.job_state))
                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    # Terminal state: remove the job from the running
                    # jobs list.
                    del dx_session_obj.jobs[j]
                elif job_obj.job_state == 'RUNNING':
                    # Exact comparison (the old substring test matched any
                    # fragment of "RUNNING"): count running jobs.
                    i += 1

            # Report once per polling pass, not once per job.
            print_info('{}: {:d} jobs running.'.format(
                engine['hostname'], i))

            # If we have running jobs, pause before repeating the checks.
            if len(dx_session_obj.jobs) > 0:
                sleep(float(arguments['--poll']))
Example #8
0
def update_jobs_dictionary(engine, server, jobs):
    """
    Check each job in the dictionary and update its status, removing it if
    the job has reached a terminal state (CANCELED/COMPLETED/FAILED).

    :param engine: dictionary describing the engine (uses 'hostname')
    :param server: server session used to look up job status
    :param jobs: dict of tracked jobs; keys appear to be objects with a
                 ``.name`` attribute mapped to job references — confirm
                 against the caller
    :return: the number of jobs still running
    """
    # Establish the running jobs counter, as we are about to update the
    # count from the jobs report.
    i = 0
    # Iterate over a snapshot of the keys: deleting entries during direct
    # dict iteration raises RuntimeError on Python 3.
    for j in list(jobs.keys()):
        job_obj = job.get(server, jobs[j])
        print_debug("%s: %s" % (engine["hostname"], str(job_obj)), debug)
        print_info("%s: %s: %s" % (engine["hostname"], j.name, job_obj.job_state))

        if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
            # Terminal state: remove the job from the running jobs list.
            del jobs[j]
        else:
            # Any non-terminal state counts as still running.
            i += 1
    return i
def update_jobs_dictionary(engine, server, jobs):
    """
    Check each job in the dictionary and update its status, removing it if
    the job has reached a terminal state (CANCELED/COMPLETED/FAILED).

    :param engine: dictionary describing the engine (uses 'hostname')
    :param server: server session used to look up job status
    :param jobs: dict of tracked jobs; keys appear to be objects with a
                 ``.name`` attribute mapped to job references — confirm
                 against the caller
    :return: the number of jobs still running
    """
    # Establish the running jobs counter, as we are about to update the
    # count from the jobs report.
    i = 0
    # Iterate over a snapshot of the keys: deleting entries during direct
    # dict iteration raises RuntimeError on Python 3.
    for j in list(jobs.keys()):
        job_obj = job.get(server, jobs[j])
        print_debug('%s: %s' % (engine['hostname'], str(job_obj)), debug)
        print_info('%s: %s: %s' % (engine['hostname'], j.name,
                   job_obj.job_state))

        if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
            # Terminal state: remove the job from the running jobs list.
            del jobs[j]
        else:
            # Any non-terminal state counts as still running.
            i += 1
    return i
Example #10
0
def main_workflow(engine):
    """
    Provision a VDB (Oracle/ASE/MSSQL/vFiles) on one engine and poll its
    jobs to completion.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary containing engine information (ip_address,
                   username, password, hostname)
    """

    # Establish these variables as empty for use later.
    environment_obj = None
    source_objs = None
    jobs = {}

    try:
        # Set up the connection to the Delphix Engine
        dx_session_obj.serversess(
            engine["ip_address"], engine["username"], engine["password"]
        )

        group_obj = find_obj_by_name(
            dx_session_obj.server_session, group, arguments["--target_grp"]
        )

        # Get the reference of the target environment.
        print_debug("Getting environment for %s\n" % (host_name), debug)

        # Get the environment object by the hostname.
        environment_obj = find_obj_by_name(
            dx_session_obj.server_session, environment, host_name
        )

    except DlpxException as e:
        print(
            "\nERROR: Engine %s encountered an error while provisioning "
            "%s:\n%s\n" % (engine["hostname"], arguments["--target"], e)
        )
        sys.exit(1)

    print_debug(
        "Getting database information for %s\n" % (arguments["--source"]), debug
    )
    try:
        # Get the database reference we are copying from the database name.
        database_obj = find_obj_by_name(
            dx_session_obj.server_session, database, arguments["--source"]
        )
    except DlpxException:
        # Source database not found: nothing to provision from.
        return

    thingstodo = ["thingtodo"]
    # Reset the running job count before we begin.
    i = 0

    try:
        with dx_session_obj.job_mode(single_thread):
            while len(jobs) > 0 or len(thingstodo) > 0:
                arg_type = arguments["--type"].lower()
                if len(thingstodo) > 0:

                    if arg_type == "oracle":
                        create_oracle_si_vdb(
                            engine,
                            jobs,
                            database_name,
                            group_obj,
                            environment_obj,
                            database_obj,
                            arguments["--prerefresh"],
                            arguments["--postrefresh"],
                            arguments["--prerollback"],
                            arguments["--postrollback"],
                            arguments["--configure-clone"],
                        )

                    elif arg_type == "ase":
                        create_ase_vdb(
                            engine,
                            server,
                            jobs,
                            group_obj,
                            database_name,
                            environment_obj,
                            database_obj,
                        )

                    elif arg_type == "mssql":
                        create_mssql_vdb(
                            engine,
                            jobs,
                            group_obj,
                            database_name,
                            environment_obj,
                            database_obj,
                        )

                    elif arg_type == "vfiles":
                        create_vfiles_vdb(
                            engine,
                            jobs,
                            group_obj,
                            database_name,
                            environment_obj,
                            database_obj,
                            arguments["--prerefresh"],
                            arguments["--postrefresh"],
                            arguments["--prerollback"],
                            arguments["--postrollback"],
                            arguments["--configure-clone"],
                        )

                    thingstodo.pop()

                # Inspect all outstanding jobs. Iterate over a snapshot of
                # the keys: deleting entries during direct dict iteration
                # raises RuntimeError on Python 3.
                i = 0
                for j in list(jobs.keys()):
                    job_obj = job.get(dx_session_obj.server_session, jobs[j])
                    print_debug(job_obj, debug)
                    print_info(
                        engine["hostname"] + ": VDB Provision: " + job_obj.job_state
                    )

                    if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                        # Terminal state: remove the job from the running
                        # jobs list.
                        del jobs[j]
                    else:
                        # Any non-terminal state counts as still running.
                        i += 1

                print_info("%s: %s jobs running." % (engine["hostname"], str(i)))

                # If we have running jobs, pause before repeating the checks.
                if len(jobs) > 0:
                    sleep(float(arguments["--poll"]))

    except (DlpxException, JobError) as e:
        # BaseException.message was removed in Python 3; format the
        # exception itself instead.
        print("\nError while provisioning %s:\n%s" % (database_name, e))
        sys.exit(1)
def main_workflow(engine, dlpx_obj):
    """
    This function is where we create our main workflow: perform the
    requested environment operation and poll the engine's jobs to
    completion.

    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Set up the connection to the Delphix Engine
        dlpx_obj.serversess(engine["ip_address"], engine["username"],
                            engine["password"])

    except DlpxException as e:
        print_exception("ERROR: Engine {} encountered an error while"
                        "{}:\n{}\n".format(engine["hostname"],
                                           arguments["--target"], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if (arguments["--type"] == "linux"
                            or arguments["--type"] == "windows"):
                        env_name = arguments["--env_name"]
                        host_user = arguments["--host_user"]
                        pw = arguments["--pw"]
                        ip_addr = arguments["--ip"]
                        host_name = arguments["--connector_name"]
                        if arguments["--type"] == "linux":
                            toolkit_path = arguments["--toolkit"]
                            create_linux_env(dlpx_obj, env_name, host_user,
                                             ip_addr, toolkit_path, pw)
                        else:
                            create_windows_env(
                                dlpx_obj,
                                env_name,
                                host_user,
                                ip_addr,
                                pw,
                                host_name,
                            )

                    elif arguments["--delete"]:
                        delete_env(dlpx_obj, arguments["--delete"])

                    elif arguments["--refresh"]:
                        refresh_env(dlpx_obj, arguments["--refresh"])

                    elif arguments["--update_ase_pw"]:
                        update_ase_pw(dlpx_obj)

                    elif arguments["--update_ase_user"]:
                        update_ase_username(dlpx_obj)
                    elif arguments["--list"]:
                        list_env(dlpx_obj)
                    elif arguments["--update_host"]:
                        update_host_address(
                            dlpx_obj,
                            arguments["--old_host_address"],
                            arguments["--new_host_address"],
                        )
                    elif arguments["--enable"]:
                        enable_environment(dlpx_obj, arguments["--env_name"])
                    elif arguments["--disable"]:
                        disable_environment(dlpx_obj, arguments["--env_name"])

                    thingstodo.pop()
                # Inspect all outstanding jobs. Iterate over a snapshot of
                # the keys: deleting entries during direct dict iteration
                # raises RuntimeError on Python 3.
                i = 0
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info("{} Environment: {}".format(
                        engine["hostname"], job_obj.job_state))
                    if job_obj.job_state in [
                            "CANCELED", "COMPLETED", "FAILED"
                    ]:
                        # Terminal state: remove the job from the running
                        # jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == "RUNNING":
                        # Exact comparison (the old substring test matched
                        # any fragment of "RUNNING"): count running jobs.
                        i += 1
                # Report once per polling pass, not once per job.
                print_info("{}: {:d} jobs running.".format(
                    engine["hostname"], i))
                # If we have running jobs, pause before repeating the
                # checks.
                if len(dlpx_obj.jobs) > 0:
                    sleep(float(arguments["--poll"]))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception("Error while creating the environment {}\n{}".format(
            arguments["--env_name"], e))
        sys.exit(1)
Example #12
0
def main_workflow(engine, dlpx_obj):
    """
    This function is where we create our main workflow: perform the
    requested Jet Stream container operation and poll the engine's jobs to
    completion.

    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.

    :param engine: Dictionary of engines
    :param dlpx_obj: Virtualization Engine session object
    """

    try:
        # Set up the connection to the Delphix Engine
        dlpx_obj.serversess(engine["ip_address"], engine["username"],
                            engine["password"])

    except DlpxException as e:
        print_exception("\nERROR: Engine {} encountered an error while "
                        "creating the session:\n{}\n".format(
                            dlpx_obj.dlpx_engines["hostname"], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments["--create_container"]:
                        create_container(
                            dlpx_obj,
                            arguments["--template_name"],
                            arguments["--create_container"],
                            arguments["--database"],
                        )
                    elif arguments["--delete_container"]:
                        delete_container(
                            dlpx_obj,
                            arguments["--delete_container"],
                            arguments["--keep_vdbs"],
                        )
                    elif arguments["--list"]:
                        list_containers(dlpx_obj)
                    elif arguments["--remove_owner"]:
                        remove_owner(
                            dlpx_obj,
                            arguments["--remove_owner"],
                            arguments["--container_name"],
                        )
                    elif arguments["--restore_container"]:
                        restore_container(
                            dlpx_obj,
                            arguments["--restore_container"],
                            arguments["--bookmark_name"],
                        )
                    elif arguments["--add_owner"]:
                        add_owner(
                            dlpx_obj,
                            arguments["--add_owner"],
                            arguments["--container_name"],
                        )
                    elif arguments["--refresh_container"]:
                        refresh_container(dlpx_obj,
                                          arguments["--refresh_container"])
                    elif arguments["--list_hierarchy"]:
                        list_hierarchy(dlpx_obj, arguments["--list_hierarchy"])
                    elif arguments["--reset"]:
                        reset_container(dlpx_obj, arguments["--reset"])
                    thingstodo.pop()
                # Inspect all outstanding jobs. Iterate over a snapshot of
                # the keys: deleting entries during direct dict iteration
                # raises RuntimeError on Python 3.
                i = 0
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info("{}: JS Container operations: {}".format(
                        engine["hostname"], job_obj.job_state))
                    if job_obj.job_state in [
                            "CANCELED", "COMPLETED", "FAILED"
                    ]:
                        # Terminal state: remove the job from the running
                        # jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == "RUNNING":
                        # Exact comparison (the old substring test matched
                        # any fragment of "RUNNING"): count running jobs.
                        i += 1
                # Report once per polling pass, not once per job.
                print_info("{}: {:d} jobs running.".format(
                    engine["hostname"], i))
                # If we have running jobs, pause before repeating the
                # checks.
                if len(dlpx_obj.jobs) > 0:
                    sleep(float(arguments["--poll"]))

    except (DlpxException, RequestError, JobError, HttpError) as e:
        print("\nError in js_container: {}:\n{}".format(engine["hostname"], e))
        sys.exit(1)
def main_workflow(engine):
    """
    Provision a VDB (Oracle SI, ASE, MSSQL, or vFiles) against one engine.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary containing engine information
                   (ip_address, username, password, hostname)
    """

    # Establish these variables as empty for use later
    environment_obj = None
    source_objs = None
    jobs = {}

    try:
        # Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

        group_obj = find_obj_by_name(dx_session_obj.server_session, group,
                                     arguments['--target_grp'])

        # Get the reference of the target environment.
        print_debug('Getting environment for %s\n' % (host_name), debug)

        # Get the environment object by the hostname
        environment_obj = find_obj_by_name(dx_session_obj.server_session,
                                           environment, host_name)

    except DlpxException as e:
        print('\nERROR: Engine %s encountered an error while provisioning '
              '%s:\n%s\n' % (engine['hostname'], arguments['--target'], e))
        sys.exit(1)

    print_debug('Getting database information for %s\n' %
                (arguments['--source']), debug)
    try:
        # Get the database reference we are copying from the database name
        database_obj = find_obj_by_name(dx_session_obj.server_session,
                                        database, arguments['--source'])
    except DlpxException:
        # Without a source database there is nothing to provision from.
        return

    thingstodo = ["thingtodo"]
    # Reset the running job count before we begin
    i = 0

    try:
        with dx_session_obj.job_mode(single_thread):
            while len(jobs) > 0 or len(thingstodo) > 0:
                arg_type = arguments['--type'].lower()
                if len(thingstodo) > 0:

                    if arg_type == "oracle":
                        create_oracle_si_vdb(engine, jobs, database_name,
                                             group_obj, environment_obj,
                                             database_obj,
                                             arguments['--prerefresh'],
                                             arguments['--postrefresh'],
                                             arguments['--prerollback'],
                                             arguments['--postrollback'],
                                             arguments['--configure-clone'])

                    elif arg_type == "ase":
                        create_ase_vdb(engine, server, jobs, group_obj,
                                       database_name, environment_obj,
                                       database_obj)

                    elif arg_type == "mssql":
                        create_mssql_vdb(engine, jobs, group_obj,
                                         database_name, environment_obj,
                                         database_obj)

                    elif arg_type == "vfiles":
                        create_vfiles_vdb(engine, jobs, group_obj,
                                          database_name, environment_obj,
                                          database_obj,
                                          arguments['--prerefresh'],
                                          arguments['--postrefresh'],
                                          arguments['--prerollback'],
                                          arguments['--postrollback'],
                                          arguments['--configure-clone'])

                    thingstodo.pop()

                # Get all the jobs, then inspect them.
                i = 0
                # Iterate over a snapshot of the keys: entries are deleted
                # below, and deleting while iterating a live dict view
                # raises RuntimeError on Python 3.
                for j in list(jobs.keys()):
                    job_obj = job.get(dx_session_obj.server_session, jobs[j])
                    print_debug(job_obj, debug)
                    print_info(engine["hostname"] + ": VDB Provision: " +
                               job_obj.job_state)

                    if job_obj.job_state in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del jobs[j]
                    else:
                        # If the job is in a running state, increment the
                        # running job count.
                        i += 1

                print_info('%s: %s jobs running.' % (engine['hostname'],
                           str(i)))

                # If we have running jobs, pause before repeating the checks.
                if len(jobs) > 0:
                    sleep(float(arguments['--poll']))

    except (DlpxException, JobError) as e:
        # print() function + the exception object itself: the original used a
        # Python 2 print statement and e.message, both of which fail on
        # Python 3 (and every other block in this file uses print()/format).
        print('\nError while provisioning %s:\n%s' % (database_name, e))
        sys.exit(1)
# Example #14 (score: 0)
def main_workflow(engine):
    """
    Run replication operations against one Delphix Engine.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary of engines
    """
    try:
        # Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

        if arguments['--vdb']:
            # Get the database reference we are copying from the database name
            database_obj = find_obj_by_name(dx_session_obj.server_session,
                                            database, arguments['--vdb'])

    except DlpxException as e:
        print_exception('\nERROR: Engine {} encountered an error while'
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dx_session_obj.job_mode(single_thread):
            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    # NOTE(review): OPERATION / method_call are template
                    # placeholders meant to be replaced with the real CLI
                    # flags and replication calls; they are undefined names
                    # as written.
                    if OPERATION:
                        method_call

                    elif OPERATION:
                        method_call
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                # Iterate over a snapshot of the keys: entries are deleted
                # while iterating, which raises RuntimeError on a Python 3
                # dict view.
                for j in list(dx_session_obj.jobs.keys()):
                    job_obj = job.get(dx_session_obj.server_session,
                                      dx_session_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Replication operations: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in [
                            "CANCELED", "COMPLETED", "FAILED"
                    ]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dx_session_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # Equality, not substring membership: the original
                        # "in 'RUNNING'" would also match 'RUN', 'NING', etc.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dx_session_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))

    except (HttpError, RequestError, JobError, DlpxException) as e:
        print_exception('ERROR: Could not complete replication '
                        'operation:{}'.format(e))
# Example #15 (score: 0)
def main_workflow(engine, dlpx_obj):
    """
    Run Jet Stream branch operations (create/delete/update/activate/list)
    against one Delphix Engine.

    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.

    :param engine: Dictionary of engines
    :param dlpx_obj: Virtualization Engine session object
    """

    # Establish these variables as empty for use later
    environment_obj = None
    source_objs = None

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                            engine['password'])
    except DlpxException as e:
        print_exception('\nERROR: Engine {} encountered an error while '
                        'provisioning {}:\n{}\n'.format(engine['hostname'],
                                                        arguments['--target'],
                                                        e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments['--create_branch']:
                        create_branch(dlpx_obj, arguments['--create_branch'],
                                      arguments['--container_name'],
                                      arguments['--template_name']
                                      if arguments['--template_name'] else None,
                                      arguments['--bookmark_name']
                                      if arguments['--bookmark_name'] else None)
                    elif arguments['--delete_branch']:
                        delete_branch(dlpx_obj, arguments['--delete_branch'])
                    elif arguments['--update_branch']:
                        update_branch(dlpx_obj, arguments['--update_branch'])
                    elif arguments['--activate_branch']:
                        activate_branch(dlpx_obj,
                                        arguments['--activate_branch'])
                    elif arguments['--list_branches']:
                        list_branches(dlpx_obj)
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                # Iterate over a snapshot of the keys: entries are deleted
                # while iterating, which raises RuntimeError on a Python 3
                # dict view.
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Provisioning JS Branch: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # Equality, not substring membership: the original
                        # "in 'RUNNING'" would also match 'RUN', 'NING', etc.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('\nError in js_branch: {}\n{}'.format(
            engine['hostname'], e))
def create_ora_sourceconfig(engine_name, port_num=1521):
    """
    Create (or reuse) an Oracle single-instance sourceconfig, link the
    dSource, wait for the link job to finish, then record the snapsync job.

    :param engine_name: Name of the Delphix Engine; used to key the entry
                        added to dx_session_obj.jobs
    :param port_num: Oracle listener port used in the JDBC connect string
    """
    create_ret = None
    env_obj = find_obj_by_name(dx_session_obj.server_session, environment,
                               arguments['--env_name'])

    try:
        sourceconfig_ref = find_obj_by_name(dx_session_obj.server_session,
                                            sourceconfig,
                                            arguments['--db_name']).reference
    except DlpxException:
        # No existing sourceconfig for this database; create one below.
        sourceconfig_ref = None

    repo_ref = find_dbrepo(dx_session_obj.server_session,
                           'OracleInstall', env_obj.reference,
                           arguments['--db_install_path']).reference

    dsource_params = OracleSIConfig()

    connect_str = ('jdbc:oracle:thin:@' + arguments['--ip_addr'] + ':' +
                   str(port_num) + ':' + arguments['--db_name'])

    dsource_params.database_name = arguments['--db_name']
    dsource_params.unique_name = arguments['--db_name']
    dsource_params.repository = repo_ref
    dsource_params.instance = OracleInstance()
    dsource_params.instance.instance_name = arguments['--db_name']
    dsource_params.instance.instance_number = 1
    dsource_params.services = [{'type': 'OracleService',
                                'jdbcConnectionString': connect_str}]

    try:
        if sourceconfig_ref is None:
            create_ret = link_ora_dsource(sourceconfig.create(
                dx_session_obj.server_session, dsource_params),
                env_obj.primary_user)
        else:
            # A sourceconfig already exists; link against it directly.
            # (The original used a redundant "elif ... is not None".)
            create_ret = link_ora_dsource(sourceconfig_ref,
                                          env_obj.primary_user)

        print_info('Created and linked the dSource {} with reference '
                   '{}.\n'.format(arguments['--db_name'], create_ret))
        link_job_ref = dx_session_obj.server_session.last_job
        link_job_obj = job.get(dx_session_obj.server_session, link_job_ref)
        # Poll the link job until it reaches a terminal state.
        while link_job_obj.job_state not in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
            print_info('Waiting three seconds for link job to complete, '
                       'and sync to begin')
            sleep(3)
            link_job_obj = job.get(dx_session_obj.server_session,
                                   link_job_ref)

        # Add the snapsync job to the jobs dictionary
        dx_session_obj.jobs[engine_name + 'snap'] = get_running_job(
            dx_session_obj.server_session, find_obj_by_name(
                dx_session_obj.server_session, database,
                arguments['--dsource_name']).reference)
        print_debug('Snapshot Job Reference: {}.\n'.format(
            dx_session_obj.jobs[engine_name + 'snap']))
    except (HttpError, RequestError) as e:
        print_exception('ERROR: Could not create the sourceconfig:\n'
                        '{}'.format(e))
        sys.exit(1)
def main_workflow(engine, dlpx_obj):
    """
    Run environment operations (create/delete/refresh/update/list/enable/
    disable) against one Delphix Engine.

    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                            engine['password'])

    except DlpxException as e:
        print_exception('ERROR: Engine {} encountered an error while'
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if (arguments['--type'] == 'linux' or
                            arguments['--type'] == 'windows'):
                        env_name = arguments['--env_name']
                        host_user = arguments['--host_user']
                        pw = arguments['--pw']
                        ip_addr = arguments['--ip']
                        host_name = arguments['--connector_name']
                        if arguments['--type'] == 'linux':
                            toolkit_path = arguments['--toolkit']
                            create_linux_env(dlpx_obj, env_name, host_user,
                                             ip_addr, toolkit_path, pw)
                        else:
                            create_windows_env(dlpx_obj, env_name, host_user,
                                               ip_addr, pw, host_name,)

                    elif arguments['--delete']:
                        delete_env(dlpx_obj, arguments['--delete'])

                    elif arguments['--refresh']:
                        refresh_env(dlpx_obj, arguments['--refresh'])

                    elif arguments['--update_ase_pw']:
                        update_ase_pw(dlpx_obj)

                    elif arguments['--update_ase_user']:
                        update_ase_username(dlpx_obj)
                    elif arguments['--list']:
                        list_env(dlpx_obj)
                    elif arguments['--update_host']:
                        update_host_address(dlpx_obj,
                                            arguments['--old_host_address'],
                                            arguments['--new_host_address'])
                    elif arguments['--enable']:
                        enable_environment(dlpx_obj, arguments['--env_name'])
                    elif arguments['--disable']:
                        disable_environment(dlpx_obj, arguments['--env_name'])

                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                # Iterate over a snapshot of the keys: entries are deleted
                # while iterating, which raises RuntimeError on a Python 3
                # dict view.
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{} Environment: {}'.format(
                               engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # Equality, not substring membership: the original
                        # "in 'RUNNING'" would also match 'RUN', 'NING', etc.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('Error while creating the environment {}\n{}'.format(
            arguments['--env_name'], e))
        sys.exit(1)
# Example #18 (score: 0)
def main_workflow(engine, dlpx_obj):
    """
    Run Jet Stream bookmark operations (create/delete/update/share/unshare/
    list) against one Delphix Engine.

    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                            engine['password'])
    except DlpxException as e:
        print_exception('ERROR: js_bookmark encountered an error authenticating'
                        ' to {} {}:\n{}\n'.format(engine['hostname'],
                                                  arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments['--create_bookmark']:
                        create_bookmark(dlpx_obj,
                                        arguments['--create_bookmark'],
                                        arguments['--data_layout'],
                                        arguments['--branch_name']
                                        if arguments['--branch_name']
                                        else None,
                                        arguments['--tags']
                                        if arguments['--tags'] else None,
                                        arguments['--description']
                                        if arguments['--description'] else None)
                    elif arguments['--delete_bookmark']:
                        delete_bookmark(dlpx_obj,
                                        arguments['--delete_bookmark'])
                    elif arguments['--update_bookmark']:
                        update_bookmark(dlpx_obj,
                                        arguments['--update_bookmark'])
                    elif arguments['--share_bookmark']:
                        share_bookmark(dlpx_obj,
                                       arguments['--share_bookmark'])
                    elif arguments['--unshare_bookmark']:
                        unshare_bookmark(dlpx_obj,
                                         arguments['--unshare_bookmark'])
                    elif arguments['--list_bookmarks']:
                        list_bookmarks(dlpx_obj,
                            arguments['--tags'] if arguments['--tags'] else None)
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                # Iterate over a snapshot of the keys: entries are deleted
                # while iterating, which raises RuntimeError on a Python 3
                # dict view.
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Running JS Bookmark: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # Equality, not substring membership: the original
                        # "in 'RUNNING'" would also match 'RUN', 'NING', etc.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('Error in js_bookmark: {}\n{}'.format(
            engine['hostname'], e))
        sys.exit(1)
def main_workflow(engine, dlpx_obj):
    """
    Run database operations (start/stop/enable/disable/list/all_dbs)
    against one Delphix Engine.

    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine["ip_address"], engine["username"],
                            engine["password"])

    except DlpxException as e:
        print_exception("ERROR: Engine {} encountered an error while"
                        "{}:\n{}\n".format(engine["hostname"],
                                           arguments["--target"], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments["--start"]:
                        dx_obj_operation(dlpx_obj, arguments["--vdb"], "start")
                    elif arguments["--stop"]:
                        dx_obj_operation(dlpx_obj, arguments["--vdb"], "stop")
                    elif arguments["--enable"]:
                        dx_obj_operation(dlpx_obj, arguments["--vdb"],
                                         "enable")
                    elif arguments["--disable"]:
                        # --force performs a hard disable.
                        if arguments["--force"]:
                            dx_obj_operation(dlpx_obj, arguments["--vdb"],
                                             "force_disable")
                        else:
                            dx_obj_operation(dlpx_obj, arguments["--vdb"],
                                             "disable")
                    elif arguments["--list"]:
                        list_databases(dlpx_obj)
                    elif arguments["--all_dbs"]:
                        all_databases(dlpx_obj, arguments["--all_dbs"])
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                # Iterate over a snapshot of the keys: entries are deleted
                # while iterating, which raises RuntimeError on a Python 3
                # dict view.
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    # NOTE(review): "JS Bookmark" looks copy-pasted from the
                    # bookmark workflow; this loop tracks database
                    # operations. Left unchanged to preserve log output.
                    print_info("{}: Running JS Bookmark: {}".format(
                        engine["hostname"], job_obj.job_state))
                    if job_obj.job_state in [
                            "CANCELED", "COMPLETED", "FAILED"
                    ]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == "RUNNING":
                        # Equality, not substring membership: the original
                        # 'in "RUNNING"' would also match "RUN", "NING", etc.
                        i += 1
                    print_info("{}: {:d} jobs running.".format(
                        engine["hostname"], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments["--poll"]))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception("Error in js_bookmark: {}\n{}".format(
            engine["hostname"], e))
        sys.exit(1)
def main_workflow(engine, dlpx_obj):
    """
    Run authorization operations (create/delete/list) against one Delphix
    Engine.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    :param engine: Dictionary of engines
    :type engine: dict
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(
            engine["ip_address"], engine["username"], engine["password"]
        )
    except DlpxException as e:
        # NOTE(review): the "js_bookmark" wording looks copy-pasted; this is
        # the dx_authorization workflow. Message left unchanged.
        print_exception(
            "ERROR: js_bookmark encountered an error authenticating"
            " to {} {}:\n{}\n".format(engine["hostname"], arguments["--target"], e)
        )
        # Without a session nothing below can succeed; exit like every
        # sibling workflow does (the original fell through and continued).
        sys.exit(1)
    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments["--create"]:
                        create_authorization(
                            dlpx_obj,
                            arguments["--role"],
                            arguments["--target_type"],
                            arguments["--target"],
                            arguments["--user"],
                        )
                    elif arguments["--delete"]:
                        delete_authorization(
                            dlpx_obj,
                            arguments["--role"],
                            arguments["--target_type"],
                            arguments["--target"],
                            arguments["--user"],
                        )
                    elif arguments["--list"]:
                        list_authorization(dlpx_obj)
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                # Iterate over a snapshot of the keys: entries are deleted
                # while iterating, which raises RuntimeError on a Python 3
                # dict view.
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session, dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info("{}: : {}".format(engine["hostname"], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == "RUNNING":
                        # Equality, not substring membership: the original
                        # 'in "RUNNING"' would also match "RUN", "NING", etc.
                        i += 1
                    print_info("{}: {:d} jobs running.".format(engine["hostname"], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments["--poll"]))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception(
            "\nError in dx_authorization: {}\n{}".format(engine["hostname"], e)
        )
        sys.exit(1)
# Example #21 (score: 0)
def main_workflow(engine, dlpx_obj):
    """
    Run Jet Stream bookmark operations (create/delete/update/share/unshare/
    list) against one Delphix Engine.

    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                            engine['password'])
    except DlpxException as e:
        print_exception(
            'ERROR: js_bookmark encountered an error authenticating'
            ' to {} {}:\n{}\n'.format(engine['hostname'],
                                      arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments['--create_bookmark']:
                        create_bookmark(
                            dlpx_obj, arguments['--create_bookmark'],
                            arguments['--data_layout'],
                            arguments['--branch_name']
                            if arguments['--branch_name'] else None,
                            arguments['--tag'] if arguments['--tag'] else None,
                            arguments['--description']
                            if arguments['--description'] else None)
                    elif arguments['--delete_bookmark']:
                        delete_bookmark(dlpx_obj,
                                        arguments['--delete_bookmark'])
                    elif arguments['--update_bookmark']:
                        update_bookmark(dlpx_obj,
                                        arguments['--update_bookmark'])
                    elif arguments['--share_bookmark']:
                        share_bookmark(dlpx_obj, arguments['--share_bookmark'])
                    elif arguments['--unshare_bookmark']:
                        unshare_bookmark(dlpx_obj,
                                         arguments['--unshare_bookmark'])
                    elif arguments['--list_bookmarks']:
                        list_bookmarks(
                            dlpx_obj,
                            arguments['--tag'] if arguments['--tag'] else None)
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                # Iterate over a snapshot of the keys: entries are deleted
                # while iterating, which raises RuntimeError on a Python 3
                # dict view.
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Running JS Bookmark: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in [
                            "CANCELED", "COMPLETED", "FAILED"
                    ]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # Equality, not substring membership: the original
                        # "in 'RUNNING'" would also match 'RUN', 'NING', etc.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('Error in js_bookmark: {}\n{}'.format(
            engine['hostname'], e))
        sys.exit(1)
# Example #22 (score: 0)
def create_ora_sourceconfig(engine_name, port_num=1521):
    """
    Create (or reuse) an Oracle single-instance sourceconfig and link it
    as a dSource, then register the resulting snapsync job.

    :param engine_name: Hostname key of the Delphix Engine; used to tag the
                        snapsync job in dx_session_obj.jobs.
    :param port_num: Oracle listener port used in the JDBC connect string.
    """
    create_ret = None
    env_obj = find_obj_by_name(dx_session_obj.server_session, environment,
                               arguments['--env_name'])

    # If the sourceconfig already exists we link it directly; otherwise we
    # create a new one below.
    try:
        sourceconfig_ref = find_obj_by_name(dx_session_obj.server_session,
                                            sourceconfig,
                                            arguments['--db_name']).reference
    except DlpxException:
        sourceconfig_ref = None

    repo_ref = find_dbrepo(dx_session_obj.server_session, 'OracleInstall',
                           env_obj.reference,
                           arguments['--db_install_path']).reference

    dsource_params = OracleSIConfig()

    # jdbc:oracle:thin:@<host>:<port>:<SID>
    connect_str = 'jdbc:oracle:thin:@{}:{}:{}'.format(
        arguments['--ip_addr'], port_num, arguments['--db_name'])

    dsource_params.database_name = arguments['--db_name']
    dsource_params.unique_name = arguments['--db_name']
    dsource_params.repository = repo_ref
    dsource_params.instance = OracleInstance()
    dsource_params.instance.instance_name = arguments['--db_name']
    dsource_params.instance.instance_number = 1
    dsource_params.services = [{
        'type': 'OracleService',
        'jdbcConnectionString': connect_str
    }]

    try:
        if sourceconfig_ref is None:
            # No existing sourceconfig: create one, then link it.
            create_ret = link_ora_dsource(
                sourceconfig.create(dx_session_obj.server_session,
                                    dsource_params), env_obj.primary_user)
        else:
            # Reuse the existing sourceconfig reference.
            create_ret = link_ora_dsource(sourceconfig_ref,
                                          env_obj.primary_user)

        print_info(
            'Created and linked the dSource {} with reference {}.\n'.format(
                arguments['--db_name'], create_ret))

        # Poll the link job until it reaches a terminal state; the snapsync
        # (initial sync) job only becomes visible after linking finishes.
        link_job_ref = dx_session_obj.server_session.last_job
        link_job_obj = job.get(dx_session_obj.server_session, link_job_ref)
        while link_job_obj.job_state not in [
                "CANCELED", "COMPLETED", "FAILED"
        ]:
            print_info(
                'Waiting three seconds for link job to complete, and sync to begin'
            )
            sleep(3)
            link_job_obj = job.get(dx_session_obj.server_session, link_job_ref)

        # Add the snapsync job to the jobs dictionary so main_workflow's
        # polling loop tracks it to completion.
        dx_session_obj.jobs[engine_name + 'snap'] = get_running_job(
            dx_session_obj.server_session,
            find_obj_by_name(dx_session_obj.server_session, database,
                             arguments['--dsource_name']).reference)
        print_debug('Snapshot Job Reference: {}.\n'.format(
            dx_session_obj.jobs[engine_name + 'snap']))
    except (HttpError, RequestError) as e:
        print_exception('ERROR: Could not create the sourceconfig:\n'
                        '{}'.format(e))
        sys.exit(1)
Пример #23
0
def main_workflow(engine):
    """
    This function actually runs the jobs.
    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously.

    engine: Dictionary describing one Delphix Engine (ip_address, username,
            password, hostname).
    """
    try:
        # Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        # Use engine['hostname'] here: dlpx_engines is a dict of engine
        # dicts and has no 'hostname' key at its top level.
        print_exception('\nERROR: Engine {} encountered an error while '
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dx_session_obj.job_mode(single_thread):
            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    # Dispatch exactly once on the requested dSource type.
                    if arguments['--type'].lower() == 'oracle':
                        create_ora_sourceconfig(engine["hostname"])
                    elif arguments['--type'].lower() == 'sybase':
                        link_ase_dsource(engine["hostname"])
                    elif arguments['--type'].lower() == 'mssql':
                        link_mssql_dsource(engine["hostname"])
                    thingstodo.pop()
                # Get all the jobs, then inspect them. Iterate over a
                # snapshot of the keys: deleting from a dict while
                # iterating its live view raises RuntimeError on Python 3.
                i = 0
                for j in list(dx_session_obj.jobs.keys()):
                    job_obj = job.get(dx_session_obj.server_session,
                                      dx_session_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Provisioning dSource: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in [
                            "CANCELED", "COMPLETED", "FAILED"
                    ]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dx_session_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # Equality, not substring membership: 'in' on a
                        # string would match any substring of 'RUNNING'.
                        i += 1
                print_info('{}: {:d} jobs running.'.format(
                    engine['hostname'], i))
                # If we have running jobs, pause once per polling pass
                # (not once per job) before repeating the checks.
                if len(dx_session_obj.jobs) > 0:
                    sleep(float(arguments['--poll']))

    except (HttpError, RequestError, JobError, DlpxException) as e:
        print_exception('ERROR: Could not complete ingesting the source '
                        'data:\n{}'.format(e))
        sys.exit(1)