def main_workflow(engine):
    """
    Run the requested user operation against a single Delphix Engine.

    Use the @run_async decorator to run this function asynchronously,
    which allows running against multiple Delphix Engines simultaneously.

    engine: Dictionary describing one engine (ip_address, username,
        password, hostname)
    """
    try:
        # Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        # Trailing space after "while" keeps the concatenated message
        # readable (it previously rendered as "while<target>").
        print_exception('\nERROR: Engine {} encountered an error while '
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dx_session_obj.job_mode(single_thread):
            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments['--add']:
                        add_user(arguments['--user_name'],
                                 arguments['--password'],
                                 arguments['--email'],
                                 arguments['--jsonly'])
                    elif arguments['--update']:
                        update_user(arguments['--user_name'],
                                    arguments['--password'],
                                    arguments['--email'],
                                    arguments['--jsonly'])
                    elif arguments['--delete']:
                        delete_user(arguments['--user_name'])
                    elif arguments['--list']:
                        list_users()
                    thingstodo.pop()
                # Get all the jobs, then inspect them.
                i = 0
                # Snapshot the keys: entries are deleted from the dict
                # inside the loop, which raises RuntimeError on Python 3
                # when iterating the live view.
                for j in list(dx_session_obj.jobs.keys()):
                    job_obj = job.get(dx_session_obj.server_session,
                                      dx_session_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: User: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                        # Job reached a terminal state: remove it from
                        # the running jobs list.
                        del dx_session_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # The previous `in 'RUNNING'` was a substring
                        # test and would also match partial states such
                        # as 'RUN'; equality is what is meant.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                               engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dx_session_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))

    except (HttpError, RequestError, JobError, DlpxException) as e:
        print_exception('ERROR: Could not complete user '
                        'operation: {}'.format(e))
def create_template(dlpx_obj, template_name, database_name):
    """
    Create the JS Template.

    dlpx_obj: Virtualization Engine session object
    template_name: Name of the template to create
    database_name: Colon-delimited name(s) of the database(s) to use in
        the template
    """

    js_template_params = JSDataTemplateCreateParameters()
    js_template_params.name = template_name
    template_ds_lst = []
    # list() is required on Python 3, where dict.keys() returns a
    # non-indexable view object.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]

    for db in database_name.split(':'):
        # 'database' is a module-level object (presumably the delphixpy
        # database module) — TODO confirm against the file's imports.
        template_ds_lst.append(build_ds_params(dlpx_obj, database, db))
    try:
        js_template_params.data_sources = template_ds_lst
        js_template_params.type = 'JSDataTemplateCreateParameters'
        template.create(dlpx_obj.server_session, js_template_params)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info('Template {} was created successfully.\n'.format(
            template_name))
    except (DlpxException, RequestError, HttpError) as e:
        print_exception('\nThe template {} was not created. The error '
                        'was:\n\n{}'.format(template_name, e))
def create_replication_job():
    """
    Create a replication job.

    :return: Reference to the spec object
    """
    rep_spec = ReplicationSpec()
    rep_spec.name = arguments['--rep_name']
    rep_spec.target_host = arguments['--target_host']
    rep_spec.target_principal = arguments['--target_user']
    rep_spec.target_credential = {
        'type': 'PasswordCredential',
        'password': arguments['--target_pw'],
    }
    rep_spec.object_specification = ReplicationList()
    rep_spec.schedule = arguments['--schedule']
    rep_spec.encrypted = True

    # Optional tuning parameters are only set when supplied.
    num_cons = arguments['--num_cons']
    if num_cons:
        rep_spec.number_of_connections = int(num_cons)
    bandwidth = arguments['--bandwidth']
    if bandwidth:
        rep_spec.bandwidth_limit = int(bandwidth)
    if arguments['--enabled']:
        rep_spec.enabled = True
    try:
        # Resolve the replicated object names into object specifications.
        obj_names = arguments['--rep_objs'].split(',')
        rep_spec.object_specification.objects = find_obj_specs(
            dx_session_obj.server_session, obj_names)

        ref = spec.create(dx_session_obj.server_session, rep_spec)
        last_job = dx_session_obj.server_session.last_job
        if last_job:
            # Track the job so the caller's polling loop can watch it.
            dx_session_obj.jobs[dx_session_obj.server_session.address] = \
                last_job
        print_info('Successfully created {} with reference '
                   '{}\n'.format(arguments['--rep_name'], ref))

    except (HttpError, RequestError, DlpxException) as e:
        print_exception('Could not create replication job {}:\n{}'.format(
            arguments['--rep_name'], e))
def list_authorization(dlpx_obj):
    """
    Function to list authorizations for a given engine.

    :param dlpx_obj: Virtualization Engine session object
    """
    target_obj = None

    try:
        print_info("User, Role, Target, Reference")
        for auth_obj in authorization.get_all(dlpx_obj.server_session):
            role_obj = role.get(dlpx_obj.server_session, auth_obj.role)
            user_obj = user.get(dlpx_obj.server_session, auth_obj.user)
            target_ref = auth_obj.target
            # Resolve the target by its reference prefix.
            if target_ref.startswith("USER"):
                target_obj = user.get(dlpx_obj.server_session, target_ref)
            elif target_ref.startswith("GROUP"):
                target_obj = group.get(dlpx_obj.server_session, target_ref)
            elif target_ref.startswith("DOMAIN"):
                # Domain-wide authorization: synthesize a placeholder.
                target_obj = User()
                target_obj.name = "DOMAIN"
            # If no prefix matched, target_obj may still be None and the
            # resulting AttributeError is caught below.
            row = "{}, {}, {}, {}".format(
                user_obj.name, role_obj.name, target_obj.name, auth_obj.reference
            )
            print(row)
    except (RequestError, HttpError, JobError, AttributeError) as e:
        print_exception(
            "An error occurred while listing authorizations.:\n" "{}\n".format((e))
        )
def create_template(dlpx_obj, template_name, database_name):
    """
    Create the JS Template.

    dlpx_obj: Virtualization Engine session object
    template_name: Name of the template to create
    database_name: Colon-delimited name(s) of the database(s) to use in
        the template
    """

    js_template_params = JSDataTemplateCreateParameters()
    js_template_params.name = template_name
    template_ds_lst = []
    # list() is required on Python 3, where dict.keys() returns a
    # non-indexable view object.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]

    for db in database_name.split(":"):
        # 'database' is a module-level object (presumably the delphixpy
        # database module) — TODO confirm against the file's imports.
        template_ds_lst.append(build_ds_params(dlpx_obj, database, db))
    try:
        js_template_params.data_sources = template_ds_lst
        js_template_params.type = "JSDataTemplateCreateParameters"
        template.create(dlpx_obj.server_session, js_template_params)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info(
            "Template {} was created successfully.\n".format(template_name))
    except (DlpxException, RequestError, HttpError) as e:
        print_exception("\nThe template {} was not created. The error "
                        "was:\n\n{}".format(template_name, e))
Example #6
0
def main_workflow(engine):
    """
    Run the requested group operation against a single Delphix Engine.

    Use the @run_async decorator to run this function asynchronously,
    which allows running against multiple Delphix Engines simultaneously.

    engine: Dictionary describing one engine (ip_address, username,
        password, hostname)
    """
    try:
        # Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine["ip_address"], engine["username"],
                                  engine["password"])

    except DlpxException as e:
        # Trailing space after "while" keeps the concatenated message
        # readable (it previously rendered as "while<target>").
        print_exception("\nERROR: Engine {} encountered an error while "
                        "{}:\n{}\n".format(engine["hostname"],
                                           arguments["--target"], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dx_session_obj.job_mode(single_thread):
            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    if arguments["--add"]:
                        add_group(arguments["--group_name"])
                    elif arguments["--delete"]:
                        delete_group(arguments["--group_name"])
                    elif arguments["--list"]:
                        list_groups()
                    thingstodo.pop()
                # Get all the jobs, then inspect them.
                i = 0
                # Snapshot the keys: entries are deleted from the dict
                # inside the loop, which raises RuntimeError on Python 3
                # when iterating the live view.
                for j in list(dx_session_obj.jobs.keys()):
                    job_obj = job.get(dx_session_obj.server_session,
                                      dx_session_obj.jobs[j])
                    print_debug(job_obj)
                    print_info("{}: Group: {}".format(engine["hostname"],
                                                      job_obj.job_state))
                    if job_obj.job_state in [
                            "CANCELED", "COMPLETED", "FAILED"
                    ]:
                        # Job reached a terminal state: remove it from
                        # the running jobs list.
                        del dx_session_obj.jobs[j]
                    elif job_obj.job_state == "RUNNING":
                        # The previous `in "RUNNING"` was a substring
                        # test and would also match partial states such
                        # as "RUN"; equality is what is meant.
                        i += 1
                    print_info("{}: {:d} jobs running.".format(
                        engine["hostname"], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dx_session_obj.jobs) > 0:
                        sleep(float(arguments["--poll"]))

    except (HttpError, RequestError, JobError, DlpxException) as e:
        print_exception("ERROR: Could not complete group "
                        "operation: {}".format(e))
Example #7
0
def list_containers(dlpx_obj):
    """
    List all containers on a given engine.

    dlpx_obj: Virtualization Engine session object
    """

    header = "Name, Active Branch, Owner, Reference, Template, Last Updated"
    js_containers = container.get_all(dlpx_obj.server_session)
    try:
        print(header)
        for js_container in js_containers:
            # Drop the last five characters of the timestamp before
            # conversion — presumably a zone/fraction suffix; confirm
            # against convert_timestamp's expected input format.
            trimmed_ts = js_container.last_updated[:-5]
            last_updated = convert_timestamp(dlpx_obj.server_session,
                                             trimmed_ts)
            row = "{}, {}, {}, {}, {}, {}".format(
                js_container.name,
                js_container.active_branch,
                str(js_container.owner),
                str(js_container.reference),
                str(js_container.template),
                last_updated,
            )
            print_info(row)
    except (DlpxException, HttpError, RequestError) as e:
        print_exception("\nERROR: JS Containers could not be listed. The "
                        "error was:\n\n{}".format(e))
def list_branches(dlpx_obj):
    """
    List all branches on a given engine.

    :param dlpx_obj: Virtualization Engine session object
    """

    try:
        header = '\nBranch Name, Data Layout, Reference, End Time'
        js_data_layout = ''
        js_branches = branch.get_all(dlpx_obj.server_session)

        # print() with a single argument behaves identically on Python 2
        # and 3; the bare `print header` statement was Python-2-only and
        # inconsistent with the rest of this file.
        print(header)
        for js_branch in js_branches:
            js_end_time = operation.get(dlpx_obj.server_session,
                                        js_branch.first_operation).end_time
            # Resolve the data layout's display name from its reference.
            if re.search('TEMPLATE', js_branch.data_layout):
                js_data_layout = find_obj_name(dlpx_obj.server_session,
                                               template, js_branch.data_layout)
            elif re.search('CONTAINER', js_branch.data_layout):
                js_data_layout = find_obj_name(dlpx_obj.server_session,
                                               container, js_branch.data_layout)
            # NOTE(review): js_branch._name is a private attribute and is
            # indexed here — confirm [0] access against the delphixpy
            # object model.
            print_info('{} {}, {}, {}'.format(js_branch._name[0],
                                              js_data_layout,
                                              js_branch.reference,
                                              js_end_time))
    except (DlpxException, HttpError, RequestError) as e:
        print_exception('\nERROR: JS Branches could not be listed. The '
                        'error was:\n\n{}'.format(e))
def list_authorization(dlpx_obj):
    """
    Function to list authorizations for a given engine.

    :param dlpx_obj: Virtualization Engine session object
    """
    target_obj = None

    try:
        auth_objs = authorization.get_all(dlpx_obj.server_session)
        print_info('User, Role, Target, Reference')
        for auth_obj in auth_objs:
            role_obj = role.get(dlpx_obj.server_session, auth_obj.role)
            user_obj = user.get(dlpx_obj.server_session, auth_obj.user)
            # Resolve the target by its reference prefix.
            if auth_obj.target.startswith('USER'):
                target_obj = user.get(dlpx_obj.server_session, auth_obj.target)
            elif auth_obj.target.startswith('GROUP'):
                target_obj = group.get(dlpx_obj.server_session, auth_obj.target)
            elif auth_obj.target.startswith('DOMAIN'):
                # Domain-wide authorization: synthesize a placeholder.
                target_obj = User()
                target_obj.name = 'DOMAIN'
            # print() with a single argument behaves identically on
            # Python 2 and 3; the bare print statement was Python-2-only
            # and inconsistent with the other list_authorization in this
            # file. If no prefix matched, target_obj may be None and the
            # AttributeError is caught below.
            print('{}, {}, {}, {}'.format(user_obj.name, role_obj.name,
                                          target_obj.name,
                                          auth_obj.reference))
    except (RequestError, HttpError, JobError, AttributeError) as e:
        print_exception('An error occurred while listing authorizations.:\n'
                        '{}\n'.format(e))
Example #10
0
def restore_container(dlpx_obj, container_name, bookmark_name):
    """
    Restores a container to a given JS bookmark.

    dlpx_obj: Virtualization Engine session object
    container_name: Name of the container
    bookmark_name: Name of the bookmark to restore
    """
    bookmark_params = JSTimelinePointBookmarkInput()
    bookmark_params.bookmark = get_obj_reference(dlpx_obj.server_session,
                                                 bookmark,
                                                 bookmark_name).pop()
    # list() is required on Python 3, where dict.keys() returns a
    # non-indexable view object.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    try:
        container.restore(
            dlpx_obj.server_session,
            get_obj_reference(dlpx_obj.server_session, container,
                              container_name).pop(),
            bookmark_params,
        )
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info("Container {} was restored successfully with "
                   "bookmark {}".format(container_name, bookmark_name))
    except (DlpxException, RequestError, HttpError) as e:
        # The previous message ("The user was not added to container")
        # was copy-pasted from add_owner; this is a restore failure.
        print_exception("Container {} was not restored with bookmark {}. "
                        "The error was:\n{}\n".format(container_name,
                                                      bookmark_name, e))
Example #11
0
def add_owner(dlpx_obj, owner_name, container_name):
    """
    Adds an owner to a container.

    dlpx_obj: Virtualization Engine session object
    owner_name: Grant authorizations for the given user on this container and
        parent template
    container_name: Name of the container
    """

    owner_params = JSDataContainerModifyOwnerParameters()
    try:
        # Resolve the user and container references by name.
        user_ref = get_obj_reference(dlpx_obj.server_session, user,
                                     owner_name).pop()
        owner_params.owner = user_ref
        container_ref = get_obj_reference(dlpx_obj.server_session, container,
                                          container_name).pop()
        container.add_owner(dlpx_obj.server_session, container_ref,
                            owner_params)
        print_info("User {} was granted access to {}".format(
            owner_name, container_name))
    except (DlpxException, RequestError, HttpError) as e:
        print_exception("The user was not added to container {}. The error"
                        " was:\n{}\n".format(container_name, e))
def create_replication_job():
    """
    Create a replication job.

    :return: Reference to the spec object
    """
    rep_spec = ReplicationSpec()
    rep_spec.name = arguments['--rep_name']
    rep_spec.target_host = arguments['--target_host']
    rep_spec.target_principal = arguments['--target_user']
    rep_spec.target_credential = {
        'type': 'PasswordCredential',
        'password': arguments['--target_pw'],
    }
    rep_spec.object_specification = ReplicationList()
    rep_spec.schedule = arguments['--schedule']
    rep_spec.encrypted = True

    # Optional tuning parameters are only set when supplied.
    if arguments['--num_cons']:
        rep_spec.number_of_connections = int(arguments['--num_cons'])
    if arguments['--bandwidth']:
        rep_spec.bandwidth_limit = int(arguments['--bandwidth'])
    if arguments['--enabled']:
        rep_spec.enabled = True
    try:
        # Resolve the replicated object names into object specifications.
        requested_objs = arguments['--rep_objs'].split(',')
        rep_spec.object_specification.objects = find_obj_specs(
            dx_session_obj.server_session, requested_objs)

        ref = spec.create(dx_session_obj.server_session, rep_spec)
        if dx_session_obj.server_session.last_job:
            # Track the job so the caller's polling loop can watch it.
            dx_session_obj.jobs[dx_session_obj.server_session.address] = \
                dx_session_obj.server_session.last_job
        print_info('Successfully created {} with reference '
                   '{}\n'.format(arguments['--rep_name'], ref))

    except (HttpError, RequestError, DlpxException) as e:
        print_exception('Could not create replication job {}:\n{}'.format(
            arguments['--rep_name'], e))
Example #13
0
def create_container(dlpx_obj, template_name, container_name, database_name):
    """
    Create the JS container.

    dlpx_obj: Virtualization Engine session object
    template_name: Name of the parent JS template
    container_name: Name of the container to create
    database_name: Colon-delimited name(s) of the database(s) to use in
        the container
    """

    js_container_params = JSDataContainerCreateParameters()
    container_ds_lst = []
    # list() is required on Python 3, where dict.keys() returns a
    # non-indexable view object.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    for db in database_name.split(":"):
        container_ds_lst.append(build_ds_params(dlpx_obj, database, db))

    try:
        js_template_obj = find_obj_by_name(dlpx_obj.server_session, template,
                                           template_name)
        js_container_params.template = js_template_obj.reference
        # Provision from the latest point in time on the parent template.
        js_container_params.timeline_point_parameters = {
            "sourceDataLayout": js_template_obj.reference,
            "type": "JSTimelinePointLatestTimeInput",
        }
        js_container_params.data_sources = container_ds_lst
        js_container_params.name = container_name
        container.create(dlpx_obj.server_session, js_container_params)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info(
            "JS Container {} was created successfully.".format(container_name))
    except (DlpxException, RequestError, HttpError) as e:
        print_exception("Container {} was not created. The error "
                        "was:\n{}\n".format(container_name, e))
def _print_job_details(job_info):
    """Print one job's attributes in the list_jobs output format."""
    print('Action={}, Job State={}, Parent Action State={},'
          'Percent Complete={}, Reference={}, Target={},'
          'Target Name={}, Title={}, User={}\n'.format(
              job_info.action_type, job_info.job_state,
              job_info.parent_action_state, job_info.percent_complete,
              job_info.reference, job_info.target, job_info.target_name,
              job_info.title, job_info.user))


def list_jobs():
    """
    List jobs on the engine.

    Honors the optional --state filter (validated against the known job
    states, exits with status 1 when invalid) and the optional --title
    regex filter (case-insensitive). The original body repeated the same
    print statement four times; the formatting now lives in
    _print_job_details.
    """
    if arguments['--state']:
        state = arguments['--state'].upper()
        if not re.match('RUNNING|SUSPENDED|CANCELED|COMPLETED|FAILED',
                        state):
            print_info('The state should be one of these options:\n'
                       'RUNNING, SUSPENDED, CANCELED, COMPLETED, FAILED')
            sys.exit(1)
        job_iter = job.get_all(dx_session_obj.server_session,
                               job_state=state)
    else:
        job_iter = job.get_all(dx_session_obj.server_session)

    for job_info in job_iter:
        if arguments['--title']:
            # Only print jobs whose title matches the requested pattern.
            if re.search(arguments['--title'], job_info.title,
                         re.IGNORECASE):
                _print_job_details(job_info)
        else:
            _print_job_details(job_info)
def main_workflow(engine, dlpx_obj):
    """
    This function is where we create our main workflow.
    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix Engine
    simultaneously

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                            engine['password'])
    except DlpxException as e:
        # Trailing space after "while" keeps the concatenated message
        # readable (it previously rendered as "whilerewinding").
        print_exception('ERROR: Engine {} encountered an error while '
                        'rewinding {}:\n{}\n'.format(engine['hostname'],
                                                     arguments['--target'], e))
        # The session is unusable after a failed connection; exit here
        # like the sibling main_workflow implementations in this file.
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    rewind_database(dlpx_obj, arguments['--vdb'],
                                    arguments['--timestamp'],
                                    arguments['--timestamp_type'])
                    thingstodo.pop()

                # Get all the jobs, then inspect them.
                i = 0
                # Snapshot the keys: entries are deleted from the dict
                # inside the loop, which raises RuntimeError on Python 3
                # when iterating the live view.
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Refresh of {}: {}'.format(
                        engine['hostname'], arguments['--vdb'],
                        job_obj.job_state))
                    if job_obj.job_state in ['CANCELED', 'COMPLETED',
                                             'FAILED']:
                        # Job reached a terminal state: remove it from
                        # the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # The previous `in 'RUNNING'` was a substring
                        # test; equality is what is meant.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('Error in dx_rewind_vdb: {}\n{}'.format(
            engine['hostname'], e))
        sys.exit(1)
def main_workflow(engine, dlpx_obj):
    """
    This function is where we create our main workflow.
    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix Engine
    simultaneously

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                            engine['password'])
    except DlpxException as e:
        # Trailing space after "while" keeps the concatenated message
        # readable (it previously rendered as "whilerewinding").
        print_exception('ERROR: Engine {} encountered an error while '
                        'rewinding {}:\n{}\n'.format(engine['hostname'],
                                                     arguments['--target'], e))
        # The session is unusable after a failed connection; exit here
        # like the sibling main_workflow implementations in this file.
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    rewind_database(dlpx_obj, arguments['--vdb'],
                                    arguments['--timestamp'],
                                    arguments['--timestamp_type'])
                    thingstodo.pop()

                # Get all the jobs, then inspect them.
                i = 0
                # Snapshot the keys: entries are deleted from the dict
                # inside the loop, which raises RuntimeError on Python 3
                # when iterating the live view.
                for j in list(dlpx_obj.jobs.keys()):
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Refresh of {}: {}'.format(
                        engine['hostname'], arguments['--vdb'],
                        job_obj.job_state))
                    if job_obj.job_state in ['CANCELED', 'COMPLETED',
                                             'FAILED']:
                        # Job reached a terminal state: remove it from
                        # the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # The previous `in 'RUNNING'` was a substring
                        # test; equality is what is meant.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('Error in dx_rewind_vdb: {}\n{}'.format(
            engine['hostname'], e))
        sys.exit(1)
def main_workflow(engine):
    """
    This function actually runs the jobs.
    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engine simultaneously

    engine: Dictionary of engines
    """
    # Local job tracker. NOTE(review): nothing in this function ever adds
    # entries to it, so the polling loop below completes after its first
    # pass — confirm whether update_ase_db_pw() was meant to register
    # jobs here.
    jobs = {}

    try:
        # Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        # Trailing space after "while" keeps the concatenated message
        # readable (it previously rendered as "while<target>").
        print_exception('\nERROR: Engine %s encountered an error while '
                        '%s:\n%s\n' %
                        (engine['hostname'], arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    # Reset the running job count before we begin.
    i = 0
    with dx_session_obj.job_mode(single_thread):
        while len(jobs) > 0 or len(thingstodo) > 0:
            if len(thingstodo) > 0:
                if arguments['--pw']:
                    update_ase_db_pw()

                # elif OPERATION:
                #     method_call

                thingstodo.pop()

            # Get all the jobs, then inspect them.
            i = 0
            # Snapshot the keys: entries are deleted from the dict inside
            # the loop, which raises RuntimeError on Python 3 when
            # iterating the live view.
            for j in list(jobs.keys()):
                job_obj = job.get(dx_session_obj.server_session, jobs[j])
                print_debug(job_obj)
                print_info(engine["hostname"] + ": VDB Operations: " +
                           job_obj.job_state)

                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    # Job reached a terminal state: remove it from the
                    # running jobs list.
                    del jobs[j]
                else:
                    # Job still running: count it.
                    i += 1

            print_info(engine["hostname"] + ": " + str(i) + " jobs running. ")
            # If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments['--poll']))
def _show_job_details(job_info):
    """Print one job's attributes in the list_jobs output format."""
    print('Action={}, Job State={}, Parent Action State={},'
          'Percent Complete={}, Reference={}, Target={},'
          'Target Name={}, Title={}, User={}\n'.format(
              job_info.action_type, job_info.job_state,
              job_info.parent_action_state, job_info.percent_complete,
              job_info.reference, job_info.target, job_info.target_name,
              job_info.title, job_info.user))


def list_jobs():
    """
    List jobs on the engine.

    Honors the optional --state filter (validated against the known job
    states, exits with status 1 when invalid) and the optional --title
    regex filter (case-insensitive). The original body repeated the same
    print statement four times; the formatting now lives in
    _show_job_details.
    """
    if arguments['--state']:
        state = arguments['--state'].upper()
        if not re.match('RUNNING|SUSPENDED|CANCELED|COMPLETED|FAILED',
                        state):
            print_info('The state should be one of these options:\n'
                       'RUNNING, SUSPENDED, CANCELED, COMPLETED, FAILED')
            sys.exit(1)
        job_iter = job.get_all(dx_session_obj.server_session,
                               job_state=state)
    else:
        job_iter = job.get_all(dx_session_obj.server_session)

    for job_info in job_iter:
        if arguments['--title']:
            # Only print jobs whose title matches the requested pattern.
            if re.search(arguments['--title'], job_info.title,
                         re.IGNORECASE):
                _show_job_details(job_info)
        else:
            _show_job_details(job_info)
def main_workflow(engine):
    """
    Run the per-engine workflow: connect to one Delphix Engine, kick off the
    archive-log check, then poll the tracked jobs until none remain.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engine simultaneously

    engine: dict describing one engine; expects the keys 'ip_address',
        'username', 'password' and 'hostname'
    """
    # Jobs started by this workflow, keyed by engine hostname.
    # NOTE(review): nothing visible in this function ever adds entries to
    # this local dict, so the polling loop below may never see a job --
    # confirm that find_missing_archivelogs() populates it (or whether
    # dx_session_obj.jobs was intended, as in the sibling workflows).
    jobs = {}

    try:
        #Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        # A failed login/connection is fatal for the whole run.
        print_exception('\nERROR: Engine {} encountered an error while'
                        '{}:\n{}\n'.format(engine['hostname'],
                        arguments['--target'], e))
        sys.exit(1)

    # Sentinel work list: one entry means "the operation has not run yet".
    # It is popped after the first pass, after which the loop only polls.
    thingstodo = ["thingtodo"]
    #reset the running job count before we begin
    i = 0
    with dx_session_obj.job_mode(single_thread):
        while (len(jobs) > 0 or len(thingstodo)> 0):
            if len(thingstodo)> 0:

                #if OPERATION:
                find_missing_archivelogs(engine['hostname'])

                thingstodo.pop()

            #get all the jobs, then inspect them
            i = 0
            for j in jobs.keys():
                job_obj = job.get(dx_session_obj.server_session, jobs[j])
                print_debug(job_obj)
                print_info('{}: VDB Operations:{}\n'.format(engine['hostname'],
                           job_obj.job_state))

                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    #If the job is in a non-running state, remove it from the
                    # running jobs list.
                    # NOTE(review): deleting while iterating .keys() is only
                    # safe on Python 2, where keys() returns a list copy.
                    del jobs[j]
                else:
                    #If the job is in a running state, increment the running
                    # job count.
                    i += 1

            print_info(engine["hostname"] + ": " + str(i) + " jobs running. ")
            #If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments['--poll']))
def main_workflow(engine):
    """
    Run the per-engine workflow: connect to one Delphix Engine, optionally
    list jobs, then poll the session's tracked jobs until they finish.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engine simultaneously

    engine: dict describing one engine; expects the keys 'ip_address',
        'username', 'password' and 'hostname'
    """
    # NOTE(review): this local dict is never used -- the loop below tracks
    # jobs via dx_session_obj.jobs instead. Candidate for removal.
    jobs = {}

    try:
        #Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        # A failed login/connection is fatal for the whole run.
        print_exception('\nERROR: Engine {} encountered an error while'
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)

    # Sentinel work list: one entry means "the operation has not run yet";
    # it is popped after the first pass so the loop then only polls jobs.
    thingstodo = ["thingtodo"]
    with dx_session_obj.job_mode(single_thread):
        while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
            if len(thingstodo) > 0:

                if arguments['--list']:
                    list_jobs()
                thingstodo.pop()

            # get all the jobs, then inspect them
            i = 0
            for j in dx_session_obj.jobs.keys():
                job_obj = job.get(dx_session_obj.server_session,
                                  dx_session_obj.jobs[j])
                print_debug(job_obj)
                print_info('{}: Operations: {}'.format(engine['hostname'],
                                                       job_obj.job_state))
                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    # If the job is in a non-running state, remove it from the
                    # running jobs list.
                    # NOTE(review): deleting while iterating .keys() is only
                    # safe on Python 2, where keys() returns a list copy.
                    del dx_session_obj.jobs[j]
                elif job_obj.job_state in 'RUNNING':
                    # NOTE(review): substring containment -- e.g. 'RUN' would
                    # also match; `== 'RUNNING'` was probably intended.
                    # If the job is in a running state, increment the running
                    # job count.
                    i += 1

                # NOTE(review): this prints once per tracked job; it looks
                # like it was meant to sit outside the for loop.
                print_info('{}: {:d} jobs running.'.format(
                    engine['hostname'], i))

            # If we have running jobs, pause before repeating the checks.
            if len(dx_session_obj.jobs) > 0:
                sleep(float(arguments['--poll']))
def main_workflow(engine):
    """
    Run the per-engine workflow: connect to one Delphix Engine, refresh the
    requested VDB, then poll the session's tracked jobs until they finish.

    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engine simultaneously

    engine: dict describing one engine; expects the keys 'ip_address',
        'username', 'password' and 'hostname'
    """
    # NOTE(review): this local dict is never used -- the loop below tracks
    # jobs via dx_session_obj.jobs instead. Candidate for removal.
    jobs = {}

    try:
        #Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        # A failed login/connection is fatal for the whole run.
        print_exception('\nERROR: Engine {} encountered an error while' 
                        '{}:\n{}\n'.format(engine['hostname'],
                        arguments['--target'], e))
        sys.exit(1)

    # Sentinel work list: one entry means "the operation has not run yet";
    # it is popped after the first pass so the loop then only polls jobs.
    thingstodo = ["thingtodo"]
    with dx_session_obj.job_mode(single_thread):
        while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
            if len(thingstodo)> 0:
                refresh_database(arguments['--vdb'],
                                    arguments['--timestamp'],
                                    arguments['--timestamp_type'])
                thingstodo.pop()

            #get all the jobs, then inspect them
            i = 0
            for j in dx_session_obj.jobs.keys():
                job_obj = job.get(dx_session_obj.server_session,
                                  dx_session_obj.jobs[j])
                print_debug(job_obj)
                print_info('{}: Operations: {}'.format(engine['hostname'],
                                                       job_obj.job_state))
                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    #If the job is in a non-running state, remove it from the
                    # running jobs list.
                    # NOTE(review): deleting while iterating .keys() is only
                    # safe on Python 2, where keys() returns a list copy.
                    del dx_session_obj.jobs[j]
                elif job_obj.job_state in 'RUNNING':
                    # NOTE(review): substring containment -- e.g. 'RUN' would
                    # also match; `== 'RUNNING'` was probably intended.
                    #If the job is in a running state, increment the running
                    # job count.
                    i += 1

                # NOTE(review): this prints once per tracked job; it looks
                # like it was meant to sit outside the for loop.
                print_info('{}: {:d} jobs running.'.format(
                    engine['hostname'], i))

            #If we have running jobs, pause before repeating the checks.
            if len(dx_session_obj.jobs) > 0:
                sleep(float(arguments['--poll']))
def find_database_by_name_and_group_name(engine, server, group_name,
                                         database_name):
    """
    Look up a database by name inside the named group.

    Returns the first matching database object; when no database matches,
    logs an informational message and returns None implicitly.
    """
    for candidate in find_all_databases_by_group_name(engine, server,
                                                      group_name):
        if candidate.name == database_name:
            print_debug(engine["hostname"] + ": Found a match " + 
                        str(candidate.reference))
            return candidate

    print_info(engine["hostname"] + ": Unable to find \"" + 
               database_name + "\" in " + group_name)
def find_database_by_name_and_group_name(engine, server, group_name,
                                         database_name):
    """
    Look up a database by name inside the named group.

    Returns the first matching database object; when no database matches,
    logs an informational message and returns None implicitly.
    """
    group_databases = find_all_databases_by_group_name(engine, server,
                                                       group_name)
    for db in group_databases:
        if db.name != database_name:
            continue
        print_debug('%s: Found a match %s' % (engine['hostname'],
                    str(db.reference)))
        return db

    print_info('%s unable to find %s in %s' % (engine['hostname'],
                                               database_name, group_name))
Example #24
0
def find_database_by_name_and_group_name(engine, server, group_name,
                                         database_name):
    """
    Look up a database by name inside the named group.

    Returns the first matching database object; when no database matches,
    logs an informational message and returns None implicitly.
    """
    for candidate in find_all_databases_by_group_name(engine, server,
                                                      group_name):
        if candidate.name == database_name:
            print_debug('%s: Found a match %s' %
                        (engine['hostname'], str(candidate.reference)))
            return candidate

    print_info('%s unable to find %s in %s' %
               (engine['hostname'], database_name, group_name))
Example #25
0
def find_database_by_name_and_group_name(engine, server, group_name,
                                         database_name):
    """
    Look up a database by name inside the named group.

    Returns the first matching database object; when no database matches,
    logs an informational message and returns None implicitly.
    """
    all_dbs = find_all_databases_by_group_name(engine, server, group_name)
    match = None
    for db in all_dbs:
        if db.name == database_name:
            match = db
            break

    if match is not None:
        print_debug(engine["hostname"] + ": Found a match " +
                    str(match.reference))
        return match

    print_info(engine["hostname"] + ': Unable to find "' + database_name +
               '" in ' + group_name)
Example #26
0
def create_ase_vdb(
    engine, server, jobs, vdb_group, vdb_name, environment_obj, container_obj
):
    """
    Create a Sybase ASE VDB.

    engine: dict describing the engine (expects 'hostname')
    server: Delphix Engine session object
    jobs: dict used to track provisioning jobs, keyed by engine hostname
    vdb_group: group object that will contain the new VDB
    vdb_name: name of the VDB to create
    environment_obj: target environment object (provides .host, .reference)
    container_obj: source container to provision from

    Returns the last job reference when a provision job was started, or the
    reference of the existing VDB when one with this name already exists.
    """
    vdb_obj = find_database_by_name_and_group_name(
        engine, server, vdb_group.name, vdb_name
    )
    # PEP 8: compare to None with `is`, not `==`.
    if vdb_obj is None:
        vdb_params = ASEProvisionParameters()
        vdb_params.container = ASEDBContainer()
        # --no_truncate_log disables truncate-on-checkpoint for the VDB.
        vdb_params.truncate_log_on_checkpoint = not arguments["--no_truncate_log"]
        vdb_params.container.group = vdb_group.reference
        vdb_params.container.name = vdb_name
        vdb_params.source = ASEVirtualSource()
        vdb_params.source_config = ASESIConfig()
        vdb_params.source_config.database_name = arguments["--db"]
        vdb_params.source_config.instance = ASEInstanceConfig()
        vdb_params.source_config.instance.host = environment_obj.host

        vdb_repo = find_dbrepo_by_environment_ref_and_name(
            engine,
            server,
            "ASEInstance",
            environment_obj.reference,
            arguments["--envinst"],
        )

        vdb_params.source_config.repository = vdb_repo.reference
        vdb_params.timeflow_point_parameters = set_timeflow_point(
            engine, server, container_obj
        )

        vdb_params.timeflow_point_parameters.container = container_obj.reference
        print_info("Provisioning " + vdb_name)
        database.provision(server, vdb_params)

        # Add the job into the jobs dictionary so we can track its progress
        jobs[engine["hostname"]] = server.last_job
        # return the job object to the calling statement so that we can tell if
        # a job was created or not (will return None, if no job)
        return server.last_job
    else:
        print_info(engine["hostname"] + ": " + vdb_name + " already exists.")
        return vdb_obj.reference
def execute_replication_job(obj_name):
    """
    Execute a replication job immediately.
    :param obj_name: name of object to execute.
    """
    session = dx_session_obj.server_session
    try:
        spec_ref = find_obj_by_name(session, spec, obj_name).reference
        spec.execute(session, spec_ref)
        # Track the resulting job (when one was created) so it can be polled.
        if session.last_job:
            dx_session_obj.jobs[session.address] = session.last_job
        print_info('Successfully executed {}.\n'.format(obj_name))
    except (HttpError, RequestError, DlpxException, JobError) as e:
        print_exception('Could not execute job {}:\n{}'.format(obj_name, e))
    def server_wait(self):
        """
        Block until the Delphix Engine is up and a connection succeeds.

        Polls the system API every 3 seconds until a call completes
        without raising.

        No arguments
        """
        while True:
            try:
                system.get(self.server_session)
                break
            # Catch Exception rather than using a bare `except:` so that
            # KeyboardInterrupt/SystemExit can still break out of the loop.
            except Exception:
                # Engine not ready yet (e.g. still booting) -- keep polling.
                pass
            print_info("Waiting for Delphix Engine to be ready")
            sleep(3)
def execute_replication_job(obj_name):
    """
    Execute a replication job immediately.
    :param obj_name: name of object to execute.
    """
    try:
        target_ref = find_obj_by_name(dx_session_obj.server_session, spec,
                                      obj_name).reference
        spec.execute(dx_session_obj.server_session, target_ref)
        last_job = dx_session_obj.server_session.last_job
        # Track the resulting job (when one was created) so it can be polled.
        if last_job:
            dx_session_obj.jobs[dx_session_obj.server_session.address] = \
                last_job
        print_info('Successfully executed {}.\n'.format(obj_name))
    except (HttpError, RequestError, DlpxException, JobError) as e:
        print_exception('Could not execute job {}:\n{}'.format(obj_name, e))
Example #30
0
def create_mssql_vdb(engine, jobs, vdb_group, vdb_name, environment_obj, container_obj):
    """
    Create a MSSQL VDB.

    engine: dict describing the engine (expects 'hostname')
    jobs: dict used to track provisioning jobs, keyed by engine hostname
    vdb_group: group object that will contain the new VDB
    vdb_name: name of the VDB to create
    environment_obj: target environment object (provides .reference)
    container_obj: source container to provision from

    Returns the last job reference when a provision job was started, the
    reference of the existing VDB when one with this name already exists,
    or None when no timeflow point could be resolved.
    """
    vdb_obj = find_database_by_name_and_group_name(
        engine, dx_session_obj.server_session, vdb_group.name, vdb_name
    )
    # PEP 8: compare to None with `is`, not `==`.
    if vdb_obj is None:
        vdb_params = MSSqlProvisionParameters()
        vdb_params.container = MSSqlDatabaseContainer()
        vdb_params.container.group = vdb_group.reference
        vdb_params.container.name = vdb_name
        vdb_params.source = MSSqlVirtualSource()
        vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False
        vdb_params.source_config = MSSqlSIConfig()
        vdb_params.source_config.database_name = arguments["--db"]

        vdb_params.source_config.repository = find_dbrepo(
            dx_session_obj.server_session,
            "MSSqlInstance",
            environment_obj.reference,
            arguments["--envinst"],
        ).reference

        vdb_params.timeflow_point_parameters = set_timeflow_point(
            engine, dx_session_obj.server_session, container_obj
        )
        if not vdb_params.timeflow_point_parameters:
            # No usable timeflow point -- nothing to provision.
            return
        vdb_params.timeflow_point_parameters.container = container_obj.reference
        print_info(engine["hostname"] + ":Provisioning " + vdb_name)
        database.provision(dx_session_obj.server_session, vdb_params)
        # Add the job into the jobs dictionary so we can track its progress
        jobs[engine["hostname"]] = dx_session_obj.server_session.last_job
        # return the job object to the calling statement so that we can tell if
        # a job was created or not (will return None, if no job)
        return dx_session_obj.server_session.last_job
    else:
        print_info(engine["hostname"] + ": " + vdb_name + " already exists.")
        return vdb_obj.reference
def delete_replication_job():
    """
    Delete a replication job.
    :return: Reference to the spec object
    """
    try:
        spec_ref = find_obj_by_name(dx_session_obj.server_session, spec,
                                    arguments['--delete']).reference
        spec.delete(dx_session_obj.server_session, spec_ref)
        # Track the resulting job (when one was created) so it can be polled.
        if dx_session_obj.server_session.last_job:
            dx_session_obj.jobs[dx_session_obj.server_session.address] = \
                dx_session_obj.server_session.last_job
        print_info('Successfully deleted {}.\n'.format(arguments['--delete']))

    except (HttpError, RequestError, DlpxException) as e:
        print_exception('Was not able to delete {}:\n{}'.format(
            arguments['--delete'], e))
def delete_replication_job():
    """
    Delete a replication job.
    :return: Reference to the spec object
    """
    session = dx_session_obj.server_session
    try:
        spec.delete(session,
                    find_obj_by_name(session, spec,
                                     arguments['--delete']).reference)
        # Track the resulting job (when one was created) so it can be polled.
        if session.last_job:
            dx_session_obj.jobs[session.address] = session.last_job
        print_info('Successfully deleted {}.\n'.format(arguments['--delete']))

    except (HttpError, RequestError, DlpxException) as e:
        print_exception('Was not able to delete {}:\n{}'.format(
            arguments['--delete'], e))
Example #33
0
def share_bookmark(dlpx_obj, bookmark_name):
    """
    Share a bookmark

    :param dlpx_obj: Virtualization Engine session object
    :param bookmark_name: Name of the bookmark to share
    """
    session = dlpx_obj.server_session
    try:
        bookmark_ref = get_obj_reference(session, bookmark,
                                         bookmark_name).pop()
        bookmark.share(session, bookmark_ref)
        print_info('JS Bookmark {} was shared successfully.'.format(
            bookmark_name))
    except (DlpxException, HttpError, RequestError) as e:
        print_exception('\nERROR: The bookmark {} could not be shared. The '
                        'error was:\n\n{}'.format(bookmark_name, e))
def create_mssql_vdb(engine, jobs, vdb_group, vdb_name,
                     environment_obj, container_obj):
    '''
    Create a MSSQL VDB.

    engine: dict describing the engine (expects 'hostname')
    jobs: dict used to track provisioning jobs, keyed by engine hostname
    vdb_group: group object that will contain the new VDB
    vdb_name: name of the VDB to create
    environment_obj: target environment object (provides .reference)
    container_obj: source container to provision from

    Returns the last job reference when a provision job was started, the
    reference of the existing VDB when one with this name already exists,
    or None when no timeflow point could be resolved.
    '''
    vdb_obj = find_database_by_name_and_group_name(
        engine, dx_session_obj.server_session, vdb_group.name, vdb_name)
    # PEP 8: compare to None with `is`, not `==`.
    if vdb_obj is None:
        vdb_params = MSSqlProvisionParameters()
        vdb_params.container = MSSqlDatabaseContainer()
        vdb_params.container.group = vdb_group.reference
        vdb_params.container.name = vdb_name
        vdb_params.source = MSSqlVirtualSource()
        vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False
        vdb_params.source_config = MSSqlSIConfig()
        vdb_params.source_config.database_name = arguments['--db']

        vdb_params.source_config.repository = find_dbrepo(
            dx_session_obj.server_session, 'MSSqlInstance',
            environment_obj.reference, arguments['--envinst']).reference

        vdb_params.timeflow_point_parameters = set_timeflow_point(
            engine, dx_session_obj.server_session, container_obj)
        if not vdb_params.timeflow_point_parameters:
            # No usable timeflow point -- nothing to provision.
            return
        vdb_params.timeflow_point_parameters.container = \
            container_obj.reference
        print_info(engine["hostname"] + ":Provisioning " + vdb_name)
        database.provision(dx_session_obj.server_session, vdb_params)
        #Add the job into the jobs dictionary so we can track its progress
        jobs[engine["hostname"]] = dx_session_obj.server_session.last_job
        #return the job object to the calling statement so that we can tell if
        # a job was created or not (will return None, if no job)
        return dx_session_obj.server_session.last_job
    else:
        print_info(engine["hostname"] + ": " + vdb_name + " already exists.")
        return vdb_obj.reference
Example #35
0
def refresh_container(dlpx_obj, container_name):
    """
    Refreshes a container

    dlpx_obj: Virtualization Engine session object
    container_name: Name of the container to refresh
    """

    # dict.keys() is not indexable on Python 3 (it returns a view);
    # materialize it so this works on both Python 2 and 3.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    try:
        container.refresh(dlpx_obj.server_session,
                          get_obj_reference(dlpx_obj.server_session,
                                            container, container_name).pop())
        # Track the refresh job so the caller can poll it.
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info('The container {} was refreshed.'.format(container_name))
    except (DlpxException, RequestError, HttpError) as e:
        print_exception('\nContainer {} was not refreshed. The error '
                        'was:\n{}\n'.format(container_name, e))
def create_ase_vdb(engine, server, jobs, vdb_group, vdb_name, environment_obj, 
                   container_obj):
    '''
    Create a Sybase ASE VDB.

    engine: dict describing the engine (expects 'hostname')
    server: Delphix Engine session object
    jobs: dict used to track provisioning jobs, keyed by engine hostname
    vdb_group: group object that will contain the new VDB
    vdb_name: name of the VDB to create
    environment_obj: target environment object (provides .host, .reference)
    container_obj: source container to provision from

    Returns the last job reference when a provision job was started, or the
    reference of the existing VDB when one with this name already exists.
    '''
    vdb_obj = find_database_by_name_and_group_name(engine, server, 
                                                   vdb_group.name, vdb_name)
    # PEP 8: compare to None with `is`, not `==`.
    if vdb_obj is None:
        vdb_params = ASEProvisionParameters()
        vdb_params.container = ASEDBContainer()
        # --no_truncate_log disables truncate-on-checkpoint for the VDB.
        vdb_params.truncate_log_on_checkpoint = \
            not arguments['--no_truncate_log']
        vdb_params.container.group = vdb_group.reference
        vdb_params.container.name = vdb_name
        vdb_params.source = ASEVirtualSource()
        vdb_params.source_config = ASESIConfig()
        vdb_params.source_config.database_name = arguments['--db']
        vdb_params.source_config.instance = ASEInstanceConfig()
        vdb_params.source_config.instance.host = environment_obj.host

        vdb_repo = find_dbrepo_by_environment_ref_and_name(engine, server, 
                                                     "ASEInstance", 
                                                     environment_obj.reference,
                                                     arguments['--envinst'])

        vdb_params.source_config.repository = vdb_repo.reference
        vdb_params.timeflow_point_parameters = set_timeflow_point(engine, 
                                                                  server, 
                                                                  container_obj)

        vdb_params.timeflow_point_parameters.container = container_obj.reference
        print_info("Provisioning " + vdb_name)
        database.provision(server, vdb_params)

        #Add the job into the jobs dictionary so we can track its progress
        jobs[engine["hostname"]] = server.last_job
        #return the job object to the calling statement so that we can tell if
        # a job was created or not (will return None, if no job)
        return server.last_job
    else:
        print_info(engine["hostname"] + ": " + vdb_name + " already exists.")
        return vdb_obj.reference
def activate_branch(dlpx_obj, branch_name):
    """
    Activates a branch

    :param dlpx_obj: Virtualization Engine session object
    :param branch_name: Name of the branch to activate
    """

    # dict.keys() is not indexable on Python 3 (it returns a view);
    # materialize it so this works on both Python 2 and 3.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    try:
        branch_obj = find_obj_by_name(dlpx_obj.server_session,
                                      branch, branch_name)
        branch.activate(dlpx_obj.server_session, branch_obj.reference)
        # Track the activation job so the caller can poll it.
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info('The branch {} was activated successfully.'.format(
            branch_name))
    except RequestError as e:
        print_exception('\nAn error occurred activating the '
                        'branch:\n{}'.format(e))
Example #38
0
def delete_bookmark(dlpx_obj, bookmark_name):
    """
    Deletes a bookmark

    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    :param bookmark_name: Bookmark to delete
    :type bookmark_name: str
    """

    try:
        bookmark.delete(dlpx_obj.server_session,
                        get_obj_reference(dlpx_obj.server_session,
                                          bookmark, bookmark_name).pop())
        print_info('The bookmark {} was deleted successfully.'.format(
            bookmark_name))
    except (DlpxException, HttpError, RequestError) as e:
        # Format the exception itself rather than e.message:
        # BaseException.message was deprecated in Python 2.6 (PEP 352)
        # and removed in Python 3.
        print_exception('\nERROR: The bookmark {} was not deleted. The '
                        'error was:\n\n{}'.format(bookmark_name, e))
def update_branch(dlpx_obj, branch_name):
    """
    Update a JS branch by name, sending an empty JSBranch payload.

    :param dlpx_obj: Virtualization Engine session object
    :param branch_name: Name of the branch to update
    """

    session = dlpx_obj.server_session
    update_payload = JSBranch()
    try:
        target_ref = find_obj_by_name(session, branch, branch_name).reference
        branch.update(session, target_ref, update_payload)
        print_info('The branch {} was updated successfully.'.format(
            branch_name))
    except (DlpxException, HttpError, RequestError) as e:
        print_exception('\nERROR: The branch could not be updated. The '
                        'error was:\n\n{}'.format(e))
def rewind_database(dlpx_obj, vdb_name, timestamp, timestamp_type='SNAPSHOT'):
    """
    This function performs the rewind (rollback)

    dlpx_obj: Virtualization Engine session object
    vdb_name: VDB to be rewound
    timestamp: Point in time to rewind the VDB
    timestamp_type: The type of timestamp being used for the rewind
    """

    # dict.keys() is not indexable on Python 3 (it returns a view);
    # materialize it so this works on both Python 2 and 3.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    dx_timeflow_obj = DxTimeflow(dlpx_obj.server_session)
    container_obj = find_obj_by_name(dlpx_obj.server_session, database,
                                     vdb_name)
    # Sanity check to make sure our container object has a reference
    if container_obj.reference:
        try:
            if container_obj.virtual is not True:
                raise DlpxException('{} in engine {} is not a virtual object. '
                                    'Skipping.\n'.format(container_obj.name,
                                    engine_name))
            elif container_obj.staging is True:
                # Fixed message: this branch rejects staging objects; it
                # previously (incorrectly) said "is a virtual object".
                raise DlpxException('{} in engine {} is a staging object. '
                                    'Skipping.\n'.format(container_obj.name,
                                    engine_name))
            elif container_obj.runtime.enabled == "ENABLED":
                print_info('\nINFO: {} Rewinding {} to {}\n'.format(
                           engine_name, container_obj.name, timestamp))

        # This exception is raised if rewinding a vFiles VDB
        # since AppDataContainer does not have virtual, staging or
        # enabled attributes.
        except AttributeError:
            pass

        print_debug('{}: Type: {}'.format(engine_name, container_obj.type))

        # If the vdb is a Oracle type, we need to use a OracleRollbackParameters
        if str(container_obj.reference).startswith("ORACLE"):
            rewind_params = OracleRollbackParameters()
        else:
            rewind_params = RollbackParameters()
        rewind_params.timeflow_point_parameters = \
            dx_timeflow_obj.set_timeflow_point(container_obj, timestamp_type,
                                               timestamp)
        print_debug('{}: {}'.format(engine_name, str(rewind_params)))
        try:
            # Rewind the VDB
            database.rollback(dlpx_obj.server_session, container_obj.reference,
                              rewind_params)
            # Track the rollback job so the caller can poll it.
            dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
            print_info('VDB {} was rolled back.'.format(container_obj.name))
        except (RequestError, HttpError, JobError) as e:
            print_exception('ERROR: {} encountered an error on {}'
                            ' during the rewind process:\n{}'.format(
                                engine_name, container_obj.name, e))
    # Don't do anything if the database is disabled
    else:
        print_info('{}: {} is not enabled. Skipping sync.'.format(
            engine_name, container_obj.name))
def rewind_database(dlpx_obj, vdb_name, timestamp, timestamp_type='SNAPSHOT'):
    """
    This function performs the rewind (rollback)

    dlpx_obj: Virtualization Engine session object
    vdb_name: VDB to be rewound
    timestamp: Point in time to rewind the VDB
    timestamp_type: The type of timestamp being used for the rewind
    """

    # dict.keys() is not indexable on Python 3 (it returns a view);
    # materialize it so this works on both Python 2 and 3.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    dx_timeflow_obj = DxTimeflow(dlpx_obj.server_session)
    container_obj = find_obj_by_name(dlpx_obj.server_session, database,
                                     vdb_name)
    # Sanity check to make sure our container object has a reference
    if container_obj.reference:
        try:
            if container_obj.virtual is not True:
                raise DlpxException('{} in engine {} is not a virtual object. '
                                    'Skipping.\n'.format(container_obj.name,
                                    engine_name))
            elif container_obj.staging is True:
                # Fixed message: this branch rejects staging objects; it
                # previously (incorrectly) said "is a virtual object".
                raise DlpxException('{} in engine {} is a staging object. '
                                    'Skipping.\n'.format(container_obj.name,
                                    engine_name))
            elif container_obj.runtime.enabled == "ENABLED":
                print_info('\nINFO: {} Rewinding {} to {}\n'.format(
                           engine_name, container_obj.name, timestamp))

        # This exception is raised if rewinding a vFiles VDB
        # since AppDataContainer does not have virtual, staging or
        # enabled attributes.
        except AttributeError:
            pass

        print_debug('{}: Type: {}'.format(engine_name, container_obj.type))

        # If the vdb is a Oracle type, we need to use a OracleRollbackParameters
        if str(container_obj.reference).startswith("ORACLE"):
            rewind_params = OracleRollbackParameters()
        else:
            rewind_params = RollbackParameters()
        rewind_params.timeflow_point_parameters = \
            dx_timeflow_obj.set_timeflow_point(container_obj, timestamp_type,
                                               timestamp)
        print_debug('{}: {}'.format(engine_name, str(rewind_params)))
        try:
            # Rewind the VDB
            database.rollback(dlpx_obj.server_session, container_obj.reference,
                              rewind_params)
            # Track the rollback job so the caller can poll it.
            dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
            print_info('VDB {} was rolled back.'.format(container_obj.name))
        except (RequestError, HttpError, JobError) as e:
            print_exception('ERROR: {} encountered an error on {}'
                            ' during the rewind process:\n{}'.format(
                                engine_name, container_obj.name, e))
    # Don't do anything if the database is disabled
    else:
        print_info('{}: {} is not enabled. Skipping sync.'.format(
            engine_name, container_obj.name))
def list_templates(dlpx_obj):
    """
    List all templates on a given engine

    dlpx_obj: Virtualization Engine session object

    Raises DlpxException when the templates cannot be retrieved.
    """

    header = 'Name, Reference, Active Branch, Last Updated'

    try:
        # print(...) is valid on both Python 2 (parenthesized expression)
        # and Python 3 (function call); the bare print statement was py2-only.
        print(header)
        js_templates = template.get_all(dlpx_obj.server_session)
        for js_template in js_templates:
            # Drop the last 5 characters of the timestamp before conversion
            # -- assumed to be a timezone suffix; TODO confirm format.
            last_updated = convert_timestamp(dlpx_obj.server_session,
                                             js_template.last_updated[:-5])
            print_info('{}, {}, {}, {}'.format(js_template.name,
                                               js_template.reference,
                                               js_template.active_branch,
                                               last_updated))
    except (DlpxException, HttpError, RequestError) as e:
        # Format the exception itself rather than e.message (removed in
        # Python 3, deprecated since Python 2.6).
        raise DlpxException('\nERROR: The templates could not be listed. '
                            'The error was:\n\n{}'.format(e))
def create_branch(dlpx_obj,
                  branch_name,
                  container_name,
                  template_name=None,
                  bookmark_name=None):
    """
    Create the JS Branch

    :param dlpx_obj: Virtualization Engine session object
    :param branch_name: Name of the branch to create
    :param container_name: Name of the container to use
    :param template_name: Name of the template to use
    :param bookmark_name: Name of the bookmark to use
    """

    js_branch = JSBranchCreateParameters()
    js_branch.name = branch_name
    # dict.keys() is not indexable on Python 3 (it returns a view);
    # materialize it so this works on both Python 2 and 3.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    data_container_obj = find_obj_by_name(dlpx_obj.server_session, container,
                                          container_name)
    js_branch.data_container = data_container_obj.reference

    if bookmark_name:
        # Branch from a specific bookmark when one is given.
        js_branch.timeline_point_parameters = JSTimelinePointBookmarkInput()
        js_branch.timeline_point_parameters.bookmark = find_obj_by_name(
            dlpx_obj.server_session, bookmark, bookmark_name).reference
    elif template_name:
        # Otherwise branch from the latest time on the named template.
        source_layout_ref = find_obj_by_name(dlpx_obj.server_session, template,
                                             template_name).reference
        js_branch.timeline_point_parameters = JSTimelinePointLatestTimeInput()
        js_branch.timeline_point_parameters.source_data_layout = source_layout_ref

    try:
        branch.create(dlpx_obj.server_session, js_branch)
        # Track the creation job so the caller can poll it.
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
    except (DlpxException, RequestError, HttpError) as e:
        print_exception("\nThe branch was not created. The error was:"
                        "\n{}".format(e))
    else:
        # Only report success when create() did not raise; previously the
        # success message was printed even after a failure.
        print_info("JS Branch {} was created successfully.".format(branch_name))
def create_branch(dlpx_obj, branch_name, container_name, template_name=None,
                  bookmark_name=None):
    """
    Create the JS Branch

    :param dlpx_obj: Virtualization Engine session object
    :param branch_name: Name of the branch to create
    :param container_name: Name of the container to use
    :param template_name: Name of the template to use (only consulted when
        bookmark_name is not given)
    :param bookmark_name: Name of the bookmark to use
    """

    js_branch = JSBranchCreateParameters()
    js_branch.name = branch_name
    # list() keeps this working on both Python 2 and 3; dict views are not
    # indexable on Python 3.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    data_container_obj = find_obj_by_name(dlpx_obj.server_session,
                                          container, container_name)
    js_branch.data_container = data_container_obj.reference

    # A bookmark takes precedence over a template as the timeline point.
    if bookmark_name:
        js_branch.timeline_point_parameters = JSTimelinePointBookmarkInput()
        js_branch.timeline_point_parameters.bookmark = find_obj_by_name(
            dlpx_obj.server_session, bookmark, bookmark_name).reference
    elif template_name:
        source_layout_ref = find_obj_by_name(dlpx_obj.server_session,
                                             template, template_name).reference
        js_branch.timeline_point_parameters = JSTimelinePointLatestTimeInput()
        js_branch.timeline_point_parameters.source_data_layout = \
            source_layout_ref

    try:
        branch.create(dlpx_obj.server_session, js_branch)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
    except (DlpxException, RequestError, HttpError) as e:
        print_exception('\nThe branch was not created. The error was:'
                        '\n{}'.format(e))
    else:
        # Only report success when create() did not raise; previously this
        # message was printed even after a failure.
        print_info('JS Branch {} was created successfully.'.format(
            branch_name))
def create_replication_job():
    """
    Build a replication spec from the global CLI arguments and submit it.

    :return: Reference to the spec object
    """
    repl_spec = ReplicationSpec()
    repl_spec.name = arguments["--rep_name"]
    repl_spec.target_host = arguments["--target_host"]
    repl_spec.target_principal = arguments["--target_user"]
    repl_spec.target_credential = {
        "type": "PasswordCredential",
        "password": arguments["--target_pw"],
    }
    repl_spec.object_specification = ReplicationList()
    repl_spec.schedule = arguments["--schedule"]
    repl_spec.encrypted = True

    # Optional tuning knobs, applied only when supplied on the command line.
    if arguments["--num_cons"]:
        repl_spec.number_of_connections = int(arguments["--num_cons"])
    if arguments["--bandwidth"]:
        repl_spec.bandwidth_limit = int(arguments["--bandwidth"])
    if arguments["--enabled"]:
        repl_spec.enabled = True

    try:
        obj_names = arguments["--rep_objs"].split(",")
        repl_spec.object_specification.objects = find_obj_specs(
            dx_session_obj.server_session, obj_names)

        ref = spec.create(dx_session_obj.server_session, repl_spec)
        # Track the submitted job (keyed by engine address) for polling.
        last_job = dx_session_obj.server_session.last_job
        if last_job:
            dx_session_obj.jobs[
                dx_session_obj.server_session.address] = last_job
        print_info("Successfully created {} with reference "
                   "{}\n".format(arguments["--rep_name"], ref))

    except (HttpError, RequestError, DlpxException) as e:
        print_exception("Could not create replication job {}:\n{}".format(
            arguments["--rep_name"], e))
# Example #46
def update_jobs_dictionary(engine, server, jobs):
    """
    Check each tracked job, report its state, and drop finished jobs.

    :param engine: Dictionary describing the engine the jobs belong to
    :param server: Delphix Engine session used to query job status
    :param jobs: Dict of job references keyed by name; pruned in place
    :return: The number of jobs still running.
    """
    # Establish the running jobs counter, as we are about to update the count
    # from the jobs report.
    i = 0
    # Snapshot the keys before iterating: entries are deleted below, and
    # mutating a dict while iterating its live key view fails on Python 3.
    for j in list(jobs.keys()):
        job_obj = job.get(server, jobs[j])
        print_debug("%s: %s" % (engine["hostname"], str(job_obj)), debug)
        # The keys are plain strings elsewhere in this file (engine name or
        # address), so print the key itself; strings have no .name attribute.
        print_info("%s: %s: %s" % (engine["hostname"], j, job_obj.job_state))

        if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
            # If the job is in a non-running state, remove it from the running
            # jobs list.
            del jobs[j]
        else:
            # If the job is in a running state, increment the running job count.
            i += 1
    return i
def run_job(dlpx_obj, config_file_path):
    """
    This function runs the main_workflow asynchronously against all the
    servers specified

    dlpx_obj: Virtualization Engine session object
    config_file_path: path containing the dxtools.conf file.

    :raises DlpxException: when the named or default engine cannot be found
    """
    # Create an empty list to store threads we create.
    threads = []
    engine = None

    # If the --all argument was given, run against every engine in dxtools.conf
    if arguments['--all']:
        print_info('Executing against all Delphix Engines in the dxtools.conf')

        try:
            # For each server in the dxtools.conf...
            for delphix_engine in dlpx_obj.dlpx_engines:
                engine = dlpx_obj.dlpx_engines[delphix_engine]
                # Create a new thread and add it to the list.
                threads.append(main_workflow(engine, dlpx_obj))
        except DlpxException as e:
            print_exception('Error encountered in run_job():\n{}'.format(e))
            sys.exit(1)
    elif arguments['--all'] is False:
        # Else if the --engine argument was given, test to see if the engine
        # exists in dxtools.conf
        if arguments['--engine']:
            try:
                engine = dlpx_obj.dlpx_engines[arguments['--engine']]
                print_info('Executing against Delphix Engine: {}\n'.format(
                           arguments['--engine']))

            except (DlpxException, RequestError, KeyError):
                # The message previously mixed a %s placeholder with
                # str.format, so config_file_path was never rendered.
                raise DlpxException('\nERROR: Delphix Engine {} cannot be '
                                    'found in {}. Please check your value '
                                    'and try again. Exiting.\n'.format(
                                        arguments['--engine'],
                                        config_file_path))
        else:
            # Else search for a default engine in the dxtools.conf
            for delphix_engine in dlpx_obj.dlpx_engines:
                if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true':
                    engine = dlpx_obj.dlpx_engines[delphix_engine]
                    print_info('Executing against the default Delphix Engine '
                               'in the dxtools.conf: {}'.format(
                                   dlpx_obj.dlpx_engines[delphix_engine]
                                   ['hostname']))
                    # Stop at the first engine flagged as default. The break
                    # was previously outside the if, aborting the search after
                    # the first engine whether or not it was the default.
                    break
            if engine is None:
                raise DlpxException('\nERROR: No default engine found. Exiting')
        # run the job against the engine
        threads.append(main_workflow(engine, dlpx_obj))

    # For each thread in the list...
    for each in threads:
        # join them back together so that we wait for all threads to complete
        # before moving on
        each.join()
def update_jobs_dictionary(engine, server, jobs):
    """
    Check each tracked job, report its state, and drop finished jobs.

    :param engine: Dictionary describing the engine the jobs belong to
    :param server: Delphix Engine session used to query job status
    :param jobs: Dict of job references keyed by name; pruned in place
    :return: The number of jobs still running.
    """
    # Establish the running jobs counter, as we are about to update the count
    # from the jobs report.
    i = 0
    # Snapshot the keys before iterating: entries are deleted below, and
    # mutating a dict while iterating its live key view fails on Python 3.
    for j in list(jobs.keys()):
        job_obj = job.get(server, jobs[j])
        print_debug('%s: %s' % (engine['hostname'], str(job_obj)), debug)
        # The keys are plain strings elsewhere in this file (engine name or
        # address), so print the key itself; strings have no .name attribute.
        print_info('%s: %s: %s' % (engine['hostname'], j,
                   job_obj.job_state))

        if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
            # If the job is in a non-running state, remove it from the running
            # jobs list.
            del jobs[j]
        else:
            # If the job is in a running state, increment the running job count.
            i += 1
    return i
def main():
    """
    Script entry point: establish the session, drive run_job, and map each
    failure mode onto a distinct exit code.
    """
    # These need to be visible to the rest of the script.
    global single_thread
    global debug

    start = time()
    single_thread = False

    try:
        session = GetSession()
        logging_est(arguments['--logdir'])
        print_debug(arguments)
        conf_path = arguments['--config']
        # Parse the dxtools.conf and put it into a dictionary
        session.get_config(conf_path)

        # Hand off to run_job, which processes main_workflow for every server.
        run_job(session, conf_path)

        print_info('script took {:.2f} minutes to get this far.'.format(
            time_elapsed(start)))

    # Here we handle what we do when the unexpected happens
    except SystemExit as e:
        # Propagate explicit sys.exit(#) calls untouched.
        sys.exit(e)

    except DlpxException as e:
        # Raised by our own helpers when an API call fails.
        print_exception('ERROR: Please check the ERROR message below:\n'
                        '{}'.format(e.message))
        sys.exit(2)

    except HttpError as e:
        # The connection to the Delphix Engine itself failed.
        print_exception('ERROR: Connection failed to the Delphix Engine. Please'
                        'check the ERROR message below:\n{}'.format(e.message))
        sys.exit(2)

    except JobError as e:
        # A Delphix job failed; surface the job so the failure is actionable.
        print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job))
        print_exception('{} took {:.2f} minutes to get this far'.format(
            basename(__file__), time_elapsed(start)))
        sys.exit(3)

    except KeyboardInterrupt:
        # Gracefully handle ctrl+c exits.
        print_debug('You sent a CTRL+C to interrupt the process')
        print_info('{} took {:.2f} minutes to get this far'.format(
            basename(__file__), time_elapsed(start)))
    except:
        # Everything else gets caught here.
        print_exception('{}\n{}'.format(sys.exc_info()[0],
                                        traceback.format_exc()))
        print_info("{} took {:.2f} minutes to get this far".format(
            basename(__file__), time_elapsed(start)))
        sys.exit(1)
def create_bookmark(dlpx_obj, bookmark_name, source_layout, branch_name=None,
                    tags=None, description=None):
    """
    Create the JS Bookmark

    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    :param bookmark_name: Name of the bookmark to create
    :type bookmark_name: basestring
    :param source_layout: Name of the source (template or container) to use
    :type source_layout: basestring
    :param branch_name: Name of the branch to use
    :type branch_name: basestring
    :param tags: Comma-delimited tags to attach to the bookmark
    :type tags: basestring
    :param description: Description of the bookmark
    :type description: basestring
    """

    branch_ref = None
    source_layout_ref = None
    # list() keeps this working on both Python 2 and 3; dict views are not
    # indexable on Python 3.
    engine_name = list(dlpx_obj.dlpx_engines.keys())[0]
    js_bookmark_params = JSBookmarkCreateParameters()
    if branch_name:
        # The layout may be a template or a container; try the template
        # namespace first and fall back to containers.
        try:
            source_layout_ref = find_obj_by_name(dlpx_obj.server_session,
                                                 template,
                                                 source_layout).reference
        except DlpxException:
            source_layout_ref = find_obj_by_name(
                dlpx_obj.server_session, container,
                source_layout).reference
        # Match on both name and data layout: branch names are not unique
        # across layouts.
        for branch_obj in branch.get_all(dlpx_obj.server_session):
            if branch_name == branch_obj.name and \
                    source_layout_ref == branch_obj.data_layout:
                branch_ref = branch_obj.reference
                break
        if branch_ref is None:
            raise DlpxException('Set the --data_layout parameter equal to '
                                'the data layout of the bookmark.\n')
    elif branch_name is None:
        try:
            (source_layout_ref, branch_ref) = find_obj_by_name(
                dlpx_obj.server_session, template, source_layout, True)
        except DlpxException:
            (source_layout_ref, branch_ref) = find_obj_by_name(
                dlpx_obj.server_session, container, source_layout, True)
        if branch_ref is None:
            # Report the layout we searched for; branch_name is always None
            # in this branch and previously produced "Could not find None".
            raise DlpxException('Could not find {} in engine {}'.format(
                source_layout, engine_name))
    js_bookmark_params.bookmark = JSBookmark()
    js_bookmark_params.bookmark.name = bookmark_name
    js_bookmark_params.bookmark.branch = branch_ref
    if tags:
        js_bookmark_params.bookmark.tags = tags.split(',')
    if description:
        js_bookmark_params.bookmark.description = description
    js_bookmark_params.timeline_point_parameters = {
        'sourceDataLayout': source_layout_ref, 'type':
            'JSTimelinePointLatestTimeInput'}
    try:
        bookmark.create(dlpx_obj.server_session, js_bookmark_params)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info('JS Bookmark {} was created successfully.'.format(
            bookmark_name))

    except (DlpxException, RequestError, HttpError) as e:
        print_exception('\nThe bookmark {} was not created. The error '
                        'was:\n\n{}'.format(bookmark_name, e))
def main_workflow(engine, dlpx_obj):
    """
    This function is where we create our main workflow.
    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix Engine
    simultaneously

    :param engine: Dictionary of engines
    :type engine: dictionary
    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    """

    try:
        # Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        # NOTE(review): the two literal fragments join with no separator
        # ("while{}") -- looks like a missing space; confirm intent.
        print_exception('ERROR: Engine {} encountered an error while' 
                        '{}:\n{}\n'.format(engine['hostname'],
                        arguments['--target'], e))
        sys.exit(1)

    # Single sentinel entry: the loop body performs the requested action
    # exactly once, then keeps polling until all submitted jobs finish.
    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo)> 0:
                    # Dispatch one environment action based on the
                    # mutually-exclusive CLI flags.
                    if arguments['--type'] == 'linux' or arguments['--type'] == 'windows':
                        env_name = arguments['--env_name']
                        host_user = arguments['--host_user']
                        pw = arguments['--pw']
                        ip_addr = arguments['--ip']
                        host_name = arguments['--connector_name']
                        if arguments['--type'] == 'linux':
                          # Linux environments additionally need a toolkit path.
                          toolkit_path = arguments['--toolkit']
                          create_linux_env(dlpx_obj, env_name, host_user,
                                        ip_addr, toolkit_path, pw)
                        else:
                          create_windows_env(dlpx_obj, env_name, host_user,
                                        ip_addr, pw, host_name,)

                    elif arguments['--delete']:
                        delete_env(dlpx_obj, arguments['--delete'])

                    elif arguments['--refresh']:
                        refresh_env(dlpx_obj, arguments['--refresh'])

                    elif arguments['--update_ase_pw']:
                        update_ase_pw(dlpx_obj)

                    elif arguments['--update_ase_user']:
                        update_ase_username(dlpx_obj)
                    elif arguments['--list']:
                        list_env(dlpx_obj)
                    elif arguments['--update_host']:
                        update_host_address(dlpx_obj, arguments['--old_host_address'], arguments['--new_host_address'])
                    elif arguments['--enable']:
                        enable_environment(dlpx_obj, arguments['--env_name'])
                    elif arguments['--disable']:
                        disable_environment(dlpx_obj, arguments['--env_name'])

                    # Mark the one-shot action as done so only job polling
                    # keeps the while-loop alive.
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                for j in dlpx_obj.jobs.keys():
                    job_obj = job.get(dlpx_obj.server_session, dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{} Environment: {}'.format(
                               engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state in 'RUNNING':
                        # If the job is in a running state, increment the
                        # running job count.
                        # NOTE(review): substring test -- any state that is a
                        # substring of 'RUNNING' matches; '==' is likely the
                        # intent. Confirm before changing.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('Error while creating the environment {}\n{}'.format(
            arguments['--env_name'], e))
        sys.exit(1)
def main(arguments):
    """
    Script entry point: parse the config, run the workflow, and translate
    each failure mode into an exit code.
    """
    # These need to be visible to the rest of the script.
    global single_thread
    global usebackup
    global time_start
    global config_file_path
    global dx_session_obj
    global debug

    if arguments['--debug']:
        debug = True

    try:
        dx_session_obj = GetSession()
        logging_est(arguments['--logdir'])
        print_debug(arguments)
        time_start = time()
        single_thread = False
        config_file_path = arguments['--config']
        # Parse the dxtools.conf and put it into a dictionary
        dx_session_obj.get_config(config_file_path)

        # Hand off to run_job, which processes main_workflow for every server.
        run_job()

        print_info('script took {:.2f} minutes to get this far.'.format(
            time_elapsed()))

    # From here on we map the unexpected onto exit codes.
    except SystemExit as e:
        # Propagate explicit sys.exit(#) calls untouched.
        sys.exit(e)

    except HttpError as e:
        # The connection to the Delphix Engine failed.
        print_exception('Connection failed to the Delphix Engine'
                        'Please check the ERROR message:\n{}'.format(e))
        sys.exit(1)

    except JobError as e:
        # A Delphix job failed; report it so the failure is actionable.
        print_exception('A job failed in the Delphix Engine')
        print_info('{} took {:.2f} minutes to get this far\n{}'.format(
                   basename(__file__), time_elapsed(), e))
        sys.exit(3)

    except KeyboardInterrupt:
        # Gracefully handle ctrl+c exits.
        print_debug("You sent a CTRL+C to interrupt the process")
        print_info('{} took {:.2f} minutes to get this far\n'.format(
                   basename(__file__), time_elapsed()))

    except:
        # Everything else gets caught here.
        print_exception(sys.exc_info()[0])
        print_info('{} took {:.2f} minutes to get this far\n'.format(
                   basename(__file__), time_elapsed()))
        sys.exit(1)
def main_workflow(engine, dlpx_obj):
    """
    This function is where we create our main workflow.
    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix Engine
    simultaneously

    :param engine: Dictionary of engines
    :param dlpx_obj: Virtualization Engine session object
    """

    #Establish these variables as empty for use later
    environment_obj = None
    source_objs = None

    try:
        #Setup the connection to the Delphix Engine
        dlpx_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])
    except DlpxException as e:
        print_exception('\nERROR: Engine {} encountered an error while '
                        'provisioning {}:\n{}\n'.format(engine['hostname'],
                                                        arguments['--target'],
                                                        e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    try:
        with dlpx_obj.job_mode(single_thread):
            while (len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0):
                if len(thingstodo) > 0:
                    if arguments['--create_branch']:
                        create_branch(dlpx_obj, arguments['--create_branch'],
                                      arguments['--container_name'],
                                      arguments['--template_name']
                                      if arguments['--template_name'] else None,
                                      arguments['--bookmark_name']
                                      if arguments['--bookmark_name'] else None)
                    elif arguments['--delete_branch']:
                        delete_branch(dlpx_obj, arguments['--delete_branch'])
                    elif arguments['--update_branch']:
                        update_branch(dlpx_obj, arguments['--update_branch'])
                    elif arguments['--activate_branch']:
                        activate_branch(dlpx_obj,
                                        arguments['--activate_branch'])
                    elif arguments['--list_branches']:
                        list_branches(dlpx_obj)
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                for j in dlpx_obj.jobs.keys():
                    job_obj = job.get(dlpx_obj.server_session,
                                      dlpx_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Provisioning JS Branch: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dlpx_obj.jobs[j]
                    elif job_obj.job_state in 'RUNNING':
                        # If the job is in a running state, increment the
                        # running job count.
                        i += 1
                    print_info('{}: {:d} jobs running.'.format(
                        engine['hostname'], i))
                    # If we have running jobs, pause before repeating the
                    # checks.
                    if len(dlpx_obj.jobs) > 0:
                        sleep(float(arguments['--poll']))
    except (DlpxException, RequestError, JobError, HttpError) as e:
        print_exception('\nError in js_branch: {}\n{}'.format(
            engine['hostname'], e))