Example #1
def find_snapshot_by_database_and_time(engine, server, database_obj,
                                       snap_time):
    """
    Find a snapshot object by database name and timestamp

    engine: Dictionary of engines from config file.
    server: A Delphix Engine session object.
    database_obj: The database object used to look up the snapshot
    snap_time: Timestamp of the snapshot
    """
    snapshots = snapshot.get_all(server, database=database_obj.reference)
    matches = []

    for snapshot_obj in snapshots:
        if (str(snapshot_obj.latest_change_point.timestamp) == snap_time or
                str(snapshot_obj.first_change_point.timestamp) == snap_time):

            matches.append(snapshot_obj)

    if len(matches) == 1:
        snap_match = get_obj_name(server, database, matches[0].container)
        print_debug(engine["hostname"] +
                    ": Found one and only one match. This is good.")
        print_debug(engine["hostname"] + ": " + snap_match)

        return matches[0]

    elif len(matches) > 1:
        print_debug(engine["hostname"] + ": " + matches)
        raise DlpxException("The time specified was not specific enough."
                            " More than one match found.\n")
    else:
        raise DlpxException("No matches found for the time specified.\n")
Example #2
    def get_config(self, config_file_path='./dxtools.conf'):
        """
        This method reads in the dxtools.conf file

        config_file_path: path to the configuration file.
                          Default: ./dxtools.conf
        """


        #First test to see that the file is there and we can open it
        try:
            with open(config_file_path) as config_file:

                #Now parse the file contents as json and turn them into a
                #python dictionary, throw an error if it isn't proper json
                config = json.loads(config_file.read())

        except IOError:
            raise DlpxException('\nERROR: Was unable to open {}. Please '
                                'check the path and permissions, and try '
                                'again.\n'.format(config_file_path))

        except (ValueError, TypeError, AttributeError) as e:
            raise DlpxException('\nERROR: Was unable to read {} as json. '
                                'Please check if the file is in a json format'
                                ' and try again.\n {}'.format(config_file_path,
                                                              e))

        #Create a dictionary of engines (removing the data node from the
        # dxtools.json, for easier parsing)
        for each in config['data']:
            self.dlpx_engines[each['hostname']] = each
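
For reference, a hedged sketch of the dxtools.conf layout this parser expects: a JSON document with a top-level "data" list, where each entry is later keyed by its "hostname". The field names mirror how the other examples index the engine dictionary (ip_address, username, password, default); the values are placeholders, not real credentials.

{
  "data": [
    {
      "hostname": "myengine",
      "ip_address": "myengine.example.com",
      "username": "delphix_admin",
      "password": "changeme",
      "default": "true"
    }
  ]
}
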
Example #3
def find_snapshot_by_database_and_name(engine, database_obj, snap_name):
    """
    Find snapshots by database and name. Return snapshot reference.

    engine: Dictionary of engines from config file.
    database_obj: Database object to find the snapshot against
    snap_name: Name of the snapshot
    """

    snapshots = snapshot.get_all(dx_session_obj.server_session,
                                 database=database_obj.reference)
    matches = []
    for snapshot_obj in snapshots:
        if str(snapshot_obj.name).startswith(snap_name):
            matches.append(snapshot_obj)

    for each in matches:
        print_debug(each.name, debug)

    if len(matches) == 1:
        print_debug(
            '%s: Found one and only one match. This is good.\n %s' %
            (engine['hostname'], matches[0]), debug)
        return matches[0]

    elif len(matches) > 1:
        raise DlpxException('%s: The name specified was not specific enough.'
                            ' More than one match found.\n' %
                            (engine['hostname'], ))

    else:
        raise DlpxException('%s: No matches found for the name specified.\n' %
                            (engine['hostname']))
Example #4
def find_snapshot_by_database_and_time(engine, database_obj, snap_time):
    snapshots = snapshot.get_all(
        dx_session_obj.server_session, database=database_obj.reference
    )
    matches = []

    for snapshot_obj in snapshots:
        if str(snapshot_obj.latest_change_point.timestamp).startswith(
            snap_time
        ):

            matches.append(snapshot_obj)

    if len(matches) == 1:
        print_debug(
            "%s: Found one and only one match. This is good.\n%s"
            % (engine["hostname"], matches[0]),
            debug,
        )

        return matches[0]

    elif len(matches) > 1:
        print_debug(matches, debug)

        raise DlpxException(
            "%s: The time specified was not specific enough. "
            "More than one match found.\n" % (engine["hostname"])
        )
    else:
        raise DlpxException(
            "%s: No matches found for the time specified.\n" % (engine["hostname"])
        )
Example #5
def list_timeflows(server):
    """
    Retrieve and print all timeflows for a given engine
    """

    ret_timeflow_dct = {}
    all_timeflows = timeflow.get_all(server)

    print("DB Name, Timeflow Name, Timestamp")

    for tfbm_lst in all_timeflows:
        try:

            db_name = get_obj_name(server, database, tfbm_lst.container)
            print("%s, %s, %s\n" % (
                str(db_name),
                str(tfbm_lst.name),
                str(tfbm_lst.parent_point.timestamp),
            ))

        except AttributeError:
            print("%s, %s\n" % (str(tfbm_lst.name), str(db_name)))

        except TypeError as e:
            raise DlpxException("Listing Timeflows encountered an error:\n%s" %
                                (e.message))

        except RequestError as e:
            dlpx_err = e.message
            raise DlpxException(dlpx_err.action)
Example #6
def rewind_database(dlpx_obj, vdb_name, timestamp, timestamp_type='SNAPSHOT'):
    """
    This function performs the rewind (rollback)

    dlpx_obj: Virtualization Engine session object
    vdb_name: VDB to be rewound
    timestamp: Point in time to rewind the VDB
    timestamp_type: The type of timestamp being used for the rewind
    """

    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    dx_timeflow_obj = DxTimeflow(dlpx_obj.server_session)
    container_obj = find_obj_by_name(dlpx_obj.server_session, database,
                                     vdb_name)
    # Sanity check to make sure our container object has a reference
    if container_obj.reference:
        try:
            if container_obj.virtual is not True:
                raise DlpxException('{} in engine {} is not a virtual object. '
                                    'Skipping.\n'.format(container_obj.name,
                                    engine_name))
            elif container_obj.staging is True:
                raise DlpxException('{} in engine {} is a staging database. '
                                    'Skipping.\n'.format(container_obj.name,
                                    engine_name))
            elif container_obj.runtime.enabled == "ENABLED":
                print_info('\nINFO: {} Rewinding {} to {}\n'.format(
                           engine_name, container_obj.name, timestamp))

        # This exception is raised if rewinding a vFiles VDB
        # since AppDataContainer does not have virtual, staging or
        # enabled attributes.
        except AttributeError:
            pass

        print_debug('{}: Type: {}'.format(engine_name, container_obj.type))

        # If the vdb is an Oracle type, we need to use an OracleRollbackParameters
        if str(container_obj.reference).startswith("ORACLE"):
            rewind_params = OracleRollbackParameters()
        else:
            rewind_params = RollbackParameters()
        rewind_params.timeflow_point_parameters = \
            dx_timeflow_obj.set_timeflow_point(container_obj, timestamp_type,
                                               timestamp)
        print_debug('{}: {}'.format(engine_name, str(rewind_params)))
        try:
            # Rewind the VDB
            database.rollback(dlpx_obj.server_session, container_obj.reference,
                              rewind_params)
            dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
            print_info('VDB {} was rolled back.'.format(container_obj.name))
        except (RequestError, HttpError, JobError) as e:
            print_exception('ERROR: {} encountered an error on {}'
                            ' during the rewind process:\n{}'.format(
                engine_name, container_obj.name, e))
    # Don't do anything if the container object has no reference
    else:
        print_info('{}: {} does not have a reference. Skipping.'.format(
            engine_name, container_obj.name))
Example #7
def run_job(dlpx_obj, config_file_path):
    """
    This function runs the main_workflow asynchronously against all the
    servers specified

    dlpx_obj: Virtualization Engine session object
    config_file_path: path containing the dxtools.conf file.
    """
    # Create an empty list to store threads we create.
    threads = []
    engine = None

    # If the --all argument was given, run against every engine in dxtools.conf
    if arguments['--all']:
        print_info('Executing against all Delphix Engines in the dxtools.conf')

        try:
            # For each server in the dxtools.conf...
            for delphix_engine in dlpx_obj.dlpx_engines:
                engine = dlpx_obj.dlpx_engines[delphix_engine]
                # Create a new thread and add it to the list.
                threads.append(main_workflow(engine, dlpx_obj))
        except DlpxException as e:
            print('Error encountered in run_job():\n{}'.format(e))
            sys.exit(1)
    elif arguments['--all'] is False:
        # Else if the --engine argument was given, test to see if the engine
        # exists in dxtools.conf
        if arguments['--engine']:
            try:
                engine = dlpx_obj.dlpx_engines[arguments['--engine']]
                print_info('Executing against Delphix Engine: {}\n'.format(
                           arguments['--engine']))

            except (DlpxException, RequestError, KeyError):
                raise DlpxException('\nERROR: Delphix Engine {} cannot be '
                                    'found in {}. Please check your value '
                                    'and try again. Exiting.\n'.format(
                                    arguments['--engine'], config_file_path))
        else:
            # Else search for a default engine in the dxtools.conf
            for delphix_engine in dlpx_obj.dlpx_engines:
                if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true':
                    engine = dlpx_obj.dlpx_engines[delphix_engine]
                    print_info('Executing against the default Delphix Engine '
                               'in the dxtools.conf: {}'.format(
                                   dlpx_obj.dlpx_engines[delphix_engine]['hostname']))
                    break
            if engine is None:
                raise DlpxException('\nERROR: No default engine found. Exiting')
        # run the job against the engine
        threads.append(main_workflow(engine, dlpx_obj))

    # For each thread in the list...
    for each in threads:
        # join them back together so that we wait for all threads to complete
        # before moving on
        each.join()
Example #8
    def serversess(self, f_engine_address, f_engine_username,
                   f_engine_password, f_engine_namespace='DOMAIN'):
        """
        Method to setup the session with the Virtualization Engine

        f_engine_address: The Virtualization Engine's address (IP/DNS Name)
        f_engine_username: Username to authenticate
        f_engine_password: User's password
        f_engine_namespace: Namespace to use for this session. Default: DOMAIN
        """

#        if use_https:
#            if hasattr(ssl, '_create_unverified_context'):
#                ssl._create_default_https_context = \
#                    ssl._create_unverified_context

        try:
            if f_engine_password:
                self.server_session = DelphixEngine(f_engine_address,
                                                    f_engine_username,
                                                    f_engine_password,
                                                    f_engine_namespace)
            elif f_engine_password is None:
                self.server_session = DelphixEngine(f_engine_address,
                                                    f_engine_username,
                                                    None, f_engine_namespace)

        except (HttpError, RequestError, JobError) as e:
            raise DlpxException('ERROR: An error occurred while authenticating'
                                ' to {}:\n {}\n'.format(f_engine_address, e))
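
A hedged sketch of how this session setup is typically chained with get_config() from Example #2, assuming GetSession is the session class referenced later in these examples (lib.GetSession.GetSession); the host and credential values come from the parsed dxtools.conf entry.

dx_session_obj = GetSession()
dx_session_obj.get_config('./dxtools.conf')
engine = dx_session_obj.dlpx_engines.values()[0]  # first engine from dxtools.conf
dx_session_obj.serversess(engine['ip_address'], engine['username'],
                          engine['password'])
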
Example #9
def find_dbrepo_by_environment_ref_and_name(engine, repo_type,
                                            f_environment_ref, f_name):
    '''
    Function to find database repository objects by environment reference and 
    name, and return the object's reference as a string
    You might use this function to find MSSQL database repos.
    '''

    print_debug(
        '%s: Searching objects in the %s class for one with the '
        'environment reference of %s and a name of %s.' %
        (engine['hostname'], repo_type, f_environment_ref, f_name), debug)

    obj_ref = ''
    all_objs = repository.get_all(server, environment=f_environment_ref)

    for obj in all_objs:
        if (repo_type == 'MSSqlInstance' or repo_type == 'ASEInstance'):
            if (obj.type == repo_type and obj.name == f_name):
                print_debug(
                    '%s: Found a match %s' %
                    (engine['hostname'], str(obj.reference)), debug)
                return obj

        elif repo_type == 'Unstructured Files':
            if obj.name == f_name:
                print_debug(
                    '%s: Found a match %s' %
                    (engine['hostname'], str(obj.reference)), debug)
                return obj

    raise DlpxException('%s: No Repo match found for type %s\n' %
                        (engine['hostname'], repo_type))
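
A short usage sketch for the lookup above; the environment reference and instance name are placeholders, and it assumes the same module-level server session the function itself relies on.

repo_obj = find_dbrepo_by_environment_ref_and_name(
    engine, 'MSSqlInstance', 'WINDOWS_HOST_ENVIRONMENT-1', 'MSSQLSERVER')
print_info('Using repository {}'.format(repo_obj.reference))
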
Example #10
def find_dbrepo_by_environment_ref_and_install_path(engine, server,
                                                    install_type,
                                                    f_environment_ref,
                                                    f_install_path):
    '''
    Function to find database repository objects by environment reference and 
    install path, and return the object's reference as a string
    You might use this function to find Oracle and PostgreSQL database repos.
    '''
    print_debug(
        '%s: Searching objects in the %s class for one with the '
        'environment reference of %s and an install path of %s' %
        (engine['hostname'], install_type, f_environment_ref, f_install_path),
        debug)

    for obj in repository.get_all(server, environment=f_environment_ref):
        if install_type == 'PgSQLInstall':
            if (obj.type == install_type
                    and obj.installation_path == f_install_path):
                print_debug(
                    '%s: Found a match %s' %
                    (engine['hostname'], str(obj.reference)), debug)
                return obj

        elif install_type == 'OracleInstall':
            if (obj.type == install_type
                    and obj.installation_home == f_install_path):

                print_debug(
                    '%s: Found a match %s' %
                    (engine['hostname'], str(obj.reference)), debug)
                return obj
    raise DlpxException('%s: No Repo match found for type %s.\n' %
                        (engine["hostname"], install_type))
Example #11
def find_repo_by_environment_ref(
    engine, repo_type, f_environment_ref, f_install_path=None
):
    """
    Function to find unstructured file repository objects by environment
    reference and name, and return the object's reference as a string
    You might use this function to find Unstructured File repos.
    """

    print_debug(
        "\n%s: Searching objects in the %s class for one with the"
        "environment reference of %s\n"
        % (engine["hostname"], repo_type, f_environment_ref),
        debug,
    )

    obj_ref = ""
    all_objs = repository.get_all(
        dx_session_obj.server_session, environment=f_environment_ref
    )

    for obj in all_objs:
        if obj.name == repo_type:
            print_debug(
                "%s: Found a match %s" % (engine["hostname"], str(obj.reference)),
                debug,
            )
            return obj

        elif obj.type == repo_type:
            print_debug(
                "%s: Found a match %s" % (engine["hostname"], str(obj.reference)),
                debug,
            )
            return obj

    raise DlpxException(
        "%s: No Repo match found for type %s\n" % (engine["hostname"], repo_type)
    )
Example #12
def find_source_by_database(engine, database_obj):
    #The source tells us if the database is enabled/disabled, virtual,
    # vdb/dSource, or is a staging database.
    source_obj = source.get_all(server, database=database_obj.reference)

    #We'll just do a little sanity check here to ensure we only have a 1:1
    # result.
    if len(source_obj) == 0:
        raise DlpxException('%s: Did not find a source for %s. Exiting.\n' %
                            (engine['hostname'], database_obj.name))

    elif len(source_obj) > 1:
        raise DlpxException(
            '%s: More than one source returned for %s. '
            'Exiting.\n' %
            (engine['hostname'], database_obj.name))
    return source_obj
Example #13
def get_obj_name(server, f_object, obj_reference):
    """
    Return the object name from obj_reference

    server: A Delphix Engine session object.
    f_object: The delphixpy object type (module) used for the lookup
    obj_reference: The object reference to retrieve the name
    """

    try:
        obj_name = f_object.get(server, obj_reference)
        return obj_name.name

    except RequestError as e:
        raise DlpxException(e)

    except HttpError as e:
        raise DlpxException(e)
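
A quick usage sketch, assuming an authenticated server session; the container reference below is a placeholder.

db_name = get_obj_name(dx_session_obj.server_session, database,
                       'ORACLE_DB_CONTAINER-1')
print_info('Container name: {}'.format(db_name))
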
Example #14
def convert_dct_str(obj_dct):
    """
    Convert dictionary into a string for printing

    obj_dct: Dictionary to convert into a string
    :return: string object
    """
    js_str = ""

    if isinstance(obj_dct, dict):
        for js_db, js_jdbc in obj_dct.iteritems():
            if isinstance(js_jdbc, list):
                js_str += "{}: {}\n".format(js_db, ", ".join(js_jdbc))
            elif isinstance(js_jdbc, str):
                js_str += "{}: {}\n".format(js_db, js_jdbc)
    else:
        raise DlpxException("Passed a non-dictionary object to "
                            "convert_dct_str(): {}".format(type(obj_dct)))
    return js_str
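
A quick example of the expected output; the dictionary contents are made up.

sample_dct = {'jsdb1': ['jdbc:oracle:thin:@host1:1521:db1'],
              'jsdb2': 'jdbc:oracle:thin:@host2:1521:db2'}
print(convert_dct_str(sample_dct))
# jsdb1: jdbc:oracle:thin:@host1:1521:db1
# jsdb2: jdbc:oracle:thin:@host2:1521:db2
# (line order may vary with dictionary ordering)
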
Example #15
def list_templates(dlpx_obj):
    """
    List all templates on a given engine

    dlpx_obj: Virtualization Engine session object
    """

    header = 'Name, Reference, Active Branch, Last Updated'

    try:
        print(header)
        js_templates = template.get_all(dlpx_obj.server_session)
        for js_template in js_templates:
            last_updated = convert_timestamp(dlpx_obj.server_session,
                                             js_template.last_updated[:-5])
            print_info('{}, {}, {}, {}'.format(js_template.name,
                                            js_template.reference,
                                            js_template.active_branch,
                                            last_updated))
    except (DlpxException, HttpError, RequestError) as e:
        raise DlpxException('\nERROR: The templates could not be listed. '
                            'The error was:\n\n{}'.format(e.message))
Example #16
def find_dbrepo_by_environment_ref_and_name(
    engine, repo_type, f_environment_ref, f_name
):
    """
    Function to find database repository objects by environment reference and
    name, and return the object's reference as a string
    You might use this function to find MSSQL database repos.
    """

    print_debug(
        "%s: Searching objects in the %s class for one with the "
        "environment reference of %s and a name of %s."
        % (engine["hostname"], repo_type, f_environment_ref, f_name),
        debug,
    )

    obj_ref = ""
    all_objs = repository.get_all(server, environment=f_environment_ref)

    for obj in all_objs:
        if repo_type == "MSSqlInstance" or repo_type == "ASEInstance":
            if obj.type == repo_type and obj.name == f_name:
                print_debug(
                    "%s: Found a match %s" % (engine["hostname"], str(obj.reference)),
                    debug,
                )
                return obj

        elif repo_type == "Unstructured Files":
            if obj.name == f_name:
                print_debug(
                    "%s: Found a match %s" % (engine["hostname"], str(obj.reference)),
                    debug,
                )
                return obj

    raise DlpxException(
        "%s: No Repo match found for type %s\n" % (engine["hostname"], repo_type)
    )
Example #17
def vdb_operation(vdb_name, operation):
    """
    Function to start, stop, enable or disable a VDB
    """
    print_debug('Searching for %s reference.\n' % (vdb_name))

    vdb_obj = find_obj_by_name(dx_session_obj.server_session, source, vdb_name)

    try:
        if vdb_obj:
            if operation == 'start':
                source.start(dx_session_obj.server_session, vdb_obj.reference)
            elif operation == 'stop':
                source.stop(dx_session_obj.server_session, vdb_obj.reference)
            elif operation == 'enable':
                source.enable(dx_session_obj.server_session, vdb_obj.reference)
            elif operation == 'disable':
                source.disable(dx_session_obj.server_session,
                               vdb_obj.reference)

    except (RequestError, HttpError, JobError, AttributeError) as e:
        raise DlpxException('An error occurred while performing {} on {}:'
                            '\n{}\n'.format(operation, vdb_name, e))
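
Usage is a single call per VDB and operation; the VDB name below is a placeholder, and the calls assume the module-level dx_session_obj session the function uses.

vdb_operation('my_vdb', 'stop')
vdb_operation('my_vdb', 'start')
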
Example #18
def set_timeflow_point(engine, server, container_obj):
    """
    Return the timeflow point parameters for the timestamp specified.
    """

    if arguments["--timestamp_type"].upper() == "SNAPSHOT":
        if arguments["--timestamp"].upper() == "LATEST":
            print_debug("%s: Using the latest Snapshot." % (engine["hostname"]), debug)

            timeflow_point_parameters = TimeflowPointSemantic()
            timeflow_point_parameters.container = container_obj.reference
            timeflow_point_parameters.location = "LATEST_SNAPSHOT"

        elif arguments["--timestamp"].startswith("@"):
            print_debug("%s: Using a named snapshot" % (engine["hostname"]), debug)

            snapshot_obj = find_snapshot_by_database_and_name(
                engine, server, container_obj, arguments["--timestamp"]
            )

            if snapshot_obj != None:
                timeflow_point_parameters = TimeflowPointLocation()
                timeflow_point_parameters.timeflow = snapshot_obj.timeflow
                timeflow_point_parameters.location = (
                    snapshot_obj.latest_change_point.location
                )

            else:
                raise DlpxException(
                    "%s: Was unable to use the specified "
                    "snapshot %s for database %s\n"
                    % (engine["hostname"], arguments["--timestamp"], container_obj.name)
                )

        else:
            print_debug(
                "%s: Using a time-designated snapshot" % (engine["hostname"]), debug
            )

            snapshot_obj = find_snapshot_by_database_and_time(
                engine, server, container_obj, arguments["--timestamp"]
            )
            if snapshot_obj != None:
                timeflow_point_parameters = TimeflowPointTimestamp()
                timeflow_point_parameters.timeflow = snapshot_obj.timeflow
                timeflow_point_parameters.timestamp = (
                    snapshot_obj.latest_change_point.timestamp
                )
            else:
                raise DlpxException(
                    "%s: Was unable to find a suitable time "
                    " for %s for database %s.\n"
                    % (engine["hostname"], arguments["--timestamp"], container_obj.name)
                )

    elif arguments["--timestamp_type"].upper() == "TIME":
        if arguments["--timestamp"].upper() == "LATEST":
            timeflow_point_parameters = TimeflowPointSemantic()
            timeflow_point_parameters.location = "LATEST_POINT"
        else:
            raise DlpxException(
                "%s: Only supports a --timestamp value of "
                '"latest" when used with a timestamp_type '
                "of time" % (engine["hostname"])
            )

    else:
        raise DlpxException(
            "%s is not a valied timestamp_type. Exiting\n"
            % (arguments["--timestamp_type"])
        )

    timeflow_point_parameters.container = container_obj.reference
    return timeflow_point_parameters
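
A hedged sketch of how the returned object is consumed: it is assigned to the timeflow_point_parameters of a provision or rollback parameter object, as the provisioning examples below do via DxTimeflow. container_obj is assumed to be a database object found with find_obj_by_name.

vdb_params = OracleProvisionParameters()  # or RollbackParameters(), etc.
vdb_params.timeflow_point_parameters = set_timeflow_point(
    engine, dx_session_obj.server_session, container_obj)
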
Example #19
def create_bookmark(dlpx_obj,
                    bookmark_name,
                    source_layout,
                    branch_name=None,
                    tag=None,
                    description=None):
    """
    Create the JS Bookmark

    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    :param bookmark_name: Name of the bookmark to create
    :type bookmark_name: basestring
    :param source_layout: Name of the source (template or container) to use
    :type source_layout: basestring
    :param branch_name: Name of the branch to use
    :type branch_name: basestring
    :param tag: Tag to use for the bookmark
    :type tag: basestring
    :param description: Description of the bookmark
    :type description: basestring
    """

    branch_ref = None
    source_layout_ref = None
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    js_bookmark_params = JSBookmarkCreateParameters()
    if branch_name:
        try:
            source_layout_ref = find_obj_by_name(dlpx_obj.server_session,
                                                 template,
                                                 source_layout).reference
        except DlpxException:
            source_layout_ref = find_obj_by_name(dlpx_obj.server_session,
                                                 container,
                                                 source_layout).reference
        for branch_obj in branch.get_all(dlpx_obj.server_session):
            if branch_name == branch_obj.name and \
                    source_layout_ref == branch_obj.data_layout:
                branch_ref = branch_obj.reference
                break
        if branch_ref is None:
            raise DlpxException('Set the --data_layout parameter equal to '
                                'the data layout of the bookmark.\n')
    elif branch_name is None:
        try:
            (source_layout_ref,
             branch_ref) = find_obj_by_name(dlpx_obj.server_session, template,
                                            source_layout, True)
        except DlpxException:
            (source_layout_ref,
             branch_ref) = find_obj_by_name(dlpx_obj.server_session, container,
                                            source_layout, True)
        if branch_ref is None:
            raise DlpxException('Could not find {} in engine {}'.format(
                source_layout, engine_name))
    js_bookmark_params.bookmark = JSBookmark()
    js_bookmark_params.bookmark.name = bookmark_name
    js_bookmark_params.bookmark.branch = branch_ref
    if tag:
        js_bookmark_params.bookmark.tags = list()
        js_bookmark_params.bookmark.tags.append(tag)
    if description:
        js_bookmark_params.bookmark.description = description
    js_bookmark_params.timeline_point_parameters = {
        'sourceDataLayout': source_layout_ref,
        'type': 'JSTimelinePointLatestTimeInput'
    }
    try:
        bookmark.create(dlpx_obj.server_session, js_bookmark_params)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info(
            'JS Bookmark {} was created successfully.'.format(bookmark_name))

    except (DlpxException, RequestError, HttpError) as e:
        print_exception('\nThe bookmark {} was not created. The error '
                        'was:\n\n{}'.format(bookmark_name, e))
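
A short usage sketch: bookmarking the latest point of a template. dlpx_obj is assumed to be an authenticated lib.GetSession.GetSession object, and the template, branch, and tag names are placeholders.

create_bookmark(dlpx_obj, 'pre_release_bm', 'MyTemplate',
                branch_name='default', tag='release-1.2',
                description='Bookmark taken before the release test run')
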
Example #20
def run_job():
    """
    This function runs the main_workflow asynchronously against all the servers
    specified
    """
    # Create an empty list to store threads we create.
    threads = []
    engine = None

    # If the --all argument was given, run against every engine in dxtools.conf
    if arguments["--all"]:
        print_info("Executing against all Delphix Engines in the dxtools.conf")

        try:
            # For each server in the dxtools.conf...
            for delphix_engine in dx_session_obj.dlpx_engines:
                engine = dx_session_obj.dlpx_engines[delphix_engine]
                # Create a new thread and add it to the list.
                threads.append(main_workflow(engine))

        except DlpxException as e:
            print("Error encountered in run_job():\n{}".format(e))
            sys.exit(1)

    elif arguments["--all"] is False:
        # Else if the --engine argument was given, test to see if the engine
        # exists in dxtools.conf
        if arguments["--engine"]:
            try:
                engine = dx_session_obj.dlpx_engines[arguments["--engine"]]
                print_info("Executing against Delphix Engine: {}\n".format(
                    arguments["--engine"]))

            except (DlpxException, RequestError, KeyError) as e:
                raise DlpxException("\nERROR: Delphix Engine {} cannot be "
                                    "found in {}. Please check your value "
                                    "and try again. Exiting.\n".format(
                                        arguments["--engine"],
                                        config_file_path))

        else:
            # Else search for a default engine in the dxtools.conf
            for delphix_engine in dx_session_obj.dlpx_engines:
                if dx_session_obj.dlpx_engines[delphix_engine][
                        "default"] == "true":
                    engine = dx_session_obj.dlpx_engines[delphix_engine]
                    print_info("Executing against the default Delphix Engine "
                               "in the dxtools.conf: {}".format(
                                   dx_session_obj.dlpx_engines[delphix_engine]
                                   ["hostname"]))
                    break

            if engine is None:
                raise DlpxException(
                    "\nERROR: No default engine found. Exiting")

        # run the job against the engine
        threads.append(main_workflow(engine))

    # For each thread in the list...
    for each in threads:
        # join them back together so that we wait for all threads to complete
        # before moving on
        each.join()
Example #21
def main_workflow(engine):
    """
    This function actually runs the jobs.
    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously

    engine: Dictionary of engines
    """
    jobs = {}

    try:
        #Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])

    except DlpxException as e:
        print_exception('\nERROR: Engine %s encountered an error while '
                        '%s:\n%s\n' % (engine['hostname'],
                        arguments['--target'], e))
        sys.exit(1)

    thingstodo = ["thingtodo"]
    #reset the running job count before we begin
    i = 0
    with dx_session_obj.job_mode(single_thread):
        while len(jobs) > 0 or len(thingstodo) > 0:
            if len(thingstodo) > 0:
                try:
                    if arguments['--start']:
                        vdb_operation(database_name, 'start')

                    elif arguments['--stop']:
                        vdb_operation(database_name, 'stop')

                    elif arguments['--enable']:
                        vdb_operation(database_name, 'enable')

                    elif arguments['--disable']:
                        vdb_operation(database_name, 'disable')

                    elif arguments['--list']:
                        list_databases()

                    elif arguments['--all_dbs']:
                        if not re.match('disable|enable',
                                        arguments['--all_dbs'].lower()):
                            raise DlpxException('--all_dbs should be either '
                                                'enable or disable')

                except DlpxException as e:
                    print('\nERROR: Could not perform action on the VDB(s)'
                          '\n%s\n' % e.message)
                thingstodo.pop()

            #get all the jobs, then inspect them
            i = 0
            for j in jobs.keys():
                job_obj = job.get(dx_session_obj.server_session, jobs[j])
                print_debug(job_obj)
                print_info(engine["hostname"] + ": VDB Operations: " +
                           job_obj.job_state)

                if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
                    #If the job is in a non-running state, remove it from the
                    # running jobs list.
                    del jobs[j]
                else:
                    #If the job is in a running state, increment the running
                    # job count.
                    i += 1

            print_info(engine["hostname"] + ": " + str(i) + " jobs running. ")
            #If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments['--poll']))
Example #22
def create_vfiles_vdb(engine,
                      jobs,
                      vfiles_group,
                      vfiles_name,
                      environment_obj,
                      container_obj,
                      pre_refresh=None,
                      post_refresh=None,
                      pre_rollback=None,
                      post_rollback=None,
                      configure_clone=None):
    '''
    Create a Vfiles VDB
    '''

    vfiles_obj = None

    try:
        vfiles_obj = find_obj_by_name(dx_session_obj.server_session, database,
                                      vfiles_name)
    except DlpxException:
        pass

    if vfiles_obj is None:
        vfiles_repo = find_repo_by_environment_ref(engine,
                                                   'Unstructured Files',
                                                   environment_obj.reference)

        vfiles_params = AppDataProvisionParameters()
        vfiles_params.source = AppDataVirtualSource()
        vfiles_params.source_config = AppDataDirectSourceConfig()

        vdb_restart_reobj = re.compile('true', re.IGNORECASE)

        if vdb_restart_reobj.search(str(arguments['--vdb_restart'])):
            vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = True

        elif vdb_restart_reobj.search(str(arguments['--vdb_restart'])) is None:
            vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = False

        vfiles_params.container = {
            'type': 'AppDataContainer',
            'group': vfiles_group.reference,
            'name': vfiles_name
        }

        vfiles_params.source_config.name = arguments['--target']
        vfiles_params.source_config.path = arguments['--vfiles_path']
        vfiles_params.source_config.environment_user = \
                                    environment_obj.primary_user
        vfiles_params.source_config.repository = vfiles_repo.reference

        vfiles_params.source.parameters = {}
        vfiles_params.source.name = vfiles_name
        vfiles_params.source.operations = VirtualSourceOperations()

        if pre_refresh:
            vfiles_params.source.operations.pre_refresh = [{
                'type':
                'RunCommandOnSourceOperation',
                'command':
                pre_refresh
            }]

        if post_refresh:
            vfiles_params.source.operations.post_refresh = [{
                'type':
                'RunCommandOnSourceOperation',
                'command':
                post_refresh
            }]

        if pre_rollback:
            vfiles_params.source.operations.pre_rollback = [{
                'type':
                'RunCommandOnSourceOperation',
                'command':
                pre_rollback
            }]

        if post_rollback:
            vfiles_params.source.operations.post_rollback = [{
                'type':
                'RunCommandOnSourceOperation',
                'command':
                post_rollback
            }]

        if configure_clone:
            vfiles_params.source.operations.configure_clone = [{
                'type':
                'RunCommandOnSourceOperation',
                'command':
                configure_clone
            }]

        if arguments['--timestamp_type'] is None:
            vfiles_params.timeflow_point_parameters = {
                'type': 'TimeflowPointSemantic',
                'container': container_obj.reference,
                'location': 'LATEST_POINT'
            }

        elif arguments['--timestamp_type'].upper() == 'SNAPSHOT':

            try:
                dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session)
                dx_snap_params = dx_timeflow_obj.set_timeflow_point(
                    container_obj, arguments['--timestamp_type'],
                    arguments['--timestamp'], arguments['--timeflow'])

            except RequestError as e:
                raise DlpxException('Could not set the timeflow point:\n%s' %
                                    (e))

            if dx_snap_params.type == 'TimeflowPointSemantic':
                vfiles_params.timeflow_point_parameters = {
                    'type': dx_snap_params.type,
                    'container': dx_snap_params.container,
                    'location': dx_snap_params.location
                }

            elif dx_snap_params.type == 'TimeflowPointTimestamp':
                vfiles_params.timeflow_point_parameters = {
                    'type': dx_snap_params.type,
                    'timeflow': dx_snap_params.timeflow,
                    'timestamp': dx_snap_params.timestamp
                }

        print_info('%s: Provisioning %s\n' % (engine["hostname"], vfiles_name))

        try:
            database.provision(dx_session_obj.server_session, vfiles_params)

        except (JobError, RequestError, HttpError) as e:
            raise DlpxException('\nERROR: Could not provision the database:'
                                '\n%s' % (e))

        #Add the job into the jobs dictionary so we can track its progress
        jobs[engine['hostname']] = dx_session_obj.server_session.last_job

        #return the job object to the calling statement so that we can tell if
        # a job was created or not (will return None, if no job)
        return dx_session_obj.server_session.last_job
    else:
        print_info('\nERROR %s: %s already exists. \n' %
                   (engine['hostname'], vfiles_name))
        return vfiles_obj.reference
Example #23
def create_oracle_si_vdb(
    engine,
    jobs,
    vdb_name,
    vdb_group_obj,
    environment_obj,
    container_obj,
    pre_refresh=None,
    post_refresh=None,
    pre_rollback=None,
    post_rollback=None,
    configure_clone=None,
):

    """
    Create an Oracle SI VDB
    """

    vdb_obj = None

    try:
        vdb_obj = find_obj_by_name(dx_session_obj.server_session, database, vdb_name)
    except DlpxException:
        pass

    if vdb_obj == None:
        vdb_params = OracleProvisionParameters()
        vdb_params.open_resetlogs = True

        if arguments["--noopen"]:
            vdb_params.open_resetlogs = False

        vdb_params.container = OracleDatabaseContainer()
        vdb_params.container.group = vdb_group_obj.reference
        vdb_params.container.name = vdb_name
        vdb_params.source = OracleVirtualSource()
        vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False

        if arguments["--instname"]:
            inst_name = arguments["--instname"]
        elif arguments["--instname"] == None:
            inst_name = vdb_name

        if arguments["--uniqname"]:
            unique_name = arguments["--uniqname"]
        elif arguments["--uniqname"] == None:
            unique_name = vdb_name

        if arguments["--db"]:
            db = arguments["--db"]
        elif arguments["--db"] == None:
            db = vdb_name

        vdb_params.source.mount_base = arguments["--mntpoint"]

        if arguments["--mapfile"]:
            vdb_params.source.file_mapping_rules = arguments["--mapfile"]

        if arguments["--template"]:
            template_obj = find_obj_by_name(
                dx_session_obj.server_session,
                database.template,
                arguments["--template"],
            )

            vdb_params.source.config_template = template_obj.reference

        vdb_params.source_config = OracleSIConfig()
        vdb_params.source.operations = VirtualSourceOperations()

        if pre_refresh:
            vdb_params.source.operations.pre_refresh = [
                {"type": "RunCommandOnSourceOperation", "command": pre_refresh}
            ]

        if post_refresh:
            vdb_params.source.operations.post_refresh = [
                {"type": "RunCommandOnSourceOperation", "command": post_refresh}
            ]

        if pre_rollback:
            vdb_params.source.operations.pre_rollback = [
                {"type": "RunCommandOnSourceOperation", "command": pre_rollback}
            ]

        if post_rollback:
            vdb_params.source.operations.post_rollback = [
                {"type": "RunCommandOnSourceOperation", "command": post_rollback}
            ]

        if configure_clone:
            vdb_params.source.operations.configure_clone = [
                {"type": "RunCommandOnSourceOperation", "command": configure_clone}
            ]

        vdb_repo = find_dbrepo_by_environment_ref_and_install_path(
            engine,
            dx_session_obj.server_session,
            "OracleInstall",
            environment_obj.reference,
            arguments["--envinst"],
        )

        vdb_params.source_config.database_name = db
        vdb_params.source_config.unique_name = unique_name
        vdb_params.source_config.instance = OracleInstance()
        vdb_params.source_config.instance.instance_name = inst_name
        vdb_params.source_config.instance.instance_number = 1
        vdb_params.source_config.repository = vdb_repo.reference

        dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session)
        vdb_params.timeflow_point_parameters = dx_timeflow_obj.set_timeflow_point(
            container_obj, arguments["--timestamp_type"], arguments["--timestamp"]
        )

        print(vdb_params, "\n\n\n")
        print_info(engine["hostname"] + ": Provisioning " + vdb_name)
        database.provision(dx_session_obj.server_session, vdb_params)
        # Add the job into the jobs dictionary so we can track its progress

        jobs[engine["hostname"]] = dx_session_obj.server_session.last_job
        # return the job object to the calling statement so that we can tell if
        # a job was created or not (will return None, if no job)

        return dx_session_obj.server_session.last_job

    else:
        raise DlpxException(
            "\nERROR: %s: %s alread exists\n" % (engine["hostname"], vdb_name)
        )
Example #24
def create_vfiles_vdb(
    engine,
    jobs,
    vfiles_group,
    vfiles_name,
    environment_obj,
    container_obj,
    pre_refresh=None,
    post_refresh=None,
    pre_rollback=None,
    post_rollback=None,
    configure_clone=None,
):
    """
    Create a Vfiles VDB
    """

    vfiles_obj = None

    try:
        vfiles_obj = find_obj_by_name(
            dx_session_obj.server_session, database, vfiles_name
        )
    except DlpxException:
        pass

    if vfiles_obj is None:
        vfiles_repo = find_repo_by_environment_ref(
            engine, "Unstructured Files", environment_obj.reference
        )

        vfiles_params = AppDataProvisionParameters()
        vfiles_params.source = AppDataVirtualSource()
        vfiles_params.source_config = AppDataDirectSourceConfig()

        vdb_restart_reobj = re.compile("true", re.IGNORECASE)

        if vdb_restart_reobj.search(str(arguments["--vdb_restart"])):
            vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = True

        elif vdb_restart_reobj.search(str(arguments["--vdb_restart"])) is None:
            vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = False

        vfiles_params.container = {
            "type": "AppDataContainer",
            "group": vfiles_group.reference,
            "name": vfiles_name,
        }

        vfiles_params.source_config.name = arguments["--target"]
        vfiles_params.source_config.path = arguments["--vfiles_path"]
        vfiles_params.source_config.environment_user = environment_obj.primary_user
        vfiles_params.source_config.repository = vfiles_repo.reference

        vfiles_params.source.parameters = {}
        vfiles_params.source.name = vfiles_name
        vfiles_params.source.operations = VirtualSourceOperations()

        if pre_refresh:
            vfiles_params.source.operations.pre_refresh = [
                {"type": "RunCommandOnSourceOperation", "command": pre_refresh}
            ]

        if post_refresh:
            vfiles_params.source.operations.post_refresh = [
                {"type": "RunCommandOnSourceOperation", "command": post_refresh}
            ]

        if pre_rollback:
            vfiles_params.source.operations.pre_rollback = [
                {"type": "RunCommandOnSourceOperation", "command": pre_rollback}
            ]

        if post_rollback:
            vfiles_params.source.operations.post_rollback = [
                {"type": "RunCommandOnSourceOperation", "command": post_rollback}
            ]

        if configure_clone:
            vfiles_params.source.operations.configure_clone = [
                {"type": "RunCommandOnSourceOperation", "command": configure_clone}
            ]

        if arguments["--timestamp_type"] is None:
            vfiles_params.timeflow_point_parameters = {
                "type": "TimeflowPointSemantic",
                "container": container_obj.reference,
                "location": "LATEST_POINT",
            }

        elif arguments["--timestamp_type"].upper() == "SNAPSHOT":

            try:
                dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session)
                dx_snap_params = dx_timeflow_obj.set_timeflow_point(
                    container_obj,
                    arguments["--timestamp_type"],
                    arguments["--timestamp"],
                    arguments["--timeflow"],
                )

            except RequestError as e:
                raise DlpxException("Could not set the timeflow point:\n%s" % (e))

            if dx_snap_params.type == "TimeflowPointSemantic":
                vfiles_params.timeflow_point_parameters = {
                    "type": dx_snap_params.type,
                    "container": dx_snap_params.container,
                    "location": dx_snap_params.location,
                }

            elif dx_snap_params.type == "TimeflowPointTimestamp":
                vfiles_params.timeflow_point_parameters = {
                    "type": dx_snap_params.type,
                    "timeflow": dx_snap_params.timeflow,
                    "timestamp": dx_snap_params.timestamp,
                }

        print_info("%s: Provisioning %s\n" % (engine["hostname"], vfiles_name))

        try:
            database.provision(dx_session_obj.server_session, vfiles_params)

        except (JobError, RequestError, HttpError) as e:
            raise DlpxException(
                "\nERROR: Could not provision the database:" "\n%s" % (e)
            )

        # Add the job into the jobs dictionary so we can track its progress
        jobs[engine["hostname"]] = dx_session_obj.server_session.last_job

        # return the job object to the calling statement so that we can tell if
        # a job was created or not (will return None, if no job)
        return dx_session_obj.server_session.last_job
    else:
        print_info(
            "\nERROR %s: %s already exists. \n" % (engine["hostname"], vfiles_name)
        )
        return vfiles_obj.reference
Example #25
def set_timeflow_point(engine, server, container_obj):
    """
    Return the timeflow point parameters for the timestamp specified.

    engine: Dictionary of engines from config file.
    server: Delphix Engine session object
    container_obj: VDB object
    """

    if arguments["--timestamp_type"].upper() == "SNAPSHOT":
        if arguments["--timestamp"].upper() == "LATEST":
            print_debug(engine["hostname"] + ": Using the latest Snapshot")
            timeflow_point_parameters = TimeflowPointSemantic()
            timeflow_point_parameters.location = "LATEST_SNAPSHOT"

        elif arguments["--timestamp"].startswith("@"):
            print_debug(engine["hostname"] + ": Using a named snapshot")
            snapshot_obj = find_snapshot_by_database_and_name(
                engine, server, container_obj, arguments["--timestamp"])

            if snapshot_obj:
                timeflow_point_parameters = TimeflowPointLocation()
                timeflow_point_parameters.timeflow = snapshot_obj.timeflow
                timeflow_point_parameters.location = (
                    snapshot_obj.latest_change_point.location)

            else:
                raise DlpxException(
                    "ERROR: Was unable to use the specified "
                    "snapshot %s for database %s.\n" %
                    (arguments["--timestamp"], container_obj.name))

        elif arguments["--timestamp"]:
            print_debug(engine["hostname"] +
                        ": Using a time-designated snapshot")
            snapshot_obj = find_snapshot_by_database_and_time(
                engine, server, container_obj, arguments["--timestamp"])

            if snapshot_obj:
                timeflow_point_parameters = TimeflowPointTimestamp()
                timeflow_point_parameters.timeflow = snapshot_obj.timeflow
                timeflow_point_parameters.timestamp = (
                    snapshot_obj.latest_change_point.timestamp)

            else:
                raise DlpxException(
                    "Was unable to find a suitable time "
                    "for %s for database %s" %
                    (arguments["--timestamp"], container_obj.name))

    elif arguments["--timestamp_type"].upper() == "TIME":

        if arguments["--timestamp"].upper() == "LATEST":
            timeflow_point_parameters = TimeflowPointSemantic()
            timeflow_point_parameters.location = "LATEST_POINT"

        elif arguments["--timestamp"]:
            timeflow_point_parameters = TimeflowPointTimestamp()
            timeflow_point_parameters.type = "TimeflowPointTimestamp"
            timeflow_obj = find_obj_by_name(engine, server, timeflow,
                                            arguments["--timeflow"])

            timeflow_point_parameters.timeflow = timeflow_obj.reference
            timeflow_point_parameters.timestamp = arguments["--timestamp"]
            return timeflow_point_parameters
    else:
        raise DlpxException(arguments["--timestamp_type"] +
                            " is not a valied timestamp_type. Exiting")

    timeflow_point_parameters.container = container_obj.reference
    return timeflow_point_parameters
Example #26
def refresh_database(vdb_name, timestamp, timestamp_type="SNAPSHOT"):
    """
    This function actually performs the refresh

    vdb_name: VDB to be refreshed
    timestamp: Point in time to refresh the VDB to
    timestamp_type: The type of timestamp being used for the refresh
    """

    # Sanity check to make sure our source object has a reference
    dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session)
    container_obj = find_obj_by_name(dx_session_obj.server_session, database,
                                     vdb_name)
    source_obj = find_source_by_dbname(dx_session_obj.server_session, database,
                                       vdb_name)

    # Sanity check to make sure our container object has a reference
    if container_obj.reference:
        try:
            if container_obj.virtual is not True:
                raise DlpxException("{} is not a virtual object. "
                                    "Skipping.\n".format(container_obj.name))
            elif container_obj.staging is True:
                raise DlpxException("{} is a staging database. "
                                    "Skipping.\n".format(container_obj.name))
            elif container_obj.runtime.enabled == "ENABLED":
                print_info("\nINFO: Refrshing {} to {}\n".format(
                    container_obj.name, timestamp))

        # This exception is raised if rewinding a vFiles VDB
        # since AppDataContainer does not have virtual, staging or
        # enabled attributes.
        except AttributeError:
            pass

    if source_obj.reference:
        # We can only refresh VDB's
        if source_obj.virtual != True:
            print_info(
                "\nINFO: {} is not a virtual object. Skipping.\n".format(
                    container_obj.name))

        # Ensure this source is not a staging database. We can't act upon those.
        elif source_obj.staging == True:
            print_info("\nINFO: {} is a staging database. Skipping.\n".format(
                container_obj.name))

        # Ensure the source is enabled. We can't refresh disabled databases.
        elif source_obj.runtime.enabled == "ENABLED":
            source_db = database.get(dx_session_obj.server_session,
                                     container_obj.provision_container)
            if not source_db:
                print_error(
                    "\nERROR: Was unable to retrieve the source container for {} \n"
                    .format(container_obj.name))
            print_info("\nINFO: Refreshing {} from {}\n".format(
                container_obj.name, source_db.name))

            # If the vdb is an Oracle type, we need to use an
            # OracleRefreshParameters
            if str(container_obj.reference).startswith("ORACLE"):
                refresh_params = OracleRefreshParameters()
            else:
                refresh_params = RefreshParameters()

            try:
                refresh_params.timeflow_point_parameters = (
                    dx_timeflow_obj.set_timeflow_point(source_db,
                                                       timestamp_type,
                                                       timestamp))
                print_info("\nINFO: Refresh prams {}\n".format(refresh_params))

                # Sync it
                database.refresh(
                    dx_session_obj.server_session,
                    container_obj.reference,
                    refresh_params,
                )
                dx_session_obj.jobs[
                    dx_session_obj.server_session.
                    address] = dx_session_obj.server_session.last_job

            except RequestError as e:
                print("\nERROR: Could not set timeflow point:\n%s\n" %
                      (e.message.action))
                sys.exit(1)

            except DlpxException as e:
                print("ERROR: Could not set timeflow point:\n%s\n" %
                      (e.message))
                sys.exit(1)

        # Don't do anything if the database is disabled
        else:
            print_info("\nINFO: {} is not enabled. Skipping sync.\n".format(
                container_obj.name))
Example #27
def run_job():
    """
    This function runs the main_workflow asynchronously against all the servers
    specified

    No arguments required for run_job().
    """
    #Create an empty list to store threads we create.
    threads = []

    #If the --all argument was given, run against every engine in dxtools.conf
    if arguments['--all']:
        print_info("Executing against all Delphix Engines in the dxtools.conf")

        try:
            #For each server in the dxtools.conf...
            for delphix_engine in dx_session_obj.dlpx_engines:
                engine = dx_session_obj.dlpx_engines[delphix_engine]
                #Create a new thread and add it to the list.
                threads.append(main_workflow(engine))

        except DlpxException as e:
            print('Error encountered in main_workflow:\n%s' % (e))
            sys.exit(1)

    elif arguments['--all'] is False:
        #Else if the --engine argument was given, test to see if the engine
        # exists in dxtools.conf
        if arguments['--engine']:
            try:
                engine = dx_session_obj.dlpx_engines[arguments['--engine']]
                print_info('Executing against Delphix Engine: %s\n' %
                           (arguments['--engine']))

            except (DlpxException, RequestError, KeyError) as e:
                raise DlpxException('\nERROR: Delphix Engine %s cannot be '
                                    'found in %s. Please check your value '
                                    'and try again. Exiting.\n' %
                                    (arguments['--engine'], config_file_path))

        else:
            #Else search for a default engine in the dxtools.conf
            for delphix_engine in dx_session_obj.dlpx_engines:
                if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \
                    'true':

                    engine = dx_session_obj.dlpx_engines[delphix_engine]
                    print_info('Executing against the default Delphix Engine '
                               'in the dxtools.conf: %s' %
                               (dx_session_obj.dlpx_engines[delphix_engine]
                                ['hostname']))
                    break

            if engine is None:
                raise DlpxException(
                    "\nERROR: No default engine found. Exiting")

        #run the job against the engine
        threads.append(main_workflow(engine))

    #For each thread in the list...
    for each in threads:
        #join them back together so that we wait for all threads to complete
        # before moving on
        each.join()