def find_target_type(dlpx_obj, target_type, target_name):
    """
    Function to find the target authorization

    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    :param target_type: Type of target for authorization
    :param target_name: Name of the target
    """
    target_obj = None
    try:
        if target_type.lower() == 'group':
            target_obj = find_obj_by_name(dlpx_obj.server_session, group,
                                          target_name)
        elif target_type.lower() == 'database':
            target_obj = find_obj_by_name(dlpx_obj.server_session, database,
                                          target_name)
        elif target_type.lower() == 'snapshot':
            target_obj = find_obj_by_name(dlpx_obj.server_session, snapshot,
                                          target_name)
    except (DlpxException, RequestError, HttpError) as e:
        print_exception('Could not find authorization target type '
                        '{}:\n{}'.format(target_type, e))
    return target_obj
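# Illustrative usage (not part of the original script): assuming an active
# lib.GetSession.GetSession object named dlpx_obj and a group named
# 'Analytics' on the engine, the authorization target could be resolved as:
#
#   target_obj = find_target_type(dlpx_obj, 'group', 'Analytics')
#   if target_obj is not None:
#       print_info('Found target reference {}'.format(target_obj.reference))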
def update_user(user_name, user_password=None, user_email=None, jsonly=None):
    """
    This function updates the user
    """
    if user_email:
        updated_user_obj = User()
        updated_user_obj.email_address = user_email
        try:
            user.update(dx_session_obj.server_session,
                        find_obj_by_name(dx_session_obj.server_session, user,
                                         user_name).reference,
                        updated_user_obj)
            print('Attempting to update {}'.format(user_name))
        except (DlpxException, RequestError) as e:
            print_exception('\nERROR: Updating the user {} '
                            'encountered an error:\n{}'.format(user_name, e))
            sys.exit(1)
    if user_password:
        new_password_obj = CredentialUpdateParameters()
        new_password_obj.new_credential = PasswordCredential()
        new_password_obj.new_credential.password = user_password
        try:
            user.update_credential(dx_session_obj.server_session,
                                   find_obj_by_name(
                                       dx_session_obj.server_session, user,
                                       user_name).reference,
                                   new_password_obj)
            print('Attempting to update {} password'.format(user_name))
        except (DlpxException, RequestError) as e:
            print_exception('\nERROR: Updating the user {} password '
                            'encountered an error:\n{}'.format(user_name, e))
            sys.exit(1)
    js_only(user_name, jsonly)
def create_authorization(dlpx_obj, role_name, target_type, target_name,
                         user_name):
    """
    Function to create an authorization

    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    :param role_name: Name of the role
    :param target_type: Supports snapshot, group and database target types
    :param target_name: Name of the target
    :param user_name: User for the authorization
    """
    authorization_obj = Authorization()
    print_debug('Searching for {}, {} and {} references.\n'.format(
        role_name, target_name, user_name))
    try:
        authorization_obj.role = find_obj_by_name(dlpx_obj.server_session,
                                                  role, role_name).reference
        authorization_obj.target = find_target_type(dlpx_obj, target_type,
                                                    target_name).reference
        authorization_obj.user = find_obj_by_name(dlpx_obj.server_session,
                                                  user, user_name).reference
        authorization.create(dlpx_obj.server_session, authorization_obj)
    except (RequestError, HttpError, JobError) as e:
        print_exception('An error occurred while creating authorization:\n'
                        '{}'.format(e))
    else:
        print('Authorization successfully created for {}.'.format(user_name))
def link_ase_dsource(engine_name):
    """
    Link an ASE dSource
    """
    link_params = LinkParameters()
    link_params.name = arguments['--dsource_name']
    link_params.link_data = ASELinkData()
    link_params.link_data.db_credentials = {
        'type': 'PasswordCredential',
        'password': arguments['--ase_passwd']}
    link_params.link_data.db_user = arguments['--ase_user']
    link_params.link_data.load_backup_path = arguments['--backup_path']
    if arguments['--bck_file']:
        link_params.link_data.sync_parameters = \
            ASESpecificBackupSyncParameters()
        bck_files = (arguments['--bck_file']).split(' ')
        link_params.link_data.sync_parameters.backup_files = bck_files
    elif arguments['--create_bckup']:
        link_params.link_data.sync_parameters = ASENewBackupSyncParameters()
    else:
        link_params.link_data.sync_parameters = ASELatestBackupSyncParameters()
    try:
        link_params.group = find_obj_by_name(
            dx_session_obj.server_session, group,
            arguments['--dx_group']).reference
        env_user_ref = link_params.link_data.stage_user = find_obj_by_name(
            dx_session_obj.server_session, environment,
            arguments['--env_name']).primary_user
        link_params.link_data.staging_host_user = env_user_ref
        link_params.link_data.source_host_user = env_user_ref
        link_params.link_data.config = find_obj_by_name(
            dx_session_obj.server_session, sourceconfig,
            arguments['--src_config']).reference
        link_params.link_data.staging_repository = find_obj_by_name(
            dx_session_obj.server_session, repository,
            arguments['--stage_repo']).reference
    except DlpxException as e:
        print_exception('Could not link {}: {}\n'.format(
            arguments['--dsource_name'], e))
        sys.exit(1)
    try:
        dsource_ref = database.link(dx_session_obj.server_session,
                                    link_params)
        dx_session_obj.jobs[engine_name] = \
            dx_session_obj.server_session.last_job
        dx_session_obj.jobs[engine_name + 'snap'] = get_running_job(
            dx_session_obj.server_session, find_obj_by_name(
                dx_session_obj.server_session, database,
                arguments['--dsource_name']).reference)
        print('{} successfully linked {}'.format(
            dsource_ref, arguments['--dsource_name']))
    except (RequestError, HttpError) as e:
        print_exception('Database link failed for {}:\n{}'.format(
            arguments['--dsource_name'], e))
def link_ora_dsource(srcconfig_ref, primary_user_ref):
    """
    :param srcconfig_ref: Reference to the sourceconfig object
    :param primary_user_ref: Reference to the environment user
    :return: Reference of the linked dSource
    """
    link_params = LinkParameters()
    link_params.link_data = OracleLinkData()
    link_params.link_data.sourcing_policy = OracleSourcingPolicy()
    link_params.name = arguments['--dsource_name']
    link_params.group = find_obj_by_name(dx_session_obj.server_session, group,
                                         arguments['--dx_group']).reference
    link_params.link_data.compressedLinkingEnabled = True
    link_params.link_data.environment_user = primary_user_ref
    link_params.link_data.db_user = arguments['--db_user']
    link_params.link_data.number_of_connections = \
        int(arguments['--num_connections'])
    link_params.link_data.link_now = bool(arguments['--link_now'])
    link_params.link_data.files_per_set = int(arguments['--files_per_set'])
    link_params.link_data.rman_channels = int(arguments['--rman_channels'])
    link_params.link_data.db_credentials = {
        'type': 'PasswordCredential',
        'password': arguments['--db_passwd']}
    link_params.link_data.sourcing_policy.logsync_enabled = True
    #link_params.link_data.sourcing_policy.logsync_mode = 'ARCHIVE_REDO_MODE'
    link_params.link_data.config = srcconfig_ref
    try:
        return database.link(dx_session_obj.server_session, link_params)
    except (RequestError, HttpError) as e:
        print_exception('Database link failed for {}:\n{}\n'.format(
            arguments['--dsource_name'], e))
        sys.exit(1)
def refresh_env(dlpx_obj, env_name):
    """
    Refresh the environment

    dlpx_obj: Virtualization Engine session object
    env_name: Name of the environment to refresh, or "all" to refresh every
              environment on the engine
    """
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    if env_name == "all":
        env_list = find_all_objects(dlpx_obj.server_session, environment)
        for env_obj in env_list:
            try:
                environment.refresh(dlpx_obj.server_session,
                                    env_obj.reference)
                dlpx_obj.jobs[engine_name] = \
                    dlpx_obj.server_session.last_job
            except (DlpxException, RequestError) as e:
                print_exception('\nERROR: Refreshing the environment {} '
                                'encountered an error:\n{}'.format(
                                    env_name, e))
                sys.exit(1)
    else:
        try:
            env_obj = find_obj_by_name(dlpx_obj.server_session, environment,
                                       env_name)
            environment.refresh(dlpx_obj.server_session, env_obj.reference)
            dlpx_obj.jobs[engine_name] = \
                dlpx_obj.server_session.last_job
        except (DlpxException, RequestError) as e:
            print_exception('\nERROR: Refreshing the environment {} '
                            'encountered an error:\n{}'.format(env_name, e))
            sys.exit(1)
def rewind_database(dlpx_obj, vdb_name, timestamp, timestamp_type='SNAPSHOT'):
    """
    This function performs the rewind (rollback)

    dlpx_obj: Virtualization Engine session object
    vdb_name: VDB to be rewound
    timestamp: Point in time to rewind the VDB
    timestamp_type: The type of timestamp being used for the rewind
    """
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    dx_timeflow_obj = DxTimeflow(dlpx_obj.server_session)
    container_obj = find_obj_by_name(dlpx_obj.server_session, database,
                                     vdb_name)
    # Sanity check to make sure our container object has a reference
    if container_obj.reference:
        try:
            if container_obj.virtual is not True:
                raise DlpxException('{} in engine {} is not a virtual object.'
                                    ' Skipping.\n'.format(container_obj.name,
                                                          engine_name))
            elif container_obj.staging is True:
                raise DlpxException('{} in engine {} is a staging object. '
                                    'Skipping.\n'.format(container_obj.name,
                                                         engine_name))
            elif container_obj.runtime.enabled == "ENABLED":
                print_info('\nINFO: {} Rewinding {} to {}\n'.format(
                    engine_name, container_obj.name, timestamp))
        # This exception is raised if rewinding a vFiles VDB
        # since AppDataContainer does not have virtual, staging or
        # enabled attributes.
        except AttributeError:
            pass
        print_debug('{}: Type: {}'.format(engine_name, container_obj.type))
        # If the VDB is an Oracle type, we need to use
        # OracleRollbackParameters
        if str(container_obj.reference).startswith("ORACLE"):
            rewind_params = OracleRollbackParameters()
        else:
            rewind_params = RollbackParameters()
        rewind_params.timeflow_point_parameters = \
            dx_timeflow_obj.set_timeflow_point(container_obj, timestamp_type,
                                               timestamp)
        print_debug('{}: {}'.format(engine_name, str(rewind_params)))
        try:
            # Rewind the VDB
            database.rollback(dlpx_obj.server_session,
                              container_obj.reference, rewind_params)
            dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
            print_info('VDB {} was rolled back.'.format(container_obj.name))
        except (RequestError, HttpError, JobError) as e:
            print_exception('ERROR: {} encountered an error on {}'
                            ' during the rewind process:\n{}'.format(
                                engine_name, container_obj.name, e))
    # Don't do anything if the database is disabled
    else:
        print_info('{}: {} is not enabled. Skipping sync.'.format(
            engine_name, container_obj.name))
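# Illustrative usage (not part of the original script): assuming an active
# GetSession object named dlpx_obj, a VDB named 'vdb_employees', and a
# snapshot identifier accepted by DxTimeflow.set_timeflow_point (the exact
# timestamp format is an assumption here):
#
#   rewind_database(dlpx_obj, 'vdb_employees', 'LATEST',
#                   timestamp_type='SNAPSHOT')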
def find_missing_archivelogs(hostname):
    """
    Function to find missing archive log files for Oracle dSources.
    """
    print('Now working on engine {}.'.format(hostname))
    log_file = open('{}/{}.csv'.format(arguments['--outdir'], hostname), 'a+')
    log_file.write('Name,InstanceNumber,Sequence,StartSCN,EndSCN\n')
    src_objs = find_all_objects(dx_session_obj.server_session, source)
    for src_obj in src_objs:
        if src_obj.virtual is False and src_obj.type == 'OracleLinkedSource':
            ora_logs = oracle.log.get_all(
                dx_session_obj.server_session,
                database=find_obj_by_name(dx_session_obj.server_session,
                                          database, src_obj.name).reference,
                missing=True, page_size=1000)
            if ora_logs:
                for log_data in ora_logs:
                    log_file.write('{}, {}, {}, {}, {}\n'.format(
                        src_obj.name, log_data.instance_num,
                        log_data.sequence, log_data.start_scn,
                        log_data.end_scn))
            elif not ora_logs:
                log_file.write('{} has no missing files.\n'.format(
                    src_obj.name))
    log_file.close()
def find_all_databases_by_group_name(engine, server, group_name,
                                     exclude_js_container=True):
    """
    Easy way to quickly find databases by group name
    """
    #First search groups for the name specified and return its reference
    group_obj = find_obj_by_name(engine, server, group, group_name)
    if group_obj:
        return database.get_all(
            server, group=group_obj.reference,
            no_js_container_data_source=exclude_js_container)
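# Illustrative usage (not part of the original script): assuming an engine
# dictionary, an authenticated server session, and a group named 'Analytics'
# on the engine:
#
#   dbs = find_all_databases_by_group_name(engine, server, 'Analytics')
#   for db in dbs or []:
#       print_info(db.name)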
def js_only(user_name, jsonly=None):
    """
    Switch the user to/from a jsonly user
    """
    user_obj = find_obj_by_name(dx_session_obj.server_session, user,
                                user_name)
    role_obj = find_obj_by_name(dx_session_obj.server_session, role,
                                "Jet Stream User")
    if jsonly:
        authorization_obj = Authorization()
        authorization_obj.role = role_obj.reference
        authorization_obj.target = user_obj.reference
        authorization_obj.user = user_obj.reference
        authorization.create(dx_session_obj.server_session, authorization_obj)
    else:
        auth_name = "(" + user_obj.reference + ", " + role_obj.reference + \
                    ", " + user_obj.reference + ")"
        authorization.delete(dx_session_obj.server_session,
                             find_obj_by_name(dx_session_obj.server_session,
                                              authorization,
                                              auth_name).reference)
def create_windows_env(dlpx_obj, env_name, host_user, ip_addr, pw=None,
                       connector_name=None):
    """
    Create a Windows environment.

    env_name: The name of the environment
    host_user: The server account used to authenticate
    ip_addr: DNS name or IP address of the environment
    pw: Password of the user. Default: None (use SSH keys instead)
    connector_name: Name of the environment running the Delphix connector,
                    used as the proxy for the new host
    """
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    env_params_obj = HostEnvironmentCreateParameters()
    print_debug('Creating the environment with a password')
    env_params_obj.primary_user = {'type': 'EnvironmentUser',
                                   'name': host_user,
                                   'credential': {
                                       'type': 'PasswordCredential',
                                       'password': pw}}
    env_params_obj.host_parameters = {'type': 'WindowsHostCreateParameters',
                                      'host': {'address': ip_addr,
                                               'type': 'WindowsHost',
                                               'name': env_name,
                                               'connectorPort': 9100}}
    env_params_obj.host_environment = WindowsHostEnvironment()
    env_params_obj.host_environment.name = env_name
    if connector_name:
        env_obj = find_obj_by_name(dlpx_obj.server_session, environment,
                                   connector_name)
        if env_obj:
            env_params_obj.host_environment.proxy = env_obj.host
        elif env_obj is None:
            print('Host was not found in the Engine: {}'.format(
                connector_name))
            sys.exit(1)
    try:
        environment.create(dlpx_obj.server_session, env_params_obj)
        dlpx_obj.jobs[engine_name] = \
            dlpx_obj.server_session.last_job
    except (DlpxException, RequestError, HttpError) as e:
        print('\nERROR: Encountered an exception while creating the '
              'environment:\n{}'.format(e))
def find_all_databases_by_dsource_name(engine, server, dsource_name,
                                       exclude_js_container=True):
    """
    Easy way to quickly find databases by dSource
    """
    #First search for the dSource name specified and return its reference
    dsource_obj = find_obj_by_name(engine, server, database, dsource_name)
    if dsource_obj:
        return database.get_all(
            server, provision_container=dsource_obj.reference,
            no_js_container_data_source=exclude_js_container)
def create_branch(dlpx_obj, branch_name, container_name, template_name=None,
                  bookmark_name=None):
    """
    Create the JS Branch

    :param dlpx_obj: Virtualization Engine session object
    :param branch_name: Name of the branch to create
    :param container_name: Name of the container to use
    :param template_name: Name of the template to use
    :param bookmark_name: Name of the bookmark to use
    """
    js_branch = JSBranchCreateParameters()
    js_branch.name = branch_name
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    data_container_obj = find_obj_by_name(dlpx_obj.server_session, container,
                                          container_name)
    js_branch.data_container = data_container_obj.reference
    if bookmark_name:
        js_branch.timeline_point_parameters = JSTimelinePointBookmarkInput()
        js_branch.timeline_point_parameters.bookmark = find_obj_by_name(
            dlpx_obj.server_session, bookmark, bookmark_name).reference
    elif template_name:
        source_layout_ref = find_obj_by_name(dlpx_obj.server_session,
                                             template,
                                             template_name).reference
        js_branch.timeline_point_parameters = JSTimelinePointLatestTimeInput()
        js_branch.timeline_point_parameters.source_data_layout = \
            source_layout_ref
    try:
        branch.create(dlpx_obj.server_session, js_branch)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
    except (DlpxException, RequestError, HttpError) as e:
        print_exception('\nThe branch was not created. The error was:'
                        '\n{}'.format(e))
    else:
        print_info('JS Branch {} was created successfully.'.format(
            branch_name))
def delete_branch(dlpx_obj, branch_name):
    """
    Deletes a branch

    :param dlpx_obj: Virtualization Engine session object
    :param branch_name: Branch to delete
    """
    try:
        branch_obj = find_obj_by_name(dlpx_obj.server_session, branch,
                                      branch_name)
        branch.delete(dlpx_obj.server_session, branch_obj.reference)
    except (DlpxException, HttpError, RequestError) as e:
        print_exception('\nERROR: The branch was not deleted. The '
                        'error was:\n\n{}'.format(e.message))
def execute_replication_job(obj_name):
    """
    Execute a replication job immediately.

    :param obj_name: name of object to execute.
    """
    try:
        spec.execute(dx_session_obj.server_session,
                     find_obj_by_name(dx_session_obj.server_session, spec,
                                      obj_name).reference)
        if dx_session_obj.server_session.last_job:
            dx_session_obj.jobs[dx_session_obj.server_session.address] = \
                dx_session_obj.server_session.last_job
        print_info('Successfully executed {}.\n'.format(obj_name))
    except (HttpError, RequestError, DlpxException, JobError) as e:
        print_exception('Could not execute job {}:\n{}'.format(obj_name, e))
def delete_user(user_name):
    """
    This function deletes the user
    """
    user_obj = find_obj_by_name(dx_session_obj.server_session, user,
                                user_name)
    try:
        user.delete(dx_session_obj.server_session, user_obj.reference)
        print('Attempting to delete {}'.format(user_name))
    except (DlpxException, RequestError) as e:
        print_exception('\nERROR: Deleting the user {} '
                        'encountered an error:\n{}'.format(user_name, e))
        sys.exit(1)
def delete_authorization(dlpx_obj, role_name, target_type, target_name,
                         user_name):
    """
    Function to delete a given authorization

    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    :param role_name: Name of the role
    :type role_name: basestring
    :param target_type: Supports snapshot, group and database target types
    :type target_type: basestring
    :param target_name: Name of the target
    :type target_name: basestring
    :param user_name: User for the authorization
    :type user_name: basestring
    """
    target_obj = find_target_type(dlpx_obj, target_type, target_name)
    user_obj = find_obj_by_name(dlpx_obj.server_session, user, user_name)
    role_obj = find_obj_by_name(dlpx_obj.server_session, role, role_name)
    auth_objs = authorization.get_all(dlpx_obj.server_session)
    try:
        del_auth_str = '({}, {}, {})'.format(user_obj.reference,
                                             role_obj.reference,
                                             target_obj.reference)
        for auth_obj in auth_objs:
            if auth_obj.name == del_auth_str:
                authorization.delete(dlpx_obj.server_session,
                                     auth_obj.reference)
    except DlpxException as e:
        print_exception('ERROR: Could not delete authorization:\n{}'.format(
            e))
    else:
        print('{} for user {} was deleted successfully'.format(target_name,
                                                                user_name))
def disable_environment(dlpx_obj, env_name):
    """
    Disable the given host
    """
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    env_obj = find_obj_by_name(dlpx_obj.server_session, environment, env_name)
    try:
        environment.disable(dlpx_obj.server_session, env_obj.reference)
        print('Attempting to disable {}'.format(env_name))
    except (DlpxException, RequestError) as e:
        print_exception('\nERROR: Disabling the host {} '
                        'encountered an error:\n{}'.format(env_name, e))
        sys.exit(1)
def delete_template(dlpx_obj, template_name):
    """
    Deletes a template

    dlpx_obj: Virtualization Engine session object
    template_name: Template to delete
    """
    try:
        template_obj = find_obj_by_name(dlpx_obj.server_session, template,
                                        template_name)
        template.delete(dlpx_obj.server_session, template_obj.reference)
        print('Template {} was deleted.'.format(template_name))
    except (DlpxException, HttpError, RequestError) as e:
        print_exception('\nERROR: The template {} was not deleted. The'
                        ' error was:\n\n{}'.format(template_name, e.message))
def delete_replication_job():
    """
    Delete a replication job.

    :return: Reference to the spec object
    """
    try:
        spec.delete(dx_session_obj.server_session,
                    find_obj_by_name(dx_session_obj.server_session, spec,
                                     arguments['--delete']).reference)
        if dx_session_obj.server_session.last_job:
            dx_session_obj.jobs[dx_session_obj.server_session.address] = \
                dx_session_obj.server_session.last_job
        print_info('Successfully deleted {}.\n'.format(arguments['--delete']))
    except (HttpError, RequestError, DlpxException) as e:
        print_exception('Was not able to delete {}:\n{}'.format(
            arguments['--delete'], e))
def build_ds_params(dlpx_obj, obj, db):
    """
    Builds the datasource parameters

    dlpx_obj: Virtualization Engine session object
    obj: object type to use when finding db
    db: Name of the database to use when building the parameters
    """
    try:
        db_obj = find_obj_by_name(dlpx_obj.server_session, obj, db)
        ds_params = JSDataSourceCreateParameters()
        ds_params.source = {'type': 'JSDataSource', 'name': db}
        ds_params.container = db_obj.reference
        return ds_params
    except RequestError as e:
        print_exception('\nCould not find {}\n{}'.format(db, e.message))
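# Illustrative usage (not part of the original script): assuming an active
# GetSession object named dlpx_obj and a database named 'employees_vdb', the
# datasource parameters for a JS data container could be built like this:
#
#   ds_params = build_ds_params(dlpx_obj, database, 'employees_vdb')
#   if ds_params:
#       print_debug(str(ds_params))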
def update_ase_username(dlpx_obj):
    """
    Update the ASE database username
    """
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    env_obj = UnixHostEnvironment()
    env_obj.ase_host_environment_parameters = ASEHostEnvironmentParameters()
    env_obj.ase_host_environment_parameters.db_user = \
        arguments['--update_ase_user']
    try:
        environment.update(dlpx_obj.server_session,
                           find_obj_by_name(
                               dlpx_obj.server_session, environment,
                               arguments['--env_name']).reference,
                           env_obj)
    except (HttpError, RequestError) as e:
        print_exception('\nERROR: Updating the ASE DB username '
                        'failed:\n{}\n'.format(e))
def activate_branch(dlpx_obj, branch_name):
    """
    Activates a branch

    :param dlpx_obj: Virtualization Engine session object
    :param branch_name: Name of the branch to activate
    """
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    try:
        branch_obj = find_obj_by_name(dlpx_obj.server_session, branch,
                                      branch_name)
        branch.activate(dlpx_obj.server_session, branch_obj.reference)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info('The branch {} was activated successfully.'.format(
            branch_name))
    except RequestError as e:
        print_exception('\nAn error occurred activating the '
                        'branch:\n{}'.format(e))
def update_branch(dlpx_obj, branch_name):
    """
    Updates a branch

    :param dlpx_obj: Virtualization Engine session object
    :param branch_name: Name of the branch to update
    """
    js_branch_obj = JSBranch()
    try:
        branch_obj = find_obj_by_name(dlpx_obj.server_session, branch,
                                      branch_name)
        branch.update(dlpx_obj.server_session, branch_obj.reference,
                      js_branch_obj)
        print_info('The branch {} was updated successfully.'.format(
            branch_name))
    except (DlpxException, HttpError, RequestError) as e:
        print_exception('\nERROR: The branch could not be updated. The '
                        'error was:\n\n{}'.format(e))
def delete_env(dlpx_obj, env_name):
    """
    Deletes an environment

    dlpx_obj: Virtualization Engine session object
    env_name: Name of the environment to delete
    """
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    env_obj = find_obj_by_name(dlpx_obj.server_session, environment, env_name)
    if env_obj:
        environment.delete(dlpx_obj.server_session, env_obj.reference)
        dlpx_obj.jobs[engine_name] = \
            dlpx_obj.server_session.last_job
    elif env_obj is None:
        print('Environment was not found in the Engine: {}'.format(env_name))
        sys.exit(1)
def update_host_address(dlpx_obj, old_host_address, new_host_address):
    """
    Update the given host
    """
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    old_host_obj = find_obj_by_name(dlpx_obj.server_session, host,
                                    old_host_address)
    if old_host_obj.type == "WindowsHost":
        host_obj = WindowsHost()
    else:
        host_obj = UnixHost()
    host_obj.address = new_host_address
    try:
        host.update(dlpx_obj.server_session, old_host_obj.reference, host_obj)
        print('Attempting to update {} to {}'.format(old_host_address,
                                                     new_host_address))
    except (DlpxException, RequestError) as e:
        print_exception('\nERROR: Updating the host {} '
                        'encountered an error:\n{}'.format(old_host_address,
                                                           e))
        sys.exit(1)
def dx_obj_operation(dlpx_obj, vdb_name, operation):
    """
    Function to start, stop, enable or disable a VDB

    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    :param vdb_name: Name of the object to stop/start/enable/disable
    :type vdb_name: str
    :param operation: enable or disable dSources and VDBs
    :type operation: str
    """
    print_debug('Searching for {} reference.\n'.format(vdb_name))
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    vdb_obj = find_obj_by_name(dlpx_obj.server_session, source, vdb_name)
    try:
        if vdb_obj:
            if operation == 'start':
                source.start(dlpx_obj.server_session, vdb_obj.reference)
            elif operation == 'stop':
                source.stop(dlpx_obj.server_session, vdb_obj.reference)
            elif operation == 'enable':
                source.enable(dlpx_obj.server_session, vdb_obj.reference)
            elif operation == 'disable':
                source.disable(dlpx_obj.server_session, vdb_obj.reference)
            elif operation == 'force_disable':
                disable_params = SourceDisableParameters()
                disable_params.attempt_cleanup = False
                source.disable(dlpx_obj.server_session, vdb_obj.reference,
                               disable_params)
            dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
    except (RequestError, HttpError, JobError, AttributeError) as e:
        print_exception('An error occurred while performing {} on {}:\n'
                        '{}'.format(operation, vdb_name, e))
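# Illustrative usage (not part of the original script): assuming an active
# GetSession object named dlpx_obj and a VDB named 'vdb_employees':
#
#   dx_obj_operation(dlpx_obj, 'vdb_employees', 'stop')
#   dx_obj_operation(dlpx_obj, 'vdb_employees', 'start')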
def main_workflow(engine):
    """
    This function actually runs the jobs.
    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously

    engine: Dictionary of engines
    """
    try:
        #Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])
        if arguments['--vdb']:
            #Get the database reference we are copying from the database name
            database_obj = find_obj_by_name(dx_session_obj.server_session,
                                            database, arguments['--vdb'])
    except DlpxException as e:
        print_exception('\nERROR: Engine {} encountered an error while '
                        '{}:\n{}\n'.format(engine['hostname'],
                                           arguments['--target'], e))
        sys.exit(1)
    thingstodo = ["thingtodo"]
    try:
        with dx_session_obj.job_mode(single_thread):
            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
                if len(thingstodo) > 0:
                    # Placeholder: each script dispatches its own operation
                    # here based on the parsed arguments.
                    if OPERATION:
                        method_call
                    elif OPERATION:
                        method_call
                    thingstodo.pop()
                # get all the jobs, then inspect them
                i = 0
                for j in dx_session_obj.jobs.keys():
                    job_obj = job.get(dx_session_obj.server_session,
                                      dx_session_obj.jobs[j])
                    print_debug(job_obj)
                    print_info('{}: Replication operations: {}'.format(
                        engine['hostname'], job_obj.job_state))
                    if job_obj.job_state in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
                        # If the job is in a non-running state, remove it
                        # from the running jobs list.
                        del dx_session_obj.jobs[j]
                    elif job_obj.job_state == 'RUNNING':
                        # If the job is in a running state, increment the
                        # running job count.
                        i += 1
                print_info('{}: {:d} jobs running.'.format(
                    engine['hostname'], i))
                # If we have running jobs, pause before repeating the checks.
                if len(dx_session_obj.jobs) > 0:
                    sleep(float(arguments['--poll']))
    except (HttpError, RequestError, JobError, DlpxException) as e:
        print_exception('ERROR: Could not complete replication '
                        'operation:{}'.format(e))
def link_mssql_dsource(engine_name):
    """
    Link an MSSQL dSource
    """
    link_params = LinkParameters()
    link_params.name = arguments['--dsource_name']
    link_params.link_data = MSSqlLinkData()
    try:
        env_obj_ref = find_obj_by_name(dx_session_obj.server_session,
                                       environment,
                                       arguments['--stage_env']).reference
        link_params.link_data.ppt_repository = find_dbrepo(
            dx_session_obj.server_session, 'MSSqlInstance', env_obj_ref,
            arguments['--stage_instance']).reference
        link_params.link_data.config = find_obj_by_name(
            dx_session_obj.server_session, sourceconfig,
            arguments['--dsource_name']).reference
        link_params.group = find_obj_by_name(
            dx_session_obj.server_session, group,
            arguments['--dx_group']).reference
    except DlpxException as e:
        print_exception('Could not link {}: {}\n'.format(
            arguments['--dsource_name'], e))
        sys.exit(1)
    if arguments['--backup_path'] != "auto":
        link_params.link_data.shared_backup_location = \
            arguments['--backup_path']
    if arguments['--backup_loc_passwd']:
        link_params.link_data.backup_location_credentials = {
            'type': 'PasswordCredential',
            'password': arguments['--backup_loc_passwd']}
        link_params.link_data.backup_location_user = \
            arguments['--backup_loc_user']
    link_params.link_data.db_credentials = {
        'type': 'PasswordCredential',
        'password': arguments['--db_passwd']}
    link_params.link_data.db_user = arguments['--db_user']
    link_params.link_data.sourcing_policy = SourcingPolicy()
    if arguments['--load_from_backup']:
        link_params.link_data.sourcing_policy.load_from_backup = True
    if arguments['--logsync']:
        link_params.link_data.sourcing_policy.logsync_enabled = True
    try:
        database.link(dx_session_obj.server_session, link_params)
        dx_session_obj.jobs[engine_name] = \
            dx_session_obj.server_session.last_job
        dx_session_obj.jobs[engine_name + 'snap'] = get_running_job(
            dx_session_obj.server_session, find_obj_by_name(
                dx_session_obj.server_session, database,
                arguments['--dsource_name']).reference)
    except (HttpError, RequestError, JobError) as e:
        print_exception('Database link failed for {}:\n{}\n'.format(
            arguments['--dsource_name'], e))
def create_ora_sourceconfig(engine_name, port_num=1521):
    """
    Create the sourceconfig for an Oracle dSource and link it.

    :param engine_name: Engine name, used as the key in the jobs dictionary
    :param port_num: Port number of the Oracle listener
    """
    create_ret = None
    env_obj = find_obj_by_name(dx_session_obj.server_session, environment,
                               arguments['--env_name'])
    try:
        sourceconfig_ref = find_obj_by_name(dx_session_obj.server_session,
                                            sourceconfig,
                                            arguments['--db_name']).reference
    except DlpxException:
        sourceconfig_ref = None
    repo_ref = find_dbrepo(dx_session_obj.server_session, 'OracleInstall',
                           env_obj.reference,
                           arguments['--db_install_path']).reference
    dsource_params = OracleSIConfig()
    connect_str = ('jdbc:oracle:thin:@' + arguments['--ip_addr'] + ':' +
                   str(port_num) + ':' + arguments['--db_name'])
    dsource_params.database_name = arguments['--db_name']
    dsource_params.unique_name = arguments['--db_name']
    dsource_params.repository = repo_ref
    dsource_params.instance = OracleInstance()
    dsource_params.instance.instance_name = arguments['--db_name']
    dsource_params.instance.instance_number = 1
    dsource_params.services = [{'type': 'OracleService',
                                'jdbcConnectionString': connect_str}]
    try:
        if sourceconfig_ref is None:
            create_ret = link_ora_dsource(
                sourceconfig.create(dx_session_obj.server_session,
                                    dsource_params), env_obj.primary_user)
        elif sourceconfig_ref is not None:
            create_ret = link_ora_dsource(sourceconfig_ref,
                                          env_obj.primary_user)
        print_info('Created and linked the dSource {} with reference '
                   '{}.\n'.format(arguments['--db_name'], create_ret))
        link_job_ref = dx_session_obj.server_session.last_job
        link_job_obj = job.get(dx_session_obj.server_session, link_job_ref)
        while link_job_obj.job_state not in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
            print_info('Waiting three seconds for link job to complete, '
                       'and sync to begin')
            sleep(3)
            link_job_obj = job.get(dx_session_obj.server_session,
                                   link_job_ref)
        #Add the snapsync job to the jobs dictionary
        dx_session_obj.jobs[engine_name + 'snap'] = get_running_job(
            dx_session_obj.server_session, find_obj_by_name(
                dx_session_obj.server_session, database,
                arguments['--dsource_name']).reference)
        print_debug('Snapshot Job Reference: {}.\n'.format(
            dx_session_obj.jobs[engine_name + 'snap']))
    except (HttpError, RequestError) as e:
        print_exception('ERROR: Could not create the sourceconfig:\n'
                        '{}'.format(e))
        sys.exit(1)
def main_workflow(engine):
    """
    This function actually runs the jobs.
    Use the @run_async decorator to run this function asynchronously.
    This allows us to run against multiple Delphix Engines simultaneously

    engine: Dictionary containing engine information
    """
    #Establish these variables as empty for use later
    environment_obj = None
    source_objs = None
    jobs = {}
    try:
        #Setup the connection to the Delphix Engine
        dx_session_obj.serversess(engine['ip_address'], engine['username'],
                                  engine['password'])
        group_obj = find_obj_by_name(dx_session_obj.server_session, group,
                                     arguments['--target_grp'])
        #Get the reference of the target environment.
        print_debug('Getting environment for %s\n' % (host_name), debug)
        #Get the environment object by the hostname
        environment_obj = find_obj_by_name(dx_session_obj.server_session,
                                           environment, host_name)
    except DlpxException as e:
        print('\nERROR: Engine %s encountered an error while provisioning '
              '%s:\n%s\n' % (engine['hostname'], arguments['--target'], e))
        sys.exit(1)
    print_debug('Getting database information for %s\n' %
                (arguments['--source']), debug)
    try:
        #Get the database reference we are copying from the database name
        database_obj = find_obj_by_name(dx_session_obj.server_session,
                                        database, arguments['--source'])
    except DlpxException:
        return
    thingstodo = ["thingtodo"]
    #reset the running job count before we begin
    i = 0
    try:
        with dx_session_obj.job_mode(single_thread):
            while len(jobs) > 0 or len(thingstodo) > 0:
                arg_type = arguments['--type'].lower()
                if len(thingstodo) > 0:
                    if arg_type == "oracle":
                        create_oracle_si_vdb(engine, jobs, database_name,
                                             group_obj, environment_obj,
                                             database_obj,
                                             arguments['--prerefresh'],
                                             arguments['--postrefresh'],
                                             arguments['--prerollback'],
                                             arguments['--postrollback'],
                                             arguments['--configure-clone'])
                    elif arg_type == "ase":
                        create_ase_vdb(engine, server, jobs, group_obj,
                                       database_name, environment_obj,
                                       database_obj)
                    elif arg_type == "mssql":
                        create_mssql_vdb(engine, jobs, group_obj,
                                         database_name, environment_obj,
                                         database_obj)
                    elif arg_type == "vfiles":
                        create_vfiles_vdb(engine, jobs, group_obj,
                                          database_name, environment_obj,
                                          database_obj,
                                          arguments['--prerefresh'],
                                          arguments['--postrefresh'],
                                          arguments['--prerollback'],
                                          arguments['--postrollback'],
                                          arguments['--configure-clone'])
                    thingstodo.pop()
                #get all the jobs, then inspect them
                i = 0
                for j in jobs.keys():
                    job_obj = job.get(dx_session_obj.server_session, jobs[j])
                    print_debug(job_obj, debug)
                    print_info(engine["hostname"] + ": VDB Provision: " +
                               job_obj.job_state)
                    if job_obj.job_state in ["CANCELED", "COMPLETED",
                                             "FAILED"]:
                        #If the job is in a non-running state, remove it from
                        # the running jobs list.
                        del jobs[j]
                    else:
                        #If the job is in a running state, increment the
                        # running job count.
                        i += 1
                print_info('%s: %s jobs running.' % (engine['hostname'],
                                                     str(i)))
                #If we have running jobs, pause before repeating the checks.
                if len(jobs) > 0:
                    sleep(float(arguments['--poll']))
    except (DlpxException, JobError) as e:
        print('\nError while provisioning %s:\n%s' % (database_name,
                                                      e.message))
        sys.exit(1)
def create_oracle_si_vdb(engine, jobs, vdb_name, vdb_group_obj,
                         environment_obj, container_obj, pre_refresh=None,
                         post_refresh=None, pre_rollback=None,
                         post_rollback=None, configure_clone=None):
    '''
    Create an Oracle SI VDB
    '''
    vdb_obj = None
    try:
        vdb_obj = find_obj_by_name(dx_session_obj.server_session, database,
                                   vdb_name)
    except DlpxException:
        pass
    if vdb_obj is None:
        vdb_params = OracleProvisionParameters()
        vdb_params.open_resetlogs = True
        if arguments['--noopen']:
            vdb_params.open_resetlogs = False
        vdb_params.container = OracleDatabaseContainer()
        vdb_params.container.group = vdb_group_obj.reference
        vdb_params.container.name = vdb_name
        vdb_params.source = OracleVirtualSource()
        vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False
        if arguments['--instname']:
            inst_name = arguments['--instname']
        elif arguments['--instname'] is None:
            inst_name = vdb_name
        if arguments['--uniqname']:
            unique_name = arguments['--uniqname']
        elif arguments['--uniqname'] is None:
            unique_name = vdb_name
        if arguments['--db']:
            db = arguments['--db']
        elif arguments['--db'] is None:
            db = vdb_name
        vdb_params.source.mount_base = arguments['--mntpoint']
        if arguments['--mapfile']:
            vdb_params.source.file_mapping_rules = arguments['--mapfile']
        if arguments['--template']:
            template_obj = find_obj_by_name(dx_session_obj.server_session,
                                            database.template,
                                            arguments['--template'])
            vdb_params.source.config_template = template_obj.reference
        vdb_params.source_config = OracleSIConfig()
        vdb_params.source.operations = VirtualSourceOperations()
        if pre_refresh:
            vdb_params.source.operations.pre_refresh = [{
                'type': 'RunCommandOnSourceOperation',
                'command': pre_refresh}]
        if post_refresh:
            vdb_params.source.operations.post_refresh = [{
                'type': 'RunCommandOnSourceOperation',
                'command': post_refresh}]
        if pre_rollback:
            vdb_params.source.operations.pre_rollback = [{
                'type': 'RunCommandOnSourceOperation',
                'command': pre_rollback}]
        if post_rollback:
            vdb_params.source.operations.post_rollback = [{
                'type': 'RunCommandOnSourceOperation',
                'command': post_rollback}]
        if configure_clone:
            vdb_params.source.operations.configure_clone = [{
                'type': 'RunCommandOnSourceOperation',
                'command': configure_clone}]
        vdb_repo = find_dbrepo_by_environment_ref_and_install_path(
            engine, dx_session_obj.server_session, 'OracleInstall',
            environment_obj.reference, arguments['--envinst'])
        vdb_params.source_config.database_name = db
        vdb_params.source_config.unique_name = unique_name
        vdb_params.source_config.instance = OracleInstance()
        vdb_params.source_config.instance.instance_name = inst_name
        vdb_params.source_config.instance.instance_number = 1
        vdb_params.source_config.repository = vdb_repo.reference
        dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session)
        vdb_params.timeflow_point_parameters = \
            dx_timeflow_obj.set_timeflow_point(container_obj,
                                               arguments['--timestamp_type'],
                                               arguments['--timestamp'])
        print('{}\n\n\n'.format(vdb_params))
        print_info(engine["hostname"] + ": Provisioning " + vdb_name)
        database.provision(dx_session_obj.server_session, vdb_params)
        #Add the job into the jobs dictionary so we can track its progress
        jobs[engine['hostname']] = dx_session_obj.server_session.last_job
        #return the job object to the calling statement so that we can tell
        # if a job was created or not (will return None, if no job)
        return dx_session_obj.server_session.last_job
    else:
        raise DlpxException('\nERROR: %s: %s already exists\n' %
                            (engine['hostname'], vdb_name))
def create_vfiles_vdb(engine, jobs, vfiles_group, vfiles_name,
                      environment_obj, container_obj, pre_refresh=None,
                      post_refresh=None, pre_rollback=None,
                      post_rollback=None, configure_clone=None):
    '''
    Create a vFiles VDB
    '''
    vfiles_obj = None
    try:
        vfiles_obj = find_obj_by_name(dx_session_obj.server_session, database,
                                      vfiles_name)
    except DlpxException:
        pass
    if vfiles_obj is None:
        vfiles_repo = find_repo_by_environment_ref(engine,
                                                   'Unstructured Files',
                                                   environment_obj.reference)
        vfiles_params = AppDataProvisionParameters()
        vfiles_params.source = AppDataVirtualSource()
        vfiles_params.source_config = AppDataDirectSourceConfig()
        vdb_restart_reobj = re.compile('true', re.IGNORECASE)
        if vdb_restart_reobj.search(str(arguments['--vdb_restart'])):
            vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = True
        elif vdb_restart_reobj.search(str(arguments['--vdb_restart'])) is None:
            vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = False
        vfiles_params.container = {'type': 'AppDataContainer',
                                   'group': vfiles_group.reference,
                                   'name': vfiles_name}
        vfiles_params.source_config.name = arguments['--target']
        vfiles_params.source_config.path = arguments['--vfiles_path']
        vfiles_params.source_config.environment_user = \
            environment_obj.primary_user
        vfiles_params.source_config.repository = vfiles_repo.reference
        vfiles_params.source.parameters = {}
        vfiles_params.source.name = vfiles_name
        vfiles_params.source.operations = VirtualSourceOperations()
        if pre_refresh:
            vfiles_params.source.operations.pre_refresh = [{
                'type': 'RunCommandOnSourceOperation',
                'command': pre_refresh}]
        if post_refresh:
            vfiles_params.source.operations.post_refresh = [{
                'type': 'RunCommandOnSourceOperation',
                'command': post_refresh}]
        if pre_rollback:
            vfiles_params.source.operations.pre_rollback = [{
                'type': 'RunCommandOnSourceOperation',
                'command': pre_rollback}]
        if post_rollback:
            vfiles_params.source.operations.post_rollback = [{
                'type': 'RunCommandOnSourceOperation',
                'command': post_rollback}]
        if configure_clone:
            vfiles_params.source.operations.configure_clone = [{
                'type': 'RunCommandOnSourceOperation',
                'command': configure_clone}]
        if arguments['--timestamp_type'] is None:
            vfiles_params.timeflow_point_parameters = {
                'type': 'TimeflowPointSemantic',
                'container': container_obj.reference,
                'location': 'LATEST_POINT'}
        elif arguments['--timestamp_type'].upper() == 'SNAPSHOT':
            try:
                dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session)
                dx_snap_params = dx_timeflow_obj.set_timeflow_point(
                    container_obj, arguments['--timestamp_type'],
                    arguments['--timestamp'], arguments['--timeflow'])
            except RequestError as e:
                raise DlpxException('Could not set the timeflow point:\n%s'
                                    % (e))
            if dx_snap_params.type == 'TimeflowPointSemantic':
                vfiles_params.timeflow_point_parameters = {
                    'type': dx_snap_params.type,
                    'container': dx_snap_params.container,
                    'location': dx_snap_params.location}
            elif dx_snap_params.type == 'TimeflowPointTimestamp':
                vfiles_params.timeflow_point_parameters = {
                    'type': dx_snap_params.type,
                    'timeflow': dx_snap_params.timeflow,
                    'timestamp': dx_snap_params.timestamp}
        print_info('%s: Provisioning %s\n' % (engine["hostname"],
                                              vfiles_name))
        try:
            database.provision(dx_session_obj.server_session, vfiles_params)
        except (JobError, RequestError, HttpError) as e:
            raise DlpxException('\nERROR: Could not provision the database:'
                                '\n%s' % (e))
        #Add the job into the jobs dictionary so we can track its progress
        jobs[engine['hostname']] = dx_session_obj.server_session.last_job
        #return the job object to the calling statement so that we can tell
        # if a job was created or not (will return None, if no job)
        return dx_session_obj.server_session.last_job
    else:
        print_info('\nERROR %s: %s already exists. \n' % (engine['hostname'],
                                                          vfiles_name))
        return vfiles_obj.reference
def set_timeflow_point(engine, server, container_obj):
    """
    This returns the reference of the timestamp specified.

    engine: Dictionary of engine information
    server: Delphix Engine session object
    container_obj: VDB object
    """
    if arguments["--timestamp_type"].upper() == "SNAPSHOT":
        if arguments["--timestamp"].upper() == "LATEST":
            print_debug(engine["hostname"] + ": Using the latest Snapshot")
            timeflow_point_parameters = TimeflowPointSemantic()
            timeflow_point_parameters.location = "LATEST_SNAPSHOT"
        elif arguments["--timestamp"].startswith("@"):
            print_debug(engine["hostname"] + ": Using a named snapshot")
            snapshot_obj = find_snapshot_by_database_and_name(
                engine, server, container_obj, arguments["--timestamp"])
            if snapshot_obj:
                timeflow_point_parameters = TimeflowPointLocation()
                timeflow_point_parameters.timeflow = snapshot_obj.timeflow
                timeflow_point_parameters.location = (
                    snapshot_obj.latest_change_point.location)
            else:
                raise DlpxException(
                    "ERROR: Was unable to use the specified "
                    "snapshot %s for database %s.\n" %
                    (arguments["--timestamp"], container_obj.name))
        elif arguments["--timestamp"]:
            print_debug(engine["hostname"] +
                        ": Using a time-designated snapshot")
            snapshot_obj = find_snapshot_by_database_and_time(
                engine, server, container_obj, arguments["--timestamp"])
            if snapshot_obj:
                timeflow_point_parameters = TimeflowPointTimestamp()
                timeflow_point_parameters.timeflow = snapshot_obj.timeflow
                timeflow_point_parameters.timestamp = (
                    snapshot_obj.latest_change_point.timestamp)
            else:
                raise DlpxException(
                    "Was unable to find a suitable time"
                    " for %s for database %s" %
                    (arguments["--timestamp"], container_obj.name))
    elif arguments["--timestamp_type"].upper() == "TIME":
        if arguments["--timestamp"].upper() == "LATEST":
            timeflow_point_parameters = TimeflowPointSemantic()
            timeflow_point_parameters.location = "LATEST_POINT"
        elif arguments["--timestamp"]:
            timeflow_point_parameters = TimeflowPointTimestamp()
            timeflow_point_parameters.type = "TimeflowPointTimestamp"
            timeflow_obj = find_obj_by_name(engine, server, timeflow,
                                            arguments["--timeflow"])
            timeflow_point_parameters.timeflow = timeflow_obj.reference
            timeflow_point_parameters.timestamp = arguments["--timestamp"]
            return timeflow_point_parameters
    else:
        raise DlpxException(arguments["--timestamp_type"] +
                            " is not a valid timestamp_type. Exiting")
    timeflow_point_parameters.container = container_obj.reference
    return timeflow_point_parameters
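# Illustrative usage (not part of the original script): with the docopt
# arguments dictionary populated (for example --timestamp_type SNAPSHOT and
# --timestamp LATEST), an engine dictionary, a server session, and a VDB
# container object, the timeflow point could be resolved like this:
#
#   tf_point = set_timeflow_point(engine, server, container_obj)
#   print_debug('Timeflow point: {}'.format(tf_point))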
def refresh_database(vdb_name, timestamp, timestamp_type="SNAPSHOT"):
    """
    This function actually performs the refresh

    vdb_name: VDB to be refreshed
    timestamp: Point in time to refresh from
    timestamp_type: The type of timestamp being used for the refresh
    """
    dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session)
    container_obj = find_obj_by_name(dx_session_obj.server_session, database,
                                     vdb_name)
    source_obj = find_source_by_dbname(dx_session_obj.server_session,
                                       database, vdb_name)
    # Sanity check to make sure our container object has a reference
    if container_obj.reference:
        try:
            if container_obj.virtual is not True:
                raise DlpxException("{} is not a virtual object. "
                                    "Skipping.\n".format(container_obj.name))
            elif container_obj.staging is True:
                raise DlpxException("{} is a staging object. "
                                    "Skipping.\n".format(container_obj.name))
            elif container_obj.runtime.enabled == "ENABLED":
                print_info("\nINFO: Refreshing {} to {}\n".format(
                    container_obj.name, timestamp))
        # This exception is raised if refreshing a vFiles VDB
        # since AppDataContainer does not have virtual, staging or
        # enabled attributes.
        except AttributeError:
            pass
    # Sanity check to make sure our source object has a reference
    if source_obj.reference:
        # We can only refresh VDBs
        if source_obj.virtual is not True:
            print_info("\nINFO: {} is not a virtual object. "
                       "Skipping.\n".format(container_obj.name))
        # Ensure this source is not a staging database. We can't act upon
        # those.
        elif source_obj.staging is True:
            print_info("\nINFO: {} is a staging database. "
                       "Skipping.\n".format(container_obj.name))
        # Ensure the source is enabled. We can't refresh disabled databases.
        elif source_obj.runtime.enabled == "ENABLED":
            source_db = database.get(dx_session_obj.server_session,
                                     container_obj.provision_container)
            if not source_db:
                print_error("\nERROR: Was unable to retrieve the source "
                            "container for {}\n".format(container_obj.name))
            print_info("\nINFO: Refreshing {} from {}\n".format(
                container_obj.name, source_db.name))
            # If the VDB is an Oracle type, we need to use
            # OracleRefreshParameters
            # rewind_params = RollbackParameters()
            # rewind_params.timeflow_point_parameters = \
            #     dx_timeflow_obj.set_timeflow_point(container_obj,
            #                                        timestamp_type,
            #                                        timestamp)
            # print_debug('{}: {}'.format(engine_name, str(rewind_params)))
            if str(container_obj.reference).startswith("ORACLE"):
                refresh_params = OracleRefreshParameters()
            else:
                refresh_params = RefreshParameters()
            try:
                refresh_params.timeflow_point_parameters = (
                    dx_timeflow_obj.set_timeflow_point(source_db,
                                                       timestamp_type,
                                                       timestamp))
                print_info("\nINFO: Refresh params {}\n".format(
                    refresh_params))
                # Refresh the VDB
                database.refresh(dx_session_obj.server_session,
                                 container_obj.reference, refresh_params)
                dx_session_obj.jobs[dx_session_obj.server_session.address] = \
                    dx_session_obj.server_session.last_job
            except RequestError as e:
                print("\nERROR: Could not set timeflow point:\n%s\n" %
                      (e.message.action))
                sys.exit(1)
            except DlpxException as e:
                print("ERROR: Could not set timeflow point:\n%s\n" %
                      (e.message))
                sys.exit(1)
        # Don't do anything if the database is disabled
        else:
            print_info("\nINFO: {} is not enabled. Skipping sync.\n".format(
                container_obj.name))
def link_mssql_dsource(engine_name):
    """
    Link an MSSQL dSource
    """
    link_params = LinkParameters()
    link_params.name = arguments["--dsource_name"]
    link_params.link_data = MSSqlLinkData()
    try:
        env_obj_ref = find_obj_by_name(dx_session_obj.server_session,
                                       environment,
                                       arguments["--stage_env"]).reference
        link_params.link_data.ppt_repository = find_dbrepo(
            dx_session_obj.server_session, "MSSqlInstance", env_obj_ref,
            arguments["--stage_instance"]).reference
        link_params.link_data.config = find_obj_by_name(
            dx_session_obj.server_session, sourceconfig,
            arguments["--dsource_name"]).reference
        link_params.group = find_obj_by_name(
            dx_session_obj.server_session, group,
            arguments["--dx_group"]).reference
    except DlpxException as e:
        print_exception("Could not link {}: {}\n".format(
            arguments["--dsource_name"], e))
        sys.exit(1)
    if arguments["--backup_path"] != "auto":
        link_params.link_data.shared_backup_location = \
            arguments["--backup_path"]
    if arguments["--backup_loc_passwd"]:
        link_params.link_data.backup_location_credentials = {
            "type": "PasswordCredential",
            "password": arguments["--backup_loc_passwd"]}
        link_params.link_data.backup_location_user = \
            arguments["--backup_loc_user"]
    link_params.link_data.db_credentials = {
        "type": "PasswordCredential",
        "password": arguments["--db_passwd"]}
    link_params.link_data.db_user = arguments["--db_user"]
    link_params.link_data.sourcing_policy = SourcingPolicy()
    if arguments["--load_from_backup"]:
        link_params.link_data.sourcing_policy.load_from_backup = True
    if arguments["--sync_mode"]:
        link_params.link_data.validated_sync_mode = arguments["--sync_mode"]
    if arguments["--logsync"]:
        link_params.link_data.sourcing_policy.logsync_enabled = True
    try:
        database.link(dx_session_obj.server_session, link_params)
        dx_session_obj.jobs[engine_name] = \
            dx_session_obj.server_session.last_job
        dx_session_obj.jobs[engine_name + "snap"] = get_running_job(
            dx_session_obj.server_session, find_obj_by_name(
                dx_session_obj.server_session, database,
                arguments["--dsource_name"]).reference)
    except (HttpError, RequestError, JobError) as e:
        print_exception("Database link failed for {}:\n{}\n".format(
            arguments["--dsource_name"], e))
def main_workflow(engine):
    """
    This function is where we create our main workflow.
    Use the @run_async decorator to run this function asynchronously.
    The @run_async decorator allows us to run against multiple Delphix
    Engines simultaneously.
    """
    # Pull out the values from the dictionary for this engine
    engine_address = engine["ip_address"]
    engine_username = engine["username"]
    engine_password = engine["password"]
    # Establish these variables as empty for use later
    databases = []
    environment_obj = None
    source_objs = None
    jobs = {}

    # Setup the connection to the Delphix Engine
    server = serversess(engine_address, engine_username, engine_password)

    # If an environment/server was specified
    if host_name:
        print_debug(engine["hostname"] + ": Getting environment for " +
                    host_name)
        # Get the environment object by the hostname
        environment_obj = find_obj_by_name(engine, server, environment,
                                           host_name)
        if environment_obj is not None:
            # Get all the sources running on the server
            env_source_objs = source.get_all(
                server, environment=environment_obj.reference)
            # If the server doesn't have any objects, exit.
            if env_source_objs is None:
                print_error(host_name + " does not have any objects. Exiting")
                sys.exit(1)
            # If we are only filtering by the server, then put those objects
            # in the main list for processing
            if not (arguments["--group_name"] and database_name):
                source_objs = env_source_objs
                all_dbs = database.get_all(
                    server, no_js_container_data_source=True)
                databases = []
                for source_obj in source_objs:
                    if not source_obj.staging and source_obj.virtual:
                        database_obj = database.get(server,
                                                    source_obj.container)
                        if database_obj in all_dbs:
                            databases.append(database_obj)
        else:
            print_error(engine["hostname"] + ": No environment found for " +
                        host_name + ". Exiting")
            sys.exit(1)

    # If we specified a specific database by name....
    if arguments["--name"]:
        # Get the database object from the name
        database_obj = find_obj_by_name(engine, server, database,
                                        arguments["--name"])
        if database_obj:
            databases.append(database_obj)
    # Else if we specified a group to filter by....
    elif arguments["--group_name"]:
        print_debug(engine["hostname"] + ": Getting databases in group " +
                    arguments["--group_name"])
        # Get all the database objects in a group.
        databases = find_all_databases_by_group_name(
            engine, server, arguments["--group_name"])
    # Else if we specified a dSource to filter by....
    elif arguments["--dsource"]:
        print_debug(engine["hostname"] + ": Getting databases for dSource " +
                    arguments["--dsource"])
        # Get all the database objects for the dSource.
        databases = find_all_databases_by_dsource_name(
            engine, server, arguments["--dsource"])
    # Else, if we said all vdbs ...
    elif arguments["--all_vdbs"] and not arguments["--host"]:
        print_debug(engine["hostname"] + ": Getting all VDBs")
        # Grab all databases, but filter out the databases that are in
        # JetStream containers, because we can't refresh those this way.
        databases = database.get_all(server,
                                     no_js_container_data_source=True)
    elif arguments["--list_timeflows"]:
        list_timeflows(server)
    elif arguments["--list_snapshots"]:
        list_snapshots(server)

    # reset the running job count before we begin
    i = 0
    with job_mode(server):
        # While there are still running jobs or databases still to process....
        while len(jobs) > 0 or len(databases) > 0:
            # While there are databases still to process and we are still
            # under the max simultaneous jobs threshold (if specified)
            while len(databases) > 0 and (arguments["--parallel"] is None or
                                          i < int(arguments["--parallel"])):
                # Give us the next database in the list, and then remove it
                database_obj = databases.pop()
                # Get the source of the database.
                source_obj = find_source_by_database(engine, server,
                                                     database_obj)
                # If we applied the environment/server filter AND group
                # filter, find the intersecting matches
                if environment_obj is not None and arguments["--group_name"]:
                    match = False
                    for env_source_obj in env_source_objs:
                        if source_obj[0].reference in env_source_obj.reference:
                            match = True
                            break
                    if not match:
                        print_error(engine["hostname"] + ": " +
                                    database_obj.name +
                                    " does not exist on " + host_name +
                                    ". Exiting")
                        return
                # Refresh the database
                refresh_job = refresh_database(engine, server, jobs,
                                               source_obj[0], database_obj)
                # If refresh_job has any value, then we know that a job was
                # initiated.
                if refresh_job:
                    # increment the running job count
                    i += 1
                # Check to see if we are running at max parallel processes,
                # and report if so.
                if (arguments["--parallel"] is not None and
                        i >= int(arguments["--parallel"])):
                    print_info(engine["hostname"] + ": Max jobs reached (" +
                               str(i) + ")")
            # Inspect the running jobs and update the running job count
            i = update_jobs_dictionary(engine, server, jobs)
            print_info(engine["hostname"] + ": " + str(i) +
                       " jobs running. " + str(len(databases)) +
                       " jobs waiting to run")
            # If we have running jobs, pause before repeating the checks.
            if len(jobs) > 0:
                sleep(float(arguments["--poll"]))
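# A sketch (not from the original scripts) of how a caller might fan
# main_workflow() out across several engines. It assumes the @run_async
# decorator starts a thread and returns its handle, and that
# "dxtools_objects" is the dictionary of engine definitions normally parsed
# from dxtools.conf; both are assumptions, not confirmed by this file.
def example_run_all_engines(dxtools_objects):
    """Illustrative dispatcher: run main_workflow against every engine."""
    threads = []
    for delphix_engine in dxtools_objects.values():
        # Each call is assumed to return immediately with a thread handle
        threads.append(main_workflow(delphix_engine))
    # Wait for every engine's workflow to finish before returning
    for each in threads:
        each.join()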
def create_bookmark(dlpx_obj, bookmark_name, source_layout, branch_name=None,
                    tags=None, description=None):
    """
    Create the JS Bookmark

    :param dlpx_obj: Virtualization Engine session object
    :type dlpx_obj: lib.GetSession.GetSession
    :param bookmark_name: Name of the bookmark to create
    :type bookmark_name: basestring
    :param source_layout: Name of the source (template or container) to use
    :type source_layout: basestring
    :param branch_name: Name of the branch to use
    :type branch_name: basestring
    :param tags: Comma-delimited tags to use for the bookmark
    :type tags: basestring
    :param description: Description of the bookmark
    :type description: basestring
    """
    branch_ref = None
    source_layout_ref = None
    engine_name = dlpx_obj.dlpx_engines.keys()[0]
    js_bookmark_params = JSBookmarkCreateParameters()
    if branch_name:
        try:
            source_layout_ref = find_obj_by_name(dlpx_obj.server_session,
                                                 template,
                                                 source_layout).reference
        except DlpxException:
            source_layout_ref = find_obj_by_name(dlpx_obj.server_session,
                                                 container,
                                                 source_layout).reference
        for branch_obj in branch.get_all(dlpx_obj.server_session):
            if branch_name == branch_obj.name and \
                    source_layout_ref == branch_obj.data_layout:
                branch_ref = branch_obj.reference
                break
        if branch_ref is None:
            raise DlpxException('Set the --data_layout parameter equal to '
                                'the data layout of the bookmark.\n')
    elif branch_name is None:
        try:
            (source_layout_ref, branch_ref) = find_obj_by_name(
                dlpx_obj.server_session, template, source_layout, True)
        except DlpxException:
            (source_layout_ref, branch_ref) = find_obj_by_name(
                dlpx_obj.server_session, container, source_layout, True)
        if branch_ref is None:
            raise DlpxException('Could not find {} in engine {}'.format(
                source_layout, engine_name))
    js_bookmark_params.bookmark = JSBookmark()
    js_bookmark_params.bookmark.name = bookmark_name
    js_bookmark_params.bookmark.branch = branch_ref
    if tags:
        js_bookmark_params.bookmark.tags = tags.split(',')
    if description:
        js_bookmark_params.bookmark.description = description
    js_bookmark_params.timeline_point_parameters = {
        'sourceDataLayout': source_layout_ref,
        'type': 'JSTimelinePointLatestTimeInput'}
    try:
        bookmark.create(dlpx_obj.server_session, js_bookmark_params)
        dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job
        print_info('JS Bookmark {} was created successfully.'.format(
            bookmark_name))
    except (DlpxException, RequestError, HttpError) as e:
        print_exception('\nThe bookmark {} was not created. The error '
                        'was:\n\n{}'.format(bookmark_name, e))
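# A minimal usage sketch (not part of the original script). It assumes
# dlpx_obj is a lib.GetSession.GetSession object whose server_session has
# already been established; the template, branch, tag and description values
# shown here are hypothetical.
def example_create_nightly_bookmark(dlpx_obj):
    """Illustrative wrapper: bookmark the latest point on a JS template."""
    create_bookmark(dlpx_obj, 'jsbookmark1', 'js_template1',
                    branch_name='default', tags='nightly,qa',
                    description='Bookmark taken before the nightly test run')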