def find_repos(source_connection):
    """
    Args:
        source_connection (RemoteConnection): The connection associated with
            the remote environment to run repository discovery.

    Returns:
        A list of RepositoryDefinition objects, one per discovered
        Couchbase installation.
    """
    try:
        binary_paths = helper_lib.find_binary_path(source_connection)
        repositories = []
        for binary_path in binary_paths.split(';'):
            if helper_lib.check_dir_present(source_connection, binary_path):
                install_path = helper_lib.find_install_path(
                    source_connection, binary_path)
                shell_path = helper_lib.find_shell_path(
                    source_connection, binary_path)
                version = helper_lib.find_version(source_connection,
                                                  install_path)
                pretty_name = "Couchbase ({})".format(version)
                repository_definition = RepositoryDefinition(
                    cb_install_path=install_path,
                    cb_shell_path=shell_path,
                    version=version,
                    pretty_name=pretty_name)
                repositories.append(repository_definition)
        return repositories
    except RepositoryDiscoveryError as err:
        # Re-raise as a user-visible error while preserving the original
        # traceback (Python 2 three-argument raise syntax).
        raise err.to_user_error(), None, sys.exc_info()[2]
    except Exception as err:
        logger.debug("find_repos: Caught unexpected exception! " + err.message)
        raise
def find_repos(source_connection):
    """Discover Oracle installations on a Windows source environment by
    running the toolkit PowerShell scripts and building one
    RepositoryDefinition per installed Oracle home."""
    logger = setupLogger._setup_logger(__name__)

    env = {"DLPX_TOOLKIT_NAME": "Oracle on Windows"}
    delphixToolkitPath = executeScript.execute_powershell(
        source_connection, 'writeLibrary.ps1', env).strip('"')
    logger.debug("Delphix Toolkit path: {}".format(delphixToolkitPath))

    env = {
        "DLPX_TOOLKIT_NAME": "Oracle on Windows",
        "DLPX_TOOLKIT_WORKFLOW": "repository_discovery",
        "DLPX_TOOLKIT_PATH": delphixToolkitPath
    }
    repoDiscovery = executeScript.execute_powershell(source_connection,
                                                     'repoDiscovery.ps1', env)
    logger.debug("Repository discovered: {}".format(repoDiscovery))

    parsedRepositories = json.loads(repoDiscovery)
    logger.debug("parsedRepositories: {}".format(parsedRepositories))

    return [
        RepositoryDefinition(
            toolkit_name=installedRepository["toolkitName"],
            delphix_tookit_path=installedRepository["delphixToolkitPath"],
            pretty_name=installedRepository["prettyName"],
            ora_base=installedRepository["oraBase"],
            ora_edition=installedRepository["oraEdition"],
            ora_home=installedRepository["oraHome"])
        for installedRepository in parsedRepositories
    ]
def _internal_reconfigure(self, request):
    """Reconfigure operation wrapper.

    Executed while attaching a VDB during a virtual source enable job and
    returns a virtual source config.

    Args:
        request (ReconfigureRequest): Reconfigure operation arguments.

    Returns:
        ReconfigureResponse: A response containing the return value of the
        reconfigure operation, as a ReconfigureResult.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import SnapshotDefinition
    from generated.definitions import SourceConfigDefinition
    from generated.definitions import RepositoryDefinition

    if not self.reconfigure_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_RECONFIGURE)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)

    snapshot = SnapshotDefinition.from_dict(
        json.loads(request.snapshot.parameters.json))
    source_config = SourceConfigDefinition.from_dict(
        json.loads(request.source_config.parameters.json))
    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))

    config = self.reconfigure_impl(snapshot=snapshot,
                                   repository=repository,
                                   source_config=source_config,
                                   virtual_source=virtual_source)

    # Validate that this is a SourceConfigDefinition object.
    if not isinstance(config, SourceConfigDefinition):
        raise IncorrectReturnTypeError(Op.VIRTUAL_RECONFIGURE, type(config),
                                       SourceConfigDefinition)

    reconfigure_response = platform_pb2.ReconfigureResponse()
    reconfigure_response.return_value.source_config.parameters.json = (
        json.dumps(config.to_dict()))
    return reconfigure_response
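# The wrapper above calls self.reconfigure_impl(snapshot=..., repository=...,
# source_config=..., virtual_source=...) and requires a SourceConfigDefinition
# in return. Below is a minimal plugin-side sketch, not the actual
# implementation: it assumes the plugin's generated definitions package is
# present and uses hypothetical schema fields (mount_location, db_name).
# Registration with the SDK's Plugin object is omitted.
from generated.definitions import SourceConfigDefinition


def reconfigure(virtual_source, repository, source_config, snapshot):
    # Point the source config at the re-attached primary mount; the wrapper
    # serializes whatever SourceConfigDefinition is returned into the
    # ReconfigureResponse.
    return SourceConfigDefinition(
        mount_location=virtual_source.mounts[0].mount_path,  # hypothetical field
        db_name=source_config.db_name)                       # hypothetical field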
def _internal_status(self, request):
    """Virtual status operation wrapper.

    Executed to get the status of a virtual source - active or inactive.

    Run status operation for a virtual source.

    Args:
        request (VirtualStatusRequest): Virtual status operation arguments.

    Returns:
        VirtualStatusResponse: A response containing VirtualStatusResult if
        successful or PluginErrorResult in case of an error.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import RepositoryDefinition
    from generated.definitions import SourceConfigDefinition

    #
    # While virtual.status() is not a required operation, this should
    # not be called if it wasn't implemented.
    #
    if not self.status_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_STATUS)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)

    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))
    source_config = SourceConfigDefinition.from_dict(
        json.loads(request.source_config.parameters.json))

    virtual_status = self.status_impl(repository=repository,
                                      source_config=source_config,
                                      virtual_source=virtual_source)

    # Validate that this is a Status object.
    if not isinstance(virtual_status, Status):
        raise IncorrectReturnTypeError(Op.VIRTUAL_STATUS,
                                       type(virtual_status), Status)

    virtual_status_response = platform_pb2.VirtualStatusResponse()
    virtual_status_response.return_value.status = virtual_status.value
    return virtual_status_response
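# The status wrapper only checks that status_impl returns a Status object and
# forwards status.value. A minimal sketch follows; the import path for Status,
# the process name and the pgrep command are assumptions for illustration,
# not part of the wrapper above.
from dlpx.virtualization import libs  # assumed import path
from dlpx.virtualization.platform import Status  # assumed import path


def virtual_status(virtual_source, repository, source_config):
    # Report ACTIVE if the database process is running on the target host,
    # INACTIVE otherwise.
    result = libs.run_bash(virtual_source.connection, "pgrep -x mydbd")
    return Status.ACTIVE if result.exit_code == 0 else Status.INACTIVE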
def _internal_pre_snapshot(self, request):
    """Virtual pre snapshot operation wrapper.

    Executed before creating a ZFS snapshot. This plugin operation is run
    prior to creating a snapshot for a virtual source.

    Args:
        request (VirtualPreSnapshotRequest): Virtual pre snapshot operation
            arguments.

    Returns:
        VirtualPreSnapshotResponse: A response containing
        VirtualPreSnapshotResult if successful or PluginErrorResult in case
        of an error.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import RepositoryDefinition
    from generated.definitions import SourceConfigDefinition

    #
    # While virtual.pre_snapshot() is not a required operation, this should
    # not be called if it wasn't implemented.
    #
    if not self.pre_snapshot_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_PRE_SNAPSHOT)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)

    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))
    source_config = SourceConfigDefinition.from_dict(
        json.loads(request.source_config.parameters.json))

    self.pre_snapshot_impl(repository=repository,
                           source_config=source_config,
                           virtual_source=virtual_source)

    virtual_pre_snapshot_response = (
        platform_pb2.VirtualPreSnapshotResponse())
    virtual_pre_snapshot_response.return_value.CopyFrom(
        platform_pb2.VirtualPreSnapshotResult())
    return virtual_pre_snapshot_response
def _internal_unconfigure(self, request):
    """Unconfigure operation wrapper.

    Executed when disabling or deleting an existing virtual source which
    has already been mounted to a target environment. This plugin operation
    is run before unmounting the virtual source from the target environment.

    Args:
        request (UnconfigureRequest): Unconfigure operation arguments.

    Returns:
        UnconfigureResponse: A response containing UnconfigureResult if
        successful or PluginErrorResult in case of an error.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import RepositoryDefinition
    from generated.definitions import SourceConfigDefinition

    #
    # While virtual.unconfigure() is not a required operation, this should
    # not be called if it wasn't implemented.
    #
    if not self.unconfigure_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_UNCONFIGURE)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)

    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))
    source_config = SourceConfigDefinition.from_dict(
        json.loads(request.source_config.parameters.json))

    self.unconfigure_impl(repository=repository,
                          source_config=source_config,
                          virtual_source=virtual_source)

    unconfigure_response = platform_pb2.UnconfigureResponse()
    unconfigure_response.return_value.CopyFrom(
        platform_pb2.UnconfigureResult())
    return unconfigure_response
def _internal_direct_pre_snapshot(self, request):
    """Pre Snapshot Wrapper for direct plugins.

    Executed before creating a snapshot. This plugin operation is run prior
    to creating a snapshot for a direct source.

    Args:
        request (DirectPreSnapshotRequest): Pre Snapshot arguments.

    Returns:
        DirectPreSnapshotResponse: A response containing
        DirectPreSnapshotResult if successful or PluginErrorResult in case
        of an error.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import RepositoryDefinition
    from generated.definitions import LinkedSourceDefinition
    from generated.definitions import SourceConfigDefinition

    #
    # While linked.pre_snapshot() is not a required operation, this should
    # not be called if it wasn't implemented.
    #
    if not self.pre_snapshot_impl:
        raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT)

    direct_source_definition = LinkedSourceDefinition.from_dict(
        json.loads(request.direct_source.linked_source.parameters.json))
    direct_source = DirectSource(
        guid=request.direct_source.linked_source.guid,
        connection=RemoteConnection.from_proto(
            request.direct_source.connection),
        parameters=direct_source_definition)

    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))
    source_config = SourceConfigDefinition.from_dict(
        json.loads(request.source_config.parameters.json))

    self.pre_snapshot_impl(direct_source=direct_source,
                           repository=repository,
                           source_config=source_config)

    direct_pre_snapshot_response = platform_pb2.DirectPreSnapshotResponse()
    direct_pre_snapshot_response.return_value.CopyFrom(
        platform_pb2.DirectPreSnapshotResult())
    return direct_pre_snapshot_response
def _internal_initialize(self, request):
    """Initialize operation wrapper.

    Executed during VDB creation after mounting onto the target environment.

    Run initialize operation for an empty virtual source.

    Args:
        request (InitializeRequest): Initialize operation arguments.

    Returns:
        InitializeResponse: A response containing InitializeResult if
        successful or PluginErrorResult in case of an error.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import RepositoryDefinition
    from generated.definitions import SourceConfigDefinition

    if not self.initialize_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_INITIALIZE)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)
    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))

    config = self.initialize_impl(repository=repository,
                                  virtual_source=virtual_source)

    # Validate that this is a SourceConfigDefinition object.
    if not isinstance(config, SourceConfigDefinition):
        raise IncorrectReturnTypeError(Op.VIRTUAL_INITIALIZE, type(config),
                                       SourceConfigDefinition)

    initialize_response = platform_pb2.InitializeResponse()
    initialize_response.return_value.source_config.parameters.json = (
        json.dumps(config.to_dict()))
    return initialize_response
def _internal_start(self, request):
    """Start operation wrapper.

    Executed after attaching a VDB during a virtual source enable job to
    start the database.

    Args:
        request (StartRequest): Start operation arguments.

    Returns:
        StartResponse: A response containing StartResult if successful or
        PluginErrorResult in case of an error.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import RepositoryDefinition
    from generated.definitions import SourceConfigDefinition

    #
    # While virtual.start() is not a required operation, this should
    # not be called if it wasn't implemented.
    #
    if not self.start_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_START)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)

    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))
    source_config = SourceConfigDefinition.from_dict(
        json.loads(request.source_config.parameters.json))

    self.start_impl(repository=repository,
                    source_config=source_config,
                    virtual_source=virtual_source)

    start_response = platform_pb2.StartResponse()
    start_response.return_value.CopyFrom(platform_pb2.StartResult())
    return start_response
def find_mysql_binaries(connection):
    """Locate MySQL server binaries on the source host and return a list of
    RepositoryDefinition objects, one per valid mysqld installation."""
    logger.debug("operations.find_mysql_binaries()")
    baseName = ""
    version = ""
    dirName = ""
    prettyName = ""
    repositories = []
    try:
        bashresult = runbash(connection, CommandFactory.find_binary_path(),
                             None)
        repoList = bashresult.stdout.strip()
        stderr = bashresult.stderr.strip()
        exitcode = bashresult.exit_code
        logger.debug("find_mysql_binaries > repoList > \n" + repoList)
        if exitcode != 0:
            logger.debug("find_mysql_binaries > exit code > " + str(exitcode))
            raise RepositoryDiscoveryError(stderr)
        elif repoList == "" or repoList is None:
            logger.debug("find_mysql_binaries > No MySQL repositories found")
        else:
            for repoPath in repoList.splitlines():
                logger.debug("Parsing repository at " + repoPath)
                if not utils.validate_repository(repoPath):
                    logger.debug("Invalid repository path. Skipping")
                    continue
                baseName = os.path.basename(repoPath)
                dirName = os.path.dirname(repoPath)
                bashresult = runbash(connection,
                                     CommandFactory.get_version(repoPath),
                                     None)
                versionStr = bashresult.stdout.strip()
                versionArr = versionStr.split(" ")
                version = versionArr[3]
                if version != "" and baseName == "mysqld":
                    # Pretty name is the "(MySQL ...)" portion of the version
                    # string followed by the version number.
                    prettyName = versionStr[versionStr.index("(MySQL"):]
                    prettyName = prettyName + " {}".format(version)
                    repository = RepositoryDefinition(name=prettyName,
                                                      install_path=dirName,
                                                      version=version)
                    repositories.append(repository)
    except RepositoryDiscoveryError as err:
        # Surface discovery failures as a user error while preserving the
        # original traceback (Python 2 three-argument raise syntax).
        raise RepositoryDiscoveryError(
            err.message).to_user_error(), None, sys.exc_info()[2]
    except Exception:
        raise
    return repositories
def repository_discovery(source_connection):
    """Discover MongoDB installations on the source environment by running
    the discover_repos.sh helper script and parsing its JSON output."""
    common.add_debug_heading_block("Start Repository Discovery")
    helpers._record_hook("repository_discovery", source_connection)
    env = {
        "DELPHIX_DIR": source_connection.environment.host.binary_path,
        "DLPX_PLUGIN_WORKFLOW": 'repoDiscovery',
        "DLPX_TOOLKIT_WORKFLOW": 'repoDiscovery'
    }
    logger.debug("env: {}".format(env))
    repositories = []

    script_content = pkgutil.get_data('resources', 'discover_repos.sh')
    # logger.debug("discover_repos_repository_script: {}".format(script_content))
    res = libs.run_bash(source_connection, script_content, env)
    logger.debug("res = {}".format(res))
    logger.debug("res.stdout = {}".format(res.stdout))
    repodiscovery = json.loads(res.stdout)
    logger.debug(repodiscovery)
    for item in repodiscovery:
        logger.debug("item: {}".format(item))
        repository = RepositoryDefinition(
            version=item['version'],
            mongo_install_path=item['mongo_install_path'],
            mongo_shell_path=item['mongo_shell_path'],
            pretty_name=item['pretty_name'])
        repositories.append(repository)

    # Write library file for future use:
    # env = {
    #     "DELPHIX_DIR": source_connection.environment.host.binary_path,
    #     "DLPX_PLUGIN_WORKFLOW": 'sourceConfigDiscovery',
    #     "MONGO_LIBRARY_SOURCE": pkgutil.get_data('resources', 'library.sh')
    # }
    # script_content = pkgutil.get_data('resources', 'write_library.sh')
    # res = libs.run_bash(source_connection, script_content, env)
    # data = json.loads(res.stdout)
    # logger.debug(data)

    common.add_debug_heading_block("End Repository Discovery")
    return repositories
def repository_discovery(source_connection):
    """Discover MySQL installations by running repoDiscovery.sh on the source
    host and converting its JSON output into RepositoryDefinition objects."""
    # RepositoryDefinition is an object generated from the
    # repositoryDefinition schema. In order to use it locally you must run
    # the 'build -g' command provided by the SDK tools from the plugin's
    # root directory.
    repositories = []
    binary_path = source_connection.environment.host.binary_path
    library_script = pkgutil.get_data('resources', 'library.sh')
    environment_vars = {
        "DLPX_LIBRARY_SOURCE": library_script,
        "DLPX_BIN": binary_path
    }
    find_mysql_binary = pkgutil.get_data('resources', 'repoDiscovery.sh')
    result = libs.run_bash(source_connection, find_mysql_binary,
                           environment_vars, check=True)
    output = result.stdout.strip()
    error = result.stderr.strip()
    exit_code = result.exit_code
    if exit_code != 0:
        logger.debug("Error is : " + error)
        raise RepositoryDiscoveryError("Exception while discovering:" + error)
    else:
        logger.debug("Output: " + output)
        # Process the repository JSON emitted by the discovery script.
        repos_js = json.loads(output)
        for repo_js in repos_js:
            path = repo_js['installPath']
            version = repo_js['version']
            prettyName = repo_js['prettyName'].split("/bin")[1]
            repository = RepositoryDefinition(name=prettyName,
                                              install_path=path,
                                              version=version)
            repositories.append(repository)
    logger.debug("output:" + output)
    return repositories
def _internal_source_config(self, request):
    """Source config discovery wrapper.

    Executed when adding or refreshing an environment. This plugin operation
    is run after discovering repositories and before persisting/updating
    repository and source config data in MDS. It returns a list of source
    configs discovered on the environment for a given repository.

    Args:
        request (SourceConfigDiscoveryRequest): Source Config Discovery
            arguments.

    Returns:
        SourceConfigDiscoveryResponse: The return value of the source config
        discovery operation.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import RepositoryDefinition
    from generated.definitions import SourceConfigDefinition

    def to_protobuf(source_config):
        parameters = common_pb2.PluginDefinedObject()
        parameters.json = json.dumps(source_config.to_dict())
        source_config_protobuf = common_pb2.SourceConfig()
        source_config_protobuf.parameters.CopyFrom(parameters)
        return source_config_protobuf

    if not self.source_config_impl:
        raise OperationNotDefinedError(Op.DISCOVERY_SOURCE_CONFIG)

    repository_definition = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))

    source_configs = self.source_config_impl(
        source_connection=RemoteConnection.from_proto(
            request.source_connection),
        repository=repository_definition)

    # Validate that this is a list of SourceConfigDefinition objects.
    if not isinstance(source_configs, list):
        raise IncorrectReturnTypeError(Op.DISCOVERY_SOURCE_CONFIG,
                                       type(source_configs),
                                       [SourceConfigDefinition])

    if not all(
            isinstance(config, SourceConfigDefinition)
            for config in source_configs):
        raise IncorrectReturnTypeError(
            Op.DISCOVERY_SOURCE_CONFIG,
            [type(config) for config in source_configs],
            [SourceConfigDefinition])

    source_config_discovery_response = (
        platform_pb2.SourceConfigDiscoveryResponse())
    source_config_protobuf_list = [
        to_protobuf(config) for config in source_configs
    ]
    source_config_discovery_response.return_value.source_configs.extend(
        source_config_protobuf_list)
    return source_config_discovery_response
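# The wrapper above passes source_connection and the discovered repository to
# source_config_impl and insists on a list of SourceConfigDefinition objects
# (an empty list is valid when nothing is found). A minimal sketch follows;
# the discovery command, the data directory layout and the schema fields
# (path, port) are illustrative assumptions.
from dlpx.virtualization import libs  # assumed import path
from generated.definitions import SourceConfigDefinition


def source_config_discovery(source_connection, repository):
    # List candidate instance directories under a known location and build
    # one source config per instance found.
    result = libs.run_bash(source_connection,
                           "ls /var/lib/mydb 2>/dev/null || true")
    source_configs = []
    for instance in result.stdout.split():
        source_configs.append(
            SourceConfigDefinition(
                path="/var/lib/mydb/" + instance,  # hypothetical field
                port=5432))                        # hypothetical field
    return source_configs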
def _internal_mount_specification(self, request):
    """Virtual mount spec operation wrapper.

    Executed to fetch the mount and ownership specs before mounting onto a
    target environment.

    Run mount spec operation for a virtual source.

    Args:
        request (VirtualMountSpecRequest): Virtual mount spec operation
            arguments.

    Returns:
        VirtualMountSpecResponse: A response containing the return value of
        the virtual mount spec operation, as a VirtualMountSpecResult.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import RepositoryDefinition

    def to_protobuf_single_mount(single_mount):
        single_mount_protobuf = common_pb2.SingleSubsetMount()
        environment_protobuf = single_mount.remote_environment.to_proto()
        single_mount_protobuf.remote_environment.CopyFrom(
            environment_protobuf)
        single_mount_protobuf.mount_path = single_mount.mount_path
        if single_mount.shared_path:
            single_mount_protobuf.shared_path = single_mount.shared_path
        return single_mount_protobuf

    def to_protobuf_ownership_spec(ownership_spec):
        ownership_spec_protobuf = common_pb2.OwnershipSpec()
        ownership_spec_protobuf.uid = ownership_spec.uid
        ownership_spec_protobuf.gid = ownership_spec.gid
        return ownership_spec_protobuf

    if not self.mount_specification_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_MOUNT_SPEC)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)
    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))

    virtual_mount_spec = self.mount_specification_impl(
        repository=repository, virtual_source=virtual_source)

    # Validate that this is a MountSpecification object.
    if not isinstance(virtual_mount_spec, MountSpecification):
        raise IncorrectReturnTypeError(Op.VIRTUAL_MOUNT_SPEC,
                                       type(virtual_mount_spec),
                                       MountSpecification)

    virtual_mount_spec_response = platform_pb2.VirtualMountSpecResponse()

    if virtual_mount_spec.ownership_specification:
        ownership_spec = to_protobuf_ownership_spec(
            virtual_mount_spec.ownership_specification)
        virtual_mount_spec_response.return_value.ownership_spec.CopyFrom(
            ownership_spec)

    mounts_list = [
        to_protobuf_single_mount(m) for m in virtual_mount_spec.mounts
    ]
    virtual_mount_spec_response.return_value.mounts.extend(mounts_list)
    return virtual_mount_spec_response
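# mount_specification_impl must return a MountSpecification whose mounts each
# carry a remote environment, a mount path and an optional shared path, plus
# an optional ownership spec exposing uid/gid (see the to_protobuf_* helpers
# above). A minimal sketch follows; the import path, the OwnershipSpecification
# class name, the mount path pattern and the uid/gid values are assumptions.
from dlpx.virtualization.platform import (Mount, MountSpecification,
                                          OwnershipSpecification)  # assumed


def mount_specification(virtual_source, repository):
    # Mount the dataset under a path derived from the VDB's guid and hand
    # ownership to the database OS user (illustrative uid/gid).
    mount = Mount(virtual_source.connection.environment,
                  "/mnt/provision/{}".format(virtual_source.guid))
    return MountSpecification([mount], OwnershipSpecification(1001, 1001))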
def _internal_post_snapshot(self, request):
    """Virtual post snapshot operation wrapper.

    Executed after creating a ZFS snapshot. This plugin operation is run
    after creating a snapshot for a virtual source.

    Args:
        request (VirtualPostSnapshotRequest): Virtual post snapshot operation
            arguments.

    Returns:
        VirtualPostSnapshotResponse: A response containing the return value
        of the virtual post snapshot operation, as a
        VirtualPostSnapshotResult.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import RepositoryDefinition
    from generated.definitions import SnapshotDefinition
    from generated.definitions import SourceConfigDefinition

    def to_protobuf(snapshot):
        parameters = common_pb2.PluginDefinedObject()
        parameters.json = json.dumps(snapshot.to_dict())
        snapshot_protobuf = common_pb2.Snapshot()
        snapshot_protobuf.parameters.CopyFrom(parameters)
        return snapshot_protobuf

    if not self.post_snapshot_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_POST_SNAPSHOT)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)

    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))
    source_config = SourceConfigDefinition.from_dict(
        json.loads(request.source_config.parameters.json))

    snapshot = self.post_snapshot_impl(repository=repository,
                                       source_config=source_config,
                                       virtual_source=virtual_source)

    # Validate that this is a SnapshotDefinition object.
    if not isinstance(snapshot, SnapshotDefinition):
        raise IncorrectReturnTypeError(Op.VIRTUAL_POST_SNAPSHOT,
                                       type(snapshot), SnapshotDefinition)

    virtual_post_snapshot_response = (
        platform_pb2.VirtualPostSnapshotResponse())
    virtual_post_snapshot_response.return_value.snapshot.CopyFrom(
        to_protobuf(snapshot))
    return virtual_post_snapshot_response
def _internal_configure(self, request):
    """Configure operation wrapper.

    Executed just after cloning the captured data and mounting it to a
    target environment. Specifically, this plugin operation is run during
    provision and refresh, prior to taking the initial snapshot of the
    clone. This plugin operation is run before the user-customizable
    Configure Clone and Before Refresh operations are run. It must return
    a sourceConfig object that represents the new dataset.

    Configure the data to be usable on the target environment. For database
    data files, this may mean recovering from a crash consistent format or
    backup. For application files, this may mean reconfiguring XML files or
    rewriting hostnames and symlinks.

    Args:
        request (ConfigureRequest): Configure operation arguments.

    Returns:
        ConfigureResponse: A response containing the return value of the
        configure operation, as a ConfigureResult.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import VirtualSourceDefinition
    from generated.definitions import RepositoryDefinition
    from generated.definitions import SnapshotDefinition
    from generated.definitions import SourceConfigDefinition

    if not self.configure_impl:
        raise OperationNotDefinedError(Op.VIRTUAL_CONFIGURE)

    virtual_source_definition = VirtualSourceDefinition.from_dict(
        json.loads(request.virtual_source.parameters.json))
    mounts = [
        VirtualOperations._from_protobuf_single_subset_mount(m)
        for m in request.virtual_source.mounts
    ]
    virtual_source = VirtualSource(guid=request.virtual_source.guid,
                                   connection=RemoteConnection.from_proto(
                                       request.virtual_source.connection),
                                   parameters=virtual_source_definition,
                                   mounts=mounts)

    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))
    snapshot = SnapshotDefinition.from_dict(
        json.loads(request.snapshot.parameters.json))

    config = self.configure_impl(virtual_source=virtual_source,
                                 repository=repository,
                                 snapshot=snapshot)

    # Validate that this is a SourceConfigDefinition object.
    if not isinstance(config, SourceConfigDefinition):
        raise IncorrectReturnTypeError(Op.VIRTUAL_CONFIGURE, type(config),
                                       SourceConfigDefinition)

    configure_response = platform_pb2.ConfigureResponse()
    configure_response.return_value.source_config.parameters.json = (
        json.dumps(config.to_dict()))
    return configure_response
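# configure_impl receives the cloned-and-mounted data and must describe it as
# a SourceConfigDefinition before the initial snapshot of the clone is taken.
# A minimal sketch follows; the recovery command and the schema fields
# (mount_location, db_name) are hypothetical, and the generated definitions
# package is assumed to exist.
from dlpx.virtualization import libs  # assumed import path
from generated.definitions import SourceConfigDefinition


def configure(virtual_source, repository, snapshot):
    mount_path = virtual_source.mounts[0].mount_path
    # Bring the copied data files into a usable state on the target, e.g. a
    # crash recovery or config rewrite appropriate to the data platform.
    libs.run_bash(virtual_source.connection,
                  "mydb-recover --datadir {}".format(mount_path),  # illustrative command
                  check=True)
    return SourceConfigDefinition(mount_location=mount_path,  # hypothetical field
                                  db_name=snapshot.db_name)   # hypothetical field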
def _internal_staged_post_snapshot(self, request):
    """Post Snapshot Wrapper for staged plugins.

    Executed after creating a snapshot. This plugin operation is run after
    creating a snapshot for a staged source.

    Args:
        request (StagedPostSnapshotRequest): Post Snapshot arguments.

    Returns:
        StagedPostSnapshotResponse: A response containing the return value
        StagedPostSnapshotResult, which has the snapshot metadata on
        success. In case of errors, the response object will contain
        PluginErrorResult.
    """
    # Reasoning for method imports are in this file's docstring.
    from generated.definitions import RepositoryDefinition
    from generated.definitions import LinkedSourceDefinition
    from generated.definitions import SourceConfigDefinition
    from generated.definitions import SnapshotDefinition
    from generated.definitions import SnapshotParametersDefinition

    def to_protobuf(snapshot):
        parameters = common_pb2.PluginDefinedObject()
        parameters.json = json.dumps(snapshot.to_dict())
        snapshot_protobuf = common_pb2.Snapshot()
        snapshot_protobuf.parameters.CopyFrom(parameters)
        return snapshot_protobuf

    if not self.post_snapshot_impl:
        raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT)

    staged_source_definition = LinkedSourceDefinition.from_dict(
        json.loads(request.staged_source.linked_source.parameters.json))
    mount = Mount(
        remote_environment=RemoteEnvironment.from_proto(
            request.staged_source.staged_mount.remote_environment),
        mount_path=request.staged_source.staged_mount.mount_path,
        shared_path=request.staged_source.staged_mount.shared_path)
    staged_source = StagedSource(
        guid=request.staged_source.linked_source.guid,
        source_connection=RemoteConnection.from_proto(
            request.staged_source.source_connection),
        parameters=staged_source_definition,
        mount=mount,
        staged_connection=RemoteConnection.from_proto(
            request.staged_source.staged_connection))

    repository = RepositoryDefinition.from_dict(
        json.loads(request.repository.parameters.json))
    source_config = SourceConfigDefinition.from_dict(
        json.loads(request.source_config.parameters.json))

    snap_params = json.loads(request.snapshot_parameters.parameters.json)
    #
    # The snapshot_parameters object should be set to None if the json from
    # the protobuf is None to differentiate no snapshot parameters vs empty
    # snapshot parameters.
    #
    snapshot_parameters = (
        None if snap_params is None else
        SnapshotParametersDefinition.from_dict(snap_params))

    snapshot = self.post_snapshot_impl(
        staged_source=staged_source,
        repository=repository,
        source_config=source_config,
        optional_snapshot_parameters=snapshot_parameters)

    # Validate that this is a SnapshotDefinition object.
    if not isinstance(snapshot, SnapshotDefinition):
        raise IncorrectReturnTypeError(Op.LINKED_POST_SNAPSHOT,
                                       type(snapshot), SnapshotDefinition)

    response = platform_pb2.StagedPostSnapshotResponse()
    response.return_value.snapshot.CopyFrom(to_protobuf(snapshot))
    return response
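# For staged linked sources, post_snapshot_impl receives the staged source,
# repository, source config and (possibly None) snapshot parameters, and must
# return a SnapshotDefinition describing the data just captured. A minimal
# sketch follows; the snapshot schema fields (mount_path, resynced) and the
# resync flag on the parameters object are hypothetical, and the generated
# definitions package is assumed to exist.
from generated.definitions import SnapshotDefinition


def linked_post_snapshot(staged_source, repository, source_config,
                         optional_snapshot_parameters):
    # Distinguish a plain snapshot from a resync-triggered one; the wrapper
    # passes None when no snapshot parameters were supplied.
    resynced = bool(optional_snapshot_parameters
                    and optional_snapshot_parameters.resync)  # hypothetical field
    return SnapshotDefinition(
        mount_path=staged_source.mount.mount_path,  # hypothetical field
        resynced=resynced)                          # hypothetical field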