def _run_once(self):
    """Sample the ``used`` property of both datasets and report the pair to the observer.

    A missing destination dataset (replication has not created it yet) is
    reported as zero usage rather than treated as an error.
    """
    source_usage = get_property(self.src_shell, self.src_dataset, "used", int)

    try:
        destination_usage = get_property(self.dst_shell, self.dst_dataset, "used", int)
    except DatasetDoesNotExistException:
        logger.info("Destination dataset %r on shell %r does not exist yet", self.dst_dataset, self.dst_shell)
        destination_usage = 0

    # Skip reporting once `event` has been set; the lock presumably keeps this
    # check-and-report atomic w.r.t. whatever sets the event — confirm against
    # the enclosing class.
    with self.lock:
        if self.event.is_set():
            return

        self.observer(source_usage, destination_usage)
def check_target_type(replication_task: ReplicationTask, source_dataset: str, src_context: ReplicationContext,
                      dst_context: ReplicationContext):
    """Ensure an existing target dataset has the same ZFS ``type`` as the source.

    Raises ``ReplicationError`` when the target already exists but its ``type``
    property differs from the source's. A target that does not exist yet is
    acceptable — there is nothing to compare.
    """
    target_dataset = get_target_dataset(replication_task, source_dataset)
    source_dataset_type = get_property(src_context.shell, source_dataset, "type")

    try:
        target_dataset_type = get_property(dst_context.shell, target_dataset, "type")
    except DatasetDoesNotExistException:
        # No target yet, so no type conflict is possible.
        return

    if source_dataset_type != target_dataset_type:
        raise ReplicationError(f"Source {source_dataset!r} is a {source_dataset_type}, but target "
                               f"{target_dataset!r} already exists and is a {target_dataset_type}")
def run_replication_steps(step_templates: [ReplicationStepTemplate], observer=None):
    """Validate, plan and execute the replication steps described by `step_templates`.

    Works in three phases:
      1. Pre-flight: when the task requires `readonly=on` on targets, refuse to
         run if any target dataset exists without that property.
      2. Planning: for each template, decide the incremental base and snapshot
         list to send, handling the no-incremental-base cases (destroy the
         destination when `allow_from_scratch`, or raise).
      3. Execution: replicate each planned step and apply readonly handling.

    The first template (index 0) is the immediate target dataset; the rest are
    its children — several checks below apply only to the immediate target.
    """
    # Phase 1: readonly=on pre-flight check across all targets before doing any work.
    for step_template in step_templates:
        if step_template.replication_task.readonly == ReadOnlyBehavior.REQUIRE:
            # Absent from the cache (`.get(..., True)`) means the dataset does not
            # exist yet, which is fine — it will be created with the right property.
            if not step_template.dst_context.datasets_readonly.get(step_template.dst_dataset, True):
                message = (
                    f"Target dataset {step_template.dst_dataset!r} exists and does not have readonly=on property, "
                    "but replication task is set up to require this property. Refusing to replicate."
                )
                # Best-effort enrichment of the error message; failure to query
                # the type must not mask the original error.
                try:
                    target_type = get_property(step_template.dst_context.shell, step_template.dst_dataset, "type")
                except Exception:
                    pass
                else:
                    if target_type == "volume":
                        message += (
                            f" Please run \"zfs set readonly=on {step_template.dst_dataset}\" on the target system "
                            "to fix this.")

                raise ReplicationError(message)

    # Phase 2: build the replication plan.
    plan = []
    ignored_roots = set()
    for i, step_template in enumerate(step_templates):
        is_immediate_target_dataset = i == 0

        # Skip children of roots previously found to have no snapshots.
        ignore = False
        for ignored_root in ignored_roots:
            if is_child(step_template.src_dataset, ignored_root):
                logger.debug("Not replicating dataset %r because it's ancestor %r did not have any snapshots",
                             step_template.src_dataset, ignored_root)
                ignore = True
        if ignore:
            continue

        src_snapshots = step_template.src_context.datasets[step_template.src_dataset]
        dst_snapshots = step_template.dst_context.datasets.get(step_template.dst_dataset, [])

        incremental_base, snapshots, include_intermediate = get_snapshots_to_send(
            src_snapshots, dst_snapshots, step_template.replication_task, step_template.src_context.shell,
            step_template.src_dataset,
        )
        if incremental_base is None and snapshots:
            # Snapshots to send but no common base with the destination.
            if dst_snapshots:
                if step_template.replication_task.allow_from_scratch:
                    # Start over: destroy the destination recursively and drop it
                    # (and its children) from every cached destination-state dict
                    # so the rest of the run sees a clean slate.
                    logger.warning(
                        "No incremental base for replication task %r on dataset %r, destroying destination dataset",
                        step_template.replication_task.id, step_template.src_dataset,
                    )
                    step_template.dst_context.shell.exec(["zfs", "destroy", "-r", step_template.dst_dataset])
                    for dictionary in (
                        step_template.dst_context.datasets,
                        step_template.dst_context.datasets_encrypted,
                        step_template.dst_context.datasets_readonly,
                        step_template.dst_context.datasets_receive_resume_tokens,
                    ):
                        if dictionary is None:
                            continue
                        for k in list(dictionary.keys()):
                            if k == step_template.dst_dataset or k.startswith(f"{step_template.dst_dataset}/"):
                                dictionary.pop(k)
                else:
                    raise NoIncrementalBaseReplicationError(
                        f"No incremental base on dataset {step_template.src_dataset!r} and replication from scratch "
                        f"is not allowed"
                    )
            else:
                # Destination has no snapshots at all. If from-scratch replication
                # is forbidden, refuse to write into a destination that already
                # contains data.
                if not step_template.replication_task.allow_from_scratch:
                    if is_immediate_target_dataset:
                        # We are only interested in checking target datasets, not their children

                        allowed_empty_children = []
                        if step_template.replication_task.recursive:
                            # Child datasets that this recursive task would itself
                            # create are allowed to pre-exist (empty).
                            allowed_dst_child_datasets = {
                                get_target_dataset(step_template.replication_task, dataset)
                                for dataset in (set(step_template.src_context.datasets) -
                                                set(step_template.replication_task.exclude))
                                if dataset != step_template.src_dataset and is_child(dataset,
                                                                                    step_template.src_dataset)
                            }
                            existing_dst_child_datasets = {
                                dataset
                                for dataset in step_template.dst_context.datasets
                                if dataset != step_template.dst_dataset and is_child(dataset,
                                                                                     step_template.dst_dataset)
                            }
                            allowed_empty_children = list(allowed_dst_child_datasets & existing_dst_child_datasets)

                        ensure_has_no_data(step_template.dst_context.shell, step_template.dst_dataset,
                                           allowed_empty_children)

        if not snapshots:
            logger.info("No snapshots to send for replication task %r on dataset %r",
                        step_template.replication_task.id, step_template.src_dataset)
            # The immediate target must have something in common with the source,
            # otherwise the whole task cannot proceed.
            if is_immediate_target_dataset and incremental_base is None:
                raise ReplicationError(
                    f"Dataset {step_template.src_dataset!r} does not have any matching snapshots to replicate"
                )
            if not src_snapshots:
                # Remember snapshot-less roots so their children are skipped above.
                ignored_roots.add(step_template.src_dataset)
            continue

        if is_immediate_target_dataset and step_template.dst_dataset not in step_template.dst_context.datasets:
            # Target dataset does not exist, there is a chance that intermediate datasets also do not exist
            parent = os.path.dirname(step_template.dst_dataset)
            if "/" in parent:
                create_dataset(step_template.dst_context.shell, parent)

        # Encryption parameters apply only when creating the immediate target anew.
        encryption = None
        if is_immediate_target_dataset and step_template.dst_dataset not in step_template.dst_context.datasets:
            encryption = step_template.replication_task.encryption

        step_template.src_context.context.snapshots_total_by_replication_step_template[
            step_template] += len(snapshots)
        plan.append((step_template, incremental_base, snapshots, include_intermediate, encryption))

    # Phase 3: execute the plan.
    for step_template, incremental_base, snapshots, include_intermediate, encryption in plan:
        replicate_snapshots(step_template, incremental_base, snapshots, include_intermediate, encryption, observer)

        handle_readonly(step_template)