def from_data(cls, id, data: dict, periodic_snapshot_tasks: [PeriodicSnapshotTask]):
    """Build a replication task from a schema-validated ``data`` dict.

    Normalizes string-or-list fields, applies defaults, resolves the
    periodic snapshot tasks referenced by id, and enforces the invariants
    between recursion/replicate/exclude settings and the task direction.
    Raises ``ValueError`` on any invalid combination.
    """
    replication_task_validator.validate(data)

    # These fields accept either a single string or a list of strings;
    # normalize to a list so downstream code has one shape to handle.
    for key in ("source-dataset", "naming-schema", "also-include-naming-schema"):
        if isinstance(data.get(key), str):
            data[key] = [data[key]]

    # Optional fields and their defaults.
    for key, default in (
        ("exclude", []),
        ("properties", True),
        ("replicate", False),
        ("periodic-snapshot-tasks", []),
        ("only-matching-schedule", False),
        ("allow-from-scratch", False),
        ("hold-pending-snapshots", False),
        ("compression", None),
        ("speed-limit", None),
        ("dedup", False),
        ("large-block", False),
        ("embed", False),
        ("compressed", False),
        ("retries", 5),
        ("logging-level", "notset"),
    ):
        data.setdefault(key, default)

    # Resolve referenced periodic snapshot task ids to task objects,
    # preserving the first-match semantics of a linear scan.
    resolved_tasks = []
    for task_id in data["periodic-snapshot-tasks"]:
        task = next((t for t in periodic_snapshot_tasks if t.id == task_id), None)
        if task is None:
            raise ValueError(
                f"Periodic snapshot task {task_id!r} does not exist"
            )
        resolved_tasks.append(task)

    if data["recursive"]:
        # A recursive replication task must exclude at least everything
        # its bound periodic snapshot tasks exclude.
        for source_dataset in data["source-dataset"]:
            for task in resolved_tasks:
                if not is_child(source_dataset, task.dataset):
                    continue
                for excluded in task.exclude:
                    if excluded in data["exclude"]:
                        continue
                    raise ValueError(
                        "Replication tasks should exclude everything their periodic snapshot tasks exclude "
                        f"(task does not exclude {excluded!r} from periodic snapshot task "
                        f"{task.id!r})"
                    )

    if data["replicate"]:
        # Whole-filesystem replication forbids partial selection.
        if not data["recursive"]:
            raise ValueError(
                "Replication tasks that replicate entire filesystem should be recursive"
            )
        if data["exclude"]:
            raise ValueError(
                "Replication tasks that replicate entire filesystem can't exclude datasets"
            )
        if not data["properties"]:
            raise ValueError(
                "Replication tasks that replicate entire filesystem can't exclude properties"
            )

    data["direction"] = ReplicationDirection(data["direction"])
    direction = data["direction"]
    if direction == ReplicationDirection.PUSH:
        if "naming-schema" in data:
            raise ValueError("Push replication task can't have naming-schema")

        data.setdefault("also-include-naming-schema", [])
        # Push needs at least one source of snapshot names.
        if not (resolved_tasks or data["also-include-naming-schema"]):
            raise ValueError(
                "You must at least provide either periodic-snapshot-tasks or also-include-naming-schema "
                "for push replication task"
            )
    elif direction == ReplicationDirection.PULL:
        if "naming-schema" not in data:
            raise ValueError("You must provide naming-schema for pull replication task")
        if "also-include-naming-schema" in data:
            raise ValueError("Pull replication task can't have also-include-naming-schema")

        # Pull tasks carry their naming schema in also-include-naming-schema.
        data.setdefault("also-include-naming-schema", data.pop("naming-schema"))

    schedule, restrict_schedule = cls._parse_schedules(data)

    if direction == ReplicationDirection.PULL and data["hold-pending-snapshots"]:
        raise ValueError(
            "Pull replication tasks can't hold pending snapshots because they don't do source "
            "retention"
        )

    retention_policy = TargetSnapshotRetentionPolicy.from_data(data)

    if data["compression"]:
        compression = replication_compressions[data["compression"]]
    else:
        compression = None

    return cls(
        id,
        data["direction"],
        create_transport(data["transport"]),
        data["source-dataset"],
        data["target-dataset"],
        data["recursive"],
        data["exclude"],
        data["properties"],
        data["replicate"],
        resolved_tasks,
        data["also-include-naming-schema"],
        data["auto"],
        schedule,
        restrict_schedule,
        data["only-matching-schedule"],
        data["allow-from-scratch"],
        data["hold-pending-snapshots"],
        retention_policy,
        compression,
        data["speed-limit"],
        data["dedup"],
        data["large-block"],
        data["embed"],
        data["compressed"],
        data["retries"],
        # NOTE(review): relies on logging's private name-to-level table;
        # original code does the same.
        logging._nameToLevel[data["logging-level"].upper()],
    )
def from_data(cls, id, data: dict, periodic_snapshot_tasks: [PeriodicSnapshotTask]):
    """Build a replication task from a schema-validated ``data`` dict.

    Normalizes string-or-list fields, applies defaults, resolves referenced
    periodic snapshot tasks, validates direction-specific constraints
    (naming schema vs. name-regex, pending-snapshot holding), and returns
    the constructed task.  Raises ``ValueError`` on invalid configuration.

    Fixes: a pull task configured with only ``name-regex`` previously hit
    ``KeyError`` because ``data.pop("naming-schema")`` had no default even
    though ``naming-schema`` is optional when ``name-regex`` is given.
    """
    replication_task_validator.validate(data)

    # These fields accept a single string or a list; normalize to list.
    for k in ["source-dataset", "naming-schema", "also-include-naming-schema"]:
        if k in data and isinstance(data[k], str):
            data[k] = [data[k]]

    # Defaults for all optional fields.
    data.setdefault("exclude", [])
    data.setdefault("properties", True)
    data.setdefault("properties-exclude", [])
    data.setdefault("properties-override", {})
    data.setdefault("replicate", False)
    data.setdefault("encryption", None)
    data.setdefault("periodic-snapshot-tasks", [])
    data.setdefault("name-regex", None)
    data.setdefault("only-matching-schedule", False)
    data.setdefault("readonly", "ignore")
    data.setdefault("allow-from-scratch", False)
    data.setdefault("hold-pending-snapshots", False)
    data.setdefault("compression", None)
    data.setdefault("speed-limit", None)
    data.setdefault("dedup", False)
    data.setdefault("large-block", False)
    data.setdefault("embed", False)
    data.setdefault("compressed", False)
    data.setdefault("retries", 5)
    data.setdefault("logging-level", "notset")

    # Resolve referenced periodic snapshot task ids to task objects.
    resolved_periodic_snapshot_tasks = []
    for periodic_snapshot_task_id in data["periodic-snapshot-tasks"]:
        for periodic_snapshot_task in periodic_snapshot_tasks:
            if periodic_snapshot_task.id == periodic_snapshot_task_id:
                resolved_periodic_snapshot_tasks.append(periodic_snapshot_task)
                break
        else:
            raise ValueError(
                f"Periodic snapshot task {periodic_snapshot_task_id!r} does not exist"
            )

    if data["recursive"]:
        cls._validate_exclude(data, resolved_periodic_snapshot_tasks)

    if data["replicate"]:
        # Whole-filesystem replication forbids partial selection.
        if not data["recursive"]:
            raise ValueError(
                "Replication tasks that replicate entire filesystem should be recursive"
            )
        if data["exclude"]:
            raise ValueError(
                "Replication tasks that replicate entire filesystem can't exclude datasets"
            )
        if not data["properties"]:
            raise ValueError(
                "Replication tasks that replicate entire filesystem can't exclude properties"
            )

    if data["encryption"]:
        encryption = ReplicationEncryption(
            data["encryption"]["key"],
            KeyFormat(data["encryption"]["key-format"]),
            data["encryption"]["key-location"],
        )
    else:
        encryption = None

    data["direction"] = ReplicationDirection(data["direction"])
    if data["direction"] == ReplicationDirection.PUSH:
        if "naming-schema" in data:
            raise ValueError("Push replication task can't have naming-schema")

        data.setdefault("also-include-naming-schema", [])
        # Push needs at least one source of snapshot names.
        if (
            not resolved_periodic_snapshot_tasks
            and not data["also-include-naming-schema"]
            and not data["name-regex"]
        ):
            raise ValueError(
                "You must at least provide either periodic-snapshot-tasks or also-include-naming-schema or "
                "name-regex for push replication task"
            )
    elif data["direction"] == ReplicationDirection.PULL:
        if "naming-schema" not in data and not data["name-regex"]:
            raise ValueError(
                "You must provide naming-schema or name-regex for pull replication task"
            )
        if "also-include-naming-schema" in data:
            raise ValueError(
                "Pull replication task can't have also-include-naming-schema"
            )

        # Pull tasks carry their naming schema in also-include-naming-schema.
        # naming-schema may be absent when name-regex is used, so pop must
        # default to [] instead of raising KeyError.
        data.setdefault("also-include-naming-schema", data.pop("naming-schema", []))

    schedule, restrict_schedule = cls._parse_schedules(data)

    if data["direction"] == ReplicationDirection.PULL:
        if data["hold-pending-snapshots"]:
            raise ValueError(
                "Pull replication tasks can't hold pending snapshots because they don't do source "
                "retention"
            )

    if data["name-regex"]:
        try:
            # Anchor at the end; the group keeps the user's pattern intact.
            name_pattern = re.compile(f"({data['name-regex']})$")
        except Exception as e:
            # Chain the original error so the root cause stays visible.
            raise ValueError(f"Invalid name-regex: {e}") from e

        if data["also-include-naming-schema"]:
            raise ValueError(
                "naming-schema/also-include-naming-schema can't be used with name-regex"
            )
    else:
        name_pattern = None

    retention_policy = TargetSnapshotRetentionPolicy.from_data(data)

    if data["compression"]:
        compression = replication_compressions[data["compression"]]
    else:
        compression = None

    return cls(
        id,
        data["direction"],
        create_transport(data["transport"]),
        data["source-dataset"],
        data["target-dataset"],
        data["recursive"],
        data["exclude"],
        data["properties"],
        data["properties-exclude"],
        # Property overrides are passed to zfs as strings.
        {k: str(v) for k, v in data["properties-override"].items()},
        data["replicate"],
        encryption,
        resolved_periodic_snapshot_tasks,
        data["also-include-naming-schema"],
        name_pattern,
        data["auto"],
        schedule,
        restrict_schedule,
        data["only-matching-schedule"],
        ReadOnlyBehavior(data["readonly"]),
        data["allow-from-scratch"],
        data["hold-pending-snapshots"],
        retention_policy,
        compression,
        data["speed-limit"],
        data["dedup"],
        data["large-block"],
        data["embed"],
        data["compressed"],
        data["retries"],
        # NOTE(review): relies on logging's private name-to-level table;
        # original code does the same.
        logging._nameToLevel[data["logging-level"].upper()],
    )