Example no. 1
0
    # Load the previously persisted replication results from the on-disk
    # pickle cache (REPL_RESULTFILE).  This is the body of a try: whose
    # header lies above this excerpt.
    with open(REPL_RESULTFILE, 'rb') as f:
        data = f.read()
    # pickle.loads on a local state file this script itself wrote —
    # trusted input, but still deserializes arbitrary objects.
    results = pickle.loads(data)
# NOTE(review): bare except hides every failure mode (missing file,
# corrupt pickle, permission error); narrowing to
# (OSError, pickle.UnpicklingError) would be safer.
except:
    # Fall back to an empty per-task result map when no cache is usable.
    results = defaultdict(dict)


def write_results():
    """Persist the module-level ``results`` mapping to REPL_RESULTFILE as a pickle."""
    global results
    serialized = pickle.dumps(results)
    with open(REPL_RESULTFILE, 'wb') as f:
        f.write(serialized)

# Matches dataset paths whose second component starts with ".system"
# (the internal system dataset).
# NOTE(review): the '.' before 'system' is an unescaped regex dot, so
# this also matches e.g. 'pool/Xsystem' — probably meant r'^[^/]+/\.system.*';
# confirm before changing.
system_re = re.compile('^[^/]+/.system.*')

# Traverse all replication tasks
replication_tasks = query_model("storage/replication", "repl_enabled")
for replication in replication_tasks:
    # BEGIN REPLICATION ADAPTER
    # Parse the 'HH:MM' window bounds into datetime.time objects.
    replication.repl_begin = datetime.time(*map(int, replication.repl_begin.split(':')))
    replication.repl_end = datetime.time(*map(int, replication.repl_end.split(':')))

    # Re-expose the model's repl_remote_* fields under ssh_* names.
    # The merge produces both 'ssh_remote_<x>' (first comprehension) and
    # 'ssh_<x>' (second) keys for every repl_remote_<x> attribute.
    # NOTE(review): both comprehensions filter on "repl_remote_" but the
    # first replaces only the "repl_" prefix — verify the duplicate
    # key-space is intentional and not a copy-paste slip.
    replication.repl_remote = SimpleNamespace(**dict({
        k.replace("repl_", "ssh_"): v
        for k, v in replication.__dict__.items()
        if k.startswith("repl_remote_")
    }, **{
        k.replace("repl_remote_", "ssh_"): v
        for k, v in replication.__dict__.items()
        if k.startswith("repl_remote_")
    }))
Example no. 2
0
    pidfile.write('%d' % mypid)

# Release the mount lock acquired earlier in the script.
MNTLOCK.unlock()

# Round the current wall-clock time to the snapshot minute: times in the
# first half of a minute truncate down, and minute 59 also truncates
# (rolling forward would overflow `minute`); otherwise round up to the
# next minute.
now = datetime.now().replace(microsecond=0)
round_down = now.second < 30 or now.minute == 59
if round_down:
    snaptime = now.replace(second=0)
else:
    snaptime = now.replace(minute=now.minute + 1, second=0)

# Maps a mountpoint to the snapshot task(s) covering it (filled below).
mp_to_task_map = {}

# Grab all matching tasks into a tree.
# Since the snapshots we make have the 'foo@auto-%Y%m%d.%H%M-{expire time}'
# name format, we just keep one task.
TaskObjects = query_model("storage/task", "task_enabled")
# Tasks bucketed by whether they snapshot a dataset recursively.
taskpath = {'recursive': [], 'nonrecursive': []}
for task in TaskObjects:
    # Parse the 'HH:MM' scheduling window bounds into datetime.time objects.
    task.task_begin = time(*map(int, task.task_begin.split(':')))
    task.task_end = time(*map(int, task.task_end.split(':')))

    # The pool name is the first component of the dataset path.
    vol_name = task.task_filesystem.split('/')[0]
    if isMatchingTime(task, snaptime):
        # Probe whether the pool is imported; `zpool list` exits non-zero
        # if the pool is unknown to the system.
        proc = pipeopen(f'zpool list {vol_name}')
        proc.communicate()
        if proc.returncode != 0:
            # NOTE(review): Logger.warn is a deprecated alias —
            # log.warning is the supported spelling.
            log.warn(
                f'Volume {vol_name} not imported, skipping snapshot task #{task.id}'
            )
            continue
        if task.task_recursive:
Example no. 3
0
        data = f.read()
    # Deserialize the cached replication results (local state file this
    # script wrote — trusted input).
    results = pickle.loads(data)
# NOTE(review): bare except silently swallows every error (missing or
# corrupt cache, permission denied); narrowing to
# (OSError, pickle.UnpicklingError) would be safer.
except:
    results = defaultdict(dict)


def write_results():
    """Write the pickled global ``results`` mapping out to REPL_RESULTFILE."""
    global results
    with open(REPL_RESULTFILE, 'wb') as handle:
        handle.write(pickle.dumps(results))


# Matches dataset paths whose second component starts with ".system".
# NOTE(review): the '.' before 'system' is an unescaped regex dot, so
# this also matches e.g. 'pool/Xsystem' — likely meant r'^[^/]+/\.system.*'.
system_re = re.compile('^[^/]+/.system.*')

# Traverse all replication tasks
replication_tasks = query_model("storage/replication", "repl_enabled")
for replication in replication_tasks:
    # BEGIN REPLICATION ADAPTER
    # Parse the 'HH:MM' window bounds into datetime.time objects.
    replication.repl_begin = datetime.time(
        *map(int, replication.repl_begin.split(':')))
    replication.repl_end = datetime.time(
        *map(int, replication.repl_end.split(':')))

    # Re-expose the model's repl_remote_* fields under ssh_* names
    # (this expression is truncated at the end of the excerpt).
    replication.repl_remote = SimpleNamespace(**dict(
        {
            # First comprehension replaces only the "repl_" prefix,
            # yielding 'ssh_remote_<x>' keys.
            k.replace("repl_", "ssh_"): v
            for k, v in replication.__dict__.items()
            if k.startswith("repl_remote_")
        }, **{
            # Second comprehension strips the whole "repl_remote_"
            # prefix, yielding 'ssh_<x>' keys.
            k.replace("repl_remote_", "ssh_"): v
            for k, v in replication.__dict__.items()