forked from MHarland/ClusterDMFT
/
archive.py
92 lines (86 loc) · 3.6 KB
/
archive.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import os
from pytriqs.archive import HDFArchive
from pytriqs.gf.local import BlockGf, GfImFreq, GfReFreq, GfImTime
from pytriqs.utility import mpi
class ArchiveConnected(object):
    """
    Allows extraction of data from the associated HDFArchive.

    On construction the archive file is created and initialised on the
    MPI master node if it does not exist yet; an MPI barrier guarantees
    the file exists before any other node proceeds.
    """
    def __init__(self, archive, *args, **kwargs):
        """
        archive: str, filename of the HDF5 archive
        Extra *args/**kwargs are accepted for cooperative inheritance.
        """
        self.archive = archive
        # Only the master node creates/initialises the file; everyone
        # else waits at the barrier so the archive exists before reads.
        if not os.path.exists(archive) and mpi.is_master_node():
            arch = HDFArchive(archive, 'w')
            arch.create_group('results')
            arch['results']['n_dmft_loops'] = 0
            del arch  # close the file handle
        mpi.barrier()

    def next_loop(self):
        """returns the DMFT loop nr. of the next loop"""
        arch = HDFArchive(self.archive, 'r')
        if arch.is_group('results'):
            nl = arch['results']['n_dmft_loops']
        else:
            nl = 0
        del arch
        return nl

    def last_loop(self):
        """returns the last DMFT loop nr."""
        arch = HDFArchive(self.archive, 'r')
        ll = arch['results']['n_dmft_loops'] - 1
        del arch
        return ll

    def load(self, function_name, loop_nr = -1):
        """
        returns a calculated function from archive
        function_name: 'Sigma_c_iw', 'G_c_iw', ...
        loop_nr: int, -1 gives the last loop nr.; negative values count
                 backwards from the end, like Python indexing
        The read happens on the master node only; the result is then
        broadcast to all nodes.
        """
        function = None
        if mpi.is_master_node():
            arch = HDFArchive(self.archive, 'r')
            if loop_nr < 0:
                function = arch['results'][str(self.next_loop() + loop_nr)][function_name]
            else:
                function = arch['results'][str(loop_nr)][function_name]
            del arch
        function = mpi.bcast(function)
        return function

    def archive_content(self, group = None, dont_exp = None, n_max_subgroups = 50, shift_step_len = 10):
        """
        collects and returns the archive's content as a string
        group: list of str, path of a subgroup to restrict the listing to
        dont_exp: list of str, keys whose expansion is omitted
        n_max_subgroups: int, groups with more subentries are not expanded
        shift_step_len: int, indentation width per tree level
        Only the master node reads the file; other nodes return ''.
        """
        # None sentinels instead of mutable default arguments, which
        # would be shared between calls.
        if group is None:
            group = []
        if dont_exp is None:
            dont_exp = []
        content = str()
        if mpi.is_master_node():
            arch = HDFArchive(self.archive, 'r')
            content = self.archive + '\n'
            shift = str()
            # descend into the requested subgroup, echoing the path
            for g in group:
                arch = arch[g]
                shift += ' ' * shift_step_len
                content += shift + g + '\n'
            content = _archive_content(arch, content, shift, shift_step_len, dont_exp, n_max_subgroups)
            del arch
        return content
def _archive_content(group, content, shift, shift_step_len, dont_exp, n_max_subgroups):
for key in group.keys():
if group.is_data(key):
if key in dont_exp:
content += shift + ' ' * shift_step_len + key + '...\n'
else:
content += shift + ' ' * shift_step_len + str(key) + ' = ' + str(group[key]) + '\n'
else:
assert group.is_group(key), 'unkown data in archive'
if len(group[key]) > n_max_subgroups:
content += shift + ' ' * shift_step_len + str(key) + '...\n'
elif key in dont_exp:
content += shift + ' ' * shift_step_len + key + '...\n'
elif type(group[key]) in [BlockGf, GfImFreq, GfReFreq, list, tuple, dict]:
content += shift + ' ' * shift_step_len + str(key) + ' = ' + str(group[key]) + '\n'
else:
content += _archive_content(group[key], shift + ' ' * shift_step_len + str(key) + '\n', shift + ' ' * shift_step_len, shift_step_len, dont_exp, n_max_subgroups)
return content