Example #1
import os
import time

from Utils import IO
from Toolbox import infcy


def runJob(model, settings, simulationRoot):
    """
    Run the job; stop the process if it takes too long.
    """
    global rootDirectory

    # skip if the output file already exists
    fn = createJob(model, settings, simulationRoot)
    if os.path.exists(fn):
        print(f'{fn} exists')
        return 0

    if settings.get('pulse'):
        trial = settings.get('trial')
        mag = settings.get('ratio')[0]

        control = f'data/trial={trial}_r={mag}_{{}}.pickle'
        control = os.path.join(simulationRoot, control)

        snapshots = {}
        # try to load the snapshots; this check is redundant when run in a separate process
        while not snapshots:
            try:
                snapshots = IO.loadPickle(control).snapshots
            except Exception:  # the control run has not written them yet; retry
                time.sleep(1)

    else:
        snaps = {}
        for k in 'nSamples burninSamples steps'.split():
            snaps[k] = settings.get(k)
        snapshots = infcy.getSnapShots(model, **snaps)

    conditional, px, mi = infcy.runMC(model, snapshots, deltas, repeats)  # deltas, repeats are module-level settings
    store = dict(
        mi          = mi,
        conditional = conditional,
        px          = px,
        snapshots   = snapshots)

    # empty the large fields to save disk space
    if settings.get('pulse'):
        for i in 'mi conditional snapshots'.split():
            store[i] = []
    sr = IO.SimulationResult(**store)
    IO.savePickle(fn, sr, verbose=1)
    checkTime()  # helper defined elsewhere; aborts if the run takes too long

Example #2

import os

from Utils import IO

"""
for f in find_files(...):  # disabled earlier pass over the Tc-results files
    head, tail = os.path.split(f)
    print(tail)

    data = IO.loadPickle(head, tail)

    gname = os.path.splitext(tail)[0].split('_Tc_results')[0]

    result = IO.TempsResult(data['temps'], data['mags'], data['abs_mags'],
                            data['sus'], data['binder'], data['T_c'],
                            data['T_d'], data['T_o'], gname)
    outDir = f'backup/tempsData/{head}'
    os.makedirs(outDir, exist_ok=True)
    result.saveToPickle(outDir)
"""

directory = 'output_systemEntropyGreedy'
for f in find_files(f'../masterthesis_casperscode/{directory}',
                    'simulation_results', 'dict.pickle'):
    head, tail = os.path.split(f)
    print(tail)

    data = IO.loadPickle(head, tail)
    #type = data['type']
    #data.pop('type')

    result = IO.SimulationResult(**data)
    t = tail.split('_')[-2]  # timestamp field in the filename
    print(t)
    outDir = f'backup/{directory}/{head}'
    os.makedirs(outDir, exist_ok=True)
    result.saveToPickle(outDir, timestamp=t)
Example #3
import argparse
import datetime
import itertools
import json
import multiprocessing as mp
import pickle
import sys
import time

import networkx as nx
import numpy as np
import scipy

from Utils import IO

parser = argparse.ArgumentParser()
parser.add_argument('--file')
if __name__ == "__main__":
    args = parser.parse_args()
    print(args, args.file)
    runFile = args.file
    # inject the saved settings (graph, updateType, magSide, ...) into module globals
    for k, v in IO.loadPickle(runFile).items():
        globals()[k] = v

    modelSettings = dict(
        graph       = graph,
        temperature = 0,
        updateType  = updateType,
        magSide     = magSide,
        nudgeType   = nudgeType)
    model = FastIsing.Ising(**modelSettings)
    magRange = np.array([CHECK]).ravel()

    # magRange = array([.9, .2])
    fitTemps = np.linspace(0, graph.number_of_nodes() / 2, tempres)
    mag, sus = model.matchMagnetization(temps=fitTemps,
                                        n=int(1e3), burninSamples=0)
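
    # matchMagnetization sweeps fitTemps and, going by the names above, returns the
    # magnetization and susceptibility per temperature; assuming that return shape,
    # a sketch of estimating the critical temperature from the susceptibility peak:
    T_c = fitTemps[np.argmax(sus)]
    print(f'estimated critical temperature: {T_c:.3f}')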
Example #4
import argparse
import os
import time

from Utils import IO
from Toolbox import infcy

# init arg parse
parser = argparse.ArgumentParser()
parser.add_argument('--file')
if __name__ == '__main__':
    # load settings
    # parse input
    args = parser.parse_args()
    runFile = args.file

    # load data to global
    print("IM AM IN RUNFILE", runFile)
    settings = IO.loadPickle(runFile)
    model = settings.get('model')

    # init model
    # run experiment
    if not settings.get('pulse'):
        # run snapshots (cheap enough)
        snaps = {k: settings.get(k)
                 for k in 'nSamples burninSamples steps'.split()}
        snapshots = infcy.getSnapShots(model, **snaps)
    # load nudges
    else:

        # think of something to extract the control
        trial = settings.get('trial')
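
        # one possible way to resolve the TODO above, mirroring the pulse branch of
        # Example #1; the control-file naming scheme and the 'simulationRoot' key
        # are assumptions carried over from there, not part of this example:
        mag = settings.get('ratio')[0]
        control = os.path.join(settings.get('simulationRoot', '.'),
                               f'data/trial={trial}_r={mag}_{{}}.pickle')
        snapshots = {}
        while not snapshots:  # poll until the control run has written its snapshots
            try:
                snapshots = IO.loadPickle(control).snapshots
            except Exception:
                time.sleep(1)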
Example #5
import networkx as nx
import numpy as np

from Utils import IO

loadGraph = ''
if __name__ == '__main__':
    graphs = []
    N = 10
    if not loadGraph:
        for i in range(10):

            r = np.random.rand() * (1 - .2) + .2
            # g = nx.barabasi_albert_graph(N, 2)
            # g = nx.erdos_renyi_graph(N, r)
            # g = nx.duplication_divergence_graph(N, r)
            # graphs.append(g)
    else:
        print('loading graph from file')
        graph = IO.loadPickle(loadGraph)
        graphs.append(graph)
    # w = nx.utils.powerlaw_sequence(N, 2)
    # g = nx.expected_degree_graph(w)
    # g = sorted(nx.connected_component_subgraphs(g), key = lambda x: len(x))[-1]

    #for i, j in g.edges():
    #    g[i][j]['weight'] = np.random.rand() * 2 - 1
#        graphs.append(g)

#    graphs[0].add_edge(0,0)
#    for j in np.int32(np.logspace(0, np.log10(N-1),  5)):
#       graphs.append(nx.barabasi_albert_graph(N, j))
    dataDir = 'Graphs'  # relative path; depends on the working directory
    df = IO.readCSV(f'{dataDir}/Graph_min1_1.csv', header=0, index_col=0)
    h = IO.readCSV(f'{dataDir}/External_min1_1.csv', header=0, index_col=0)
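
    # Graph_min1_1.csv appears to hold a coupling matrix and External_min1_1.csv the
    # external fields; assuming IO.readCSV returns a square pandas DataFrame, the
    # matrix can be turned into a graph like this (a sketch, not in the original):
    graph = nx.from_pandas_adjacency(df)
    graphs.append(graph)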
Example #6
        # locate an existing settings file anywhere under `file`
        oldSettings = ''
        for (root, dirs, files) in os.walk(file):
            for f in files:
                if f.endswith('settings.pickle'):
                    oldSettings = os.path.join(root, f)
                    break
            if oldSettings:
                break
        if not oldSettings:
            oldSettings = os.path.join(file.replace('data', ''),
                                       'settings.pickle')

        # TODO: change
        rootDirectory = simulationRoot = file

        # oldSettings = os.path.join(file, 'settings.pickle')
        s = IO.loadPickle(oldSettings)
        # fill in any values that are missing from the new settings
        for k, oldValue in s.items():
            newValue = settings.get(k)
            if not newValue:
                print(f'Falling back to old value for {k}: {oldValue}')
                settings[k] = oldValue
        # back up the old settings file before overwriting it
        IO.savePickle(oldSettings +
                      f"{datetime.datetime.now().isoformat()}.bk",
                      s,
                      verbose=1)

        newSettings = oldSettings  # for clarity
        IO.savePickle(newSettings, settings)

        print(settings)
        distSamples = mixingResults['distSamples']
        print(f'mixing time      = {burninSteps}')
        print(f'correlation time = {distSamples}')
    except Exception:  # no cached mixing results; compute them in a subprocess
        subprocess.call(['python3', 'run_mixing.py', f'{args.T}', f'{args.dir}', f'{args.graph}',
                         '--maxcorrtime', '10000',
                         '--maxmixing', '10000'])
        mixingResults = IO.loadResults(targetDirectory, 'mixingResults')
        corrTimeSettings = IO.loadResults(targetDirectory, 'corrTimeSettings')
        burninSteps = mixingResults['burninSteps']
        distSamples = mixingResults['distSamples']

    # try to load neighbourhood shell data. If it doesn't exist yet, generate it
    try:
        if len(args.neighboursDir) > 0:
            neighboursG = IO.loadPickle(args.neighboursDir, 'neighboursG')
        else:
            neighboursG = IO.loadPickle(targetDirectory, 'neighboursG')
    except Exception:
        print('determining neighbours')
        neighboursG = model.neighboursAtDistAllNodes(nodes, maxDist)
        if len(args.neighboursDir) > 0:
            os.makedirs(args.neighboursDir, exist_ok=True)
            IO.savePickle(args.neighboursDir, 'neighboursG', neighboursG)
        else:
            IO.savePickle(targetDirectory, 'neighboursG', neighboursG)
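
    # the two try/except blocks above follow the same load-or-compute caching
    # pattern; the idea factored into a small helper (a sketch, not in the source):
    def loadOrCompute(directory, name, compute):
        # return the cached pickle if present, otherwise compute and cache it
        try:
            return IO.loadPickle(directory, name)
        except Exception:
            obj = compute()
            os.makedirs(directory, exist_ok=True)
            IO.savePickle(directory, name, obj)
            return obj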


    snapshotSettings = dict(
        nSamples    = args.numSamples,
        burninSteps = burninSteps,
Example #8
import networkx as nx
import numpy as np

from Utils import IO

# name for the idx-th generated dataset
genDataFile = lambda idx: f'dataset{idx}'

graphs = []
N = 10
# loadGraph = 'rerunthese.pickle2'
loadGraph = ''
if not loadGraph:
    for i in range(10):
        r = np.random.rand() * (1 - .2) + .2
        # g = nx.barabasi_albert_graph(N, 2)
        g = nx.erdos_renyi_graph(N, r)
        # g = nx.duplication_divergence_graph(N, r)
        graphs.append(g)
else:
    for graph in IO.loadPickle(loadGraph)['graphs']:
        graphs.append(graph)
# w = nx.utils.powerlaw_sequence(N, 2)
# g = nx.expected_degree_graph(w)
# g = sorted(nx.connected_component_subgraphs(g), key = lambda x: len(x))[-1]

#for i, j in g.edges():
#    g[i][j]['weight'] = np.random.rand() * 2 - 1
#        graphs.append(g)

#    graphs[0].add_edge(0,0)
#    for j in np.int32(np.logspace(0, np.log10(N-1),  5)):
#       graphs.append(nx.barabasi_albert_graph(N, j))
# dataDir = 'Graphs' # relative path careful
# df    = IO.readCSV(f'{dataDir}/Graph_min1_1.csv', header = 0, index_col = 0)
# h     = IO.readCSV(f'{dataDir}/External_min1_1.csv', header = 0, index_col = 0)
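
# a sketch of writing the generated graphs back out in the same layout that the
# loadGraph branch above reads (a pickle holding a 'graphs' list); the call form
# follows IO.savePickle as used in the earlier examples:
IO.savePickle(genDataFile(0), dict(graphs=graphs))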