Example #1
def runJob(model, settings, simulationRoot):
    """
    Run the job and stops process if taking too long"
    """
    global rootDirectory

    # skip this job if its output file already exists
    fn = createJob(model, settings, simulationRoot)
    if os.path.exists(fn):
        print(f'{fn} exists')
        return 0

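    # for pulse runs, wait until the matching control run has written its
    # snapshot pickle, then reuse those snapshots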
    if settings.get('pulse'):
        trial = settings.get('trial')
        mag = settings.get('ratio')[0]

        control = f'data/trial={trial}_r={mag}_{{}}.pickle'
        control = os.path.join(simulationRoot, control)

        snapshots = {}
        # try to load the snapshots
        # redundant check if run on separate process
        while not snapshots:
            try:
                snapshots = IO.loadPickle(control).snapshots
            except Exception:
                time.sleep(1)

    else:
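        # no pulse: sample fresh snapshots with the configured MC settings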
        snaps = {}
        for k in 'nSamples burninSamples steps'.split():
            snaps[k] = settings.get(k)
        snapshots = infcy.getSnapShots(model, **snaps)

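    # Monte Carlo estimation of the conditional distribution, px, and MI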
    deltas, repeats = [settings.get(k) for k in 'deltas repeats'.split()]
    conditional, px, mi = infcy.runMC(model, snapshots, deltas, repeats)
    store = dict(
        mi          = mi,
        conditional = conditional,
        px          = px,
        snapshots   = snapshots)

    # empty these fields to save disk space
    if settings.get('pulse'):
        for i in 'mi conditional snapshots'.split():
            store[i] = []
    sr = IO.SimulationResult(**store)
    IO.savePickle(fn, sr, verbose=1)
    checkTime()
Example #2
            top_node = remaining_nodes[top_idx]

            print(f'best node choice: {top_node} with MI = {MI[top_idx]}')

            selected_nodes.append(top_node)
            remaining_nodes.remove(top_node)

            mi_greedy[tuple(selected_nodes)] = MI[top_idx]
            h_greedy[tuple(selected_nodes)] = condH[top_idx]

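            # baseline: entropies for the randomly drawn node set in sets[-1];
            # its MI is the system entropy minus the conditional entropy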
            condRand, systemRand = infoTheory.compute_entropies(
                snapshots[-1], args.snapshots)
            mi_random[tuple(sets[-1])] = systemRand - condRand
            h_random[tuple(sets[-1])] = condRand


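    # bundle the greedy, random and brute-force results into a single SimulationResult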
    result = IO.SimulationResult('greedy', \
                networkSettings     = networkSettings, \
                modelSettings       = modelSettings, \
                snapshotSettings    = systemSnapshotSettings, \
                corrTimeSettings    = corrTimeSettings, \
                mixingResults       = mixingResults, \
                miGreedy            = mi_greedy, \
                hCondGreedy         = h_greedy, \
                miRandom            = mi_random, \
                hCondRandom         = h_random, \
                miBruteForce        = mi_brute_force, \
                hCondBruteForce     = h_brute_force, \
                computeTime         = timer()-start )
    result.saveToPickle(targetDirectory)
    head, tail = os.path.split(f)
    print(tail)

    data = IO.loadPickle(head, tail)

    gname = os.path.splitext(tail)[0].split('_Tc_results')[0]

    result = IO.TempsResult(data['temps'], data['mags'], data['abs_mags'], \
        data['sus'], data['binder'], data['T_c'], data['T_d'], data['T_o'], gname)
    out_dir = f'backup/tempsData/{head}'
    os.makedirs(out_dir, exist_ok=True)
    result.saveToPickle(out_dir)
"""

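# convert the existing simulation_results pickles into SimulationResult objects
# and store them under backup/, preserving the directory layout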
directory = 'output_systemEntropyGreedy'
for f in find_files(f'../masterthesis_casperscode/{directory}',
                    'simulation_results', 'dict.pickle'):
    head, tail = os.path.split(f)
    print(tail)

    data = IO.loadPickle(head, tail)
    #type = data['type']
    #data.pop('type')

    result = IO.SimulationResult(**data)
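    # recover the timestamp embedded in the filename so the backup keeps it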
    t = tail.split('_')[-2]
    print(t)
    out_dir = f'backup/{directory}/{head}'
    os.makedirs(out_dir, exist_ok=True)
    result.saveToPickle(out_dir, timestamp=t)
Example #4
        control = os.path.join(root, \
                    f"trial={trial}_r={mag}_{{}}.pickle"\
                    )
        snapshots = {}
        while not snapshots:
            try:
                snapshots = IO.loadPickle(control).snapshots
            except Exception:
                time.sleep(1)
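    # Monte Carlo parameters are taken from the settings dict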
    deltas, repeats = [settings.get(k) for k in 'deltas repeats'.split()]
    conditional, px, mi = infcy.runMC(model, snapshots, deltas, repeats)

    # store the results
    store = dict(
        mi          = mi,
        conditional = conditional,
        px          = px,
        snapshots   = snapshots,
    )
    # reduce dataset size
    # TODO: make nicer
    if settings.get('pulse'):
        store['mi'] = []
        store['conditional'] = []
        store['snapshots'] = []
    sr = IO.SimulationResult(**store)

    # TODO: remove

    IO.savePickle(runFile, sr)
        startS = timer()
        avgSnapshots, avgSystemSnapshots, fullSnapshots = \
            simulation.getJointSnapshotsPerDistNodes(model, nodes, neighboursG, \
                                    **snapshotSettings, threads=nthreads, \
                                    initStateIdx=args.initState, getFullSnapshots=1)
        simulationTime = timer() - startS

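        # turn the joint snapshots into per-node MI, system-magnetisation MI
        # and node entropies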
        startC = timer()
        MI_avg, MI_system, HX = infoTheory.processJointSnapshots_allNodes(
            avgSnapshots, args.numSamples, nodes, maxDist, avgSystemSnapshots)

        result = IO.SimulationResult('magMI', \
                    networkSettings     = networkSettings, \
                    modelSettings       = modelSettings, \
                    snapshotSettings    = snapshotSettings, \
                    corrTimeSettings    = corrTimeSettings, \
                    mixingResults       = mixingResults, \
                    mi                  = MI_avg, \
                    miSystemMag         = MI_system, \
                    hx                  = HX, \
                    computeTime         = simulationTime + timer()-startC )
        result.saveToPickle(targetDirectory)

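        # optionally also estimate pairwise MI and correlations between nodes
        # from the full snapshots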
        if args.pairwise:

            startC = timer()
            MI, corr = infoTheory.pairwiseMI_allNodes(model, nodes,
                                                      fullSnapshots)

            result = IO.SimulationResult('pairwiseMI', \
                        networkSettings     = networkSettings, \
                        modelSettings       = modelSettings, \
Example #6
        snapshots, allNeighbours_G = simulation.getSnapshotsPerDist(model, \
                                nodes, **snapshotSettings, \
                                threads=threads, initStateIdx=args.initState)

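        # estimate conditional MI and entropy separately for every node of interest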
        for i, node in enumerate(nodes):
            if args.maxDist == -2:
                # find distance with max number of neighbours
                maxDist = np.argmax([len(allNeighbours_G[i][d]) \
                    for d in range(1, max(allNeighbours_G[i].keys())+1)]) + 1

            print(f'start conditional sampling for node {node}')
            all_MI[node], all_HX[node] = computeMI_cond(model, node, minDist, maxDist, \
                                allNeighbours_G[i], snapshots[i], nTrials, \
                                nSamples, modelSettingsCond, corrTimeSettings)
            print(f'MI = {all_MI[node]}')

        result = IO.SimulationResult('snapshotMI', \
                    networkSettings     = networkSettings, \
                    modelSettings       = modelSettings, \
                    snapshotSettings    = snapshotSettings, \
                    corrTimeSettings    = corrTimeSettings, \
                    mixingResults       = mixingResults, \
                    mi                  = all_MI, \
                    hx                  = all_HX, \
                    computeTime         = timer()-start, \
                    threshold           = args.threshold)
        result.saveToPickle(targetDirectory)

        print(f'run {r} finished')
        print(f'time elapsed: {timer()-start : .2f} seconds')
                    np.fromiter(snapshots.values(), dtype=int))
                numSysStates[trial] = len(snapshots)
                maxCounts[trial] = max(snapshots.values())
                print(
                    f'num system states = {len(snapshots)}, min count = {min(snapshots.values())}, max count = {max(snapshots.values())}'
                )

            entropy = np.median(allEntropies)
            print(f'system entropy = {entropy}')

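            # store the per-trial system entropies and snapshot statistics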
            result = IO.SimulationResult('systemMI', \
                        networkSettings     = networkSettings, \
                        modelSettings       = modelSettings, \
                        snapshotSettings    = systemSnapshotSettings, \
                        corrTimeSettings    = corrTimeSettings, \
                        mixingResults       = mixingResults, \
                        hx                  = allEntropies, \
                        numSystemStates     = numSysStates, \
                        maxCounts           = maxCounts, \
                        meanStateEntropies  = meanStateEntropies, \
                        computeTime         = timer()-start )
            result.saveToPickle(targetDirectory)

        elif args.nodes == 'all':
            allNodes = np.array(list(graph))

            if args.nodes == 'test':
                fixedNodes = allNodes[:2]
            else:
                fixedNodes = np.load(args.nodes).astype(int)