Example 1
def explorfluxsim():
    '''This function uses the previous one to explore all the studied values of P_CB'''
    import numpy as np
    #initializations
    allcases=[]
    allstrenghts=[]
    for p in [0.0,0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,1.0]: #all the values of P_CB that we studied
        print(p) #for keeping track
        alpha=analysingflux(1000,p) #results of the previous function
        allcases.append(np.mean(alpha[0])) #list of results of method 1 of measuring flux
        allstrenghts.append(np.mean(alpha[1])) #list of results of method 2 of measuring flux
        
    from saving import savinggeneric #import a function for saving the results.
    savinggeneric([allcases,allstrenghts],'fluxsimforM6') #save the results with the appropriate name.
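
#A minimal usage sketch (not part of the original pipeline): reload the results saved above and plot both flux measures
#against P_CB. It assumes loadinggeneric from loading.py is the counterpart of savinggeneric and returns the saved tuple;
#the function name plotfluxsim and the matplotlib calls are purely illustrative.
def plotfluxsim():
    '''Sketch: plot the two flux measures saved by explorfluxsim as a function of P_CB.'''
    import matplotlib.pyplot as plt
    from loading import loadinggeneric  #assumed counterpart of savinggeneric
    ps = [0.0, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0]  #same P_CB values as above
    allcases, allstrenghts = loadinggeneric('fluxsimforM6')  #the tuple saved by explorfluxsim
    plt.plot(ps, allcases, 'o-', label='method 1')
    plt.plot(ps, allstrenghts, 's-', label='method 2')
    plt.xlabel('P_CB')
    plt.ylabel('mean flux')
    plt.legend()
    plt.show()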
Example 2
def exploringfinalstateswithPCB(numens=1000,
                                N=10,
                                allargs=50,
                                steps=10000,
                                p=0.5,
                                numofrelevargs=6):
    '''This function is used for parameter exploration, using the previous one. It is very similar to all other parameter
        exploration functions, found in newexplorargsim.py, exploringM.py and exploringDelta.py, so we will not be adding
        additional comments to it. The only new input is:
            "p" is P_CB, the probability of interacting with confirmation bias.'''

    counts0 = 0
    countspl = 0
    countsmin = 0
    countsbip = 0
    otherstuff = 0
    countosc = 0
    for i in range(numens):
        a = runargsimwithPCB(allargs, allargs / 2, N, numofrelevargs, steps, p)
        if a[1][-1] == N:
            countspl += 1
        elif a[2][-1] == N:
            countsmin += 1
        elif a[0][-1] == N:
            counts0 += 1
        elif a[0][-1] == 0:
            countsbip += 1
        else:
            otherstuff += 1

        countosc += a[-1]

    from saving import savinggeneric

    savinggeneric(
        (counts0, countspl, countsmin, countsbip, otherstuff, countosc),
        'PCBNargs50simwithp' + str(p))

    return counts0, countspl, countsmin, countsbip, otherstuff, countosc
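
#A quick, hypothetical usage sketch (not part of the original analysis): run the exploration above and express the
#final-state counts as fractions of the ensemble. The function name fractionsfromPCBcounts is illustrative only.
def fractionsfromPCBcounts(numens=1000, p=0.5):
    '''Sketch: fractions of neutral consensus, plus consensus, minus consensus, bipolarization and other states.'''
    counts0, countspl, countsmin, countsbip, otherstuff, countosc = exploringfinalstateswithPCB(numens=numens, p=p)
    return [c / float(numens) for c in (counts0, countspl, countsmin, countsbip, otherstuff)]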
def transformdatareview2():
    '''This function is to be used after the previous one, and finishes the restructuring of the data. After this, the results can be
    plotted with the function analyzingargsimphasediagramreview in file newargumentsphasediagram.py. This function is quite involved:
    to follow it, you need to understand the process of studying every possible value of N_A/N over the explored ranges of N_A and N.
    If you need guidance, please contact the corresponding author.'''

    #useful imports
    import numpy as np
    from loading import loadinggeneric  #we load results
    from saving import savinggeneric  #we will save the results at the end

    #If the previous function was not already run, these lines can be uncommented and used to run it
    # numens=1000
    # homo=0
    # from newcollectdata import newcollectargsimdatareview2
    # A=newcollectargsimdatareview2(homo,'no')

    #If the previous function was already run, this can be used to load the results.
    A = loadinggeneric('BarridoFinoReviewerFinal')

    #For properly analyzing the results, a lot of restructuring is needed.
    #This exploration involved finding every possible value of N_A/N for all N_A in range [10,100] (steps of 2) and for all N in range
    #[10,100]. We now need to find all those values of N_A/N and regroup them in a useful way for plotting.

    #initializations
    counters = [[] for i in range(6, 51, 2)]  #this will count how many times each value of N_A/N (also called "delta") appears

    #These initializations are the same as previous functions in this file
    counts0 = [[] for i in range(6, 51, 2)]
    countsdec = [[] for i in range(6, 51, 2)]
    countsbip = [[] for i in range(6, 51, 2)]
    countsother = [[] for i in range(6, 51, 2)]
    countspl = [[] for i in range(6, 51, 2)]
    countsmin = [[] for i in range(6, 51, 2)]
    countsosc = [[] for i in range(6, 51, 2)]

    i = 0
    for M in range(6, 51, 2):
        j = 0
        alldeltas = []  #all possible values of N_A/N
        for N in range(10, 101, 1):
            k = 0
            for NA in range(10, 101, 2):
                #The idea is as follows: for a fixed value of M, one particular value of N_A/N can come from different combinations of
                #N_A and N. To find the general behavior of N_A/N vs. M across the whole parameter space, we average the results of all
                #the combinations that give the same value of N_A/N (this is done for each M, which is why the outermost loop is
                #over values of M). To do this average, we keep a list of N_A/N values, and each time we find a value already on
                #the list, we add the new results to the ones already stored. When the whole process is finished, we divide by the
                #total number of times each value of N_A/N was found, thus obtaining averages for each value of N_A/N. "counters" is the
                #list that keeps count of how many times we encountered each value of N_A/N.

                newdelta = round(NA / float(N), 6)  #we can round these values as we choose; once the interval of N_A/N values is binned,
                #the rounding will not matter (the binning is done in the plotting function analyzingargsimphasediagramreview).

                if newdelta in alldeltas:  #If this newdelta was already present in our list of deltas, we update the values on the lists, summing the new ones and the old ones
                    index = alldeltas.index(newdelta)
                    counters[i][index] += 1
                    counts0[i][index] += A[0][i][j][k]
                    countsdec[i][index] += A[1][i][j][k]
                    countsbip[i][index] += A[2][i][j][k]
                    countsother[i][index] += A[3][i][j][k]
                    countspl[i][index] += A[4][i][j][k]
                    countsmin[i][index] += A[5][i][j][k]
                    countsosc[i][index] += A[6][i][j][k]
                else:  #if the delta is new, we create a new entry on the lists and save the new results there.
                    alldeltas.append(newdelta)
                    counters[i].append(1)
                    counts0[i].append(A[0][i][j][k])
                    countsdec[i].append(A[1][i][j][k])
                    countsbip[i].append(A[2][i][j][k])
                    countsother[i].append(A[3][i][j][k])
                    countspl[i].append(A[4][i][j][k])
                    countsmin[i].append(A[5][i][j][k])
                    countsosc[i].append(A[6][i][j][k])

                k += 1
            j += 1

        sortedindexes = np.argsort(alldeltas)  #we will sort the lists by increasing values of delta=N_A/N

        alldeltas = np.array(alldeltas)
        counters[i] = np.array(counters[i])
        counts0[i] = np.array(counts0[i])
        countsdec[i] = np.array(countsdec[i])
        countsbip[i] = np.array(countsbip[i])
        countsother[i] = np.array(countsother[i])
        countspl[i] = np.array(countspl[i])
        countsmin[i] = np.array(countsmin[i])
        countsosc[i] = np.array(countsosc[i])

        alldeltas = alldeltas[sortedindexes]
        counters[i] = counters[i][sortedindexes]
        counts0[i] = counts0[i][sortedindexes]
        countsdec[i] = countsdec[i][sortedindexes]
        countsbip[i] = countsbip[i][sortedindexes]
        countsother[i] = countsother[i][sortedindexes]
        countspl[i] = countspl[i][sortedindexes]
        countsmin[i] = countsmin[i][sortedindexes]
        countsosc[i] = countsosc[i][sortedindexes]

        #Now all the lists are properly sorted

        i += 1
    #Now we divide by counters to get the proper average values, and also by the number of copies in the ensemble
    #(the number 100 below is the number of copies in the ensemble).
    newcounts0 = list(np.array(counts0) / (np.array(counters) * 100.0))
    newcountsdec = list(np.array(countsdec) / (np.array(counters) * 100.0))
    newcountsbip = list(np.array(countsbip) / (np.array(counters) * 100.0))
    newcountsother = list(np.array(countsother) / (np.array(counters) * 100.0))
    newcountspl = list(np.array(countspl) / (np.array(counters) * 100.0))
    newcountsmin = list(np.array(countsmin) / (np.array(counters) * 100.0))
    newcountsosc = list(np.array(countsosc) / (np.array(counters) * 100.0))

    #save the results
    savinggeneric((newcounts0, newcountsdec, newcountsbip, newcountsother,
                   newcountspl, newcountsmin, newcountsosc, alldeltas),
                  'BarridoFino')

    return newcounts0, newcountsdec, newcountsbip, newcountsother, newcountspl, newcountsmin, newcountsosc, alldeltas  #also return them, if you want to use them now
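
#The grouping-and-averaging idea explained in the comments of transformdatareview2 can be seen in isolation in the toy
#sketch below (the numbers are made up and independent of the actual simulation output; only the mechanism is the same).
def deltaaveragingtoy():
    '''Toy sketch: results from different (N_A, N) pairs that share the same delta=N_A/N are summed,
    and at the end each sum is divided by how many times that delta appeared.'''
    alldeltas = []
    counters = []
    sums = []
    for N in [10, 20, 40]:
        for NA in [10, 20]:
            newdelta = round(NA / float(N), 6)
            result = 1.0  #hypothetical per-(N_A, N) result; in the real code this comes from the loaded data A
            if newdelta in alldeltas:
                index = alldeltas.index(newdelta)
                counters[index] += 1
                sums[index] += result
            else:
                alldeltas.append(newdelta)
                counters.append(1)
                sums.append(result)
    averages = [s / c for s, c in zip(sums, counters)]
    return alldeltas, averages  #e.g. delta=0.5 appears twice (10/20 and 20/40), so its entry is an average of two results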
def newcollectargsimdatareview2(homo=0, cb='no'):
    '''This function uses the previous one to load and restructure all the results from the exploration newexplorargsimwithconvergencereview2
    found in exploringDelta.py.
    "homo" is the homophily parameter.
    "cb" is the presence (cb='yes') or absence (cb='no') of confirmation bias.
    This returns the restructured results, but not ready for plotting. The last function, transformdatareview2, finishes the restructuring
    of the data (we separated both functions because this time, the restructuring process was much more complicated).
    Since this function is very similar to newcollectargsimdatareviewer, we will not comment it, except for the new lines of code.'''

    import os
    import numpy as np
    from saving import savinggeneric  #we will save this, just in case.
    os.chdir('D:\\Doctorado\\Simulation_Functions\\')

    #This time, the final lists are more complicated, as seen in these initializations
    counts0 = [[[] for j in range(10, 101, 1)] for i in range(6, 51, 2)]  #the outermost dimension runs over M, the middle one over N (the innermost, filled below, runs over N_A).
    countsdec = [[[] for j in range(10, 101, 1)] for i in range(6, 51, 2)]
    countsbip = [[[] for j in range(10, 101, 1)] for i in range(6, 51, 2)]
    countsother = [[[] for j in range(10, 101, 1)] for i in range(6, 51, 2)]
    countspl = [[[] for j in range(10, 101, 1)] for i in range(6, 51, 2)]
    countsmin = [[[] for j in range(10, 101, 1)] for i in range(6, 51, 2)]
    countsosc = [[[] for j in range(10, 101, 1)] for i in range(6, 51, 2)]
    tiempos = [[[] for j in range(10, 101, 1)] for i in range(6, 51, 2)]

    i = 0

    for M in range(6, 51, 2):  #the range of Ms explored
        j = 0
        print(M)  #for keeping track; this is the function that takes the longest to finish in this file.
        for N in range(10, 101, 1):  #the range of Ns explored
            for NA in range(10, 101, 2):  #the range of N_As explored

                A = newloadingargumentvariablesreview2(NA, N, M, homo, cb,
                                                       1000)

                if np.any(np.isnan(A[0])):  #np.isnan is used here because a direct comparison with np.nan is always False
                    counts0[i][j].append(np.nan)
                    countspl[i][j].append(np.nan)
                    countsmin[i][j].append(np.nan)
                    countsbip[i][j].append(np.nan)
                    countsother[i][j].append(np.nan)
                    countsdec[i][j].append(np.nan)
                    countsosc[i][j].append(np.nan)
                    tiempos[i][j].append(np.nan)
                else:
                    counts0[i][j].append(A[0][0])
                    countspl[i][j].append(A[0][1])
                    countsmin[i][j].append(A[0][2])
                    countsbip[i][j].append(A[0][3])
                    countsother[i][j].append(A[0][5])
                    countsdec[i][j].append(A[0][1] + A[0][2])
                    countsosc[i][j].append(A[0][4])
                    tiempos[i][j].append(A[2])
            j += 1
        i += 1

        savinggeneric((counts0, countsdec, countsbip, countsother, countspl,
                       countsmin, countsosc, tiempos),
                      'BarridoFinoReviewFinal')
        #we save the results, in case something happens and we have to start all over.

    return counts0, countsdec, countsbip, countsother, countspl, countsmin, countsosc, tiempos
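
#Since the nested lists returned above are indexed by loop counters rather than by parameter values, the following small
#sketch (hypothetical helper, not used elsewhere) makes the correspondence explicit, using the same ranges as the loops above.
def indicestoparams(i, j, k):
    '''Sketch: map the indices of counts0[i][j][k] back to the parameters (M, N, N_A).'''
    M = 6 + 2 * i  #M in range(6, 51, 2)
    N = 10 + j  #N in range(10, 101, 1)
    NA = 10 + 2 * k  #N_A in range(10, 101, 2)
    return M, N, NA
#For example, indicestoparams(0, 0, 0) == (6, 10, 10) and indicestoparams(22, 90, 45) == (50, 100, 100).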
Example 5
def newexplorargsimforprofiling(homo=0,
                                allargsini=6,
                                allargsfin=101,
                                numens=1000,
                                opsys='w',
                                cb='yes',
                                numofrelevargs=5):
    '''This function was used for exploring the parameters N and N_A. As seen in other functions, here we refer to N_A as allargs.
    "homo" is the homophily parameter.
    "allargsini" is the smallest number of arguments considered for the exploration.
    "allargsfin" is the largest number of arguments considered for the exploration.
    "numens" is the number of copies in the ensemble.
    "opsys" is obsolete, it was originally used for selecting different saving options.
    "cb" is the presence (cb='yes') or absence (cb='no') of confirmation bias.
    "numofrelevargs" is M, the memory size (originally named as number of relevant arguments).
    This function does not return anything, but saves the results.'''

    from newrunargsim import newrunargsimforprofiling  #the main simulation function
    from saving import savinggeneric  #generic saving method
    for allargs in range(allargsini, allargsfin, 2):  #the number of arguments must be even, so the step should be even too.
        print(allargs)  #for keeping track
        for N in range(10, 101, 1):
            #print N #in case more subtle tracking is required

            #initializations
            counts0 = 0
            countspl = 0
            countsmin = 0
            countsbip = 0
            otherstuff = 0
            ensamblestates = []
            osc = 0  #in case of oscillations
            for copy in range(numens):
                #We run the simulation:
                a = newrunargsimforprofiling(allargs, allargs / 2, N,
                                             numofrelevargs, 5000, cb, 'no',
                                             homo)
                #check how many of each final state occurred
                if a[1][-1] == N:
                    countspl += 1
                elif a[2][-1] == N:
                    countsmin += 1
                elif a[0][-1] == N:
                    counts0 += 1
                elif a[0][-1] == 0 and a[1][-1] != N and a[2][-1] != N:
                    countsbip += 1
                else:
                    otherstuff += 1
                #save subtler results for deeper analysis (like checking if bipolarization was 50-50 or not).
                ensamblestates.append((a[0][-1], a[1][-1], a[2][-1]))
                #save the number of oscillations encountered
                osc += a[-1]

            #We save the results. The path must be changed to use this on another computer.
            savinggeneric(
                (counts0, countspl, countsmin, countsbip, otherstuff, osc,
                 ensamblestates),
                specifications='Nars=' + str(allargs) + '_N=' + str(N) +
                '_numens=' + str(numens) + '_concb',
                path='C:\\Users\\Fede\\Desktop\\Datos_Argumentos\\Nueva_ArgSim_'
                + str(homo) + '\\')
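
#The same if/elif classification of the final state appears in several exploration functions in this file. The helper below
#is an illustrative sketch of that logic (it is not called by the original code); the labels assume that a[0], a[1] and a[2]
#are the time series of the number of neutral, plus and minus agents, as suggested by the count variable names.
def classifyfinalstate(a, N):
    '''Sketch: classify the final state of one simulation run, given its output a and the number of agents N.'''
    if a[1][-1] == N:
        return 'plus consensus'  #counted as countspl above
    elif a[2][-1] == N:
        return 'minus consensus'  #counted as countsmin above
    elif a[0][-1] == N:
        return 'neutral consensus'  #counted as counts0 above
    elif a[0][-1] == 0:
        return 'bipolarization'  #counted as countsbip above
    else:
        return 'other'  #counted as otherstuff above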
Example 6
def newexplorargsimwithconvergence(homo=0,
                                   allargs=60,
                                   Nini=10,
                                   Nfin=101,
                                   cb='no',
                                   numofrelevargs=6,
                                   numens=1000,
                                   opsys='w'):
    '''This function does the same as the preceding one, but it ignores oscillating states, and instead keeps running
    simulations until "numens" non-oscillating copies have been obtained. Using either function gives the same results. This
    function also allows for testing the time it takes to perform the simulations. Only the new lines are commented. New inputs:
        "Nini" is the initial value of N to explore.
        "Nfin" is the final value of N to explore.
        '''

    from newrunargsim import newrunargsimforprofiling
    import time  #for keeping track of time

    for N in range(Nini, Nfin, 1):  #This time we only explore N inside the function. N_A must be explored with a loop that goes over this function.
        print(N)
        t = time.time()  #initial time
        counts0 = 0
        countspl = 0
        countsmin = 0
        countsbip = 0
        otherstuff = 0
        countsosc = 0
        ensamblestates = []
        stepcounts = []  #initialize stepcount
        i = 0
        while i < numens:  #while instead of for.
            a = newrunargsimforprofiling(allargs, allargs / 2, N,
                                         numofrelevargs, 10000, cb, '', homo,
                                         1.0, 3.0, 'Bid')
            if a[3] == 1:  #if it is an oscillation, we keep track of it, but do nothing else.
                countsosc += 1
            else:  #if it is not, proceed as in the previous function:
                if a[1][-1] == N and a[3] != 1:
                    countspl += 1
                elif a[2][-1] == N and a[3] != 1:
                    countsmin += 1
                elif a[0][-1] == N and a[3] != 1:
                    counts0 += 1
                elif a[0][-1] == 0 and a[1][-1] != N and a[2][-1] != N and a[
                        3] != 1:
                    countsbip += 1
                else:
                    otherstuff += 1
                    ensamblestates.append((a[0][-1], a[1][-1], a[2][-1]))
                stepcounts.append(a[-1])  #We also save how many steps were used.
                i += 1

        elapsed = time.time() - t  #see how much time has passed.
        print(elapsed)  #print it, for keeping track of it on the console.

        from saving import savinggeneric
        savinggeneric(((counts0, countspl, countsmin, countsbip, countsosc,
                        otherstuff), stepcounts, elapsed),
                      'N' + str(N) + 'Nars' + str(allargs) + 'H' + str(homo) +
                      'cb' + cb + 'M' + str(numofrelevargs),
                      path='D:\\Doctorado\\Simulation_Functions\\BarridoM\\')
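
#As noted in the comment inside the N loop, N_A (allargs) has to be swept with a loop wrapped around this function.
#A minimal, hypothetical driver using the same ranges as newexplorargsimforprofiling could look like this:
def sweepallargswithconvergence():
    '''Sketch: call newexplorargsimwithconvergence for every even value of N_A in [6, 100].'''
    for allargs in range(6, 101, 2):  #the number of arguments must be even
        newexplorargsimwithconvergence(homo=0, allargs=allargs, Nini=10, Nfin=101,
                                       cb='no', numofrelevargs=6, numens=1000)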
Example 7
def newexplorargsimwithconvergencereview2(numofrelevargs=6,
                                          cb='no',
                                          homo=0,
                                          numens=1000,
                                          opsys='w'):
    '''This function generalizes the one preceding it. We will not repeat the comments already present in the preceding one, but add new
    comments for new pieces of code. We do not need discrepini and discrepfini anymore.'''

    import numpy as np
    from newrunargsim import newrunargsimforprofiling
    import time

    for N in np.arange(10, 100, 1):  #loop over N
        print(N)  #keep track of N
        for NA in range(10, 101, 2):  #loop over N_A
            print(NA)  #keep track of N_A
            if numofrelevargs < NA:
                t = time.time()
                counts0 = 0
                countspl = 0
                countsmin = 0
                countsbip = 0
                otherstuff = 0
                countsosc = 0
                stepcounts = []
                i = 0
                while i < 100:  #this exploration is VERY time consuming. We used only 100 copies (the results are the same).
                    a = newrunargsimforprofiling(NA, NA / 2, N, numofrelevargs,
                                                 10000, cb, '', homo, 1.0, 3.0,
                                                 'Bid')
                    if a[3] == 1:
                        countsosc += 1
                    else:
                        if a[1][-1] == N and a[3] != 1:
                            countspl += 1
                        elif a[2][-1] == N and a[3] != 1:
                            countsmin += 1
                        elif a[0][-1] == N and a[3] != 1:
                            counts0 += 1
                        elif a[0][-1] == 0 and a[1][-1] != N and a[2][
                                -1] != N and a[3] != 1:
                            countsbip += 1
                        else:
                            otherstuff += 1
                        stepcounts.append(a[-1])
                        i += 1

                elapsed = time.time() - t
                print(elapsed)

                from saving import savinggeneric
                savinggeneric(
                    ((counts0, countspl, countsmin, countsbip, countsosc,
                      otherstuff), stepcounts, elapsed),
                    'N' + str(N) + 'Nars' + str(NA) + 'cb' + cb + 'M' +
                    str(numofrelevargs),
                    path=
                    'D:\\Doctorado\\Simulation_Functions\\BarridoReviewer\\')
            else:
                from saving import savinggeneric
                savinggeneric(
                    ((np.nan, np.nan, np.nan, np.nan, np.nan, np.nan), np.nan,
                     np.nan),
                    'N' + str(N) + 'Nars' + str(NA) + 'cb' + cb + 'M' +
                    str(numofrelevargs),
                    path=
                    'D:\\Doctorado\\Simulation_Functions\\BarridoReviewer2\\')
Example 8
def newexplorargsimwithconvergencereview(numofrelevargs=6,
                                         discrepini=0.2,
                                         discrepfini=2.01,
                                         cb='no',
                                         homo=0,
                                         numens=1000,
                                         opsys='w'):
    '''As mentioned, this was used to study the special case of N_A/N vs M with N=50.
    "numofrelevargs" is M, the memory size (originally name as such).
    "discrepini" is the first value of N_A/N to be explored.
    "discrepfini" is the last value of N_A/N to be explored.
    "cb" is the presence (cb='yes') or abscence (cb='no') of confirmation bias.
    "h**o" is the homophily parameter (h**o=0 means no homophily).
    "numens" is the number of copies of the ensamble.
    "opsys" is obsolete, it was used to change saving options.
    '''
    import numpy as np
    from newrunargsim import newrunargsimforprofiling  #main simulation function
    import time  #to keep track of time

    N = 50  #fixed number of agents

    for discrep in np.arange(discrepini, discrepfini, 0.04):  #0.04 is the minimum step size if N=50; it means that N_A increases in steps of 2.
        NA = round(discrep * N)  #find N_A
        if numofrelevargs < NA:  #if this is false, then it makes no physical sense, since agents remember more arguments than the existing ones.
            #if numofrelevargs not in range(10,50,10):
            print(NA)  #keep track of progress
            t = time.time()  #check initial time
            #initializations:
            counts0 = 0
            countspl = 0
            countsmin = 0
            countsbip = 0
            otherstuff = 0
            countsosc = 0
            stepcounts = []
            i = 0
            while i < numens:  #save only those copies without oscillations, until reaching numens.
                #print(i)
                a = newrunargsimforprofiling(NA, NA / 2, N, numofrelevargs,
                                             10000, cb, '', homo, 1.0, 3.0,
                                             'Bid')
                if a[3] == 1:
                    countsosc += 1  #if it is an oscillating state, only add one to the count, do nothing else
                else:  #if not, proceed as usual
                    #add 1 to the count of the state found
                    if a[1][-1] == N and a[3] != 1:
                        countspl += 1
                    elif a[2][-1] == N and a[3] != 1:
                        countsmin += 1
                    elif a[0][-1] == N and a[3] != 1:
                        counts0 += 1
                    elif a[0][-1] == 0 and a[1][-1] != N and a[2][
                            -1] != N and a[3] != 1:
                        countsbip += 1
                    else:
                        otherstuff += 1
                    stepcounts.append(a[-1])  #keep track of the number of steps
                    i += 1

            elapsed = time.time() - t  #save elapsed time
            print(elapsed)  #print it so it can be seen on the console

            #now we save the results. Change the directory if this is used on another computer
            from saving import savinggeneric
            savinggeneric(
                ((counts0, countspl, countsmin, countsbip, countsosc,
                  otherstuff), stepcounts, elapsed),
                'N' + str(N) + 'Nars' + str(NA) + 'discrep' + str(discrep) +
                'cb' + cb + 'M' + str(numofrelevargs),
                path='D:\\Doctorado\\Simulation_Functions\\BarridoReviewer\\')
        else:  #if we are in those cases without physical sense, we save nans. This simplifies the analysis of results.

            from saving import savinggeneric
            savinggeneric(
                ((np.nan, np.nan, np.nan, np.nan, np.nan, np.nan), np.nan,
                 np.nan),
                'N' + str(N) + 'Nars' + str(NA) + 'discrep' + str(discrep) +
                'cb' + cb + 'M' + str(numofrelevargs),
                path='D:\\Doctorado\\Simulation_Functions\\BarridoReviewer\\')
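
#The remark that a discrep step of 0.04 with N=50 makes N_A advance in steps of 2 can be checked with the quick sketch
#below (standalone, not part of the original code).
def checkdiscrepstep():
    '''Sketch: verify that NA = round(discrep*50) increases by 2 when discrep increases by 0.04.'''
    import numpy as np
    N = 50
    NAs = [int(round(discrep * N)) for discrep in np.arange(0.2, 2.01, 0.04)]
    print(NAs[:5])  #[10, 12, 14, 16, 18]
    print(all(b - a == 2 for a, b in zip(NAs, NAs[1:])))  #True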