Example #1
def nth_moment_compute_and_save(nth, seeds, paths, time, files):
    """
    This function is used to compute all the nth moments we specify involving our positional data and then saves them
    to files.
    :param seeds:
    :param paths:
    :param time:
    :param files:
    :return:
    """
    # todo: create mnx, mny directories

    print('X-DIRECTION')
    for f, file_ in enumerate(files):
        mnxs = []
        for s, seed in enumerate(seeds):
            fil = paths[str(seed)][f]
            print(file_.split('/')[-1])
            par = particles.Particles(fil,
                                      dimension=2,
                                      time_start=0,
                                      time_end=2500,
                                      time_step=2)
            x = par.qx[par.qx < 200]
            m0 = zero_moment(par.qx)
            mnx = mom(nth, position=x, time=time, m0=m0, norm=False) / 1000
            mnxs.append(mnx)
        df = pd.DataFrame(np.array(mnxs).T,
                          columns=[str(s) for s in seeds],
                          index=time)
        df.to_csv(
            '/Users/georgepamfilis/Desktop/THESIS/comsol_project/DATA/micromodel/data_v2/m'
            + str(nth) + 'x/' + file_.split('/')[-1])
    print('Y-DIRECTION')
    for f, file_ in enumerate(files):
        mnys = []
        for s, seed in enumerate(seeds):
            fil = paths[str(seed)][f]
            print(file_.split('/')[-1])
            par = particles.Particles(fil,
                                      dimension=2,
                                      time_start=0,
                                      time_end=2500,
                                      time_step=2)
            y = par.qy[par.qx < 200]
            m0 = zero_moment(par.qx)
            mny = mom(nth, position=y, time=time, m0=m0, norm=False) / 1000
            mnys.append(mny)
        df = pd.DataFrame(np.array(mnys).T,
                          columns=[str(s) for s in seeds],
                          index=time)
        df.to_csv(
            '/Users/georgepamfilis/Desktop/THESIS/comsol_project/DATA/micromodel/data_v2/m'
            + str(nth) + 'y/' + file_.split('/')[-1])
Example #2
def zero_moment_compute_and_save(seeds, paths, time, files):
    """
    This function is used to compute all the zero moments involving our positional data and then saves them to files.
    :param seeds:
    :param paths:
    :param time:
    :param files:
    :return:
    """
    for f, file_ in enumerate(files):
        m0s = []
        for s, seed in enumerate(seeds):
            fil = paths[str(seed)][f]
            print(file_.split('/')[-1])
            par = particles.Particles(fil,
                                      dimension=2,
                                      time_start=0,
                                      time_end=2500,
                                      time_step=2)
            m0 = zero_moment(par.qx)
            m0s.append(m0)
        df = pd.DataFrame(np.array(m0s).T,
                          columns=[str(s) for s in seeds],
                          index=time)
        df.to_csv(
            '/Users/georgepamfilis/Desktop/THESIS/comsol_project/DATA/micromodel/data_v2/m0/'
            + file_.split('/')[-1])
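
A minimal driver sketch for the two helpers above. The seed list, file names, and
time vector are illustrative assumptions, not values from the original project:

import numpy as np

# Hypothetical inputs; adjust to the actual data layout.
seeds = [1, 2, 3]
files = ['run_a.txt', 'run_b.txt']
paths = {str(s): ['seed_' + str(s) + '/run_a.txt',
                  'seed_' + str(s) + '/run_b.txt'] for s in seeds}
time = np.arange(0, 2500, 2)  # assumed to match time_start/time_end/time_step used above

zero_moment_compute_and_save(seeds, paths, time, files)
nth_moment_compute_and_save(1, seeds, paths, time, files)  # first moments in x and y
nth_moment_compute_and_save(2, seeds, paths, time, files)  # second moments in x and y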
Example #3
 def __init__(self, width, height):
     self.width = width
     self.height = height
     self.background = (10, 10, 10)
     self.points = Points()
     self.particles = particles.Particles()
     self.nodeColour = (255, 255, 255)
     self.nodeRadius = 4
     self.doTick = True
     self.tick_step = 0.01
     self.outputfile = None
     self.load()
     pygame.font.init()
     self.myfont = pygame.font.SysFont('Arial', 30)
Example #4
def get_particles(args, prior, num_edges):

    if args.particles_type == 'euclidian':
        return particles.Particles(prior, args.N, args.num_particles,
                                   num_edges, args.with_weights,
                                   args.with_couplings, args.product_particles,
                                   args.noise_level, args.noise_decay)
    elif args.particles_type == 'quaternion':
        return particles.QuaternionParticles(prior, args.N, args.num_particles,
                                             num_edges, args.with_weights,
                                             args.with_couplings,
                                             args.product_particles,
                                             args.noise_level,
                                             args.noise_decay)
    else:
        raise NotImplementedError()
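
A usage sketch for the dispatcher above; only the attribute names are taken from the
calls shown, while the values, the prior object, and the edge count are placeholders:

import argparse

# Hypothetical arguments; the attribute names mirror those read by get_particles().
args = argparse.Namespace(particles_type='euclidian',  # or 'quaternion'
                          N=10,
                          num_particles=128,
                          with_weights=False,
                          with_couplings=False,
                          product_particles=True,
                          noise_level=0.1,
                          noise_decay=0.99)

prior = None      # placeholder for the prior expected by the Particles classes
num_edges = 32    # placeholder edge count

particle_set = get_particles(args, prior, num_edges)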
Example #5
    restriction = histogram.Histogram(param_histogram)

    param = particles.Particles.getDefaultParameters()
    param['Dt'] = Dt
    param['N'] = 100000
    precond_param = precond.Precond.getDefaultParameters()
    precond_param['Dstar'] = 0.
    precond_param['sigma'] = scipy.zeros_like(grid)
    precond_param['kappa'] = particles.doublewell
    param['precond'] = precond.Precond(precond_param)

    param_histogram = histogram.Histogram.getDefaultParameters()
    param_histogram['h'] = 2e-2
    restriction = histogram.Histogram(param_histogram)

    fp_sde = particles.Particles(lifting,restriction,rho,grid,lambd,param)

    # CREATING LINEAR SOLVER
    gmres_param = GMRES.GMRESLinearSolver.getDefaultParameters()
    gmres_param['tol']=1e-8
    gmres_param['print']='short'
    gmres_param['builtin']=True
    linsolv = GMRES.GMRESLinearSolver(gmres_param)

    # CREATING NEWTON SOLVER
    newt_param = NewtonSolver.NewtonSolver.getDefaultParameters()
    newt_param['rel_tol']=1e-7
    newt_param['abs_tol']=1e-7
    newt_param['print']='none'
    newt_param['max_iter']=1
    nsolv = NewtonSolver.NewtonSolver(linsolv,newt_param)
path = ("plot")
CHECK_FOLDER = os.path.isdir(path)

# If folder doesn't exist, then create it.
if not CHECK_FOLDER:
    os.makedirs(path)

# frequency of plotting the epidemic states and saving images
plot_freq = 1000

# the object of class simulator
sim = simulator.Simulator()

# the object of class Particles
particles = particles.Particles(sim)

for i in range(sim.number_of_iter):
    if i % 10 == 0:
        print("Completed {}/{} iterations".format(i, sim.number_of_iter))
    if i == 33408:
        print('done')

    # update the records for each epidemic state
    particles.update_states(i, sim)

    # update the velocities and coordinates of particles
    particles.update_velocities(i, sim)
    particles.update_coordinates(sim)

    # number of vaccines for current iteration
Example #7
                1]  #reduce the number of particle simulations by using the data from previous simulations
            if len(grid) > N:
                print("The number of particles is smaller than the number of bins! Please increase N or increase dx")

            param['Nlarge'] = Nlist[n]
            param['Nsmall'] = N
            # param['Nsmall']=Nlist[n]

            print('run simulation with N =', N, '=', Nlist[n], '-',
                  np.sign(n) * Nlist[n - 1], 'particles')
            if rg_state is not None:
                lifting.set_rg_state(rg_state)
            fp_sde = particles.Particles(lifting,
                                         restriction,
                                         rho,
                                         grid,
                                         lambd,
                                         x_prev_sim,
                                         w_prev_sim,
                                         param=param)
            rg_state = lifting.get_rg_state()
            # Remember the internal state of the random generator so that new
            # random numbers are drawn when sampling the new particles.

            print "calculate .u_Dt"
            rho_Dt = fp_sde.u_Dt
            xmean = fp_sde.x_mean
            np.savetxt('Newton/x_mean_N1e5_mu0p05_alpha5_Dt200', xmean)
            #rho_fine = fp_sde.make_rho_fine(rho_Dt)
            #   rho_sq[n] = rho_sq[n] + (norm(rho_Dt))**2
            E_rho[n] = E_rho[n] + rho_Dt  #matrix

            t0 = time.time()
Example #8
lifting0 = inv_transform.Sampler(sampler_param)

sampler_param = inv_transform.Sampler.getDefaultParameters()
sampler_param['seed'] = 1
lifting1 = inv_transform.Sampler(sampler_param)

sampler_param = inv_transform.Sampler.getDefaultParameters()
sampler_param['seed'] = 2
lifting2 = inv_transform.Sampler(sampler_param)

for n in range(len(Nlist)):
    N = Nlist[n]
    param['N'] = N

    print "running with seed 0 ?"
    fp_sde = particles.Particles(lifting0, restriction, rho, grid, lambd,
                                 param)
    rho0_Dt = fp_sde.u_Dt
    tolerance0[n] = scipy.amax(scipy.absolute(rho0_Dt - rho_Dt))

    print "running with seed 1 ?"
    fp_sde = particles.Particles(lifting1, restriction, rho0, grid, lambd,
                                 param)
    rho1_Dt = fp_sde.u_Dt
    tolerance1[n] = scipy.amax(scipy.absolute(rho1_Dt - rho_Dt))

    print "running with seed 2 ?"
    fp_sde = particles.Particles(lifting2, restriction, rho0, grid, lambd,
                                 param)
    rho2_Dt = fp_sde.u_Dt
    tolerance2[n] = scipy.amax(scipy.absolute(rho2_Dt - rho_Dt))
Example #9
        "/xy_1250.txt", "/xy_1500.txt", "/xy_1750.txt", "/xy_2000.txt"
    ]

    d = [int(f.split('_')[1].split('.')[0]) for f in files]
    files_ = [folder + f for f in files]

    print('x-dir')

    spatial_variances_x = []

    for f in range(len(files_)):

        print(files_[f])
        par = particles.Particles(files_[f],
                                  dimension=2,
                                  time_start=0,
                                  time_end=4,
                                  time_step=500)
        x = par.qx[par.qx < .1]
        y = par.qy[par.qx < .1]

        spa_var_x = []

        for t in range(x.shape[1]):
            a = x[t].dropna()
            if a.shape[0] == 0:
                spa_var_x.append(0)
            else:
                # plug-in spatial variance: E[x^2] - (E[x])^2
                spa_var_x.append(np.sum(a**2) / a.shape[0] -
                                 (np.sum(a) / a.shape[0])**2)
Example #10
 def create_working_particles_effect(self, rect):
     particle = particles.Particles(rect)
     self.sprites.add(particle)
Example #11
lx = 0.04  # Domains size in x
ly = 0.04  # and y
nParticles = 100000  # Number of macro particles
weightInit = 1.e9 / nParticles  # Initial weight of macro particles
nMaxParticles = nParticles  # Maximum number of macro particles
dt = 2.155172413795e-11  # Time step
nTimeSteps = 10000  # Number of time steps of the simulation
nAnimate = 20  # Number of time steps between graphical output
nTimeStepsPartMan = 50  # Number of time steps between particle management
nHistoryOut = 1000  # Number of time steps between history output

# Generate all objects
gridObj = Grid(nx, ny, lx, ly, 1)  # Elliptical boundary with constant radius is circular
beamObj = LHCBeam(gridObj, dt)  # LHC beam type
particlesObj = particles.Particles('electrons', 'variableWeight')  # Particles types and kind
particleBoundaryObj = AbsorbElliptical(gridObj, particlesObj)  # Particle boundary fitting to grid/field boundary
secElecEmitObj = FurmanEmitter(particleBoundaryObj, particlesObj)  # Secondary emission at particle boundary
poissonSolverObj = PoissonSolver(gridObj)  # Poisson solver for electric field calculation

# Some setup
homoLoader(gridObj, particlesObj, particleBoundaryObj, nParticles,
           weightInit)  # Loads a homogeneous particle distribution
bAtGridPoints = magneticField.multipoleExpansion(
    gridObj, [0., 5., 0., 0.01])  # Prepare quadrupole field (with error).

physicalParticleCount = numpy.zeros(
Example #12
 def create_particles(self):
     while len(self.particle_group) < self.settings.n_particles:
         particle = particles.Particles(self)
         self.particle_group.add(particle)