Example #1
0
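# Writes the measured and reference ('perfect') ID fields for a magnet arrangement to a single
# HDF5 file: per-beam fields, the total field, and the phase error and trajectory computed from
# the total field.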
def output_fields(filename, id_filename, lookup_filename, magnets_filename, maglist):
    f2 = open(id_filename, 'r')
    info = json.load(f2)
    f2.close()
    f1 = h5py.File(lookup_filename, 'r')
    lookup = {}
    for beam in info['beams']:
        lookup[beam['name']] = f1[beam['name']][...]
    f1.close()

    mags = magnets.Magnets()
    mags.load(magnets_filename)
    ref_mags = generate_reference_magnets(mags)

    f = h5py.File(filename, 'w')
    
    per_beam_field = generate_per_beam_b_field(info, maglist, mags, lookup)
    total_id_field = generate_id_field(info, maglist, mags, lookup)
    for name in per_beam_field.keys():
        f.create_dataset("%s_per_beam" % (name), data=per_beam_field[name])
    f.create_dataset('id_Bfield', data=total_id_field)
    trajectory_information = mt.calculate_phase_error(info, total_id_field)
    f.create_dataset('id_phase_error', data=trajectory_information[0])
    f.create_dataset('id_trajectory', data=trajectory_information[1])
    
    per_beam_field = generate_per_beam_b_field(info, maglist, ref_mags, lookup)
    total_id_field = generate_id_field(info, maglist, ref_mags, lookup)
    for name in per_beam_field.keys():
        f.create_dataset("%s_per_beam_perfect" % (name), data=per_beam_field[name])
    f.create_dataset('id_Bfield_perfect', data=total_id_field)
    trajectory_information = mt.calculate_phase_error(info, total_id_field)
    f.create_dataset('id_phase_error_perfect', data=trajectory_information[0])
    f.create_dataset('id_trajectory_perfect', data=trajectory_information[1])

    f.close()
Example #3
0
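 # Phase error and trajectory from a single B-field slice; note the second call recomputes
 # (self.pherr, self.traj) from self.top's field arrays, overwriting the first result.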
 def calculate_phase_error(self):
     b_array = self.build_b_arrays()[0,0,:,:].squeeze()
     (self.pherr, self.traj) = mt.calculate_phase_error(self.periods,
                                                        self.magdims[2]/float(self.steps_per_magnet),
                                                        4*self.steps_per_magnet,
                                                        b_array.shape[0],
                                                        b_array)
     b_array = self.top.build_b_arrays()[0,0,:,:].squeeze()
     (self.pherr, self.traj) = mt.calculate_phase_error(self.periods,
                                                        self.magdims[2]/float(self.steps_per_magnet),
                                                        4*self.steps_per_magnet,
                                                        b_array.shape[0],
                                                        b_array)
Example #4
0
def saveh5(path, best, genome, info, mags, real_bfield, lookup):
    new_magnets = fg.generate_per_magnet_array(info, best.genome, mags)
    original_magnets = fg.generate_per_magnet_array(info, genome.genome, mags)
    
    update = fg.compare_magnet_arrays(original_magnets, new_magnets, lookup)
    
    updated_bfield = np.array(real_bfield)
    for beam in update.keys():
        if update[beam].size != 0:
            updated_bfield = updated_bfield - update[beam]
    
    outfile = os.path.join(path, genome.uid+'-'+best.uid+'.h5')
    logging.debug("filename is %s" % (outfile))
    f = h5py.File(outfile, 'w')
    
    total_id_field = real_bfield
    f.create_dataset('id_Bfield_original', data=total_id_field)
    trajectory_information=mt.calculate_phase_error(info, total_id_field)
    f.create_dataset('id_phase_error_original', data = trajectory_information[0])
    f.create_dataset('id_trajectory_original', data = trajectory_information[1])
    
    total_id_field = updated_bfield
    f.create_dataset('id_Bfield_shimmed', data=total_id_field)
    trajectory_information=mt.calculate_phase_error(info, total_id_field)
    f.create_dataset('id_phase_error_shimmed', data = trajectory_information[0])
    f.create_dataset('id_trajectory_shimmed', data = trajectory_information[1])
    
    
    ref_mags=generate_reference_magnets(mags)
    total_id_field = generate_id_field(info, best.genome, ref_mags, lookup)
    
    f.create_dataset('id_Bfield_perfect', data=total_id_field)
    trajectory_information=mt.calculate_phase_error(info, total_id_field)
    f.create_dataset('id_phase_error_perfect', data = trajectory_information[0])
    f.create_dataset('id_trajectory_perfect', data = trajectory_information[1])
    
    f.close()
Example #5
0
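# Writes the shimming result for one genome pair to HDF5: the original measured field, the field
# with the per-beam differences between the original and new magnet orderings subtracted
# ('shimmed'), and the field of an idealised reference magnet set, each with its phase error and
# trajectory datasets.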
def saveh5(path, best, genome, info, mags, real_bfield, lookup):
    new_magnets = fg.generate_per_magnet_array(info, best.genome, mags)
    original_magnets = fg.generate_per_magnet_array(info, genome.genome, mags)

    update = fg.compare_magnet_arrays(original_magnets, new_magnets, lookup)

    updated_bfield = np.array(real_bfield)
    for beam in update.keys():
        if update[beam].size != 0:
            updated_bfield = updated_bfield - update[beam]

    outfile = os.path.join(path, genome.uid + '-' + best.uid + '.h5')
    logging.debug("filename is %s" % (outfile))
    f = h5py.File(outfile, 'w')

    total_id_field = real_bfield
    f.create_dataset('id_Bfield_original', data=total_id_field)
    trajectory_information = mt.calculate_phase_error(info, total_id_field)
    f.create_dataset('id_phase_error_original', data=trajectory_information[0])
    f.create_dataset('id_trajectory_original', data=trajectory_information[1])

    total_id_field = updated_bfield
    f.create_dataset('id_Bfield_shimmed', data=total_id_field)
    trajectory_information = mt.calculate_phase_error(info, total_id_field)
    f.create_dataset('id_phase_error_shimmed', data=trajectory_information[0])
    f.create_dataset('id_trajectory_shimmed', data=trajectory_information[1])

    ref_mags = generate_reference_magnets(mags)
    total_id_field = generate_id_field(info, best.genome, ref_mags, lookup)

    f.create_dataset('id_Bfield_perfect', data=total_id_field)
    trajectory_information = mt.calculate_phase_error(info, total_id_field)
    f.create_dataset('id_phase_error_perfect', data=trajectory_information[0])
    f.create_dataset('id_trajectory_perfect', data=trajectory_information[1])

    f.close()
Example #6
0
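# MPI-parallel shim optimisation: each rank builds a local population of shim genomes, populations
# are exchanged with alltoall, genomes with duplicate fitness or excessive age are culled, and
# rank 0 checkpoints the best genome (and an HDF5 summary via saveh5) every iteration.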
def process(options, args):

    if options.seed:
        random.seed(int(options.seed_value))

    if options.singlethreaded:
        rank = 0
        size = 1
    else:
        rank = MPI.COMM_WORLD.rank  # The process ID (integer 0-3 for 4-process run)
        size = MPI.COMM_WORLD.size  # The number of processes in the job.

    # get the hostname
    if options.singlethreaded:
        ip = 'localhost'
    else:
        ip = socket.gethostbyname(socket.gethostname())

    logging.debug("Process %d ip address is : %s" % (rank, ip))

    f2 = open(options.id_filename, 'r')
    info = json.load(f2)
    f2.close()

    logging.debug("Loading Lookup")
    f1 = h5py.File(options.lookup_filename, 'r')
    lookup = {}
    for beam in info['beams']:
        logging.debug("Loading beam %s" % (beam['name']))
        lookup[beam['name']] = f1[beam['name']][...]
    f1.close()

    barrier(options.singlethreaded)

    logging.debug("Loading Initial Bfield")
    f1 = h5py.File(options.bfield_filename, 'r')
    real_bfield = f1['id_Bfield'][...]
    f1.close()
    logging.debug(real_bfield)

    barrier(options.singlethreaded)

    logging.debug("Loading magnets")
    mags = magnets.Magnets()
    mags.load(options.magnets_filename)

    logging.debug('mpi runner calling fg.generate_reference_magnets()')
    ref_mags = fg.generate_reference_magnets(mags)
    logging.debug('mpi runner calling MagLists()')
    ref_maglist = magnets.MagLists(ref_mags)
    logging.debug('after ref_maglist')
    ref_total_id_field = fg.generate_id_field(info, ref_maglist, ref_mags,
                                              lookup)
    pherr, ref_trajectories = mt.calculate_phase_error(info,
                                                       ref_total_id_field)

    barrier(options.singlethreaded)

    #epoch_path = os.path.join(args[0], 'epoch')
    #next_epoch_path = os.path.join(args[0], 'nextepoch')
    # start by creating the directory to put the initial population in

    population = []
    estar = options.e
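    # estar is the fitness target passed to mutations() to scale how many mutations each genome
    # receives; it is tightened to 99% of the current best fitness after every iteration.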

    # Load the initial genome
    initialgenome = ID_BCell()
    initialgenome.load(options.genome_filename)

    referencegenome = ID_BCell()
    referencegenome.load(options.genome_filename)

    # make the initial population
    for i in range(options.setup):
        # create a fresh shim genome
        newgenome = ID_Shim_BCell()
        newgenome.create(info, lookup, mags, initialgenome.genome,
                         ref_trajectories, options.number_of_changes,
                         real_bfield)
        population.append(newgenome)

    # gather the population
    trans = []
    for i in range(size):
        trans.append(population)

    allpop = alltoall(options.singlethreaded, trans)

    barrier(options.singlethreaded)

    newpop = []
    for pop in allpop:
        newpop += pop

    # Need to deal with replicas and old genomes
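    # Genomes are keyed by fitness formatted to 8 significant figures, so effective replicas
    # collapse to a single entry (keeping the older genome); anything that has reached max_age
    # is then dropped.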
    popdict = {}
    for genome in newpop:
        fitness_key = "%1.8E" % (genome.fitness)
        if fitness_key in popdict.keys():
            if popdict[fitness_key].age < genome.age:
                popdict[fitness_key] = genome
        else:
            popdict[fitness_key] = genome

    newpop = []
    for genome in popdict.values():
        if genome.age < options.max_age:
            newpop.append(genome)

    newpop.sort(key=lambda x: x.fitness)

    newpop = newpop[options.setup * rank:options.setup * (rank + 1)]

    for genome in newpop:
        logging.debug("genome fitness: %1.8E   Age : %2i   Mutations : %4i" %
                      (genome.fitness, genome.age, genome.mutations))

    #Checkpoint best solution
    if rank == 0:
        logging.debug("Best fitness so far is %f" % (newpop[0].fitness))
        newpop[0].save(args[0])

    # now run the processing
    for i in range(options.iterations):

        barrier(options.singlethreaded)
        logging.debug("Starting itteration %i" % (i))

        nextpop = []

        for genome in newpop:

            # now we have to create the offspring
            # TODO this is for the moment
            logging.debug("Generating children for %s" % (genome.uid))
            number_of_children = options.setup
            number_of_mutations = mutations(options.c, estar, genome.fitness,
                                            options.scale)
            children = genome.generate_children(number_of_children,
                                                number_of_mutations,
                                                info,
                                                lookup,
                                                mags,
                                                ref_trajectories,
                                                real_bfield=real_bfield)

            # now save the children into the new file
            for child in children:
                nextpop.append(child)

            # and save the original
            nextpop.append(genome)

        # gather the population
        trans = []
        for i in range(size):
            trans.append(nextpop)

        allpop = alltoall(options.singlethreaded, trans)

        newpop = []
        for pop in allpop:
            newpop += pop

        popdict = {}
        for genome in newpop:
            fitness_key = "%1.8E" % (genome.fitness)
            if fitness_key in popdict.keys():
                if popdict[fitness_key].age < genome.age:
                    popdict[fitness_key] = genome
            else:
                popdict[fitness_key] = genome

        newpop = []
        for genome in popdict.values():
            if genome.age < options.max_age:
                newpop.append(genome)

        newpop.sort(key=lambda x: x.fitness)

        estar = newpop[0].fitness * 0.99
        logging.debug("new estar is %f" % (estar))

        newpop = newpop[options.setup * rank:options.setup * (rank + 1)]

        #Checkpoint best solution
        if rank == 0:
            initialgenome.genome.mutate_from_list(newpop[0].genome)
            initialgenome.fitness = newpop[0].fitness
            initialgenome.uid = "A" + newpop[0].uid
            initialgenome.save(args[0])
            saveh5(args[0], initialgenome, referencegenome, info, mags,
                   real_bfield, lookup)
            # After the save reload the original data
            initialgenome.load(options.genome_filename)
            newpop[0].save(args[0])

        for genome in newpop:
            logging.debug(
                "genome fitness: %1.8E   Age : %2i   Mutations : %4i" %
                (genome.fitness, genome.age, genome.mutations))

        barrier(options.singlethreaded)

    barrier(options.singlethreaded)

    # gather the population
    trans = []
    for i in range(size):
        trans.append(nextpop)

    allpop = alltoall(options.singlethreaded, trans)

    newpop = []
    for pop in allpop:
        newpop += pop

    newpop.sort(key=lambda x: x.fitness)

    newpop = newpop[options.setup * rank:options.setup * (rank + 1)]

    #Checkpoint best solution
    if rank == 0:
        initialgenome.genome.mutate_from_list(newpop[0].genome)
        initialgenome.age_bcell()
        initialgenome.save(args[0])
        newpop[0].save(args[0])
Example #7
0
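# Scores a candidate field: the trajectory returned by mt.calculate_phase_error is compared
# against the reference trajectories via generate_id_field_cost.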
def calculate_trajectory_fitness_from_array(total_id_field, info, ref_trajectories):
    pherr, test_array = mt.calculate_phase_error(info, total_id_field)
    return generate_id_field_cost(test_array, ref_trajectories)
Example #8
0
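# As above, but the field is first generated from the magnet list via the lookup tables; the
# field is returned alongside its cost so callers can reuse it without regenerating.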
def calculate_cached_trajectory_fitness(info, lookup, magnets, maglist, ref_trajectories):
    total_id_field = generate_id_field(info, maglist, magnets, lookup)
    pherr, test_array = mt.calculate_phase_error(info, total_id_field)
    return (total_id_field, generate_id_field_cost(test_array, ref_trajectories))
Example #9
0
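    # Builds the reference ('perfect') field and trajectories, scores a shuffled magnet list
    # against them, then applies single mutations to deep copies of that list.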
    #f1 = h5py.File('/home/gdy32713/DAWN_stable/optid/Opt-ID/IDSort/src/v2/2015test.h5', 'r')
    f1 = h5py.File(args[1], 'r')
    lookup = {}
    for beam in info['beams']:
        lookup[beam['name']] = f1[beam['name']][...]
    f1.close()
    

    mags = magnets.Magnets()
    #mags.load('/home/gdy32713/DAWN_stable/optid/Opt-ID/IDSort/src/v2/magnets.mag')
    mags.load(args[2])
    
    ref_mags = generate_reference_magnets(mags)
    ref_maglist = magnets.MagLists(ref_mags)
    ref_total_id_field = generate_id_field(info, ref_maglist, ref_mags, lookup)
    ref_pherr, ref_trajectories = mt.calculate_phase_error(info, ref_total_id_field)

    maglist = magnets.MagLists(mags)
    maglist.shuffle_all()
    original_bfield, maglist_fitness = calculate_cached_trajectory_fitness(info, lookup, mags, maglist, ref_trajectories)
    
    mag_array = generate_per_magnet_array(info, maglist, mags)
    
    for i in range(2):
    

        maglist2 = copy.deepcopy(maglist)
        maglist2.mutate(1)
        
        mag_array2 = generate_per_magnet_array(info, maglist2, mags)
        
Example #13
0
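# Single-pass variant: load the measured field and reference trajectories, generate one batch of
# children from the loaded genome, and sort them by fitness.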
logging.debug("Loading Initial Bfield")
f1 = h5py.File(options.bfield_filename, 'r')
real_bfield = f1['id_Bfield'][...]
f1.close()

MPI.COMM_WORLD.Barrier()

logging.debug("Loading magnets")
mags = magnets.Magnets()
mags.load(options.magnets_filename)

ref_mags = fg.generate_reference_magnets(mags)
ref_maglist = magnets.MagLists(ref_mags)
ref_total_id_field = fg.generate_id_field(info, ref_maglist, ref_mags, lookup)
pherr, ref_trajectories = mt.calculate_phase_error(info, ref_total_id_field)

MPI.COMM_WORLD.Barrier()

# Load the initial genome
genome = ID_BCell()
genome.load(options.genome_filename)


MPI.COMM_WORLD.Barrier()

# now run the processing
children = genome.generate_children(options.num_children, options.number_of_mutations, info, lookup, mags, ref_trajectories, real_bfield=real_bfield)

children.sort(key=lambda x: x.fitness)
Example #14
0
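 # Phase error, trajectory, and first/second field integrals for the top beam, the bottom beam,
 # and the full device; when shimming, precomputed 'real' field arrays are used instead of
 # rebuilding them.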
 def calculate_phase_error(self):
     if self.shimming == False:
         b_array = self.top_beam.build_b_arrays()
         b_array = b_array[b_array.shape[0]//2,0,:,:].squeeze()
         (self.top_pherr, self.top_traj) = mt.calculate_phase_error(self.periods,
                                                            self.magdims[2]/float(self.steps_per_magnet),
                                                            4*self.steps_per_magnet,
                                                            b_array.shape[0],
                                                            b_array)
         b_array = self.bottom_beam.build_b_arrays()
         b_array = b_array[b_array.shape[0]//2,0,:,:].squeeze()
         (self.bottom_pherr, self.bottom_traj) = mt.calculate_phase_error(self.periods,
                                                            self.magdims[2]/float(self.steps_per_magnet),
                                                            4*self.steps_per_magnet,
                                                            b_array.shape[0],
                                                            b_array)
         b_array = self.build_b_array()
         
         #Calculate integrals here
         #[0] = firstIX; [1]=firstIZ, [2] = secondIX; [3] = secondIZ
         integrals = np.zeros([4,b_array.shape[0]])
         
         sstep = self.magdims[2]/float(self.steps_per_magnet)
         smin = -(np.shape(b_array)[2]//2)*sstep
         smax = -smin
         
         integrals[0,:] = np.sum(b_array[:,0,:,0], axis=1)*1e4*sstep
         integrals[1,:] = np.sum(b_array[:,0,:,1], axis=1)*1e4*sstep
         scale_factor = -np.arange(smin, smax, sstep)
         
         integrals[2,:] = -np.sum(b_array[:,0,:,0]*scale_factor, axis=1)*1e4*sstep
         integrals[3,:] = -np.sum(b_array[:,0,:,1]*scale_factor, axis=1)*1e4*sstep
         
         
         b_array = b_array[b_array.shape[0]//2,0,:,:].squeeze()
         (self.pherr, self.traj) = mt.calculate_phase_error(self.periods,
                                                            self.magdims[2]/float(self.steps_per_magnet),
                                                            4*self.steps_per_magnet,
                                                            b_array.shape[0],
                                                            b_array)
     
     if self.shimming == True:
         b_array = self.top_beam.real_barray
         (self.top_pherr, self.top_traj) = mt.calculate_phase_error(self.periods,
                                                            self.magdims[2]/float(self.steps_per_magnet),
                                                            4*self.steps_per_magnet,
                                                            b_array.shape[0],
                                                            b_array)
         b_array = self.bottom_beam.real_barray
         (self.bottom_pherr, self.bottom_traj) = mt.calculate_phase_error(self.periods,
                                                            self.magdims[2]/float(self.steps_per_magnet),
                                                            4*self.steps_per_magnet,
                                                            b_array.shape[0],
                                                            b_array)
         b_array = self.real_b_array
         
         #Calculate integrals here
         #[0] = firstIX; [1]=firstIZ, [2] = secondIX; [3] = secondIZ
         integrals = np.zeros([4,1])
         
         sstep = self.magdims[2]/float(self.steps_per_magnet)
         smin = -(np.shape(b_array)[0]//2)*sstep
         smax = -smin
         
         integrals[0,:] = np.sum(b_array[:,0])*1e4*sstep
         integrals[1,:] = np.sum(b_array[:,1])*1e4*sstep
         scale_factor = -np.arange(smin, smax, sstep)
         
         integrals[2,:] = -np.sum(b_array[:,0]*scale_factor)*1e4*sstep
         integrals[3,:] = -np.sum(b_array[:,1]*scale_factor)*1e4*sstep
         
         
         #b_array = b_array[b_array.shape[0]/2,0,:,:].squeeze()
         (self.pherr, self.traj) = mt.calculate_phase_error(self.periods,
                                                            self.magdims[2]/float(self.steps_per_magnet),
                                                            4*self.steps_per_magnet,
                                                            b_array.shape[0],
                                                            b_array)            
     
     return (self.top_pherr, self.top_traj, self.bottom_pherr, self.bottom_traj, self.pherr, self.traj, integrals)
Example #15
0
logging.debug("Loading Initial Bfield")
f1 = h5py.File(options.bfield_filename, 'r')
real_bfield = f1['id_Bfield'][...]
f1.close()

MPI.COMM_WORLD.Barrier()

logging.debug("Loading magnets")
mags = magnets.Magnets()
mags.load(options.magnets_filename)

ref_mags = fg.generate_reference_magnets(mags)
ref_maglist = magnets.MagLists(ref_mags)
ref_total_id_field = fg.generate_id_field(info, ref_maglist, ref_mags, lookup)
pherr, ref_trajectories = mt.calculate_phase_error(info, ref_total_id_field)

MPI.COMM_WORLD.Barrier()

# Load the initial genome
genome = ID_BCell()
genome.load(options.genome_filename)

MPI.COMM_WORLD.Barrier()

# now run the processing
children = genome.generate_children(options.num_children,
                                    options.number_of_mutations,
                                    info,
                                    lookup,
                                    mags,
                                    ref_trajectories,
                                    real_bfield=real_bfield)
Example #16
0
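# Variant of the MPI runner that sorts full magnet lists rather than shims: with options.restart
# set, rank 0 reloads previously saved genomes (sorted by filename for determinism) and tops the
# population up with children of the first genome; otherwise every rank starts from shuffled
# magnet lists.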
def process(options, args):

    if options.seed:
        random.seed(int(options.seed_value))

    if options.singlethreaded:
        rank = 0
        size = 1
    else:
        rank = MPI.COMM_WORLD.rank  # The process ID (integer 0-3 for 4-process run)
        size = MPI.COMM_WORLD.size  # The number of processes in the job.

    # get the hostname
    if options.singlethreaded:
        ip = 'localhost'
    else:
        ip = socket.gethostbyname(socket.gethostname())

    logging.debug("Process %d ip address is : %s" % (rank, ip))

    f2 = open(options.id_filename, 'r')
    info = json.load(f2)
    f2.close()

    logging.debug("Loading Lookup")
    f1 = h5py.File(options.lookup_filename, 'r')
    lookup = {}
    for beam in info['beams']:
        logging.debug("Loading beam %s" % (beam['name']))
        lookup[beam['name']] = f1[beam['name']][...]
    f1.close()

    barrier(options.singlethreaded)

    logging.debug("Loading magnets")
    mags = magnets.Magnets()
    mags.load(options.magnets_filename)

    ref_mags = fg.generate_reference_magnets(mags)
    ref_maglist = magnets.MagLists(ref_mags)
    ref_total_id_field = fg.generate_id_field(info, ref_maglist, ref_mags,
                                              lookup)
    #logging.debug("before phase calculate error call")
    #logging.debug(ref_total_id_field.shape())
    pherr, ref_trajectories = mt.calculate_phase_error(info,
                                                       ref_total_id_field)

    barrier(options.singlethreaded)

    #epoch_path = os.path.join(args[0], 'epoch')
    #next_epoch_path = os.path.join(args[0], 'nextepoch')
    # start by creating the directory to put the initial population in

    population = []
    estar = options.e

    if options.restart and (rank == 0):
        filenames = os.listdir(args[0])
        # sort the genome filenames to ensure that when given the same set of
        # files in a directory, population[0] is the same across different
        # orderings of the listed directory contents: this is to fix the test
        # MpiRunnerTest.test_process_initial_population() in mpi_runner_test.py
        # when run on travis
        filenames.sort()
        for filename in filenames:
            fullpath = os.path.join(args[0], filename)
            try:
                logging.debug("Trying to load %s" % (fullpath))
                genome = ID_BCell()
                genome.load(fullpath)
                population.append(genome)
                logging.debug("Loaded %s" % (fullpath))
            except Exception:
                logging.debug("Failed to load %s" % (fullpath))
        if len(population) < options.setup:
            # Seed with children from first
            children = population[0].generate_children(
                options.setup - len(population), 20, info, lookup, mags,
                ref_trajectories)
            # now save the children into the new file
            for child in children:
                population.append(child)
    else:
        logging.debug("make the initial population")
        for i in range(options.setup):
            # create a fresh maglist
            maglist = magnets.MagLists(mags)
            maglist.shuffle_all()
            genome = ID_BCell()
            genome.create(info, lookup, mags, maglist, ref_trajectories)
            population.append(genome)

    logging.debug("Initial population created")

    # gather the population
    trans = []
    for i in range(size):
        trans.append(population)

    allpop = alltoall(options.singlethreaded, trans)

    barrier(options.singlethreaded)

    newpop = []
    for pop in allpop:
        newpop += pop

    # Need to deal with replicas and old genomes
    popdict = {}
    for genome in newpop:
        fitness_key = "%1.8E" % (genome.fitness)
        if fitness_key in popdict.keys():
            if popdict[fitness_key].age < genome.age:
                popdict[fitness_key] = genome
        else:
            popdict[fitness_key] = genome

    newpop = []
    for genome in popdict.values():
        if genome.age < options.max_age:
            newpop.append(genome)

    newpop.sort(key=lambda x: x.fitness)

    newpop = newpop[options.setup * rank:options.setup * (rank + 1)]

    for genome in newpop:
        logging.debug("genome fitness: %1.8E   Age : %2i   Mutations : %4i" %
                      (genome.fitness, genome.age, genome.mutations))

    #Checkpoint best solution
    if rank == 0:
        newpop[0].save(args[0])

    # now run the processing
    for i in range(options.iterations):

        barrier(options.singlethreaded)
        logging.debug("Starting itteration %i" % (i))

        nextpop = []

        for genome in newpop:

            # now we have to create the offspring
            # TODO this is for the moment
            logging.debug("Generating children for %s" % (genome.uid))
            number_of_children = options.setup
            number_of_mutations = mutations(options.c, estar, genome.fitness,
                                            options.scale)
            children = genome.generate_children(number_of_children,
                                                number_of_mutations, info,
                                                lookup, mags, ref_trajectories)

            # now save the children into the new file
            for child in children:
                nextpop.append(child)

            # and save the original
            nextpop.append(genome)

        # gather the population
        trans = []
        for i in range(size):
            trans.append(nextpop)

        allpop = alltoall(options.singlethreaded, trans)

        newpop = []
        for pop in allpop:
            newpop += pop

        popdict = {}
        for genome in newpop:
            fitness_key = "%1.8E" % (genome.fitness)
            if fitness_key in popdict.keys():
                if popdict[fitness_key].age < genome.age:
                    popdict[fitness_key] = genome
            else:
                popdict[fitness_key] = genome

        newpop = []
        for genome in popdict.values():
            if genome.age < options.max_age:
                newpop.append(genome)

        newpop.sort(key=lambda x: x.fitness)

        estar = newpop[0].fitness * 0.99
        logging.debug("new estar is %f" % (estar))

        newpop = newpop[options.setup * rank:options.setup * (rank + 1)]

        #Checkpoint best solution
        if rank == 0:
            newpop[0].save(args[0])

        for genome in newpop:
            logging.debug(
                "genome fitness: %1.8E   Age : %2i   Mutations : %4i" %
                (genome.fitness, genome.age, genome.mutations))

        barrier(options.singlethreaded)

    barrier(options.singlethreaded)

    # gather the population
    trans = []
    for i in range(size):
        trans.append(nextpop)

    allpop = alltoall(options.singlethreaded, trans)

    newpop = []
    for pop in allpop:
        newpop += pop

    newpop.sort(key=lambda x: x.fitness)

    newpop = newpop[options.setup * rank:options.setup * (rank + 1)]

    #Checkpoint best solution
    if rank == 0:
        newpop[0].save(args[0])