Example #1
0
def run_geo_opt(polymer, poly_size, smiles_list):
    '''
    Runs geometry optimization calculation on given polymer

    Parameters
    ---------
    polymer: list (specific format)
        [(#,#,#,#), A, B]
    poly_size: int
        number of monomers per polymer
    smiles_list: list
        list of all possible monomer SMILES

    '''
    # make file name string w/ convention monoIdx1_monoIdx2_fullNumericalSequence
    file_name = utils.make_file_name(polymer, poly_size)

    # if output file already exists, skip xTB
    exists = os.path.isfile('output/%s.out' % (file_name))
    if exists:
        print("output file already exists; skipping %s" % file_name)
        return

    # make polymer into SMILES string
    poly_smiles = utils.make_polymer_str(polymer, smiles_list, poly_size)

    # make polymer string into pybel molecule object
    mol = pybel.readstring('smi', poly_smiles)
    utils.make3D(mol)

    # write polymer .xyz file to the input folder
    mol.write('xyz', 'input/%s.xyz' % (file_name), overwrite=True)

    # make directory to run xtb in for the polymer
    mkdir_poly = subprocess.call('(mkdir %s)' % (file_name), shell=True)

    # run xTB geometry optimization
    xtb = subprocess.call(
        '(cd %s && /ihome/ghutchison/geoffh/xtb/xtb ../input/%s.xyz --opt >../output/%s.out)'
        % (file_name, file_name, file_name),
        shell=True)

    # copy the xTB-optimized geometry into the opt directory
    save_opt_file = subprocess.call('(cp %s/xtbopt.xyz opt/%s_opt.xyz)' %
                                    (file_name, file_name),
                                    shell=True)

    # delete xtb run directory for the polymer
    del_polydir = subprocess.call('(rm -r %s)' % (file_name), shell=True)
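
# Hypothetical usage sketch (not part of the original script; the literal values below
# .. are illustrative assumptions): run_geo_opt expects a polymer in the
# .. [(#,#,#,#), A, B] format from the docstring plus the full monomer SMILES list.
#
# smiles_list = ['c1ccccc1', 'C=C']       # all candidate monomer SMILES
# polymer = [(0, 1, 0, 1), 0, 1]          # numerical sequence + two monomer indices
# run_geo_opt(polymer, poly_size=6, smiles_list=smiles_list)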
Example #2
0
params_SA['N_time_step'] = 100
params_SA['symmetrize'] = False

percentile = 99

fid_SA_group = {}
T_count = 0
nQ_list = [
    0, 100, 200, 300, 400, 500, 1000, 2000, 3000, 5000, 10000, 50000, 100000
]
for T in np.arange(0.1, 4.01, 0.1):
    for nQ in nQ_list:
        params_SA['N_quench'] = nQ
        params_SA['delta_t'] = T / 100
        file_name = 'data/' + utils.make_file_name(params_SA)
        tmp = []
        with open(file_name, 'rb') as f:
            #print(file_name)
            data = pickle.load(f)
            series = data[1]
            for d in series:
                tmp.append(d[1])
            fidelity = np.mean(tmp)
            #fidelity=np.percentile(tmp,percentile)
            #fidelity=np.max(tmp)
            fidelity_std = np.std(tmp)
            fid_SA_group[(T_count, nQ)] = [fidelity, fidelity_std]
    T_count += 1
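
# Hypothetical lookup sketch (not in the original): each entry of fid_SA_group is keyed
# .. by (temperature index, N_quench) and stores [mean fidelity, fidelity std], e.g.
#
# mean_fid, std_fid = fid_SA_group[(0, 100)]   # T = 0.1, N_quench = 100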

#######################################
Example #3
0
def next_gen(params):
    '''
    Runs one post-initial generation

    Parameters
    ---------
    params: list (specific format)
        [pop_size, poly_size, num_mono_species, opt_property, smiles_list, sequence_list, mono_list, population, poly_property_list, n, gen_counter, spear_counter, prop_value_counter]

    Returns
    -------
    params: list (specific format)
        [pop_size, poly_size, num_mono_species, opt_property, smiles_list, sequence_list, mono_list, population, poly_property_list, n, gen_counter, spear_counter, prop_value_counter]
    '''
    # unpack parameters
    pop_size = params[0]
    poly_size = params[1]
    num_mono_species = params[2]
    opt_property = params[3]
    smiles_list = params[4]
    sequence_list = params[5]
    mono_list = params[6]
    population = params[7]
    poly_property_list = params[8]
    n = params[9]
    gen_counter = params[10]
    spear_counter = params[11]
    prop_value_counter = params[12]

    # open output files
    analysis_file = open('gens_analysis.txt', 'a+')
    population_file = open('gens_population.txt', 'a+')
    values_file = open('gens_values.txt', 'a+')
    if opt_property == 'dip':
        dip_polar_file = open('gens_dip_polar.txt', 'a+')
    if opt_property == 'pol':
        polar_dip_file = open('gens_polar_dip.txt', 'a+')
    spear_file = open('gens_spear.txt', 'a+')

    gen_counter += 1

    max_init = max(poly_property_list)

    # create sorted monomer list with most freq first
    gen1 = utils.sort_mono_indicies_list(mono_list)

    # Selection - select the best-scoring 50% of polymers as parents
    population = parent_select(opt_property, population, poly_property_list)

    # Crossover & Mutation - create children to repopulate bottom 50% of polymers in population
    population = crossover_mutate(population, pop_size, poly_size,
                                  num_mono_species, sequence_list, smiles_list,
                                  mono_list)

    # calculate desired polymer property
    if opt_property == "mw":
        poly_property_list = find_poly_mw(population, poly_size, smiles_list)
    elif opt_property == "dip":
        elec_prop_list = find_elec_prop(population, poly_size, smiles_list)
        poly_property_list = elec_prop_list[0]
        polar_list = elec_prop_list[1]
    elif opt_property == 'pol':
        elec_prop_list = find_elec_prop(population, poly_size, smiles_list)
        poly_property_list = elec_prop_list[1]
        dip_list = elec_prop_list[0]
    else:
        print(
            "Error: opt_property not recognized. trace:main:loop pop properties"
        )

    # record representative generation properties
    min_test = min(poly_property_list)
    max_test = max(poly_property_list)
    avg_test = mean(poly_property_list)

    if opt_property == 'dip':
        compound = utils.make_file_name(
            population[poly_property_list.index(max_test)], poly_size)
        polar_val = polar_list[poly_property_list.index(max_test)]

    if opt_property == 'pol':
        compound = utils.make_file_name(
            population[poly_property_list.index(max_test)], poly_size)
        dip_val = dip_list[poly_property_list.index(max_test)]

    # create sorted monomer list with most freq first
    gen2 = utils.sort_mono_indicies_list(mono_list)

    # calculate Spearman correlation coefficient for begin and end sorted monomer lists
    spear = stats.spearmanr(gen1[:n], gen2[:n])[0]

    # spear_05 = stats.spearmanr(gen1[:n_05], gen2[:n_05])[0]
    # spear_10 = stats.spearmanr(gen1[:n_10], gen2[:n_10])[0]
    # spear_15 = stats.spearmanr(gen1[:n_15], gen2[:n_15])[0]

    # record generation summary statistics (min, max, avg, Spearman) to analysis file
    analysis_file.write('%f, %f, %f, %f, \n' %
                        (min_test, max_test, avg_test, spear))
    if opt_property == 'dip':
        dip_polar_file.write('%s, %d, %f, %f, \n' %
                             (compound, gen_counter, max_test, polar_val))
    if opt_property == 'pol':
        polar_dip_file.write('%s, %d, %f, %f, \n' %
                             (compound, gen_counter, max_test, dip_val))
    # spear_file.write('%d, %f, %f, %f, \n' %
    # (gen_counter, spear_05, spear_10, spear_15))

    # write polymer population to file
    for polymer in population:
        poly_name = utils.make_file_name(polymer, poly_size)
        population_file.write('%s, ' % (poly_name))
    population_file.write('\n')

    for value in poly_property_list:
        values_file.write('%f, ' % (value))
    values_file.write('\n')

    # keep track of number of successive generations meeting Spearman criterion
    if spear > 0.92:
        spear_counter += 1
    else:
        spear_counter = 0

    # keep track of number of successive generations meeting property value convergence criterion
    if max_init * 0.95 <= max_test <= max_init * 1.05:
        prop_value_counter += 1
    else:
        prop_value_counter = 0

    # close all output files
    analysis_file.close()
    population_file.close()
    values_file.close()
    if opt_property == 'dip':
        dip_polar_file.close()
    if opt_property == 'pol':
        polar_dip_file.close()
    spear_file.close()

    # make backup copies of output files
    shutil.copy('gens_analysis.txt', 'gens_analysis_copy.txt')
    shutil.copy('gens_population.txt', 'gens_population_copy.txt')
    shutil.copy('gens_values.txt', 'gens_values_copy.txt')
    if opt_property == 'dip':
        shutil.copy('gens_dip_polar.txt', 'gens_dip_polar_copy.txt')
    if opt_property == 'pol':
        shutil.copy('gens_polar_dip.txt', 'gens_polar_dip_copy.txt')
    shutil.copy('gens_spear.txt', 'gens_spear_copy.txt')

    params = [
        pop_size, poly_size, num_mono_species, opt_property, smiles_list,
        sequence_list, mono_list, population, poly_property_list, n,
        gen_counter, spear_counter, prop_value_counter
    ]
    return (params)
Example #4
0
def init_gen(pop_size, poly_size, num_mono_species, opt_property, perc,
             smiles_list):
    '''
    Initializes parameters, creates the population, and runs the initial generation

    Parameters
    ----------
    pop_size: int
        number of polymers in each generation
    poly_size: int
        number of monomers per polymer
    num_mono_species: int
        number of monomer species in each polymer (e.g. copolymer = 2)
    opt_property: str
        property being optimized
    perc: float
        percentage of number of monomers to compare with Spearman calculation
    smiles_list: list
        list of all possible monomer SMILES

    Returns
    -------
    params: list (specific format)
        [pop_size, poly_size, num_mono_species, opt_property, smiles_list, sequence_list, mono_list, population, poly_property_list, n, gen_counter, spear_counter, prop_value_counter]
    '''

    # create all possible numerical sequences for given number of monomer types
    sequence_list = utils.find_sequences(num_mono_species)

    n = int(len(smiles_list) * perc)
    # n_05 = int(len(smiles_list) * .05)
    # n_10 = int(len(smiles_list) * .10)
    # n_15 = int(len(smiles_list) * .15)

    # initialize generation counter
    gen_counter = 1

    # initialize convergence counter
    spear_counter = 0
    prop_value_counter = 0

    # create monomer frequency list [(mono index 1, frequency), (mono index 2, frequency),...]
    mono_list = []
    for x in range(len(smiles_list)):
        mono_list.append([x, 0])

    # create initial population as list of polymers
    population = []
    population_str = []
    counter = 0
    while counter < pop_size:
        # for polymer in range(pop_size):
        temp_poly = []

        # select sequence type for polymer
        poly_seq = sequence_list[random.randint(0, len(sequence_list) - 1)]
        temp_poly.append(poly_seq)

        # select monomer types for polymer
        for num in range(num_mono_species):
            # randomly select a monomer index
            poly_monomer = random.randint(0, len(smiles_list) - 1)
            temp_poly.append(poly_monomer)
            # increase frequency count for monomer in mono_list
            mono_list[poly_monomer][1] += 1

        # make SMILES string of polymer
        temp_poly_str = utils.make_polymer_str(temp_poly, smiles_list,
                                               poly_size)

        # add polymer to population
        # check for duplication - compare SMILES strings to catch homopolymer-type
        # .. duplicates
        if temp_poly_str not in population_str:
            population.append(temp_poly)
            population_str.append(temp_poly_str)
            counter += 1

    # find initial population properties
    if opt_property == 'mw':
        # calculate polymer molecular weights
        poly_property_list = find_poly_mw(population, poly_size, smiles_list)
    elif opt_property == 'dip':
        # initialize list of polarizabilities
        polar_list = []
        # calculate electronic properties for each polymer
        elec_prop_list = find_elec_prop(population, poly_size, smiles_list)
        poly_property_list = elec_prop_list[0]
        polar_list = elec_prop_list[1]
    elif opt_property == 'pol':
        # initialize list of dipole moments
        dip_list = []
        # calculate electronic properties for each polymer
        elec_prop_list = find_elec_prop(population, poly_size, smiles_list)
        poly_property_list = elec_prop_list[1]
        dip_list = elec_prop_list[0]

    else:
        print(
            "Error: opt_property not recognized. trace:main:initial pop properties"
        )

    # set initial values for min, max, and avg polymer weights
    min_test = min(poly_property_list)
    max_test = max(poly_property_list)
    avg_test = mean(poly_property_list)

    if opt_property == 'dip':
        compound = utils.make_file_name(
            population[poly_property_list.index(max_test)], poly_size)
        polar_val = polar_list[poly_property_list.index(max_test)]

    if opt_property == 'pol':
        compound = utils.make_file_name(
            population[poly_property_list.index(max_test)], poly_size)
        dip_val = dip_list[poly_property_list.index(max_test)]

    # create new output files
    analysis_file = open('gens_analysis.txt', 'w+')
    population_file = open('gens_population.txt', 'w+')
    values_file = open('gens_values.txt', 'w+')
    if opt_property == 'dip':
        dip_polar_file = open('gens_dip_polar.txt', 'w+')
    if opt_property == 'pol':
        polar_dip_file = open('gens_polar_dip.txt', 'w+')
    spear_file = open('gens_spear.txt', 'w+')

    # write file headers
    analysis_file.write('min, max, avg, spearman, \n')
    population_file.write('polymer populations \n')
    values_file.write('%s values \n' % (opt_property))
    if opt_property == 'dip':
        dip_polar_file.write('compound, gen, dipole, polar \n')
    if opt_property == 'pol':
        polar_dip_file.write('compound, gen, polar, dip \n')
    #spear_file.write('gen, spear_05, spear_10, spear_15 \n')

    # capture initial population data
    analysis_file.write('%f, %f, %f, n/a, \n' % (min_test, max_test, avg_test))
    if opt_property == 'dip':
        dip_polar_file.write('%s, %d, %f, %f, \n' %
                             (compound, 1, max_test, polar_val))
    if opt_property == 'pol':
        polar_dip_file.write('%s, %d, %f, %f, \n' %
                             (compound, 1, max_test, dip_val))
    spear_file.write('1, n/a, n/a, n/a, \n')

    # write polymer population to file
    for polymer in population:
        poly_name = utils.make_file_name(polymer, poly_size)
        population_file.write('%s, ' % (poly_name))
    population_file.write('\n')

    for value in poly_property_list:
        values_file.write('%f, ' % (value))
    values_file.write('\n')

    # close all output files
    analysis_file.close()
    population_file.close()
    values_file.close()
    if opt_property == 'dip':
        dip_polar_file.close()
    if opt_property == 'pol':
        polar_dip_file.close()
    spear_file.close()

    # make backup copies of output files
    shutil.copy('gens_analysis.txt', 'gens_analysis_copy.txt')
    shutil.copy('gens_population.txt', 'gens_population_copy.txt')
    shutil.copy('gens_values.txt', 'gens_values_copy.txt')
    if opt_property == 'dip':
        shutil.copy('gens_dip_polar.txt', 'gens_dip_polar_copy.txt')
    if opt_property == 'pol':
        shutil.copy('gens_polar_dip.txt', 'gens_polar_dip_copy.txt')
    shutil.copy('gens_spear.txt', 'gens_spear_copy.txt')

    params = [
        pop_size, poly_size, num_mono_species, opt_property, smiles_list,
        sequence_list, mono_list, population, poly_property_list, n,
        gen_counter, spear_counter, prop_value_counter
    ]
    return (params)
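
# Hypothetical driver sketch (not in the original excerpt; thresholds and arguments are
# .. illustrative guesses, with smiles_list assumed to be loaded elsewhere): init_gen
# .. builds the params list and next_gen returns it in the same order, so a main loop
# .. can simply re-feed it until the convergence counters reach a chosen limit.
#
# params = init_gen(pop_size=32, poly_size=6, num_mono_species=2,
#                   opt_property='mw', perc=0.1, smiles_list=smiles_list)
# while params[11] < 10 and params[12] < 10:   # spear_counter, prop_value_counter
#     params = next_gen(params)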
Example #5
0
def find_elec_prop(population, poly_size, smiles_list):
    '''
    Calculates dipole moment and polarizability of each polymer in population
    TODO: add dipole tensor functionality
    TODO: update parser to catch failures/errors in output file

    Parameters
    ---------
    population: list
        list of polymers in population
    poly_size: int
        number of monomers per polymer
    smiles_list: list
        list of all possible monomer SMILES

    Returns
    -------
    elec_prop_lists: list
        nested list of [list of polymer dipole moments, list of polymer polarizabilities]
    '''

    poly_polar_list = []
    poly_dipole_list = []

    # run xTB geometry optimization
    #nproc = 8
    for polymer in population:
        run_geo_opt(polymer, poly_size, smiles_list)

    # parse xTB output files
    for polymer in population:
        # make file name string w/ convention monoIdx1_monoIdx2_fullNumericalSequence
        file_name = utils.make_file_name(polymer, poly_size)
        # count number of successful parsed properties
        num_succ_reads = 0

        # check for xTB failures
        if 'FAILED!' in open('output/%s.out' % (file_name)).read():
            # move output file to 'failed' directory
            move_fail_file = subprocess.call(
                '(mv output/%s.out failed/%s.out)' % (file_name, file_name),
                shell=True)

            # note failure by filling property lists with dummy values
            poly_polar_list.append(-10)
            poly_dipole_list.append(-10)

        # if xTB successful, parse output file for static polarizability and dipole moment
        else:
            temp_polar = None
            temp_dipole = None
            read_output = open('output/%s.out' % (file_name), 'r')
            for line in read_output:
                # create list of tokens in line
                tokens = line.split()

                if line.startswith(" Mol. α(0)"):
                    temp_polar = float(tokens[4])
                    num_succ_reads += 1
                elif line.startswith("   full:"):
                    # dipole tensor - STILL A LIST OF STRINGS (not floats)
                    # TODO: add tensor functionality later
                    dipole_line = tokens
                    temp_dipole = float(tokens[4])
                    num_succ_reads += 1
                    # break inner for loop to avoid overwriting with other lines starting with "full"
                    break
            read_output.close()

            # catch: if polarizability and dipole were not both parsed, treat the
            # .. polymer as failed - record dummy values once and move the output file
            if num_succ_reads == 2:
                poly_polar_list.append(temp_polar)
                poly_dipole_list.append(temp_dipole)
            else:
                poly_polar_list.append(-10)
                poly_dipole_list.append(-10)
                move_fail_file = subprocess.call(
                    '(mv output/%s.out failed/%s.out)' % (file_name, file_name),
                    shell=True)

    # make nested list of dipole moment and polarizability lists
    elec_prop_lists = [poly_dipole_list, poly_polar_list]

    return elec_prop_lists
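
# Hypothetical usage sketch (not in the original): the returned nested list is ordered
# .. [dipole moments, polarizabilities], with -10 marking failed xTB runs.
#
# dip_list, polar_list = find_elec_prop(population, poly_size, smiles_list)
# best_dip = max(d for d in dip_list if d != -10)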
Example #6
0
def main():
    param = {
        'N_time_step': 100,
        'N_quench': 0,
        'Ti': 0.04,
        'action_set': 0,
        'hx_initial_state': -2.0,
        'hx_final_state': 2.0,
        'delta_t': 0.001,
        'hx_i': -4.0,
        'RL_CONSTRAINT': True,
        'L': 6,
        'J': 1.00,
        'hz': 1.0,
        'symmetrize': False
    }
    file_name = ut.make_file_name(param)
    res = ut.gather_data(param, "../data/")
    print(compute_observable.Ed_Ad_OP(res['h_protocol'], 4))
    plotting.protocol(range(100), res['h_protocol'][0])
    #plotting.protocol(range(100),res['h_protocol'][1])
    #print(res['fid'])

    #print(res.keys())

    print(file_name)
    #with open('
    exit()
    import os

    #===========================================================================
    # pca=PCA(n_components=2)
    # param['N_time_step']=10
    # dc=ut.gather_data(param,'../data/')
    # pca.fit(dc['h_protocol']/4.)
    # X=pca.transform(dc['h_protocol']/4.)
    #
    # plt.scatter(X[:,0],X[:,1])
    # plt.title('PCA, $t=0.1$, continuous protocol')
    # plt.savefig("PCA_AS2_t-0p1.pdf")
    # plt.show()
    # exit()
    #===========================================================================

    #===========================================================================
    # dataBB8=[]
    # param['action_set']=0
    # param['N_time_step']=60
    #
    # param['delta_t']=0.5/60.
    # dc=ut.gather_data(param,'../data/')
    # pca=PCA(n_components=2)
    # pca.fit(dc['h_protocol']/4.)
    # print(pca.explained_variance_ratio_)
    # exit()
    #
    # param['delta_t']=3.0/60.
    # dc=ut.gather_data(param,'../data/')
    # X=pca.transform(dc['h_protocol']/4.)
    #
    # title='PCA$_{50}$, $t=3.0$, continuous protocol, nStep$=60$'
    # out_file="PCA_AS0_t-3p0_nStep-60.pdf"
    # plotting.visne_2D(X[:,0],X[:,1],dc['fid'],zlabel="Fidelity",out_file=out_file,title=title,show=True,xlabel='PCA-1',ylabel='PCA-2')
    #
    #===========================================================================
    #exit()
    #plt.scatter(X[:,0],X[:,1])
    #plt.title('PCA$_{50}$, $t=1.5$, continuous protocol, nStep$=60$')
    #plt.savefig("PCA_AS0_t-0p8_nStep-60.pdf")
    #plt.show()
    #exit()
    # exit()

    #===========================================================================
    # param['N_time_step']=2
    # param['action_set']=0
    # dc=ut.gather_data(param,'../data/')
    # print(dc['h_protocol'])
    # exit()
    # dataBB8=[]
    #===========================================================================
    #===============================================================================
    #
    #     param['action_set']=0
    #     param['N_time_step']=60
    #     param['delta_t']=0.5/60
    #
    #     dc=ut.gather_data(param,'../data/')
    #
    #     protocols=dc['h_protocol']
    #     #print(np.shape(dc['h_protocol']))
    #     sort_f=np.argsort(dc['fid'])[::-1]
    #
    #     print(sort_f[0])
    #
    #     #protocols[sort_f[0]]
    #
    #     best_prot=protocols[sort_f[0:10]]
    #     x=np.array(range(60))*1.0/60
    #     #print(best_prot.reshape)
    #     #print(x.shape)
    #     #print(np.array(range(60))*0.1/60)
    #     #print(best_prot)
    #     #print(np.shape(best_prot))
    #     #print(np.shape(np.arange(0.1,3.05,0.1)*0.05))
    #
    #     plotting.protocol(protocols[:2],x,labels=dc['fid'][:2],show=True)
    #
    #     exit()
    #
    #
    #===============================================================================

    param['N_time_step'] = 60
    param['action_set'] = 0

    dataBB8 = []
    compTime = []
    x = []

    for t in np.arange(0.1, 3.05, 0.1):
        dt = t / param['N_time_step']
        param['delta_t'] = dt
        # note: gather_data was changed to return False if the file is not found
        dc = ut.gather_data(param, '../data/')

        if dc is not False:
            eaop = compute_observable.Ed_Ad_OP(dc['h_protocol'], 4.0)
            print(t, eaop, dc['fid'].shape, '\t', np.mean(dc['n_fid']))
            compTime.append(np.mean(dc['n_fid']))
            dataBB8.append(eaop)
            x.append(t)
        else:
            print("Data not available for %.3f" % dt)

    y = compTime
    plotting.observable(y,
                        x,
                        title='Depth of search for bang-bang protocol',
                        ylabel=r'\# of fidelity evaluations',
                        xlabel='$T$',
                        marker="-",
                        labels=['Obtained time (SGD)'])
    exit()
    #===========================================================================
    # param['action_set']=0
    # param['delta_t']=0.01
    #===========================================================================
    #===========================================================================
    # for i in range(2,300,4):
    #     param['N_time_step']=i
    #     is_there,dc=ut.gather_data(param,'../data/')
    #     if is_there:
    #         eaop=compute_observable.Ed_Ad_OP(dc['h_protocol'],4.0)
    #         print(i,eaop,dc['fid'].shape,'\t',np.mean(dc['n_fid']))
    #         compTime.append(np.mean(dc['n_fid']))
    #         dataBB8.append(eaop)
    #         x.append(i)
    #     else:
    #         print("Data not available for %i"%i)
    #
    #===========================================================================

    #===========================================================================
    # param['N_time_step']=150
    # is_there,dc=ut.gather_data(param,'../data/')
    # x=np.arange(0,150*0.01,0.01)
    # plotting.protocol(dc['h_protocol'][:3],x,labels=dc['fid'][:3],show=True)
    # exit()
    # #x=np.array(range(2,300,4))*0.01
    #===========================================================================
    param['action_set'] = 0
    param['delta_t'] = 0.01
    mean_fid_BB = []
    h_protocol_BB = {}
    fid_BB = {}
    n_fid_BB = []
    x = []
    sigma_fid = []
    EA_OP = []

    for i in range(2, 300, 4):
        param['N_time_step'] = i
        data_is_available, dc = ut.gather_data(param, '../data/')
        if data_is_available:
            mean_fid_BB.append(np.mean(dc['fid']))
            sigma_fid.append(np.std(dc['fid']))
            fid_BB[i] = dc['fid']
            EA_OP.append(compute_observable.Ed_Ad_OP(dc['h_protocol'], 4.0))
            h_protocol_BB[i] = dc['h_protocol']
            n_fid_BB.append(np.mean(dc['n_fid']))
            x.append(i * param['delta_t'])

    #print(fid_BB[130])
    #mean=np.mean(fid_BB[130])
    #sns.distplot(fid_BB[130],bins=np.linspace(mean-0.005,mean+0.005,100))
    #plt.tick_params(labelleft='off')
    #plt.show()
    x = np.array(x)
    y = [
        n / (x[i] / param['delta_t'])
        for n, i in zip(n_fid_BB, range(len(n_fid_BB)))
    ]

    plotting.observable(y,
                        x,
                        title='Depth of search for bang-bang protocol',
                        ylabel=r'(\# of fidelity evaluations)/$N$',
                        xlabel='$T$',
                        marker="-",
                        labels=['Minimum time', 'Obtained time (SGD)'])

    #plotting.protocol(h_protocol_BB[130][20:25],np.arange(0,130,1)*param['delta_t'])

    exit()

    #pca.fit()
    #===========================================================================
    # dataCONT=[]
    # for t in range(2,300,4):
    #     print(t)
    #     param['N_time_step']=t
    #     dc=ut.gather_data(param,'../data/')
    #     #print(dc['h_protocol'].shape)
    #     eaop=compute_observable.Ed_Ad_OP(dc['h_protocol'],4.0)
    #     print(eaop)
    #     dataCONT.append(eaop)
    #
    # file="../data/EAOP_"+ut.make_file_name(param)
    # with open(file,'wb') as f:
    #     pickle.dump(dataCONT,f);f.close();
    #
    # exit()
    #
    #===========================================================================

    #===========================================================================
    # param['action_set']=0
    # dataBB8=[]
    # for t in range(2,300,4):
    #     print(t)
    #     param['N_time_step']=t
    #     dc=ut.gather_data(param,'../data/')
    #     eaop=compute_observable.Ed_Ad_OP(dc['h_protocol'],4.0)
    #     print(eaop)
    #     #print(dc['h_protocol'].shape)
    #     dataBB8.append(eaop)
    #
    # file="../data/EAOP_"+ut.make_file_name(param)
    # with open(file,'wb') as f:
    #     pickle.dump(dataBB8,f);f.close();
    #
    # exit()
    #===========================================================================

    #===========================================================================
    # param['N_time_step']=298
    # param['action_set']=0
    # file="../data/EAOP_"+ut.make_file_name(param)
    # with open(file,'rb') as f:
    #     dataBB8=pickle.load(f);f.close();
    #
    # param['action_set']=2
    # f="../data/EAOP_"+ut.make_file_name(param)
    # with open(f,'rb') as file:
    #     dataCONT=pickle.load(file);
    #
    # time_axis=np.array(range(2,300,4))*0.01
    # title="Edward-Anderson parameter ($n=400$) vs. evolution time for SGD\n with the different action protocols ($L=1$)"
    # plotting.observable([dataBB8,dataCONT],[time_axis,time_axis],title=title,
    #                      out_file="SGD_EAOPvsT_AS0-2.pdf",show=True,
    #                      ylabel="$q_{EA}$",xlabel="$t$",labels=['bang-bang8','continuous'])
    #===========================================================================

    #===========================================================================
    # param['N_time_step']=250
    # dc=ut.gather_data(param,'../data/')
    # sns.distplot(dc['fid'],kde=False,label='$t=%.3f$'%(param['N_time_step']*0.01))
    # plt.legend(loc='best')
    # plt.savefig('SGD_hist_fid_t2p5.pdf')
    # plt.show()
    # exit()
    #===========================================================================

    #===========================================================================
    # title="Fidelity ($n=400$) vs. evolution time for SGD\n with the different action protocols ($L=1$)"
    # plotting.observable(np.array(data),np.array(range(2,300,4))*0.01,title=title,
    #                      out_file="SGD_FvsT_AS2.pdf",show=True,
    #                      ylabel="$F$",xlabel="$t$",labels=['continuous'])
    #
    #===========================================================================

    exit()
Example #7
0
def b2(n10, w=10):
    # map an integer to a bang-bang protocol: each binary digit of n10 (width w)
    # .. becomes a field value of +4 (bit 1) or -4 (bit 0)
    x = np.array(list(np.binary_repr(n10, width=w)), dtype=float)
    x[x > 0.5] = 4.
    x[x < 0.5] = -4.
    return x
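
# Worked example (added for clarity; not in the original): np.binary_repr(5, width=4)
# .. gives '0101', so b2(5, w=4) returns array([-4.,  4., -4.,  4.])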

par=default_parameters()
par['symmetrize']=False
par['L']=6
    
best_fid=0
best_fid_list=[]
for n in range(10,405,10):
    par['N_time_step'] = n
    file=make_file_name(par)
    with open('data/'+file, 'rb') as f:
        data = pickle.load(f)
    for d in data[1]:
        if d[1] > best_fid:
            best_fid = d[1]
    best_fid_list.append(best_fid)

np.savetxt('out.txt',best_fid_list)
#print(data)   

exit()
h=data[0][1]
dt=0.015
t=np.arange(0,3.0,dt)
plotting.protocol(t,h)
Example #8
0
def main():
    
    ut.check_sys_arg(sys.argv)
        
    global action_set,hx_discrete,hx_max#,FIX_NUMBER_FID_EVAL
    
    continuous=[0.01,0.05,0.1,0.2,0.5,1.,2.,3.,4.,8.]
    action_set_name=["bang-bang8","continuous-pos","continuous"]
    action_set_arrays=[
                      np.array([-8.0,0.,8.]),
                      np.array(continuous,dtype=np.float32),
                      np.array([-c for c in continuous]+[0]+continuous,dtype=np.float32)   
                      ]
    all_action_sets=dict(zip(action_set_name,action_set_arrays))
    
    """ 
    Parameters
        L: system size
        J: Jzz interaction
        hz: longitudinal field
        hx_i: initial transverse field coupling
        hx_initial_state: initial state transverse field
        hx_final_state: final state transverse field
        Ti: initial temperature for annealing
        
        N_quench: number of quenches (i.e. no. of times the temperature is quenched to reach exactly T=0)
        N_time_step: number of time steps
        action_set: array of possible actions
        outfile_name: file where data is dumped (via pickle)
        delta_t: time scale
        N_restart: number of restarts for the annealing
        verbose: whether the program prints progress to screen
        symmetrize_protocol: whether or not to work in the symmetrized sector of protocols
        
        hx_max: maximum hx field (the annealer can go between -hx_max and hx_max)
        FIX_NUMBER_FID_EVAL: decide whether you want to fix the maximum number of fidelity evaluations (deprecated)
        RL_CONSTRAINT: use reinforcement learning constraints or not
        fidelity_fast: precompute exponential matrices and run fast_Fidelity() instead of Fidelity()
    """
    #----------------------------------------
    # DEFAULT PARAMETERS
    J = 1.0  # zz interaction
    hz = 1.0  #0.9045/0.809 #1.0 # hz field
    hx_i = -4.0 # -1.0 # initial hx coupling
    act_set_name='bang-bang8'
    Ti=0.04 # initial temperature (for annealing)
    
    L = 10 # system size
    hx_initial_state= -2.0 # initial state
    hx_final_state = 2.0 #+1.0 # final hx coupling
    N_quench=10
    N_time_step=40
    outfile_name='first_test.pkl'
    action_set=all_action_sets['bang-bang8']
    delta_t=0.05
    N_restart=4
    symmetrize_protocol=True
    
    hx_max=4
    h_set=compute_h_set(hx_i,hx_max)
    RL_CONSTRAINT=True 
    verbose=True
    fidelity_fast=True
    
    #----------------------------------------
    
    if len(sys.argv)>1:
        """ 
            if len(sys.argv) > 1 : run from command line -- check command line for parameters 
        """        
        argv=ut.read_command_line_arg(sys.argv,all_action_sets)
        L=argv[0]
        hx_initial_state=argv[1]
        hx_final_state=argv[2]
        N_quench=argv[3]
        N_time_step=argv[4]
        action_set=argv[5]
        outfile_name=argv[6]
        delta_t=argv[7]
        N_restart=argv[8]
        verbose=argv[9]
        act_set_name=argv[10]
        symmetrize_protocol=argv[11]
        



    print("-------------------- > Parameters < --------------------")
    print("L \t\t\t %i\nJ \t\t\t %.3f\nhz \t\t\t %.3f\nhx(t=0) \t\t %.3f\nhx_max \t\t\t %.3f "%(L,J,hz,hx_i,hx_max))
    print("hx_initial_state \t %.2f\nhx_final_state \t\t %.2f"%(hx_initial_state,hx_final_state))
    print("N_quench \t\t %i\ndelta_t \t\t %.2f\nN_restart \t\t %i"%(N_quench,delta_t,N_restart))
    print("N_time_step \t\t %i"%N_time_step)
    print("Total_time \t\t %.2f"%(N_time_step*delta_t))
    print("Output file \t\t %s"%('data/'+outfile_name))
    print("Action_set \t <- \t %s"%np.round(action_set,3))
    print("# of possible actions \t %i"%len(action_set))
    print("Using RL constraints \t %s"%str(RL_CONSTRAINT))
    print("Symmetrizing protocols \t %s"%str(symmetrize_protocol))
    print("Fidelity MODE \t\t %s"%('fast' if fidelity_fast else 'standard'))

    param={'J':J,'hz':hz,'hx':hx_i} # Hamiltonian kwargs 
    hx_discrete=[0]*N_time_step # dynamical part at every time step (initialize to zero everywhere)
    
    # full system hamiltonian
    H,_ = Hamiltonian.Hamiltonian(L,fct=hx_vs_t,**param)
   
    # calculate initial and final states
    hx_discrete[0]=hx_initial_state # just a trick to get initial state
    _, psi_i = H.eigsh(time=0,k=1,which='SA')
    hx_discrete[0]=hx_final_state # just a trick to get final state
    _, psi_target = H.eigsh(time=0,k=1,which='SA')
    hx_discrete[0]=0
    print("Initial overlap is \t %.5f"%(abs(np.sum(np.conj(psi_i)*psi_target))**2))
    
    # simulated annealing kwargs:
    param_SA={'Ti':Ti,
              'psi_i':psi_i,'H':H,'N_time_step':N_time_step,
              'delta_t':delta_t,'psi_target':psi_target,
              'hx_i':hx_i,'N_quench':N_quench,'RL_CONSTRAINT':RL_CONSTRAINT,
              'verbose':verbose,'hx_initial_state':hx_initial_state,'hx_final_state':hx_final_state,
              'L':L,'J':J,'hz':hz,'action_set':action_set_name.index(act_set_name),
              'fidelity_fast':fidelity_fast,'symmetrize':symmetrize_protocol
    }
    
    if param_SA['fidelity_fast'] :
        print("\nPrecomputing evolution matrices ...")
        start=time.time()
        precompute_expmatrix(param_SA, h_set, H)
        print("Done in %.4f seconds"%(time.time()-start))
        
    if outfile_name=="auto": outfile_name=ut.make_file_name(param_SA)
    
    to_save_par=['Ti','psi_i','N_time_step',
                'delta_t','psi_target','hx_i','N_quench','RL_CONSTRAINT',
                'hx_initial_state','hx_final_state','L','J','hz','action_set',
                'symmetrize'
    ]
    
    file_content=ut.read_current_results('data/%s'%outfile_name)
    
    # Read current data if it exists
    if file_content :
        dict_to_save_parameters, all_results = file_content
        N_current_restart = len(all_results)
        print("Data with %i samples available !" % N_current_restart) 
    else :
        dict_to_save_parameters = dict(zip(to_save_par,[param_SA[p] for p in to_save_par]))
        all_results=[]
        N_current_restart = 0
    
    #print(N_current_restart," ",N_restart)
    for it in range(N_current_restart, N_restart):
        print("\n\n-----------> Starting new iteration <-----------")
        start_time=time.time()
    
        count_fid_eval,best_fid,best_action_protocol,best_hx_discrete = simulate_anneal(param_SA)
    
        result=[count_fid_eval,best_fid,best_action_protocol,best_hx_discrete]
        print("\n----------> RESULT FOR ANNEALING NO %i <-------------"%(it+1))
        print("Number of fidelity eval \t%i"%count_fid_eval)
        print("Best fidelity \t\t\t%.4f"%best_fid)
        print("Best hx_protocol\t\t",list(best_hx_discrete))

        if L > 1:  
            _,E,delta_E,Sd,Sent = MB_observables(best_hx_discrete, param_SA, matrix_dict, fin_vals=True)
            result = result + [E, delta_E, Sd, Sent] # Appending Energy, Energy fluctuations, Diag. entropy, Ent. entropy
        
        all_results.append(result)
        with open('data/%s'%outfile_name,'wb') as pkl_file:
            ## Here read first then save, stop if reached quota
            pickle.dump([dict_to_save_parameters, all_results], pkl_file)
            
        print("Saved iteration --> %i to %s"%(it,'data/%s'%outfile_name))
        print("Iteration run time --> %.2f s"%(time.time()-start_time))
    
    print("\n Thank you and goodbye !")
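
# Hypothetical entry point (assumption; the original excerpt does not show how main()
# .. is invoked):
#
# if __name__ == '__main__':
#     main()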