Example #1
 def update_json(self, filename):
     """
     This function was originally written to update all keys in the
     json dictionaries in the grain boundary directories.
     The pattern is quite general and can be adapted to add
     new keys or delete old keys; consider it a dictionary
     migration routine.
     """
     new_json = {}
     with open(filename, 'r') as json_old:
         old_json = json.load(json_old)
         new_json['zplanes'] = old_json['zplanes']
         new_json['orientation_axis'] = old_json['orientation axis']
         new_json['boundary_plane'] = old_json['boundary plane']
         new_json['coincident_sites'] = old_json['coincident sites']
         new_json['angle'] = old_json['angle']
         new_json['gbid'] = old_json['gbid']
         new_json['n_at'] = old_json['n_unit_cell']
         new_json['type'] = 'symmetric tilt boundary'
         # the structure file lives at <gb directory>/<gbid>.xyz
         dir_path = os.path.join(os.path.dirname(filename), old_json['gbid'])
         at = Atoms('{0}.xyz'.format(dir_path))
         cell = at.get_cell()
         A = cell[0, 0] * cell[1, 1]
         new_json['A'] = A
         json_path = filename
     with open(json_path, 'w') as json_new_file:
         json.dump(new_json, json_new_file, indent=2)
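
The routine above is one instance of a general key-migration pattern. A minimal sketch of that pattern, decoupled from the grain boundary specifics (the RENAMES and DEFAULTS tables are illustrative, not taken from the original code):

import json

# Illustrative migration tables -- not part of the original routine.
RENAMES  = {'orientation axis': 'orientation_axis',
            'boundary plane':   'boundary_plane',
            'n_unit_cell':      'n_at'}
DEFAULTS = {'type': 'symmetric tilt boundary'}

def migrate_json(filename):
    """Rename keys, inject defaults, and rewrite the file in place."""
    with open(filename, 'r') as f:
        old = json.load(f)
    new = dict((RENAMES.get(k, k), v) for k, v in old.items())
    for key, value in DEFAULTS.items():
        new.setdefault(key, value)
    with open(filename, 'w') as f:
        json.dump(new, f, indent=2)
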
Example #2
 def add_key_to_dict(self, dirname):
     json_path = os.path.join(dirname, 'subgb.json')
     new_json = {}
     with open(json_path, 'r') as json_old:
         old_json = json.load(json_old)
     for key in old_json.keys():
         new_json[key] = old_json[key]
     # NOTE: assumes the structure file is named <dirname>.xyz
     at = Atoms('{0}.xyz'.format(dirname))
     cell = at.get_cell()
     A = cell[0, 0] * cell[1, 1]
     new_json['A'] = A
     new_json['n_at'] = len(at)
     # Write the updated dictionary back (assumed intent: the original
     # computed new_json but never persisted it).
     with open(json_path, 'w') as json_new:
         json.dump(new_json, json_new, indent=2)
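
Both routines compute the boundary area as cell[0, 0] * cell[1, 1], which is exact only when the first two cell vectors are orthogonal and axis-aligned. A sketch of a variant that also handles tilted cells, assuming numpy and an ASE-style get_cell():

import numpy as np

def xy_cross_section(cell):
    """Area spanned by the first two cell vectors.

    Reduces to cell[0, 0] * cell[1, 1] for orthorhombic,
    axis-aligned cells, but stays correct when the vectors are tilted.
    """
    cell = np.asarray(cell)
    return np.linalg.norm(np.cross(cell[0], cell[1]))
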
Example #3
def gen_interface():
    """Selects an interfacial region of a bicrystal
    based on common neighbour analysis. The width of the interfacial region
    is equal to 2*(gb_max-gb_min), where gb_max is the z-coordinate of the
    highest non-bcc atom and gb_min that of the lowest non-bcc atom.

    The method creates a file `interface.xyz` in the working directory,
    with the interface centered in a unit cell with 1 angstrom vacuum
    on each side.

    Returns:
      :class:`ase.Atoms`: Atoms object of the interfacial slab in same
      coordinates as original bicrystal.
    """
    #output.xyz must have structure_type property attached.
    ats = Atoms('output.xyz')
    cell_midpoint = ats.get_cell()[2,2]/2.0
    #structure_type is 0 for non-bcc (defect) sites and 3 for bcc; the mask selects the zeros.
    struct_type = np.array(ats.properties['structure_type'])
    struct_mask = [not struct for struct in struct_type]
    interface = ats.select(struct_mask)
    #select upper interface to decorate.
    interface = interface.select([at.position[2] > cell_midpoint for at in interface])
    z_vals = [at.position[2] for at in interface]
    z_min = min(z_vals)
    z_max = max(z_vals)
    #take a slice spanning the interfacial region, padded by half its width on either side.
    z_width = (z_max-z_min)/2.0
    z_center = z_width + z_min
    gb_max = z_max + 1.0*z_width
    gb_min = z_min - 1.0*z_width
    zint = ats.select([(gb_min <= at.position[2] <= gb_max) for at in ats])
    #make a copy to return
    int_ats = zint.copy()
    zint.center(vacuum=1.0, axis=2)
    zint.write('interface.xyz')
    #Write POSCAR to use interstitial site generator (only needed by the
    #commented-out VASP block below):
    #ats = Atoms('interface.xyz')
    #vasp_args=dict(xc='PBE', amix=0.01, amin=0.001, bmix=0.001, amix_mag=0.01, bmix_mag=0.001,
    #             kpts=[3, 3, 3], kpar=9, lreal='auto', ibrion=-1, nsw=0, nelmdl=-15, ispin=2,
    #             nelm=100, algo='VeryFast', npar=24, lplane=False, lwave=False, lcharg=False, istart=0,
    #             voskown=0, ismear=1, sigma=0.1, isym=2)
    #vasp = Vasp(**vasp_args)
    #vasp.initialize(ats)
    #write_vasp('POSCAR', vasp.atoms_sorted, symbol_count=vasp.symbol_count, vasp5=True)
    return int_ats
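
Hypothetical usage, assuming a bicrystal with the structure_type property attached has already been written to output.xyz in the working directory:

interface_slab = gen_interface()
print('interface slab contains {} atoms'.format(len(interface_slab)))
# interface.xyz now holds the slab centered with 1 A of vacuum per side.
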
Example #4
ats = Atoms('crackH.xyz')
ats.set_cutoff(2.4)
ats.calc_connect()
ats.calc_dists()
filter_mask = (ats.get_atomic_numbers()==1)
h_atoms     = ats.select(filter_mask, orig_index=True)
#Housekeeping: in the vicinity of a crack tip the Delaunay triangulation
#can go a little haywire, so we remove any H that is far too close to an
#Fe atom (|h-fe| < 1.1) together with the vacuum Hs.
rem = []
u = np.zeros(3)
for i in h_atoms.orig_index:
  print 'hindex', i
  print 'nneighbs', ats.n_neighbours(i)
  for n in range(ats.n_neighbours(i)):
    j = ats.neighbour(i, n+1, distance=2.4, diff=u)
    print 'neighb index', j
    if ats.distance_min_image(i,j) < 1.1 and j!=i:
      rem.append(i)
rem = list(set(rem))
if len(rem) > 0:
  print 'Removing {} H atoms'.format(len(rem))
  ats.remove_atoms(rem)
else:
  print 'No H atoms closer than threshold.'
ats.write('crackH.xyz')
n_h_atoms = (ats.get_atomic_numbers() == 1).sum()
zlen      = ats.get_cell()[2,2]
with open('h.txt','w') as f:
  print >>f, 'There are {} H atoms per {} A'.format(n_h_atoms, zlen)
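
The same close-contact filter can be expressed against plain ASE rather than quippy's calc_connect machinery. A minimal sketch, assuming the ase package is available (the 1.1 A threshold mirrors the loop above, though this version flags H atoms close to any neighbour, not just Fe):

from ase.io import read
from ase.neighborlist import neighbor_list

def close_contact_hydrogens(atoms, threshold=1.1):
    """Indices of H atoms with any neighbour closer than threshold."""
    first, second = neighbor_list('ij', atoms, cutoff=threshold)
    numbers = atoms.get_atomic_numbers()
    return sorted(set(i for i in first if numbers[i] == 1))

atoms = read('crackH.xyz')
rem = close_contact_hydrogens(atoms)
if rem:
    del atoms[rem]  # ASE Atoms supports deletion by index list
    atoms.write('crackH_filtered.xyz')
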
Example #5
def populate_db(material='alphaFe', or_axis='001', gbid='', modify=False):
    """Add canonical grains to SQLite database, and all SubGrainBoundaries
    that can be found below it in the directory tree from their subgb.json files.

    Args:
      material(str): material name (top-level directory in the grain database).
      or_axis(str): orientation axis.
      gbid(str, optional): id of a specific canonical grain to add.
      modify(bool): if True, the database will be updated.
    """

    analyze  = GBAnalysis()
    dir_str = os.path.join(material, or_axis)
    if gbid:
        dir_str = os.path.join(dir_str, gbid)
    app.logger.info('dir_str {}'.format(dir_str))
    gb_files = []
    analyze.find_gb_json(os.path.join(GRAIN_DATABASE, dir_str), gb_files, 'gb.json')
    for gb in gb_files:
        app.logger.info('{} {}'.format(gb[0], gb[1]))
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        try:
            sigma_csl = gb_json['sigma_csl']
        except KeyError:
            sigma_csl = int(gb_json['n_at']/(gb_json['coincident_sites']+gb_json['n_at']))
            gb_json['sigma_csl'] = sigma_csl
            with open(gb[1], 'w') as f:
                json.dump(gb_json, f, indent=2)

        try:
            coincident_sites = gb_json['coincident_sites']
        except KeyError:
            coincident_sites = 0

        gb_dict = {"gb_type"          : gb_json['type'],
                   "n_at"             : gb_json['n_at'],
                   "boundary_plane"   : serialize_vector(map(int, gb_json['boundary_plane'])),
                   "orientation_axis" : serialize_vector(map(int, gb_json['orientation_axis'])),
                   "z_planes"         : serialize_vector(gb_json['zplanes']),
                   "coincident_sites" : coincident_sites,
                   "sigma_csl"        : sigma_csl,
                   "angle"            : gb_json['angle'],
                   "height"           : gb_json['H'],
                   "area"             : gb_json['A'],
                   "notes"            : "",
                   "path"             : os.path.relpath(gb[0], app.config["GRAIN_DATABASE"]),
                   "gbid"             : gb_json['gbid']
                  }

        if modify:
            try:
                GB_model_object = GrainBoundary.create(**gb_dict)
            except IntegrityError:
                GB_model_object = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
                app.logger.info('GB already in database: {}'.format(gb_json['gbid']))
        else:
            try:
                GB_model_object = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
                app.logger.info('Database Contains {}'.format(gb_json['gbid']))
            except GrainBoundary.DoesNotExist:
                app.logger.info('Not in Database {}'.format(gb_json['gbid']))
                GB_model_object = None

        subgb_files = []
        analyze.find_gb_json(gb[0], subgb_files, 'subgb.json')
        for subgb in subgb_files:
            with open(subgb[1],'r') as f:
                subgb_json = json.load(f)
            try:
                converged = subgb_json['converged']
            except KeyError:
                converged = False

            try:
                E_gb = subgb_json["E_gb"]
            except KeyError:
                converged = False
                E_gb = 0.0

            try:
                E_gb_init=subgb_json["E_gb_init"]
            except KeyError:
                E_gb_init = 0.0

            try:
                gbid = subgb_json["gbid"]
            except KeyError:
                gbid = subgb_json["name"]

            try:
                area = subgb_json['A']
            except KeyError:
                structs = glob.glob(os.path.join(subgb[0], '*.xyz'))
                struct  = Atoms(structs[-1])
                cell    = struct.get_cell()
                area    = cell[0][0]*cell[1][1]
                subgb_json['n_at'] = len(struct)  # so subgb_dict['n_at'] below resolves

            subgb_dict = {"canonical_grain" : GB_model_object,
                          "converged"       : converged,
                          "E_gb_init"       : E_gb_init,
                          "potential"       : subgb_json["param_file"],
                          "rbt"             : serialize_vector(subgb_json['rbt']),
                          "path"            : os.path.relpath(subgb[0], app.config["GRAIN_DATABASE"]),
                          "area"            : area,
                          "rcut"            : subgb_json["rcut"],
                          "n_at"            : subgb_json['n_at'],
                          "E_gb"            : E_gb,
                          "notes"           : "",
                          "gbid"            : gbid}
            if modify:
                try:
                    SubGrainBoundary.create(**subgb_dict)
                    app.logger.info('Created SubGB entry {}'.format(subgb_dict))
                except IntegrityError:
                    app.logger.info('SubGB already in DB {}'.format(subgb_dict))
            else:
                print subgb_dict
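
The cascade of try/except KeyError blocks above implements defensive defaults; dict.get expresses the same fallbacks more compactly. An equivalent sketch for the subgrain fields used above:

def subgb_defaults(subgb_json):
    """Same fallbacks as the try/except cascade above."""
    converged = subgb_json.get('converged', False)
    if 'E_gb' in subgb_json:
        E_gb = subgb_json['E_gb']
    else:
        converged, E_gb = False, 0.0   # missing energy forces converged=False
    E_gb_init = subgb_json.get('E_gb_init', 0.0)
    gbid = subgb_json.get('gbid', subgb_json.get('name'))
    return converged, E_gb, E_gb_init, gbid
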
Example #6
def gb_check_conv(material='alphaFe', or_axis='001', modify_db=False):
    """Scans through grainboundary directory tree,
    inspecting the subgrain dictionary and the :py:class:`SubGrainBoundary` to test if
    the grain boundary energy, number of atoms, gb_area,
    and convergence flag are consistent. If modify_db is True the SQLite model
    will be updated.

    Args:
      material: material to check json/database convergence consistency for.
      or_axis: orientation axis to check.
      modify_db: if True, updates the gb_model in the database; otherwise
        just prints the inconsistent grain json/database values.
    """

    analyze  = GBAnalysis()
    gb_files = []
    analyze.find_gb_json(os.path.join(GRAIN_DATABASE, material, or_axis),
                         gb_files, 'gb.json')
    no_struct_file = open('no_struct.txt','a')  # note: never written to (or closed) in this snippet
    for gb in gb_files:
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        GB_model = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
        for subgb_model in GB_model.subgrains:
            subgb_dict_path = os.path.join(subgb_model.path,'subgb.json')
            subgb_dict_path = os.path.join(GRAIN_DATABASE, subgb_dict_path)
            with open(subgb_dict_path,'r') as f:
                subgb_dict = json.load(f)
            struct_path = os.path.join(subgb_model.path, subgb_model.gbid+'_traj.xyz')
            struct_path = os.path.join(GRAIN_DATABASE, struct_path)
            app.logger.debug(struct_path)
            try:
                assert subgb_model.converged==subgb_dict['converged']
            except AssertionError:
                if not modify_db:
                    print 'Not updating:'
                    print subgb_dict_path
                    print 'Model: ', subgb_model.converged, 'Json:', subgb_dict['converged']
                else:
                    try:
                        assert isinstance(subgb_dict['converged'], bool)
                    except AssertionError:
                        print "json 'converged' value is not boolean; the json file may be corrupted:"
                        print subgb_dict_path
                    else:
                        print 'Updating model instance in database:'
                        print subgb_dict_path
                        print 'Model: ', subgb_model.converged, 'Json:', subgb_dict['converged']
                        subgb_model.converged = subgb_dict['converged']
                        subgb_model.save()

            try:
                assert subgb_model.n_at==subgb_dict['n_at']
            except KeyError:
                try:
                    ats = Atoms(struct_path)
                except RuntimeError:
                    print struct_path.replace('_traj','')
                    ats = Atoms(struct_path.replace('_traj',''))

                cell = ats.get_cell()
                subgb_dict['n_at'] = len(ats)
                subgb_dict['area'] = cell[0][0]*cell[1][1]
                with open(subgb_dict_path, 'w') as f:
                    json.dump(subgb_dict, f, indent=2)
            except AssertionError:
                if not modify_db:
                    print subgb_model.n_at, subgb_dict['n_at']
                else:
                    print 'Updating model instance in database:'
                    subgb_model.n_at = subgb_dict['n_at']
                    print 'Model: {}  json:{}'.format(subgb_model.n_at, subgb_dict['n_at'])
                    subgb_model.save()

            try:
                assert (abs(subgb_model.area - subgb_dict['area']) < 1e-8)
            except KeyError:
                print 'adding area key'
                subgb_dict['area'] = subgb_dict['A']
                with open(subgb_dict_path, 'w') as f:
                    json.dump(subgb_dict, f, indent=2)
            except AssertionError:
                if not modify_db:
                    print subgb_model.area, subgb_dict['area']
                else:
                    subgb_model.area = subgb_dict['area']
                    print 'Model: {}  json:{}'.format(subgb_model.area, subgb_dict['area'])
                    subgb_model.save()

            try:
                assert (abs(subgb_model.E_gb - subgb_dict['E_gb']) < 1e-8)
            except AssertionError:
                if not modify_db:
                    print 'Not updating:'
                    print 'Model E_gb:', subgb_model.E_gb, 'JSON E_gb:',  subgb_dict['E_gb']
                else:
                    print 'Model E_gb:', subgb_model.E_gb, 'JSON E_gb:',  subgb_dict['E_gb']
                    print subgb_dict_path
                    subgb_model.E_gb = subgb_dict['E_gb']
                    subgb_model.save()
            except KeyError:
                subgb_dict['converged']=False
                subgb_dict['E_gb'] = 0.0
                with open(subgb_dict_path, 'w') as f:
                    json.dump(subgb_dict, f, indent=2)
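
Each consistency block above has the same shape: compare one model field against its json value, report any mismatch, and write through when modify_db is set. A generic helper in that spirit (hypothetical; it assumes the model exposes the field as an attribute and a peewee-style save()):

def sync_field(model, json_dict, field, tol=1e-8, modify_db=False):
    """Report, and optionally repair, one model/json discrepancy."""
    model_val = getattr(model, field)
    json_val = json_dict[field]   # KeyError handling stays with the caller
    if isinstance(model_val, float):
        consistent = abs(model_val - json_val) < tol
    else:
        consistent = model_val == json_val
    if not consistent:
        print('{}: model {} json {}'.format(field, model_val, json_val))
        if modify_db:
            setattr(model, field, json_val)
            model.save()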