Example #1
0
def change_json_key(material='alphaFe', or_axis='001'):
    """Replace '_traj' string in subgb_dict['gbid'] method.
    Can be modified to update or correct database/subgb strings in json files.

    Args:
      material(str): material to select.
      or_axis(str): orientation axis to select.
    """

    analyze  = GBAnalysis()
    gb_files = []
    analyze.find_gb_json('{0}'.format(os.path.join(GRAIN_DATABASE, os.path.join(material, or_axis))),
                                                   gb_files, 'gb.json')
    for gb in gb_files:
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        GB_model = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
        for subgb_model in GB_model.subgrains:
            subgb_dict_path = os.path.join(subgb_model.path,'subgb.json')
            subgb_dict_path = os.path.join(GRAIN_DATABASE, subgb_dict_path)
            with open(subgb_dict_path,'r') as f:
                subgb_dict = json.load(f)
            try:
                assert subgb_dict['gbid'] == subgb_model.gbid
                if '_traj' in subgb_dict['gbid']:
                    print subgb_dict['gbid'], subgb_model.gbid
                    subgb_dict['gbid']= subgb_dict['gbid'].replace('_traj','')
                    with open(subgb_dict_path,'w') as f:
                        json.dump(subgb_dict, f, indent=2)
            except AssertionError:
                print subgb_dict['gbid'], subgb_model.gbid
                subgb_model.gbid = subgb_dict['gbid']
                subgb_model.save()
                print subgb_model.gbid
Example #2
0
def gb_check_dir_integrity(material='alphaFe', or_axis='001'):
    """Walk the grain-boundary directory tree and check, for each
    SubGrainBoundary model in the SQL database, that its subgb.json file
    still exists on disk. If the file is missing, interactively prompt the
    user to delete (or keep) the orphaned database model.

    Args:
      material(str): material to select.
      or_axis(str): orientation axis to select.
    """
    analyze  = GBAnalysis()
    gb_files = []
    # Collect (directory, path-to-gb.json) pairs below GRAIN_DATABASE/material/or_axis.
    analyze.find_gb_json('{0}'.format(os.path.join(GRAIN_DATABASE,
                                      os.path.join(material, or_axis))), gb_files, 'gb.json')
    for gb in gb_files:
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        GB_model = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
        for subgb_model in GB_model.subgrains:
            subgb_dict_path = os.path.join(subgb_model.path,'subgb.json')
            subgb_dict_path = os.path.join(GRAIN_DATABASE, subgb_dict_path)
            try:
                with open(subgb_dict_path,'r') as f:
                    subgb_dict = json.load(f)
            except IOError:
                # json file missing on disk: the model is orphaned. Ask the
                # user whether the database entry should be removed.
                NOINPUT = True
                print subgb_dict_path
                while NOINPUT:
                    user_input = raw_input("Directory missing delete model (y/n)?")
                    if user_input == 'y':
                        print 'Deleting Model'
                        subgb_model.delete_instance()
                        NOINPUT=False
                    elif user_input =='n':
                        print 'Keeping Model'
                        NOINPUT=False
                    else:
                        # Unrecognised answer: re-prompt.
                        pass
Example #3
0
def add_conv_key(material='alphaFe', or_axis='001'):
    """Check if subgb.json dictionary contains a convergence
    key. If not add key to subgb.json file and default to false.

    Args:
      material(str): name of material to search.
      or_axis(str): orientation axis.
    """

    analyze  = GBAnalysis()
    gb_files = []
    analyze.find_gb_json('{0}'.format(os.path.join(GRAIN_DATABASE,
                                      os.path.join(material, or_axis))), gb_files, 'gb.json')
    for gb in gb_files:
        print gb[0], gb[1]
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        GB_model = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
        for subgb_model in GB_model.subgrains:
            subgb_dict_path = os.path.join(subgb_model.path,'subgb.json')
            subgb_dict_path = os.path.join(GRAIN_DATABASE, subgb_dict_path)
            with open(subgb_dict_path,'r') as f:
                subgb_dict = json.load(f)
            try:
                print subgb_dict['gbid'], subgb_dict['converged']
            except KeyError:
                print 'Adding Convergence Keys'
                subgb_dict['converged'] = False
                with open(subgb_dict_path,'w') as f:
                    json.dump(subgb_dict, f, indent=2)
Example #4
0
def gb_check_path(material='alphaFe', or_axis='001', modify_db=False):
    """Compare consistency between the location of gb.json/subgb.json files
    in the directory tree and the paths stored in the SQL closure tree,
    rewriting the database paths to match the directory tree when they
    disagree.

    Args:
      material(str): material.
      or_axis(str): orientation axis.
      modify_db(bool): NOTE(review): currently unused -- path updates are
        executed unconditionally. Confirm whether updates should be gated
        on this flag.
    """

    analyze  = GBAnalysis()
    gb_files = []
    analyze.find_gb_json('{0}'.format(os.path.join(GRAIN_DATABASE, os.path.join(material, or_axis))),
                                      gb_files, 'gb.json')
    # NOTE(review): opened for append but never written to or closed.
    no_struct_file = open('no_struct.txt','a')
    for gb_num, gb in enumerate(gb_files[:]):
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        GB_model = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
        # Path relative to the database root.
        # NOTE(review): assumes the database root is exactly 7 '/'-separated
        # components deep on this filesystem -- TODO confirm against deployment.
        json_path = '/'.join(gb[0].split('/')[7:])
        #check grain model has correct path!
        try:
            assert json_path == GB_model.path
        except AssertionError:
            # Directory tree is authoritative: rewrite the model path.
            print GB_model.path, json_path
            q = GrainBoundary.update(path=json_path).where(GrainBoundary.gbid==gb_json['gbid'])
            q.execute()
        #now pull subgb.json paths
        subgb_files = []
        analyze.find_gb_json('{0}'.format(gb[0]), subgb_files, 'subgb.json')
        for subgb in subgb_files:
            subgb_dict_path = os.path.join(subgb[0],'subgb.json')
            subgb_dict_path = os.path.join(GRAIN_DATABASE, subgb_dict_path)
            with open(subgb_dict_path,'r') as f:
                subgb_dict = json.load(f)

            print subgb_dict_path
            # Match the subgrain model by its (name, potential) pair.
            query = (GB_model.subgrains
                             .where((SubGrainBoundary.gbid == subgb_dict['name']) &
                                    (SubGrainBoundary.potential==subgb_dict['param_file'])))
            subgb_model = query.get()
            json_path = '/'.join(subgb[0].split('/')[7:])
            model_path = subgb_model.path
            try:
                assert json_path == model_path
            except AssertionError:
                #print subgb_dict['name'], subgb_model.gbid
                print json_path, model_path
                query = (SubGrainBoundary.update(path=json_path)
                                         .where((SubGrainBoundary.gbid == subgb_dict['name']) &
                                                (SubGrainBoundary.potential==subgb_dict['param_file'])))
                #print query
                query.execute()
        database.commit()
    return
Example #5
0
def populate_db(material='alphaFe', or_axis='001', gbid='', modify=False):
    """Add canonical grains to SQLite database, and all SubGrainBoundaries
    that can be found below it in the directory tree from their subgb.json files.

    Args:
      material(str): material.
      or_axis(str): orientation axis.
      gbid(str, optional): To add a specific canonical grain from its id.
      modify(bool): If True database will be updated; otherwise entries are
        only looked up and printed (dry run).
    """

    analyze  = GBAnalysis()
    if len(gbid) == 0:
        dir_str  = os.path.join(material, or_axis)
    else:
        # Restrict the scan to a single canonical grain directory.
        dir_str  = os.path.join(material, or_axis)
        dir_str  = os.path.join(dir_str, gbid)
    app.logger.info('dir_str {}'.format(dir_str))
    gb_files = []
    gb_files = []
    analyze.find_gb_json('{0}'.format(os.path.join(GRAIN_DATABASE, dir_str)), gb_files, 'gb.json')
    for gb in gb_files:
        app.logger.info('{} {}'.format(gb[0], gb[1]))
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        try:
            sigma_csl = gb_json['sigma_csl']
        except KeyError:
            # NOTE(review): n_at/(coincident_sites+n_at) is < 1 for positive
            # counts, so under Python 2 integer division this truncates to 0.
            # The intended CSL sigma is presumably the inverse ratio --
            # TODO confirm and correct the formula before relying on it.
            sigma_csl = int(gb_json['n_at']/(gb_json['coincident_sites']+gb_json['n_at']))
            gb_json['sigma_csl'] = sigma_csl
            with open(gb[1], 'w') as f:
                json.dump(gb_json, f, indent=2)

        try:
            coincident_sites = gb_json['coincident_sites']
        except KeyError:
            coincident_sites = 0

        # Keyword dict mirroring the GrainBoundary model fields.
        gb_dict = {"gb_type"          : gb_json['type'],
                   "n_at"             : gb_json['n_at'],
                   "boundary_plane"   : serialize_vector(map(int, gb_json['boundary_plane'])),
                   "orientation_axis" : serialize_vector(map(int, gb_json['orientation_axis'])),
                   "z_planes"         : serialize_vector(gb_json['zplanes']),
                   "coincident_sites" : coincident_sites,
                   "sigma_csl"        : sigma_csl,
                   "angle"            : gb_json['angle'],
                   "height"           : gb_json['H'],
                   "area"             : gb_json['A'],
                   "notes"            : "",
                   "path"             : os.path.relpath(gb[0], app.config["GRAIN_DATABASE"]),
                   "gbid"             : gb_json['gbid']
                  }

        if modify:
            try:
                GB_model_object = GrainBoundary.create(**gb_dict)
            except IntegrityError:
                # Unique constraint hit: the canonical grain already exists.
                GB_model_object = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
                app.logger.info('GB already in database: {}'.format(gb_json['gbid']))
        else:
            # Dry run: only report whether the grain is already present.
            try:
                GB_model_object = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
                app.logger.info('Database Contains {}'.format(gb_json['gbid']))
            except  GrainBoundary.DoesNotExist:
                app.logger.info('Not in Database {}'.format(gb_json['gbid']))
                GB_model_object = None

        subgb_files = []
        analyze.find_gb_json('{0}'.format(gb[0]), subgb_files, 'subgb.json')
        for subgb in subgb_files:
            with open(subgb[1],'r') as f:
                subgb_json = json.load(f)
            # Each missing key falls back to a safe default (EAFP).
            try:
                converged = subgb_json['converged']
            except KeyError:
                converged = False

            try:
                E_gb = subgb_json["E_gb"]
            except KeyError:
                # No energy recorded => cannot be converged.
                converged = False
                E_gb = 0.0

            try:
                E_gb_init=subgb_json["E_gb_init"]
            except KeyError:
                E_gb_init = 0.0

            try:
                # NOTE(review): this rebinds the `gbid` *parameter*; harmless
                # here since the parameter is no longer read, but confusing.
                gbid = subgb_json["gbid"]
            except KeyError:
                gbid = subgb_json["name"]

            try:
                area = subgb_json['A']
            except KeyError:
                # Recover the area from the latest structure file.
                # NOTE(review): raises IndexError if the directory contains
                # no .xyz files -- TODO confirm that cannot happen here.
                structs = glob.glob(os.path.join(subgb[0], '*.xyz'))
                struct  = Atoms(structs[-1])
                cell    = struct.get_cell()
                # gb area = cell cross-section in the x-y plane.
                area    = cell[0][0]*cell[1][1]
                subgb_json['n_at'] = len(struct)

            subgb_dict = {"canonical_grain" : GB_model_object,
                          "converged"       : converged,
                          "E_gb_init"       : E_gb_init,
                          "potential"       : subgb_json["param_file"],
                          "rbt"             : serialize_vector(subgb_json['rbt']),
                          "path"            : os.path.relpath(subgb[0], app.config["GRAIN_DATABASE"]),
                          "area"            : area,
                          "rcut"            : subgb_json["rcut"],
                          "n_at"            : subgb_json['n_at'],
                          "E_gb"            : E_gb,
                          "notes"           : "",
                          "gbid"            : gbid}
            if modify:
                try:
                    SubGrainBoundary.create(**subgb_dict)
                    app.logger.info('Created SubGB entry {}'.format(subgb_dict))
                except IntegrityError:
                    app.logger.info('SubGB already in DB {}'.format(subgb_dict))
            else:
                print subgb_dict
Example #6
0
def gb_check_force(material='alphaFe', or_axis='001', force_tol=0.05, modify_db=False, gb_start=0, sub_start=0):
    """Recurse through directory tree, loading the structure file, json dict
    and the model for each subgrain. Check that the force tolerance in the structure
    file has actually been met for convergence.

    Args:
      material(str): material string.
      or_axis(str): orientation axis.
      force_tol(float): Max force magnitude on the relaxed structures.
      modify_db(bool): If True :py:class:`SubGrainBoundary` will be updated.
      gb_start(int): Start from this grain boundary in list.
      sub_start(int): Start from this subgrainboundary in list.
    """

    analyze  = GBAnalysis()
    gb_files = []
    analyze.find_gb_json('{0}'.format(os.path.join(GRAIN_DATABASE, os.path.join(material, or_axis))),
                                                   gb_files, 'gb.json')
    no_struct_file = open('no_struct.txt','a')
    for gb_num, gb in enumerate(gb_files[gb_start:]):
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        GB_model = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
        for subgb_num, subgb_model in enumerate(GB_model.subgrains[sub_start:]):
            subgb_dict_path = os.path.join(subgb_model.path,'subgb.json')
            subgb_dict_path = os.path.join(GRAIN_DATABASE, subgb_dict_path)
            with open(subgb_dict_path,'r') as f:
                subgb_dict = json.load(f)
            struct_path = os.path.join(subgb_model.path, subgb_model.gbid+'_traj.xyz')
            struct_path = os.path.join(GRAIN_DATABASE, struct_path)
            try:
                ats = io.read(struct_path, index='-1')
            except RuntimeError:
                print 'No Struct File'
            except EOFError:
                print 'Struct File corrupted'
            except IOError:
                print 'No Traj File'
            else:
                print gb_num+gb_start, subgb_num+sub_start, struct_path
                try:
                    forces = [np.sqrt(x**2+y**2+z**2) for x,y,z, in zip(ats.properties['force'][0],
                                                                        ats.properties['force'][1],
                                                                        ats.properties['force'][2])]
                except KeyError:
                    print gb_num+gb_start, struct_path
                    print 'No Force in atoms object'
                    conv_check = False
                else:
                    if max(forces) <= force_tol:
                        conv_check = True
                    else:
                        conv_check = False
                        subgb_dict['E_gb'] = 0.0

            if modify_db:
                if conv_check != subgb_dict['converged']:
                    print struct_path
                    print 'Force from .xyz: ', conv_check, 'json: ', subgb_dict['converged']
                    print 'Model: ', subgb_model.converged
                    subgb_dict['converged'] = conv_check
                    with open(subgb_dict_path, 'w') as f:
                        json.dump(subgb_dict, f, indent=2)
                else:
                    pass
            else:
                try:
                    if conv_check != subgb_dict['converged']:
                        print struct_path
                        print 'Force from .xyz: ', conv_check, 'json: ', subgb_dict['converged']
                        print 'Model: ', subgb_model.converged
                    else:
                        pass
                except KeyError:
                    print 'no convergence key'
                    subgb_dict['converged']=conv_check
                    with open(subgb_dict_path, 'w') as f:
                        json.dump(subgb_dict, f, indent=2)
Example #7
0
def gb_check_conv(material='alphaFe', or_axis='001', modify_db=False):
    """Scans through grainboundary directory tree,
    inspecting the subgrain dictionary and the :py:class:`SubGrainBoundary` to test if
    the grain boundary energy, number of atoms, gb_area,
    and convergence flag are consistent. If modify_db is True the SQLite model
    will be updated.

    Args:
      material: Which material to do check json/database convergence consistency on.
      or_axis: Which orientation axis to check.
      modify_db: Boolean. If True updates gb_model in database otherwise
        just prints inconsistent grain json/database value.
    """

    analyze  = GBAnalysis()
    gb_files = []
    analyze.find_gb_json('{0}'.format(os.path.join(GRAIN_DATABASE, os.path.join(material, or_axis))),
                                      gb_files, 'gb.json')
    no_struct_file = open('no_struct.txt','a')
    for gb_num, gb in enumerate(gb_files[:]):
        with open(gb[1], 'r') as f:
            gb_json = json.load(f)
        GB_model = GrainBoundary.select().where(GrainBoundary.gbid==gb_json['gbid']).get()
        for subgb_model in GB_model.subgrains:
            subgb_dict_path = os.path.join(subgb_model.path,'subgb.json')
            subgb_dict_path = os.path.join(GRAIN_DATABASE, subgb_dict_path)
            with open(subgb_dict_path,'r') as f:
                subgb_dict = json.load(f)
            struct_path = os.path.join(subgb_model.path, subgb_model.gbid+'_traj.xyz')
            struct_path = os.path.join(GRAIN_DATABASE, struct_path)
            app.logger.debug(struct_path)
            try:
                assert subgb_model.converged==subgb_dict['converged']
            except AssertionError:
                if not modify_db:
                    print 'Not updating:'
                    print subgb_dict_path
                    print 'Model: ', subgb_model.converged, 'Json:', subgb_dict['converged']
                else:
                    try:
                        assert type(subgb_dict['converged'])==bool
                    except:
                        print "json 'converged' value not boolean. json file could be corrupted:"
                        print subgb_dict_path
                    else:
                        print 'Updating model instance in database:'
                        print subgb_dict_path
                        print 'Model: ', subgb_model.converged, 'Json:', subgb_dict['converged']
                        subgb_model.converged = subgb_dict['converged']
                        subgb_model.save()

            try:
                assert subgb_model.n_at==subgb_dict['n_at']
            except KeyError:
                try:
                    ats = Atoms(struct_path)
                except RuntimeError:
                    print struct_path.replace('_traj','')
                    ats = Atoms(struct_path.replace('_traj',''))

                cell = ats.get_cell()
                subgb_dict['n_at'] = len(ats)
                subgb_dict['area'] = cell[0][0]*cell[1][1]
                with open(subgb_dict_path, 'w') as f:
                    json.dump(subgb_dict, f, indent=2)
            except AssertionError:
                if not modify_db:
                    print subgb_model.n_at, subgb_dict['n_at']
                else:
                    print 'Updating model instance in database:'
                    subgb_model.n_at = subgb_dict['n_at']
                    print 'Model: {}  json:{}'.format(subgb_model.n_at, subgb_dict['n_at'])
                    subgb_model.save()

            try:
                assert (abs(subgb_model.area - subgb_dict['area']) < 1e-8)
            except KeyError:
                print 'adding area key'
                subgb_dict['area'] = subgb_dict['A']
                with open(subgb_dict_path, 'w') as f:
                    json.dump(subgb_dict, f, indent=2)
            except AssertionError:
                if not modify_db:
                    print subgb_model.area, subgb_dict['area']
                else:
                    subgb_model.area = subgb_dict['area']
                    print 'Model: {}  json:{}'.format(subgb_model.area, subgb_dict['area'])
                    subgb_model.save()

            try:
                assert (abs(subgb_model.E_gb - subgb_dict['E_gb']) < 1e-8)
            except AssertionError:
                if not modify_db:
                    print 'Not updating:'
                    print 'Model E_gb:', subgb_model.E_gb, 'JSON E_gb:',  subgb_dict['E_gb']
                else:
                    print 'Model E_gb:', subgb_model.E_gb, 'JSON E_gb:',  subgb_dict['E_gb']
                    print subgb_dict_path
                    subgb_model.E_gb = subgb_dict['E_gb']
                    subgb_model.save()
            except KeyError:
                subgb_dict['converged']=False
                subgb_dict['E_gb'] = 0.0
                with open(subgb_dict_path, 'w') as f:
                    json.dump(subgb_dict, f, indent=2)