# NOTE: IRNN is assumed to be importable from the surrounding project; adjust
# the import path to your installation (e.g. "from irnn import IRNN").

def grada(direcs, v='bo1_H-H', dft='siesta', batch=100):
    ''' Print gradients of the angle-related terms with respect to a
        force-field variable, e.g. bo1_C-H, boc5_C, rosi_C-H, boc1.
    '''
    print('-  computing gradients ... ...')
    mol = list(direcs)[-1]          # use the last molecule key in direcs

    rn = IRNN(libfile='ffield',
              direcs=direcs,
              dft=dft,
              batch_size=batch,
              pkl=True)
    rn.initialize()
    rn.session(learning_rate=1.0e-4, method='AdamOptimizer')

    # gradient of the per-molecule angle energy with respect to variable v
    fg   = open('gradient.txt', 'w')
    l    = 'eang'
    grad = rn.get_gradient(rn.__dict__[l][mol], rn.p[v])
    print('-  the gradient of %s_%s/%s is: ' % (l, mol, v), grad, file=fg)
    fg.close()

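    # per-angle intermediates and energy terms (attributes of rn) to differentiate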
    # anglit = ['D_ang','thet','theta0','expang','f_7','f_8','EANG','EPEN','ETC']
    anglit = ['sbo', 'f_8', 'EANG', 'EPEN', 'ETC']
    angs   = rn.angs
    # angs = ['H-O-H']
    for ang in angs:
        if rn.nang[ang] > 0:
            for l in anglit:
                grad = rn.get_gradient(rn.__dict__[l][ang], rn.p[v])
                fg   = open('gradient.txt', 'a')
                print('- the gradient of %s/%s is: ' % (l + '_' + ang, v),
                      grad, file=fg)
                fg.close()
    rn.sess.close()
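
# Usage sketch (hypothetical directory label and path, not from the original
# script):
#   grada({'h2o': '/path/to/siesta/h2o'}, v='bo1_H-H', dft='siesta', batch=50)
# This writes the gradient of each angle-related term with respect to the
# force-field variable 'bo1_H-H' to gradient.txt.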
def gradt(v='bo2_C-N'):
    ''' Print gradients of the torsion-related terms with respect to a
        force-field variable, e.g. bo1_C-H, boc5_C, rosi_C-H, boc1.
    '''
    print('-  computing gradients ... ...')
    direcs = {
        'nm': '/home/feng/cpmd_data/packmol/nm',
        'nme': '/home/feng/cpmd_data/packmol/nme'
    }

    rn = IRNN(libfile='ffield',
              direcs=direcs,
              dft='cpmd',
              batch_size=200,
              pkl=True)
    rn.initialize()
    rn.session(learning_rate=1.0e-4, method='AdamOptimizer')

    fg = open('gradient.txt', 'w')
    grad = rn.get_gradient(rn.Loss, rn.p[v])
    print('-  the gradient of Loss/%s is: ' % v, grad, file=fg)

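    # gradients of the per-molecule torsion (etor) and conjugation (efcon) energies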
    molit = ['etor', 'efcon']
    for mol in rn.direcs:
        for l in molit:
            grad = rn.get_gradient(rn.__dict__[l][mol], rn.p[v])
            print('-  the gradient of %s/%s is: ' % (l + '_' + mol, v),
                  grad,
                  file=fg)
    fg.close()

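    # per-torsion intermediates and energy terms to differentiate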
    torlit = ['f_10', 'f_11', 'expv2', 'ETOR', 'Efcon']
    i = 0
    for tor in rn.tors:
        if rn.ntor[tor] > 0:
            # if i<=100:
            if tor in ('C-O-O-N', 'C-O-O-H'):
                for l in torlit:
                    fg = open('gradient.txt', 'a')
                    grad = rn.get_gradient(rn.__dict__[l][tor], rn.p[v])
                    print('- the gradient of %s/%s is: ' % (l + '_' + tor, v),
                          grad,
                          file=fg)
                    fg.close()
                i += 1

    rn.sess.close()
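
# Usage sketch: gradt(v='bo2_C-N') differentiates the training loss, the
# per-molecule torsion/conjugation energies and a few per-torsion terms with
# respect to 'bo2_C-N', using the cpmd trajectories hard-coded in direcs above.
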
def gradb(direcs, v='bo5', bd='C-O', dft='siesta', batch=100):
    ''' Print gradients of the bond-related terms with respect to a
        force-field variable, e.g. bo1_C-H, boc5_C, rosi_C-H, boc1.
    '''
    v = v + '_' + bd                # full variable name, e.g. bo5_C-O

    print('-  computing gradients ... ...')
    mol = list(direcs)[-1]          # use the last molecule key in direcs

    rn = IRNN(libfile='ffield',
              direcs=direcs,
              dft=dft,
              batch_size=batch,
              pkl=True,
              interactive=True)
    rn.initialize()
    rn.session(learning_rate=3.0e-4, method='AdamOptimizer')

    fg = open('gradient.txt','w')

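    # per-bond intermediates (bond orders, correction terms and bond energy)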
    bdlit = ['bop_si','bop_pi','bop_pp',
             'bosi','F',
             'bopi',
             'bopp',
             'bo','bso',
             'powb','expb','EBD']
    # bdlit = ['bo','EBD']

    gl = rn.get_gradient(rn.Loss, rn.p[v])
    print('-  the gradient of Loss/%s is:' % v, gl, file=fg)
    bonds = rn.bonds
    # bonds = [bd]
    for b in bonds:
        if rn.nbd[b] > 0:
            for l in bdlit:
                grad = rn.get_gradient(rn.__dict__[l][b], rn.p[v])
                print('-  the gradient of %s/%s is: ' % (l + '_' + b, v),
                      grad, file=fg)
    fg.close()
    
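    # per-molecule energy contributions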
    ml = ['ebond','elone','eover','eunder','eang',
          'epen','tconj','etor','efcon','evdw','ehb']

    # for mol in direcs:
    for l in ml:
        grad = rn.get_gradient(rn.__dict__[l][mol],rn.p[v]) 
        fg = open('gradient.txt','a')
        print('-  the gradient of %s_%s/%s is: ' %(l,mol,v),grad,file=fg)
        fg.close()

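    # per-atom intermediates (lone-pair, over- and under-coordination terms)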
    alit  = ['Delta_lp','Delta_lpcorr','Dpi','Delta_e','nlp','slp','EL',
             'EOV','so','otrm1','otrm2',
             'EUN']
    atoms = rn.spec
    
    for l in alit:
        # atoms = bd.split('-')
        for a in atoms:
            grad = rn.get_gradient(rn.__dict__[l][a],rn.p[v]) 
            fg = open('gradient.txt','a')
            print('-  the gradient of %s_%s/%s is: ' %(l,a,v),grad,file=fg)
            fg.close()
    rn.sess.close()
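
# Minimal driver sketch. The trajectory path below is a placeholder
# (hypothetical); point direcs at your own DFT data and keep an 'ffield'
# parameter file in the working directory.
if __name__ == '__main__':
    direcs = {'nm': '/path/to/your/dft/nm'}   # hypothetical trajectory path
    gradb(direcs, v='bo5', bd='C-O', dft='siesta', batch=100)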