Example #1
from irff.reax import ReaxFF   # assumed import path; adjust to your ReaxFF training package


def grada(direcs, v='valang_O', dft='siesta', batch=100):
    '''Print gradients of the angle-energy terms with respect to one
       force-field variable; variable names look like bo1_C-H, boc5_C,
       rosi_C-H or boc1.
    '''
    print('-  grading ... ...')
    for m in direcs:   # keep the last molecule key in direcs
        mol = m

    rn = ReaxFF(libfile='ffield',
                direcs=direcs,
                dft=dft,
                batch_size=batch,
                pkl=True)
    rn.initialize()
    rn.session(learning_rate=1.0e-4, method='AdamOptimizer')

    with open('gradient.txt', 'w') as fg:
        l = 'eang'   # per-molecule angle-energy tensor
        grad = rn.get_gradient(rn.__dict__[l][mol], rn.p[v])
        print('-  the gradient of %s_%s/%s is: ' % (l, mol, v), grad, file=fg)

    # anglit = ['D_ang','thet','theta0','expang','f_7','f_8','EANG','EPEN','ETC']
    anglit = ['EANG']
    angs = rn.angs
    # angs = ['C-C-C']
    for ang in angs:
        if rn.nang[ang] > 0:   # skip angle types absent from the training structures
            for l in anglit:
                grad = rn.get_gradient(rn.__dict__[l][ang], rn.p[v])
                print('- the gradient of %s/%s is: ' % (l + '_' + ang, v),
                      grad)
    rn.sess.close()
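
A minimal driver for grada might look like the sketch below; the molecule
label and trajectory file in direcs are placeholders, only the call signature
comes from the function above.

# hypothetical call: map molecule labels to trajectory files and inspect the
# angle-energy gradients with respect to the valang_O parameter
if __name__ == '__main__':
    direcs = {'h2o-0': 'h2o.traj'}          # placeholder training data
    grada(direcs, v='valang_O', dft='siesta', batch=50)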
Example #2
import logging

import numpy as np

from irff.reax import ReaxFF   # assumed import path; adjust to your ReaxFF training package

logger = logging.getLogger(__name__)


def gradbm(direcs, v='bo5', bd='H-H', dft='siesta', batch=100):
    '''Trace a NaN gradient of the bond energy for one bond type down to the
       intermediate bond-order terms; variable names look like bo1_C-H,
       boc5_C, rosi_C-H or boc1.
    '''
    v_ = v              # bare variable name
    v = v + '_' + bd    # full per-bond variable name, e.g. bo5_H-H
    print('-  grading ... ...')

    rn = ReaxFF(libfile='ffield',
                direcs=direcs,
                dft=dft,
                batch_size=batch,
                pkl=True)
    rn.initialize()
    rn.session(learning_rate=3.0e-4, method='AdamOptimizer')

    # intermediate bond-order terms and the bond energy, checked when a NaN appears
    bdlit = [
        'bop', 'bop_si', 'bop_pi', 'bop_pp', 'f_1', 'f_2', 'f_3', 'f_4', 'f_5',
        'bosi', 'bopi', 'bopp', 'powb', 'expb', 'sieng', 'EBD'
    ]

    gl = rn.get_gradient(rn.Loss, rn.p[v])
    logger.info('-  the gradient of Loss/%s is %s.' % (v, str(gl)))

    # bonds = rn.bonds            # uncomment to scan every bond type
    bonds = [bd]                  # restrict the check to the requested bond
    for bd in bonds:
        v = v_ + '_' + bd
        if rn.nbd[bd] > 0:
            grad = rn.get_gradient(rn.__dict__['EBD'][bd], rn.p[v])
            logger.info('-  the gradient of %s/%s is: %s' %
                        ('EBD' + '_' + bd, v, str(grad)))
            if grad is not None and np.isnan(grad):
                # the EBD gradient is NaN: check every intermediate bond term
                for l in bdlit:
                    grad = rn.get_gradient(rn.__dict__[l][bd], rn.p[v])
                    logger.info('-  the gradient of %s/%s is: %s' %
                                (l + '_' + bd, v, str(grad)))

    # per-molecule energy terms of the ReaxFF total energy
    ml = [
        'ebond', 'elone', 'eover', 'eunder', 'eang', 'epen', 'tconj', 'etor',
        'efcon', 'evdw', 'ehb'
    ]
    for bd in bonds:
        v = v_ + '_' + bd
        if rn.nbd[bd] > 0:
            for m in direcs:
                mol = m
                grad = rn.get_gradient(rn.__dict__['loss'][mol], rn.p[v])
                logger.info('-  the gradient of %s_%s/%s is: %s' %
                            ('loss', mol, v, str(grad)))

                if grad is not None and np.isnan(grad):
                    # the molecular loss gradient is NaN: check each energy term
                    for l in ml:
                        grad = rn.get_gradient(rn.__dict__[l][mol], rn.p[v])
                        logger.info('-  the gradient of %s_%s/%s is: %s' %
                                    (l, mol, v, str(grad)))
    rn.sess.close()
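
A sketch of how gradbm could be run from a script; the logging setup and the
training set are placeholders, while the bond-order variable bo5 and the H-H
bond are the defaults of the function above.

# hypothetical call: log to the console so the NaN trace is visible
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    direcs = {'h2-0': 'h2.traj'}            # placeholder training data
    gradbm(direcs, v='bo5', bd='H-H', batch=50)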
Example #3
from irff.reax import ReaxFF   # assumed import path; adjust to your ReaxFF training package


def gradt(v='bo2_C-N',
          direcs={},
          torlit=['f_10', 'f_11', 'expv2', 'ETOR', 'Efcon'],
          batch=100):
    '''Print gradients of the torsion terms with respect to one force-field
       variable; variable names look like bo1_C-H, boc5_C, rosi_C-H or boc1.
        torlit = ['f_10','f_11','expv2','ETOR','Efcon']
    '''
    print('-  grading ... ...')
    rn = ReaxFF(libfile='ffield',
                direcs=direcs,
                dft='siesta',
                batch_size=batch,
                pkl=True)
    rn.initialize()
    rn.session(learning_rate=1.0e-4, method='AdamOptimizer')

    with open('gradient.txt', 'w') as fg:
        grad = rn.get_gradient(rn.Loss, rn.p[v])
        print('-  the gradient of Loss/%s is: ' % v, grad, file=fg)

        molit = ['etor', 'efcon']   # per-molecule torsion and conjugation energies
        for mol in rn.direcs:
            for l in molit:
                grad = rn.get_gradient(rn.__dict__[l][mol], rn.p[v])
                print('-  the gradient of %s/%s is: ' % (l + '_' + mol, v),
                      grad,
                      file=fg)

    for tor in rn.tors:
        if rn.ntor[tor] > 0:
            # if tor=='C-H-H-C':
            for l in torlit:
                # re-open in append mode so each gradient is flushed to disk
                fg = open('gradient.txt', 'a')
                grad = rn.get_gradient(rn.__dict__[l][tor], rn.p[v])
                print('- the gradient of %s/%s is: ' % (l + '_' + tor, v),
                      grad,
                      file=fg)
                fg.close()

    rn.sess.close()
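
Unlike the bond helpers, gradt takes the fully qualified variable name
directly (e.g. bo2_C-N); a hypothetical invocation for a small
torsion-containing training set:

# hypothetical call: torsion-term gradients w.r.t. bo2_C-N, written to gradient.txt
if __name__ == '__main__':
    gradt(v='bo2_C-N',
          direcs={'nm-0': 'nm.traj'},       # placeholder training data
          batch=50)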
Example #4
import logging

import numpy as np

from irff.reax import ReaxFF   # assumed import path; adjust to your ReaxFF training package

logger = logging.getLogger(__name__)


def gradb(direcs,
          v='bo5',
          bd='H-H',
          nn=False,
          bo_layer=[9, 2],
          debd=True,
          deba=True,
          deang=True,
          dft='siesta',
          batch=100):
    '''Trace NaN gradients of the loss through the bond, atomic, angle and
       torsion energy terms; variable names look like bo1_C-H, boc5_C,
       rosi_C-H or boc1.
    '''
    v_ = v              # bare variable name
    v = v + '_' + bd    # full per-bond variable name, e.g. bo5_H-H
    print('-  grading ... ...')
    ffield = 'ffield.json' if nn else 'ffield'   # JSON library when the neural-network bond order is used

    rn = ReaxFF(libfile=ffield,
                direcs=direcs,
                dft=dft,
                nn=nn,
                bo_layer=bo_layer,
                batch_size=batch,
                pkl=True)
    rn.initialize()
    rn.session(learning_rate=3.0e-4, method='AdamOptimizer')

    if nn:
        # intermediate bond-order terms when the neural-network bond order is used
        bdlit = [
            'bop', 'bop_si', 'bop_pi', 'bop_pp', 'F', 'bosi', 'bopi', 'bopp',
            'powb', 'expb', 'sieng', 'EBD'
        ]
    else:
        # intermediate bond-order terms of the classic ReaxFF functional
        bdlit = [
            'bop', 'bop_si', 'bop_pi', 'bop_pp', 'f_1', 'f_2', 'f_3', 'f_4',
            'f_5', 'bosi', 'bopi', 'bopp', 'powb', 'expb', 'sieng', 'EBD'
        ]

    if debd:
        bonds = rn.bonds
        for b in bonds:
            v = v_ + '_' + b

            grad = rn.get_gradient(rn.Loss, rn.p[v])
            text_ = '-  the gradient of Loss/%s is ' % v
            logger.info(text_ + str(grad))

            # drill down only when the Loss gradient w.r.t. this variable is NaN
            if grad is None:
                continue
            if not np.isnan(grad):
                continue

            if rn.nbd[b] > 0:
                grad = rn.get_gradient(rn.__dict__['EBD'][b], rn.p[v])
                logger.info('-  the gradient of %s/%s is: %s' %
                            ('EBD' + '_' + b, v, str(grad)))
                if grad is not None and np.isnan(grad):
                    for l in bdlit:
                        grad = rn.get_gradient(rn.__dict__[l][b], rn.p[v])
                        logger.info('-  the gradient of %s/%s is: %s' %
                                    (l + '_' + b, v, str(grad)))
    v = v_ + '_' + bd
    if deba:
        # lone-pair, over-coordination and under-coordination energies per species
        sl = ['EL', 'EOV', 'EUN']
        alist = {
            'EL': ['Delta_lp', 'Delta_e', 'D', 'explp'],
            'EOV': ['Delta_lpcorr', 'so', 'otrm1', 'otrm2'],
            'EUN': []
        }
        for sp in rn.spec:
            for l in sl:
                if sp in rn.__dict__[l]:
                    grad = rn.get_gradient(rn.__dict__[l][sp], rn.p[v])
                    logger.info('-  the gradient of %s/%s is: %s' %
                                (l + '_' + sp, v, str(grad)))

                    if grad is not None and np.isnan(grad):
                        for al in alist[l]:
                            grad = rn.get_gradient(rn.__dict__[al][sp],
                                                   rn.p[v])
                            logger.info('-  the gradient of %s/%s is: %s' %
                                        (al + '_' + sp, v, str(grad)))

    if deang:
        al = ['EANG', 'EPEN', 'ETC']   # angle, penalty and three-body conjugation energies
        for ang in rn.angs:
            # v  = 'val1'+'_'+ang
            if rn.nang[ang] > 0:
                for l in al:
                    grad = rn.get_gradient(rn.__dict__[l][ang], rn.p[v])
                    logger.info('-  the gradient of %s/%s is: %s' %
                                (l + '_' + ang, v, str(grad)))

    tl = ['ETOR', 'Efcon']             # torsion and four-body conjugation energies
    for tor in rn.tors:
        # v  = 'tor2' # +'_'+tor
        if rn.ntor[tor] > 0:
            for l in tl:
                grad = rn.get_gradient(rn.__dict__[l][tor], rn.p[v])
                logger.info('-  the gradient of %s/%s is: %s' %
                            (l + '_' + tor, v, str(grad)))
    rn.sess.close()
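
When nn=True the parameters are read from ffield.json and the bond order is
produced by a neural network with the given bo_layer; a hypothetical call for
that case, with all debugging switches enabled:

# hypothetical call: debug NaN gradients of a neural-network bond-order model
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    gradb({'c2h4-0': 'c2h4.traj'},          # placeholder training data
          v='bo1', bd='C-C',
          nn=True, bo_layer=[9, 2],
          debd=True, deba=True, deang=True,
          batch=50)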