def forward(self, xs, Vs=None, keep=0):
    """Evaluate the taped function and optionally propagate Taylor directions.

    Convenience method that internally calls the appropriate ADOL-C drivers.
    Generic call::

        [y1, y2, ...], [W1, W2, ...] = self.forward([x1, x2, ...],
                                                    [V1, V2, ...] = None,
                                                    0 <= keep <= D+1)

    where xi is an array of arbitrary shape shp_xi and Vi the corresponding
    direction tensor of shape shp_xi + (P, D).  The outputs are in the same
    format as handed to the self.dependent function.

    Stores the inputs in self.xs, self.Vs and the results in self.ys
    (and self.Ws when Vs is given), so a later reverse() can replay the sweep.
    """
    # deep-copy the inputs so later mutation by the caller cannot corrupt
    # the state that reverse() relies on
    xs = copy.deepcopy(xs)
    Vs = copy.deepcopy(Vs)
    self.xs = xs
    self.Vs = Vs

    # prepare xs: flatten each input and concatenate into one 1-d array
    # -----------------------------------------------------------------
    rx_list = []
    for nx, x in enumerate(xs):
        if numpy.isscalar(x):
            x = numpy.asarray([x])
        numpy.testing.assert_array_almost_equal(
            self.independentVariableShapeList[nx], numpy.shape(x),
            err_msg='\ntaped xs[%d].shape != forward xs[%d]\n' % (nx, nx))
        rx_list.append(numpy.ravel(x))
    self.x = numpy.concatenate(rx_list)

    if Vs is not None:
        # prepare Vs: each Vi has shape shp_xi + (P, D); flatten shp_xi
        rV_list = []
        for nV, V in enumerate(Vs):
            V_shp = numpy.shape(V)
            try:
                # only the shape-mismatch failure should be translated into
                # a ValueError; a bare except also hid unrelated errors
                numpy.testing.assert_array_almost_equal(
                    self.independentVariableShapeList[nV], V_shp[:-2])
            except AssertionError:
                # list(map(...)): on Python 3 a bare map object would
                # render as '<map object at 0x...>' in the message
                raise ValueError(
                    'taped independentVariableShapeList = %s\n but supplied Vs = %s'
                    % (str(self.independentVariableShapeList),
                       str([numpy.shape(V_) for V_ in Vs])))
            # int(...): numpy.prod(()) returns a float, which is not a
            # valid reshape dimension
            rV_list.append(numpy.reshape(
                V, (int(numpy.prod(V_shp[:-2])),) + V_shp[-2:]))
        self.V = numpy.ascontiguousarray(numpy.concatenate(rV_list, axis=0))

    # run the ADOL-C drivers
    # ----------------------
    if Vs is None:
        self.y = wrapped_functions.zos_forward(self.tape_tag, self.x,
                                               keep=keep)
    else:
        N, P, D = self.V.shape
        if keep == 0:
            self.y, self.W = wrapped_functions.hov_forward(
                self.tape_tag, self.x, self.V)
        elif P == 1:
            Vtmp = self.V.reshape((N, D))
            self.y, Wtmp = wrapped_functions.hos_forward(
                self.tape_tag, self.x, Vtmp, keep)
            M = Wtmp.shape[0]
            self.W = Wtmp.reshape((M, P, D))
        elif P > 1 and keep > 0:
            # the original message used a backslash continuation inside the
            # string literal, embedding stray source indentation; implicit
            # concatenation keeps the message clean
            raise NotImplementedError(
                "ADOL-C doesn't support higher order vector forward with keep!\n"
                " workaround: several runs forward with P=1")

    # prepare outputs: split self.y back into the taped output shapes
    # ---------------------------------------------------------------
    self.ys = []
    count = 0
    for ns, s in enumerate(self.dependentVariableShapeList):
        M_ns = int(numpy.prod(s))  # int: numpy.prod(()) is a float
        self.ys.append(self.y[count:count + M_ns].reshape(s))
        count += M_ns

    if Vs is not None:
        self.Ws = []
        count = 0
        for ns, s in enumerate(self.dependentVariableShapeList):
            M_ns = int(numpy.prod(s))
            self.Ws.append(
                self.W[count:count + M_ns, :, :].reshape(s + (P, D)))
            count += M_ns

    # return outputs
    # --------------
    if Vs is None:
        return self.ys
    else:
        return (self.ys, self.Ws)
def forward(self, xs, Vs=None, keep=0):
    """Evaluate the taped function and optionally propagate Taylor directions.

    Convenience method that internally calls the appropriate ADOL-C drivers.
    Generic call::

        [y1, y2, ...], [W1, W2, ...] = self.forward([x1, x2, ...],
                                                    [V1, V2, ...] = None,
                                                    0 <= keep <= D+1)

    where xi is an array of arbitrary shape shp_xi and Vi the corresponding
    direction tensor of shape shp_xi + (P, D).  The outputs are in the same
    format as handed to the self.dependent function.

    Stores the inputs in self.xs, self.Vs and the results in self.ys
    (and self.Ws when Vs is given), so a later reverse() can replay the sweep.
    """
    # deep-copy the inputs so later mutation by the caller cannot corrupt
    # the state that reverse() relies on
    xs = copy.deepcopy(xs)
    Vs = copy.deepcopy(Vs)
    self.xs = xs
    self.Vs = Vs

    # prepare xs: flatten each input and concatenate into one 1-d array
    # -----------------------------------------------------------------
    rx_list = []
    for nx, x in enumerate(xs):
        if numpy.isscalar(x):
            x = numpy.asarray([x])
        numpy.testing.assert_array_almost_equal(
            self.independentVariableShapeList[nx], numpy.shape(x),
            err_msg='\ntaped xs[%d].shape != forward xs[%d]\n' % (nx, nx))
        rx_list.append(numpy.ravel(x))
    self.x = numpy.concatenate(rx_list)

    if Vs is not None:
        # prepare Vs: each Vi has shape shp_xi + (P, D); flatten shp_xi
        rV_list = []
        for nV, V in enumerate(Vs):
            V_shp = numpy.shape(V)
            try:
                # only the shape-mismatch failure should be translated into
                # a ValueError; a bare except also hid unrelated errors
                numpy.testing.assert_array_almost_equal(
                    self.independentVariableShapeList[nV], V_shp[:-2])
            except AssertionError:
                # list(map(...)): on Python 3 a bare map object would
                # render as '<map object at 0x...>' in the message
                raise ValueError(
                    'taped independentVariableShapeList = %s\n but supplied Vs = %s'
                    % (str(self.independentVariableShapeList),
                       str([numpy.shape(V_) for V_ in Vs])))
            # int(...): numpy.prod(()) returns a float, which is not a
            # valid reshape dimension
            rV_list.append(numpy.reshape(
                V, (int(numpy.prod(V_shp[:-2])),) + V_shp[-2:]))
        self.V = numpy.ascontiguousarray(numpy.concatenate(rV_list, axis=0))

    # run the ADOL-C drivers
    # ----------------------
    if Vs is None:
        self.y = wrapped_functions.zos_forward(self.tape_tag, self.x,
                                               keep=keep)
    else:
        N, P, D = self.V.shape
        if keep == 0:
            self.y, self.W = wrapped_functions.hov_forward(
                self.tape_tag, self.x, self.V)
        elif P == 1:
            Vtmp = self.V.reshape((N, D))
            self.y, Wtmp = wrapped_functions.hos_forward(
                self.tape_tag, self.x, Vtmp, keep)
            M = Wtmp.shape[0]
            self.W = Wtmp.reshape((M, P, D))
        elif P > 1 and keep > 0:
            # the original message used a backslash continuation inside the
            # string literal, embedding stray source indentation; implicit
            # concatenation keeps the message clean
            raise NotImplementedError(
                "ADOL-C doesn't support higher order vector forward with keep!\n"
                " workaround: several runs forward with P=1")

    # prepare outputs: split self.y back into the taped output shapes
    # ---------------------------------------------------------------
    self.ys = []
    count = 0
    for ns, s in enumerate(self.dependentVariableShapeList):
        M_ns = int(numpy.prod(s))  # int: numpy.prod(()) is a float
        self.ys.append(self.y[count:count + M_ns].reshape(s))
        count += M_ns

    if Vs is not None:
        self.Ws = []
        count = 0
        for ns, s in enumerate(self.dependentVariableShapeList):
            M_ns = int(numpy.prod(s))
            self.Ws.append(
                self.W[count:count + M_ns, :, :].reshape(s + (P, D)))
            count += M_ns

    # return outputs
    # --------------
    if Vs is None:
        return self.ys
    else:
        return (self.ys, self.Ws)
def reverse(self, Wbars):
    """Propagate adjoint directions back through the tape.

    Convenience method that internally calls the appropriate ADOL-C drivers.
    Since ADOL-C does not support hov_forward with keep = D+1, this function
    replays the inputs stored by AdolcProgram.forward with hos_forward sweeps
    followed by hov_ti_reverse calls.  This adds significant overhead.

    Generic call::

        [Vbar1, Vbar2, ...] = self.reverse([Wbar1, Wbar2, ...])

    where Wbari is an array of shape (Q,) + yi.shape + (P, D+1)
    and   Vbari is an array of shape (Q,) + xi.shape + (P, D+1).
    """
    # prepare Wbar as one (Q, M, P, D+1) array
    # ----------------------------------------
    # validation uses explicit 'if not ...: raise' instead of the previous
    # try/assert/bare-except: asserts are stripped under 'python -O' and the
    # bare except also masked unrelated errors (e.g. IndexError)
    rWbar_list = []
    for m, Wbar in enumerate(Wbars):
        Wbar_shp = numpy.shape(Wbar)
        if len(self.dependentVariableShapeList[m]) + 3 != numpy.ndim(Wbar):
            raise ValueError(
                'Wbar.shape must be (Q, yi.shape,P, D+1)=%s but provided %s'
                % ('(Q,' + str(self.dependentVariableShapeList[m]) + ',P,D+1)',
                   str(Wbar_shp)))
        if self.dependentVariableShapeList[m] != Wbar_shp[1:-2]:
            raise ValueError(
                'taped: Wbar.shape = %s but provided: Wbar.shape = %s '
                % (str(self.dependentVariableShapeList[m]),
                   str(Wbar_shp[1:-2])))
        # int(...): numpy.prod(()) returns a float, which is not a valid
        # reshape dimension
        rWbar_list.append(numpy.reshape(
            Wbar,
            (Wbar_shp[0], int(numpy.prod(Wbar_shp[1:-2]))) + Wbar_shp[-2:]))
    Wbar = numpy.ascontiguousarray(numpy.concatenate(rWbar_list, axis=1))

    if self.Vs is not None:
        # this branch is executed after a forward(xs, Vs)
        N, PV, DV = self.V.shape
        Q, M, P, Dp1 = Wbar.shape
        D = Dp1 - 1
        if PV != P:
            raise ValueError(
                'The number of directions P=%d in the forward run does not match the number of directions P=%d for the reverse run'
                % (PV, P))
        if DV != D:
            raise ValueError(
                'The degree D=%d in the forward run does not match the degree D=%d for the reverse run'
                % (DV, D))

        # ADOL-C cannot keep a vector (P>1) forward, so repeat a
        # hos_forward with keep followed by hov_ti_reverse for each
        # direction p separately
        Vbar = numpy.zeros((Q, N, P, D + 1))
        for p in range(P):
            Vtmp = self.V[:, p, :]
            wrapped_functions.hos_forward(self.tape_tag, self.x, Vtmp, D + 1)
            Vbar[:, :, p, :], nz = wrapped_functions.hov_ti_reverse(
                self.tape_tag, Wbar[:, :, p, :])

    else:
        # this branch is executed after a forward(xs, Vs=None)
        Q, M, P, Dp1 = Wbar.shape
        D = Dp1 - 1
        N = self.x.size
        if Dp1 != 1:
            raise ValueError(
                'You ran a forward(xs, Vs=None) and therefore in Wbar.shape =(Q,M,P,D+1) D should be 0. You provided D=%d.' % D)
        if P != 1:
            raise ValueError(
                'You ran a forward(xs, Vs=None) and therefore in Wbar.shape =(Q,M,P,D+1) P should be 1. You provided P=%d.' % P)
        Vbar = numpy.zeros((Q, N, P, D + 1))
        # keep=1 retains the zero-order Taylor coefficients for the
        # subsequent reverse sweep
        wrapped_functions.zos_forward(self.tape_tag, self.x, keep=1)
        Vbar[:, :, 0, :] = wrapped_functions.hov_ti_reverse(
            self.tape_tag, Wbar[:, :, 0, :])[0]

    # prepare output: split Vbar back into the taped input shapes
    # -----------------------------------------------------------
    self.Vbar = Vbar
    rVbar_list = []
    count = 0
    Q = Vbar.shape[0]  # loop-invariant, hoisted
    for n, s in enumerate(self.independentVariableShapeList):
        Nx = int(numpy.prod(s))  # int: numpy.prod(()) is a float
        rVbar_list.append(
            Vbar[:, count:count + Nx, :].reshape((Q,) + s + (P, Dp1)))
        count += Nx

    return rVbar_list
def reverse(self, Wbars):
    """Propagate adjoint directions back through the tape.

    Convenience method that internally calls the appropriate ADOL-C drivers.
    Since ADOL-C does not support hov_forward with keep = D+1, this function
    replays the inputs stored by AdolcProgram.forward with hos_forward sweeps
    followed by hov_ti_reverse calls.  This adds significant overhead.

    Generic call::

        [Vbar1, Vbar2, ...] = self.reverse([Wbar1, Wbar2, ...])

    where Wbari is an array of shape (Q,) + yi.shape + (P, D+1)
    and   Vbari is an array of shape (Q,) + xi.shape + (P, D+1).
    """
    # prepare Wbar as one (Q, M, P, D+1) array
    # ----------------------------------------
    # validation uses explicit 'if not ...: raise' instead of the previous
    # try/assert/bare-except: asserts are stripped under 'python -O' and the
    # bare except also masked unrelated errors (e.g. IndexError)
    rWbar_list = []
    for m, Wbar in enumerate(Wbars):
        Wbar_shp = numpy.shape(Wbar)
        if len(self.dependentVariableShapeList[m]) + 3 != numpy.ndim(Wbar):
            raise ValueError(
                'Wbar.shape must be (Q, yi.shape,P, D+1)=%s but provided %s'
                % ('(Q,' + str(self.dependentVariableShapeList[m]) + ',P,D+1)',
                   str(Wbar_shp)))
        if self.dependentVariableShapeList[m] != Wbar_shp[1:-2]:
            raise ValueError(
                'taped: Wbar.shape = %s but provided: Wbar.shape = %s '
                % (str(self.dependentVariableShapeList[m]),
                   str(Wbar_shp[1:-2])))
        # int(...): numpy.prod(()) returns a float, which is not a valid
        # reshape dimension
        rWbar_list.append(numpy.reshape(
            Wbar,
            (Wbar_shp[0], int(numpy.prod(Wbar_shp[1:-2]))) + Wbar_shp[-2:]))
    Wbar = numpy.ascontiguousarray(numpy.concatenate(rWbar_list, axis=1))

    if self.Vs is not None:
        # this branch is executed after a forward(xs, Vs)
        N, PV, DV = self.V.shape
        Q, M, P, Dp1 = Wbar.shape
        D = Dp1 - 1
        if PV != P:
            raise ValueError(
                'The number of directions P=%d in the forward run does not match the number of directions P=%d for the reverse run'
                % (PV, P))
        if DV != D:
            raise ValueError(
                'The degree D=%d in the forward run does not match the degree D=%d for the reverse run'
                % (DV, D))

        # ADOL-C cannot keep a vector (P>1) forward, so repeat a
        # hos_forward with keep followed by hov_ti_reverse for each
        # direction p separately
        Vbar = numpy.zeros((Q, N, P, D + 1))
        for p in range(P):
            Vtmp = self.V[:, p, :]
            wrapped_functions.hos_forward(self.tape_tag, self.x, Vtmp, D + 1)
            Vbar[:, :, p, :], nz = wrapped_functions.hov_ti_reverse(
                self.tape_tag, Wbar[:, :, p, :])

    else:
        # this branch is executed after a forward(xs, Vs=None)
        Q, M, P, Dp1 = Wbar.shape
        D = Dp1 - 1
        N = self.x.size
        if Dp1 != 1:
            raise ValueError(
                'You ran a forward(xs, Vs=None) and therefore in Wbar.shape =(Q,M,P,D+1) D should be 0. You provided D=%d.' % D)
        if P != 1:
            raise ValueError(
                'You ran a forward(xs, Vs=None) and therefore in Wbar.shape =(Q,M,P,D+1) P should be 1. You provided P=%d.' % P)
        Vbar = numpy.zeros((Q, N, P, D + 1))
        # keep=1 retains the zero-order Taylor coefficients for the
        # subsequent reverse sweep
        wrapped_functions.zos_forward(self.tape_tag, self.x, keep=1)
        Vbar[:, :, 0, :] = wrapped_functions.hov_ti_reverse(
            self.tape_tag, Wbar[:, :, 0, :])[0]

    # prepare output: split Vbar back into the taped input shapes
    # -----------------------------------------------------------
    self.Vbar = Vbar
    rVbar_list = []
    count = 0
    Q = Vbar.shape[0]  # loop-invariant, hoisted
    for n, s in enumerate(self.independentVariableShapeList):
        Nx = int(numpy.prod(s))  # int: numpy.prod(()) is a float
        rVbar_list.append(
            Vbar[:, count:count + Nx, :].reshape((Q,) + s + (P, Dp1)))
        count += Nx

    return rVbar_list