def wrapper(*args, **kwargs):
    if _cache.value is None:
        return func(*args, **kwargs)
    args, kwargs = canonicalize(*args, **kwargs)
    # Hash the function key and the canonicalized arguments and compute the
    # hexdigest. This is used to identify cache file `cachefile`.
    h = hashlib.sha1(func_key)
    for arg in args:
        h.update(types.nutils_hash(arg))
    for hkv in sorted(hashlib.sha1(k.encode()).digest() + types.nutils_hash(v)
                      for k, v in kwargs.items()):
        h.update(hkv)
    hkey = h.hexdigest()
    cachefile = _cache.value / hkey
    # Open and lock `cachefile`. Try to read it and, if successful, unlock
    # the file (implicitly by closing the file) and return the value. If
    # reading fails, e.g. because the file did not exist, call `func`, store
    # the result, unlock and return. While not necessary per se, we lock the
    # file immediately to avoid checking twice if there is a cached value: once
    # before locking the file, and once after locking, at which point another
    # party may have written something to the cache already.
    cachefile.parent.mkdir(parents=True, exist_ok=True)
    cachefile.touch()
    with cachefile.open('r+b') as f:
        log.debug('[cache.function {}] acquiring lock'.format(hkey))
        _lock_file(f)
        log.debug('[cache.function {}] lock acquired'.format(hkey))
        try:
            data = pickle.load(f)
            if len(data) == 3:  # For old caches.
                log_, fail, value = data
                if fail:
                    raise pickle.UnpicklingError
            else:
                value, log_ = data
        except (EOFError, pickle.UnpicklingError, IndexError):
            log.debug('[cache.function {}] failed to load, cache will be rewritten'.format(hkey))
        else:
            log.debug('[cache.function {}] load'.format(hkey))
            log_.replay()
            return value
        # Seek back to the beginning, because pickle might have read garbage.
        f.seek(0)
        # Disable the cache temporarily to prevent caching subresults *in* `func`.
        log_ = log.RecordLog()
        with disable(), log.add(log_):
            value = func(*args, **kwargs)
        pickle.dump((value, log_), f)
        log.debug('[cache.function {}] store'.format(hkey))
        return value
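For reference, a minimal usage sketch of the `cache.function` decorator whose wrapper is shown above. The decorated function `simulate` is hypothetical; the `cache.enable` context manager is the one used in `call` below.

from nutils import cache

@cache.function
def simulate(nelems: int):
    # placeholder for an expensive computation; the call is keyed on the
    # sha1 hash of the function and its canonicalized arguments
    return sum(range(nelems))

# outside cache.enable the wrapper falls through to a plain call; inside,
# the first call computes and stores, the second call loads and replays the log
with cache.enable('mycachedir'):
    first = simulate(1000)   # computed, then pickled to the cache file
    second = simulate(1000)  # loaded from the cache file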
def call(func, kwargs, scriptname, funcname=None):
    '''set up compute environment and call function'''

    outdir = config.outdir or os.path.join(os.path.expanduser(config.outrootdir), scriptname)

    with contextlib.ExitStack() as stack:

        stack.enter_context(cache.enable(os.path.join(outdir, config.cachedir)) if config.cache else cache.disable())
        stack.enter_context(matrix.backend(config.matrix))
        stack.enter_context(log.set(log.FilterLog(
            log.RichOutputLog() if config.richoutput else log.StdoutLog(),
            minlevel=5 - config.verbose)))

        if config.htmloutput:
            htmllog = stack.enter_context(log.HtmlLog(
                outdir,
                title=scriptname,
                htmltitle='<a href="http://www.nutils.org">{}</a> {}'.format(SVGLOGO, html.escape(scriptname)),
                favicon=FAVICON))
            uri = (config.outrooturi.rstrip('/') + '/' + scriptname if config.outrooturi
                   else pathlib.Path(outdir).resolve().as_uri()) + '/' + htmllog.filename
            if config.richoutput:
                t0 = time.perf_counter()
                bar = lambda running: '{0} [{1}] {2[0]}:{2[1]:02d}:{2[2]:02d}'.format(
                    uri, 'RUNNING' if running else 'STOPPED', _hms(time.perf_counter() - t0))
                stack.enter_context(stickybar.activate(bar, update=1))
            else:
                log.info('opened log at', uri)
            htmllog.write(
                '<ul style="list-style-position: inside; padding-left: 0px; margin-top: 0px;">{}</ul>'.format(''.join(
                    '<li>{}={} <span style="color: gray;">{}</span></li>'.format(
                        param.name, kwargs.get(param.name, param.default), param.annotation)
                    for param in inspect.signature(func).parameters.values())),
                level=1, escape=False)
            stack.enter_context(log.add(htmllog))

        stack.enter_context(warnings.via(log.warning))
        stack.callback(signal.signal, signal.SIGINT, signal.signal(signal.SIGINT, _sigint_handler))

        log.info('nutils v{}'.format(_version()))
        log.info('start', time.ctime())

        try:
            func(**kwargs)
        except (KeyboardInterrupt, SystemExit, pdb.bdb.BdbQuit):
            log.error('killed by user')
            return 1
        except:
            log.error(traceback.format_exc())
            if config.pdb:
                print(_mkbox(
                    'YOUR PROGRAM HAS DIED. The Python debugger',
                    'allows you to examine its post-mortem state',
                    'to figure out why this happened. Type "h"',
                    'for an overview of commands to get going.'))
                pdb.post_mortem()
            return 2
        else:
            log.info('finish', time.ctime())
            return 0
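A minimal sketch of invoking `call` directly with a hypothetical target function; in practice `nutils.cli.run` parses the command line into `kwargs` before delegating to `call`.

def example(nelems: int = 10):
    log.info('running with nelems =', nelems)

if __name__ == '__main__':
    # returns 0 on success, 1 when killed by the user, 2 on an unhandled exception
    raise SystemExit(call(example, {'nelems': 20}, scriptname='example'))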
def __iter__(self):
    length = type(self).length
    if _cache.value is None:
        yield from self.resume_index([], 0)
    else:
        # The hash of `types.Immutable` uniquely defines this `Recursion`, so use
        # this to identify the cache directory. All iterations are stored as
        # separate files, numbered '0000', '0001', ..., in this directory.
        hkey = self.__nutils_hash__.hex()
        cachepath = _cache.value / hkey
        cachepath.mkdir(exist_ok=True, parents=True)
        log.debug('[cache.Recursion {}] start iterating'.format(hkey))
        # The `history` variable is updated while reading from the cache and
        # truncated to the required length.
        history = []
        # The `exhausted` variable controls if we are reading items from the
        # cache (`False`) or we are computing values and writing to the cache.
        # Once `exhausted` is `True` we keep it there, even if at some point
        # there are cached items available.
        exhausted = False
        # The `stop` variable indicates if an exception is raised in `resume`.
        stop = False
        for i in itertools.count():
            cachefile = cachepath / '{:04d}'.format(i)
            cachefile.touch()
            with cachefile.open('r+b') as f:
                log.debug('[cache.Recursion {}.{:04d}] acquiring lock'.format(hkey, i))
                _lock_file(f)
                log.debug('[cache.Recursion {}.{:04d}] lock acquired'.format(hkey, i))
                if not exhausted:
                    try:
                        log_, stop, value = pickle.load(f)
                    except (pickle.UnpicklingError, IndexError):
                        log.debug('[cache.Recursion {}.{:04d}] failed to load, cache will be rewritten from this point'.format(hkey, i))
                        exhausted = True
                    except EOFError:
                        log.debug('[cache.Recursion {}.{:04d}] cache exhausted'.format(hkey, i))
                        exhausted = True
                    else:
                        log.debug('[cache.Recursion {}.{:04d}] load'.format(hkey, i))
                        log_.replay()
                        if stop and value is None:
                            value = StopIteration
                        history.append(value)
                        if len(history) > length:
                            history = history[1:]
                    if exhausted:
                        resume = self.resume_index(history, i)
                        f.seek(0)
                        del history
                if exhausted:
                    # Disable the cache temporarily to prevent caching subresults *in* `func`.
                    log_ = log.RecordLog()
                    with disable(), log.add(log_):
                        try:
                            value = next(resume)
                        except Exception as e:
                            stop = True
                            value = e
                    log.debug('[cache.Recursion {}.{}] store'.format(hkey, i))
                    pickle.dump((log_, stop, value), f)
            if not stop:
                yield value
            elif isinstance(value, StopIteration):
                return
            else:
                raise value
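For context, a minimal sketch (not taken from the source above) of how a `Recursion` subclass might look, assuming subclasses declare `length` as a class argument and implement a `resume(history)` generator, as the solvers in `nutils.solver` do.

class Count(cache.Recursion, length=1):
    '''hypothetical recursion that yields 1, 2, 3, ...'''

    def resume(self, history):
        # `history` holds the last `length` cached values when resuming
        value = history[-1] if history else 0
        while True:
            value += 1
            yield value

# without an enabled cache this simply runs resume(); with cache.enable
# previously stored iterations are replayed before new ones are computed
print(list(itertools.islice(Count(), 3)))  # [1, 2, 3]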
def main(inflow: 'inflow velocity' = 10,
         viscosity: 'kinematic viscosity' = 1.0,
         density: 'density' = 1.0,
         theta=0.5,
         timestepsize=0.01):

    # mesh and geometry definition
    grid_x_1 = numpy.linspace(-3, -1, 7)
    grid_x_1 = grid_x_1[:-1]
    grid_x_2 = numpy.linspace(-1, -0.3, 8)
    grid_x_2 = grid_x_2[:-1]
    grid_x_3 = numpy.linspace(-0.3, 0.3, 13)
    grid_x_3 = grid_x_3[:-1]
    grid_x_4 = numpy.linspace(0.3, 1, 8)
    grid_x_4 = grid_x_4[:-1]
    grid_x_5 = numpy.linspace(1, 3, 7)
    grid_x = numpy.concatenate((grid_x_1, grid_x_2, grid_x_3, grid_x_4, grid_x_5), axis=None)
    grid_y_1 = numpy.linspace(0, 1.5, 16)
    grid_y_1 = grid_y_1[:-1]
    grid_y_2 = numpy.linspace(1.5, 2, 4)
    grid_y_2 = grid_y_2[:-1]
    grid_y_3 = numpy.linspace(2, 4, 7)
    grid_y = numpy.concatenate((grid_y_1, grid_y_2, grid_y_3), axis=None)
    grid = [grid_x, grid_y]

    topo, geom = mesh.rectilinear(grid)
    domain = topo.withboundary(inflow='left', wall='top,bottom', outflow='right') - \
        topo[18:20, :10].withboundary(flap='left,right,top')

    # Nutils namespace
    ns = function.Namespace()

    # time approximations
    # TR interpolation
    ns._functions['t'] = lambda f: theta * f + (1 - theta) * subs0(f)
    ns._functions_nargs['t'] = 1
    # 1st order FD
    ns._functions['δt'] = lambda f: (f - subs0(f)) / dt
    ns._functions_nargs['δt'] = 1
    # 2nd order FD
    ns._functions['tt'] = lambda f: (1.5 * f - 2 * subs0(f) + 0.5 * subs00(f)) / dt
    ns._functions_nargs['tt'] = 1
    # extrapolation for pressure
    ns._functions['tp'] = lambda f: (1.5 * f - 0.5 * subs0(f))
    ns._functions_nargs['tp'] = 1

    ns.nu = viscosity
    ns.rho = density
    ns.uin = inflow
    ns.x0 = geom  # reference geometry
    ns.dbasis = domain.basis('std', degree=1).vector(2)
    ns.d_i = 'dbasis_ni ?meshdofs_n'
    ns.umesh_i = 'dbasis_ni (1.5 ?meshdofs_n - 2 ?oldmeshdofs_n + 0.5 ?oldoldmeshdofs_n ) / ?dt'
    ns.x_i = 'x0_i + d_i'  # moving geometry
    ns.ubasis, ns.pbasis = function.chain([
        domain.basis('std', degree=2).vector(2),
        domain.basis('std', degree=1),
    ])
    ns.F_i = 'ubasis_ni ?F_n'  # stress field
    ns.urel_i = 'ubasis_ni ?lhs_n'  # relative velocity
    ns.u_i = 'umesh_i + urel_i'  # total velocity
    ns.p = 'pbasis_n ?lhs_n'  # pressure

    # initialization of dofs
    meshdofs = numpy.zeros(len(ns.dbasis))
    oldmeshdofs = meshdofs
    oldoldmeshdofs = meshdofs
    oldoldoldmeshdofs = meshdofs
    lhs0 = numpy.zeros(len(ns.ubasis))

    # for visualization
    bezier = domain.sample('bezier', 2)

    # preCICE setup
    configFileName = "../precice-config.xml"
    participantName = "Fluid"
    solverProcessIndex = 0
    solverProcessSize = 1
    interface = precice.Interface(participantName, configFileName, solverProcessIndex, solverProcessSize)

    # define coupling meshes
    meshName = "Fluid-Mesh"
    meshID = interface.get_mesh_id(meshName)
    couplinginterface = domain.boundary['flap']
    couplingsample = couplinginterface.sample('gauss', degree=2)  # mesh located at Gauss points
    dataIndices = interface.set_mesh_vertices(meshID, couplingsample.eval(ns.x0))

    # coupling data
    writeData = "Force"
    readData = "Displacement"
    writedataID = interface.get_data_id(writeData, meshID)
    readdataID = interface.get_data_id(readData, meshID)

    # initialize preCICE
    precice_dt = interface.initialize()
    dt = min(precice_dt, timestepsize)

    # boundary conditions for fluid equations
    sqr = domain.boundary['wall,flap'].integral('urel_k urel_k d:x0' @ ns, degree=4)
    cons = solver.optimize('lhs', sqr, droptol=1e-15)
    sqr = domain.boundary['inflow'].integral('((urel_0 - uin)^2 + urel_1^2) d:x0' @ ns, degree=4)
    cons = solver.optimize('lhs', sqr, droptol=1e-15, constrain=cons)

    # weak form fluid equations
    res = domain.integral('t(ubasis_ni,j (u_i,j + u_j,i) rho nu d:x)' @ ns, degree=4)
    res += domain.integral('(-ubasis_ni,j p δ_ij + pbasis_n u_k,k) d:x' @ ns, degree=4)
    res += domain.integral('rho ubasis_ni δt(u_i d:x)' @ ns, degree=4)
    res += domain.integral('rho ubasis_ni t(u_i,j urel_j d:x)' @ ns, degree=4)

    # weak form for force computation
    resF = domain.integral('(ubasis_ni,j (u_i,j + u_j,i) rho nu d:x)' @ ns, degree=4)
    resF += domain.integral('tp(-ubasis_ni,j p δ_ij d:x)' @ ns, degree=4)
    resF += domain.integral('pbasis_n u_k,k d:x' @ ns, degree=4)
    resF += domain.integral('rho ubasis_ni tt(u_i d:x)' @ ns, degree=4)
    resF += domain.integral('rho ubasis_ni (u_i,j urel_j d:x)' @ ns, degree=4)
    resF += couplinginterface.sample('gauss', 4).integral('ubasis_ni F_i d:x' @ ns)
    consF = numpy.isnan(solver.optimize('F',
                                        couplinginterface.sample('gauss', 4).integral('F_i F_i' @ ns),
                                        droptol=1e-10))

    # boundary conditions mesh displacements
    sqr = domain.boundary['inflow,outflow,wall'].integral('d_i d_i' @ ns, degree=2)
    meshcons0 = solver.optimize('meshdofs', sqr, droptol=1e-15)

    # weak form mesh displacements
    meshsqr = domain.integral('d_i,x0_j d_i,x0_j d:x0' @ ns, degree=2)

    # better initial guess: start from Stokes solution, comment out for comparison with other solvers
    # res_stokes = domain.integral('(ubasis_ni,j ((u_i,j + u_j,i) rho nu - p δ_ij) + pbasis_n u_k,k) d:x' @ ns, degree=4)
    # lhs0 = solver.solve_linear('lhs', res_stokes, constrain=cons, arguments=dict(meshdofs=meshdofs, oldmeshdofs=oldmeshdofs, oldoldmeshdofs=oldoldmeshdofs, oldoldoldmeshdofs=oldoldoldmeshdofs, dt=dt))
    lhs00 = lhs0

    timestep = 0
    t = 0

    while interface.is_coupling_ongoing():

        # read displacements from interface
        if interface.is_read_data_available():
            readdata = interface.read_block_vector_data(readdataID, dataIndices)
            coupledata = couplingsample.asfunction(readdata)
            sqr = couplingsample.integral(((ns.d - coupledata)**2).sum(0))
            meshcons = solver.optimize('meshdofs', sqr, droptol=1e-15, constrain=meshcons0)
            meshdofs = solver.optimize('meshdofs', meshsqr, constrain=meshcons)

        # save checkpoint
        if interface.is_action_required(precice.action_write_iteration_checkpoint()):
            lhs_checkpoint = lhs0
            lhs00_checkpoint = lhs00
            t_checkpoint = t
            timestep_checkpoint = timestep
            oldmeshdofs_checkpoint = oldmeshdofs
            oldoldmeshdofs_checkpoint = oldoldmeshdofs
            oldoldoldmeshdofs_checkpoint = oldoldoldmeshdofs
            interface.mark_action_fulfilled(precice.action_write_iteration_checkpoint())

        # solve fluid equations
        lhs1 = solver.newton('lhs', res, lhs0=lhs0, constrain=cons,
                             arguments=dict(lhs0=lhs0,
                                            dt=dt,
                                            meshdofs=meshdofs,
                                            oldmeshdofs=oldmeshdofs,
                                            oldoldmeshdofs=oldoldmeshdofs,
                                            oldoldoldmeshdofs=oldoldoldmeshdofs)).solve(tol=1e-6)

        # write forces to interface
        if interface.is_write_data_required(dt):
            F = solver.solve_linear('F', resF, constrain=consF,
                                    arguments=dict(lhs00=lhs00,
                                                   lhs0=lhs0,
                                                   lhs=lhs1,
                                                   dt=dt,
                                                   meshdofs=meshdofs,
                                                   oldmeshdofs=oldmeshdofs,
                                                   oldoldmeshdofs=oldoldmeshdofs,
                                                   oldoldoldmeshdofs=oldoldoldmeshdofs))
            # writedata = couplingsample.eval(ns.F, F=F)  # for stresses
            writedata = couplingsample.eval('F_i d:x' @ ns, F=F, meshdofs=meshdofs) * \
                numpy.concatenate([p.weights for p in couplingsample.points])[:, numpy.newaxis]
            interface.write_block_vector_data(writedataID, dataIndices, writedata)

        # do the coupling
        precice_dt = interface.advance(dt)
        dt = min(precice_dt, timestepsize)

        # advance variables
        timestep += 1
        t += dt
        lhs00 = lhs0
        lhs0 = lhs1
        oldoldoldmeshdofs = oldoldmeshdofs
        oldoldmeshdofs = oldmeshdofs
        oldmeshdofs = meshdofs

        # read checkpoint if required
        if interface.is_action_required(precice.action_read_iteration_checkpoint()):
            lhs0 = lhs_checkpoint
            lhs00 = lhs00_checkpoint
            t = t_checkpoint
            timestep = timestep_checkpoint
            oldmeshdofs = oldmeshdofs_checkpoint
            oldoldmeshdofs = oldoldmeshdofs_checkpoint
            oldoldoldmeshdofs = oldoldoldmeshdofs_checkpoint
            interface.mark_action_fulfilled(precice.action_read_iteration_checkpoint())

        if interface.is_time_window_complete():
            x, u, p = bezier.eval(['x_i', 'u_i', 'p'] @ ns,
                                  lhs=lhs1,
                                  meshdofs=meshdofs,
                                  oldmeshdofs=oldmeshdofs,
                                  oldoldmeshdofs=oldoldmeshdofs,
                                  oldoldoldmeshdofs=oldoldoldmeshdofs,
                                  dt=dt)
            with treelog.add(treelog.DataLog()):
                export.vtk('Fluid_' + str(timestep), bezier.tri, x, u=u, p=p)

    interface.finalize()
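The heat-conduction and fracture participants below follow the same preCICE control flow as the fluid solver above. For orientation, a stripped-down sketch of that loop using the preCICE v2 Python bindings called throughout this section; the participant, mesh, and data names are placeholders, and a matching precice-config.xml plus a second participant are needed for it to actually run.

import numpy
import precice

interface = precice.Interface("Participant", "precice-config.xml", 0, 1)
mesh_id = interface.get_mesh_id("Participant-Mesh")
coords = numpy.stack([numpy.linspace(0.0, 1.0, 4), numpy.ones(4)], axis=1)
vertex_ids = interface.set_mesh_vertices(mesh_id, coords)
read_id = interface.get_data_id("Temperature", mesh_id)
write_id = interface.get_data_id("Heat-Flux", mesh_id)

precice_dt = interface.initialize()
dt = 0.01
state = numpy.zeros(len(vertex_ids))  # stands in for the solver's degrees of freedom

while interface.is_coupling_ongoing():
    # save a checkpoint so the time window can be repeated if the coupling has not converged
    if interface.is_action_required(precice.action_write_iteration_checkpoint()):
        checkpoint = state
        interface.mark_action_fulfilled(precice.action_write_iteration_checkpoint())
    # read coupling data provided by the other participant
    if interface.is_read_data_available():
        coupling_values = interface.read_block_scalar_data(read_id, vertex_ids)
    # solve one timestep with the smaller of the solver and coupling timestep sizes
    dt = min(dt, precice_dt)
    state = state + dt  # placeholder for the actual solve
    # write coupling data for the other participant
    if interface.is_write_data_required(dt):
        interface.write_block_scalar_data(write_id, vertex_ids, state)
    precice_dt = interface.advance(dt)
    # roll back if the coupling iteration has to be repeated, otherwise keep the new state
    if interface.is_action_required(precice.action_read_iteration_checkpoint()):
        state = checkpoint
        interface.mark_action_fulfilled(precice.action_read_iteration_checkpoint())

interface.finalize()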
def main(side='Dirichlet'):

    print("Running nutils")

    # domain size
    y_bottom, y_top = 0, 1
    x_left, x_right = 0, 2
    x_coupling = 1  # x coordinate of coupling interface

    n = 10  # number of mesh vertices per dimension

    if side == 'Dirichlet':
        x_grid = np.linspace(x_left, x_coupling, n)
    elif side == 'Neumann':
        x_grid = np.linspace(x_coupling, x_right, n)
    else:
        raise Exception('invalid side {!r}'.format(side))

    y_grid = np.linspace(y_bottom, y_top, n)

    # define the Nutils mesh
    domain, geom = nutils.mesh.rectilinear([x_grid, y_grid])

    # Nutils namespace
    ns = nutils.function.Namespace()
    ns.x = geom

    degree = 1  # linear finite elements
    ns.basis = domain.basis('std', degree=degree)
    ns.alpha = 3  # parameter of problem
    ns.beta = 1.3  # parameter of problem
    ns.u = 'basis_n ?lhs_n'  # solution
    ns.dudt = 'basis_n (?lhs_n - ?lhs0_n) / ?dt'  # time derivative
    ns.flux = 'basis_n ?fluxdofs_n'  # heat flux
    ns.f = 'beta - 2 - 2 alpha'  # rhs
    ns.uexact = '1 + x_0 x_0 + alpha x_1 x_1 + beta ?t'  # analytical solution

    # define the weak form
    res0 = domain.integral('(basis_n dudt - basis_n f + basis_n,i u_,i) d:x' @ ns, degree=degree * 2)

    # set boundary conditions at non-coupling boundaries
    # top and bottom boundary are non-coupling for both sides
    sqr0 = domain.boundary['top'].integral('(u - 1 - x_0 x_0 - alpha - beta ?t)^2 d:x' @ ns, degree=degree * 2)
    sqr0 += domain.boundary['bottom'].integral('(u - 1 - x_0 x_0 - beta ?t)^2 d:x' @ ns, degree=degree * 2)
    if side == 'Dirichlet':  # left boundary is non-coupling
        sqr0 += domain.boundary['left'].integral('(u - 1 - alpha x_1 x_1 - beta ?t)^2 d:x' @ ns, degree=degree * 2)
    elif side == 'Neumann':  # right boundary is non-coupling
        sqr0 += domain.boundary['right'].integral('(u - 1 - x_0 x_0 - alpha x_1 x_1 - beta ?t)^2 d:x' @ ns, degree=degree * 2)

    # preCICE setup
    interface = precice.Interface(side, "../precice-config.xml", 0, 1)

    # define coupling mesh
    mesh_name = side + "-Mesh"
    mesh_id = interface.get_mesh_id(mesh_name)
    coupling_boundary = domain.boundary['right' if side == 'Dirichlet' else 'left']
    coupling_sample = coupling_boundary.sample('gauss', degree=degree * 2)
    vertices = coupling_sample.eval(ns.x)
    vertex_ids = interface.set_mesh_vertices(mesh_id, vertices)

    # coupling data
    write_data = "Temperature" if side == "Neumann" else "Heat-Flux"
    read_data = "Heat-Flux" if side == "Neumann" else "Temperature"
    write_data_id = interface.get_data_id(write_data, mesh_id)
    read_data_id = interface.get_data_id(read_data, mesh_id)

    # helper functions to project heat flux to coupling boundary
    projection_matrix = coupling_boundary.integrate(ns.eval_nm('basis_n basis_m d:x'), degree=degree * 2)
    projection_cons = np.zeros(res0.shape)
    projection_cons[projection_matrix.rowsupp(1e-15)] = np.nan

    def fluxdofs(v):
        return projection_matrix.solve(v, constrain=projection_cons)

    # helper data structure to apply heat flux correctly
    dx_function = 'd:x' @ ns

    precice_dt = interface.initialize()

    # write initial data
    if interface.is_action_required(precice.action_write_initial_data()):
        write_data = np.zeros(len(vertex_ids))
        interface.write_block_scalar_data(write_data_id, vertex_ids, write_data)
        interface.mark_action_fulfilled(precice.action_write_initial_data())

    interface.initialize_data()

    t = 0

    # initial condition
    sqr = domain.integral('(u - uexact)^2' @ ns, degree=degree * 2)
    lhs0 = nutils.solver.optimize('lhs', sqr, droptol=1e-15, arguments=dict(t=t))
    bezier = domain.sample('bezier', degree * 2)
    x, u, uexact = bezier.eval(['x_i', 'u', 'uexact'] @ ns, lhs=lhs0, t=t)
    with treelog.add(treelog.DataLog()):
        nutils.export.vtk(side + '-0', bezier.tri, x, Temperature=u, reference=uexact)

    t += precice_dt
    timestep = 0
    dt = 0.1

    while interface.is_coupling_ongoing():

        # update (time-dependent) boundary condition
        cons0 = nutils.solver.optimize('lhs', sqr0, droptol=1e-15, arguments=dict(t=t))

        # read data from interface
        if interface.is_read_data_available():
            read_data = interface.read_block_scalar_data(read_data_id, vertex_ids)
            read_function = coupling_sample.asfunction(read_data)

            if side == 'Dirichlet':
                sqr = coupling_sample.integral((ns.u - read_function)**2)
                cons = nutils.solver.optimize('lhs', sqr, droptol=1e-15, constrain=cons0, arguments=dict(t=t))
                res = res0
            else:
                cons = cons0
                res = res0 + coupling_sample.integral(ns.basis * read_function * dx_function)

        # save checkpoint
        if interface.is_action_required(precice.action_write_iteration_checkpoint()):
            lhs_checkpoint = lhs0
            t_checkpoint = t
            timestep_checkpoint = timestep
            interface.mark_action_fulfilled(precice.action_write_iteration_checkpoint())

        # potentially adjust non-matching timestep sizes
        dt = min(dt, precice_dt)

        # solve nutils timestep
        lhs = nutils.solver.solve_linear('lhs', res, constrain=cons, arguments=dict(lhs0=lhs0, dt=dt, t=t))

        # write data to interface
        if interface.is_write_data_required(dt):
            if side == 'Dirichlet':
                flux_function = res.eval(lhs0=lhs0, lhs=lhs, dt=dt, t=t)
                write_data = coupling_sample.eval('flux' @ ns, fluxdofs=fluxdofs(flux_function))
            else:
                write_data = coupling_sample.eval('u' @ ns, lhs=lhs)
            interface.write_block_scalar_data(write_data_id, vertex_ids, write_data)

        # do the coupling
        precice_dt = interface.advance(dt)

        # advance variables
        t += dt
        timestep += 1
        lhs0 = lhs

        # read checkpoint if required
        if interface.is_action_required(precice.action_read_iteration_checkpoint()):
            lhs0 = lhs_checkpoint
            t = t_checkpoint
            timestep = timestep_checkpoint
            interface.mark_action_fulfilled(precice.action_read_iteration_checkpoint())
        else:  # go to next timestep
            bezier = domain.sample('bezier', degree * 2)
            x, u, uexact = bezier.eval(['x_i', 'u', 'uexact'] @ ns, lhs=lhs, t=t)
            with treelog.add(treelog.DataLog()):
                nutils.export.vtk(side + "-" + str(timestep), bezier.tri, x, Temperature=u, reference=uexact)

    interface.finalize()
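A minimal sketch of how this Dirichlet/Neumann participant might be launched, assuming the script exposes `main` through `nutils.cli` as the nutils examples do; the file name heat.py is a placeholder.

import nutils.cli

if __name__ == '__main__':
    # the two participants run as separate processes, e.g.
    #   python heat.py side=Dirichlet
    #   python heat.py side=Neumann
    nutils.cli.run(main)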
def main(): print("Running utils") # define the Nutils mesh grid = [ np.linspace(a, b, round((b - a) / size) + 1) for (a, b, size) in [(0, 1, 0.05), (-.25, 0, 0.05)] ] domain, geom = nutils.mesh.rectilinear(grid) # Nutils namespace ns = nutils.function.Namespace() ns.x = geom ns.basis = domain.basis('std', degree=1) # linear finite elements ns.u = 'basis_n ?lhs_n' # solution ns.dudt = 'basis_n (?lhs_n - ?lhs0_n) / ?dt' # time derivative ns.flux = 'basis_n ?fluxdofs_n' # heat flux ns.k = 100 # thermal diffusivity ns.uwall = 310 # wall temperature # define the weak form res = domain.integral('(basis_n dudt + k basis_n,i u_,i) d:x' @ ns, degree=2) # define Dirichlet boundary condition sqr = domain.boundary['bottom'].integral('(u - uwall)^2 d:x' @ ns, degree=2) cons = nutils.solver.optimize('lhs', sqr, droptol=1e-15) # preCICE setup interface = precice.Interface("Solid", "../precice-config.xml", 0, 1) # define coupling mesh mesh_name = "Solid-Mesh" mesh_id = interface.get_mesh_id(mesh_name) coupling_boundary = domain.boundary['top'] coupling_sample = coupling_boundary.sample( 'gauss', degree=2) # mesh vertices at Gauss points vertices = coupling_sample.eval(ns.x) vertex_ids = interface.set_mesh_vertices(mesh_id, vertices) # coupling data flux_id = interface.get_data_id("Heat-Flux", mesh_id) temperature_id = interface.get_data_id("Temperature", mesh_id) # helper functions to project heat flux to coupling boundary projection_matrix = coupling_boundary.integrate( ns.eval_nm('basis_n basis_m d:x'), degree=2) projection_cons = np.zeros(res.shape) projection_cons[projection_matrix.rowsupp(1e-15)] = np.nan def fluxdofs(v): return projection_matrix.solve(v, constrain=projection_cons) precice_dt = interface.initialize() cons0 = cons # to not lose the Dirichlet BC at the bottom lhs0 = np.zeros(res.shape) # solution from previous timestep timestep = 0 dt = 0.01 # set u = uwall as initial condition and visualize sqr = domain.integral('(u - uwall)^2' @ ns, degree=2) lhs0 = nutils.solver.optimize('lhs', sqr) bezier = domain.sample('bezier', 2) x, u = bezier.eval(['x_i', 'u'] @ ns, lhs=lhs0) with treelog.add(treelog.DataLog()): nutils.export.vtk('Solid_0', bezier.tri, x, T=u) while interface.is_coupling_ongoing(): # read temperature from interface if interface.is_read_data_available(): temperature_values = interface.read_block_scalar_data( temperature_id, vertex_ids) temperature_function = coupling_sample.asfunction( temperature_values) sqr = coupling_sample.integral((ns.u - temperature_function)**2) cons = nutils.solver.optimize('lhs', sqr, droptol=1e-15, constrain=cons0) # save checkpoint if interface.is_action_required( precice.action_write_iteration_checkpoint()): lhs_checkpoint = lhs0 timestep_checkpoint = timestep interface.mark_action_fulfilled( precice.action_write_iteration_checkpoint()) # potentially adjust non-matching timestep sizes dt = min(dt, precice_dt) # solve nutils timestep lhs = nutils.solver.solve_linear('lhs', res, constrain=cons, arguments=dict(lhs0=lhs0, dt=dt)) # write heat fluxes to interface if interface.is_write_data_required(dt): flux_function = res.eval(lhs0=lhs0, lhs=lhs, dt=dt) flux_values = coupling_sample.eval( '-flux' @ ns, fluxdofs=fluxdofs(flux_function)) interface.write_block_scalar_data(flux_id, vertex_ids, flux_values) # do the coupling precice_dt = interface.advance(dt) # advance variables timestep += 1 lhs0 = lhs # read checkpoint if required if interface.is_action_required( precice.action_read_iteration_checkpoint()): lhs0 = lhs_checkpoint timestep = 
timestep_checkpoint interface.mark_action_fulfilled( precice.action_read_iteration_checkpoint()) else: # go to next timestep if timestep % 20 == 0: # visualize bezier = domain.sample('bezier', 2) x, u = bezier.eval(['x_i', 'u'] @ ns, lhs=lhs) with treelog.add(treelog.DataLog()): nutils.export.vtk('Solid_' + str(timestep), bezier.tri, x, T=u) interface.finalize()
def main(elemsize: 'mesh width in x and y direction' = 0.05,
         btype: 'type of basis function (std/spline)' = 'std',
         degree: 'polynomial degree' = 1,
         dt=.01):

    print("Running nutils")

    # the mesh
    grid = [numpy.linspace(a, b, round((b - a) / size) + 1)
            for (a, b, size) in [(0, 1, elemsize), (-.25, 0, elemsize), (0, .05, .05)]]
    domain, geom = nutils.mesh.rectilinear(grid, periodic=[2])

    # nutils namespace
    ns = nutils.function.Namespace()
    ns.x = geom
    ns.basis = domain.basis(btype, degree=degree)
    ns.u = 'basis_n ?lhs_n'  # solution
    ns.dudt = 'basis_n (?lhs_n - ?lhs0_n) / ?dt'
    ns.flux = 'basis_n ?fluxdofs_n'
    ns.k = 100  # thermal diffusivity
    ns.uwall = 310  # wall temperature

    # the weak form
    res = domain.integral('(basis_n dudt + k basis_n,i u_,i) d:x' @ ns, degree=degree * 2)

    # Dirichlet boundary condition
    sqr = domain.boundary['bottom'].integral('(u - uwall)^2 d:x' @ ns, degree=degree * 2)
    cons = nutils.solver.optimize('lhs', sqr, droptol=1e-15)

    # preCICE setup
    configFileName = "../precice-config.xml"
    participantName = "Nutils"
    solverProcessIndex = 0
    solverProcessSize = 1
    interface = precice.Interface(participantName, configFileName, solverProcessIndex, solverProcessSize)

    # define coupling meshes
    meshNameGP = "Nutils-Mesh-GP"  # Gauss points
    meshNameCC = "Nutils-Mesh-CC"  # cell centers (potentially sub-sampled)
    meshIDGP = interface.get_mesh_id(meshNameGP)
    meshIDCC = interface.get_mesh_id(meshNameCC)
    couplinginterface = domain.boundary['top']
    couplingsampleGP = couplinginterface.sample('gauss', degree=degree * 2)
    couplingsampleCC = couplinginterface.sample('uniform', 4)  # number of sub-samples for better mapping
    verticesGP = couplingsampleGP.eval(ns.x)
    verticesCC = couplingsampleCC.eval(ns.x)
    dataIndicesGP = interface.set_mesh_vertices(meshIDGP, verticesGP)
    dataIndicesCC = interface.set_mesh_vertices(meshIDCC, verticesCC)

    # coupling data
    writeData = "Heat-Flux"
    readData = "Temperature"
    writedataID = interface.get_data_id(writeData, meshIDCC)
    readdataID = interface.get_data_id(readData, meshIDGP)

    # heat flux computation
    projectionmatrix = couplinginterface.integrate(ns.eval_nm('basis_n basis_m d:x'), degree=degree * 2)
    projectioncons = numpy.zeros(res.shape)
    projectioncons[projectionmatrix.rowsupp(1e-15)] = numpy.nan
    fluxdofs = lambda v: projectionmatrix.solve(v, constrain=projectioncons)

    precice_dt = interface.initialize()

    cons0 = cons  # to not lose the Dirichlet BC at the bottom
    lhs0 = numpy.zeros(res.shape)
    timestep = 1

    # project initial condition and visualize
    sqr = domain.integral('(u - uwall)^2' @ ns, degree=degree * 2)
    lhs0 = nutils.solver.optimize('lhs', sqr)
    bezier = domain.sample('bezier', 2)
    x, u = bezier.eval(['x_i', 'u'] @ ns, lhs=lhs0)
    with treelog.add(treelog.DataLog()):
        nutils.export.vtk('Solid_0', bezier.tri, x, T=u)

    while interface.is_coupling_ongoing():

        # read temperature from interface
        if interface.is_read_data_available():
            readdata = interface.read_block_scalar_data(readdataID, dataIndicesGP)
            coupledata = couplingsampleGP.asfunction(readdata)
            sqr = couplingsampleGP.integral((ns.u - coupledata)**2)
            cons = nutils.solver.optimize('lhs', sqr, droptol=1e-15, constrain=cons0)

        # save checkpoint
        if interface.is_action_required(precice.action_write_iteration_checkpoint()):
            lhscheckpoint = lhs0
            interface.mark_action_fulfilled(precice.action_write_iteration_checkpoint())

        # potentially adjust non-matching timestep sizes
        dt = min(dt, precice_dt)

        # solve nutils timestep
        lhs = nutils.solver.solve_linear('lhs', res, constrain=cons, arguments=dict(lhs0=lhs0, dt=dt))

        # write heat fluxes to interface
        if interface.is_write_data_required(dt):
            fluxvalues = res.eval(lhs0=lhs0, lhs=lhs, dt=dt)
            writedata = couplingsampleCC.eval('-flux' @ ns, fluxdofs=fluxdofs(fluxvalues))
            interface.write_block_scalar_data(writedataID, dataIndicesCC, writedata)

        # do the coupling
        precice_dt = interface.advance(dt)

        # read checkpoint if required
        if interface.is_action_required(precice.action_read_iteration_checkpoint()):
            interface.mark_action_fulfilled(precice.action_read_iteration_checkpoint())
            lhs0 = lhscheckpoint
        else:  # go to next timestep and visualize
            bezier = domain.sample('bezier', 2)
            x, u = bezier.eval(['x_i', 'u'] @ ns, lhs=lhs0)
            with treelog.add(treelog.DataLog()):
                if timestep % 20 == 0:
                    nutils.export.vtk('Solid_' + str(timestep), bezier.tri, x, T=u)
            timestep += 1
            lhs0 = lhs

    interface.finalize()
def main(X: unit['m'], Y: unit['m'], l0: unit['m'], degree: int, du: unit['m']):
    '''
    Mechanical test case

    .. arguments::

       X [0.5mm]
         Domain size in x direction.
       Y [0.04mm]
         Domain size in y direction.
       l0 [0.015mm]
         Characteristic length scale.
       degree [1]
         Polynomial degree of the approximation.
       du [0.01mm]
         Applied displacement.
    '''

    assert degree > 0

    # create the mesh
    topo, geom = mesh.rectilinear([
        numpy.linspace(0.001, 0.001 + X, 31),
        numpy.linspace(0.001, 0.001 + Y, 11)
    ])

    # prepare the integration and post processing samples
    ipoints = topo.sample('gauss', 2 * degree)
    bezier = topo.sample('bezier', 2 * degree)

    # initialize the namespace
    ns = function.Namespace()
    ns.x = geom
    ns.ubasis = topo.basis('th-spline', degree=degree).vector(topo.ndims)
    ns.dbasis = topo.basis('th-spline', degree=degree)
    ns.Hbasis = ipoints.basis()
    ns.u_i = 'ubasis_ni ?solu_n'
    ns.d = 'dbasis_n ?sold_n'
    ns.H0 = 'Hbasis_n ?solH0_n'
    ns.l0 = l0
    ns.du = du

    # volume coupling fields
    ns.Gc = 'dbasis_n ?gcdofs_n'
    ns.lmbda = 'dbasis_n ?lmbdadofs_n'
    ns.mu = 'dbasis_n ?mudofs_n'

    # formulation
    ns.strain_ij = '( u_i,j + u_j,i ) / 2'
    ns.stress_ij = 'lmbda strain_kk δ_ij + 2 mu strain_ij'
    ns.psi = 'stress_ij strain_ij / 2'
    ns.H = function.max(ns.psi, ns.H0)
    ns.gamma = '( d^2 + l0^2 d_,i d_,i ) / (2 l0)'

    # boundary condition for displacement field
    sqru = topo.boundary['top'].integral('( u_i n_i - du )^2 d:x' @ ns, degree=degree * 2)
    sqru += topo.boundary['bottom'].integral('( u_i n_i )^2 d:x' @ ns, degree=degree * 2)
    sqru += topo.boundary['bottom'].boundary['left'].integral('u_i u_i d:x' @ ns, degree=degree * 2)
    consu = solver.optimize('solu', sqru, droptol=1e-12)

    # initialize the solution vectors
    solu = numpy.zeros(ns.ubasis.shape[0])
    sold = numpy.zeros(ns.dbasis.shape[0])
    solH0 = ipoints.eval(0.)

    # preCICE setup
    configFileName = "precice-config.xml"
    participantName = "BrittleFracture"
    solverProcessIndex = 0
    solverProcessSize = 1
    interface = precice.Interface(participantName, configFileName, solverProcessIndex, solverProcessSize)

    # define coupling mesh
    meshName = "BrittleFracture-Mesh"
    meshID = interface.get_mesh_id(meshName)
    couplingsample = topo.sample('gauss', degree=degree * 2)
    vertices = couplingsample.eval(ns.x)
    dataIndices = interface.set_mesh_vertices(meshID, vertices)

    lmbda = 121153.8e6  # First Lamé parameter in Pa
    mu = 80769.2e6  # Second Lamé parameter in Pa
    sqrl = couplingsample.integral((ns.lmbda - lmbda)**2)
    lmbdadofs = solver.optimize('lmbdadofs', sqrl, droptol=1e-12)
    sqrm = couplingsample.integral((ns.mu - mu)**2)
    mudofs = solver.optimize('mudofs', sqrm, droptol=1e-12)

    # coupling data
    gcID = interface.get_data_id("Gc", meshID)

    precice_dt = interface.initialize()  # pseudo timestep size handled by preCICE

    nstep = 10000  # very high number of steps, end of simulation is steered by preCICE instead

    # time loop
    with treelog.iter.fraction('step', range(nstep)) as counter:
        for istep in counter:

            if not interface.is_coupling_ongoing():
                break

            if interface.is_read_data_available():
                gc = interface.read_block_scalar_data(gcID, dataIndices)
                gc_function = couplingsample.asfunction(gc)
                sqrg = couplingsample.integral((ns.Gc - gc_function)**2)
                gcdofs = solver.optimize('gcdofs', sqrg, droptol=1e-12)

            ############################
            # Phase field problem      #
            ############################
            resd = ipoints.integral('( Gc / l0 ) ( d dbasis_n + l0^2 d_,i dbasis_n,i ) d:x' @ ns)
            resd += ipoints.integral('2 H ( d - 1 ) dbasis_n d:x' @ ns)
            sold = solver.solve_linear('sold', resd,
                                       arguments={'solu': solu,
                                                  'solH0': solH0,
                                                  'lmbdadofs': lmbdadofs,
                                                  'mudofs': mudofs,
                                                  'gcdofs': gcdofs})

            ############################
            # Elasticity problem       #
            ############################
            resu = topo.integral('( 1 - d )^2 ubasis_ni,j stress_ij d:x' @ ns, degree=2 * degree)
            solu = solver.solve_linear('solu', resu,
                                       arguments={'sold': sold,
                                                  'lmbdadofs': lmbdadofs,
                                                  'mudofs': mudofs},
                                       constrain=consu)

            # Update zero state and history field
            solH0 = ipoints.eval(ns.H, arguments={'solu': solu,
                                                  'solH0': solH0,
                                                  'lmbdadofs': lmbdadofs,
                                                  'mudofs': mudofs})

            # do the coupling
            precice_dt = interface.advance(precice_dt)

            ############################
            # Output                   #
            ############################

            # element-averaged history field
            transforms = ipoints.transforms[0]
            indicator = function.kronecker(
                1., axis=0, length=len(transforms),
                pos=function.TransformsIndexWithTail(transforms, function.TRANS).index)
            areas, integrals = ipoints.integrate(
                [indicator, indicator * ns.H],
                arguments={'solu': solu,
                           'solH0': solH0,
                           'lmbdadofs': lmbdadofs,
                           'mudofs': mudofs,
                           'gcdofs': gcdofs})
            H = indicator.dot(integrals / areas)

            # evaluate fields
            points, dvals, uvals, lvals, mvals, gcvals = bezier.eval(
                ['x_i', 'd', 'u_i', 'lmbda', 'mu', 'Gc'] @ ns,
                arguments={'solu': solu,
                           'sold': sold,
                           'solH0': solH0,
                           'lmbdadofs': lmbdadofs,
                           'mudofs': mudofs,
                           'gcdofs': gcdofs})
            Hvals = bezier.eval(H, arguments={'solu': solu, 'solH0': solH0})

            with treelog.add(treelog.DataLog()):
                export.vtk('Solid_' + str(istep), bezier.tri, points, Gc=gcvals, D=dvals, U=uvals, H=Hvals)

    interface.finalize()