def print_components_info(self, pre):
    """Print an overview of this coupled solver and the components it contains."""
    tools.print_info(pre, 'The coupled solver ', self.__class__.__name__,
                     ' has the following components:')
    tools.print_components_info(pre, self.components)
    # on restart, show a banner with the time step the calculation resumes from
    if self.restart:
        rule = 80 * '═'
        tools.print_info(f'{rule}\n\tRestart from time step {self.timestep_start_current}\n{rule}')
def load_results_data(self):
    """Load previously saved results for a restarted calculation.

    Reads '<case_name>_results.pickle' and restores the stored solution arrays,
    interfaces, iteration counts and residuals up to the restart time step.

    :return: the loaded results dictionary, or None if no results file exists
    :raises ValueError: if the debug setting does not match the saved data
    """
    results_file_name = f'{self.case_name}_results.pickle'
    try:
        with open(results_file_name, 'rb') as results_file:
            results_data = pickle.load(results_file)
    except FileNotFoundError:
        tools.print_info(f'Not able to append results to {results_file_name} because file not found\n'
                         f' Saving results to new file: {results_file_name}', layout='warning')
        return
    # bug fix: the original `self.debug != 'solution_r' in results_data.keys()` was a chained
    # comparison, equivalent to `(self.debug != 'solution_r') and ('solution_r' in ...)`; as
    # self.debug is a bool, it raised whenever 'solution_r' was saved, even in debug mode.
    # The intended check is that the debug flag matches the presence of the saved residual data.
    if self.debug != ('solution_r' in results_data):
        raise ValueError('Value of debug attribute in CoupledSolverGaussSeidel can not be changed upon restart')
    self.timestep_start_global = results_data['timestep_start']
    # keep only the data up to (and including) the restart time step
    num_steps = self.timestep_start_current - self.timestep_start_global
    self.complete_solution_x = results_data['solution_x'][:, :num_steps + 1]
    self.complete_solution_y = results_data['solution_y'][:, :num_steps + 1]
    self.x = results_data['interface_x']
    self.y = results_data['interface_y']
    self.iterations = results_data['iterations'][:num_steps]
    self.run_time_previous = results_data['run_time']
    self.residual = results_data['residual'][:num_steps]
    self.info = results_data.get('info', '') + f'{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} :' \
        f' restart calculation from time step {self.timestep_start_current} on {socket.gethostname()}\n'
    if self.debug:
        tools.print_info('Restart in debug mode may not append results to pickle file correctly', layout='warning')
        self.complete_solution_r = results_data['solution_r']
    return results_data
def initialize(self, model_part_from, model_part_to): super().initialize(model_part_from, model_part_to) # calculate coefficients iterable = [] cond = [] for i_to in range(self.n_to): nearest = self.nearest[i_to, :] iterable.append( (self.distances[i_to, :], self.coords_from[nearest, :], self.shape_parameter)) if self.parallel: processes = cpu_count() with Pool(processes=processes) as pool: # optimal chunksize automatically calculated out = pool.starmap(get_coeffs, iterable) self.coeffs = np.vstack(tuple(zip(*out))[0]) cond = list(zip(*out))[1] else: self.coeffs = np.zeros(self.nearest.shape) for i_to, tup in enumerate(iterable): out = get_coeffs(*tup) self.coeffs[i_to, :] = out[0].flatten() cond.append(out[1]) # check condition number cond = max(cond) if cond > 1e13: tools.print_info( f'The highest condition number of the interpolation matrices is {cond:.2e} > 1e13\n' f'Decrease the shape parameter to decrease the condition number', layout='warning')
def finalize(self):
    """Finalize this solver and all components; the top-level solver prints the summary banner."""
    super().finalize()
    # only the outermost solver (level 0) draws the summary header box
    if self.solver_level == 0:
        tools.print_info('╔' + 79 * '═' + '\n║\tSummary\n╠' + 79 * '═')
    self.print_summary()
    for component in self.components:
        component.finalize()
def print_components_info(self, pre):
    """Print the model-part pairs handled by each contained mapper in a tree layout."""
    tools.print_info(pre, "The component ", self.__class__.__name__, " maps the following model parts:")
    pre = tools.update_pre(pre)
    mapper_list = list(self.mappers.values())
    last = len(mapper_list) - 1
    for index, mapper in enumerate(mapper_list):
        name_from, name_to = mapper.model_part_names
        # the last entry closes the tree branch, so it uses different connectors
        if index == last:
            branch, child = '└─', ' └─'
        else:
            branch, child = '├─', '│ └─'
        tools.print_info(pre, f"{branch}ModelPart '{name_from}' to ModelPart '{name_to}' with the mapper:")
        mapper.print_components_info(pre + child)
def __init__(self, parameters):
    """Create the radial-basis-function mapper.

    :param parameters: parameter dictionary holding the mapper 'settings'; optional
        settings are 'parallel' (default False) and 'shape_parameter' (default 200)
    """
    super().__init__(parameters)
    self.coeffs = None  # interpolation coefficients, computed in initialize()
    # check and store settings — dict.get replaces the verbose 'key in settings' conditionals
    self.parallel = self.settings.get('parallel', False)
    self.shape_parameter = self.settings.get('shape_parameter', 200)
    if self.shape_parameter < 2:
        tools.print_info(f'Shape parameter is {self.shape_parameter} < 2\n', layout='warning')
    # determine number of nearest neighbours: 81 for 3D interpolation, 9 for 2D
    self.n_nearest = 81 if len(self.directions) == 3 else 9
def initialize(self): super().initialize() # initialize mappers if required if self.index_mapped is not None: self.solver_wrappers[self.index_other].initialize() interface_input_from = self.solver_wrappers[self.index_other].get_interface_output() interface_output_to = self.solver_wrappers[self.index_other].get_interface_input() self.solver_wrappers[self.index_mapped].initialize(interface_input_from, interface_output_to) else: self.solver_wrappers[0].initialize() self.solver_wrappers[1].initialize() self.x = self.solver_wrappers[1].get_interface_output().copy() self.y = self.solver_wrappers[0].get_interface_output().copy() self.convergence_criterion.initialize() self.predictor.initialize(self.x) self.start_time = time.time() self.init_time = self.start_time - self.init_time title = '╔' + 78 * '═' + f'╗\n║{self.case_name.upper():^78}║\n╚' + 78 * '═' + '╝\n' tools.print_info(title) # restart if self.restart: if not (self.x.has_same_model_parts(self.restart_data['interface_x']) and self.y.has_same_model_parts(self.restart_data['interface_y'])): raise ValueError('Restart not possible because model parts changed') self.predictor = self.restart_data['predictor'] self.components[0] = self.predictor # update save results if self.save_results: results_data = None if self.restart: results_data = self.load_results_data() if results_data is None: # no results file to append to self.info = f'{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} : ' \ f'start calculation of time step {self.timestep_start_current} on {socket.gethostname()}\n' if self.debug: self.complete_solution_x = np.empty((self.x.get_interface_data().shape[0], 0)) self.complete_solution_y = np.empty((self.y.get_interface_data().shape[0], 0)) self.complete_solution_r = np.empty((self.x.get_interface_data().shape[0], 0)) else: self.complete_solution_x = self.x.get_interface_data().reshape(-1, 1) self.complete_solution_y = self.y.get_interface_data().reshape(-1, 1)
def filter(self):
    """Filter the secant information of the current and reused previous time steps.

    The combined matrix v holds the current columns first, followed by the reused
    columns of previous time steps. Columns that make the QR decomposition of v
    ill-conditioned are removed, together with the corresponding columns of w, and
    the total number of columns is limited to the number of rows.
    """
    v = np.hstack((self.vcurr, np.hstack(self.vprev)))
    if not v.shape[1]:
        raise RuntimeError('No information to filter')
    # remove columns resulting in small diagonal elements in R
    singular = True
    while singular and v.shape[1]:
        rr = np.linalg.qr(v, mode='r')
        diag = np.diagonal(rr)
        m = min(abs(diag))
        if m < self.min_significant:
            i = np.argmin(abs(diag))
            tools.print_info(f'Removing column {i}: {m} < min_significant', layout='warning')
            if i < self.vcurr.shape[1]:
                # column belongs to the current time step
                self.vcurr = np.delete(self.vcurr, i, 1)
                self.wcurr = np.delete(self.wcurr, i, 1)
            else:
                # column belongs to a previous time step: find the block j that
                # contains global column index i
                num_columns = self.vcurr.shape[1]
                j = -1
                while i >= num_columns:
                    j += 1
                    num_columns += self.vprev[j].shape[1]
                # step back to the start offset of block j, so that i - num_columns
                # is the column index within that block
                num_columns -= self.vprev[j].shape[1]
                self.vprev[j] = np.delete(self.vprev[j], i - num_columns, 1)
                self.wprev[j] = np.delete(self.wprev[j], i - num_columns, 1)
            v = np.hstack((self.vcurr, np.hstack(self.vprev)))
        else:
            singular = False
    # remove columns if number of columns exceeds number of rows
    while v.shape[0] < v.shape[1]:
        if self.vcurr.shape[0] < self.vcurr.shape[1]:
            # drop the last (oldest) column of the current time step
            self.vcurr = np.delete(self.vcurr, -1, 1)
            self.wcurr = np.delete(self.wcurr, -1, 1)
        else:
            # drop the last column of the oldest non-empty previous time step
            i = -1
            while self.vprev[i].shape[1] == 0:
                i -= 1
            self.vprev[i] = np.delete(self.vprev[i], -1, 1)
            self.wprev[i] = np.delete(self.wprev[i], -1, 1)
        v = np.hstack((self.vcurr, np.hstack(self.vprev)))
def initialize_solution_step(self): super().initialize_solution_step() # for parallel: create a folder with the correct time stamp for decomposition of pointDisplacement_Next # for serial: folder will normally be present, except for time 0: make a folder 0.0000 with specified precision timestamp = '{:.{}f}'.format(self.physical_time, self.time_precision) path = os.path.join(self.working_directory, timestamp) if self.cores > 1 or self.physical_time == 0: os.makedirs(path, exist_ok=True) # prepare new time step folder and reset the number of iterations self.timestep += 1 self.iteration = 0 self.physical_time += self.delta_t self.prev_timestamp = timestamp self.cur_timestamp = f'{self.physical_time:.{self.time_precision}f}' if not self.settings['parallel']: # if serial new_path = os.path.join(self.working_directory, self.cur_timestamp) if os.path.isdir(new_path): tools.print_info( f'Overwrite existing time step folder: {new_path}', layout='warning') check_call(f'rm -rf {new_path}', shell=True) check_call(f'mkdir -p {new_path}', shell=True) else: for i in np.arange(self.cores): new_path = os.path.join(self.working_directory, 'processor' + str(i), self.cur_timestamp) if os.path.isdir(new_path): if i == 0: tools.print_info( f'Overwrite existing time step folder: {new_path}', layout='warning') check_call(f'rm -rf {new_path}', shell=True) check_call(f'mkdir -p {new_path}', shell=True) self.send_message('next') self.wait_message('next_ready')
def filter(self):
    """Filter the secant information of the current time step.

    Columns of v that make the QR decomposition ill-conditioned are removed together
    with the corresponding columns of w, and the number of columns is limited to the
    number of rows.

    :raises RuntimeError: if v contains no columns
    """
    if self.v.shape[1] == 0:
        raise RuntimeError('No information to filter')
    # remove columns resulting in small diagonal elements in R
    singular = True
    while singular and self.v.shape[1]:
        rr = np.linalg.qr(self.v, mode='r')
        diag = np.diagonal(rr)
        m = min(abs(diag))
        if m < self.min_significant:
            i = np.argmin(abs(diag))
            # fixed message: attribute is named min_significant (was 'minsignificant'),
            # consistent with the warning in the multi-time-step filter
            tools.print_info(f'Removing column {i}: {m} < min_significant', layout='warning')
            self.v = np.delete(self.v, i, 1)
            self.w = np.delete(self.w, i, 1)
        else:
            singular = False
    # remove columns if number of columns exceeds number of rows
    # (while instead of if, consistent with the multi-time-step filter, so any excess is removed)
    while self.v.shape[0] < self.v.shape[1]:
        self.v = np.delete(self.v, -1, 1)
        self.w = np.delete(self.w, -1, 1)
def print_summary(self):
    """Print a summary of initialization time, run time distribution and iteration counts.

    Totals and the closing rule are printed only by the top-level solver (level 0);
    nested solvers print their own distribution with a deeper tree prefix.
    """
    solver_init_time_percs = []
    solver_run_time_percs = []
    pre = '║' + ' │' * self.solver_level  # tree prefix depends on nesting level
    out = ''
    if self.solver_level == 0:
        out += f'{pre}Total calculation time{" (after restart)" if self.restart else ""}:' \
               f' {self.init_time + self.run_time:.3f}s\n'
    # initialization time
    if self.solver_level == 0:
        out += f'{pre}Initialization time: {self.init_time:0.3f}s\n'
    out += f'{pre}Distribution of initialization time:\n'
    for solver in self.solver_wrappers:
        solver_init_time_percs.append(solver.init_time / self.init_time * 100)
        out += f'{pre}\t{solver.__class__.__name__}: {solver.init_time:.0f}s ({solver_init_time_percs[-1]:0.1f}%)\n'
        if solver.__class__.__name__ == 'SolverWrapperMapped':
            # also show the wrapped solver inside a mapped solver wrapper
            out += f'{pre}\t└─{solver.solver_wrapper.__class__.__name__}: {solver.solver_wrapper.init_time:.0f}s' \
                   f' ({solver.solver_wrapper.init_time / self.init_time * 100:0.1f}%)\n'
    if self.solver_level == 0:
        out += f'{pre}\tOther: {self.init_time - sum([s.init_time for s in self.solver_wrappers]):.0f}s' \
               f' ({100 - sum(solver_init_time_percs):0.1f}%)\n'
    # run time
    if self.solver_level == 0:
        out += f'{pre}Run time{" (after restart)" if self.restart else ""}: {self.run_time:0.3f}s\n'
    out += f'{pre}Distribution of run time:\n'
    for solver in self.solver_wrappers:
        solver_run_time_percs.append(solver.run_time / self.run_time * 100)
        out += f'{pre}\t{solver.__class__.__name__}: {solver.run_time:.0f}s ({solver_run_time_percs[-1]:0.1f}%)\n'
        if solver.__class__.__name__ == 'SolverWrapperMapped':
            out += f'{pre}\t└─{solver.solver_wrapper.__class__.__name__}: {solver.solver_wrapper.run_time:.0f}s' \
                   f' ({solver.solver_wrapper.run_time / self.run_time * 100:0.1f}%)\n'
    if self.solver_level == 0:
        out += f'{pre}\tCoupling: {self.run_time - sum([s.run_time for s in self.solver_wrappers]):.0f}s' \
               f' ({100 - sum(solver_run_time_percs):0.1f}%)\n'
    out += f'{pre}Average number of iterations per time step' \
           f'{" (including before restart)" if self.restart else ""}: {np.array(self.iterations).mean():0.2f}'
    if self.solver_level == 0:
        out += '\n╚' + 79 * '═'
    tools.print_info(out)
def print_components_info(self, pre):
    """Print this mapped solver wrapper with its input and output mappers in a tree layout."""
    tools.print_info(pre, 'The component ', self.__class__.__name__, ' maps the following solver wrapper:')
    pre = tools.update_pre(pre)
    self.solver_wrapper.print_components_info(pre + '├─')
    # input mapper keeps the branch open; the output mapper closes it
    for connector, label, child, mapper in (
            ('├─', 'Input mapper:', '│ └─', self.mapper_interface_input),
            ('└─', 'Output mapper:', ' └─', self.mapper_interface_output)):
        tools.print_info(pre, connector, label)
        mapper.print_components_info(pre + child)
def print_components_info(self, pre):
    """Print the mapped solver wrappers and the master solver wrapper in a tree layout."""
    tools.print_info(pre, "The component ", self.__class__.__name__, " combines the following solver wrappers:")
    pre = tools.update_pre(pre)
    tools.print_info(pre, '├─', "Mapped solver wrappers:")
    for sol_wrapper in self.mapped_solver_wrapper_list[:-1]:
        sol_wrapper.print_components_info(pre + '│ ├─')
    # bug fix: the closing entry was taken from self.solver_wrapper_list instead of
    # self.mapped_solver_wrapper_list, the list actually being printed here
    self.mapped_solver_wrapper_list[-1].print_components_info(pre + '│ └─')
    tools.print_info(pre, '└─', "Master solver wrapper:")
    self.master_solver_wrapper.print_components_info(pre + ' └─')
def initialize(self):
    """Initialize the tested solver wrapper and the optional dummy-solver test class.

    When no test class is given, zero-valued input is used for every interface
    variable; otherwise the class is loaded from dummy_solver.py in the current
    working directory and used to generate the inputs.
    """
    Component.initialize(self)
    self.solver_wrapper.initialize()
    # initialize test_class
    interface_input = self.solver_wrapper.interface_input
    if self.test_class is None:
        self.dummy_solver = None
        tools.print_info('No test class specified, zero input will be used')
        # report the zero value used for each interface variable (scalar or 3D)
        for model_part_name, variable in interface_input.model_part_variable_pairs:
            if data_structure.variables_dimensions[variable] == 1:
                tools.print_info(f'\t0 is used as {variable} input to {model_part_name}')
            elif data_structure.variables_dimensions[variable] == 3:
                tools.print_info(f'\t[0 0 0] is used as {variable} input to {model_part_name}')
    else:
        # load the test class from dummy_solver.py in the current working directory
        if not os.path.isfile('dummy_solver.py'):
            raise ModuleNotFoundError(f'Test class specified, but no file named dummy_solver.py in {os.getcwd()}')
        module = tools.import_module('dummy_solver', 'dummy_solver.py')
        if not hasattr(module, self.test_class):
            raise NameError(f'Module dummy_solver has no class {self.test_class}')
        self.dummy_solver = getattr(module, self.test_class)()
        tools.print_info(f'The functions from {self.test_class} will be used to calculate the following inputs:')
        for model_part_name, variable in interface_input.model_part_variable_pairs:
            if data_structure.variables_dimensions[variable] == 1:
                tools.print_info(f'\t{variable} [Scalar] on {model_part_name}')
            elif data_structure.variables_dimensions[variable] == 3:
                tools.print_info(f'\t{variable} [3D array] on {model_part_name}')
    tools.print_info()
    # initialize variables: the roles of interface input/output depend on which solver is tested
    if self.solver_index == 1:
        self.x = self.solver_wrapper.get_interface_output()
        self.y = self.solver_wrapper.get_interface_input()
    else:
        self.x = self.solver_wrapper.get_interface_input()
        self.y = self.solver_wrapper.get_interface_output()
    if self.save_results:
        self.complete_solution_x = self.x.get_interface_data().reshape(-1, 1)
        self.complete_solution_y = self.y.get_interface_data().reshape(-1, 1)
    self.start_time = time.time()
def solve_solution_step(self, interface_input):
    """Run one Abaqus structural calculation for the given interface loads.

    Writes the incoming loads to file, launches Abaqus (retrying on licensing
    errors), extracts the computed surface displacements and stores them on the
    output interface.

    :param interface_input: interface holding the loads for this coupling iteration
    :return: the output interface with the computed displacements
    :raises RuntimeError: if Abaqus fails for a reason other than licensing
    """
    self.iteration += 1
    # store incoming loads
    self.interface_input.set_interface_data(interface_input.get_interface_data())
    # write loads (from interface data to a file that will be read by USR.f)
    self.write_loads()
    # copy input data for debugging
    if self.debug:
        for dct in self.interface_input.parameters:
            mp_name = dct['model_part']
            mp_id = self.model_part_surface_ids[mp_name]
            file_name1 = join(self.dir_csm, f'CSM_Time{self.timestep}Surface{mp_id}Cpu0Input.dat')
            file_name2 = join(self.dir_csm, f'CSM_Time{self.timestep}Surface{mp_id}Cpu0Input'
                                            f'_Iter{self.iteration}.dat')
            shutil.copy2(file_name1, file_name2)
    # run Abaqus and check for (licensing) errors; retry with a one-minute delay
    self.print_log(f'\n### Time step {self.timestep}, iteration {self.iteration} ###')
    bool_completed = False
    attempt = 0
    while not bool_completed and attempt < 10000:
        attempt += 1
        if attempt > 1:
            tools.print_info(f'Warning attempt {attempt - 1} to run Abaqus failed, new attempt in one minute',
                             layout='warning')
            time.sleep(60)
            tools.print_info(f'Starting attempt {attempt}')
        # first time step starts from the initial input file; later steps restart from the previous job
        if self.timestep == 1:
            cmd = f'abaqus job=CSM_Time{self.timestep} input=CSM_Time{self.timestep - 1} ' \
                  f'cpus={self.cores} output_precision=full interactive >> {self.logfile} 2>&1'
            subprocess.run(cmd, shell=True, cwd=self.dir_csm, executable='/bin/bash', env=self.env)
        else:
            cmd = f'abaqus job=CSM_Time{self.timestep} oldjob=CSM_Time{self.timestep - 1} input=CSM_Restart ' \
                  f'cpus={self.cores} output_precision=full interactive >> {self.logfile} 2>&1'
            subprocess.run(cmd, shell=True, cwd=self.dir_csm, executable='/bin/bash', env=self.env)
        # check log for completion and or errors (only the last 10 lines are inspected)
        subprocess.run(f'tail -n 10 {self.logfile} > Temp_log.coco',
                       shell=True, cwd=self.dir_csm, executable='/bin/bash', env=self.env)
        log_tmp = os.path.join(self.dir_csm, 'Temp_log.coco')
        bool_lic = True
        with open(log_tmp, 'r') as fp:
            for line in fp:
                if any(x in line for x in ['Licensing error', 'license error', 'Error checking out Abaqus license']):
                    bool_lic = False
        if not bool_lic:
            tools.print_info('Abaqus licensing error', layout='fail')
        elif 'COMPLETED' in line:  # check final line for completed
            bool_completed = True
        elif bool_lic:  # final line did not contain 'COMPLETED' but also no licensing error detected
            raise RuntimeError(f'Abaqus did not complete, unclassified error, see {self.logfile} for extra '
                               f'information')
    # append additional information to log file
    cmd = f'tail -n 23 CSM_Time{self.timestep}.msg | head -n 15 | sed -e \'s/^[ \\t]*//\' ' \
          f'>> {self.logfile} 2>&1'
    subprocess.run(cmd, shell=True, cwd=self.dir_csm, executable='/bin/bash', env=self.env)
    # write Abaqus output
    cmd = f'abaqus ./GetOutput.exe CSM_Time{self.timestep} 1 >> {self.logfile} 2>&1'
    subprocess.run(cmd, shell=True, cwd=self.dir_csm, executable='/bin/bash', env=self.env)
    # read Abaqus output data
    for dct in self.interface_output.parameters:
        mp_name = dct['model_part']
        # read in output file for surface nodes
        mp_id = self.model_part_surface_ids[mp_name]
        file_name = join(self.dir_csm, f'CSM_Time{self.timestep}Surface{mp_id}Output.dat')
        data = np.loadtxt(file_name, skiprows=1)
        # copy output data for debugging
        if self.debug:
            file_name2 = join(self.dir_csm, f'CSM_Time{self.timestep}Surface{mp_id}Output_Iter{self.iteration}.dat')
            shutil.copy(file_name, file_name2)
        if data.shape[1] != self.dimensions:
            raise ValueError(f'given dimension does not match coordinates')
        # get surface node displacements
        n_nodes = data.shape[0]
        model_part = self.model.get_model_part(mp_name)
        if n_nodes != model_part.size:
            raise ValueError('size of data does not match size of ModelPart')
        displacement = np.zeros((n_nodes, 3))  # also require z-input for 2D cases
        displacement[:, :self.dimensions] = data
        self.interface_output.set_variable_data(mp_name, 'displacement', displacement)
    return self.interface_output
def print_components_info(self, pre):
    """Print the name of this component."""
    component_name = self.__class__.__name__
    tools.print_info(pre, "The component ", component_name)
legend_entries = ['case_results'] # load cases results = {} for name, path in zip(legend_entries, case_paths): with open(os.path.join(common_path, path), 'rb') as file: results.update({name: pickle.load(file)}) # reference case case_reference = legend_entries[0] # check equal number of time steps time_steps = len(results[case_reference]['iterations']) for case in legend_entries: if not len(results[case]['iterations']) == time_steps: raise Exception(f"Number of time steps for case {case} ({len(results[case]['iterations'])}) " f"differs from number of time steps of reference case ({time_steps}).") for case in legend_entries: norm = [] for i in range(time_steps): norm = norm + [np.linalg.norm(results[case_reference]['solution_x'][:, i] - results[case]['solution_x'][:, i])] norm_average = np.array(norm).mean() tools.print_info(f'Average norm of the difference per time step between {case_reference} and {case} is {norm_average}.') for case, result in results.items(): iterations = np.array(result['iterations']).mean() time = result['time'] tools.print_info(f'{case}: average # iterations = {iterations:0.2f} and elapsed time = {time:0.3f}s') tools.print_info('\t', result['iterations'])
# collect the residual histories and iteration counts per case
for case in legend_entries:
    residual_list.append(results[case]['residual'])
    iterations_list.append(results[case]['iterations'])
    # track the largest iteration count in any time step (sets the plot row width)
    for ls in residual_list[-1]:
        if len(ls) > it_max:
            it_max = len(ls) + 1
# make figure
plt.figure()
residual = None
for case, residuals in zip(legend_entries, residual_list):
    # pad every time step's residual history with NaN so all rows have equal length;
    # flattening then plots time steps consecutively with gaps between them
    residual = np.ones((len(residuals), it_max)) * np.nan
    for i, ls in enumerate(residuals):
        residual[i, :len(ls)] = np.array(ls)
    plt.plot(residual.flatten(), 'o-', label=case)
plt.axhline(tolerance, color='k', label=f'tolerance {tolerance}')
# label the x-axis with the time step number instead of the flattened sample index
plt.gca().xaxis.set_major_formatter(FuncFormatter(lambda value, _: int(value // it_max + 1)))
plt.yscale('log')
plt.ylabel('norm of residual')
plt.xlabel('time step')
plt.legend()
# average number of iteration per time step
for case, iterations in zip(legend_entries, iterations_list):
    avg_iterations = np.mean(iterations)
    tools.print_info(f'{case}: average # iterations = {avg_iterations:0.2f}')
    tools.print_info('\t', iterations)
plt.show()
def read_modify_controldict(self):
    """
    reads the controlDict file in the case-directory and modifies some entries required by the
    coconut_pimpleFoam. The values of these entries are taken from parameters.json file.
    :return:
    """
    file_name = os.path.join(self.working_directory, 'system/controlDict')
    with open(file_name, 'r') as control_dict_file:
        control_dict = control_dict_file.read()
    self.write_interval = of_io.get_int(input_string=control_dict, keyword='writeInterval')
    time_format = of_io.get_string(input_string=control_dict, keyword='timeFormat')
    self.write_precision = of_io.get_int(input_string=control_dict, keyword='writePrecision')
    # only the 'fixed' time format is supported; replace any other setting
    if not time_format == 'fixed':
        msg = f'timeFormat:{time_format} in controlDict not implemented. Changed to "fixed"'
        tools.print_info(msg, layout='warning')
        control_dict = re.sub(r'timeFormat' + of_io.delimter + r'\w+', f'timeFormat fixed', control_dict)
    # overwrite solver settings with the values from the parameter file
    control_dict = re.sub(r'application' + of_io.delimter + r'\w+', f'application {self.application}', control_dict)
    control_dict = re.sub(r'startTime' + of_io.delimter + of_io.float_pattern,
                          f'startTime {self.start_time}', control_dict)
    control_dict = re.sub(r'deltaT' + of_io.delimter + of_io.float_pattern,
                          f'deltaT {self.delta_t}', control_dict)
    control_dict = re.sub(r'timePrecision' + of_io.delimter + of_io.int_pattern,
                          f'timePrecision {self.time_precision}', control_dict)
    # CoCoNuT controls the end of the calculation, so endTime is effectively set to infinity
    control_dict = re.sub(r'endTime' + of_io.delimter + of_io.float_pattern,
                          f'endTime 1e15', control_dict)
    # delete previously defined coconut functions
    coconut_start_string = '// CoCoNuT function objects'
    control_dict = re.sub(coconut_start_string + r'.*', '', control_dict, flags=re.S)
    with open(file_name, 'w') as control_dict_file:
        control_dict_file.write(control_dict)
        control_dict_file.write(coconut_start_string + '\n')
        control_dict_file.write('boundary_names (')
        for boundary_name in self.boundary_names:
            control_dict_file.write(boundary_name + ' ')
        control_dict_file.write(');\n\n')
        control_dict_file.write('functions\n{\n')
        # write pressure and traction surfaceRegion function objects per coupled boundary
        # NOTE(review): the wallShearStress function object is written inside this loop and
        # is therefore duplicated when there is more than one boundary — confirm intended
        for boundary_name in self.boundary_names:
            control_dict_file.write(
                f'PRESSURE_{boundary_name}\n'
                f'{{\n'
                f'type surfaceRegion;\n'
                f'libs ("libfieldFunctionObjects.so");\n'
                f'executeControl timeStep;\n'
                f'executeInterval 1;\n'
                f'writeControl timeStep;\n'
                f'writeInterval 1;\n'
                f'timeFormat fixed;\n'
                f'timePrecision {self.time_precision};\n'
                f'operation none;\n'
                f'writeFields true;\n'
                f'surfaceFormat raw;\n'
                f'regionType patch;\n'
                f'name {boundary_name};\n'
                f'fields (p);\n'
                f'}}\n')
            control_dict_file.write(
                f'wallShearStress\n'
                f'{{\n'
                f'type wallShearStress;\n'
                f'libs ("libfieldFunctionObjects.so");\n'
                f'executeControl timeStep;\n'
                f'executeInterval 1;\n'
                f'writeControl timeStep;\n'
                f'writeInterval 1;\n'
                f'timeFormat fixed;\n'
                f'timePrecision {self.time_precision};\n'
                f'log false;\n'
                f'}}\n')
            control_dict_file.write(
                f'TRACTION_{boundary_name}\n'
                f'{{\n'
                f'type surfaceRegion;\n'
                f'libs ("libfieldFunctionObjects.so");\n'
                f''
                f'executeControl timeStep;\n'
                f'executeInterval 1;\n'
                f'writeControl timeStep;\n'
                f'writeInterval 1;\n'
                f'timeFormat fixed;\n'
                f'timePrecision {self.time_precision};\n'
                f'operation none;\n'
                f'writeFields true;\n'
                f'surfaceFormat raw;\n'
                f'regionType patch;\n'
                f'name {boundary_name};\n'
                f'fields ( wallShearStress);\n'
                f'}}\n')
        control_dict_file.write('}')
    self.write_footer(file_name)
def print_iteration_info(self, r):
    """Print the current iteration number together with the residual norm."""
    indent = ' │' * self.solver_level
    tools.print_info(indent, f'{self.iteration:<16d}{r.norm():<28.17e}', flush=True)
def __init__(self, animation_figure, solution, interface, dt, time_step_start, model_part_name=None, variable=None,
             name=None):
    """
    Creates Animation instance

    self.info: list e.g. [("mp_a", ["PRESSURE", "TRACTION"]), ("mp_b, "DISPLACEMENT")]
        this dictates the order of the values in solution and coordinates:
        e.g. p0, p1, ..., pm, tx0, ty0, tz0, tx1, ty1, tz1,, ..., txm, tym, tzm, dx0, dy0, dz0, ..., dxm, dym, dzm
        where p and t are pressure and traction on mp_a and d is displacement on mp_b
    self.coordinates: np.array contains the initial coordinates of the nodes on the interface as given in self.info
        e.g. x0, y0, z0, x1, y1, z1, ...

    :param animation_figure: (AnimationFigure) AnimationFigure object where animation is created
    :param solution : (np.array) contains as columns the solution of each time step, order is dictated by self.info
    :param interface: (Interface) object interface to be plotted
    :param dt: (float) time step size
    :param time_step_start: (int) starting time step (typically zero)
    :param model_part_name: (string) model part to be plotted (optional if there is only one)
    :param variable: (string) variable to be plotted (optional if there is only one corresponding to the model part)
    :param name: (string) the name in the legend (optional)
    """
    self.animation_figure = animation_figure
    self.complete_solution = solution
    self.interface = interface
    self.info = interface.model_part_variable_pairs
    self.time_steps = solution.shape[1] - 1  # number of times steps
    self.dt = dt
    self.time_step_start = time_step_start
    # check that model_part_name or variable are given if not unique
    if model_part_name is None:
        if len(set(_[0] for _ in self.info)) > 1:
            raise Exception(f"Specify model_part_name: more than one present: {self.info}")
        else:
            model_part_name = self.info[0][0]
    elif model_part_name not in [_[0] for _ in self.info]:
        raise Exception(f"Given model_part_name '{model_part_name}' is not found")
    self.model_part = self.interface.get_model_part(model_part_name)
    # variable pairs belonging to the selected model part
    mp_vars = [pair for pair in self.info if pair[0] == model_part_name]
    # NOTE(review): when the variable argument is None, only self.variable is set below;
    # the local 'variable' stays None, so the name construction and the comparisons in
    # the data-location loop would use None — confirm callers always pass variable
    if variable is None:
        if len(mp_vars) > 1:
            raise Exception(f"Specify variable: more than one present: {[_[1] for _ in mp_vars]}")
        else:
            self.variable = mp_vars[0][1]
    else:
        if variable != variable.lower():
            raise Exception(f"Variable '{variable}' should be lower case")
        if variable not in [_[1] for _ in mp_vars] + ['coordinates']:
            raise Exception(f"Given variable '{variable}' is not found")
        else:
            self.variable = variable
    self.name = name if name is not None else model_part_name + ': ' + variable
    # initial coordinates of the interface nodes
    self.coordinates = np.zeros((self.model_part.size, 3))
    for j, direction in enumerate(['x0', 'y0', 'z0']):
        self.coordinates[:, j] = getattr(self.model_part, direction)
    self.m = self.model_part.size  # number of nodes
    # attributes filled during initialization of the animation
    self.animation = None
    self.mask = None
    self.argsort = None
    self.initial_position = None
    self.abscissa = None
    self.solution = None
    self.displacement = None
    self.line = None
    self.initialized = False
    # find location of data: start/end indices of the selected variable (and of
    # 'displacement', used to update node positions) within a solution column
    index = 0
    self.displacement_available = False
    for mp_name, var in self.info:
        mp = interface.get_model_part(mp_name)
        dimension = data_structure.variables_dimensions[var]
        if var == variable and mp_name == model_part_name:  # correct location
            self.start_index = index
            self.dimension = dimension
            self.end_index = index + self.dimension * self.m
            if self.m != mp.size:
                raise Exception("Number of coordinates do not match")
        if var == "displacement" and mp_name == model_part_name:  # displacement location
            self.start_index_displacement = index
            self.end_index_displacement = index + 3 * self.m
            self.displacement_available = True
            if self.m != mp.size:
                raise Exception("Number of coordinates do not match")
        index += dimension * mp.size
    if not self.displacement_available:
        out = f"{self.name} ({model_part_name}: {variable}): Nodes positions are not updated, because no " \
              f"'displacement' available"
        # plotting coordinates requires displacement data; otherwise only warn
        if self.variable == 'coordinates':
            raise Exception(out)
        else:
            tools.print_info(out, layout='warning')
    if index != self.complete_solution.shape[0]:
        raise Exception("Size of provided solution data does not match interface")
def print_iteration_info(self, r):
    """Print the time step number with the norms of the interface input and output."""
    line = f'{self.time_step:<16d}{self.x.norm():<28.17e}{self.y.norm():<28.17e}'
    tools.print_info(' │' * self.solver_level, line, flush=True)
def __init__(self, parameters):
    """Should only initialize the solver that is to be tested"""
    Component.__init__(self)
    self.parameters = parameters
    self.settings = parameters.get('settings', {})  # settings is optional as long as the necessary parameters...
    # ... are in test_settings
    self.init_time = time.time()
    if 'test_settings' not in self.parameters.keys():  # requires a new parameter input 'test_settings'
        raise KeyError('The coupled_solver "test_single_solver" requires "test_settings" which was not detected.')
    test_settings = parameters['test_settings']
    self.settings.update(test_settings)  # update settings with test_settings (test_settings are prioritized)
    # read parameters
    self.solver_index = self.settings['solver_index']  # solver to be tested; starts at 0
    self.test_class = self.settings.get('test_class')
    self.timestep_start_current = self.timestep_start_global = self.settings.get('timestep_start', 0)
    self.restart = False  # no restart allowed
    self.save_restart = 0  # no restart files are saved
    self.save_results = self.settings.get('save_results', 0)  # time step interval to save results
    self.delta_t = self.settings['delta_t']
    tools.print_info(f'Using delta_t = {self.delta_t} and timestep_start = {self.timestep_start_current}')
    # create dummy components: predictor and convergence criterion are not used in a single-solver test
    self.predictor = DummyComponent()
    self.convergence_criterion = DummyComponent()
    self.dummy_solver = None
    # solver wrapper settings
    parameters = self.parameters['solver_wrappers'][self.solver_index]
    if parameters['type'] == 'solver_wrappers.mapped':
        parameters = parameters['settings']['solver_wrapper']  # for mapped solver: the solver_wrapper itself tested
    settings = parameters['settings']
    # run the test in a fresh copy of the working directory so the original case stays untouched
    orig_wd = settings['working_directory']  # working directory changed to a test_working_directory
    i = 0
    while os.path.exists(f'{orig_wd}_test{i}'):
        i += 1
    cur_wd = f'{orig_wd}_test{i}'
    settings['working_directory'] = cur_wd
    os.system(f'cp -r {orig_wd} {cur_wd}')
    tools.print_info(f'{cur_wd} is the working_directory for the test\nCopying {orig_wd} to {cur_wd} \n')
    # add delta_t and timestep_start to solver_wrapper settings
    tools.pass_on_parameters(self.settings, parameters['settings'], ['timestep_start', 'delta_t'])
    self.solver_wrapper = tools.create_instance(parameters)
    self.solver_wrappers = [self.solver_wrapper]  # used for printing summary
    self.components = [self.solver_wrapper]  # will only contain 1 solver wrapper
    self.x = None
    self.y = None
    self.time_step = self.timestep_start_current
    self.iteration = None  # iteration
    self.solver_level = 0  # 0 is main solver (time step is printed)
    self.start_time = None
    self.run_time = None
    self.run_time_previous = 0
    self.iterations = []
    # save results variables
    if self.save_results:
        self.complete_solution_x = None
        self.complete_solution_y = None
        self.residual = []
        self.info = None
    self.case_name = self.settings.get('case_name', 'case')  # case name
    self.case_name += '_' + cur_wd
    self.debug = False
def print_header(self):
    """Print the column header once, at the first time step after the start."""
    if self.time_step == self.timestep_start_current + 1:
        columns = f"{'Time step':<16}{'Norm x':<28}{'Norm y':<28}"
        tools.print_info(80 * '═' + '\n' + columns, flush=True)
def print_header(self):
    """Print the time-step banner and the iteration-table column header."""
    rule = 80 * '═'
    banner = f'{rule}\n\tTime step {self.time_step}\n{rule}'
    columns = f'{"Iteration":<16}{"Norm residual":<28}'
    tools.print_info(f'{banner}\n{columns}', flush=True)
def print_components_info(self, pre):
    """Print this combined mapper together with the mappers it contains."""
    component_name = self.__class__.__name__
    tools.print_info(pre, "The component ", component_name, " combining the following mappers:")
    tools.print_components_info(pre, self.mappers)
os.mkdir('docs') os.mkdir('docs/images') os.mkdir('docs/assets') os.mkdir('docs/assets/images') # find all MarkDown files in CoCoNuT files = glob.glob('../**/*.md', recursive=True) # check for duplicate MarkDown files filenames = [] for file in files: filenames.append(file.split('/')[-1]) for i, filename in enumerate(filenames): if filenames.count(filename) > 1: tools.print_info(f'WARNING - duplicate filename "{files[i]}"') # copy all MarkDown files to docs folder for file in files: shutil.copy(file, 'docs/') # check if all MarkDown files are mentioned in nav unused = [] used = False for filename in filenames: with open('mkdocs.yml', 'r') as file: for line in file: if filename in line: used = True break if not used: