Example #1
 def test_HDF5NodalSolutionStepDataIO(self):
     with ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
         write_model_part = ModelPart("write")
         KratosMetis.SetMPICommunicatorProcess(write_model_part).Execute()
         self._initialize_model_part(write_model_part)
         hdf5_file = self._get_file()
         hdf5_model_part_io = self._get_model_part_io(hdf5_file)
         hdf5_model_part_io.WriteModelPart(write_model_part)
         read_model_part = ModelPart("read")
         KratosMetis.SetMPICommunicatorProcess(read_model_part).Execute()
         hdf5_model_part_io.ReadModelPart(read_model_part)
         KratosTrilinos.ParallelFillCommunicator(read_model_part.GetRootModelPart()).Execute()
         hdf5_nodal_solution_step_data_io = self._get_nodal_solution_step_data_io(hdf5_file)
         hdf5_nodal_solution_step_data_io.WriteNodalResults(write_model_part.Nodes, 0)
         hdf5_nodal_solution_step_data_io.ReadNodalResults(read_model_part.Nodes, read_model_part.GetCommunicator(), 0)
         read_model_part.GetCommunicator().SynchronizeNodalSolutionStepsData()
         # Check data.
         for read_node, write_node in zip(read_model_part.Nodes, write_model_part.Nodes):
             self.assertEqual(read_node.GetSolutionStepValue(DISPLACEMENT_X), write_node.GetSolutionStepValue(DISPLACEMENT_X))
             self.assertEqual(read_node.GetSolutionStepValue(DISPLACEMENT_Y), write_node.GetSolutionStepValue(DISPLACEMENT_Y))
             self.assertEqual(read_node.GetSolutionStepValue(DISPLACEMENT_Z), write_node.GetSolutionStepValue(DISPLACEMENT_Z))
             self.assertEqual(read_node.GetSolutionStepValue(VELOCITY_X), write_node.GetSolutionStepValue(VELOCITY_X))
             self.assertEqual(read_node.GetSolutionStepValue(VELOCITY_Y), write_node.GetSolutionStepValue(VELOCITY_Y))
             self.assertEqual(read_node.GetSolutionStepValue(VELOCITY_Z), write_node.GetSolutionStepValue(VELOCITY_Z))
             self.assertEqual(read_node.GetSolutionStepValue(ACCELERATION_X), write_node.GetSolutionStepValue(ACCELERATION_X))
             self.assertEqual(read_node.GetSolutionStepValue(ACCELERATION_Y), write_node.GetSolutionStepValue(ACCELERATION_Y))
             self.assertEqual(read_node.GetSolutionStepValue(ACCELERATION_Z), write_node.GetSolutionStepValue(ACCELERATION_Z))
             self.assertEqual(read_node.GetSolutionStepValue(PRESSURE), write_node.GetSolutionStepValue(PRESSURE))
             self.assertEqual(read_node.GetSolutionStepValue(VISCOSITY), write_node.GetSolutionStepValue(VISCOSITY))
             self.assertEqual(read_node.GetSolutionStepValue(DENSITY), write_node.GetSolutionStepValue(DENSITY))
             self.assertEqual(read_node.GetSolutionStepValue(ACTIVATION_LEVEL), write_node.GetSolutionStepValue(ACTIVATION_LEVEL))
             self.assertEqual(read_node.GetSolutionStepValue(PARTITION_INDEX), write_node.GetSolutionStepValue(PARTITION_INDEX))
         if KratosMPI.mpi.rank == 0:
             self._remove_file("test_hdf5_model_part_io_mpi.h5")
Example #2
 def test_HDF5ModelPartIO(self):
     with ControlledExecutionScope(os.path.dirname(os.path.realpath(__file__))):
         write_model_part = ModelPart("write")
         KratosMetis.SetMPICommunicatorProcess(write_model_part).Execute()
         self._initialize_model_part(write_model_part)
         hdf5_file = self._get_file()
         hdf5_model_part_io = self._get_model_part_io(hdf5_file)
         hdf5_model_part_io.WriteModelPart(write_model_part)
         read_model_part = ModelPart("read")
         KratosMetis.SetMPICommunicatorProcess(read_model_part).Execute()
         hdf5_model_part_io.ReadModelPart(read_model_part)
         KratosTrilinos.ParallelFillCommunicator(read_model_part.GetRootModelPart()).Execute()
         read_model_part.GetCommunicator().SynchronizeNodalSolutionStepsData()
         # Check nodes (node order should be preserved on read/write to ensure consistency with nodal results)
         self.assertEqual(read_model_part.NumberOfNodes(), write_model_part.NumberOfNodes())
         for read_node, write_node in zip(read_model_part.Nodes, write_model_part.Nodes):
             self.assertEqual(read_node.Id, write_node.Id)
             self.assertEqual(read_node.X, write_node.X)
             self.assertEqual(read_node.Y, write_node.Y)
             self.assertEqual(read_node.Z, write_node.Z)
         # Check elements
         self.assertEqual(read_model_part.NumberOfElements(), write_model_part.NumberOfElements())
         first_elem_id = next(iter(read_model_part.Elements)).Id
         read_model_part.GetElement(first_elem_id) # Force a sort since the order is mixed by OpenMP.
         for read_elem, write_elem in zip(read_model_part.Elements, write_model_part.Elements):
             self.assertEqual(read_elem.Id, write_elem.Id)
             self.assertEqual(read_elem.Properties.Id, write_elem.Properties.Id)
             self.assertEqual(len(read_elem.GetNodes()), len(write_elem.GetNodes()))
             for read_elem_node, write_elem_node in zip(read_elem.GetNodes(), write_elem.GetNodes()):
                 self.assertEqual(read_elem_node.Id, write_elem_node.Id)
         # Check conditions
         self.assertEqual(read_model_part.NumberOfConditions(), write_model_part.NumberOfConditions())
         first_cond_id = next(iter(read_model_part.Conditions)).Id
         read_model_part.GetCondition(first_cond_id) # Force a sort since the order is mixed by OpenMP.
         for read_cond, write_cond in zip(read_model_part.Conditions, write_model_part.Conditions):
             self.assertEqual(read_cond.Id, write_cond.Id)
             self.assertEqual(read_cond.Properties.Id, write_cond.Properties.Id)
             self.assertEqual(len(read_cond.GetNodes()), len(write_cond.GetNodes()))
             for read_cond_node, write_cond_node in zip(read_cond.GetNodes(), write_cond.GetNodes()):
                 self.assertEqual(read_cond_node.Id, write_cond_node.Id)
         # Check process info
         self.assertEqual(read_model_part.ProcessInfo[DOMAIN_SIZE], write_model_part.ProcessInfo[DOMAIN_SIZE])
         self.assertEqual(read_model_part.ProcessInfo[TIME], write_model_part.ProcessInfo[TIME])
         read_vector = read_model_part.ProcessInfo[INITIAL_STRAIN]
         write_vector = write_model_part.ProcessInfo[INITIAL_STRAIN]
         self.assertEqual(read_vector.Size(), write_vector.Size())
         for i in range(len(read_vector)):
             self.assertEqual(read_vector[i], write_vector[i])
         read_matrix = read_model_part.ProcessInfo[GREEN_LAGRANGE_STRAIN_TENSOR]
         write_matrix = write_model_part.ProcessInfo[GREEN_LAGRANGE_STRAIN_TENSOR]
         self.assertEqual(read_matrix.Size1(), write_matrix.Size1())
         self.assertEqual(read_matrix.Size2(), write_matrix.Size2())
         for i in range(read_matrix.Size1()):
             for j in range(read_matrix.Size2()):
                 self.assertEqual(read_matrix[i,j], write_matrix[i,j])
         if KratosMPI.mpi.rank == 0:
             self._remove_file("test_hdf5_model_part_io_mpi.h5")
Example #3
    def CreateCommunicators(self):
        ## Construct and execute the MPICommunicator
        KratosMetis.SetMPICommunicatorProcess(self.main_model_part).Execute()

        ## Construct and execute the Parallel fill communicator
        ParallelFillCommunicator = KratosTrilinos.ParallelFillCommunicator(self.main_model_part.GetRootModelPart())
        ParallelFillCommunicator.Execute()

        if KratosMPI.mpi.rank == 0:
            print("MPI communicators constructed.")
Example #4
    def _read_model_part_mpi(self, main_model_part):

        if (KratosMPI.mpi.size == 1):
            self.skipTest(
                "Test can only be run with more than one MPI process")

        ## Add variables to the model part
        main_model_part.AddNodalSolutionStepVariable(
            KratosMultiphysics.DENSITY)
        main_model_part.AddNodalSolutionStepVariable(
            KratosMultiphysics.VISCOSITY)
        main_model_part.AddNodalSolutionStepVariable(
            KratosMultiphysics.DISPLACEMENT)
        main_model_part.AddNodalSolutionStepVariable(
            KratosMultiphysics.PARTITION_INDEX)

        ## Serial partition of the original .mdpa file
        input_filename = "test_mpi_communicator"
        if KratosMPI.mpi.rank == 0:

            # Original .mdpa file reading
            model_part_io = KratosMultiphysics.ModelPartIO(input_filename)

            # Partition of the original .mdpa file
            number_of_partitions = KratosMPI.mpi.size  # Number of partitions equals the number of processors
            domain_size = main_model_part.ProcessInfo[
                KratosMultiphysics.DOMAIN_SIZE]
            verbosity = 0
            sync_conditions = True  # Make sure that the condition goes to the same partition as the element it is a face of

            partitioner = KratosMetis.MetisDivideHeterogeneousInputProcess(
                model_part_io, number_of_partitions, domain_size, verbosity,
                sync_conditions)
            partitioner.Execute()

            print("Metis divide finished.")

        KratosMPI.mpi.world.barrier()

        ## Read the partitioned .mdpa files
        mpi_input_filename = input_filename + "_" + str(KratosMPI.mpi.rank)
        model_part_io = KratosMultiphysics.ModelPartIO(mpi_input_filename)
        model_part_io.ReadModelPart(main_model_part)

        ## Construct and execute the MPICommunicator
        KratosMetis.SetMPICommunicatorProcess(main_model_part).Execute()

        ## Construct and execute the Parallel fill communicator
        ParallelFillCommunicator = KratosTrilinos.ParallelFillCommunicator(
            main_model_part.GetRootModelPart())
        ParallelFillCommunicator.Execute()

        ## Check submodelpart of each main_model_part of each processor
        self.assertTrue(main_model_part.HasSubModelPart("Skin"))
        skin_sub_model_part = main_model_part.GetSubModelPart("Skin")
Example #5
    def CreateCommunicators(self):
        ## Construct and execute the MPICommunicator
        KratosMetis.SetMPICommunicatorProcess(self.main_model_part).Execute()

        ## Construct and execute the Parallel fill communicator
        ParallelFillCommunicator = KratosTrilinos.ParallelFillCommunicator(
            self.main_model_part.GetRootModelPart())
        ParallelFillCommunicator.Execute()

        if KratosMPI.mpi.rank == 0:
            KratosMultiphysics.Logger.PrintInfo(
                "::[TrilinosImportModelPartUtility]::",
                "MPI communicators constructed.")
    def ReadModelPart(self, filename):

        model_part = KratosMultiphysics.ModelPart("Test ModelPart")

        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.PARTITION_INDEX)
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.DISPLACEMENT)
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.FORCE)
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.REACTION)
        model_part.AddNodalSolutionStepVariable(KratosMultiphysics.FSI_INTERFACE_RESIDUAL)
        mpi.world.barrier()

        KratosMetis.SetMPICommunicatorProcess(model_part).Execute()

        model_part_io = KratosMultiphysics.ModelPartIO(filename)
        model_part_io.ReadModelPart(model_part)
        model_part.SetBufferSize(2)

        KratosTrilinos.ParallelFillCommunicator(model_part).Execute()

        return model_part
    def ImportModelPart(self):
        if (self.settings["model_import_settings"]["input_type"].GetString() ==
                "mdpa"):
            # here we read the already existing partitions from the primal solution.
            input_filename = self.settings["model_import_settings"][
                "input_filename"].GetString()
            mpi_input_filename = input_filename + "_" + str(KratosMPI.mpi.rank)
            self.settings["model_import_settings"]["input_filename"].SetString(
                mpi_input_filename)
            KratosMultiphysics.ModelPartIO(mpi_input_filename).ReadModelPart(
                self.main_model_part)

            # here we shall check that the input read has the shape we like
            aux_params = KratosMultiphysics.Parameters("{}")
            aux_params.AddValue("volume_model_part_name",
                                self.settings["volume_model_part_name"])
            aux_params.AddValue("skin_parts", self.settings["skin_parts"])

            # here we replace the dummy elements we read with proper elements
            self.settings.AddEmptyValue("element_replace_settings")
            if (self.main_model_part.ProcessInfo[
                    KratosMultiphysics.DOMAIN_SIZE] == 3):
                self.settings[
                    "element_replace_settings"] = KratosMultiphysics.Parameters(
                        """
                    {
                        "element_name": "VMSAdjointElement3D",
                        "condition_name": "SurfaceCondition3D3N"
                    }
                    """)
            elif (self.main_model_part.ProcessInfo[
                    KratosMultiphysics.DOMAIN_SIZE] == 2):
                self.settings[
                    "element_replace_settings"] = KratosMultiphysics.Parameters(
                        """
                    {
                        "element_name": "VMSAdjointElement2D",
                        "condition_name": "LineCondition2D2N"
                    }
                    """)
            else:
                raise Exception("domain size is not 2 nor 3")

            KratosMultiphysics.ReplaceElementsAndConditionsProcess(
                self.main_model_part,
                self.settings["element_replace_settings"]).Execute()

            import check_and_prepare_model_process_fluid
            check_and_prepare_model_process_fluid.CheckAndPrepareModelProcess(
                self.main_model_part, aux_params).Execute()

            # here we read the DENSITY and (kinematic) VISCOSITY from the element properties and apply them to the nodes
            for el in self.main_model_part.Elements:
                rho = el.Properties.GetValue(KratosMultiphysics.DENSITY)
                kin_viscosity = el.Properties.GetValue(
                    KratosMultiphysics.VISCOSITY)
                break

            KratosMultiphysics.VariableUtils().SetScalarVar(
                KratosMultiphysics.DENSITY, rho, self.main_model_part.Nodes)
            KratosMultiphysics.VariableUtils().SetScalarVar(
                KratosMultiphysics.VISCOSITY, kin_viscosity,
                self.main_model_part.Nodes)

        else:
            raise Exception("Other input options are not yet implemented.")

        current_buffer_size = self.main_model_part.GetBufferSize()
        if (self.GetMinimumBufferSize() > current_buffer_size):
            self.main_model_part.SetBufferSize(self.GetMinimumBufferSize())

        MetisApplication.SetMPICommunicatorProcess(
            self.main_model_part).Execute()

        ParallelFillCommunicator = TrilinosApplication.ParallelFillCommunicator(
            self.main_model_part.GetRootModelPart())
        ParallelFillCommunicator.Execute()

        if KratosMPI.mpi.rank == 0:
            print("MPI communicators constructed.")
            print("MPI model reading finished.")