Example 1
    def test_dump(self):
        aero_file = AeroFile(test_path / "data_files" / "aero_1.yaml")
        aero_file.dump(test_path / "data_files" / "aero_2.i.yaml")

        aero_1_data = flex_load(test_path / "data_files" / "aero_1.yaml")
        aero_2_data = flex_load(test_path / "data_files" / "aero_2.i.yaml")

        # Check that the data in the aero_2_data dictionary matches the data in aero_1_data.
        # Since aero_1_data was written by hand, it doesn't contain all the optional parameters
        # for the coefficient models, while aero_2_data does. So we only check the parameters
        # present in aero_1_data and ignore any parameter unique to aero_2_data.

        def check_dictionaries(d1, d2):
            for key, value in d1.items():
                # print(f"dict key='{key}, v1='{value}, v2={d2[key]}")
                if isinstance(value, dict):
                    self.assertIsInstance(d2[key], dict)
                    check_dictionaries(value, d2[key])
                elif isinstance(value, float):
                    self.assertAlmostEqual(value, d2[key])
                elif isinstance(value, (list, tuple)):
                    self.assertIsInstance(d2[key], (list, tuple))
                    check_list(value, d2[key])
                elif isinstance(value, set):
                    # It's assumed that sets do not contain lists, dictionaries or
                    # other sets and that both sets should contain the same elements.
                    self.assertIsInstance(d2[key], set)
                    self.assertEqual(value, d2[key])
                else:
                    self.assertEqual(value, d2[key])

        def check_list(l1, l2):
            for i, value in enumerate(l1):
                # print(f"list i='{i}, v1='{value}, v2={l2[i]}")
                if isinstance(value, dict):
                    self.assertIsInstance(l2[i], dict)
                    check_dictionaries(value, l2[i])
                elif isinstance(value, float):
                    self.assertAlmostEqual(value, l2[i])
                elif isinstance(value, (list, tuple)):
                    self.assertIsInstance(l2[i], (list, tuple))
                    check_list(value, l2[i])
                elif isinstance(value, set):
                    # It's assumed that sets do not contain lists, dictionaries or
                    # other sets and that both sets should contain the same elements.
                    self.assertIsInstance(l2[i], set)
                    self.assertEqual(value, l2[i])
                else:
                    self.assertEqual(value, l2[i])

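        # Normalise the 'created' entry to a datetime object so it compares
        # equal to the value read from the hand-written file.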
        aero_2_data['created'] = datetime_parse(aero_2_data['created'])

        check_dictionaries(aero_1_data, aero_2_data)
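For use outside a unittest.TestCase, the same subset comparison with a float tolerance could be written as a standalone helper. This is a minimal sketch (dicts_match and its tolerance are illustrative, not part of the library) that only handles nested dictionaries and scalar values:

import math

def dicts_match(d1, d2, rel_tol=1e-9):
    """Return True if every entry of d1 appears in d2 with an equal value.

    Floats are compared with a relative tolerance; keys unique to d2 are
    ignored, mirroring the hand-written vs. dumped file comparison above.
    """
    for key, value in d1.items():
        if key not in d2:
            return False
        other = d2[key]
        if isinstance(value, dict):
            if not (isinstance(other, dict) and dicts_match(value, other, rel_tol)):
                return False
        elif isinstance(value, float):
            if not math.isclose(value, other, rel_tol=rel_tol):
                return False
        elif value != other:
            return False
    return True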
Example 2
    def test_dump_load(self):
        for (serializer_name, serializer), \
            (extension, extension_serializer), \
            is_gzipped, \
            is_gzipped_default in product(self.serializers.items(),
                                          self.file_extensions.items(),
                                          [True, False],
                                          [True, False]):
            with self.subTest(serializer=serializer_name,
                              extension=extension,
                              is_gzipped=is_gzipped,
                              is_gzipped_default=is_gzipped_default):
                file_path = self.flex_file_temp / f"data.i{extension}{'.gz' if is_gzipped else ''}"
                if extension == ".unknown":
                    is_actually_gzipped = is_gzipped or is_gzipped_default
                else:
                    is_actually_gzipped = is_gzipped

                flex_dump(self.test_data,
                          file_path,
                          default_serializer=serializer,
                          default_is_gzipped=is_gzipped_default)
                loaded_in_data = flex_load(
                    file_path,
                    default_serializer=serializer,
                    default_is_gzipped=is_gzipped_default)

                det_serializer, det_is_gzipped, det_is_binary = \
                    determine_serialization(file_path,
                                            default_serializer=serializer,
                                            default_is_gzipped=is_gzipped_default)
                self.assertEqual(det_is_gzipped, is_actually_gzipped)
                errors = object_hierarchy_equals(loaded_in_data,
                                                 self.test_data)
                print(yaml.dump(errors, default_flow_style=False))
                self.assertEqual(len(errors), 0)
                rm(file_path)
Example 3
    def validator(self):
        """
        Cerberus validator object used to validate the aero_files being read.
        """
        # Load the schema, create a Cerberus validator object from it and return it.
        schema = flex_load(abs_path("aero_file_schema.yaml"))
        return Validator(schema)
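The Validator returned here follows the standard Cerberus API, where validate() returns a boolean and errors collects the messages per field. A minimal sketch with an illustrative schema and document (not the actual contents of aero_file_schema.yaml):

from cerberus import Validator

# Illustrative schema and document, just to show the Cerberus calls used above.
schema = {"case": {"type": "string", "required": True},
          "lref": {"type": "float", "min": 0.0}}
document = {"case": "baseline", "lref": 1.25}

v = Validator(schema)
if not v.validate(document):
    # v.errors maps each offending field to a list of messages.
    print(v.errors)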
Example 4
    def __load_data(self):
        """
        Loads in the aerodynamic data from a file.

        :raises ValueError: If an unknown model was defined in the file.
        """

        if "r" in self.mode:
            # Load raw data from file.
            raw_data = flex_load(file_path=self.path,
                                 default_serializer=msgpack,
                                 default_is_gzipped=True)

            # Validate data, raise error if the data is invalid.
            valid, error = self.validate_data(raw_data)
            if not valid:
                raise ValueError("Invalid aerodynamic file.\n" + yaml.dump(error))

            # Read in parameters.
            self.case = raw_data['case']
            self.description = raw_data['description']
            self.lref = raw_data['lref']
            self.sref = raw_data['sref']
            self.latref = raw_data['latref']
            self.mrc = np.array(raw_data['mrc']).reshape((3, 1))

            # Load in the created datetime.
            # The datetime should be stored as a string in the data file, but
            # some serializers, like the YAML loader, automatically parse it into a
            # datetime object.
            if isinstance(raw_data['created'], datetime):
                self.created = raw_data['created']
            else:
                self.created = datetime_parse(raw_data['created'])

            # Loop through the coefficient entries and create an instance of AeroMethodBase for each.
            for c_data in raw_data['coefficients']:
                # Get the class for the specified model.
                model_class = aero_models_registry.get(c_data['model'])

                # Check whether a class for the model was found. If not, raise an exception.
                if model_class is None:
                    raise ValueError("Aerodynamic model '{}' unknown.".format(c_data['model']))
                else:
                    # Validate the properties dictionary.
                    result = model_class.validate_data(c_data['properties'])
                    if result is not None:
                        raise ValueError("Invalid aerodynamic file.\n" + result)

                    # If a class was found, create an instance of it, pass the parameters to the
                    # constructor and store the resulting object in the coefficients table.
                    coefficient_model = model_class(**c_data['properties'])

                    # Check whether all parameters are valid.
                    # TODO: Finish writing this.

                    self.add_coefficient(c_data['name'], coefficient_model)
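The aero_models_registry.get() lookup above relies on a name-to-class registry. A minimal sketch of how such a registry could be populated with a class decorator (register_model and ConstantCoefficient are hypothetical, not the library's actual implementation):

aero_models_registry = {}

def register_model(name):
    """Class decorator that registers an aerodynamic model class under a name."""
    def decorator(cls):
        aero_models_registry[name] = cls
        return cls
    return decorator

@register_model("constant")
class ConstantCoefficient:
    """Hypothetical model that always returns a fixed coefficient value."""
    def __init__(self, value):
        self.value = value

# __load_data can then resolve the 'model' string read from the file:
model_class = aero_models_registry.get("constant")
coefficient_model = model_class(value=0.05)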
Example 5
    def setUp(self) -> None:
        self.verification_module = EOM6DOFVerificationDataModule()
        self.simulation = Simulation(
            states_class=EOM6DOFStates,
            integrator=AB3Integrator(h=0.01, rk4=True, fd_max_order=1),
            modules=[EOM6DOF(), self.verification_module],
            logging=Logging(),
            initial_state_values=None,
        )

        verification_data_path = (Path(__file__).parent /
                                   "eom6dof_verification_data.msgp.gz")
        self.verification_data = flex_load(verification_data_path)
Example 6
    def test_file(self):
        with open(test_dir_path / "data_files" / "xsens_raw_data.bin",
                  "rb") as f:
            xp = XSensParser(f)
            xp.start()

            tables = xp.get_tables()

            # Uncomment this line to update the tables in the data file.
            # File(test_dir_path / "data_files" / "correct_xsens_tables.msgp.gz").dump(tables)

            correct_tables = flex_load(test_dir_path / "data_files" /
                                       "correct_xsens_tables.msgp.gz")

            errors = object_hierarchy_equals(tables, correct_tables)
            # print("\n".join(errors))
            self.assertEqual(len(errors), 0)
Example 7
    def main(cls, args: argparse.Namespace):
        # Load calibration data if provided.
        calibration_data = None
        if args.calibration_data is not None:
            calibration_data = flex_load(args.calibration_data)

        if args.input_file is not None:
            if args.large_file:
                output_path = pathlib.PurePath(args.output_file[0])
                output_format = "".join(output_path.suffixes)
                output_format = None if output_format == '' else output_format
                output_path = output_path.stem

                with open(args.input_file, "rb") as f:
                    xp = XSensLargeFileParser(
                        f,
                        output_path=output_path,
                        output_format=output_format,
                        calibration_data=calibration_data,
                        verbose=args.verbose)
                    xp.start()

            else:
                with open(args.input_file, "rb") as f:
                    xp = XSensParser(f,
                                     verbose=args.verbose,
                                     calibration_data=calibration_data)
                    xp.start()
                    if args.verbose:
                        print("device_id:", xp.device_id)
                    tables = xp.get_tables()
                    # Debug helper: uncomment to inspect the value types of each table.
                    # for key, value in tables.items():
                    #     if isinstance(value, str):
                    #         continue
                    #     print(key, type(value['time']), type(value['data'][0]))

                for output_path in map(pathlib.Path, args.output_file):
                    flex_dump(tables, output_path)

        else:
            raise ValueError("No input source given.")