def test_simple_result_set_as_dict(self):
        """
        Try the first result set with a single record, using particles converted to dictionaries.
        """
        rs = ResultSet(
            self._get_result_set_file(
                "record_set_files/test_data_1.txt.result.yml"))

        # Test the happy path
        base_timestamp = 3583861263.0
        particle_a = CtdpfParserDataParticle(
            "10.5914,  4.1870,  161.06,   2693.0",
            internal_timestamp=base_timestamp,
            new_sequence=True).generate_dict()
        particle_b = CtdpfParserDataParticle(
            "10.5915,  4.1871,  161.07,   2693.1",
            internal_timestamp=base_timestamp + 1).generate_dict()

        self.assertTrue(rs.verify([particle_a, particle_b]))
        self.assertIsNone(rs.report())

        # test record count mismatch
        self.assertFalse(rs.verify([particle_a]))
        self.assertIsNotNone(rs.report())

        # test out of order record
        self.assertFalse(rs.verify([particle_b, particle_a]))
        self.assertIsNotNone(rs.report())

        # test bad data record
        self.assertFalse(rs.verify([particle_a, particle_a]))
        self.assertIsNotNone(rs.report())
Example 2
    def test_simple_result_set_as_dict(self):
        """
        Try the first result set with a single record, using particles converted to dictionaries.
        """
        rs = ResultSet(self._get_result_set_file("record_set_files/test_data_1.txt.result.yml"))

        # Test the happy path
        base_timestamp = 3583861263.0
        particle_a = CtdpfParserDataParticle("10.5914,  4.1870,  161.06,   2693.0",
                                             internal_timestamp=base_timestamp, new_sequence=True).generate_dict()
        particle_b = CtdpfParserDataParticle("10.5915,  4.1871,  161.07,   2693.1",
                                             internal_timestamp=base_timestamp+1).generate_dict()

        self.assertTrue(rs.verify([particle_a, particle_b]))
        self.assertIsNone(rs.report())

        # test record count mismatch
        self.assertFalse(rs.verify([particle_a]))
        self.assertIsNotNone(rs.report())

        # test out of order record
        self.assertFalse(rs.verify([particle_b, particle_a]))
        self.assertIsNotNone(rs.report())

        # test bad data record
        self.assertFalse(rs.verify([particle_a, particle_a]))
        self.assertIsNotNone(rs.report())
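For context, the sketch below illustrates the general shape of the expected-results file that ResultSet loads in the tests above. It is an assumption for illustration only: the header keys, the stream name ctdpf_parsed, and the parameter names are not taken from the actual test_data_1.txt.result.yml.

# Minimal sketch, not the real result file: header keys, stream name and
# parameter names are assumed for illustration of the ResultSet format.
import yaml

example_result_yml = """
header:
  particle_object: CtdpfParserDataParticle
  particle_type: ctdpf_parsed
data:
  - _index: 1
    internal_timestamp: 3583861263.0
    temperature: 10.5914
    conductivity: 4.1870
    pressure: 161.06
    oxygen: 2693.0
  - _index: 2
    internal_timestamp: 3583861264.0
    temperature: 10.5915
    conductivity: 4.1871
    pressure: 161.07
    oxygen: 2693.1
"""

expected = yaml.safe_load(example_result_yml)
assert len(expected['data']) == 2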
Example 3
    def assert_particles(self, particles, yml_file, resource_path=None):
        """
        Assert that the contents of the particles match those in the results
        yaml file.

        @param particles either a DataParticle sub-class or particle dictionary
        to compare with the particles in the .yml file
        @param yml_file the .yml file name or full path containing particles
        to compare
        @param resource_path the path to the .yml file, used only if yml_file
        does not contain the full path
        """

        # see if .yml file has the full path
        if os.path.exists(yml_file):
            rs_file = yml_file
        # if not the full path, check if resource path was defined
        elif resource_path is not None:
            rs_file = os.path.join(resource_path, yml_file)
        # out of places to check for the file, raise an error
        else:
            raise DatasetParserException('Test yaml file cannot be found to assert particles')

        # initialize result set with this .yml results file
        rs = ResultSet(rs_file)
        # compare results particles and assert that the output was successful
        self.assertTrue(rs.verify(particles),
                        msg=('Failed unit test data validation for file %s' % yml_file))
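A hypothetical call site for assert_particles is sketched below. The parser class, RESOURCE_PATH constant, parser config, and file names are assumptions used only to show how the helper is typically invoked; they do not come from the example above.

    def test_parse_and_verify(self):
        # Hypothetical sketch: parse an input file and compare the
        # resulting particles against a result-set .yml file. The parser
        # class, RESOURCE_PATH and file names are assumed for illustration.
        with open(os.path.join(RESOURCE_PATH, 'test_data_1.txt')) as stream_handle:
            parser = ExampleParser(self._parser_config, stream_handle,
                                   self.exception_callback)
            particles = parser.get_records(2)
        self.assert_particles(particles, 'test_data_1.txt.result.yml',
                              resource_path=RESOURCE_PATH)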
Example 4
    def assert_particles(self, particles, yml_file, resource_path=None):
        """
        Assert that the contents of the particles match those in the results
        yaml file.

        @param particles either a DataParticle sub-class or particle dictionary
        to compare with the particles in the .yml file
        @param yml_file the .yml file name or full path containing particles
        to compare
        @param resource_path the path to the .yml file, used only if yml_file
        does not contain the full path
        """

        # see if .yml file has the full path
        if os.path.exists(yml_file):
            rs_file = yml_file
        # if not the full path, check if resource path was defined
        elif resource_path is not None:
            rs_file = os.path.join(resource_path, yml_file)
        # out of places to check for the file, raise an error
        else:
            raise DatasetParserException(
                'Test yaml file cannot be found to assert particles')

        # initialize result set with this .yml results file
        rs = ResultSet(rs_file)
        # compare results particles and assert that the output was successful
        self.assertTrue(rs.verify(particles),
                        msg=('Failed unit test data validation for file %s' %
                             yml_file))
Example 5
    def test_full_types(self):
        """
        Confirm that all data types pass or fail verification as expected
        """
        ftdp = FullTypesDataParticle([])

        # First test with the correct data in the yml to confirm everything passes
        rs = ResultSet(TEST_PATH + 'full_types.yml')
        if not rs.verify([ftdp]):
            self.fail("Failed verification")

        # All parameters should not match
        rs = ResultSet(TEST_PATH + 'full_bad_types.yml')
        # expect this to fail
        if rs.verify([ftdp]):
            self.fail("Should have failed verification, but verification passed")
Example 6
    def assert_data(self,
                    particle_class,
                    result_set_file=None,
                    count=1,
                    timeout=10):
        """
        Wait for a data particle in the data callback queue
        @param particle_class, class of the expected data particles
        @param result_set_file, filename containing definition of the resulting dataset
        @param count, how many records to wait for
        @param timeout, how long to wait for the records.
        """
        try:
            particles = self.get_samples(particle_class, count, timeout)
        except Timeout:
            # get_samples() timed out, so no count of found particles is
            # available here
            log.error(
                "Failed to detect particle %s, expected %d particles",
                particle_class, count)
            self.fail("particle detection failed. Expected %d particles" % count)

        # Verify the data against the result data set definition
        if result_set_file:
            rs_file = self._get_source_data_file(result_set_file)
            rs = ResultSet(rs_file)

            self.assertTrue(rs.verify(particles),
                            msg="Failed data validation, check the logs.")
Example 7
    def test_multiple_bad_type_object(self):
        """
        Test that a bad type or bad object does not match
        """
        ftdp = FakeDataParticle([])

        # yml has bad type in individual particle (stream)
        rs = ResultSet(TEST_PATH + 'bad_type_multiple.yml')
        # this should fail
        if rs.verify([ftdp]):
            self.fail("Should have failed particle verification, but verification passed")

        # yml has bad class in individual particle (stream)
        rs = ResultSet(TEST_PATH + 'bad_class_multiple.yml')
        # this should fail
        if rs.verify([ftdp]):
            self.fail("Should have failed particle verification, but verification passed")
Example 8
    def test_full_types(self):
        """
        Confirm that all data types pass or fail verification as expected
        """
        ftdp = FullTypesDataParticle([])

        # First test with the correct data in the yml to confirm everything passes
        rs = ResultSet(TEST_PATH + 'full_types.yml')
        if not rs.verify([ftdp]):
            self.fail("Failed verification")

        # All parameters should not match
        rs = ResultSet(TEST_PATH + 'full_bad_types.yml')
        # expect this to fail
        if rs.verify([ftdp]):
            self.fail(
                "Should have failed verification, but verification passed")
Example 9
    def verify_particle_contents(self, particles, result_set_file):
        """
        Verify that the contents of the particles match those in the result file.
        """

        rs_file = self._get_source_data_file(result_set_file)
        rs = ResultSet(rs_file)
        self.assertTrue(rs.verify(particles), msg="Failed Integration test data validation")
Example 10
    def assert_data_values(self, particles, dataset_definition_file):
        """
        Verify particles match the particles defined in the definition file
        """
        rs_file = self._get_source_data_file(dataset_definition_file)
        rs = ResultSet(rs_file)

        self.assertTrue(rs.verify(particles))
Example 11
    def assert_data_values(self, particles, dataset_definition_file):
        """
        Verify particles match the particles defined in the definition file
        """
        rs_file = self._get_source_data_file(dataset_definition_file)
        rs = ResultSet(rs_file)

        self.assertTrue(rs.verify(particles))
Example 12
    def verify_particle_contents(self, particles, result_set_file):
        """
        Verify that the contents of the particles match those in the result file.
        """

        rs_file = self._get_source_data_file(result_set_file)
        rs = ResultSet(rs_file)
        self.assertTrue(rs.verify(particles),
                        msg='Failed Integration test data validation')
Example 13
    def test_round(self):
        rs = ResultSet(self._get_result_set_file("record_set_files/test_data_2.txt.result.yml"))

        # Test the happy path
        base_timestamp = 3583861263.0
        particle_a = CtdpfParserDataParticle("10.5914,  4.1870,  161.06,   2693.0",
                                             internal_timestamp=base_timestamp, new_sequence=True).generate_dict()

        self.assertTrue(rs.verify([particle_a]))
        self.assertIsNone(rs.report())
Example 14
    def test_round(self):
        """
        Test that rounding occurs
        """
        fdp = FakeDataParticle([])

        # test with a rounding dictionary in the yml
        rs = ResultSet(TEST_PATH + 'fake_round.yml')
        # expect this to pass
        if not rs.verify([fdp]):
            self.fail("Failed particle verification")

        frp = FakeRoundParticle([])

        # test with rounding dictionary with a nested list
        rs = ResultSet(TEST_PATH + 'fake_round_list.yml')
        # expect this to pass
        if not rs.verify([frp]):
            self.fail("Failed particle verification")
Example 15
    def test_round(self):
        """
        Test that rounding occurs
        """
        fdp = FakeDataParticle([])

        # test with a rounding dictionary in the yml
        rs = ResultSet(TEST_PATH + 'fake_round.yml')
        # expect this to pass
        if not rs.verify([fdp]):
            self.fail("Failed particle verification")

        frp = FakeRoundParticle([])

        # test with rounding dictionary with a nested list
        rs = ResultSet(TEST_PATH + 'fake_round_list.yml')
        # expect this to pass
        if not rs.verify([frp]):
            self.fail("Failed particle verification")
Example 16
    def test_incorrect_length(self):
        """
        Test that a mismatch between the number of particles in the yml file and in the results causes verification to fail
        """
        ftdp = FakeDataParticle([])

        # only one particle in results file
        rs = ResultSet(TEST_PATH + 'fake_particle.yml')
        # compare to two, this should fail
        if rs.verify([ftdp, ftdp]):
            self.fail("Should have failed particle verification, but verification passed")
Example 17
    def test_timestamp(self):
        """
        Test that the timestamp string conversion is working
        """
        ftdp = FakeDataParticle([])

        # File contains 4 particles, each with different formatted timestamp string
        rs = ResultSet(TEST_PATH + 'timestamp_string.yml')
        # confirm all strings match
        if not rs.verify([ftdp, ftdp, ftdp, ftdp]):
            self.fail("Should have failed verification, but verification passed")
Example 18
    def test_no_particle_timestamp(self):
        """
        Test that a class which has not set the particle timestamp does not match a .yml particle that includes one
        """
        ftdp = FakeNoTsParticle([])

        # .yml file contains timestamp, class does not
        rs = ResultSet(TEST_PATH + 'fake_no_ts_particle.yml')
        # this should fail
        if rs.verify([ftdp]):
            self.fail("Should have failed particle verification, but verification passed")
Example 19
    def test_missing_type_multiple(self):
        """
        Test that when the header specifies MULTIPLE but an individual particle does not specify its type, the particle does not match
        """
        ftdp = FakeDataParticle([])

        # yml file is missing type in individual particle (stream)
        rs = ResultSet(TEST_PATH + 'missing_type_multiple.yml')
        # this should fail
        if rs.verify([ftdp]):
            self.fail("Should have failed particle verification, but verification passed")
Example 20
    def test_not_data_particle(self):
        """
        Test that a class that is not a data particle is not accepted
        """
        ndp = NotDataParticle()

        # class is not a subclass of DataParticle
        rs = ResultSet(TEST_PATH + 'not_data_particle.yml')
        # this should fail
        if rs.verify([ndp]):
            self.fail("Should have failed particle verification, but verification passed")
Example 21
    def test_multiple_bad_type_object(self):
        """
        Test that a bad type or bad object does not match
        """
        ftdp = FakeDataParticle([])

        # yml has bad type in individual particle (stream)
        rs = ResultSet(TEST_PATH + 'bad_type_multiple.yml')
        # this should fail
        if rs.verify([ftdp]):
            self.fail(
                "Should have failed particle verification, but verification passed"
            )

        # yml has bad class in individual particle (stream)
        rs = ResultSet(TEST_PATH + 'bad_class_multiple.yml')
        # this should fail
        if rs.verify([ftdp]):
            self.fail(
                "Should have failed particle verification, but verification passed"
            )
Example 22
    def test_timestamp(self):
        """
        Test that the timestamp string conversion is working
        """
        ftdp = FakeDataParticle([])

        # File contains 4 particles, each with different formatted timestamp string
        rs = ResultSet(TEST_PATH + 'timestamp_string.yml')
        # confirm all strings match
        if not rs.verify([ftdp, ftdp, ftdp, ftdp]):
            self.fail("Failed timestamp string verification")
Example 23
    def test_particle_dict_compare(self):
        """
        Test that a particle already converted to a dictionary can be compared
        """
        fdp = FakeDataParticle([])
        fdp_dict = fdp.generate_dict()

        # normal fake particle
        rs = ResultSet(TEST_PATH + 'fake_particle.yml')
        # expect this to pass
        if not rs.verify([fdp_dict]):
            self.fail("Failed particle verification")
Example 24
    def test_particle_dict_compare(self):
        """
        Test that a particle already converted to a dictionary can be compared
        """
        fdp = FakeDataParticle([])
        fdp_dict = fdp.generate_dict()

        # normal fake particle
        rs = ResultSet(TEST_PATH + 'fake_particle.yml')
        # expect this to pass
        if not rs.verify([fdp_dict]):
            self.fail("Failed particle verification")
Example 25
    def test_incorrect_length(self):
        """
        Test that a mismatch between the number of particles in the yml file and in the results causes verification to fail
        """
        ftdp = FakeDataParticle([])

        # only one particle in results file
        rs = ResultSet(TEST_PATH + 'fake_particle.yml')
        # compare to two, this should fail
        if rs.verify([ftdp, ftdp]):
            self.fail(
                "Should have failed particle verification, but verification passed"
            )
Example 26
    def test_not_data_particle(self):
        """
        Test that a class that is not a data particle is not accepted
        """
        ndp = NotDataParticle()

        # class is not a subclass of DataParticle
        rs = ResultSet(TEST_PATH + 'not_data_particle.yml')
        # this should fail
        if rs.verify([ndp]):
            self.fail(
                "Should have failed particle verification, but verification passed"
            )
Example 27
    def test_no_particle_timestamp(self):
        """
        Test that a class which has not set the particle timestamp does not match a .yml particle that includes one
        """
        ftdp = FakeNoTsParticle([])

        # .yml file contains timestamp, class does not
        rs = ResultSet(TEST_PATH + 'fake_no_ts_particle.yml')
        # this should fail
        if rs.verify([ftdp]):
            self.fail(
                "Should have failed particle verification, but verification passed"
            )
Example 28
    def test_missing_type_multiple(self):
        """
        Test that when the header specifies MULTIPLE but an individual particle does not specify its type, the particle does not match
        """
        ftdp = FakeDataParticle([])

        # yml file is missing type in individual particle (stream)
        rs = ResultSet(TEST_PATH + 'missing_type_multiple.yml')
        # this should fail
        if rs.verify([ftdp]):
            self.fail(
                "Should have failed particle verification, but verification passed"
            )
Example 29
    def test_simple_result_set(self):
        """
        Try the first result set with a single record.
        """
        rs = ResultSet(
            self._get_result_set_file(
                "record_set_files/test_data_1.txt.result.yml"))

        # Test the happy path
        base_timestamp = 3583861263.0
        particle_a = CtdpfParserDataParticle(
            "10.5914,  4.1870,  161.06,   2693.0",
            internal_timestamp=base_timestamp,
            new_sequence=True)
        particle_b = CtdpfParserDataParticle(
            "10.5915,  4.1871,  161.07,   2693.1",
            internal_timestamp=base_timestamp + 1)

        self.assertTrue(rs.verify([particle_a, particle_b]))
        self.assertIsNone(rs.report())

        # test record count mismatch
        self.assertFalse(rs.verify([particle_a]))
        self.assertIsNotNone(rs.report())

        # test out of order record
        self.assertFalse(rs.verify([particle_b, particle_a]))
        self.assertIsNotNone(rs.report())

        # test bad data record
        self.assertFalse(rs.verify([particle_a, particle_a]))
        self.assertIsNotNone(rs.report())

        # multiple data types in result
        self.assertFalse(rs.verify([particle_a, 'foo']))
        self.assertIsNotNone(rs.report())

        # stream name mismatch
        particle_a._data_particle_type = 'foo'
        particle_b._data_particle_type = 'foo'
        self.assertFalse(rs.verify([particle_a, particle_b]))
        self.assertIsNotNone(rs.report())

        # internal timestamp mismatch
        particle_a = CtdpfParserDataParticle(
            "10.5914,  4.1870,  161.06,   2693.0",
            internal_timestamp=base_timestamp + 1,
            new_sequence=True)
        particle_b = CtdpfParserDataParticle(
            "10.5915,  4.1871,  161.07,   2693.1",
            internal_timestamp=base_timestamp + 2)
        self.assertFalse(rs.verify([particle_a, particle_a]))
        self.assertIsNotNone(rs.report())
Example 30
    def test_simple_result_set(self):
        """
        Try the first result set with a single record.
        """
        rs = ResultSet(self._get_result_set_file("record_set_files/test_data_1.txt.result.yml"))

        # Test the happy path
        base_timestamp = 3583886463.0
        particle_a = CtdpfParserDataParticle(
            "10.5914,  4.1870,  161.06,   2693.0", internal_timestamp=base_timestamp, new_sequence=True
        )
        particle_b = CtdpfParserDataParticle(
            "10.5915,  4.1871,  161.07,   2693.1", internal_timestamp=base_timestamp + 1
        )

        self.assertTrue(rs.verify([particle_a, particle_b]))
        self.assertIsNone(rs.report())

        # test record count mismatch
        self.assertFalse(rs.verify([particle_a]))
        self.assertIsNotNone(rs.report())

        # test out of order record
        self.assertFalse(rs.verify([particle_b, particle_a]))
        self.assertIsNotNone(rs.report())

        # test bad data record
        self.assertFalse(rs.verify([particle_a, particle_a]))
        self.assertIsNotNone(rs.report())

        # multiple data types in result
        self.assertFalse(rs.verify([particle_a, "foo"]))
        self.assertIsNotNone(rs.report())

        # stream name mismatch
        particle_a._data_particle_type = "foo"
        particle_b._data_particle_type = "foo"
        self.assertFalse(rs.verify([particle_a, particle_b]))
        self.assertIsNotNone(rs.report())

        # internal timestamp mismatch
        particle_a = CtdpfParserDataParticle(
            "10.5914,  4.1870,  161.06,   2693.0", internal_timestamp=base_timestamp + 1, new_sequence=True
        )
        particle_b = CtdpfParserDataParticle(
            "10.5915,  4.1871,  161.07,   2693.1", internal_timestamp=base_timestamp + 2
        )
        self.assertFalse(rs.verify([particle_a, particle_a]))
        self.assertIsNotNone(rs.report())
Example 31
    def assert_data(self, particle_class, result_set_file=None, count=1, timeout=10):
        """
        Wait for a data particle in the data callback queue
        @param particle_class, class of the expected data particles
        @param result_set_file, filename containing definition of the resulting dataset
        @param count, how many records to wait for
        @param timeout, how long to wait for the records.
        """
        try:
            particles = self.get_samples(particle_class, count, timeout)
        except Timeout:
            log.error("Failed to detect particle %s, expected %d particles, found %d", particle_class, count, found)
            self.fail("particle detection failed. Expected %d, Found %d" % (count, found))

        # Verify the data against the result data set definition
        if result_set_file:
            rs_file = self._get_source_data_file(result_set_file)
            rs = ResultSet(rs_file)

            self.assertTrue(rs.verify(particles), msg="Failed data validation, check the logs.")
Example 32
    def assert_data(self,
                    particle_class,
                    result_set_file=None,
                    count=1,
                    timeout=10):
        """
        Wait for a data particle in the data callback queue
        @param particle_class, class of the expected data particles
        @param result_set_file, filename containing definition of the resulting dataset
        @param count, how many records to wait for
        @param timeout, how long to wait for the records.
        """
        to = gevent.Timeout(timeout)
        to.start()
        done = False

        try:
            while not done:
                found = 0
                for data in self.data_callback_result:
                    if isinstance(data, particle_class):
                        found += 1

                    if found == count:
                        done = True

                if not done:
                    log.debug("No particle detected yet, sleep for a bit")
                    gevent.sleep(1)
        except Timeout:
            log.error("Failed to detect particle %s", particle_class)
            self.fail("particle detection failed.")
        finally:
            to.cancel()

        # Verify the data against the result data set definition
        if result_set_file:
            rs_file = self._get_source_data_file(result_set_file)
            rs = ResultSet(rs_file)

            self.assertTrue(rs.verify(self.data_callback_result))
Example 33
    def assert_data(self, particle_class, result_set_file=None, count=1, timeout=10):
        """
        Wait for a data particle in the data callback queue
        @param particle_class, class of the expected data particles
        @param result_set_file, filename containing definition of the resulting dataset
        @param count, how many records to wait for
        @param timeout, how long to wait for the records.
        """
        to = gevent.Timeout(timeout)
        to.start()
        done = False

        try:
            while not done:
                found = 0
                for data in self.data_callback_result:
                    if isinstance(data, particle_class):
                        found += 1

                    if found == count:
                        done = True

                if not done:
                    log.debug("No particle detected yet, sleep for a bit")
                    gevent.sleep(1)
        except Timeout:
            log.error("Failed to detect particle %s", particle_class)
            self.fail("particle detection failed.")
        finally:
            to.cancel()

        # Verify the data against the result data set definition
        if result_set_file:
            rs_file = self._get_source_data_file(result_set_file)
            rs = ResultSet(rs_file)

            self.assertTrue(rs.verify(self.data_callback_result))
Example 34
    def test_fake_particle(self):
        """
        Create a fake data particle class and test that comparison either fails or passes as expected
        """
        fdp = FakeDataParticle([])

        # particle is missing internal_timestamp
        rs = ResultSet(TEST_PATH + 'missing_timestamp.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should be missing timestamp, but verification passed")

        # test with particle object and type in header
        rs = ResultSet(TEST_PATH + 'fake_particle.yml')
        # expect this to pass
        if not rs.verify([fdp]):
            self.fail("Failed particle verification")

        # test with MULTIPLE in particle object and type in header
        rs = ResultSet(TEST_PATH + 'fake_multiple.yml')
        # expect this to pass
        if not rs.verify([fdp]):
            self.fail("Failed particle verification")

        # particle class does not match
        rs = ResultSet(TEST_PATH + 'class_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have class mismatch, but verification passed")

        # particle stream does not match
        rs = ResultSet(TEST_PATH + 'stream_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have stream mismatch, but verification passed")

        # particle class does not match inside particle
        rs = ResultSet(TEST_PATH + 'class_mismatch_multiple.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have class mismatch, but verification passed")

        # particle stream does not match inside particle
        rs = ResultSet(TEST_PATH + 'stream_mismatch_multiple.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have stream mismatch, but verification passed")

        # particle timestamp does not match
        rs = ResultSet(TEST_PATH + 'timestamp_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have timestamp mismatch, but verification passed")

        # particle string does not match
        rs = ResultSet(TEST_PATH + 'string_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have string mismatch, but verification passed")

        # particle float does not match
        rs = ResultSet(TEST_PATH + 'float_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have float mismatch, but verification passed")

        # 2nd particle is empty
        rs = ResultSet(TEST_PATH + 'empty_particle.yml')
        # expect this to fail
        if rs.verify([fdp, fdp]):
            self.fail("Should have empty particle, but verification passed")

        # particle key does not match
        rs = ResultSet(TEST_PATH + 'bad_key_particle.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have key mismatch, but verification passed")
Example 35
    def test_fake_particle(self):
        """
        Create a fake data particle class and test that comparison either fails or passes as expected
        """
        fdp = FakeDataParticle([])

        # particle is missing internal_timestamp
        rs = ResultSet(TEST_PATH + 'missing_timestamp.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should be missing timestamp, but verification passed")

        # test with particle object and type in header
        rs = ResultSet(TEST_PATH + 'fake_particle.yml')
        # expect this to pass
        if not rs.verify([fdp]):
            self.fail("Failed particle verification")

        # test with MULTIPLE in particle object and type in header
        rs = ResultSet(TEST_PATH + 'fake_multiple.yml')
        # expect this to pass
        if not rs.verify([fdp]):
            self.fail("Failed particle verification")

        # particle class does not match
        rs = ResultSet(TEST_PATH + 'class_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have class mismatch, but verification passed")

        # particle stream does not match
        rs = ResultSet(TEST_PATH + 'stream_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have stream mismatch, but verification passed")

        # particle class does not match inside particle
        rs = ResultSet(TEST_PATH + 'class_mismatch_multiple.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have class mismatch, but verification passed")

        # particle stream does not match inside particle
        rs = ResultSet(TEST_PATH + 'stream_mismatch_multiple.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have stream mismatch, but verification passed")

        # particle timestamp does not match
        rs = ResultSet(TEST_PATH + 'timestamp_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail(
                "Should have timestamp mismatch, but verification passed")

        # particle string does not match
        rs = ResultSet(TEST_PATH + 'string_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have string mismatch, but verification passed")

        # particle float does not match
        rs = ResultSet(TEST_PATH + 'float_mismatch.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have float mismatch, but verification passed")

        # 2nd particle is empty
        rs = ResultSet(TEST_PATH + 'empty_particle.yml')
        # expect this to fail
        if rs.verify([fdp, fdp]):
            self.fail("Should have empty particle, but verification passed")

        # particle key does not match
        rs = ResultSet(TEST_PATH + 'bad_key_particle.yml')
        # expect this to fail
        if rs.verify([fdp]):
            self.fail("Should have key mismatch, but verification passed")