def test_valid(self):
    """Testing function valid."""
    # Start each run from a freshly initialized database
    unittest_db.initialize_db()

    # A well-formed data dict must validate successfully
    validator = validate.ValidateCache(data=self.data)
    self.assertEqual(validator.valid(), True)

    # A bare string is not acceptable cache data
    validator = validate.ValidateCache(data='string')
    self.assertEqual(validator.valid(), False)
    def test_getinfo(self):
        """Testing function getinfo.

        Verifies that getinfo() echoes back every key/value pair of the
        validated input data unchanged.
        """
        # Drop the database and create tables
        unittest_db.initialize_db()

        # Validate known-good data and retrieve its parsed contents
        result = validate.ValidateCache(data=self.data)
        data_dict = result.getinfo()

        # Every key in the source data must be reported back unchanged.
        # Iterate keys directly (the values yielded by .items() were unused).
        for key in self.data:
            self.assertEqual(self.data[key], data_dict[key])
# Example #3
    def __init__(self, filename):
        """Method initializing the class.

        Validates the cache file, then ingests its 'timeseries' and
        'timefixed' sections into self._information (grouped by data type
        and base type) and records one source entry per datapoint in
        self._sources. If validation fails, the instance is left with
        self.validated False and empty data structures.

        Args:
            filename: Cache filename

        Returns:
            None

        """
        # Initialize key variables
        self.filename = filename
        self._information = defaultdict(lambda: defaultdict(dict))
        self._sources = []
        self.validated = False
        self.agent_meta = {}
        data_types = ['timeseries', 'timefixed']

        # Ingest data
        validator = validate.ValidateCache(filename)
        information = validator.getinfo()

        # Log and abandon processing early if the data is bad.
        # (Early return replaces the former redundant
        # `else: ... if self.validated is True:` nesting.)
        if information is False:
            log_message = ('Cache ingest file %s is invalid.') % (filename)
            log.log2warning(1051, log_message)
            return
        self.validated = True

        # Get main keys common to every datapoint in this file
        self.agent_meta = _main_keys(information)
        timestamp = self.agent_meta['timestamp']
        id_agent = self.agent_meta['id_agent']

        # Process each supported data type
        for data_type in data_types:
            # Skip if data type isn't in the data
            if data_type not in information:
                continue

            # Process the data type in a deterministic (sorted) order
            for agent_label, label_dict in sorted(
                    information[data_type].items()):
                # Get universal parameters for label_dict
                base_type = _base_type(label_dict['base_type'])
                description = label_dict['description']

                # Create a key in the data based on the base_type
                if base_type not in self._information[data_type]:
                    self._information[data_type][base_type] = []

                # Process the data associated with the agent_label
                for datapoint in label_dict['data']:
                    # Create a unique, unchangeable id_datapoint for data
                    index = datapoint[0]
                    value = datapoint[1]
                    source = datapoint[2]
                    id_datapoint = _id_datapoint(
                        id_agent, agent_label, index,
                        self.agent_meta['agent'],
                        self.agent_meta['devicename'])

                    # Convert values to float if this is
                    # data that could be charted
                    if base_type is not None:
                        value = float(value)

                    # Update the data
                    self._information[data_type][base_type].append({
                        'id_agent': id_agent,
                        'id_datapoint': id_datapoint,
                        'value': value,
                        'timestamp': timestamp,
                    })

                    # Update sources after fixing encoding
                    self._sources.append({
                        'id_agent': id_agent,
                        'id_datapoint': id_datapoint,
                        'agent_label': agent_label,
                        'agent_source': source,
                        'description': description,
                        'base_type': base_type,
                    })
    def test___init__(self):
        """Testing function __init__."""
        # Build a unique cache filename inside a scratch directory
        directory = tempfile.mkdtemp()
        id_agent = self.data['id_agent']
        last_timestamp = self.data['timestamp']
        filepath = '%s/%s_%s_%s.json' % (
            directory,
            last_timestamp,
            id_agent,
            general.hashstring(general.randomstring()))

        # Start from a pristine database
        unittest_db.initialize_db()

        # A well-formed data dict passes validation
        self.assertEqual(
            validate.ValidateCache(data=self.data).valid(), True)

        # A bare string fails validation
        self.assertEqual(
            validate.ValidateCache(data='string').valid(), False)

        # Data missing the 'agent' key fails validation
        incomplete = copy.deepcopy(self.data)
        incomplete.pop('agent', None)
        self.assertEqual(
            validate.ValidateCache(data=incomplete).valid(), False)

        # Good data read back from a file passes validation
        with open(filepath, 'w') as f_handle:
            json.dump(self.data, f_handle)
        self.assertEqual(
            validate.ValidateCache(filepath=filepath).valid(), True)

        #################################################################
        # Once the same data is recorded in the DeviceAgent table,
        # re-validating the identical file must fail: duplicate inserts
        # are rejected.
        #################################################################
        unittest_db.setup_db_deviceagent(self.data)
        with open(filepath, 'w') as f_handle:
            json.dump(self.data, f_handle)
        self.assertEqual(
            validate.ValidateCache(filepath=filepath).valid(), False)

        #################################################################
        # Reset the database, then confirm that invalid data stored in a
        # file also fails validation.
        #################################################################
        unittest_db.initialize_db()
        with open(filepath, 'w') as f_handle:
            json.dump(incomplete, f_handle)
        self.assertEqual(
            validate.ValidateCache(filepath=filepath).valid(), False)

        # Cleanup
        os.remove(filepath)
        os.removedirs(directory)