def test_jsonify(self, mock_reader):
    """Verifies keys/values are properly set in the json body."""
    mock_reader.TransactionReader = MockReader
    body = json.jsonify({'mint': {}, 'net_sum': self.netsum_config},
                        '/tmp/notreal')
    self.assertEqual(len(body), 7, msg=body)
    self.assertEqual(body[-1]['measurement'], 'net_sum')
    self.assertEqual(body[-1]['fields']['value'], 2.25)
    measurement_keys = [
        DATA[0][1], DATA[0][2], DATA[0][5],
        DATA[1][1], DATA[1][2], DATA[1][5]
    ]
    # Make a measurement counter and tally how often each key appears.
    measurement_counter = dict()
    for key in measurement_keys:
        measurement_counter[key] = 0
    for entry in body:
        for key in measurement_keys:
            if entry['measurement'] == key:
                measurement_counter[key] += 1
    # Each expected measurement should appear exactly once.
    for key in measurement_keys:
        self.assertEqual(measurement_counter[key], 1)
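# For reference, each entry in the body returned by jsonify() is an
# InfluxDB-style point. A minimal sketch of the shape the assertions above
# rely on (only 'measurement' and 'fields' are confirmed by these tests; any
# other keys, such as tags or timestamps, are assumptions):
#
#     {
#         'measurement': 'net_sum',
#         'fields': {'value': 2.25},
#     }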
def test_no_archive_dir(self, mock_reader):
    """Verify no errors are raised when no archive directory is configured."""
    mock_reader.TransactionReader = MockReader
    config = {
        'mint': {
            'directory': '/foo',
            'archive': None
        }
    }
    body = json.jsonify(config, '/foo/bar.csv')
    self.assertEqual(len(body), 6)
def test_archive_single_file_custom_dir(self, mock_reader):
    """Verify archiving a single file to a custom directory raises no errors."""
    mock_reader.TransactionReader = MockReader
    config = {
        'mint': {
            'file': '/foo.csv',
            'archive': {
                'directory': '/tmp'
            }
        }
    }
    body = json.jsonify(config, '/foo.csv')
    self.assertEqual(len(body), 6)
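# Taken together, these tests cover the 'mint' config shapes jsonify() is
# expected to accept: a 'directory' or a single 'file' as the transaction
# source, and an 'archive' block that may be None or specify its own
# 'directory'. Other keys are not exercised here.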
def influxdb_write(config, client, source, db_skip=False):
    """Reads source data and writes it to the client.

    When db_skip is set, the payload is dumped to <source>.json instead of
    being written to the database.
    """
    json_body = jsonify(config, source)
    if db_skip:
        LOGGER.warning("Skipping database write.")
        # Dump the payload to <source>.json so it can be inspected.
        with open('{}.json'.format(source), 'w') as outfile:
            json.dump(json_body, outfile)
        LOGGER.info("Data sent to %s.json", source)
        return True
    client.write_data(json_body)
    return True
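# A minimal sketch of a client object exposing the write_data() interface used
# by influxdb_write() above, assuming the official `influxdb` Python package.
# The class name and the host/port/database values are hypothetical
# placeholders, not part of this project.
from influxdb import InfluxDBClient


class SimpleInfluxClient:
    """Thin wrapper mapping write_data() onto InfluxDBClient.write_points()."""

    def __init__(self, host='localhost', port=8086, database='transactions'):
        self._client = InfluxDBClient(host=host, port=port, database=database)

    def write_data(self, json_body):
        # write_points() accepts the same list-of-points structure that
        # jsonify() builds (dicts with 'measurement' and 'fields' keys).
        self._client.write_points(json_body)

# Illustrative call (paths and config are placeholders):
#     influxdb_write(config, SimpleInfluxClient(), '/tmp/transactions.csv')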