def test_log_successful_export(self):
    """
    Verify that a successful export is recorded in the export history
    table, then remove the test row that was written.
    """
    # @REVIEWED
    self.assertTrue(
        self.exporter.logSuccessfulExport(name='test_export',
                                          url='http://test_url',
                                          datetime=0, size=100))

    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()

    # The logged row is identified by the epoch-zero timestamp used above.
    self.assertTrue(
        dbUtil.executeSQL(cursor,
                          'select * from "ExportHistory" where '
                          'timestamp = '
                          'to_timestamp(0)'))
    self.assertEqual(len(cursor.fetchall()), 1,
                     "There should only be one result row.")

    # Clean up: remove the test record and commit the delete.
    self.assertTrue(
        dbUtil.executeSQL(cursor,
                          'delete from "ExportHistory" where '
                          'timestamp = to_timestamp(0)'))
    conn.commit()
def setUp(self):
    # Collaborators under test; the parser runs in testing mode so the
    # test DB is used.
    self.dupeChecker = MECODupeChecker()
    self.p = MECOXMLParser(True)
    self.dbUtil = MSGDBUtil()

    # Test-DB connection and cursor.
    self.dbConnect = MSGDBConnector(True)
    self.conn = self.dbConnect.connectDB()
    self.cur = self.conn.cursor()
def test_log_successful_export(self):
    """
    Exercise export-history logging: a successful export must produce
    exactly one row keyed by its timestamp, which is then removed.
    """
    # @REVIEWED
    self.assertTrue(
        self.exporter.logSuccessfulExport(name='test_export',
                                          url='http://test_url',
                                          datetime=0, size=100))

    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()

    # Look the row up by the epoch-zero timestamp written above.
    self.assertTrue(
        dbUtil.executeSQL(
            cursor, 'select * from "ExportHistory" where '
                    'timestamp = '
                    'to_timestamp(0)'))
    self.assertEqual(len(cursor.fetchall()), 1,
                     "There should only be one result row.")

    # Remove the test record so the table is left unchanged.
    self.assertTrue(
        dbUtil.executeSQL(
            cursor, 'delete from "ExportHistory" where '
                    'timestamp = to_timestamp(0)'))
    conn.commit()
def __init__(self):
    """
    Constructor.

    Emits a DeprecationWarning: this notifier has been superseded by
    SEKNotifier.
    """
    warnings.simplefilter('default')
    warnings.warn("This module is deprecated in favor of SEKNotifier.",
                  DeprecationWarning)

    self.config = MSGConfiger()
    self.logger = SEKLogger(__name__, 'info')

    # DB access used for recording notification history.
    self.connector = MSGDBConnector()
    self.conn = self.connector.connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
    self.noticeTable = 'NotificationHistory'

    # Boilerplate prepended/appended to every outgoing message.
    self.notificationHeader = "This is a message from the Hawaii Smart " \
                              "Energy Project MSG Project notification " \
                              "system.\n\n"
    self.noReplyNotice = '\n\nThis email account is not monitored. No ' \
                         'replies will originate from this ' \
                         'account.\n\nYou are receiving this message ' \
                         'because you are on the recipient list for ' \
                         'notifications for the Hawaii Smart Energy ' \
                         'Project.'
class MECODBReader(object): """ Read records from a database. """ def __init__(self, testing = False): """ Constructor. :param testing: True if in testing mode. """ self.connector = MSGDBConnector() self.conn = MSGDBConnector(testing).connectDB() self.dbUtil = MSGDBUtil() self.dbName = self.dbUtil.getDBName(self.connector.dictCur) def selectRecord(self, conn, table, keyName, keyValue): """ Read a record in the database given a table name, primary key name, and value for the key. :param conn DB connection :param table DB table name :param keyName DB column name for primary key :param keyValue Value to be matched :returns: Row containing record data. """ print "selectRecord:" sql = """SELECT * FROM "%s" WHERE %s = %s""" % ( table, keyName, keyValue) dcur = conn.cursor(cursor_factory = psycopg2.extras.DictCursor) self.dbUtil.executeSQL(dcur, sql) row = dcur.fetchone() return row def readingAndMeterCounts(self): """ Retrieve the reading and meter counts. :returns: Multiple lists containing the retrieved data. """ sql = """SELECT "Day", "Reading Count", "Meter Count" FROM count_of_readings_and_meters_by_day""" dcur = self.conn.cursor(cursor_factory = psycopg2.extras.DictCursor) self.dbUtil.executeSQL(dcur, sql) rows = dcur.fetchall() dates = [] meterCounts = [] readingCounts = [] for row in rows: dates.append(row[0]) readingCounts.append(row[1] / row[2]) meterCounts.append(row[2]) return dates, readingCounts, meterCounts
class MECODBDeleter(object):
    """
    Provide delete routines for MECO DB.
    """

    def __init__(self):
        """
        Constructor.
        """
        self.dbUtil = MSGDBUtil()

    def deleteRecord(self, conn, tableName, idText, idValue):
        """
        Delete record from DB where record has an int-based serial number.

        param: tableName
        param: idText DB column name for record ID
        param: idValue Value of the ID to be deleted
        """
        deleteSQL = """DELETE FROM "{}" where {} = {}""".format(tableName,
                                                                idText,
                                                                idValue)
        cursor = conn.cursor(cursor_factory = psycopg2.extras.DictCursor)
        self.dbUtil.executeSQL(cursor, deleteSQL)
        # Deletion is committed immediately.
        conn.commit()
def __init__(self):
    """
    Constructor.
    """
    self.logger = SEKLogger(__name__, 'DEBUG')
    # BUG FIX: retain the connection object. The original kept only the
    # cursor, so the connection could never be committed or closed and
    # was effectively leaked.
    self.conn = MSGDBConnector().connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
def __init__(self):
    """
    Constructor.
    """
    self.logger = SEKLogger(__name__, 'debug')
    self.dbUtil = MSGDBUtil()
    # Mapping and duplicate checking collaborators for MECO data.
    self.mapper = MECOMapper()
    self.dupeChecker = MECODupeChecker()
def __init__(self, testing=False):
    """
    Constructor.

    :param testing: True if testing mode is being used.
    """
    # NOTE(review): the testing flag is currently unused here -- confirm
    # whether it should be forwarded to the collaborators below.
    self.logger = SEKLogger(__name__, 'info')
    self.dupeChecker = MSGWeatherDataDupeChecker()
    self.dbUtil = MSGDBUtil()
def __init__(self, testing=False):
    """
    Constructor.

    :param testing: Flag for testing mode.
    """
    # NOTE(review): testing is accepted but not used by this constructor.
    self.dbUtil = MSGDBUtil()
    self.logger = SEKLogger(__name__, 'debug')
def __init__(self):
    """
    Constructor.
    """
    self.logger = SEKLogger(__name__, 'debug')
    self.dbUtil = MSGDBUtil()
    self.mecoConfig = MSGConfiger()
    # ID of the reading currently being processed; starts at zero.
    self.currentReadingID = 0
def setUp(self):
    # Reader under test plus insert/delete helpers used by fixtures.
    self.reader = MECODBReader()
    self.inserter = MECODBInserter()
    self.deleter = MECODBDeleter()
    self.util = MSGDBUtil()

    # Test-DB connection.
    self.connector = MSGDBConnector(True)
    self.conn = self.connector.connectDB()

    self.lastSeqVal = None
    self.tableName = 'MeterData'
    self.colName = 'meter_data_id'
class TestMECODupeChecker(unittest.TestCase):
    """
    Unit tests for duplicate checking.
    """

    def setUp(self):
        self.dupeChecker = MECODupeChecker()
        self.p = MECOXMLParser(True) # run in testing mode
        self.dbConnect = MSGDBConnector(True)
        self.dbUtil = MSGDBUtil()
        self.conn = self.dbConnect.connectDB()
        self.cur = self.conn.cursor()

    def testInit(self):
        self.assertEqual(self.dupeChecker.__class__.__name__,
                         "MECODupeChecker",
                         "Dupe checker has been created.")

    def testFindIndividualDupe(self):
        """
        Find a duplicate record when only one exists.
        """
        self.dbUtil.eraseTestMeco()
        self.p.filename = "../../test-data/meco_v3-energy-test-data.xml"
        # BUG FIX: use a context manager so the data file is always
        # closed; the original leaked the file handle.
        with open(self.p.filename, "rb") as fileObject:
            self.p.parseXML(fileObject, True)
        self.assertTrue(
            self.dupeChecker.readingBranchDupeExists(self.conn, '100000',
                                                     '2013-04-08 00:30:00',
                                                     '1', True),
            "Record should already exist")

    def testLoadOnTop(self):
        """
        If the same data set is loaded in succession, all values will be
        duplicated. Verify that this is true.

        This is no longer possible as duplicates are dropped before
        insertion.
        """
        pass

    def testLoadSingleMissingEntry(self):
        """
        A reading will be inserted into the database where the reading
        does not currently exist as determined by the
        MeterName-IntervalEndTime-Channel tuple.
        """
        pass

    def tearDown(self):
        self.dbConnect.closeDB(self.conn)
def __init__(self):
    """
    Constructor.
    """
    # Removed stray debug "print __name__"; the logger below already
    # records the module name.
    self.logger = SEKLogger(__name__)
    self.connector = MSGDBConnector()
    self.dbUtil = MSGDBUtil()
    self.notifier = MSGNotifier()
    self.configer = MSGConfiger()
def __init__(self, testing = False):
    """
    Constructor.

    :param testing: True if in testing mode.
    """
    # BUG FIX: a single connector is created so the testing flag also
    # governs the dictionary cursor. Previously a second, untracked
    # production connector was created and its connection leaked.
    self.connector = MSGDBConnector(testing)
    self.conn = self.connector.connectDB()
    self.dbUtil = MSGDBUtil()
    self.dbName = self.dbUtil.getDBName(self.connector.dictCur)
def setUp(self):
    # Inserter under test plus helper utilities.
    self.i = MECODBInserter()
    self.util = MSGDBUtil()
    self.deleter = MECODBDeleter()
    self.reader = MECODBReader()

    # Test-DB connection.
    self.connector = MSGDBConnector(True)
    self.conn = self.connector.connectDB()

    self.lastSeqVal = None
    self.sampleTableName = 'MeterData'
    self.sampleDict = {'MeterName': '100001',
                       'UtilDeviceID': '100001',
                       'MacID': '00:00:00:00:00:00:00:00'}
    self.keyName = 'meter_data_id'
def __init__(self, exitOnError=True, commitOnEveryInsert=False,
             testing=False):
    """
    Constructor.

    :param exitOnError: If True, a failed DB operation is fatal.
    :param commitOnEveryInsert: If True, commit after each insert instead
        of batching commits.
    :param testing: if True, the testing DB will be connected instead of
        the production DB.
    """
    self.logger = SEKLogger(__name__, 'info')
    self.configer = MSGConfiger()
    # BUG FIX: forward the testing flag; it was previously accepted but
    # ignored, so the production DB was always connected.
    self.conn = MSGDBConnector(testing).connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
    self.notifier = MSGNotifier()
    self.mathUtil = MSGMathUtil()
    self.timeUtil = MSGTimeUtil()
    self.nextMinuteCrossing = {}
    self.nextMinuteCrossingWithoutSubkeys = None
    self.exitOnError = exitOnError
    self.commitOnEveryInsert = commitOnEveryInsert

    section = 'Aggregation'
    tableList = [
        'irradiance', 'agg_irradiance', 'weather', 'agg_weather',
        'circuit', 'agg_circuit', 'egauge', 'agg_egauge'
    ]
    # Maps a raw data type to (aggregation table, time column, subkey
    # column); an empty subkey means the type has no sub-series.
    self.dataParams = {
        'weather': ('agg_weather', 'timestamp', ''),
        'egauge': ('agg_egauge', 'datetime', 'egauge_id'),
        'circuit': ('agg_circuit', 'timestamp', 'circuit'),
        'irradiance': ('agg_irradiance', 'timestamp', 'sensor_id')
    }
    self.columns = {}

    # tables[datatype] gives the table name for datatype.
    self.tables = {
        t: self.configer.configOptionValue(section, '{}_table'.format(t))
        for t in tableList
    }

    for t in self.tables.keys():
        self.logger.log('t:{}'.format(t), 'DEBUG')
        try:
            self.columns[t] = self.dbUtil.columnsString(
                self.cursor, self.tables[t])
        except TypeError as error:
            # A missing table yields a TypeError from columnsString;
            # log and continue rather than abort.
            self.logger.log(
                'Ignoring missing table: Error is {}.'.format(error),
                'error')
class TestMECOXMLParser(unittest.TestCase): """ Unit tests for MECO XML Parser. """ def setUp(self): self.p = MECOXMLParser(True) # run in testing mode self.dbConnect = MSGDBConnector(True) self.dbUtil = MSGDBUtil() self.conn = self.dbConnect.connectDB() self.cur = self.conn.cursor() def testMECOXMLParserCanBeInited(self): self.assertIsNotNone(self.p) def testEveryElementIsVisited(self): self.dbUtil.eraseTestMeco() self.p.filename = "../../test-data/meco_v3-energy-test-data.xml" fileObject = open(self.p.filename, "rb") expectedCount = 125 self.p.parseXML(fileObject, True) print "element count = %s" % self.p.processForInsertElementCount self.assertEqual(self.p.processForInsertElementCount, expectedCount) def testAllTableNamesArePresent(self): self.dbUtil.eraseTestMeco() self.p.filename = "../../test-data/meco_v3-energy-test-data.xml" fileObject = open(self.p.filename, "rb") self.p.parseXML(fileObject, True) fail = False for key in self.p.tableNameCount.keys(): print key + ": ", print self.p.tableNameCount[key] if self.p.tableNameCount[key] < 1: if key != 'ChannelStatus' and key != 'IntervalStatus' and key \ != 'EventData' and key != 'Event': print "table = %s" % key fail = True self.assertFalse(fail, "At least one table of each type should have been " "encountered.") def tearDown(self): self.dbConnect.closeDB(self.conn)
class TestMECOXMLParser(unittest.TestCase): """ Unit tests for MECO XML Parser. """ def setUp(self): self.p = MECOXMLParser(True) # run in testing mode self.dbConnect = MSGDBConnector(True) self.dbUtil = MSGDBUtil() self.conn = self.dbConnect.connectDB() self.cur = self.conn.cursor() def testMECOXMLParserCanBeInited(self): self.assertIsNotNone(self.p) def testEveryElementIsVisited(self): self.dbUtil.eraseTestMeco() self.p.filename = "../../test-data/meco_v3-energy-test-data.xml" fileObject = open(self.p.filename, "rb") expectedCount = 125 self.p.parseXML(fileObject, True) print "element count = %s" % self.p.processForInsertElementCount self.assertEqual(self.p.processForInsertElementCount, expectedCount) def testAllTableNamesArePresent(self): self.dbUtil.eraseTestMeco() self.p.filename = "../../test-data/meco_v3-energy-test-data.xml" fileObject = open(self.p.filename, "rb") self.p.parseXML(fileObject, True) fail = False for key in self.p.tableNameCount.keys(): print key + ": ", print self.p.tableNameCount[key] if self.p.tableNameCount[key] < 1: if key != 'ChannelStatus' and key != 'IntervalStatus' and key \ != 'EventData' and key != 'Event': print "table = %s" % key fail = True self.assertFalse( fail, "At least one table of each type should have been " "encountered.") def tearDown(self): self.dbConnect.closeDB(self.conn)
class MSGWeatherDataDupeChecker(object):
    """
    Determine if a duplicate record exists based on the tuple (WBAN, Date,
    Time, StationType).
    """

    def __init__(self, testing = False):
        """
        Constructor.

        :param testing: Flag for testing mode.
        """
        self.logger = SEKLogger(__name__, 'debug')
        self.dbUtil = MSGDBUtil()

    def duplicateExists(self, dbCursor, wban, datetime, recordType):
        """
        Check for the existence of a duplicate record.

        :param dbCursor
        :param wban
        :param datetime
        :param recordType
        :returns: True if a duplicate record exists, otherwise False.
        """
        tableName = "WeatherNOAA"
        sql = """SELECT wban, datetime, record_type FROM \"%s\" WHERE
                 wban = '%s' AND datetime = '%s' AND record_type = '%s'""" % (
            tableName, wban, datetime, recordType)

        self.logger.log("sql=%s" % sql, 'debug')
        self.logger.log("wban=%s, datetime=%s, record_type=%s" % (
            wban, datetime, recordType), 'debug')

        self.dbUtil.executeSQL(dbCursor, sql)
        # An empty result set means no duplicate is present.
        return len(dbCursor.fetchall()) > 0
def __init__(self):
    """
    Constructor.

    A database connection is not maintained here to keep this class
    lightweight.
    """
    # NOTE(review): DEBUG is a bare name here while sibling constructors
    # pass a string level such as 'debug' -- confirm DEBUG is imported.
    self.logger = SEKLogger(__name__, DEBUG)
    self.configer = MSGConfiger()
    # Config-driven source URL and filename pattern for weather data.
    self.url = self.configer.configOptionValue('Weather Data',
                                               'weather_data_url')
    self.pattern = self.configer.configOptionValue('Weather Data',
                                                   'weather_data_pattern')
    self.fileList = []
    self.dateList = [] # List of dates corresponding weather data files.
    self.fillFileListAndDateList()
    self.dbUtil = MSGDBUtil()
def __init__(self, testing = False):
    """
    Constructor.

    :param testing: Flag for testing mode.
    """
    # NOTE(review): the testing flag is currently unused by this
    # constructor.
    self.dbUtil = MSGDBUtil()
    self.logger = SEKLogger(__name__, 'debug')
def __init__(self):
    """
    Constructor.
    """
    # NOTE(review): a sibling variant of this constructor uses SEKLogger;
    # confirm MSGLogger here is intentional and not a rename leftover.
    self.logger = MSGLogger(__name__, 'debug')
    self.dbUtil = MSGDBUtil()
    self.mapper = MECOMapper()
    self.dupeChecker = MECODupeChecker()
def __init__(self, testing = False):
    """
    Constructor.

    :param testing: True if testing mode is being used.
    """
    # NOTE(review): testing is accepted but not forwarded anywhere.
    self.logger = SEKLogger(__name__, 'info')
    self.dupeChecker = MSGWeatherDataDupeChecker()
    self.dbUtil = MSGDBUtil()
class MSGWeatherDataDupeChecker(object):
    """
    Determine if a duplicate record exists based on the tuple (WBAN, Date,
    Time, StationType).
    """

    def __init__(self, testing=False):
        """
        Constructor.

        :param testing: Flag for testing mode.
        """
        self.logger = SEKLogger(__name__, 'debug')
        self.dbUtil = MSGDBUtil()

    def duplicateExists(self, dbCursor, wban, datetime, recordType):
        """
        Check for the existence of a duplicate record.

        :param dbCursor
        :param wban
        :param datetime
        :param recordType
        :returns: True if a duplicate record exists, otherwise False.
        """
        tableName = "WeatherNOAA"
        sql = """SELECT wban, datetime, record_type FROM \"%s\" WHERE
                 wban = '%s' AND datetime = '%s' AND record_type = '%s'""" % (
            tableName, wban, datetime, recordType)

        self.logger.log("sql=%s" % sql, 'debug')
        self.logger.log(
            "wban=%s, datetime=%s, record_type=%s" % (wban, datetime,
                                                      recordType), 'debug')

        self.dbUtil.executeSQL(dbCursor, sql)
        rows = dbCursor.fetchall()
        # Any returned row indicates a duplicate.
        return bool(rows)
def setUp(self):
    self.i = MECODBInserter()

    # Connect to the testing database.
    self.connector = MSGDBConnector(testing=True)
    self.conn = self.connector.connectDB()
    self.cursor = self.conn.cursor()

    # Does this work having the dictCur be in another class?
    self.dictCur = self.connector.dictCur

    self.deleter = MECODBDeleter()
    self.lastSeqVal = None
    self.tableName = 'MeterData'
    self.columnName = 'meter_data_id'
    self.configer = MSGConfiger()
    self.logger = SEKLogger(__name__, 'debug')
    self.dbUtil = MSGDBUtil()
def __init__(self):
    """
    Constructor.
    """
    # NOTE(review): a sibling variant uses SEKLogger; confirm MSGLogger
    # is still the intended logging class here.
    self.logger = MSGLogger(__name__, 'DEBUG')
    self.aggregator = MSGDataAggregator()
    self.notifier = MSGNotifier()
    # Raw data categories to be aggregated.
    self.rawTypes = ['weather', 'egauge', 'circuit', 'irradiance']

    self.connector = MSGDBConnector()
    self.conn = self.connector.connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
def __init__(self):
    """
    Constructor.
    """
    self.logger = SEKLogger(__name__, 'DEBUG')
    self.aggregator = MSGDataAggregator()
    self.notifier = MSGNotifier()
    # One raw-type name per member of the aggregation-type enum.
    self.rawTypes = [member.name for member in MSGAggregationTypes]

    self.connector = MSGDBConnector()
    self.conn = self.connector.connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
def countOfDBExports(self, since = None):
    """
    Count rows in ExportHistory with a timestamp newer than the given
    datetime.

    :param since: datetime indicating last export datetime.
    :return: Int of count of exports.
    """
    # BUG FIX: the format previously used '%H:%S' (hour/second), which
    # silently dropped the minutes; '%H:%M' matches the 'HH:MM' strings
    # actually parsed and formatted here.
    myDatetime = lambda x: datetime.datetime.strptime(x, '%Y-%m-%d %H:%M')
    if not since:
        # Default to a date preceding all exports.
        since = myDatetime('1900-01-01 00:00')
    self.logger.log(since.strftime('%Y-%m-%d %H:%M'), 'DEBUG')

    sql = 'SELECT COUNT("public"."ExportHistory"."timestamp") FROM ' \
          '"public"."ExportHistory" WHERE "timestamp" > \'{}\''.format(
        since.strftime('%Y-%m-%d %H:%M'))

    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()

    rows = None
    if dbUtil.executeSQL(cursor, sql, exitOnFail = False):
        rows = cursor.fetchall()
    # COUNT(*) always yields exactly one row when the query succeeds.
    assert len(rows) == 1, 'Invalid return value.'
    return rows[0][0]
def countOfDBExports(self, since=None):
    """
    Count rows in ExportHistory with a timestamp newer than the given
    datetime.

    :param since: datetime indicating last export datetime.
    :return: Int of count of exports.
    """
    # BUG FIX: '%H:%S' (hour/second) silently dropped the minutes;
    # '%H:%M' matches the 'HH:MM' strings parsed and formatted here.
    myDatetime = lambda x: datetime.datetime.strptime(x, '%Y-%m-%d %H:%M')
    if not since:
        # Default to a date preceding all exports.
        since = myDatetime('1900-01-01 00:00')
    self.logger.log(since.strftime('%Y-%m-%d %H:%M'), 'DEBUG')

    sql = 'SELECT COUNT("public"."ExportHistory"."timestamp") FROM ' \
          '"public"."ExportHistory" WHERE "timestamp" > \'{}\''.format(
        since.strftime('%Y-%m-%d %H:%M'))

    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()

    rows = None
    if dbUtil.executeSQL(cursor, sql, exitOnFail=False):
        rows = cursor.fetchall()
    # COUNT(*) always yields exactly one row when the query succeeds.
    assert len(rows) == 1, 'Invalid return value.'
    return rows[0][0]
def __init__(self):
    """
    Constructor.
    """
    self.logger = SEKLogger(__name__, 'DEBUG', useColor=False)
    self.timeUtil = MSGTimeUtil()
    self.configer = MSGConfiger()
    self.fileUtil = MSGFileUtil()
    self.pythonUtil = MSGPythonUtil()  # for debugging
    self.connector = MSGDBConnector()
    self.conn = self.connector.connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
    # Email notifier configured entirely from the Notifications config
    # section.
    self.notifier = SEKNotifier(
        connector=self.connector, dbUtil=self.dbUtil,
        user=self.configer.configOptionValue('Notifications',
                                             'email_username'),
        password=self.configer.configOptionValue('Notifications',
                                                 'email_password'),
        fromaddr=self.configer.configOptionValue('Notifications',
                                                 'email_from_address'),
        toaddr=self.configer.configOptionValue('Notifications',
                                               'email_recipients'),
        testing_toaddr=self.configer.configOptionValue(
            'Notifications', 'testing_email_recipients'),
        smtp_server_and_port=self.configer.configOptionValue(
            'Notifications', 'smtp_server_and_port'))

    # Google Drive parameters.
    self.clientID = self.configer.configOptionValue(
        'Export', 'google_api_client_id')
    self.clientSecret = self.configer.configOptionValue(
        'Export', 'google_api_client_secret')
    self.oauthScope = 'https://www.googleapis.com/auth/drive'
    self.oauthConsent = 'urn:ietf:wg:oauth:2.0:oob'
    self.googleAPICredentials = ''
    # Local scratch directory used while building export archives.
    self.exportTempWorkPath = self.configer.configOptionValue(
        'Export', 'db_export_work_path')

    self.credentialPath = self.configer.configOptionValue(
        'Export', 'google_api_credentials_path')
    self.credentialStorage = Storage('{}/google_api_credentials'.format(
        self.credentialPath))

    # Lazily-initialized handles for the Drive service and the cloud
    # file listing.
    self._driveService = None
    self._cloudFiles = None
    self.postAgent = 'Maui Smart Grid 1.0.0 DB Exporter'
    # Seconds to wait between retries of failed cloud operations.
    self.retryDelay = 10
    self.availableFilesURL = ''
def insertData(files, table, cols, testing = False):
    """
    Insert aggregated data generated by this script into a database table.

    :param files: A list of the filenames to be processed.
    :param table: The name of the table in the DB.
    :param cols: A list of the columns (as strings) in the table.
    :param testing: Specify whether to use test (false by default).
    """
    connector = MSGDBConnector()
    conn = connector.connectDB()
    dbUtil = MSGDBUtil()
    cursor = conn.cursor()
    rowCount = 0

    for filename in files:
        with open(filename, 'r') as csvfile:
            csvReader = csv.reader(csvfile, delimiter = ',')
            # Skip the header line.
            csvReader.next()
            for record in csvReader:
                # NOTE(review): values are interpolated directly into the
                # SQL; tolerable only because the input is generated by
                # this script, not user-supplied.
                sql = """INSERT INTO "%s" (%s) VALUES (%s)""" % (
                    table, ','.join(cols),
                    ','.join("'" + item.strip() + "'" for item in record))
                sql = sql.replace("'NULL'", 'NULL')

                dbUtil.executeSQL(cursor, sql)
                rowCount += 1
                # Commit in batches of 10000 to bound transaction size.
                if rowCount % 10000 == 0:
                    conn.commit()

    conn.commit()
    rowCount = 0
def logSuccessfulExport(self, name = '', url = '', datetime = 0, size = 0):
    """
    When an export has been successful, log information about the export
    to the database.

    The items to log include:
    * filename
    * URL
    * timestamp
    * filesize

    :param name: String
    :param url: String
    :param datetime:
    :param size: Int
    :return: True if no errors occurred, else False.
    """

    def exportHistoryColumns():
        return ['name', 'url', 'timestamp', 'size']

    # Epoch zero is written via to_timestamp(0); anything else is passed
    # through as a SQL timestamp literal.
    if datetime == 0:
        timestampSQL = 'to_timestamp(0)'
    else:
        timestampSQL = "timestamp '{}'".format(datetime)

    sql = 'INSERT INTO "{0}" ({1}) VALUES ({2}, {3}, {4}, {5})'.format(
        self.configer.configOptionValue('Export', 'export_history_table'),
        ','.join(exportHistoryColumns()), "'" + name + "'",
        "'" + url + "'", timestampSQL, size)

    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()

    result = dbUtil.executeSQL(cursor, sql, exitOnFail = False)
    conn.commit()
    return result
def insertData(files, table, cols):
    """
    Insert aggregated data generated by this script into a database table.

    :param files: A list of the filenames to be processed.
    :param table: The name of the table in the DB.
    :param cols: A list of the columns (as strings) in the table.
    """
    # DOC FIX: removed the ":param testing" entry that documented a
    # parameter this function does not have.
    connector = MSGDBConnector()
    conn = connector.connectDB()
    dbUtil = MSGDBUtil()
    cursor = conn.cursor()
    cnt = 0

    for file in files:
        with open(file, 'rb') as csvfile:
            myReader = csv.reader(csvfile, delimiter=',')
            # Skip the header line.
            myReader.next()
            for row in myReader:
                # Removed per-row debug "print row"; it flooded stdout
                # and slowed large imports.
                sql = """INSERT INTO "%s" (%s) VALUES (%s)""" % (
                    table, ','.join(cols),
                    ','.join("'" + item.strip() + "'" for item in row))
                sql = sql.replace("'NULL'", 'NULL')

                dbUtil.executeSQL(cursor, sql)
                cnt += 1
                # Commit in batches of 10000 to bound transaction size.
                if cnt % 10000 == 0:
                    conn.commit()

    conn.commit()
    cnt = 0
def __init__(self):
    """
    Constructor.
    """
    self.logger = SEKLogger(__name__, 'DEBUG')
    # Aggregator configured to continue on errors and commit per insert.
    self.aggregator = MSGDataAggregator(exitOnError=False,
                                        commitOnEveryInsert=True)
    self.notifier = MSGNotifier()
    # Available types are in ['weather', 'egauge', 'circuit',
    # 'irradiance'].
    self.rawTypes = ['']

    self.connector = MSGDBConnector()
    self.conn = self.connector.connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
def logSuccessfulExport(self, name='', url='', datetime=0, size=0):
    """
    When an export has been successful, log information about the export
    to the database.

    The items to log include:
    * filename
    * URL
    * timestamp
    * filesize

    :param name: String
    :param url: String
    :param datetime:
    :param size: Int
    :return: True if no errors occurred, else False.
    """

    def exportHistoryColumns():
        return ['name', 'url', 'timestamp', 'size']

    def timestampSQL(dt):
        # Epoch zero becomes to_timestamp(0); any other value is emitted
        # as a SQL timestamp literal.
        return 'to_timestamp(0)' if dt == 0 else "timestamp '{}'".format(dt)

    sql = 'INSERT INTO "{0}" ({1}) VALUES ({2}, {3}, {4}, {5})'.format(
        self.configer.configOptionValue('Export', 'export_history_table'),
        ','.join(exportHistoryColumns()), "'" + name + "'",
        "'" + url + "'", timestampSQL(datetime), size)

    conn = MSGDBConnector().connectDB()
    cursor = conn.cursor()
    dbUtil = MSGDBUtil()

    result = dbUtil.executeSQL(cursor, sql, exitOnFail=False)
    conn.commit()
    return result
class MECODBDeleter(object):
    """
    Provide delete routines for MECO DB.
    """

    def __init__(self):
        """
        Constructor.
        """
        self.dbUtil = MSGDBUtil()

    def deleteRecord(self, conn, tableName, idText, idValue):
        """
        Delete record from DB where record has an int-based serial number.

        param: tableName
        param: idText DB column name for record ID
        param: idValue Value of the ID to be deleted
        """
        deleteSQL = """delete from "%s" where %s = %s""" % (tableName,
                                                            idText,
                                                            idValue)
        cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        self.dbUtil.executeSQL(cur, deleteSQL)
        # Commit immediately so the delete is durable.
        conn.commit()
def __init__(self, exitOnError=True, commitOnEveryInsert=False,
             testing=False):
    """
    Constructor.

    :param exitOnError: If True, a failed DB operation is fatal.
    :param commitOnEveryInsert: If True, commit after each insert instead
        of batching commits.
    :param testing: if True, the testing DB will be connected instead of
        the production DB.
    """
    self.logger = SEKLogger(__name__, "info")
    self.configer = MSGConfiger()
    # BUG FIX: forward the testing flag; it was previously accepted but
    # ignored, so the production DB was always connected.
    self.conn = MSGDBConnector(testing).connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
    self.notifier = MSGNotifier()
    self.mathUtil = MSGMathUtil()
    self.timeUtil = MSGTimeUtil()
    self.nextMinuteCrossing = {}
    self.nextMinuteCrossingWithoutSubkeys = None
    self.exitOnError = exitOnError
    self.commitOnEveryInsert = commitOnEveryInsert

    section = "Aggregation"
    tableList = [
        "irradiance", "agg_irradiance", "weather", "agg_weather",
        "circuit", "agg_circuit", "egauge", "agg_egauge",
    ]
    # Maps a raw data type to (aggregation table, time column, subkey
    # column); an empty subkey means the type has no sub-series.
    self.dataParams = {
        "weather": ("agg_weather", "timestamp", ""),
        "egauge": ("agg_egauge", "datetime", "egauge_id"),
        "circuit": ("agg_circuit", "timestamp", "circuit"),
        "irradiance": ("agg_irradiance", "timestamp", "sensor_id"),
    }
    self.columns = {}

    # tables[datatype] gives the table name for datatype.
    self.tables = {t: self.configer.configOptionValue(section,
                                                      "{}_table".format(t))
                   for t in tableList}

    for t in self.tables.keys():
        self.logger.log("t:{}".format(t), "DEBUG")
        try:
            self.columns[t] = self.dbUtil.columnsString(self.cursor,
                                                        self.tables[t])
        except TypeError as error:
            # A missing table raises TypeError from columnsString; log
            # and continue rather than abort.
            self.logger.log(
                "Ignoring missing table: Error is {}.".format(error),
                "error")
class TestMECODBRead(unittest.TestCase): def setUp(self): self.reader = MECODBReader() self.connector = MSGDBConnector(True) self.conn = self.connector.connectDB() self.inserter = MECODBInserter() self.util = MSGDBUtil() self.lastSeqVal = None self.tableName = 'MeterData' self.colName = 'meter_data_id' self.deleter = MECODBDeleter() def testMECODBReadCanBeInited(self): self.assertIsNotNone(self.reader) def testSelectRecord(self): """ Insert and retrieve a record to test the ability to select a record. """ print "testSelectRecord:" print "self.conn = %s" % self.conn sampleDict = { 'MeterName': '100001', 'UtilDeviceID': '100001', 'MacID': '00:00:00:00:00:00:00:00' } self.inserter.insertData(self.conn, self.tableName, sampleDict) self.lastSeqVal = self.util.getLastSequenceID(self.conn, self.tableName, self.colName) print "lastSeqVal = %s" % self.lastSeqVal row = self.reader.selectRecord(self.conn, self.tableName, self.colName, self.lastSeqVal) self.assertEqual(row[self.colName], self.lastSeqVal) def tearDown(self): # Delete the record that was inserted. if self.lastSeqVal != None: self.deleter.deleteRecord(self.conn, self.tableName, self.colName, self.lastSeqVal) self.connector.closeDB(self.conn)
def __init__(self, exitOnError = True, commitOnEveryInsert = False,
             testing = False):
    """
    Constructor.

    :param exitOnError: If True, a failed DB operation is fatal.
    :param commitOnEveryInsert: If True, commit after each insert instead
        of batching commits.
    :param testing: if True, the testing DB will be connected instead of
        the production DB.
    """
    self.logger = MSGLogger(__name__, 'info')
    self.configer = MSGConfiger()
    # BUG FIX: forward the testing flag; it was previously accepted but
    # ignored, so the production DB was always connected.
    self.conn = MSGDBConnector(testing).connectDB()
    self.cursor = self.conn.cursor()
    self.dbUtil = MSGDBUtil()
    self.notifier = MSGNotifier()
    self.mathUtil = MSGMathUtil()
    self.timeUtil = MSGTimeUtil()
    self.nextMinuteCrossing = {}
    self.nextMinuteCrossingWithoutSubkeys = None
    self.exitOnError = exitOnError
    self.commitOnEveryInsert = commitOnEveryInsert

    section = 'Aggregation'
    tableList = ['irradiance', 'agg_irradiance', 'weather', 'agg_weather',
                 'circuit', 'agg_circuit', 'egauge', 'agg_egauge']
    # Maps a raw data type to (aggregation table, time column, subkey
    # column); an empty subkey means the type has no sub-series.
    self.dataParams = {'weather': ('agg_weather', 'timestamp', ''),
                       'egauge': ('agg_egauge', 'datetime', 'egauge_id'),
                       'circuit': ('agg_circuit', 'timestamp', 'circuit'),
                       'irradiance': (
                           'agg_irradiance', 'timestamp', 'sensor_id')}
    self.columns = {}

    # tables[datatype] gives the table name for datatype.
    self.tables = {
        t: self.configer.configOptionValue(section, '%s_table' % t) for t
        in tableList}

    for t in self.tables.keys():
        self.logger.log('t:%s' % t, 'DEBUG')
        try:
            self.columns[t] = self.dbUtil.columnsString(self.cursor,
                                                        self.tables[t])
        except TypeError as error:
            # A missing table raises TypeError from columnsString; log
            # and continue rather than abort.
            self.logger.log('Ignoring missing table: Error is %s.' % error,
                            'error')
def __init__(self):
    """
    Constructor.

    A database connection is not maintained here to keep this class
    lightweight. This results in the class not having a parameter for
    TESTING MODE.
    """
    # NOTE(review): a sibling variant uses SEKLogger; confirm MSGLogger
    # is intended here.
    self.logger = MSGLogger(__name__, 'info')
    self.configer = MSGConfiger()

    # Config-driven source URL and filename pattern for weather data.
    self.url = self.configer.configOptionValue('Weather Data',
                                               'weather_data_url')
    self.pattern = self.configer.configOptionValue('Weather Data',
                                                   'weather_data_pattern')

    self.fileList = []
    # List of dates corresponding weather data files.
    self.dateList = []
    self.fillFileListAndDateList()
    self.dbUtil = MSGDBUtil()
def setUp(self):
    self.i = MECODBInserter()

    # Connect to the testing database.
    self.connector = MSGDBConnector(testing = True)
    self.conn = self.connector.connectDB()
    self.cursor = self.conn.cursor()

    # Does this work having the dictCur be in another class?
    self.dictCur = self.connector.dictCur

    self.deleter = MECODBDeleter()
    self.lastSeqVal = None
    self.tableName = 'MeterData'
    self.columnName = 'meter_data_id'
    self.configer = MSGConfiger()
    self.logger = MSGLogger(__name__, 'debug')
    self.dbUtil = MSGDBUtil()
class TestMECODBRead(unittest.TestCase): def setUp(self): self.reader = MECODBReader() self.connector = MSGDBConnector(True) self.conn = self.connector.connectDB() self.inserter = MECODBInserter() self.util = MSGDBUtil() self.lastSeqVal = None self.tableName = 'MeterData' self.colName = 'meter_data_id' self.deleter = MECODBDeleter() def testMECODBReadCanBeInited(self): self.assertIsNotNone(self.reader) def testSelectRecord(self): """ Insert and retrieve a record to test the ability to select a record. """ print "testSelectRecord:" print "self.conn = %s" % self.conn sampleDict = {'MeterName': '100001', 'UtilDeviceID': '100001', 'MacID': '00:00:00:00:00:00:00:00'} self.inserter.insertData(self.conn, self.tableName, sampleDict) self.lastSeqVal = self.util.getLastSequenceID(self.conn, self.tableName, self.colName) print "lastSeqVal = %s" % self.lastSeqVal row = self.reader.selectRecord(self.conn, self.tableName, self.colName, self.lastSeqVal) self.assertEqual(row[self.colName], self.lastSeqVal) def tearDown(self): # Delete the record that was inserted. if self.lastSeqVal != None: self.deleter.deleteRecord(self.conn, self.tableName, self.colName, self.lastSeqVal) self.connector.closeDB(self.conn)
class MECODBInserter(object): """ Provides methods that perform insertion of MECO data. """ def __init__(self): """ Constructor. """ self.logger = SEKLogger(__name__, 'debug') self.mapper = MECOMapper() self.dupeChecker = MECODupeChecker() self.dbUtil = MSGDBUtil() def __call__(self, param): print "CallableClass.__call__(%s)" % param def insertData(self, conn, tableName, columnsAndValues, fKeyVal=None, withoutCommit=0): """ Given a table name and a dictionary of column names and values, insert them to the DB. :param conn: database connection :param tableName: name of the db table :param columnsAndValues: dictionary of columns and values to be inserted to the db :param (optional) fKeyVal: an explicit foreign key value :param (optional) withoutCommit: a flag indicated that the insert will not be immediately committed :returns: A database cursor. """ cur = conn.cursor() # Get a dictionary of mapped (from DB to source data) column names. columnDict = self.mapper.getDBColNameDict(tableName) dbColsAndVals = {} if VISUALIZE_DATA: print "----------" + tableName + "----------" print columnDict print columnsAndValues for col in columnDict.keys(): # Use default as the value for the primary key so that the # private key is obtained from the predefined sequence. if col == '_pkey': if VISUALIZE_DATA: print columnDict[col], # DB col name. print 'DEFAULT' dbColsAndVals[columnDict[col]] = 'DEFAULT' # For the foreign key, set the value from the given parameter. elif col == '_fkey': if VISUALIZE_DATA: print columnDict[col], # DB col name. print fKeyVal dbColsAndVals[columnDict[col]] = fKeyVal else: if VISUALIZE_DATA: print columnDict[col], # DB col name. # The Register and Reading tables need to handle NULL # values as a special case. 
if tableName == 'Register' or tableName == 'Reading': try: if VISUALIZE_DATA: print columnsAndValues[col] # data source value dbColsAndVals[columnDict[col]] = columnsAndValues[col] except: if VISUALIZE_DATA: print 'NULL' dbColsAndVals[columnDict[col]] = 'NULL' # For all other cases, simply pass the value. else: if VISUALIZE_DATA: print columnsAndValues[col] # data source value dbColsAndVals[columnDict[col]] = columnsAndValues[col] # Add a creation timestamp to MeterData. if tableName == 'MeterData': dbColsAndVals['created'] = 'NOW()' cols = [] vals = [] for col in dbColsAndVals.keys(): cols.append(col) # DEFAULT, NULL and NOW() need to appear without quotes. if dbColsAndVals[col] in {'DEFAULT', 'NULL', 'NOW()'}: vals.append(dbColsAndVals[col]) else: vals.append( "'%s'" % dbColsAndVals[col]) # Surround value with single quotes. sql = """INSERT INTO "%s" (%s) VALUES (%s)""" % ( tableName, ','.join(cols), ','.join(vals)) self.dbUtil.executeSQL(cur, sql) if withoutCommit == 0: try: conn.commit() except: self.logger.log("ERROR: Commit failed.", 'debug') return cur
class MECODupeChecker(object): """ Check for duplicate data in the database. """ def __init__(self): """ Constructor. """ self.logger = SEKLogger(__name__, 'debug') self.mecoConfig = MSGConfiger() self.currentReadingID = 0 self.dbUtil = MSGDBUtil() def getLastElement(self, rows): """ Get the last element in a collection. Example: rows = (element1, element2, element3) getLastElement(rows) # return element3 :param rows Result froms from a query :return last element in the collection """ for i, var in enumerate(rows): if i == len(rows) - 1: return var def eventBranchDupeExists(self, conn, meterName, eventTime): """ :param conn: Database connection. :param meterName: Meter name in MeterData table. :param eventTime: Timestamp of event. :return: True if tuple exists, False if not. """ dbCursor = conn.cursor() sql = """SELECT "Event".event_time, "MeterData".meter_data_id, "EventData".event_data_id FROM ( ( "MeterData" JOIN "EventData" ON ( ( "MeterData".meter_data_id = "EventData" .meter_data_id ) ) ) JOIN "Event" ON ( ( "EventData".event_data_id = "Event" .event_data_id ) ) ) WHERE "MeterData".meter_name = '%s' AND "Event".event_time = '%s' """ % (meterName, eventTime) self.dbUtil.executeSQL(dbCursor, sql) rows = dbCursor.fetchall() if len(rows) > 0: return True else: return False def registerBranchDupeExists(self, conn, meterName, readTime, registerNumber, DEBUG=False): """ Determine if a register branch duplicate exists for a given meter name, read time, number tuple. :param conn: Database connection. :param meterName: Meter name in MeterData table. :param readTime: Read time in RegisterRead table. :param registerNumber: Corresponds to DB column "number". :return: True if tuple exists, False if not. 
""" dbCursor = conn.cursor() sql = """SELECT "public"."MeterData".meter_name, "public"."RegisterRead".read_time, "public"."Register"."number" FROM "public"."MeterData" INNER JOIN "public"."RegisterData" ON "public" ."MeterData".meter_data_id = "public" ."RegisterData".meter_data_id INNER JOIN "public"."RegisterRead" ON "public"."RegisterData" .register_data_id = "public" ."RegisterRead".register_data_id INNER JOIN "public"."Tier" ON "public"."RegisterRead" .register_read_id = "public"."Tier" .register_read_id INNER JOIN "public"."Register" ON "public"."Tier".tier_id = "public"."Register".tier_id WHERE "public"."MeterData".meter_name = '%s' AND "public"."RegisterRead".read_time = '%s' AND "public"."Register".number = '%s' """ % (meterName, readTime, registerNumber) self.dbUtil.executeSQL(dbCursor, sql) rows = dbCursor.fetchall() if len(rows) > 0: return True else: return False def readingBranchDupeExists(self, conn, meterName, endTime, channel=None, DEBUG=False): """ Duplicate cases: 1. Tuple (meterID, endTime) exists in the database. @DEPRECATED in favor of (2), full meterName-endTime-channel query. 2. Tuple (meterID, endTime, channel) exists in the database. :param conn: Database connection. :param meterName: Meter name in MeterData table. :param endTime: End time in Interval table. :param channel: Required parameter that was previously optional. An optional channel is now deprecated. :return: True if tuple exists, False if not. 
""" dbCursor = conn.cursor() if DEBUG: print "readingBranchDupeExists():" if channel != None: sql = """SELECT "Interval".end_time, "MeterData".meter_name, "MeterData".meter_data_id, "Reading".channel, "Reading".reading_id FROM "MeterData" INNER JOIN "IntervalReadData" ON "MeterData" .meter_data_id = "IntervalReadData".meter_data_id INNER JOIN "Interval" ON "IntervalReadData" .interval_read_data_id = "Interval".interval_read_data_id INNER JOIN "Reading" ON "Interval".interval_id = "Reading" .interval_id WHERE "Interval".end_time = '%s' and meter_name = '%s' and channel = '%s'""" % (endTime, meterName, channel) else: # deprecated query sql = """SELECT "Interval".end_time, "MeterData".meter_name, "MeterData".meter_data_id FROM "MeterData" INNER JOIN "IntervalReadData" ON "MeterData" .meter_data_id = "IntervalReadData".meter_data_id INNER JOIN "Interval" ON "IntervalReadData" .interval_read_data_id = "Interval".interval_read_data_id WHERE "Interval".end_time = '%s' and meter_name = '%s'""" % (endTime, meterName) self.dbUtil.executeSQL(dbCursor, sql) rows = dbCursor.fetchall() if len(rows) > 0: assert len( rows) < 2, "Dupes should be less than 2, found %s: %s." % ( len(rows), rows) self.currentReadingID = self.getLastElement(rows[0]) self.logger.log('Reading ID = %s.' % self.currentReadingID, 'silent') self.logger.log( "Duplicate found for meter %s, end time %s, channel %s." % (meterName, endTime, channel), 'silent') return True else: self.logger.log( "Found no rows for meter %s, end time %s, channel %s." % (meterName, endTime, channel), 'silent') return False def readingValuesAreInTheDatabase(self, conn, readingDataDict): """ Given a reading ID, verify that the values associated are present in the database. Values are from the columns: 1. channel 2. raw_value 3. uom 4. 
value :param dictionary containing reading values :return True if the existing values are the same, otherwise return False """ dbCursor = conn.cursor() sql = """SELECT "Reading".reading_id, "Reading".channel, "Reading".raw_value, "Reading".uom, "Reading"."value" FROM "Reading" WHERE "Reading".reading_id = %s""" % ( self.currentReadingID) self.dbUtil.executeSQL(dbCursor, sql) rows = dbCursor.fetchall() if self.currentReadingID == 0: return False # assert len(rows) == 1 or len(rows) == 0 assert len( rows) == 1, "Didn't find a matching reading for reading ID %s." %\ self.currentReadingID if len(rows) == 1: self.logger.log("Found %s existing matches." % len(rows), 'silent') allEqual = True if int(readingDataDict['Channel']) == int(rows[0][1]): print "channel equal," else: self.logger.log( "channel not equal: %s,%s,%s" % (int(readingDataDict['Channel']), int( rows[0][1]), readingDataDict['Channel'] == rows[0][1]), 'debug') allEqual = False if int(readingDataDict['RawValue']) == int(rows[0][2]): print "raw value equal," else: self.logger.log( "rawvalue not equal: %s,%s,%s" % (int(readingDataDict['RawValue']), int(rows[0][2]), readingDataDict['RawValue'] == rows[0][2]), 'debug') allEqual = False if readingDataDict['UOM'] == rows[0][3]: print "uom equal," else: self.logger.log( "uom not equal: %s,%s,%s" % (readingDataDict['UOM'], rows[0][3], readingDataDict['UOM'] == rows[0][3]), 'debug') allEqual = False if self.approximatelyEqual(float(readingDataDict['Value']), float(rows[0][4]), 0.001): self.logger.log("value equal", 'silent') else: self.logger.log( "value not equal: %s,%s,%s" % (float(readingDataDict['Value']), float( rows[0][4]), readingDataDict['Value'] == rows[0][4]), 'debug') allEqual = False if allEqual: return True else: return False else: return False def approximatelyEqual(self, a, b, tolerance): return abs(a - b) < tolerance
class MSGDataVerifier(object):
    """
    Perform verification procedures related to data integrity.
    """

    def __init__(self):
        """
        Constructor.
        """
        self.logger = SEKLogger(__name__, 'DEBUG')
        self.cursor = MSGDBConnector().connectDB().cursor()
        self.dbUtil = MSGDBUtil()

    def mecoReadingsDupeCount(self):
        """
        Generate counts of MECO dupe readings.

        :returns: Total duplicate count across every month of every year
        in YEARS.
        """
        dupes = 0
        startDate = lambda y, m: '%d-%02d-%02d' % (y, m, 1)
        endDate = lambda y, m: '%d-%02d-%02d' % (
            y, m, calendar.monthrange(y, m)[1])

        for y in YEARS:
            # Derive each month's start/end pair directly instead of
            # recovering the end date with startDates.index(start), which
            # was an O(n) lookup per month.
            for m in range(1, 13):
                start = startDate(y, m)
                cnt = self.__mecoReadingsDupeCount(start, endDate(y, m))
                self.logger.log('start: %s, dupe cnt: %s' % (start, cnt),
                                'INFO')
                dupes += cnt

        return dupes

    def __mecoReadingsDupeCount(self, startDate, endDate):
        """
        Count MECO readings that appear more than once for the same
        (meter name, end time, channel) within the given date range.

        :param startDate: Inclusive start of the range ('YYYY-MM-DD').
        :param endDate: Inclusive end of the range ('YYYY-MM-DD').
        :returns: DB row count.
        """
        self.dbUtil.executeSQL(self.cursor, """SELECT "Interval".end_time,
                   "MeterData".meter_name,
                   "Reading".channel
            FROM "MeterData"
            INNER JOIN "IntervalReadData" ON "MeterData"
            .meter_data_id = "IntervalReadData".meter_data_id
            INNER JOIN "Interval" ON "IntervalReadData"
            .interval_read_data_id = "Interval".interval_read_data_id
            INNER JOIN "Reading" ON "Interval".interval_id = "Reading"
            .interval_id
            WHERE "Interval".end_time BETWEEN '%s' and '%s'
            GROUP BY "MeterData".meter_name,
                "Interval".end_time,
                "Reading".channel
            HAVING (COUNT(*) > 1)""" % (startDate, endDate))
        return len(self.cursor.fetchall())

    def egaugeAggregationCount(self):
        """
        There should not be more than 96 15-min interval endpoints within
        a single calendar day for a given sub ID.

        :return:
        """
        pass
class MECODupeChecker(object): """ Check for duplicate data in the database. """ def __init__(self): """ Constructor. """ self.logger = SEKLogger(__name__, 'debug') self.mecoConfig = MSGConfiger() self.currentReadingID = 0 self.dbUtil = MSGDBUtil() def getLastElement(self, rows): """ Get the last element in a collection. Example: rows = (element1, element2, element3) getLastElement(rows) # return element3 :param rows Result froms from a query :return last element in the collection """ for i, var in enumerate(rows): if i == len(rows) - 1: return var def eventBranchDupeExists(self, conn, meterName, eventTime): """ :param conn: Database connection. :param meterName: Meter name in MeterData table. :param eventTime: Timestamp of event. :return: True if tuple exists, False if not. """ dbCursor = conn.cursor() sql = """SELECT "Event".event_time, "MeterData".meter_data_id, "EventData".event_data_id FROM ( ( "MeterData" JOIN "EventData" ON ( ( "MeterData".meter_data_id = "EventData" .meter_data_id ) ) ) JOIN "Event" ON ( ( "EventData".event_data_id = "Event" .event_data_id ) ) ) WHERE "MeterData".meter_name = '%s' AND "Event".event_time = '%s' """ % (meterName, eventTime) self.dbUtil.executeSQL(dbCursor, sql) rows = dbCursor.fetchall() if len(rows) > 0: return True else: return False def registerBranchDupeExists(self, conn, meterName, readTime, registerNumber, DEBUG = False): """ Determine if a register branch duplicate exists for a given meter name, read time, number tuple. :param conn: Database connection. :param meterName: Meter name in MeterData table. :param readTime: Read time in RegisterRead table. :param registerNumber: Corresponds to DB column "number". :return: True if tuple exists, False if not. 
""" dbCursor = conn.cursor() sql = """SELECT "public"."MeterData".meter_name, "public"."RegisterRead".read_time, "public"."Register"."number" FROM "public"."MeterData" INNER JOIN "public"."RegisterData" ON "public" ."MeterData".meter_data_id = "public" ."RegisterData".meter_data_id INNER JOIN "public"."RegisterRead" ON "public"."RegisterData" .register_data_id = "public" ."RegisterRead".register_data_id INNER JOIN "public"."Tier" ON "public"."RegisterRead" .register_read_id = "public"."Tier" .register_read_id INNER JOIN "public"."Register" ON "public"."Tier".tier_id = "public"."Register".tier_id WHERE "public"."MeterData".meter_name = '%s' AND "public"."RegisterRead".read_time = '%s' AND "public"."Register".number = '%s' """ % (meterName, readTime, registerNumber) self.dbUtil.executeSQL(dbCursor, sql) rows = dbCursor.fetchall() if len(rows) > 0: return True else: return False def readingBranchDupeExists(self, conn, meterName, endTime, channel = None, DEBUG = False): """ Duplicate cases: 1. Tuple (meterID, endTime) exists in the database. @DEPRECATED in favor of (2), full meterName-endTime-channel query. 2. Tuple (meterID, endTime, channel) exists in the database. :param conn: Database connection. :param meterName: Meter name in MeterData table. :param endTime: End time in Interval table. :param channel: Required parameter that was previously optional. An optional channel is now deprecated. :return: True if tuple exists, False if not. 
""" dbCursor = conn.cursor() if DEBUG: print "readingBranchDupeExists():" if channel != None: sql = """SELECT "Interval".end_time, "MeterData".meter_name, "MeterData".meter_data_id, "Reading".channel, "Reading".reading_id FROM "MeterData" INNER JOIN "IntervalReadData" ON "MeterData" .meter_data_id = "IntervalReadData".meter_data_id INNER JOIN "Interval" ON "IntervalReadData" .interval_read_data_id = "Interval".interval_read_data_id INNER JOIN "Reading" ON "Interval".interval_id = "Reading" .interval_id WHERE "Interval".end_time = '%s' and meter_name = '%s' and channel = '%s'""" % ( endTime, meterName, channel) else: # deprecated query sql = """SELECT "Interval".end_time, "MeterData".meter_name, "MeterData".meter_data_id FROM "MeterData" INNER JOIN "IntervalReadData" ON "MeterData" .meter_data_id = "IntervalReadData".meter_data_id INNER JOIN "Interval" ON "IntervalReadData" .interval_read_data_id = "Interval".interval_read_data_id WHERE "Interval".end_time = '%s' and meter_name = '%s'""" % ( endTime, meterName) self.dbUtil.executeSQL(dbCursor, sql) rows = dbCursor.fetchall() if len(rows) > 0: assert len( rows) < 2, "Dupes should be less than 2, found %s: %s." % ( len(rows), rows) self.currentReadingID = self.getLastElement(rows[0]) self.logger.log('Reading ID = %s.' % self.currentReadingID, 'silent') self.logger.log( "Duplicate found for meter %s, end time %s, channel %s." % ( meterName, endTime, channel), 'silent') return True else: self.logger.log( "Found no rows for meter %s, end time %s, channel %s." % ( meterName, endTime, channel), 'silent') return False def readingValuesAreInTheDatabase(self, conn, readingDataDict): """ Given a reading ID, verify that the values associated are present in the database. Values are from the columns: 1. channel 2. raw_value 3. uom 4. 
value :param dictionary containing reading values :return True if the existing values are the same, otherwise return False """ dbCursor = conn.cursor() sql = """SELECT "Reading".reading_id, "Reading".channel, "Reading".raw_value, "Reading".uom, "Reading"."value" FROM "Reading" WHERE "Reading".reading_id = %s""" % ( self.currentReadingID) self.dbUtil.executeSQL(dbCursor, sql) rows = dbCursor.fetchall() if self.currentReadingID == 0: return False # assert len(rows) == 1 or len(rows) == 0 assert len( rows) == 1, "Didn't find a matching reading for reading ID %s." %\ self.currentReadingID if len(rows) == 1: self.logger.log("Found %s existing matches." % len(rows), 'silent') allEqual = True if int(readingDataDict['Channel']) == int(rows[0][1]): print "channel equal," else: self.logger.log("channel not equal: %s,%s,%s" % ( int(readingDataDict['Channel']), int(rows[0][1]), readingDataDict['Channel'] == rows[0][1]), 'debug') allEqual = False if int(readingDataDict['RawValue']) == int(rows[0][2]): print "raw value equal," else: self.logger.log("rawvalue not equal: %s,%s,%s" % ( int(readingDataDict['RawValue']), int(rows[0][2]), readingDataDict['RawValue'] == rows[0][2]), 'debug') allEqual = False if readingDataDict['UOM'] == rows[0][3]: print "uom equal," else: self.logger.log("uom not equal: %s,%s,%s" % ( readingDataDict['UOM'], rows[0][3], readingDataDict['UOM'] == rows[0][3]), 'debug') allEqual = False if self.approximatelyEqual(float(readingDataDict['Value']), float(rows[0][4]), 0.001): self.logger.log("value equal", 'silent') else: self.logger.log("value not equal: %s,%s,%s" % ( float(readingDataDict['Value']), float(rows[0][4]), readingDataDict['Value'] == rows[0][4]), 'debug') allEqual = False if allEqual: return True else: return False else: return False def approximatelyEqual(self, a, b, tolerance): return abs(a - b) < tolerance
class MSGEgaugeNewDataChecker(object): """ Provide notification of newly loaded MSG eGauge data. This uses notification type MSG_EGAUGE_SERVICE. """ def __init__(self): """ Constructor. """ print __name__ self.logger = SEKLogger(__name__) self.connector = MSGDBConnector() self.dbUtil = MSGDBUtil() self.notifier = MSGNotifier() self.configer = MSGConfiger() def newDataCount(self): """ Measure the amount of new data that is present since the last time new data was reported. """ cursor = self.connector.conn.cursor() tableName = 'EgaugeEnergyAutoload' lastTime = self.lastReportDate('MSG_EGAUGE_SERVICE') if lastTime is None: lastTime = '1900-01-01' sql = """SELECT COUNT(*) FROM "%s" WHERE datetime > '%s'""" % ( tableName, lastTime) success = self.dbUtil.executeSQL(cursor, sql) if success: rows = cursor.fetchall() if not rows[0][0]: return 0 else: return rows[0][0] else: # @todo Raise an exception. return None def lastReportDate(self, notificationType): """ Get the last time a notification was reported. :param notificationType: A string indicating the type of the notification. It is stored in the event history. :returns: datetime of last report date. """ cursor = self.connector.conn.cursor() sql = """SELECT MAX("notificationTime") FROM "%s" WHERE "notificationType" = '%s'""" % ( NOTIFICATION_HISTORY_TABLE, notificationType) success = self.dbUtil.executeSQL(cursor, sql) if success: rows = cursor.fetchall() if not rows[0][0]: return None else: return rows[0][0] else: # @todo Raise an exception. return None def saveNotificationTime(self): """ Save the notification event to the notification history. """ cursor = self.connector.conn.cursor() sql = """INSERT INTO "%s" ("notificationType", "notificationTime") VALUES ('MSG_EGAUGE_SERVICE', NOW())""" % NOTIFICATION_HISTORY_TABLE success = self.dbUtil.executeSQL(cursor, sql) self.connector.conn.commit() if not success: # @todo Raise an exception. 
self.logger.log( 'An error occurred while saving the notification time.') def sendNewDataNotification(self, testing = False): """ Sending notification reporting on new data being available since the last time new data was reported. :param testing: Use testing mode when True. """ lastReportDate = self.lastReportDate('MSG_EGAUGE_SERVICE') if not lastReportDate: lastReportDate = "never" msgBody = '\nNew MSG eGauge data has been loaded to %s.' % self\ .connector.dbName msgBody += '\n\n' msgBody += 'The new data count is %s readings.' % self.newDataCount() msgBody += '\n\n' msgBody += 'The last report date was %s.' % lastReportDate msgBody += '\n\n' self.notifier.sendNotificationEmail(msgBody, testing = testing) self.saveNotificationTime()