from weatherLib.weatherDoc import WeatherData
from weatherLib.weatherUtil import WLogger

__INSERT_OBS = "insert into weather_work " + \
               "(tsa, time, temperature, humidity, pressure, " + \
               "light, fwVersion, swVersion, version, " + \
               "isThermometer, isBarometer, isHygrometer, isClock) " + \
               "values (%(tsa)s, %(time)s, %(temperature)s, %(humidity)s, %(pressure)s, " + \
               "%(light)s, %(fwVersion)s, %(swVersion)s, %(version)s, " + \
               "%(isThermometer)s, %(isBarometer)s, %(isHygrometer)s, %(isClock)s); "

host = 'localhost'
user = '******'
password = '******'
database = 'weather'

logger = WLogger(loggerName='weather.tools')
logger.logMessage("Starting...")

hostlist = [{'host': 'localhost', 'port': 9200}]
#hostlist = [
#    {'host': 'elastic00', 'port': 9200},
#    {'host': 'elastic01', 'port': 9200},
#    {'host': 'elastic02', 'port': 9200},
#    ]

def scanIndex(indexName, filtered):
    doc = WeatherData(using=client)
    s_filt = doc.search(using=client, index=indexName).\
        filter('range', **{'tsa': {'lt': 20180916001872}})
    s_all = doc.search(using=client, index=indexName)

logger.logMessage("Collecting and saving documents from elasticsearch.")
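# scanIndex() above relies on a module-level elasticsearch client named `client`.
# A minimal sketch of how it could be built from `hostlist` with the
# elasticsearch-py package; the construction shown here is an assumption,
# only the host list itself comes from this module.
from elasticsearch import Elasticsearch

client = Elasticsearch(hosts=hostlist)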
# Generator that turns each line of the renum file into a bulk update action
def generate():
    with open(renumFile, 'r') as f:
        line = f.readline().rstrip()
        while line != '':
            fields = line.split(';')
            oper = {'_index': fields[3],
                    '_op_type': 'update',
                    '_id': fields[2].rstrip(),
                    '_type': 'doc',
                    '_source': {'doc': {'tsa': fields[0]}}}
            yield oper
            line = f.readline().rstrip()

result = eshelp.bulk(client, generate())
logger.logMessage('Bulk result: {0}'.format(result))
logger.logMessage('End : elasticsearch bulk update')

logger.logMessage("Starting...")
#logger.setLevel("INFO")
#step010()
#step020()
#step030()
#step040()
#step045()
#logger.setLevel("DEBUG")
#step050()
#step060()
logger.logMessage("Finished")
class WeatherQueue(object):
    """ Weather measurements queue.
        Implemented on a sqlite3 database
    """

    def __init__(self, dbdir):
        """ Initialize the queue database connection and, if necessary,
            create the database. Also create the lock object that will
            be used to synchronize access
        """
        self.logger = WLogger()
        self.theLock = threading.Lock()
        self.curDay = 0
        self.curTSA = 0

        ini_file = pkg_resources.resource_filename(__name__, './database/wQueue.ini')
        config = configparser.ConfigParser()
        config.read([ini_file])
        tableDDL = config['queueDatabase']['table']
        tsasDDL = config['queueDatabase']['control']
        indexESDDL = config['queueDatabase']['indexES']
        indexDBDDL = config['queueDatabase']['indexDB']

        dbFile = os.path.join(dbdir, 'wQueue.db')

        try:
            self.theConn = sqlite3.connect(dbFile, check_same_thread=False)
            self.theConn.isolation_level = 'IMMEDIATE'
            self.theConn.execute(tableDDL)
            self.theConn.execute(indexESDDL)
            self.theConn.execute(indexDBDDL)
            self.theConn.execute(tsasDDL)
            self.theConn.commit()
            self.logger.logMessage(level="INFO",
                                   message="Queue database opened at {0:s}".format(dbFile))
        except:
            self.logger.logException('Error initializing queue database')

    def pushLine(self, line):
        """ Push a line into the queue. This function blocks until
            the database is not locked
        """
        stamp, _, _, _, _, _, _, _, _, _, _, _ = parseLine(line)
        datestamp = calendar.timegm(stamp.date().timetuple())
        theTsa = 1
        with self.theLock:
            try:
                result = self.theConn.execute(_SELECT_TSA, [datestamp])
                resCol = result.fetchone()
                if resCol is None:
                    self.theConn.execute(_INSERT_DAY, [datestamp])
                else:
                    theTsa = resCol[0] + 1
                    self.theConn.execute(_UPDATE_TSA, [theTsa, datestamp])
                fullTsa = (stamp.year * 10000 +
                           stamp.month * 100 +
                           stamp.day) * 1000000 + theTsa
                self.theConn.execute(_INSERT_QUEUE, [fullTsa, line])
                self.theConn.commit()
            except:
                self.logger.logException('Error inserting line into the queue database')
                self.theConn.rollback()

    def getDbQueue(self):
        """ Get all the queue lines NOT marked as inserted into the
            database (isDB == 0)
        """
        with self.theLock:
            try:
                result = self.theConn.execute(_SELECT_DB)
                queueContent = result.fetchall()
                return queueContent
            except:
                self.logger.logException('Error fetching DB queue')
                self.theConn.rollback()
                return None

    def markDbQueue(self, theId):
        """ Mark a queue entry as inserted into the database
            Parameters:
              - theId: row identifier to mark
        """
        with self.theLock:
            with self.theConn:
                self.theConn.execute(_UPDATE_DB, [theId])
                self.theConn.commit()
                self.logger.logMessage(level='DEBUG',
                                       message='Queue entry {0} marked as DB-done'.format(theId))

    def getESQueue(self):
        """ Get all the queue lines NOT marked as indexed
            in elasticsearch (isES == 0)
        """
        with self.theLock:
            try:
                result = self.theConn.execute(_SELECT_ES)
                queueContent = result.fetchall()
                return queueContent
            except:
                self.logger.logException('Error fetching ES queue')
                self.theConn.rollback()
                return None

    def markESQueue(self, theId):
        """ Mark a queue entry as indexed in elasticsearch
            Parameters:
              - theId: row identifier to mark
        """
        with self.theLock:
            with self.theConn:
                self.theConn.execute(_UPDATE_ES, [theId])
                self.theConn.commit()
                self.logger.logMessage(level='DEBUG',
                                       message='Queue entry {0} marked as ES-done'.format(theId))

    def purgeQueue(self):
        """ Purge queue entries according to _PURGE_QUEUE """
        with self.theLock:
            with self.theConn as conn:
                result = conn.execute(_COUNT_QUEUE)
                r = result.fetchone()
                count = r[0]
                self.logger.logMessage(message="About to purge {0} queue entries.".format(count))
                conn.execute(_PURGE_QUEUE)
                conn.commit()
                self.logger.logMessage(message="Queue purged.")
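# A minimal usage sketch for WeatherQueue; the queue directory and the
# processing loop are assumptions for illustration, only the method names and
# their semantics come from the class above.
if __name__ == '__main__':
    queue = WeatherQueue('/var/lib/weather/queue')   # hypothetical directory
    pending = queue.getDbQueue() or []               # rows not yet stored in the database
    for row in pending:
        row_id = row[0]                              # assuming the first column is the row id
        # ... insert the queued line into the database here ...
        queue.markDbQueue(row_id)                    # flag it so it is not picked up again
    queue.purgeQueue()                               # purge according to _PURGE_QUEUE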
dbThread = None
esThread = None
janitorThread = None
watchdogThread = None

logger = WLogger()

dataEvent = threading.Event()
dataEvent.clear()

config = configparser.ConfigParser()
cf = config.read(['/etc/weatherClient.ini',
                  '/usr/local/etc/weatherClient.ini',
                  'weatherClient.ini'])
logger.logMessage(level="INFO",
                  message="Configuration loaded from configuration files [{l}]".format(l=cf))

data_dir = config['data']['directory']

w_address = config['bluetooth']['address']
w_service = config['bluetooth']['service']

pg_host = config['postgres']['host']
pg_user = config['postgres']['user']
pg_password = config['postgres']['password']
pg_database = config['postgres']['database']
pg_retry = int(config['postgres']['retryDelay'])

# The elasticsearch host list is stored in the INI file as a Python literal
es_hosts = eval(config['elastic']['hosts'])
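# For reference, a configuration file matching the keys read above could look
# like the sketch below; every value is a made-up example, not the real deployment.
#
#   [data]
#   directory = /var/lib/weather/data
#
#   [bluetooth]
#   address = 00:11:22:33:44:55
#   service = weather
#
#   [postgres]
#   host = localhost
#   user = weather
#   password = secret
#   database = weather
#   retryDelay = 30
#
#   [elastic]
#   hosts = [{'host': 'localhost', 'port': 9200}]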
script_path = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir))
sys.path.append(script_path)

from weatherLib.weatherDoc import WeatherData
from weatherLib.weatherUtil import WLogger

_UPDATE_DOC = 'update weather set esDocId = %(esDocId)s where tsa = %(tsa)s;'
_SELECT_DOC = 'select tsa,time from weather where esDocId is null order by tsa;'

host = 'localhost'
user = '******'
password = '******'
database = 'weather'

logger = WLogger(loggerName='weather.tools')
logger.logMessage("Starting...")

hostlist = [{'host': 'elastic00', 'port': 9200},
            {'host': 'elastic01', 'port': 9200},
            {'host': 'elastic02', 'port': 9200},
            ]
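# A sketch of how the two statements above could be used to back-fill the
# elasticsearch document id into PostgreSQL. psycopg2 and the find_es_doc_id()
# helper are assumptions for illustration; only the SQL and the connection
# parameters come from this module.
import psycopg2

def backfill_doc_ids(find_es_doc_id):
    conn = psycopg2.connect(host=host, user=user, password=password, dbname=database)
    with conn, conn.cursor() as cur:
        cur.execute(_SELECT_DOC)                     # rows still missing an esDocId
        for tsa, ts in cur.fetchall():
            doc_id = find_es_doc_id(tsa, ts)         # look the document up in elasticsearch
            if doc_id is not None:
                cur.execute(_UPDATE_DOC, {'esDocId': doc_id, 'tsa': tsa})
    conn.close()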