def _gaPost(eventType, content):
    logger=mlog.mosaicLogging().getLogger(name=__name__)

    try:
        headers={"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}

        gac=_gaCredentialCache()
        # gaenable is stored as a string in the settings cache.
        if eval(gac["gaenable"]):
            payload="v=1&tid={0}&cid={1}&t=event&ec=mosaic-{2}-{3}&ea={4}&el={5}".format(
                    dec(gac["gaid"]),
                    _uuid(),
                    mosaic.__version__,
                    mosaic.__build__,
                    eventType,
                    content
                )

            # Route hits to the debug endpoint when running in developer mode.
            if mosaic.DeveloperMode:
                _debug="/debug"
            else:
                _debug=""

            conn=httplib.HTTPSConnection(dec(gac["gaurl"]))
            conn.request("POST", "{0}/{1}".format(_debug, dec(gac["gamode"])), payload, headers)
            response=conn.getresponse()
            data=response.read()
            conn.close()

            # The debug endpoint returns a validation response worth logging.
            if _debug:
                logger.debug(_d("ga collect: {0}", data))
    except BaseException as err:
        # Analytics must never interrupt an analysis run; log and move on.
        logger.debug(_d("Exception ignored: {0}\n{1}", repr(err), traceback.format_exc()))

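# Hedged usage sketch for _gaPost: it is fire-and-forget, so callers simply
# pass an event action and label. The values below are illustrative
# placeholders, not events taken from the MOSAIC sources.
_gaPost("launch", "cli")
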
def _initdb(self, **kwargs):
    """
        Initialize the database tables.

        Args:
            tableName   name of database table. Default is 'metadata'.
    """
    self.logger=mlog.mosaicLogging().getLogger(__name__)

    if not hasattr(self, 'tableName'):
        self.tableName='metadata'

    # Log the error before raising; logging after raise is unreachable.
    if not hasattr(self, 'colNames'):
        self.logger.error("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
        raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
    if not hasattr(self, 'colNames_t'):
        self.logger.error("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))
        raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))

    dbTimeout=kwargs.pop('timeout', 11.0)
    self.logger.debug(_d("DB Timeout = {0}", dbTimeout))

    # Use the supplied filename if present; otherwise generate a timestamped one.
    dbfile=kwargs.get('dbFilename', '')
    if dbfile=='':
        self.dbFilename=format_path(self.dbPath+'/'+'eventMD-'+str(datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))+'.sqlite')
    else:
        self.dbFilename=dbfile
    self.logger.debug(_d("dbFilename {0}", self.dbFilename))

    self.db=sqlite3.connect(self.dbFilename, detect_types=sqlite3.PARSE_DECLTYPES, timeout=dbTimeout)

    self._setuptables()
    self.logger.debug(_d("DB setup complete."))

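# Hedged usage sketch: _initdb is normally reached through sqlite3MDIO.initDB,
# mirroring the call in the event segment constructor below. The column names
# and types here are hypothetical examples, not the schema MOSAIC writes.
db=sqlite3MDIO.sqlite3MDIO()
db.initDB(
        dbPath='.',
        tableName='metadata',
        colNames=['BlockDepth', 'ResTime'],
        colNames_t=['REAL', 'REAL']
    )
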
def _opendb(self, dbname, **kwargs):
    try:
        self.logger.debug(_d("open DB {0}", dbname))
    except AttributeError:
        self.logger=mlog.mosaicLogging().getLogger(__name__)
        self.logger.debug(_d("open DB {0}", dbname))

    if not hasattr(self, 'tableName'):
        self.logger.debug(_d("Attribute tableName not found. Setting tableName to 'metadata'"))
        self.tableName='metadata'

    self.dbFilename=dbname

    # Column names and types are needed for appending data. If they are not
    # passed as arguments, no exception is raised. In the future these can be
    # retrieved from the metadata_t table in the db.
    try:
        self.colNames=kwargs['colNames']
        self.colNames_t=kwargs['colNames_t']
    except KeyError:
        pass

    self.db=sqlite3.connect(dbname, detect_types=sqlite3.PARSE_DECLTYPES)

    self._setuptables()

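# Hedged usage sketch: opening an existing database for queries. This assumes
# the public wrapper follows initDB's naming as openDB; the filename is a
# placeholder. colNames/colNames_t are only needed when appending records.
db=sqlite3MDIO.sqlite3MDIO()
db.openDB('eventMD-20240101-120000.sqlite')
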
def _gaCredentialCache():
    try:
        try:
            logger=mlog.mosaicLogging().getLogger(name=__name__)

            ga_cache=format_path(tempfile.gettempdir()+'/.ga')
            logger.debug(_d("Looking for GA cache {0}", ga_cache))

            gaModTime=datetime.fromtimestamp(os.stat(ga_cache).st_mtime)
            gaExpireAge=timedelta(hours=24)
            gaAge=datetime.today()-gaModTime

            if gaAge > gaExpireAge:
                logger.debug(_d("GA settings cache has expired."))

                # Refresh the cache, but preserve a user opt-out: if analytics
                # were disabled in the old settings, keep them disabled.
                ga_old=_gaSettingsDict(ga_cache)
                _getGASettings(ga_cache)
                ga_new=_gaSettingsDict(ga_cache)

                if ga_old["gaenable"]==False:
                    ga_new["gaenable"]=False

                with open(ga_cache, "w") as ga:
                    ga.write(json.dumps(ga_new))
            else:
                logger.debug(_d("GA settings cache found ({0}). gaAge={1}", str(ga_cache), str(gaAge)))
        except:
            # os.stat raises if the cache file does not exist yet.
            logger.debug(_d("GA settings are not cached."))
            _getGASettings(ga_cache)

        with open(ga_cache, 'r') as ga:
            return json.loads(ga.read())
    except BaseException as err:
        logger.debug(_d("Exception ignored: {0}\n{1}", repr(err), traceback.format_exc()))
        return

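# Hedged sketch of the cached settings consumed above and in _gaPost. The key
# names follow the dictionary lookups in this module; the values are
# placeholders, not the real cache contents.
example_ga_settings={
    "gaenable": "True",                      # evaluated with eval() in _gaPost
    "gaid":     "<encoded tracking id>",     # decoded with dec()
    "gaurl":    "<encoded collection host>",
    "gamode":   "<encoded endpoint path>"
}
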
def _getGASettings(ga_cache):
    logger=mlog.mosaicLogging().getLogger(name=__name__)

    try:
        # Fetch the GA settings published alongside the documentation and
        # write them to the local cache file.
        req=urllib2.Request(mosaic.DocumentationURL+".ga")
        streamHandler=urllib2.build_opener()
        stream=streamHandler.open(req)

        with open(ga_cache, 'w') as ga:
            ga.write(stream.read())

        logger.debug(_d("Cached GA settings to {0}.", ga_cache))
    except:
        logger.debug(_d("An error occurred when trying to cache GA settings."))

def timing_wrapper(*args, **kwargs):
    if self.TimingEnabled:
        # Time the wrapped call and accumulate per-function statistics.
        t1=self.time()
        res=func(*args, **kwargs)
        t2=self.time()

        try:
            funcTimingObj=self.timingDataDict[func.__name__]
        except KeyError:
            funcname=func.__name__
            funcTimingObj=timingData(funcname)
            self.timingDataDict[funcname]=funcTimingObj

        self._updateTiming(funcTimingObj, t1, t2)

        # Log each call immediately unless a summary is printed at the end.
        if not self.TimingSummary:
            logger=mlog.mosaicLogging().getLogger(func.__name__)
            logger.debug(_d(
                    "Timing: iterations={0}, total={1:0.3f} ms, last={2:0.3f} ms, maximum={3:0.3f} ms",
                    funcTimingObj["counter"], funcTimingObj["total"], funcTimingObj["last"], funcTimingObj["maxtime"]
                ))
    else:
        res=func(*args, **kwargs)

    return res

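# Hedged usage sketch: timing_wrapper is the closure returned by a timing
# decorator on mosaicTiming. Assuming the enclosing decorator method is
# exposed as FunctionTiming (the function name below is hypothetical), a
# function is instrumented like this:
funcTimer=mosaicTiming.mosaicTiming()

@funcTimer.FunctionTiming
def processBlock():
    pass

# After the run, aggregate statistics can be printed with
# funcTimer.PrintStatistics().
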
def PrintCurrentTime(self):
    """
        Print timing results of the most recent function call.
    """
    if self.TimingEnabled:
        for k, v in self.timingDataDict.iteritems():
            self.logger.debug(_d(*v._currentTime()))

def exportToCSV(self, query):
    """
        Export database records that match the specified query to a CSV flat file.
    """
    # Write the CSV alongside the database, reusing its base name. Note that
    # the attribute set by _initdb/_opendb is dbFilename.
    csvfile=format_path(self.dbFilename.split('.')[0]+'.csv')

    df=pandas.DataFrame(self.queryDB(query), columns=self._col_names(query, self.db.cursor(), self.tableName))
    df.to_csv(csvfile)

    self.logger.debug(_d("{0}", csvfile))

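# Hedged usage sketch, given the sqlite3MDIO handle db from the sketch above.
# The column name and the 'normal' status value are illustrative and may
# differ per analysis setup.
db.exportToCSV("select * from metadata where ProcessingStatus='normal'")
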
def _setuppartition(self):
    # At the start of a run, store baseline stats for the open channel state.
    # Later, we use these values to detect drift.

    # First, calculate the number of points to include using the blockSizeSec
    # class attribute and the sampling frequency specified in trajDataObj.
    self.nPoints=int(self.blockSizeSec*self.FsHz)
    self.logger.debug(_d("nPoints={0}", self.nPoints))

    # A global counter that keeps track of the position in the data pipe.
    self.globalDataIndex=0
    self.dataStart=0

    # Estimate open channel statistics automatically unless all three values
    # were supplied in the settings (-1 marks an unset value).
    if self.meanOpenCurr == -1. or self.sdOpenCurr == -1. or self.slopeOpenCurr == -1.:
        [self.meanOpenCurr, self.sdOpenCurr, self.slopeOpenCurr]=self._openchanstats(self.trajDataObj.previewdata(self.nPoints))
        self.logger.debug(_d("Automatic open channel stats: {0}, {1}, {2}", self.meanOpenCurr, self.sdOpenCurr, self.slopeOpenCurr))
    else:
        self.logger.warning("WARNING: Automatic open channel state estimation has been disabled.")

    # Set up a local data store that is used by the main event partition loop.
    self.currData=deque()

    # The event threshold in current units: events are flagged when the ionic
    # current drops below |meanOpenCurr| - eventThreshold*|sdOpenCurr|.
    self.thrCurr=(abs(self.meanOpenCurr)-self.eventThreshold*abs(self.sdOpenCurr))

    self.logger.debug(_d("Partition setup complete."))

    # Vars for event partition stats.
    self.minDrift=abs(self.meanOpenCurr)
    self.maxDrift=abs(self.meanOpenCurr)
    self.minDriftR=self.slopeOpenCurr
    self.maxDriftR=self.slopeOpenCurr

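# Worked example of thrCurr above, with hypothetical baseline values: for a
# mean open channel current of 100 pA, a standard deviation of 5 pA, and
# eventThreshold=6, events are flagged when the ionic current drops below
#     thrCurr = abs(100.) - 6*abs(5.) = 70 pA
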
def PrintStatistics(self):
    """
        Print average timing results of the function call.
    """
    if self.TimingEnabled:
        for k, v in self.timingDataDict.iteritems():
            try:
                self.logger.debug(_d(*v._timingStatistics()))
            except ZeroDivisionError:
                # No calls were recorded for this function.
                self.logger.error("ERROR: No timing data is available.")

def rawQuery(self, query):
    try:
        self.db.commit()
        c=self.db.cursor()

        c.execute(str(query))
        self.logger.debug(_d("{0}", query))

        return c.fetchall()
    except sqlite3.OperationalError:
        # Re-raise so callers can handle malformed queries.
        raise

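# Hedged usage sketch, given the sqlite3MDIO handle db from the sketches
# above: rawQuery returns records without decoding, which is convenient for
# aggregates. The table name follows the 'metadata' default.
nEvents=db.rawQuery("select count(*) from metadata")[0][0]
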
def queryDB(self, query):
    try:
        self.db.commit()
        c=self.db.cursor()

        # Look up the column types from the <tableName>_t table so each
        # record can be decoded to its native Python type.
        colnames=self._col_names(query, c, self.tableName)
        colnames_t=[str(t) for t in c.execute('select '+','.join(colnames)+' from '+self.tableName+'_t').fetchall()[0]]

        self.logger.debug(_d("{0}", query))
        c.execute(str(query))

        return [self._decoderecord(colnames, colnames_t, rec) for rec in c.fetchall()]
    except sqlite3.OperationalError:
        # Re-raise so callers can handle malformed queries.
        raise

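# Hedged usage sketch, given the handle db from the sketches above: queryDB
# decodes each record, so stored values come back as native Python objects.
# The column names and status value are illustrative.
for rec in db.queryDB("select BlockDepth, ResTime from metadata where ProcessingStatus='normal'"):
    print rec
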
def __init__(self, trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings, settingsString, **kwargs):
    """
        Initialize a new event segment object.
    """
    # Required arguments
    self.trajDataObj=trajDataObj
    self.eventProcHnd=eventProcHnd

    # Reset function timer since esTimer is a class variable
    partitionTimer.Reset()

    self.settingsDict=eventPartitionSettings
    self.eventProcSettingsDict=eventProcSettings

    self.procTime=0.0

    self.FsHz=self.trajDataObj.FsHz
    self.DataLengthSec=self.trajDataObj.DataLengthSec

    try:
        self.writeEventTS=int(self.settingsDict.pop("writeEventTS", 1))
        self.parallelProc=int(self.settingsDict.pop("parallelProc", 1))
        self.reserveNCPU=int(self.settingsDict.pop("reserveNCPU", 2))
        self.driftThreshold=float(self.settingsDict.pop("driftThreshold", 2.0))
        self.maxDriftRate=float(self.settingsDict.pop("maxDriftRate", 2.0))
        self.minBaseline=float(self.settingsDict.pop("minBaseline", -1.))
        self.maxBaseline=float(self.settingsDict.pop("maxBaseline", -1.))
    except ValueError as err:
        raise mosaic.commonExceptions.SettingsTypeError(err)

    sys.stdout.flush()

    # Instantiate a temporary event processing object to query the metadata
    # column headings and types for the database setup below.
    self.tEventProcObj=self.eventProcHnd(
            [], [], self.FsHz,
            eventstart=0, eventend=0,
            baselinestats=[0, 0, 0],
            algosettingsdict=self.eventProcSettingsDict.copy(),
            savets=False,
            absdatidx=0,
            datafileHnd=None
        )

    self.mdioDBHnd=sqlite3MDIO.sqlite3MDIO()
    self.mdioDBHnd.initDB(
            dbPath=self.trajDataObj.datPath,
            tableName='metadata',
            colNames=(self.tEventProcObj.mdHeadings()),
            colNames_t=(self.tEventProcObj.mdHeadingDataType()),
            dbFilename=kwargs.get('dbFilename', '')
        )
    self.mdioDBHnd.writeSettings(settingsString)

    self.logger=mlog.mosaicLogging().getLogger(name=__name__, dbHnd=self.mdioDBHnd)
    self.logger.debug(_d("Event Segment Initialization"))
    self.logger.debug(_d("{0}", settingsString))

    if self.trajDataObj.dataFilter:
        self.fstring=type(self.trajDataObj.dataFilterObj).__name__
    else:
        self.fstring='None'

    self._writeanalysisinfo()

    if self.parallelProc:
        self._setupparallel()

    # Setup function timing
    self.timingObj=mosaicTiming.mosaicTiming()

    self._init(trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings)

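# Hedged usage sketch: constructing the event segment. This assumes the
# constructor above belongs to an eventSegment-style partition class; the
# trajectory object, processing handle, and settings values are placeholders
# for whatever a run actually configures.
seg=eventSegment(
        trajDataObj,                                          # an initialized trajectory I/O object
        adept2State.adept2State,                              # hypothetical event processing handle
        {"blockSizeSec": 0.5, "eventThreshold": 6.0},         # eventPartitionSettings
        {},                                                   # eventProcSettings
        "{}"                                                  # settingsString (JSON)
    )
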