def __init__(self, datpath, defaultwarn=True):
    """ Load analysis settings from a settings file found in datpath.

        Args:
            datpath     directory searched for a '.settings' or 'settings' file
            defaultwarn when True, log a warning if no settings file is found
                        and built-in defaults are used
    """
    self.settingsFile = None

    self.logger = mlog.mosaicLogging().getLogger(name=__name__)

    if os.path.isfile(datpath + '/.settings'):
        self.settingsFile = datpath + "/.settings"
    elif os.path.isfile(datpath + '/settings'):
        self.settingsFile = datpath + "/settings"

    if self.settingsFile is not None:
        # Read the settings file with a context manager; the original code
        # called open() without ever closing the file handle.
        with open(self.settingsFile, 'r') as sf:
            settingstr = sf.read()
    else:
        if defaultwarn:
            self.logger.warning("WARNING: Settings file not found in data directory. Default settings will be used.")
        settingstr = __settings__

    self.parseSettingsString(settingstr)
def _gaPost(eventType, content):
    """ Post a single usage event to Google Analytics.

        Args:
            eventType   GA event action
            content     GA event label

        Any failure is logged at debug level and otherwise ignored so that
        analytics can never interrupt an analysis run.
    """
    logger = mlog.mosaicLogging().getLogger(name=__name__)
    try:
        hdrs = {
            "Content-type": "application/x-www-form-urlencoded",
            "Accept": "text/plain"
        }
        gac = _gaCredentialCache()

        # NOTE(review): 'gaenable' comes from a cached, remotely fetched
        # settings file; eval() on it is only safe for trusted input.
        if eval(gac["gaenable"]):
            payload = "v=1&tid={0}&cid={1}&t=event&ec=mosaic-{2}-{3}&ea={4}&el={5}".format(
                dec(gac["gaid"]), _uuid(), mosaic.__version__,
                mosaic.__build__, eventType, content)

            # Developer-mode hits go through the GA debug endpoint.
            _debug = "/debug" if mosaic.DeveloperMode else ""

            conn = httplib.HTTPSConnection(dec(gac["gaurl"]))
            conn.request("POST", "{0}/{1}".format(_debug, dec(gac["gamode"])), payload, hdrs)
            reply = conn.getresponse()
            body = reply.read()
            conn.close()

            if _debug:
                logger.debug(_d("ga collect: {0}", body))
    except BaseException as err:
        logger.debug(_d("Exception ignored: {0}\n{1}", repr(err), traceback.format_exc()))
def __init__(self, datpath, defaultwarn=True):
    """ Load analysis settings from a settings file found in datpath.

        Args:
            datpath     directory searched for a '.settings' or 'settings' file
            defaultwarn when True, log a warning if no settings file is found
                        and built-in defaults are used
    """
    self.settingsFile = None

    self.logger = mlog.mosaicLogging().getLogger(name=__name__)

    if os.path.isfile(datpath + '/.settings'):
        self.settingsFile = datpath + "/.settings"
    elif os.path.isfile(datpath + '/settings'):
        self.settingsFile = datpath + "/settings"

    if self.settingsFile is not None:
        # Use a context manager so the settings file is always closed; the
        # original open() call leaked the file handle.
        with open(self.settingsFile, 'r') as sf:
            settingstr = sf.read()
    else:
        if defaultwarn:
            self.logger.warning("WARNING: Settings file not found in data directory. Default settings will be used.")
        settingstr = __settings__

    self.parseSettingsString(settingstr)
def timing_wrapper(*args, **kwargs):
    """ Time a single call of the wrapped function when profiling is enabled.

        Per-function statistics accumulate in self.timingDataDict; when
        TimingSummary is off each call is also logged individually.
    """
    if not self.TimingEnabled:
        # Profiling disabled: just forward the call.
        return func(*args, **kwargs)

    start = self.time()
    res = func(*args, **kwargs)
    stop = self.time()

    fname = func.__name__
    try:
        tdata = self.timingDataDict[fname]
    except KeyError:
        # First call of this function: create its timing record.
        tdata = timingData(fname)
        self.timingDataDict[fname] = tdata

    self._updateTiming(tdata, start, stop)

    if not self.TimingSummary:
        logger = mlog.mosaicLogging().getLogger(fname)
        logger.debug(_d(
            "Timing: iterations={0}, total={1:0.3f} ms, last={2:0.3f} ms, maximum={3:0.3f} ms",
            tdata["counter"], tdata["total"], tdata["last"], tdata["maxtime"]
        ))

    return res
def _opendb(self, dbname, **kwargs):
    """ Open an existing sqlite database file and set up its tables.

        Args:
            dbname      path to the sqlite database file
            kwargs      optional 'colNames' and 'colNames_t' lists, required
                        only when data will be appended later
    """
    try:
        self.logger.debug(_d("open DB {0}", dbname))
    except AttributeError:
        # The logger may not exist yet if _opendb runs before full init.
        self.logger = mlog.mosaicLogging().getLogger(__name__)
        self.logger.debug(_d("open DB {0}", dbname))

    if not hasattr(self, 'tableName'):
        self.logger.debug(_d("Attribute tableName not found. Setting tableName to 'metadata'"))
        self.tableName = 'metadata'

    self.dbFilename = dbname

    # colnames and colname types are needed for appending data. If they are not passed
    # as arguments, no exception is raised. In the future this can be retrieved from the
    # metadata_t table in the db.
    try:
        self.colNames = kwargs['colNames']
        self.colNames_t = kwargs['colNames_t']
    except KeyError:
        # Narrowed from a bare except: only a missing kwarg is expected here.
        pass

    self.db = sqlite3.connect(dbname, detect_types=sqlite3.PARSE_DECLTYPES)

    self._setuptables()
def _gaPost(eventType, content):
    """ Send one analytics event; failures are swallowed after debug-logging.

        Args:
            eventType   GA event action
            content     GA event label
    """
    logger = mlog.mosaicLogging().getLogger(name=__name__)
    try:
        headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}

        gac = _gaCredentialCache()

        # NOTE(review): eval() of a cached, remotely sourced flag — safe only
        # as long as the GA settings cache is trusted.
        if not eval(gac["gaenable"]):
            return

        payload = "v=1&tid={0}&cid={1}&t=event&ec=mosaic-{2}-{3}&ea={4}&el={5}".format(
            dec(gac["gaid"]), _uuid(), mosaic.__version__, mosaic.__build__, eventType, content
        )

        if mosaic.DeveloperMode:
            _debug = "/debug"
        else:
            _debug = ""

        conn = httplib.HTTPSConnection(dec(gac["gaurl"]))
        conn.request("POST", "{0}/{1}".format(_debug, dec(gac["gamode"])), payload, headers)
        response = conn.getresponse()
        data = response.read()
        conn.close()

        if _debug:
            logger.debug(_d("ga collect: {0}", data))
    except BaseException as err:
        logger.debug(_d("Exception ignored: {0}\n{1}", repr(err), traceback.format_exc()))
def _gaCredentialCache():
    """ Return the GA settings dict, refreshing a 24 h on-disk cache.

        Settings are cached in <tempdir>/.ga. An expired cache is refreshed
        from the network while preserving a locally disabled 'gaenable' flag.
        Returns None if the settings cannot be read at all.
    """
    try:
        try:
            logger = mlog.mosaicLogging().getLogger(name=__name__)
            ga_cache = format_path(tempfile.gettempdir() + '/.ga')
            logger.debug(_d("Looking for GA cache {0}", ga_cache))

            gaModTime = datetime.fromtimestamp(os.stat(ga_cache).st_mtime)
            gaExpireAge = timedelta(hours=24)
            gaAge = datetime.today() - gaModTime

            if gaAge > gaExpireAge:
                logger.debug(_d("GA settings cache has expired."))
                # Refresh the cache, but never re-enable GA if it was
                # disabled locally before the refresh.
                ga_old = _gaSettingsDict(ga_cache)
                _getGASettings(ga_cache)
                ga_new = _gaSettingsDict(ga_cache)

                if ga_old["gaenable"] == False:
                    ga_new["gaenable"] = False

                with open(ga_cache, "w") as ga:
                    ga.write(json.dumps(ga_new))
            else:
                logger.debug(_d("GA settings cache found ({0}). gaAge={1}", str(ga_cache), str(gaAge)))
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit are
            # no longer swallowed; typically os.stat fails here because the
            # cache file does not exist yet.
            logger.debug(_d("GA settings are not cached."))
            _getGASettings(ga_cache)

        with open(ga_cache, 'r') as ga:
            return json.loads(ga.read())
    except BaseException as err:
        logger.debug(_d("Exception ignored: {0}\n{1}", repr(err), traceback.format_exc()))
        return
def _init(self, **kwargs):
    """ Initialize the single step analysis class. """
    # Metadata attributes start at -1 until the fit populates them.
    self.mdOpenChCurrent = -1
    self.mdBlockedCurrent = -1

    self.mdEventStart = -1
    self.mdEventEnd = -1

    self.mdBlockDepth = -1
    self.mdResTime = -1

    self.mdRCConst1 = -1
    self.mdRCConst2 = -1

    self.mdRedChiSq = -1

    self.mdAbsEventStart = -1

    self.a2sLogger = mlog.mosaicLogging().getLogger(__name__)

    # Settings for single step event processing: pull the gaussian-fit
    # options out of the settings dict, with defaults for absent keys.
    try:
        self.FitTol = float(self.settingsDict.pop("FitTol", 1.e-7))
        self.FitIters = int(self.settingsDict.pop("FitIters", 5000))

        self.BlockRejectRatio = float(self.settingsDict.pop("BlockRejectRatio", 0.8))

        self.LinkRCConst = int(self.settingsDict.pop("LinkRCConst", 1))
    except ValueError as err:
        # A malformed settings value surfaces as a typed settings error.
        raise mosaic.commonExceptions.SettingsTypeError(err)
def _opendb(self, dbname, **kwargs):
    """ Open an existing sqlite database file and set up its tables.

        Args:
            dbname      path to the sqlite database file
            kwargs      optional 'colNames' and 'colNames_t' lists, needed
                        only when data will be appended later
    """
    try:
        self.logger.debug(_d("open DB {0}", dbname))
    except AttributeError:
        # Create the logger lazily if _opendb runs before full init.
        self.logger = mlog.mosaicLogging().getLogger(__name__)
        self.logger.debug(_d("open DB {0}", dbname))

    if not hasattr(self, 'tableName'):
        self.logger.debug(
            _d("Attribute tableName not found. Setting tableName to 'metadata'"))
        self.tableName = 'metadata'

    self.dbFilename = dbname

    # colnames and colname types are needed for appending data. If they are not passed
    # as arguments, no exception is raised. In the future this can be retrieved from the
    # metadata_t table in the db.
    try:
        self.colNames = kwargs['colNames']
        self.colNames_t = kwargs['colNames_t']
    except KeyError:
        # Narrowed from a bare except: only a missing kwarg is expected.
        pass

    self.db = sqlite3.connect(dbname, detect_types=sqlite3.PARSE_DECLTYPES)

    self._setuptables()
def _init(self, trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings):
    """ Segment a trajectory """
    # Parse algorithm-specific settings from the settings dict; absent keys
    # fall back to documented defaults.
    try:
        self.blockSizeSec = float(self.settingsDict.pop("blockSizeSec", 1.0))
        self.eventPad = int(self.settingsDict.pop("eventPad", 500))
        self.minEventLength = int(self.settingsDict.pop("minEventLength", 5))
        self.eventThreshold = float(self.settingsDict.pop("eventThreshold", 6.0))
        self.driftThreshold = float(self.settingsDict.pop("driftThreshold", 2.0))
        self.maxDriftRate = float(self.settingsDict.pop("maxDriftRate", 2.0))
        self.meanOpenCurr = float(self.settingsDict.pop("meanOpenCurr", -1.))
        self.sdOpenCurr = float(self.settingsDict.pop("sdOpenCurr", -1.))
        self.slopeOpenCurr = float(self.settingsDict.pop("slopeOpenCurr", -1.))
    except ValueError as err:
        raise commonExceptions.SettingsTypeError(err)

    #### Vars for event partition ####
    self.esLogger = mlog.mosaicLogging().getLogger(name=__name__, dbHnd=self.mdioDBHnd)

    self.eventstart = False
    self.eventdat = []
    # Rolling buffer of points immediately preceding a candidate event.
    self.preeventdat = deque(maxlen=self.eventPad)
    self.eventcount = 0
    self.eventprocessedcount = 0
def _initdb(self, **kwargs):
    """ Initialize the database tables

        Args:
            tableName   name of database table. Default is 'metadata'
            timeout     sqlite connection timeout in seconds (default 11.0)
            dbFilename  explicit database file path; auto-generated with a
                        timestamp when empty
    """
    self.logger = mlog.mosaicLogging().getLogger(__name__)

    if not hasattr(self, 'tableName'):
        self.tableName = 'metadata'

    # Log BEFORE raising: in the original, the logger.error call followed
    # the raise statement and was unreachable.
    if not hasattr(self, 'colNames'):
        self.logger.error("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
        raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
    if not hasattr(self, 'colNames_t'):
        self.logger.error("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))
        raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))

    dbTimeout = kwargs.pop('timeout', 11.0)
    self.logger.debug(_d("DB Timeout = {0}", dbTimeout))

    dbfile = kwargs.get('dbFilename', '')
    if dbfile == '':
        self.dbFilename = format_path(self.dbPath + '/' + 'eventMD-' + str(datetime.datetime.now().strftime('%Y%m%d-%H%M%S')) + '.sqlite')
    else:
        self.dbFilename = dbfile

    self.logger.debug(_d("dbFilename {0}", self.dbFilename))

    self.db = sqlite3.connect(self.dbFilename, detect_types=sqlite3.PARSE_DECLTYPES, timeout=dbTimeout)

    self._setuptables()

    self.logger.debug(_d("DB setup complete."))
def _initdb(self, **kwargs):
    """ Initialize the database tables

        Args:
            tableName   name of database table. Default is 'metadata'
            timeout     sqlite connection timeout in seconds (default 11.0)
    """
    self.logger = mlog.mosaicLogging().getLogger(__name__)

    if not hasattr(self, 'tableName'):
        self.tableName = 'metadata'

    # Log BEFORE raising: in the original, the logger.error call followed
    # the raise statement and was unreachable.
    if not hasattr(self, 'colNames'):
        self.logger.error("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
        raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
    if not hasattr(self, 'colNames_t'):
        self.logger.error("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))
        raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))

    dbTimeout = kwargs.pop('timeout', 11.0)
    self.logger.debug(_d("DB Timeout = {0}", dbTimeout))

    # Database files are timestamped so repeated runs never collide.
    self.dbFilename = format_path(self.dbPath + '/' + 'eventMD-' + str(datetime.datetime.now().strftime('%Y%m%d-%H%M%S')) + '.sqlite')

    self.logger.debug(_d("dbFilename {0}", self.dbFilename))

    self.db = sqlite3.connect(self.dbFilename, detect_types=sqlite3.PARSE_DECLTYPES, timeout=dbTimeout)

    self._setuptables()

    self.logger.debug(_d("DB setup complete."))
def __set__(self, obj, value):
    """ Descriptor setter: delegate to fset, optionally logging the write. """
    if self.fset is None:
        raise AttributeError("can't set attribute")

    if mosaic.LogProperties:
        mlog.mosaicLogging().getLogger(name=self.fset.__name__).debug(
            _dprop("SET {0}={1}", self.fset.__name__, value))

    self.fset(obj, value)
def __set__(self, obj, value):
    """ Descriptor setter: forward to the wrapped fset.

        Raises:
            AttributeError when no setter was defined.
    """
    if self.fset is None:
        raise AttributeError("can't set attribute")

    # Property-access logging is opt-in via mosaic.LogProperties.
    if mosaic.LogProperties:
        logger = mlog.mosaicLogging().getLogger(name=self.fset.__name__)
        logger.debug(_dprop("SET {0}={1}", self.fset.__name__, value))

    self.fset(obj, value)
class mosaicUnitTests(base):
    """ Setup command that runs the MOSAIC unit test suite via nose.

        Test groups are selected with boolean command-line flags; selected
        group names are appended to the nose argument list.
    """
    log = mlog.mosaicLogging().getLogger(__name__)

    description = "run the MOSAIC unit test suite."
    user_options = [
        ('algorithms', 'a', 'run algorithmic tests'),
        ('segment', 's', 'run time-series segmentation tests'),
        ('dependencies', 'd', 'test MOSAIC dependency versions'),
        ('modules', 'm', 'test MOSAIC modules'),
        ('trajio', 't', 'test MOSAIC I/O')
    ]

    def initialize_options(self):
        # All groups default to off; distutils sets these from CLI flags.
        self.algorithms = 0
        self.segment = 0
        self.dependencies = 0
        self.modules = 0
        self.trajio = 0

    def finalize_options(self):
        pass

    def run(self):
        # The original wrapped this body in `try: ... except: raise`, a
        # redundant construct that only re-raised every exception; removed.
        testList = []

        if self.algorithms:
            mosaicUnitTests.log.debug("Running algorithm unit tests")
            testList.extend(['adept_Test', 'cusum_Test', 'adept2State_Test'])
        if self.segment:
            mosaicUnitTests.log.debug("Running event segmentation unit tests")
            testList.extend(['eventPartition_Test', 'eventPartitionParallel_Test'])
        if self.dependencies:
            mosaicUnitTests.log.debug("Running dependency unit tests")
            testList.extend(['dependencyVersion_Test'])
        if self.modules:
            mosaicUnitTests.log.debug("Running module import unit tests")
            testList.extend(['import_Tests'])
        if self.trajio:
            mosaicUnitTests.log.debug("Running module trajectory I/O unit tests")
            testList.extend(['trajio_Test'])

        if self.verbose:
            mosaicUnitTests.log.debug("Running verbose unit tests")
            testargs = ['mosaic', '-v', '--where=mosaic/tests/']
        else:
            testargs = ['mosaic', '--where=mosaic/tests/']

        testargs.extend(testList)

        return nose.main(argv=testargs)
def __delete__(self, obj):
    """ Descriptor deleter: forward to the wrapped fdel.

        Raises:
            AttributeError when no deleter was defined.
    """
    if self.fdel is None:
        raise AttributeError("can't delete attribute")

    # Property-access logging is opt-in via mosaic.LogProperties.
    if mosaic.LogProperties:
        logger = mlog.mosaicLogging().getLogger(name=self.fdel.__name__)
        logger.debug(_dprop("DEL {0}", self.fdel.__name__))

    self.fdel(obj)
def __delete__(self, obj):
    """ Descriptor deleter: delegate to fdel, optionally logging the delete. """
    if self.fdel is None:
        raise AttributeError("can't delete attribute")

    if mosaic.LogProperties:
        mlog.mosaicLogging().getLogger(name=self.fdel.__name__).debug(
            _dprop("DEL {0}", self.fdel.__name__))

    self.fdel(obj)
def __init__(self, trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings, settingsString):
    """ Initialize a new event segment object

        Args:
            trajDataObj              trajectory I/O (data source) object
            eventProcHnd             handle to the event-processing class
            eventPartitionSettings   settings dict for event partitioning
            eventProcSettings        settings dict for event processing
            settingsString           raw settings text persisted to the DB
    """
    # Required arguments
    self.trajDataObj = trajDataObj
    self.eventProcHnd = eventProcHnd

    # Reset function timer since esTimer is a class variable
    partitionTimer.Reset()

    self.settingsDict = eventPartitionSettings
    self.eventProcSettingsDict = eventProcSettings

    self.procTime = 0.0

    self.FsHz = self.trajDataObj.FsHz
    self.DataLengthSec = self.trajDataObj.DataLengthSec

    try:
        self.writeEventTS = int(self.settingsDict.pop("writeEventTS", 1))
        self.parallelProc = int(self.settingsDict.pop("parallelProc", 1))
        self.reserveNCPU = int(self.settingsDict.pop("reserveNCPU", 2))
    except ValueError as err:
        raise mosaic.commonExceptions.SettingsTypeError(err)

    sys.stdout.flush()

    # Throwaway event-processing instance used only to query the metadata
    # headings and types needed for the database schema.
    self.tEventProcObj = self.eventProcHnd(
        [], self.FsHz,
        eventstart=0, eventend=0,
        baselinestats=[0, 0, 0],
        algosettingsdict=self.eventProcSettingsDict.copy(),
        savets=False,
        absdatidx=0,
        datafileHnd=None
    )

    self.mdioDBHnd = sqlite3MDIO.sqlite3MDIO()
    self.mdioDBHnd.initDB(
        dbPath=self.trajDataObj.datPath,
        tableName='metadata',
        colNames=(self.tEventProcObj.mdHeadings()),
        colNames_t=(self.tEventProcObj.mdHeadingDataType())
    )
    self.mdioDBHnd.writeSettings(settingsString)

    self.logger = mlog.mosaicLogging().getLogger(name=__name__, dbHnd=self.mdioDBHnd)
    self.logger.debug(_d("Event Segment Initialization"))
    self.logger.debug(_d("{0}", settingsString))

    if self.trajDataObj.dataFilter:
        self.fstring = type(self.trajDataObj.dataFilterObj).__name__
    else:
        self.fstring = 'None'

    self._writeanalysisinfo()

    if self.parallelProc:
        self._setupparallel()

    # Setup function timing
    self.timingObj = mosaicTiming.mosaicTiming()

    self._init(trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings)
def _init(self, **kwargs):
    """ Initialize the moving-average filter coefficients and buffer. """
    try:
        # NOTE(review): filterCoeff arrives as a string from the settings
        # file and is eval()'d; this is only safe for trusted settings.
        self.filterCoeff = eval(kwargs['filterCoeff'])
    except KeyError:
        # Default to a 10-point boxcar (uniform weights) filter.
        self.filterCoeff = [1.0 / 10.0] * 10

    self.filtBuf = np.array([])

    self.logger = mlog.mosaicLogging().getLogger(__name__)
def __get__(self, obj, objtype=None):
    """ Descriptor getter: return self on class access, else delegate to fget.

        Raises:
            AttributeError when no getter was defined.
    """
    if obj is None:
        # Accessed on the class, not an instance: return the descriptor.
        return self
    if self.fget is None:
        raise AttributeError("unreadable attribute")

    rval = self.fget(obj)

    if mosaic.LogProperties:
        mlog.mosaicLogging().getLogger(name=self.fget.__name__).debug(
            _dprop("GET {0}={1}", self.fget.__name__, rval))

    return rval
def _init(self, **kwargs):
    """ Set up moving-average filter state from keyword arguments. """
    if 'filterCoeff' in kwargs:
        # NOTE(review): the settings value is a string that gets eval()'d;
        # only safe for trusted settings files.
        self.filterCoeff = eval(kwargs['filterCoeff'])
    else:
        # Fall back to a 10-point uniform (boxcar) filter.
        self.filterCoeff = [1.0 / 10.0] * 10

    self.filtBuf = np.array([])

    self.logger = mlog.mosaicLogging().getLogger(__name__)
def _init(self, **kwargs):
    """ QDF-specific initialization.

        Raises:
            metaTrajIO.InsufficientArgumentsError when the feedback resistance
            (Rfb) or feedback capacitance (Cfb) settings are missing.
    """
    if not hasattr(self, 'Rfb') or not hasattr(self, 'Cfb'):
        raise metaTrajIO.InsufficientArgumentsError("{0} requires the feedback resistance (Rfb) and feedback capacitance (Cfb) to be defined.".format(type(self).__name__))

    # Default to interpreting the data as voltages unless told otherwise.
    if not hasattr(self, 'format'):
        self.format = 'V'

    # additional meta data
    self.fileFormat = 'qdf'

    self.qdfLogger = mlog.mosaicLogging().getLogger(name=__name__)
def __get__(self, obj, objtype=None):
    """ Descriptor getter: delegate to fget, optionally logging the read. """
    if obj is None:
        return self
    if self.fget is None:
        raise AttributeError("unreadable attribute")

    rval = self.fget(obj)

    # Property-access logging is opt-in via mosaic.LogProperties.
    if mosaic.LogProperties:
        logger = mlog.mosaicLogging().getLogger(name=self.fget.__name__)
        logger.debug(_dprop("GET {0}={1}", self.fget.__name__, rval))

    return rval
class ModuleImportTest(object):
    """ Helper that imports a dotted module path one component at a time. """

    log = mlog.mosaicLogging().getLogger(__name__)

    def runTestCase(self, modulename):
        """ Import `modulename` and walk down to its innermost submodule. """
        mod = __import__(modulename)
        ModuleImportTest.log.debug("import " + modulename)

        # __import__ returns the top-level package; descend attribute by
        # attribute through each remaining dotted component.
        for part in modulename.split('.')[1:]:
            mod = getattr(mod, part)
            ModuleImportTest.log.debug("import " + part)

        return
def _init(self, **kwargs):
    """ QDF-specific initialization; validates required amplifier settings.

        Raises:
            metaTrajIO.InsufficientArgumentsError if Rfb or Cfb is undefined.
    """
    if not (hasattr(self, 'Rfb') and hasattr(self, 'Cfb')):
        raise metaTrajIO.InsufficientArgumentsError(
            "{0} requires the feedback resistance (Rfb) and feedback capacitance (Cfb) to be defined."
            .format(type(self).__name__))

    # Interpret the stored data as voltages unless a format was supplied.
    if not hasattr(self, 'format'):
        self.format = 'V'

    # additional meta data
    self.fileFormat = 'qdf'

    self.qdfLogger = mlog.mosaicLogging().getLogger(name=__name__)
def _getGASettings(ga_cache):
    """ Fetch the GA settings file from the documentation server and cache it.

        Args:
            ga_cache    path of the local cache file to (over)write

        Failures are logged at debug level and otherwise ignored.
    """
    logger = mlog.mosaicLogging().getLogger(name=__name__)

    try:
        req = urllib2.Request(mosaic.DocumentationURL + ".ga")
        streamHandler = urllib2.build_opener()
        stream = streamHandler.open(req)
        try:
            with open(ga_cache, 'w') as ga:
                ga.write(stream.read())
        finally:
            # Close the HTTP response explicitly; the original leaked it.
            stream.close()

        logger.debug(_d("Cached GA settings to {0}.", ga_cache))
    except Exception:
        # Narrowed from a bare except so system-exiting exceptions propagate.
        logger.debug(_d("An error occured when trying to cache GA settings."))
def _getGASettings(ga_cache):
    """ Download the GA settings file and write it to the local cache.

        Args:
            ga_cache    path of the cache file to (over)write

        Failures are logged at debug level and otherwise ignored.
    """
    logger = mlog.mosaicLogging().getLogger(name=__name__)

    try:
        req = urllib2.Request(mosaic.DocumentationURL + ".ga")
        streamHandler = urllib2.build_opener()
        stream = streamHandler.open(req)
        try:
            with open(ga_cache, 'w') as ga:
                ga.write(stream.read())
        finally:
            # Close the HTTP response explicitly; the original leaked it.
            stream.close()

        logger.debug(_d("Cached GA settings to {0}.", ga_cache))
    except Exception:
        # Narrowed from a bare except so system-exiting exceptions propagate.
        logger.debug(_d("An error occured when trying to cache GA settings."))
def _init(self, **kwargs):
    """ Initialize wavelet denoising settings from keyword arguments. """
    self.logger = mlog.mosaicLogging().getLogger(__name__)

    try:
        self.waveletType = str(kwargs['wavelet'])
        self.waveletLevel = int(kwargs['level'])
        self.waveletThresholdType = str(kwargs['thresholdType'])
        self.waveletThresholdSubType = str(kwargs['thresholdSubType'])

        self.maxWaveletLevel = self.waveletLevel
    except KeyError:
        # NOTE(review): the wavelet attributes remain unset after this log
        # message, which will surface later as an AttributeError — confirm
        # this is the intended failure mode.
        self.logger.error("ERROR: Missing mandatory arguments 'wavelet', 'level' or 'threshold'")
def _init(self, **kwargs):
    """ Initialize the single step analysis class. """
    # Metadata attributes start at -1 (scalars) or [-1] (per-state lists)
    # until the fit populates them.
    self.mdOpenChCurrent = -1
    self.mdCurrentStep = [-1]

    self.mdNStates = -1

    self.mdBlockDepth = [-1]

    self.mdEventDelay = [-1]
    self.mdStateResTime = [-1]

    self.mdEventStart = -1
    self.mdEventEnd = -1

    self.mdResTime = -1

    self.mdRCConst = [-1]

    self.mdAbsEventStart = -1

    self.mdRedChiSq = -1

    self.nStates = -1

    self.adeptLogger = mlog.mosaicLogging().getLogger(__name__)

    # Settings for single step event processing: pull the gaussian-fit
    # options out of the settings dict, with defaults for absent keys.
    try:
        self.FitTol = float(self.settingsDict.pop("FitTol", 1.e-7))
        self.FitIters = int(self.settingsDict.pop("FitIters", 5000))

        self.StepSize = float(self.settingsDict.pop("StepSize", 3.0))
        self.MinStateLength = float(self.settingsDict.pop("MinStateLength", 4))
        self.MaxEventLength = int(self.settingsDict.pop("MaxEventLength", 10000))

        self.LinkRCConst = int(self.settingsDict.pop("LinkRCConst", 1))
    except ValueError as err:
        raise mosaic.commonExceptions.SettingsTypeError(err)
def _init(self, **kwargs):
    """ Initialize Bessel low-pass filter settings from keyword arguments. """
    self.logger = mlog.mosaicLogging().getLogger(__name__)

    try:
        self.filterOrder = float(kwargs['filterOrder'])
        self.filterCutoff = float(kwargs['filterCutoff'])
    except KeyError:
        # NOTE(review): attributes remain unset after this log message and
        # will surface later as an AttributeError — confirm intended.
        self.logger.error("ERROR: Missing mandatory arguments 'filterOrder' or 'filterCutoff'")

    # Causality is optional; anything other than the string "True" is False.
    self.causal = kwargs.get('causal', None) == "True"

    if self.causal:
        raise NotImplementedError('Causal filter has not been implemented yet')
def _init(self, **kwargs):
    """ Initialize the single step analysis class. """
    # Event metadata is reset to sentinel values before each fit; per-state
    # quantities are single-element lists.
    self.mdOpenChCurrent = -1
    self.mdCurrentStep = [-1]
    self.mdNStates = -1
    self.mdBlockDepth = [-1]
    self.mdEventDelay = [-1]
    self.mdStateResTime = [-1]
    self.mdEventStart = -1
    self.mdEventEnd = -1
    self.mdResTime = -1
    self.mdRCConst = [-1]
    self.mdAbsEventStart = -1
    self.mdRedChiSq = -1
    self.nStates = -1

    self.adeptLogger = mlog.mosaicLogging().getLogger(__name__)

    # Settings for single step event processing (gaussian fit options);
    # malformed values raise a typed settings error.
    try:
        self.FitTol = float(self.settingsDict.pop("FitTol", 1.e-7))
        self.FitIters = int(self.settingsDict.pop("FitIters", 5000))
        self.StepSize = float(self.settingsDict.pop("StepSize", 3.0))
        self.MinStateLength = float(self.settingsDict.pop("MinStateLength", 4))
        self.MaxEventLength = int(self.settingsDict.pop("MaxEventLength", 10000))
        self.LinkRCConst = int(self.settingsDict.pop("LinkRCConst", 1))
    except ValueError as err:
        raise mosaic.commonExceptions.SettingsTypeError(err)
def _init(self, **kwargs):
    """ Set up Bessel low-pass filter parameters.

        Raises:
            NotImplementedError when a causal filter is requested.
    """
    self.logger = mlog.mosaicLogging().getLogger(__name__)

    try:
        self.filterOrder = float(kwargs['filterOrder'])
        self.filterCutoff = float(kwargs['filterCutoff'])
    except KeyError:
        # NOTE(review): the filter attributes remain unset after this log
        # message — confirm a later AttributeError is the intended outcome.
        self.logger.error("ERROR: Missing mandatory arguments 'filterOrder' or 'filterCutoff'")

    try:
        self.causal = kwargs['causal'] == "True"
    except KeyError:
        self.causal = False

    if self.causal:
        raise NotImplementedError('Causal filter has not been implemented yet')
def _gaCredentialCache():
    """ Return GA settings from a bundled cache, refreshing when older than 24 h. """
    logger = mlog.mosaicLogging().getLogger(name=__name__)
    ga_cache = resource_path("mosaic/utilities/.ga")

    try:
        gaModTime = datetime.fromtimestamp(os.stat(ga_cache).st_mtime)
        gaExpireAge = timedelta(hours=24)
        gaAge = datetime.today() - gaModTime

        if gaAge > gaExpireAge:
            logger.debug(_d("GA settings cache has expired."))
            _getGASettings(ga_cache)
        else:
            logger.debug(_d("GA settings cache found. gaAge={0}", gaAge))
    except Exception:
        # Narrowed from a bare except; os.stat raising OSError here simply
        # means the cache file does not exist yet.
        logger.debug(_d("GA settings are not cached."))
        _getGASettings(ga_cache)

    with open(ga_cache, 'r') as ga:
        return json.loads(ga.read())
def _gaCredentialCache():
    """ Load GA settings from the packaged cache file, refreshing a stale cache.

        A cache older than 24 hours is re-fetched from the network before the
        settings are parsed and returned as a dict.
    """
    logger = mlog.mosaicLogging().getLogger(name=__name__)
    ga_cache = resource_path("mosaic/utilities/.ga")

    try:
        gaModTime = datetime.fromtimestamp(os.stat(ga_cache).st_mtime)
        gaExpireAge = timedelta(hours=24)
        gaAge = datetime.today() - gaModTime

        if gaAge > gaExpireAge:
            logger.debug(_d("GA settings cache has expired."))
            _getGASettings(ga_cache)
        else:
            logger.debug(_d("GA settings cache found. gaAge={0}", gaAge))
    except Exception:
        # Narrowed from a bare except; a missing cache file (OSError from
        # os.stat) is the expected path here.
        logger.debug(_d("GA settings are not cached."))
        _getGASettings(ga_cache)

    with open(ga_cache, 'r') as ga:
        return json.loads(ga.read())
def _init(self, **kwargs):
    """ Initialize the single step analysis class. """
    # Metadata sentinels: scalars start at -1, per-state values as [-1].
    self.mdOpenChCurrent = -1
    self.mdCurrentStep = [-1]
    self.mdNStates = -1
    self.mdBlockDepth = [-1]
    self.mdEventDelay = [-1]
    self.mdStateResTime = [-1]
    self.mdEventStart = -1
    self.mdEventEnd = -1
    self.mdResTime = -1
    self.mdAbsEventStart = -1
    self.mdThreshold = -1
    self.nStates = -1

    self.cusumLogger = mlog.mosaicLogging().getLogger(__name__)

    # Settings for detection of changes in current level; malformed values
    # raise a typed settings error.
    try:
        self.StepSize = float(self.settingsDict.pop("StepSize", 3.0))
        self.MinThreshold = float(self.settingsDict.pop("MinThreshold", 2.0))
        self.MaxThreshold = float(self.settingsDict.pop("MaxThreshold", 10.0))
        self.MinLength = float(self.settingsDict.pop("MinLength", 10))
    except ValueError as err:
        raise mosaic.commonExceptions.SettingsTypeError(err)

    # Start the adaptive threshold at its lower bound.
    self.mdThreshold = self.MinThreshold
def __init__(self):
    """ Initialize timing functions.

        Timing is active only in developer mode with code profiling enabled;
        'summary' profiling defers per-call logging to a final summary.
    """
    self.timingDataDict = {}

    if mosaic.DeveloperMode and mosaic.CodeProfiling != 'none':
        self.TimingEnabled = True
        self.TimingSummary = (mosaic.CodeProfiling == 'summary')
        self.logger = mlog.mosaicLogging().getLogger(__name__)
    else:
        self.TimingEnabled = False

    # Setup platform-dependent timing function. time.clock was deprecated
    # and removed in Python 3.8; prefer the monotonic high-resolution
    # perf_counter when available, keeping the legacy behavior otherwise.
    if hasattr(time, 'perf_counter'):
        self.timingFunc = time.perf_counter
    elif sys.platform.startswith('win'):
        self.timingFunc = time.clock
    else:
        self.timingFunc = time.time
def _init(self, **kwargs):
    """ Initialize the single step analysis class. """
    # Reset event metadata to sentinel values before each analysis.
    self.mdOpenChCurrent = -1

    self.mdCurrentStep = [-1]
    self.mdNStates = -1

    self.mdBlockDepth = [-1]
    self.mdEventDelay = [-1]
    self.mdStateResTime = [-1]

    self.mdEventStart = -1
    self.mdEventEnd = -1
    self.mdResTime = -1
    self.mdAbsEventStart = -1

    self.mdThreshold = -1
    self.nStates = -1

    self.cusumLogger = mlog.mosaicLogging().getLogger(__name__)

    # Settings for detection of changes in current level, with defaults
    # when a key is absent from the settings dict.
    try:
        self.StepSize = float(self.settingsDict.pop("StepSize", 3.0))
        self.MinThreshold = float(self.settingsDict.pop("MinThreshold", 2.0))
        self.MaxThreshold = float(self.settingsDict.pop("MaxThreshold", 10.0))
        self.MinLength = float(self.settingsDict.pop("MinLength", 10))
    except ValueError as err:
        raise mosaic.commonExceptions.SettingsTypeError(err)

    # The working threshold begins at its configured minimum.
    self.mdThreshold = self.MinThreshold
def __init__(self, icurr, icurrU, Fs, **kwargs):
    """ Store a ref to the raw event data.

        Args:
            icurr       ionic current time-series for the event
            icurrU      uncertainty of the ionic current
            Fs          sampling rate in Hz
            kwargs      required: eventstart, eventend, algosettingsdict,
                        absdatidx, baselinestats, savets;
                        optional: datafilehnd
    """
    self.eventData = icurr
    self.eventDataU = icurrU
    self.Fs = Fs

    self.logger = mlog.mosaicLogging().getLogger(name=__name__)

    # Will throw a key error if not passed
    self.eStartEstimate = kwargs['eventstart']
    self.eEndEstimate = kwargs['eventend']

    self.settingsDict = kwargs['algosettingsdict']

    self.absDataStartIndex = kwargs['absdatidx']

    [self.baseMean, self.baseSD, self.baseSlope] = kwargs['baselinestats']

    self.saveTS = kwargs['savets']

    # Optional args. If dataFileHnd is not passed at init, it must be set later
    # If not set before WriteEvent is called, it will result in a MissingMDIOError
    self.dataFileHnd = kwargs.pop("datafilehnd", None)

    # meta-data attrs that are common to all event processing
    self.mdProcessingStatus = 'normal'

    # Setup function timing
    self.timingObj = mosaicTiming.mosaicTiming()

    self.mdEventProcessTime = 0.0

    # Call sub-class initialization
    self._init(**kwargs)
def __init__(self, icurr, icurrU, Fs, **kwargs):
    """ Store a ref to the raw event data and common event metadata.

        Args:
            icurr       ionic current time-series for the event
            icurrU      uncertainty of the ionic current
            Fs          sampling rate in Hz
        Raises:
            KeyError when a mandatory keyword argument is missing.
    """
    self.eventData = icurr
    self.eventDataU = icurrU
    self.Fs = Fs

    self.logger = mlog.mosaicLogging().getLogger(name=__name__)

    # Mandatory keyword arguments; a missing key raises KeyError here.
    self.eStartEstimate = kwargs['eventstart']
    self.eEndEstimate = kwargs['eventend']
    self.settingsDict = kwargs['algosettingsdict']
    self.absDataStartIndex = kwargs['absdatidx']
    self.baseMean, self.baseSD, self.baseSlope = kwargs['baselinestats']
    self.saveTS = kwargs['savets']

    # Optional: if dataFileHnd is not passed at init it must be set later;
    # if still unset when WriteEvent is called, a MissingMDIOError results.
    self.dataFileHnd = kwargs.pop("datafilehnd", None)

    # Meta-data attrs common to all event processing.
    self.mdProcessingStatus = 'normal'

    # Setup function timing.
    self.timingObj = mosaicTiming.mosaicTiming()
    self.mdEventProcessTime = 0.0

    # Call sub-class initialization.
    self._init(**kwargs)
def _gaCredentialCache():
    """ Return the GA settings dict, refreshing a 24 h cache in the temp dir.

        An expired cache is refreshed from the network while preserving a
        locally disabled 'gaenable' flag. Returns None if the settings cannot
        be read at all.
    """
    try:
        try:
            logger = mlog.mosaicLogging().getLogger(name=__name__)
            ga_cache = format_path(tempfile.gettempdir() + '/.ga')
            logger.debug(_d("Looking for GA cache {0}", ga_cache))

            gaModTime = datetime.fromtimestamp(os.stat(ga_cache).st_mtime)
            gaExpireAge = timedelta(hours=24)
            gaAge = datetime.today() - gaModTime

            if gaAge > gaExpireAge:
                logger.debug(_d("GA settings cache has expired."))
                # Refresh the cache but never re-enable GA if it was
                # disabled locally before the refresh.
                ga_old = _gaSettingsDict(ga_cache)
                _getGASettings(ga_cache)
                ga_new = _gaSettingsDict(ga_cache)

                if ga_old["gaenable"] == False:
                    ga_new["gaenable"] = False

                with open(ga_cache, "w") as ga:
                    ga.write(json.dumps(ga_new))
            else:
                logger.debug(
                    _d("GA settings cache found ({0}). gaAge={1}", str(ga_cache), str(gaAge)))
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # propagate; typically os.stat fails because no cache exists yet.
            logger.debug(_d("GA settings are not cached."))
            _getGASettings(ga_cache)

        with open(ga_cache, 'r') as ga:
            return json.loads(ga.read())
    except BaseException as err:
        logger.debug(
            _d("Exception ignored: {0}\n{1}", repr(err), traceback.format_exc()))
        return
# deletion more effectively, by not deleting elements every time # popdata is called. Instead, data is actually deleted when the index # exceeds 1 million data points. self.currDataIdx = 0 # a var that determines if the end of the data stream is imminent. self.nearEndOfData = 0 # A global index that tracks the number of data points retrieved. self.globalDataIndex = 0 self.datLenSec = 0 self.initPipe = False self.logger = mlog.mosaicLogging().getLogger(name=__name__) # Call sub-class init self._init(**kwargs) def Stop(self): trajTimer.PrintStatistics() raise EmptyDataPipeError("End of data.") ################################################################# # Public API: functions ################################################################# @property def FsHz(self): """ .. important:: |property|
def _init(self, **kwargs):
    """ ABF-specific initialization: attach a module-level logger. """
    self.abfLogger = mlog.mosaicLogging().getLogger(name=__name__)
class binTrajIO(metaTrajIO.metaTrajIO):
	"""
		Read a file that contains interleaved binary data, ordered by column.
		Only a single column that holds ionic current data is read. The current
		in pA is returned after scaling by the amplifier scale factor
		(``AmplifierScale``) and removing any offsets (``AmplifierOffset``) if provided.

		:Usage and Assumptions:

			Binary data is interleaved by column. For three columns (*a*, *b*, and *c*)
			and *N* rows, binary data is assumed to be of the form:

				[ a_1, b_1, c_1, a_2, b_2, c_2, ... ... ..., a_N, b_N, c_N ]

			The column layout is specified with the ``ColumnTypes`` parameter, which
			accepts a list of tuples. For the example above, if column **a** is the ionic
			current in a 64-bit floating point format, column **b** is the ionic current
			representation in 16-bit integer format and column **c** is an index in 16-bit
			integer format, the ``ColumnTypes`` parameter is a list with three tuples, one
			for each column, as shown below:

				[('curr_pA', 'float64'), ('AD_V', 'int16'), ('index', 'int16')]

			The first element of each tuple is an arbitrary text label and the second
			element is a valid `Numpy type <http://docs.scipy.org/doc/numpy/user/basics.types.html>`_.

			Finally, the ``IonicCurrentColumn`` parameter holds the name (text label
			defined above) of the column that holds the ionic current time-series. Note
			that if an integer column is selected, the ``AmplifierScale`` and
			``AmplifierOffset`` parameters can be used to convert the voltage from the
			A/D to a current.

			Assuming that we use a floating point representation of the ionic current,
			and a sampling rate of 50 kHz, a settings section that will read the binary
			file format defined above is:

			.. code-block:: javascript

				"binTrajIO": {
					"AmplifierScale" : "1",
					"AmplifierOffset" : "0",
					"SamplingFrequency" : "50000",
					"ColumnTypes" : "[('curr_pA', 'float64'), ('AD_V', 'int16'), ('index', 'int16')]",
					"IonicCurrentColumn" : "curr_pA",
					"dcOffset": "0.0",
					"filter": "*.bin",
					"start": "0.0",
					"HeaderOffset": 0
				}

		:Settings Examples:

			Read 16-bit signed integers (big endian) with a 512 byte header offset. Set
			the amplifier scale to 400 pA, sampling rate to 200 kHz.

			.. code-block:: javascript

				"binTrajIO": {
					"AmplifierOffset": "0.0",
					"SamplingFrequency": 200000,
					"AmplifierScale": "400./2**16",
					"ColumnTypes": "[('curr_pA', '>i2')]",
					"dcOffset": 0.0,
					"filter": "*.dat",
					"start": 0.0,
					"HeaderOffset": 512,
					"IonicCurrentColumn": "curr_pA"
				}

			Read a two-column file: 64-bit floating point and 64-bit integers, and no
			header offset. Set the amplifier scale to 1 and sampling rate to 200 kHz.

			.. code-block:: javascript

				"binTrajIO": {
					"AmplifierOffset": "0.0",
					"SamplingFrequency": 200000,
					"AmplifierScale": "1.0",
					"ColumnTypes" : "[('curr_pA', 'float64'), ('AD_V', 'int64')]",
					"dcOffset": 0.0,
					"filter": "*.bin",
					"start": 0.0,
					"HeaderOffset": 0,
					"IonicCurrentColumn": "curr_pA"
				}

		:Parameters:

			In addition to :class:`~mosaic.metaTrajIO.metaTrajIO` args,

				- `AmplifierScale` : Full scale of amplifier (pA/2^nbits) that varies with the gain (default: 1.0).
				- `AmplifierOffset` : Current offset in the recorded data in pA (default: 0.0).
				- `SamplingFrequency` : Sampling rate of data in the file in Hz.
				- `HeaderOffset` : Ignore first *n* bytes of the file for header (default: 0 bytes).
				- `ColumnTypes` : A list of tuples with column names and types (see `Numpy types <http://docs.scipy.org/doc/numpy/user/basics.types.html>`_). Note only integer and floating point numbers are supported.
				- `IonicCurrentColumn` : Column name that holds ionic current data.

		:Returns:

			None

		:Errors:

			None
	"""

	def _init(self, **kwargs):
		"""
			Validate the settings attached by metaTrajIO and set defaults for
			optional parameters.

			:Raises:
				- `metaTrajIO.InsufficientArgumentsError` : when `SamplingFrequency`,
				  `ColumnTypes` or `IonicCurrentColumn` are missing.
		"""
		if not hasattr(self, 'SamplingFrequency'):
			raise metaTrajIO.InsufficientArgumentsError(
				"{0} requires the sampling rate in Hz to be defined.".format(
					type(self).__name__))
		if not hasattr(self, 'ColumnTypes'):
			raise metaTrajIO.InsufficientArgumentsError(
				"{0} requires the column types to be defined.".format(
					type(self).__name__))
		else:
			# ColumnTypes arrives as a string when read from the settings file.
			# NOTE(review): eval() of a settings-file string — trusted local
			# input is assumed here; do not feed untrusted settings files.
			if type(self.ColumnTypes) is str or type(
					self.ColumnTypes) is unicode:
				self.ColumnTypes = eval(self.ColumnTypes)
		if not hasattr(self, 'IonicCurrentColumn'):
			raise metaTrajIO.InsufficientArgumentsError(
				"{0} requires the ionic current column to be defined.".format(
					type(self).__name__))
		if not hasattr(self, 'HeaderOffset'):
			self.HeaderOffset = 0

		try:
			self.IonicCurrentType = dict(
				self.ColumnTypes)[self.IonicCurrentColumn]
		# FIX: `except KeyError, err` is Python-2-only comma syntax; the `as`
		# form is equivalent and valid on Python 2.6+ and Python 3.
		except KeyError as err:
			# Fall back to the first declared column when the requested label
			# is absent, and warn rather than fail.
			self.IonicCurrentColumn = self.ColumnTypes[0][0]
			self.IonicCurrentType = self.ColumnTypes[0][1]
			logging.warning(
				"WARNING: IonicCurrentColumn {0} not found. Defaulting to {1}."
				.format(err, self.IonicCurrentColumn))

		if not hasattr(self, 'AmplifierScale'):
			self.AmplifierScale = 1.0
		else:
			# Settings allow arithmetic expressions, e.g. "400./2**16"
			# (see class docstring) — hence the eval before float().
			self.AmplifierScale = float(eval(self.AmplifierScale))

		if not hasattr(self, 'AmplifierOffset'):
			self.AmplifierOffset = 0.0
		else:
			self.AmplifierOffset = float(self.AmplifierOffset)

		# additional meta data
		self.fileFormat = 'bin'

		# set the sampling frequency in Hz.
		if not hasattr(self, 'Fs'):
			self.Fs = self.SamplingFrequency

		self.binLogger = mlog.mosaicLogging().getLogger(name=__name__)
self.currDataIdx=0 # a var that determines if the end of the data stream is imminent. self.nearEndOfData=0 # A global index that tracks the number of data points retrieved. self.globalDataIndex=0 self.datLenSec=0 self.initPipe=False # A list that holds the names of processed files. self.processedFilenames=[] self.logger=mlog.mosaicLogging().getLogger(name=__name__) # Call sub-class init self._init(**kwargs) def Stop(self): trajTimer.PrintStatistics() raise EmptyDataPipeError("End of data.") ################################################################# # Public API: functions ################################################################# @property def FsHz(self): """ .. important:: |property|
class chimeraTrajIO(metaTrajIO.metaTrajIO):
	"""
		Read a file generated by the Chimera VC100. The current in pA is returned
		after scaling by the amplifier scale factors.

		:Usage and Assumptions:

			Binary data is in a single column. As of 7/11/16 can only be unsigned
			16 bit integers and has only one column:

			The column layout is specified with the ``ColumnTypes`` parameter,
			which accepts a list of tuples.

				[('curr_pA', '<u2')]

			The option is left in in case of future changes to the platform, but
			can be left alone in the settings file for now.

			The first element of each tuple is an arbitrary text label and the
			second element is a valid `Numpy type <http://docs.scipy.org/doc/numpy/user/basics.types.html>`_.

			Chimera gain settings are used to convert the integers stored by the
			ADC to current values. The following block provides an example.
			Parameters can be found in the .mat files output by the VC100.

			IMPORTANT: This setup assumes that all files that match `filter` have
			the same ADC settings. Future versions could implement reading of the
			matching .mat files to remove the necessity to enter these parameters.

			.. code-block:: javascript

				"chimeraTrajIO": {
					"TIAgain" : "100000000",
					"preADCgain" : "1.305",
					"SamplingFrequency" : "4166666.66667",
					"ColumnTypes" : "[('curr_pA', '<u2')]",
					"IonicCurrentColumn" : "curr_pA",
					"mVoffset": "-0.2776",
					"pAoffset": "2.0e-10",
					"ADCvref": "2.5",
					"ADCbits": "14",
					"filter": "*.log",
					"start": "0.0",
					"HeaderOffset": "0"
				}

		:Parameters:

			In addition to :class:`~mosaic.metaTrajIO.metaTrajIO` args,

				- `SamplingFrequency` : Sampling rate of data in the file in Hz.
				- `ColumnTypes` : A list of tuples with column names and types (see `Numpy types <http://docs.scipy.org/doc/numpy/user/basics.types.html>`_). Note only integer and floating point numbers are supported.
				- `IonicCurrentColumn` : Column name that holds ionic current data.
				- `mVoffset` : voltage offset of ADC
				- `ADCvref` : voltage reference point for ADC
				- `ADCbits` : amplifier scale precision in bits
				- `TIAgain` : Feedback resistor value.
				- `preADCgain` : analog gain before ADC
				- `HeaderOffset` : Ignore first *n* bytes of the file for header (currently fixed at: 0 bytes).

		:Returns:

			None

		:Errors:

			None
	"""

	def _init(self, **kwargs):
		"""
			Validate the Chimera ADC settings attached by metaTrajIO and coerce
			the gain parameters to float.

			:Raises:
				- `metaTrajIO.InsufficientArgumentsError` : when a required
				  setting is missing.
		"""
		# FIX: create the logger up front (it was previously assigned at the
		# very end of _init) so it is available for the warning below.
		self.chimeraLogger = mlog.mosaicLogging().getLogger(name=__name__)

		if not hasattr(self, 'SamplingFrequency'):
			raise metaTrajIO.InsufficientArgumentsError(
				"{0} requires the sampling rate in Hz to be defined.".format(
					type(self).__name__))
		if not hasattr(self, 'ColumnTypes'):
			raise metaTrajIO.InsufficientArgumentsError(
				"{0} requires the column types to be defined.".format(
					type(self).__name__))
		else:
			# ColumnTypes arrives as a string when read from the settings file.
			# NOTE(review): eval() of a settings-file string — trusted local
			# input is assumed here.
			if type(self.ColumnTypes) is str or type(
					self.ColumnTypes) is unicode:
				self.ColumnTypes = eval(self.ColumnTypes)
		if not hasattr(self, 'IonicCurrentColumn'):
			raise metaTrajIO.InsufficientArgumentsError(
				"{0} requires the ionic current column to be defined.".format(
					type(self).__name__))
		if not hasattr(self, 'HeaderOffset'):
			self.HeaderOffset = 0

		try:
			self.IonicCurrentType = dict(
				self.ColumnTypes)[self.IonicCurrentColumn]
		# FIX: `except KeyError, err` is Python-2-only comma syntax; `as` is
		# equivalent and valid on Python 2.6+ and Python 3.
		except KeyError as err:
			# Fall back to the first declared column when the requested label
			# is absent.
			self.IonicCurrentColumn = self.ColumnTypes[0][0]
			self.IonicCurrentType = self.ColumnTypes[0][1]
			# FIX: was a bare py2 `print`; log through the module logger for
			# consistency with binTrajIO's missing-column warning.
			self.chimeraLogger.warning(
				"WARNING: IonicCurrentColumn {0} not found. Defaulting to {1}."
				.format(err, self.IonicCurrentColumn))

		# Each Chimera gain parameter must be supplied (values come from the
		# VC100 .mat file) and is coerced to float. The loop preserves the
		# original check order and error messages of the six per-parameter
		# if/else blocks it replaces.
		for param in ('TIAgain', 'preADCgain', 'mVoffset', 'pAoffset',
				'ADCvref', 'ADCbits'):
			if not hasattr(self, param):
				raise metaTrajIO.InsufficientArgumentsError(
					"{0} requires the {1} be specified as found in the appropriate .mat file.".format(
						type(self).__name__, param))
			else:
				setattr(self, param, float(getattr(self, param)))

		# additional meta data
		self.fileFormat = 'bin'

		# set the sampling frequency in Hz.
		if not hasattr(self, 'Fs'):
			self.Fs = self.SamplingFrequency
def __init__(self, trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings, settingsString, **kwargs):
	"""
		Initialize a new event segment object

		:Parameters:
			- `trajDataObj` : trajectory I/O object supplying FsHz, DataLengthSec,
			  datPath, and the optional data filter.
			- `eventProcHnd` : event-processing class handle; instantiated once
			  here (with empty data) to obtain metadata headings, and later for
			  each detected event.
			- `eventPartitionSettings` : settings dict for the partition algorithm;
			  common keys are popped below, the rest is left for the sub-class.
			- `eventProcSettings` : settings dict passed to each event processor.
			- `settingsString` : full settings text, persisted into the output DB.

		:Keyword Args:
			- `dbFilename` : optional output database filename.

		:Raises:
			- `mosaic.commonExceptions.SettingsTypeError` : when a common setting
			  cannot be converted to its expected numeric type.
	"""
	# Required arguments
	self.trajDataObj=trajDataObj
	self.eventProcHnd=eventProcHnd

	# Reset function timer since esTimer is a class variable
	partitionTimer.Reset()

	self.settingsDict = eventPartitionSettings
	self.eventProcSettingsDict = eventProcSettings

	self.procTime=0.0

	self.FsHz=self.trajDataObj.FsHz
	self.DataLengthSec=self.trajDataObj.DataLengthSec

	# Pop the partition settings common to all algorithms; any failed numeric
	# conversion is reported as a settings-type error.
	try:
		self.writeEventTS=int(self.settingsDict.pop("writeEventTS",1))
		self.parallelProc=int(self.settingsDict.pop("parallelProc",1))
		self.reserveNCPU=int(self.settingsDict.pop("reserveNCPU",2))
		self.driftThreshold=float(self.settingsDict.pop("driftThreshold",2.0))
		self.maxDriftRate=float(self.settingsDict.pop("maxDriftRate",2.0))
		self.minBaseline=float(self.settingsDict.pop("minBaseline",-1.))
		self.maxBaseline=float(self.settingsDict.pop("maxBaseline",-1.))
	except ValueError as err:
		raise mosaic.commonExceptions.SettingsTypeError( err )

	sys.stdout.flush()

	# Build a throwaway event-processor instance (empty data) solely to query
	# the metadata column headings/types for the output database schema.
	# NOTE(review): the key spelling `datafileHnd` here differs from the
	# `datafilehnd` popped in metaEventProcessor.__init__ — verify the intended
	# spelling; the value is None either way at this call site.
	self.tEventProcObj=self.eventProcHnd([], [], self.FsHz, eventstart=0,eventend=0, baselinestats=[ 0,0,0 ], algosettingsdict=self.eventProcSettingsDict.copy(), savets=False, absdatidx=0, datafileHnd=None )

	# Create the metadata database alongside the data being analyzed.
	self.mdioDBHnd=sqlite3MDIO.sqlite3MDIO()
	self.mdioDBHnd.initDB(
				dbPath=self.trajDataObj.datPath,
				tableName='metadata',
				colNames=(self.tEventProcObj.mdHeadings()),
				colNames_t=(self.tEventProcObj.mdHeadingDataType()),
				dbFilename=kwargs.get('dbFilename', '')
			)
	self.mdioDBHnd.writeSettings(settingsString)

	# Logger is bound to the DB handle so log records are persisted with the
	# analysis output.
	self.logger=mlog.mosaicLogging().getLogger(name=__name__, dbHnd=self.mdioDBHnd)
	self.logger.debug(_d("Event Segment Initialization"))
	self.logger.debug(_d("{0}", settingsString))

	# Record the name of the active data filter (or 'None') for analysis info.
	if self.trajDataObj.dataFilter:
		self.fstring=type(self.trajDataObj.dataFilterObj).__name__
	else:
		self.fstring='None'

	self._writeanalysisinfo()

	if self.parallelProc:
		self._setupparallel()

	# Setup function timing
	self.timingObj=mosaicTiming.mosaicTiming()

	# Call sub-class initialization with the original (partially consumed)
	# settings dicts.
	self._init(trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings)