Пример #1
0
def listDataFolders():
    """Return a JSON listing of sub-folders under the web server data root.

    Reads 'level' from the POSTed JSON body (default 'Data Root') and lists
    the directories directly below that level. Each entry carries its name,
    path relative to the data root, a description, and a modification time.

    Returns:
        (flask JSON response, 200)
    """
    # logger=mlog.mosaicLogging().getLogger(name=__name__)

    params = dict(request.get_json())

    level = params.get('level', 'Data Root')
    if level == 'Data Root':
        folder = mosaic.WebServerDataLocation
    else:
        folder = format_path(mosaic.WebServerDataLocation + '/' + level + '/')
    # Log once; previously the identical call was duplicated in both branches.
    logger.info("/list-data-folders: " + folder)

    folderList = []

    for item in sorted(glob.glob(folder + '/*')):
        if os.path.isdir(item):
            # Build the attribute dict only for directories.
            itemAttr = {}
            itemAttr['name'] = os.path.relpath(item, folder)
            itemAttr['relpath'] = os.path.relpath(
                item, format_path(mosaic.WebServerDataLocation))
            itemAttr['desc'] = _folderDesc(item)
            itemAttr['modified'] = time.strftime(
                '%m/%d/%Y, %I:%M %p', time.localtime(os.path.getmtime(item)))

            folderList.append(itemAttr)

    return jsonify(respondingURL='list-data-folders',
                   level=level + '/',
                   fileData=folderList), 200
Пример #2
0
def listDatabaseFiles():
	"""List sub-folders and SQLite database files below the requested level."""
	params = dict(request.get_json())

	level = params.get('level', 'Data Root')
	logger.info("/list-database-files: " + str(level))
	if level == 'Data Root':
		folder = mosaic.WebServerDataLocation
	else:
		folder = format_path(mosaic.WebServerDataLocation + '/' + level + '/')
	logger.info("/list-database-files: " + folder)

	dataRoot = format_path(mosaic.WebServerDataLocation)
	fileList = []

	for entry in sorted(glob.glob(folder + '/*')):
		# Folders are always listed; plain files only when they are SQLite DBs.
		if os.path.isdir(entry):
			desc = _folderDesc(entry)
		elif _fileExtension(entry) == ".sqlite":
			desc = "SQLite database, {0}".format(_fileSize(entry))
		else:
			continue

		fileList.append({
			'name': os.path.relpath(entry, folder),
			'relpath': os.path.relpath(entry, dataRoot),
			'desc': desc,
			'modified': time.strftime('%m/%d/%Y, %I:%M %p', time.localtime(os.path.getmtime(entry))),
		})

	return jsonify( respondingURL='list-database-files', level=level+'/', fileData=fileList ), 200
Пример #3
0
def listDataFolders():
	"""Return the data folders one level below the requested path."""
	# logger=mlog.mosaicLogging().getLogger(name=__name__)

	params = dict(request.get_json())

	level = params.get('level', 'Data Root')
	if level == 'Data Root':
		folder = mosaic.WebServerDataLocation
		logger.info("/list-data-folders: " + folder)
	else:
		folder = format_path(mosaic.WebServerDataLocation + '/' + level + '/')
		logger.info("/list-data-folders: " + folder)

	dataRoot = format_path(mosaic.WebServerDataLocation)

	# One attribute dict per directory entry under the requested folder.
	folderList = [
		{
			'name': os.path.relpath(entry, folder),
			'relpath': os.path.relpath(entry, dataRoot),
			'desc': _folderDesc(entry),
			'modified': time.strftime('%m/%d/%Y, %I:%M %p', time.localtime(os.path.getmtime(entry))),
		}
		for entry in sorted(glob.glob(folder + '/*'))
		if os.path.isdir(entry)
	]

	return jsonify( respondingURL='list-data-folders', level=level+'/', fileData=folderList ), 200
Пример #4
0
def newAnalysis():
    global gAnalysisSessions
    # logger=mlog.mosaicLogging().getLogger(name=__name__)

    try:
        defaultSettings = False
        params = dict(request.get_json())

        dataPath = params.get('dataPath', None)
        settingsString = params.get('settingsString', None)
        sessionID = params.get('sessionID', None)

        if dataPath and not settingsString:  # brand new session
            # print "brand new session: ", dataPath, settingsString, sessionID
            logger.info("/new-analysis: " +
                        format_path(mosaic.WebServerDataLocation + '/' +
                                    dataPath))

            sessionID = gAnalysisSessions.newSession()
            ma = mosaicAnalysis.mosaicAnalysis(
                format_path(mosaic.WebServerDataLocation + '/' + dataPath),
                sessionID)

            gAnalysisSessions.addDataPath(
                sessionID,
                format_path(mosaic.WebServerDataLocation + '/' + dataPath))
            gAnalysisSessions.addMOSAICAnalysisObject(sessionID, ma)
        elif sessionID and settingsString:  # update settings
            # print "update settings: ", dataPath, settingsString, sessionID
            ma = gAnalysisSessions.getSessionAttribute(sessionID,
                                                       'mosaicAnalysisObject')
            ma.updateSettings(settingsString)

            gAnalysisSessions.addSettingsString(sessionID,
                                                ma.analysisSettingsDict)
        elif sessionID and not settingsString:  # a session ID loaded from a route
            # print "session id from route: ", dataPath, settingsString, sessionID
            ma = gAnalysisSessions.getSessionAttribute(sessionID,
                                                       'mosaicAnalysisObject')
        else:
            raise InvalidPOSTRequest('An invalid POST request was received.')

        return jsonify(respondingURL='new-analysis',
                       sessionID=sessionID,
                       **ma.setupAnalysis()), 200
    except EmptyDataPipeError, err:
        gAnalysisSessions.pop(sessionID, None)
        return jsonify(respondingURL='new-analysis',
                       errType='EmptyDataPipeError',
                       errSummary="End of data.",
                       errText=str(err)), 500
Пример #5
0
    def runAnalysis(self):
        """Launch the analysis in a forked process and mark it running.

        Builds a timestamped SQLite results filename, persists the current
        settings, then starts a SingleChannelAnalysis run asynchronously.
        """
        # The original `try: ... except: raise` wrapper was a no-op and has
        # been removed; exceptions propagate unchanged.
        fname = 'eventMD-' + str(
            datetime.datetime.now().strftime('%Y%m%d-%H%M%S')) + '.sqlite'

        self.dbFile = format_path(self.dataPath + "/" + fname)

        self.processHandle = self._processHnd()

        self._writeSettings()

        self.analysisObject = sca.SingleChannelAnalysis(
            self.dataPath,
            self.trajIOHandle,
            None,
            self.partitionHandle,
            self.processHandle,
            dbFilename=fname)
        self.analysisObject.Run(forkProcess=True)

        # Give the forked process a moment to start before flagging running.
        time.sleep(3)

        self.analysisRunning = True
Пример #6
0
	def _loadEBSState(self):
		# Look for an EBS state file next to the data files; if present,
		# populate the QDF feedback resistance/capacitance fields and show
		# the QDF-specific widgets, otherwise hide them.
		path=self.analysisDataModel["DataFilesPath"] 

		if path:
			# '?' matches a single character, so this finds e.g. '*_State.txt'
			# or '*_state.txt'.
			ebsFile=glob.glob(format_path(str(path)+'/*_?tate.txt'))

			if len(ebsFile) > 0:
				ebsState=mosaicgui.EBSStateFileDict.EBSStateFileDict(ebsFile[0])
				
				# Defaults of 1.0 apply when the keys are absent.
				rfb=ebsState.pop('FB Resistance',1.0)
				cfb=ebsState.pop('FB Capacitance',1.0)

				# Display Rfb scaled by 1E9 and Cfb by 1E-12 (presumably
				# gigaohms and picofarads — confirm against the UI labels).
				self.qdfRfbLineEdit.setText( str(float(rfb)/1E9) )
				self.qdfCfbLineEdit.setText( str(float(cfb)/1E-12) )

				# More Traj viewer settings
				self.analysisDataModel["Rfb"]=rfb
				self.analysisDataModel["Cfb"]=cfb

				# Show QDF specific widgets
				self.qdfCfbLineEdit.show()				
				self.qdfRfbLineEdit.show()
				self.CfbLabel.show()
				self.RfbLabel.show()
				self.CfbUnitsLabel.show()
				self.RfbUnitsLabel.show()
			else:
				# Hide QDF specific widgets
				self.qdfCfbLineEdit.hide()				
				self.qdfRfbLineEdit.hide()
				self.CfbLabel.hide()
				self.RfbLabel.hide()
				self.CfbUnitsLabel.hide()
				self.RfbUnitsLabel.hide()
Пример #7
0
def _gaCredentialCache():
	"""Return the Google Analytics settings dict, refreshing a 24-hour disk cache.

	Settings are cached in '<tempdir>/.ga'. When the cache is older than 24
	hours it is refreshed, preserving a user's explicit opt-out. Returns
	None if the settings cannot be read at all.
	"""
	try:
		try:
			logger=mlog.mosaicLogging().getLogger(name=__name__)
			
			ga_cache=format_path(tempfile.gettempdir()+'/.ga')
			logger.debug(_d("Looking for GA cache {0}", ga_cache))

			# os.stat raises when the cache file is missing, which jumps to
			# the inner except below and triggers a fresh fetch.
			gaModTime = datetime.fromtimestamp(os.stat(ga_cache).st_mtime)
			gaExpireAge=timedelta(hours=24)
			gaAge=datetime.today() - gaModTime

			if gaAge > gaExpireAge:
				logger.debug(_d("GA settings cache has expired."))
				# Refresh, but keep analytics disabled if the user had
				# previously turned it off.
				ga_old=_gaSettingsDict(ga_cache)
				_getGASettings(ga_cache)
				ga_new=_gaSettingsDict(ga_cache)

				if ga_old["gaenable"]==False:
					ga_new["gaenable"]=False

				with open(ga_cache, "w") as ga:
					ga.write(json.dumps(ga_new))
			else:
				logger.debug(_d("GA settings cache found ({0}). gaAge={1}", str(ga_cache), str(gaAge)))
		except:
			# Best-effort: any failure above means no usable cache; fetch.
			logger.debug(_d("GA settings are not cached."))
			_getGASettings(ga_cache)

		with open(ga_cache, 'r') as ga:
			return json.loads(ga.read())
	except BaseException as err:
		# Analytics must never break the application; log and return None.
		logger.debug(_d("Exception ignored: {0}\n{1}", repr(err), traceback.format_exc()))
		return
Пример #8
0
	def _filename(self):
		"""
			Build the next output file path from the data file prefix and the block index.
		"""
		self._creategenerator()

		stem = next(self.fileGenerator)
		return format_path('{0}/{1}.{2}'.format(self.outDir, stem, self.extension))
Пример #9
0
	def runAnalysis(self):
		"""Launch the analysis in a forked process and mark it running.

		Builds a timestamped SQLite results filename, persists the current
		settings, then starts a SingleChannelAnalysis run asynchronously.
		"""
		# The original `try: ... except: raise` wrapper was a no-op and has
		# been removed; exceptions propagate unchanged.
		fname='eventMD-'+str(datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))+'.sqlite'

		self.dbFile=format_path(self.dataPath+"/"+fname)

		self.processHandle=self._processHnd()

		self._writeSettings()

		self.analysisObject=sca.SingleChannelAnalysis(
			self.dataPath,
			self.trajIOHandle,
			None,
			self.partitionHandle,
			self.processHandle,
			dbFilename=fname
		)
		self.analysisObject.Run(forkProcess=True)

		# Give the forked process a moment to start before flagging running.
		time.sleep(3)

		self.analysisRunning=True
Пример #10
0
    def _loadEBSState(self):
        # Look for an EBS state file next to the data files; if present,
        # populate the QDF feedback resistance/capacitance fields and show
        # the QDF-specific widgets, otherwise hide them.
        path = self.analysisDataModel["DataFilesPath"]

        if path:
            # '?' matches a single character, so this finds e.g.
            # '*_State.txt' or '*_state.txt'.
            ebsFile = glob.glob(format_path(str(path) + '/*_?tate.txt'))

            if len(ebsFile) > 0:
                ebsState = mosaicgui.EBSStateFileDict.EBSStateFileDict(
                    ebsFile[0])

                # Defaults of 1.0 apply when the keys are absent.
                rfb = ebsState.pop('FB Resistance', 1.0)
                cfb = ebsState.pop('FB Capacitance', 1.0)

                # Display Rfb scaled by 1E9 and Cfb by 1E-12 (presumably
                # gigaohms and picofarads — confirm against the UI labels).
                self.qdfRfbLineEdit.setText(str(float(rfb) / 1E9))
                self.qdfCfbLineEdit.setText(str(float(cfb) / 1E-12))

                # More Traj viewer settings
                self.analysisDataModel["Rfb"] = rfb
                self.analysisDataModel["Cfb"] = cfb

                # Show QDF specific widgets
                self.qdfCfbLineEdit.show()
                self.qdfRfbLineEdit.show()
                self.CfbLabel.show()
                self.RfbLabel.show()
                self.CfbUnitsLabel.show()
                self.RfbUnitsLabel.show()
            else:
                # Hide QDF specific widgets
                self.qdfCfbLineEdit.hide()
                self.qdfRfbLineEdit.hide()
                self.CfbLabel.hide()
                self.RfbLabel.hide()
                self.CfbUnitsLabel.hide()
                self.RfbUnitsLabel.hide()
Пример #11
0
	def _initdb(self, **kwargs):
		"""
			Initialize the database tables

			Args:
				tableName   name of database table. Default is 'metadata'
			Kwargs:
				timeout     sqlite connection timeout in seconds (default 11.0)
			Raises:
				metaMDIO.InsufficientArgumentsError: when 'colNames' or
					'colNames_t' have not been set on the instance.
		"""
		self.logger=mlog.mosaicLogging().getLogger(__name__)

		if not hasattr(self, 'tableName'):
			self.tableName='metadata'
		# Log BEFORE raising; previously these log calls sat after the raise
		# statements and were unreachable.
		if not hasattr(self, 'colNames'):
			self.logger.error("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
			raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
		if not hasattr(self, 'colNames_t'):
			self.logger.error("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))
			raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))

		dbTimeout=kwargs.pop('timeout', 11.0)
		self.logger.debug(_d("DB Timeout = {0}", dbTimeout))

		# Timestamped database filename inside dbPath.
		self.dbFilename=format_path(self.dbPath+'/'+'eventMD-' +str(datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))+'.sqlite')
		self.logger.debug(_d("dbFilename {0}", self.dbFilename))
		self.db = sqlite3.connect(self.dbFilename, detect_types=sqlite3.PARSE_DECLTYPES, timeout=dbTimeout)

		self._setuptables()

		self.logger.debug(_d("DB setup complete."))
Пример #12
0
def loadAnalysis():
    global gAnalysisSessions

    try:
        params = dict(request.get_json())
        db = params.get('databaseFile', None)

        databaseFile = format_path(mosaic.WebServerDataLocation + '/' + db)
        if not databaseFile:
            raise InvalidPOSTRequest(
                "Missing required parameter 'databaseFile'")

        info, settings = _dbInfo(databaseFile)

        dataPath = info['datPath']

        sessionID = gAnalysisSessions.newSession()
        ma = mosaicAnalysis.mosaicAnalysis(dataPath, sessionID)
        ma.updateSettings(settings)

        # ma.setupAnalysis()

        gAnalysisSessions.addDatabaseFile(sessionID, databaseFile)
        gAnalysisSessions.addAnalysisRunningFlag(sessionID, False)
        gAnalysisSessions.addDataPath(sessionID, dataPath)
        gAnalysisSessions.addMOSAICAnalysisObject(sessionID, ma)

        return jsonify(respondingURL='load-analysis', sessionID=sessionID), 200
    except InvalidPOSTRequest, err:
        return jsonify(respondingURL='load-analysis',
                       errType='InvalidPOSTRequest',
                       errSummary="An invalid POST request was received.",
                       errText=str(err)), 500
Пример #13
0
	def _initdb(self, **kwargs):
		"""
			Initialize the database tables

			Args:
				tableName   name of database table. Default is 'metadata'
			Kwargs:
				timeout     sqlite connection timeout in seconds (default 11.0)
				dbFilename  explicit database file; when empty, a timestamped
				            name is generated inside dbPath.
			Raises:
				metaMDIO.InsufficientArgumentsError: when 'colNames' or
					'colNames_t' have not been set on the instance.
		"""
		self.logger=mlog.mosaicLogging().getLogger(__name__)

		if not hasattr(self, 'tableName'):
			self.tableName='metadata'
		# Log BEFORE raising; previously these log calls sat after the raise
		# statements and were unreachable.
		if not hasattr(self, 'colNames'):
			self.logger.error("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
			raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
		if not hasattr(self, 'colNames_t'):
			self.logger.error("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))
			raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))

		dbTimeout=kwargs.pop('timeout', 11.0)
		self.logger.debug(_d("DB Timeout = {0}", dbTimeout))

		dbfile=kwargs.get('dbFilename', '')
		if dbfile=='':
			self.dbFilename=format_path(self.dbPath+'/'+'eventMD-' +str(datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))+'.sqlite')
		else:
			self.dbFilename=dbfile

		self.logger.debug(_d("dbFilename {0}", self.dbFilename))
		self.db = sqlite3.connect(self.dbFilename, detect_types=sqlite3.PARSE_DECLTYPES, timeout=dbTimeout)

		self._setuptables()

		self.logger.debug(_d("DB setup complete."))
Пример #14
0
def loadAnalysis():
	global gAnalysisSessions

	try:
		params = dict(request.get_json())
		db=params.get('databaseFile', None)

		databaseFile = format_path(mosaic.WebServerDataLocation+'/'+db )
		if not databaseFile:
			raise InvalidPOSTRequest("Missing required parameter 'databaseFile'")

		info, settings=_dbInfo(databaseFile)

		dataPath=info['datPath']

		sessionID=gAnalysisSessions.newSession()
		ma=mosaicAnalysis.mosaicAnalysis(dataPath, sessionID) 
		ma.updateSettings(settings)
		
		# ma.setupAnalysis()

		gAnalysisSessions.addDatabaseFile(sessionID, databaseFile)
		gAnalysisSessions.addAnalysisRunningFlag(sessionID, False)
		gAnalysisSessions.addDataPath(sessionID, dataPath)
		gAnalysisSessions.addMOSAICAnalysisObject(sessionID, ma)

		return jsonify(respondingURL='load-analysis', sessionID=sessionID ), 200
	except InvalidPOSTRequest, err:
		return jsonify( respondingURL='load-analysis', errType='InvalidPOSTRequest', errSummary="An invalid POST request was received.", errText=str(err) ), 500
Пример #15
0
    def __init__(self, **kwargs):
        """Initialize the reader from keyword arguments.

        Accepts either 'fnames' (an explicit list of data files) or
        'dirname' (a directory scanned with an optional 'filter' glob and
        an optional 'nfiles' limit). All other keyword arguments are set
        on the instance verbatim.

        Raises:
            IncompatibleArgumentsError: if both, or neither, of 'dirname'
                and 'fnames' are supplied.
        """
        # Chunk size for streamed reads (units depend on the concrete
        # reader — verify against subclasses).
        self.CHUNKSIZE = 10000
        self.dataGenerator = None

        # start by setting all passed keyword arguments as class attributes
        for (k, v) in kwargs.iteritems():
            setattr(self, k, v)

        # Check if the passed arguments are sane
        if hasattr(self, 'dirname') and hasattr(self, 'fnames'):
            raise IncompatibleArgumentsError(
                "Incompatible arguments: expect either 'dirname' or 'fnames' when initializing class {0}."
                .format(type(self).__name__))

        # Check for the filter arg; default matches every file.
        if not hasattr(self, 'filter'):
            self.filter = '*'

        if hasattr(self, 'fnames'):
            # set fnames here.
            self.dataFiles = self.fnames
            delattr(self, 'fnames')
        else:
            try:
                if hasattr(self, 'dirname') and hasattr(self, 'nfiles'):
                    # N files from a directory
                    self.dataFiles = glob.glob(
                        format_path(
                            str(self.dirname) + "/" +
                            str(self.filter)))[:int(self.nfiles)]
                    delattr(self, 'dirname')
                    delattr(self, 'nfiles')
                elif hasattr(self, 'dirname'):
                    # all files from a directory
                    self.dataFiles = glob.glob(
                        format_path(
                            str(self.dirname) + "/" + str(self.filter)))
                    delattr(self, 'dirname')
                else:
                    raise IncompatibleArgumentsError(
                        "Missing arguments: 'dirname' or 'fnames' must be supplied to initialize {0}"
                        .format(type(self).__name__))
            except AttributeError, err:
                # Surface attribute problems as the package's argument error.
                raise IncompatibleArgumentsError(err)
Пример #16
0
	def exportToCSV(self, query):
		"""
			Export database records that match the specified query to a CSV flat file.

			The CSV is written next to the database file, with the same base name.
		"""
		import os.path

		# Use splitext instead of split('.') so paths containing dots in
		# directory or file names are not truncated at the first dot.
		csvfile=format_path( os.path.splitext(self.dbFile)[0]+'.csv' )
		df=pandas.DataFrame(self.queryDB(query), columns=self._col_names(query, self.db.cursor(), self.tableName))
		df.to_csv( csvfile )

		self.logger.debug(_d("{0}", csvfile))
Пример #17
0
	def exportToCSV(self, query):
		"""
			Export database records that match the specified query to a CSV flat file.

			The CSV is written next to the database file, with the same base name.
		"""
		import os.path

		# Use splitext instead of split('.') so paths containing dots in
		# directory or file names are not truncated at the first dot.
		csvfile=format_path( os.path.splitext(self.dbFile)[0]+'.csv' )
		df=pandas.DataFrame(self.queryDB(query), columns=self._col_names(query, self.db.cursor(), self.tableName))
		df.to_csv( csvfile )

		self.logger.debug(_d("{0}", csvfile))
Пример #18
0
	def dropEvent( self, event ):
		"""Accept a dropped local .sqlite file and display its path."""
		urls = event.mimeData().urls()
		if not urls or urls[0].scheme() != 'file':
			return

		# for some reason, this doubles up the intro slash; format_path
		# normalizes it.
		filepath = str( format_path( urls[0].path()) )
		if filepath.endswith('.sqlite'):
			self.setText(filepath)
Пример #19
0
    def dropEvent(self, event):
        """Accept a dropped local .sqlite file and display its path."""
        mime = event.mimeData()
        dropped = mime.urls()
        if not (dropped and dropped[0].scheme() == 'file'):
            return

        # for some reason, this doubles up the intro slash; format_path
        # normalizes it.
        filepath = str(format_path(dropped[0].path()))
        if filepath.endswith('.sqlite'):
            self.setText(filepath)
Пример #20
0
def _uuid():
	"""Return a persistent installation UUID, creating and caching it on first use.

	The UUID is cached in a dot file in the system temp directory.
	"""
	uuidfile=format_path(tempfile.gettempdir()+'/.mosaicuuid')
	try:
		with open (uuidfile, "r") as u:
			return u.read()
	except (IOError, OSError):
		# Cache missing or unreadable: generate a new UUID and persist it.
		# Narrowed from a bare except so KeyboardInterrupt/SystemExit and
		# unrelated errors are not silently swallowed.
		uuidgen=str(uuid.uuid4())
		with open(uuidfile, "w") as uw:
			uw.write(uuidgen)
		return uuidgen
Пример #21
0
def _uuid():
    """Return a persistent installation UUID, creating and caching it on first use.

    The UUID is cached in '~/.mosaicuuid'.
    """
    uuidfile=format_path(expanduser('~')+"/.mosaicuuid")
    try:
        with open (uuidfile, "r") as u:
            return u.read()
    except (IOError, OSError):
        # Cache missing or unreadable: generate a new UUID and persist it.
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit and
        # unrelated errors are not silently swallowed.
        uuidgen=str(uuid.uuid4())
        with open(uuidfile, "w") as uw:
            uw.write(uuidgen)
        return uuidgen
Пример #22
0
def _uuid():
    """Return a persistent installation UUID, creating and caching it on first use.

    The UUID is cached in '~/.mosaicuuid'.
    """
    uuidfile = format_path(expanduser('~') + "/.mosaicuuid")
    try:
        with open(uuidfile, "r") as u:
            return u.read()
    except (IOError, OSError):
        # Cache missing or unreadable: generate a new UUID and persist it.
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit and
        # unrelated errors are not silently swallowed.
        uuidgen = str(uuid.uuid4())
        with open(uuidfile, "w") as uw:
            uw.write(uuidgen)
        return uuidgen
Пример #23
0
def _uuid():
    """Return a persistent installation UUID, creating and caching it on first use.

    The UUID is cached in a dot file in the system temp directory.
    """
    uuidfile = format_path(tempfile.gettempdir() + '/.mosaicuuid')
    try:
        with open(uuidfile, "r") as u:
            return u.read()
    except (IOError, OSError):
        # Cache missing or unreadable: generate a new UUID and persist it.
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit and
        # unrelated errors are not silently swallowed.
        uuidgen = str(uuid.uuid4())
        with open(uuidfile, "w") as uw:
            uw.write(uuidgen)
        return uuidgen
Пример #24
0
def listDatabaseFiles():
    """List sub-folders and SQLite database files below the requested level."""
    params = dict(request.get_json())

    level = params.get('level', 'Data Root')
    logger.info("/list-database-files: " + str(level))
    if level == 'Data Root':
        folder = mosaic.WebServerDataLocation
        logger.info("/list-database-files: " + folder)
    else:
        folder = format_path(mosaic.WebServerDataLocation + '/' + level + '/')
        logger.info("/list-database-files: " + folder)

    dataRoot = format_path(mosaic.WebServerDataLocation)
    fileList = []

    for entry in sorted(glob.glob(folder + '/*')):
        # Folders are always listed; plain files only when they are SQLite DBs.
        if os.path.isdir(entry):
            desc = _folderDesc(entry)
        elif _fileExtension(entry) == ".sqlite":
            desc = "SQLite database, {0}".format(_fileSize(entry))
        else:
            continue

        fileList.append({
            'name': os.path.relpath(entry, folder),
            'relpath': os.path.relpath(entry, dataRoot),
            'desc': desc,
            'modified': time.strftime(
                '%m/%d/%Y, %I:%M %p',
                time.localtime(os.path.getmtime(entry))),
        })

    return jsonify(respondingURL='list-database-files',
                   level=level + '/',
                   fileData=fileList), 200
Пример #25
0
    def OnAggregateUsage(self):
        """Persist the 'Aggregate Usage' menu toggle to the GA settings cache.

        Best-effort: failures to read or write the cache are ignored.
        """
        try:
            ga_cache = format_path(tempfile.gettempdir() + '/.ga')
            with open(ga_cache, "r") as garead:
                gac = json.load(garead)

            gac["gaenable"] = str(self.actionAggregate_Usage.isChecked())

            with open(ga_cache, "w") as gawrite:
                json.dump(gac, gawrite, indent=4, sort_keys=True)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # are not swallowed; cache updates remain best-effort.
            pass
Пример #26
0
def newAnalysis():
	global gAnalysisSessions
	# logger=mlog.mosaicLogging().getLogger(name=__name__)

	try:
		defaultSettings=False
		params = dict(request.get_json())

		dataPath = params.get('dataPath', None)
		settingsString = params.get('settingsString', None)
		sessionID=params.get('sessionID', None)

		if dataPath and not settingsString:		# brand new session
			# print "brand new session: ", dataPath, settingsString, sessionID	
			logger.info("/new-analysis: "+format_path(mosaic.WebServerDataLocation+'/'+dataPath))

			sessionID=gAnalysisSessions.newSession()
			ma=mosaicAnalysis.mosaicAnalysis( format_path(mosaic.WebServerDataLocation+'/'+dataPath), sessionID) 

			gAnalysisSessions.addDataPath(sessionID, format_path(mosaic.WebServerDataLocation+'/'+dataPath) )
			gAnalysisSessions.addMOSAICAnalysisObject(sessionID, ma)
		elif sessionID and settingsString:	# update settings
			# print "update settings: ", dataPath, settingsString, sessionID
			ma=gAnalysisSessions.getSessionAttribute(sessionID, 'mosaicAnalysisObject')
			ma.updateSettings(settingsString)

			gAnalysisSessions.addSettingsString(sessionID, ma.analysisSettingsDict)
		elif sessionID and not settingsString:  # a session ID loaded from a route
			# print "session id from route: ", dataPath, settingsString, sessionID
			ma=gAnalysisSessions.getSessionAttribute(sessionID, 'mosaicAnalysisObject')
		else:
			raise InvalidPOSTRequest('An invalid POST request was received.')
		

		return jsonify(respondingURL='new-analysis', sessionID=sessionID, **ma.setupAnalysis() ), 200
	except EmptyDataPipeError, err:
		gAnalysisSessions.pop(sessionID, None)
		return jsonify( respondingURL='new-analysis', errType='EmptyDataPipeError', errSummary="End of data.", errText=str(err) ), 500
Пример #27
0
def initialization():
	"""Initialize the web client: sync the analytics opt-in flag, report server mode."""
	ga_cache=format_path(tempfile.gettempdir()+'/.ga')

	params = dict(request.get_json())
	appAnalytics=params.get("appAnalytics", -1)

	gac=ga._gaCredentialCache()
	# _gaCredentialCache() can return None on failure; fall back to an empty
	# settings dict so the assignment below cannot raise TypeError.
	if gac is None:
		gac={}
	if appAnalytics!=-1:
		gac["gaenable"]=str(appAnalytics)

	with open(ga_cache, "w") as g:
		g.write(json.dumps(gac))

	return jsonify( respondingURL="initialization", appAnalytics=0, showAnalyticsOptions=0, serverMode=mosaic.WebServerMode), 200
Пример #28
0
	def __init__(self, **kwargs):
		"""Initialize the reader from keyword arguments.

		Accepts either 'fnames' (an explicit list of data files) or
		'dirname' (a directory scanned with an optional 'filter' glob and
		an optional 'nfiles' limit). All other keyword arguments are set
		on the instance verbatim.

		Raises:
			IncompatibleArgumentsError: if both, or neither, of 'dirname'
				and 'fnames' are supplied.
		"""
		# Chunk size for streamed reads (units depend on the concrete
		# reader — verify against subclasses).
		self.CHUNKSIZE=10000
		self.dataGenerator=None

		# start by setting all passed keyword arguments as class attributes
		for (k,v) in kwargs.iteritems():
			setattr(self, k, v)

		# Check if the passed arguments are sane	
		if hasattr(self, 'dirname') and hasattr(self, 'fnames'):
			raise IncompatibleArgumentsError("Incompatible arguments: expect either 'dirname' or 'fnames' when initializing class {0}.".format(type(self).__name__))

		# Check for the filter arg; default matches every file.
		if not hasattr(self, 'filter'):
			self.filter='*'

		if hasattr(self, 'fnames'):
			# set fnames here.
			self.dataFiles=self.fnames
			delattr(self, 'fnames')
		else:
			try:
				if hasattr(self, 'dirname') and hasattr(self,'nfiles'):
					# N files from a directory
					self.dataFiles=glob.glob(format_path(str(self.dirname)+"/"+str(self.filter)))[:int(self.nfiles)]
					delattr(self, 'dirname')
					delattr(self, 'nfiles')
				elif hasattr(self, 'dirname'):
					# all files from a directory
					self.dataFiles=glob.glob(format_path(str(self.dirname)+"/"+str(self.filter)))
					delattr(self, 'dirname')
				else:
					raise IncompatibleArgumentsError("Missing arguments: 'dirname' or 'fnames' must be supplied to initialize {0}".format(type(self).__name__))
			except AttributeError, err:
				# Surface attribute problems as the package's argument error.
				raise IncompatibleArgumentsError(err)
Пример #29
0
	def __init__(self, trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings, settingsString):
		"""
			Initialize a new event segment object

			Args:
				trajDataObj              trajectory I/O object supplying the time-series
				eventProcHnd             handle (class) used to process individual events
				eventPartitionSettings   settings dict for the partition algorithm
				eventProcSettings        settings dict for the event-processing algorithm
				settingsString           full settings string persisted with the results

			Raises:
				commonExceptions.SettingsTypeError: when a control setting
					cannot be converted to int.
		"""
		# Required arguments
		self.trajDataObj=trajDataObj
		self.eventProcHnd=eventProcHnd

		self.settingsDict = eventPartitionSettings 
		self.eventProcSettingsDict = eventProcSettings

		try:
			# Pop control flags out of the settings dict so only algorithm
			# settings remain for downstream consumers.
			self.writeEventTS=int(self.settingsDict.pop("writeEventTS",1))
			self.parallelProc=int(self.settingsDict.pop("parallelProc",1))
			self.reserveNCPU=int(self.settingsDict.pop("reserveNCPU",2))
		except ValueError as err:
			raise commonExceptions.SettingsTypeError( err )

		sys.stdout.flush()

		# Processing log lives next to the data files.
		self.logFileHnd=open(format_path(self.trajDataObj.datPath+'/eventProcessing.log'),'w')
		
		# Prototype event-processing object built on empty data; used below
		# only for its metadata headings/types when creating the DB columns.
		self.tEventProcObj=self.eventProcHnd([], self.trajDataObj.FsHz, eventstart=0,eventend=0, baselinestats=[ 0,0,0 ], algosettingsdict=self.eventProcSettingsDict.copy(), savets=False, absdatidx=0, datafileHnd=None )

		# Metadata DB: one column per metadata heading plus the time-series.
		self.mdioDBHnd=sqlite3MDIO.sqlite3MDIO()
		self.mdioDBHnd.initDB(
								dbPath=self.trajDataObj.datPath, 
								tableName='metadata',
								colNames=(self.tEventProcObj.mdHeadings())+['TimeSeries'],
								colNames_t=(self.tEventProcObj.mdHeadingDataType())+['REAL_LIST']
							)
		self.mdioDBHnd.writeSettings(settingsString)
		# Record which filter (if any) was applied to the trajectory.
		if self.trajDataObj.dataFilter:
			fstring=type(self.trajDataObj.dataFilterObj).__name__
		else:
			fstring='None'
		self.mdioDBHnd.writeAnalysisInfo([
							self.trajDataObj.datPath,
							self.trajDataObj.fileFormat,
							type(self).__name__,
							type(self.tEventProcObj).__name__,
							fstring,
						])

		if self.parallelProc:
			self._setupparallel()


		self._init(trajDataObj, eventProcHnd, eventPartitionSettings, eventProcSettings)
Пример #30
0
def initialization():
    """Initialize the web client: sync the analytics opt-in flag, report server mode."""
    ga_cache = format_path(tempfile.gettempdir() + '/.ga')

    params = dict(request.get_json())
    appAnalytics = params.get("appAnalytics", -1)

    gac = ga._gaCredentialCache()
    # _gaCredentialCache() can return None on failure; fall back to an empty
    # settings dict so the assignment below cannot raise TypeError.
    if gac is None:
        gac = {}
    if appAnalytics != -1:
        gac["gaenable"] = str(appAnalytics)

    with open(ga_cache, "w") as g:
        g.write(json.dumps(gac))

    return jsonify(respondingURL="initialization",
                   appAnalytics=0,
                   showAnalyticsOptions=0,
                   serverMode=mosaic.WebServerMode), 200
Пример #31
0
	def _initdb(self, **kwargs):
		"""
			Initialize the database tables

			Args:
				tableName   name of database table. Default is 'metadata'
		"""
		if not hasattr(self, 'tableName'):
			self.tableName='metadata'
		if not hasattr(self, 'colNames'):
			raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames' must be supplied to initialize {0}".format(type(self).__name__))
		if not hasattr(self, 'colNames_t'):
			raise metaMDIO.InsufficientArgumentsError("Missing arguments: 'colNames_t' must be supplied to initialize {0}".format(type(self).__name__))

		dbTimeout = kwargs.pop('timeout', 11.0)

		# Timestamped database filename inside dbPath.
		stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
		self.dbFilename = format_path('{0}/eventMD-{1}.sqlite'.format(self.dbPath, stamp))
		self.db = sqlite3.connect(self.dbFilename, detect_types=sqlite3.PARSE_DECLTYPES, timeout=dbTimeout)

		self._setuptables()
Пример #32
0
    def analysisStatistics(self):
        """Assemble a dictionary of summary statistics for the analysis database.

        Collects event-outcome fractions, capture rate, per-event processing
        time, open channel current, progress, and run metadata read from the
        analysis database at self.analysisDB.
        """
        statsDict = {}

        # Event outcome fractions and total event count.
        s = self._eventStats()
        statsDict['fractionNormal'] = s[0]
        statsDict['fractionWarn'] = s[1]
        statsDict['fractionError'] = s[2]
        statsDict['nTotal'] = s[3]

        # Capture rate (mean, sigma).
        c = self._caprate()
        statsDict['captureRateMean'] = c[0]
        statsDict['captureRateSigma'] = c[1]

        # Per-event processing time (mean, sigma).
        t = self._timePerEvent()
        statsDict['processTimePerEventMean'] = t[0]
        statsDict['processTimePerEventSigma'] = t[1]

        # Open channel current (mean, sigma).
        o = self._openChanCurrent()
        statsDict['openChannelCurrentMean'] = o[0]
        statsDict['openChannelCurrentSigma'] = o[1]

        statsDict['analysisProgressPercent'] = self._analysisProgress()

        # Run metadata is read directly from the analysis database.
        dbHnd = sqlite.sqlite3MDIO()
        dbHnd.openDB(self.analysisDB)
        analysisInfo = dbHnd.readAnalysisInfo()

        # Class-level tables mapping stored algorithm names (presumably to
        # display names — confirm against the class definition).
        procName = analysisStatistics.processingAlgorithmName
        partName = analysisStatistics.partitionAlgorithmName

        statsDict['partitionAlgorithm'] = partName[
            analysisInfo['partitionAlgorithm']]
        statsDict['processingAlgorithm'] = procName[
            analysisInfo['processingAlgorithm']]
        statsDict['FskHz'] = analysisInfo['FsHz'] / 1000.
        statsDict['dataType'] = analysisInfo['dataType']
        # Present the data path relative to the server data root.
        statsDict['datPath'] = format_path((analysisInfo['datPath'].replace(
            str(WebServerDataLocation), "<Data Root>/")).replace('//', '/'))
        statsDict['timeseriesSaved'] = self._timeseriesSaved()

        return statsDict
Пример #33
0
	def analysisStatistics(self):
		"""Assemble a dictionary of summary statistics for the analysis database.

		Collects event-outcome fractions, capture rate, per-event processing
		time, open channel current, progress, and run metadata read from the
		analysis database at self.analysisDB.
		"""
		statsDict={}
		
		# Event outcome fractions and total event count.
		s=self._eventStats()
		statsDict['fractionNormal']=s[0]
		statsDict['fractionWarn']=s[1]
		statsDict['fractionError']=s[2]
		statsDict['nTotal']=s[3]

		# Capture rate (mean, sigma).
		c=self._caprate()
		statsDict['captureRateMean']=c[0]
		statsDict['captureRateSigma']=c[1]

		# Per-event processing time (mean, sigma).
		t=self._timePerEvent()
		statsDict['processTimePerEventMean']=t[0]
		statsDict['processTimePerEventSigma']=t[1]

		# Open channel current (mean, sigma).
		o=self._openChanCurrent()
		statsDict['openChannelCurrentMean']=o[0]
		statsDict['openChannelCurrentSigma']=o[1]

		statsDict['analysisProgressPercent']=self._analysisProgress()

		# Run metadata is read directly from the analysis database.
		dbHnd=sqlite.sqlite3MDIO()
		dbHnd.openDB(self.analysisDB)
		analysisInfo=dbHnd.readAnalysisInfo()

		# Class-level tables mapping stored algorithm names (presumably to
		# display names — confirm against the class definition).
		procName=analysisStatistics.processingAlgorithmName
		partName=analysisStatistics.partitionAlgorithmName

		statsDict['partitionAlgorithm']=partName[analysisInfo['partitionAlgorithm']]
		statsDict['processingAlgorithm']=procName[analysisInfo['processingAlgorithm']]
		statsDict['FskHz']=analysisInfo['FsHz']/1000.
		statsDict['dataType']=analysisInfo['dataType']
		# Present the data path relative to the server data root.
		statsDict['datPath']=format_path((analysisInfo['datPath'].replace(str(WebServerDataLocation), "<Data Root>/")).replace('//', '/'))
		statsDict['timeseriesSaved']=self._timeseriesSaved()

		return statsDict
Пример #34
0
def _gaCredentialCache():
    """
        Return the Google Analytics settings dict, refreshing the on-disk
        cache (<tempdir>/.ga) when it is older than 24 hours or missing.

        Returns None if the settings cannot be read at all; analytics is
        best-effort and never raises to the caller.
    """
    try:
        # Set up the logger and cache path outside the refresh try-block so
        # the fallback paths below cannot hit a NameError on these names.
        logger = mlog.mosaicLogging().getLogger(name=__name__)

        ga_cache = format_path(tempfile.gettempdir() + '/.ga')
        logger.debug(_d("Looking for GA cache {0}", ga_cache))

        try:
            gaModTime = datetime.fromtimestamp(os.stat(ga_cache).st_mtime)
            gaExpireAge = timedelta(hours=24)
            gaAge = datetime.today() - gaModTime

            if gaAge > gaExpireAge:
                logger.debug(_d("GA settings cache has expired."))
                # Refresh the cache, but preserve an explicit opt-out
                # (gaenable == False) from the previous settings.
                ga_old = _gaSettingsDict(ga_cache)
                _getGASettings(ga_cache)
                ga_new = _gaSettingsDict(ga_cache)

                if ga_old["gaenable"] == False:
                    ga_new["gaenable"] = False

                with open(ga_cache, "w") as ga:
                    ga.write(json.dumps(ga_new))
            else:
                logger.debug(
                    _d("GA settings cache found ({0}). gaAge={1}",
                       str(ga_cache), str(gaAge)))
        except (OSError, KeyError, ValueError):
            # Cache file missing, unreadable, or malformed: fetch fresh
            # settings. (Was a bare except; narrowed so real bugs surface.)
            logger.debug(_d("GA settings are not cached."))
            _getGASettings(ga_cache)

        with open(ga_cache, 'r') as ga:
            return json.loads(ga.read())
    except Exception as err:
        # Best-effort: log and swallow. Narrowed from BaseException so
        # KeyboardInterrupt/SystemExit still propagate.
        logger.debug(
            _d("Exception ignored: {0}\n{1}", repr(err),
               traceback.format_exc()))
        return
Пример #35
0
def _gaCredentialCache():
    """
        Return the Google Analytics settings dict from the on-disk cache
        (<tempdir>/.ga), refreshing it when older than 24 hours or missing.
    """
    logger = mlog.mosaicLogging().getLogger(name=__name__)
    # ga_cache=resource_path("mosaic/utilities/.ga")
    ga_cache = format_path(tempfile.gettempdir() + '/.ga')
    logger.debug(_d("Looking for GA cache {0}", ga_cache))

    try:
        gaModTime = datetime.fromtimestamp(os.stat(ga_cache).st_mtime)
        gaExpireAge = timedelta(hours=24)
        gaAge = datetime.today() - gaModTime

        if gaAge > gaExpireAge:
            logger.debug(_d("GA settings cache has expired."))
            _getGASettings(ga_cache)
        else:
            logger.debug(_d("GA settings cache found. gaAge={0}", gaAge))

    except OSError:
        # os.stat failed: the cache file does not exist (or is unreadable).
        # Narrowed from a bare except so unrelated bugs are not hidden.
        logger.debug(_d("GA settings are not cached."))
        _getGASettings(ga_cache)

    with open(ga_cache, 'r') as ga:
        return json.loads(ga.read())
Пример #36
0
class mosaicLogging(object):
    """
		A custom logging class that uses the Python logging facility. Logs are automatically saved to a metaMDIO instance,
		and to a file log when DeveloperMode is active.
	"""
    # Singleton via metaclass (Python 2 __metaclass__ syntax): every
    # instantiation shares one process-wide logging setup.
    __metaclass__ = metaSingleton

    # Cache of named logger instances handed out by this class.
    _loggers = {}

    # Root logger; handlers attached at class-definition time apply process-wide.
    log = logging.getLogger()
    log.setLevel(logging.DEBUG)

    formatstr = MessageFormatter(
        "%(asctime)-8s %(levelname)-8s %(name)-12s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S")

    # Rotating File Handler
    # Pick the log directory: prefer mosaic.LogLocation when configured,
    # otherwise fall back to a per-platform default.
    try:
        logdir = mosaic.LogLocation
        log.info("Logs will be saved to: {0}".format(logdir))
    except AttributeError, err:  # NOTE: Python 2 except syntax; raised when LogLocation is not set.
        if sys.platform.startswith('darwin'):
            logdir = format_path(
                os.path.expanduser('~') + "/Library/Logs/MOSAIC")
            if not os.path.exists(logdir):
                os.mkdir(logdir)
        elif sys.platform.startswith('linux'):
            if os.getuid() == 0:
                # Running as root: system log directory is writable.
                logdir = "/var/log/"
            else:
                log.info(
                    "MOSAIC log will be saved to ~/mosaic.log. Run MOSAIC with sudo to save logs to '/var/log/."
                )
                logdir = os.path.expanduser("~")
        else:
            # Other platforms (e.g. Windows): fall back to the user home directory.
            logdir = os.path.expanduser("~")
Пример #37
0
            logdir = format_path(
                os.path.expanduser('~') + "/Library/Logs/MOSAIC")
            if not os.path.exists(logdir):
                os.mkdir(logdir)
        elif sys.platform.startswith('linux'):
            if os.getuid() == 0:
                logdir = "/var/log/"
            else:
                log.info(
                    "MOSAIC log will be saved to ~/mosaic.log. Run MOSAIC with sudo to save logs to '/var/log/."
                )
                logdir = os.path.expanduser("~")
        else:
            logdir = os.path.expanduser("~")

    logname = format_path(logdir + "/mosaic.log")

    rfh = logging.handlers.RotatingFileHandler(filename=logname,
                                               maxBytes=mosaic.LogSizeBytes,
                                               backupCount=5)
    rfh.setFormatter(formatstr)
    if mosaic.DeveloperMode:
        rfh.setLevel(logging.DEBUG)
    else:
        rfh.setLevel(logging.INFO)

    log.addHandler(rfh)

    sh = None

    def __init__(self, *args, **kwargs):
Пример #38
0
	def _updateControls(self):
		"""
			Refresh every widget in the analysis settings GUI from the
			analysis data model. Dialog update notifications are suppressed
			while the widgets are being populated.
		"""
		self.updateDialogs=False

		model=self.analysisDataModel

		# Map supported data-type labels to their combo box indices.
		datidx= { 
					"QDF" : self.datTypeComboBox.findText("QDF"), 
					"ABF" : self.datTypeComboBox.findText("ABF"),
					"BIN" : self.datTypeComboBox.findText("BIN")
				}
		# Auto-detect the data file type from the files present in the data
		# directory; the first matching extension wins.
		path=model["DataFilesPath"] 
		if len(glob.glob(format_path( str(path)+'/*qdf') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["QDF"] )
			model["filter"]="*.qdf"
		elif len(glob.glob( format_path(str(path)+'/*abf') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["ABF"] )
			model["filter"]="*.abf"
		elif len(glob.glob( format_path(str(path)+'/*bin') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["BIN"] )
			model["filter"]="*.bin"
		elif len(glob.glob( format_path(str(path)+'/*dat') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["BIN"] )
			model["filter"]="*.dat"

		# store the  data type in the trajviewer data struct
		model["DataFilesType"] = str(self.datTypeComboBox.currentText())

		self.startIndexLineEdit.setText(str(model["start"]))
		# end == -1 means "process to the end of the data"; show an empty field.
		if model["end"]==-1:
			self.endIndexLineEdit.setText("")
		else:
			self.endIndexLineEdit.setText(str(model["end"]))

		self.dcOffsetDoubleSpinBox.setValue(model["dcOffset"])

		# A mean or SD of -1 flags automatic baseline estimation.
		if float(model["meanOpenCurr"]) == -1. or float(model["sdOpenCurr"]) == -1.:
			self.baselineAutoCheckBox.setChecked(True)
			self.OnBaselineAutoCheckbox(True)

			# Manually disable baseline mean and SD controls
			self.baselineMeanLineEdit.setText("")
			self.baselineSDLineEdit.setText("")

			self.baselineMeanLineEdit.setPlaceholderText("<auto>")
			self.baselineSDLineEdit.setPlaceholderText("<auto>")

			self.baselineMeanLineEdit.setEnabled(False)
			self.baselineSDLineEdit.setEnabled(False)			
		else:
			# Populate baseline parameters
			self.baselineAutoCheckBox.setChecked(False)
			self.OnBaselineAutoCheckbox(False)

			self.baselineMeanLineEdit.setText(str(model["meanOpenCurr"]))
			self.baselineSDLineEdit.setText(str(model["sdOpenCurr"]))
			self.baselineBlockSizeDoubleSpinBox.setValue(float(model["blockSizeSec"]))

			# Manually enable baseline mean and SD controls
			self.baselineMeanLineEdit.setEnabled(True)
			self.baselineSDLineEdit.setEnabled(True)
	
		# Populate EventSegment parameters
		self._setThreshold(float(model["meanOpenCurr"]), float(model["sdOpenCurr"]), float(model["eventThreshold"]))
		
		# Populate misc parameters
		self.writeEventsCheckBox.setChecked(int(model["writeEventTS"]))

		# Populate plot widgets
		self.plotEventFitsCheckBox.setEnabled(int(model["writeEventTS"]))

		# check if parallel is available
		# Parallel processing requires pyzmq; hide the controls when it is absent.
		try:
			import zmq
			
			self.parallelCheckBox.setChecked(int(model["parallelProc"]))				
			self.parallelCoresSpinBox.setValue(multiprocessing.cpu_count()-int(model["reserveNCPU"]))
		except ImportError:
			self.parallelCheckBox.hide()
			self.parallelCoresSpinBox.hide()
			self.parallelCoresLabel.hide()	

		# Select the model's current processing algorithm in its combo box.
		procidx= {}
		for v in self.analysisDataModel.eventProcessingAlgoKeys.values():
			procidx[v]=self.processingAlgorithmComboBox.findText(v)
		
		self.processingAlgorithmComboBox.setCurrentIndex( procidx[self.analysisDataModel["ProcessingAlgorithm"]] )

		# If an advanced mode dialog exists, update its settings
		if self.advancedSettingsDialog:
			if self.dataFilterDenoise:
				fltr=self.analysisDataModel["FilterAlgorithm"]
			else:
				fltr=None
			self.advancedSettingsDialog.updateSettingsString(
					model.GenerateSettingsView(
							eventPartitionAlgo=str(self.partitionAlgorithmComboBox.currentText()), 
							eventProcessingAlgo=str(self.processingAlgorithmComboBox.currentText()),
							dataFilterAlgo=fltr
						)
				)

		# If the trajviewer is initialized, update the denoising settings
		if self.trajViewerWindow:
			self.trajViewerWindow.waveletLevelSpinBox.setValue(int(self.analysisDataModel["level"]))

		# Re-enable dialog update notifications now that widgets reflect the model.
		self.updateDialogs=True
Пример #39
0
	def _getdbfiles(self):
		"""
			Return the sqlite database files found in the configured data directory.
		"""
		dataDir=self.analysisDataModel["DataFilesPath"]
		pattern=format_path(dataDir+'/*sqlite')
		return glob.glob(pattern)
Пример #40
0
 def _buildFileList(self, dirname, filter):
     """
         Collect the files in *dirname* matching *filter*, excluding any
         that match a pattern in the module-level ignorelist.
     """
     matches = set(glob.glob(format_path(dirname + "/" + filter)))
     excluded = set()
     for pattern in ignorelist:
         excluded |= set(glob.glob(format_path(dirname + "/" + pattern)))
     return list(matches - excluded)
Пример #41
0
	def _buildFileList(self, dirname, filter):
		"""
			Return files in dirname matching filter, minus any file matching
			a pattern in the module-level ignorelist.
		"""
		keep=set(glob.glob(format_path(dirname+"/"+filter)))
		ignored=[f for pat in ignorelist for f in glob.glob(format_path(dirname+"/"+pat))]
		return list(keep.difference(ignored))
Пример #42
0
    def _updateControls(self):
        """
			Refresh every widget in the analysis settings GUI from the
			analysis data model. Dialog update notifications are suppressed
			while the widgets are being populated.
		"""
        self.updateDialogs = False

        model = self.analysisDataModel

        # Map supported data-type labels to their combo box indices.
        datidx = {
            "QDF": self.datTypeComboBox.findText("QDF"),
            "ABF": self.datTypeComboBox.findText("ABF"),
            "BIN": self.datTypeComboBox.findText("BIN")
        }
        # Auto-detect the data file type from the files present in the data
        # directory; the first matching extension wins.
        path = model["DataFilesPath"]
        if len(glob.glob(format_path(str(path) + '/*qdf'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["QDF"])
            model["filter"] = "*.qdf"
        elif len(glob.glob(format_path(str(path) + '/*abf'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["ABF"])
            model["filter"] = "*.abf"
        elif len(glob.glob(format_path(str(path) + '/*bin'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["BIN"])
            model["filter"] = "*.bin"
        elif len(glob.glob(format_path(str(path) + '/*dat'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["BIN"])
            model["filter"] = "*.dat"

        # store the  data type in the trajviewer data struct
        model["DataFilesType"] = str(self.datTypeComboBox.currentText())

        self.startIndexLineEdit.setText(str(model["start"]))
        # end == -1 means "process to the end of the data"; show an empty field.
        if model["end"] == -1:
            self.endIndexLineEdit.setText("")
        else:
            self.endIndexLineEdit.setText(str(model["end"]))

        self.dcOffsetDoubleSpinBox.setValue(model["dcOffset"])

        # A mean or SD of -1 flags automatic baseline estimation.
        if float(model["meanOpenCurr"]) == -1. or float(
                model["sdOpenCurr"]) == -1.:
            self.baselineAutoCheckBox.setChecked(True)
            self.OnBaselineAutoCheckbox(True)

            # Manually disable baseline mean and SD controls
            self.baselineMeanLineEdit.setText("")
            self.baselineSDLineEdit.setText("")

            self.baselineMeanLineEdit.setPlaceholderText("<auto>")
            self.baselineSDLineEdit.setPlaceholderText("<auto>")

            self.baselineMeanLineEdit.setEnabled(False)
            self.baselineSDLineEdit.setEnabled(False)
        else:
            # Populate baseline parameters
            self.baselineAutoCheckBox.setChecked(False)
            self.OnBaselineAutoCheckbox(False)

            self.baselineMeanLineEdit.setText(str(model["meanOpenCurr"]))
            self.baselineSDLineEdit.setText(str(model["sdOpenCurr"]))
            self.baselineBlockSizeDoubleSpinBox.setValue(
                float(model["blockSizeSec"]))

            # Manually enable baseline mean and SD controls
            self.baselineMeanLineEdit.setEnabled(True)
            self.baselineSDLineEdit.setEnabled(True)

        # Populate EventSegment parameters
        self._setThreshold(float(model["meanOpenCurr"]),
                           float(model["sdOpenCurr"]),
                           float(model["eventThreshold"]))

        # Populate misc parameters
        self.writeEventsCheckBox.setChecked(int(model["writeEventTS"]))

        # Populate plot widgets
        self.plotEventFitsCheckBox.setEnabled(int(model["writeEventTS"]))

        # check if parallel is available
        # Parallel processing requires pyzmq; hide the controls when it is absent.
        try:
            import zmq

            self.parallelCheckBox.setChecked(int(model["parallelProc"]))
            self.parallelCoresSpinBox.setValue(multiprocessing.cpu_count() -
                                               int(model["reserveNCPU"]))
        except ImportError:
            self.parallelCheckBox.hide()
            self.parallelCoresSpinBox.hide()
            self.parallelCoresLabel.hide()

        # Select the model's current processing algorithm in its combo box.
        procidx = {}
        for v in self.analysisDataModel.eventProcessingAlgoKeys.values():
            procidx[v] = self.processingAlgorithmComboBox.findText(v)

        self.processingAlgorithmComboBox.setCurrentIndex(
            procidx[self.analysisDataModel["ProcessingAlgorithm"]])

        # If an advanced mode dialog exists, update its settings
        if self.advancedSettingsDialog:
            if self.dataFilterDenoise:
                fltr = self.analysisDataModel["FilterAlgorithm"]
            else:
                fltr = None
            self.advancedSettingsDialog.updateSettingsString(
                model.GenerateSettingsView(
                    eventPartitionAlgo=str(
                        self.partitionAlgorithmComboBox.currentText()),
                    eventProcessingAlgo=str(
                        self.processingAlgorithmComboBox.currentText()),
                    dataFilterAlgo=fltr))

        # If the trajviewer is initialized, update the denoising settings
        if self.trajViewerWindow:
            self.trajViewerWindow.waveletLevelSpinBox.setValue(
                int(self.analysisDataModel["level"]))

        # Re-enable dialog update notifications now that widgets reflect the model.
        self.updateDialogs = True
Пример #43
0
    def __init__(self, trajDataObj, eventProcHnd, eventPartitionSettings,
                 eventProcSettings, settingsString):
        """
			Initialize a new event segment object

			:Parameters:
				- `trajDataObj` :				time-series data object (provides datPath, FsHz, fileFormat, dataFilter/dataFilterObj).
				- `eventProcHnd` :				handle (class) used to construct event processing objects.
				- `eventPartitionSettings` :	settings dict for the partition algorithm; partition-level keys are popped out below.
				- `eventProcSettings` :			settings dict for the event processing algorithm.
				- `settingsString` :			settings string persisted to the results database.

			:Errors:
				- `commonExceptions.SettingsTypeError` : when a partition-level setting cannot be converted to int.
		"""
        # Required arguments
        self.trajDataObj = trajDataObj
        self.eventProcHnd = eventProcHnd

        self.settingsDict = eventPartitionSettings
        self.eventProcSettingsDict = eventProcSettings

        # Pop partition-level options out of the settings dict so only
        # algorithm-specific settings remain; defaults apply when absent.
        try:
            self.writeEventTS = int(self.settingsDict.pop("writeEventTS", 1))
            self.parallelProc = int(self.settingsDict.pop("parallelProc", 1))
            self.reserveNCPU = int(self.settingsDict.pop("reserveNCPU", 2))
        except ValueError as err:
            raise commonExceptions.SettingsTypeError(err)

        sys.stdout.flush()

        # Per-run processing log, written alongside the data files.
        self.logFileHnd = open(
            format_path(self.trajDataObj.datPath + '/eventProcessing.log'),
            'w')

        # Template event-processing object; used only for its metadata
        # headings/types when setting up the database schema below.
        self.tEventProcObj = self.eventProcHnd(
            [],
            self.trajDataObj.FsHz,
            eventstart=0,
            eventend=0,
            baselinestats=[0, 0, 0],
            algosettingsdict=self.eventProcSettingsDict.copy(),
            savets=False,
            absdatidx=0,
            datafileHnd=None)

        # Results database: metadata table plus per-event time series column.
        self.mdioDBHnd = sqlite3MDIO.sqlite3MDIO()
        self.mdioDBHnd.initDB(
            dbPath=self.trajDataObj.datPath,
            tableName='metadata',
            colNames=(self.tEventProcObj.mdHeadings()) + ['TimeSeries'],
            colNames_t=(self.tEventProcObj.mdHeadingDataType()) +
            ['REAL_LIST'])
        self.mdioDBHnd.writeSettings(settingsString)
        # Record the data filter class name (or 'None') in the analysis info.
        if self.trajDataObj.dataFilter:
            fstring = type(self.trajDataObj.dataFilterObj).__name__
        else:
            fstring = 'None'
        self.mdioDBHnd.writeAnalysisInfo([
            self.trajDataObj.datPath,
            self.trajDataObj.fileFormat,
            type(self).__name__,
            type(self.tEventProcObj).__name__,
            fstring,
        ])

        if self.parallelProc:
            self._setupparallel()

        # Subclass-specific initialization hook.
        self._init(trajDataObj, eventProcHnd, eventPartitionSettings,
                   eventProcSettings)
Пример #44
0
    def _updateControls(self):
        """
			Refresh every widget in the analysis settings GUI from the
			analysis data model. Dialog update notifications are suppressed
			while the widgets are being populated.
		"""
        self.updateDialogs = False

        model = self.analysisDataModel

        # Map supported data-type labels to their combo box indices.
        datidx = {
            "QDF": self.datTypeComboBox.findText("QDF"),
            "ABF": self.datTypeComboBox.findText("ABF"),
            "BIN": self.datTypeComboBox.findText("BIN"),
            "TSV": self.datTypeComboBox.findText("TSV"),
            "LOG": self.datTypeComboBox.findText("LOG")
        }
        # Auto-detect the data file type from the files present in the data
        # directory; the first matching extension wins. *txt counts as TSV
        # only when no *log files are present.
        path = model["DataFilesPath"]
        if len(glob.glob(format_path(str(path) + '/*qdf'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["QDF"])
            model["filter"] = "*.qdf"
        elif len(glob.glob(format_path(str(path) + '/*abf'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["ABF"])
            model["filter"] = "*.abf"
        elif len(glob.glob(format_path(str(path) + '/*bin'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["BIN"])
            model["filter"] = "*.bin"
        elif len(glob.glob(format_path(str(path) + '/*dat'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["BIN"])
            model["filter"] = "*.dat"
        elif len(glob.glob(format_path(str(path) + '/*tsv'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["TSV"])
            model["filter"] = "*.tsv"
        elif len(glob.glob(format_path(str(path) + '/*txt'))) > 0 and len(
                glob.glob(format_path(str(path) + '/*log'))) == 0:
            self.datTypeComboBox.setCurrentIndex(datidx["TSV"])
            model["filter"] = "*.txt"
        elif len(glob.glob(format_path(str(path) + '/*log'))) > 0:
            self.datTypeComboBox.setCurrentIndex(datidx["LOG"])
            model["filter"] = "*.log"

        # store the  data type in the trajviewer data struct
        model["DataFilesType"] = str(self.datTypeComboBox.currentText())

        self.startIndexLineEdit.setText(str(model["start"]))
        # end == -1 means "process to the end of the data"; show an empty field.
        if model["end"] == -1:
            self.endIndexLineEdit.setText("")
        else:
            self.endIndexLineEdit.setText(str(model["end"]))

        self.dcOffsetDoubleSpinBox.setValue(model["dcOffset"])

        # A mean or SD of -1 flags automatic baseline estimation.
        if float(model["meanOpenCurr"]) == -1. or float(
                model["sdOpenCurr"]) == -1.:
            self.baselineAutoCheckBox.setChecked(True)
            self.OnBaselineAutoCheckbox(True)

            # Manually disable baseline mean and SD controls
            self.baselineMeanLineEdit.setText("")
            self.baselineSDLineEdit.setText("")

            self.baselineMeanLineEdit.setPlaceholderText("<auto>")
            self.baselineSDLineEdit.setPlaceholderText("<auto>")

            self.baselineMeanLineEdit.setEnabled(False)
            self.baselineSDLineEdit.setEnabled(False)
        else:
            # Populate baseline parameters
            self.baselineAutoCheckBox.setChecked(False)
            self.OnBaselineAutoCheckbox(False)

            self.baselineMeanLineEdit.setText(str(model["meanOpenCurr"]))
            self.baselineSDLineEdit.setText(str(model["sdOpenCurr"]))
            self.baselineBlockSizeDoubleSpinBox.setValue(
                float(model["blockSizeSec"]))

            # Manually enable baseline mean and SD controls
            self.baselineMeanLineEdit.setEnabled(True)
            self.baselineSDLineEdit.setEnabled(True)

        # Populate EventSegment parameters
        self._setThreshold(float(model["meanOpenCurr"]),
                           float(model["sdOpenCurr"]),
                           float(model["eventThreshold"]))

        # Populate misc parameters
        self.writeEventsCheckBox.setChecked(int(model["writeEventTS"]))

        # Populate plot widgets
        self.plotEventFitsCheckBox.setEnabled(int(model["writeEventTS"]))

        # check if parallel is available
        # try:
        # 	import zmq

        # 	self.parallelCheckBox.setChecked(int(model["parallelProc"]))
        # 	self.parallelCoresSpinBox.setValue(multiprocessing.cpu_count()-int(model["reserveNCPU"]))
        # except ImportError:
        # 	self.parallelCheckBox.hide()
        # 	self.parallelCoresSpinBox.hide()
        # 	self.parallelCoresLabel.hide()
        # Parallel controls are unconditionally hidden in this variant.
        self.parallelCheckBox.hide()
        self.parallelCoresSpinBox.hide()
        self.parallelCoresLabel.hide()

        # Select the model's current processing algorithm in its combo box.
        procidx = {}
        for v in self.analysisDataModel.eventProcessingAlgoKeys.keys():
            procidx[v] = self.processingAlgorithmComboBox.findText(v)

        self.processingAlgorithmComboBox.setCurrentIndex(
            procidx[self.analysisDataModel.EventProcessingAlgorithmLabel()])

        # If an advanced mode dialog exists, update its settings
        if self.advancedSettingsDialog:
            if self.dataFilterDenoise:
                fltr = self.analysisDataModel["FilterAlgorithm"]
            else:
                fltr = None
            self.advancedSettingsDialog.updateSettingsString(
                model.GenerateSettingsView(
                    eventPartitionAlgo=str(
                        self.partitionAlgorithmComboBox.currentText()),
                    eventProcessingAlgo=str(
                        self.processingAlgorithmComboBox.currentText()),
                    dataFilterAlgo=fltr))

        # If the trajviewer is initialized, update the denoising settings
        if self.trajViewerWindow:
            self.trajViewerWindow.waveletLevelSpinBox.setValue(
                int(self.analysisDataModel["level"]))

        # Hide Rfb and Cfb for QDF files
        [
            control.hide() for control in [
                self.RfbLabel, self.qdfRfbLineEdit, self.RfbUnitsLabel,
                self.CfbLabel, self.qdfCfbLineEdit, self.CfbUnitsLabel
            ]
        ]

        # Set ga toggle.
        # Any failure (missing/corrupt cache) simply hides the menu action.
        try:
            ga_cache = format_path(tempfile.gettempdir() + '/.ga')
            with open(ga_cache, "r") as garead:
                gac = json.load(garead)

            # NOTE(review): eval() on cached JSON string values — presumably
            # "True"/"False"; consider ast.literal_eval. Confirm cache origin.
            if eval(gac["gauimode"]):
                self.actionAggregate_Usage.setVisible(True)
            else:
                self.actionAggregate_Usage.setVisible(False)

            if eval(gac["gaenable"]):
                self.actionAggregate_Usage.setChecked(True)
            else:
                self.actionAggregate_Usage.setChecked(False)
        except:
            self.actionAggregate_Usage.setVisible(False)

        # Re-enable dialog update notifications now that widgets reflect the model.
        self.updateDialogs = True
Пример #45
0
	def _updateControls(self):
		"""
			Refresh every widget in the analysis settings GUI from the
			analysis data model. Dialog update notifications are suppressed
			while the widgets are being populated.
		"""
		self.updateDialogs=False

		model=self.analysisDataModel

		# Map supported data-type labels to their combo box indices.
		datidx= { 
					"QDF" : self.datTypeComboBox.findText("QDF"), 
					"ABF" : self.datTypeComboBox.findText("ABF"),
					"BIN" : self.datTypeComboBox.findText("BIN"),
					"TSV" : self.datTypeComboBox.findText("TSV")
				}
		# Auto-detect the data file type from the files present in the data
		# directory; the first matching extension wins.
		path=model["DataFilesPath"] 
		if len(glob.glob(format_path( str(path)+'/*qdf') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["QDF"] )
			model["filter"]="*.qdf"
		elif len(glob.glob( format_path(str(path)+'/*abf') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["ABF"] )
			model["filter"]="*.abf"
		elif len(glob.glob( format_path(str(path)+'/*bin') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["BIN"] )
			model["filter"]="*.bin"
		elif len(glob.glob( format_path(str(path)+'/*dat') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["BIN"] )
			model["filter"]="*.dat"
		elif len(glob.glob( format_path(str(path)+'/*tsv') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["TSV"] )
			model["filter"]="*.tsv"
		elif len(glob.glob( format_path(str(path)+'/*txt') )) > 0:
			self.datTypeComboBox.setCurrentIndex( datidx["TSV"] )
			model["filter"]="*.txt"

		# store the  data type in the trajviewer data struct
		model["DataFilesType"] = str(self.datTypeComboBox.currentText())

		self.startIndexLineEdit.setText(str(model["start"]))
		# end == -1 means "process to the end of the data"; show an empty field.
		if model["end"]==-1:
			self.endIndexLineEdit.setText("")
		else:
			self.endIndexLineEdit.setText(str(model["end"]))

		self.dcOffsetDoubleSpinBox.setValue(model["dcOffset"])

		# A mean or SD of -1 flags automatic baseline estimation.
		if float(model["meanOpenCurr"]) == -1. or float(model["sdOpenCurr"]) == -1.:
			self.baselineAutoCheckBox.setChecked(True)
			self.OnBaselineAutoCheckbox(True)

			# Manually disable baseline mean and SD controls
			self.baselineMeanLineEdit.setText("")
			self.baselineSDLineEdit.setText("")

			self.baselineMeanLineEdit.setPlaceholderText("<auto>")
			self.baselineSDLineEdit.setPlaceholderText("<auto>")

			self.baselineMeanLineEdit.setEnabled(False)
			self.baselineSDLineEdit.setEnabled(False)			
		else:
			# Populate baseline parameters
			self.baselineAutoCheckBox.setChecked(False)
			self.OnBaselineAutoCheckbox(False)

			self.baselineMeanLineEdit.setText(str(model["meanOpenCurr"]))
			self.baselineSDLineEdit.setText(str(model["sdOpenCurr"]))
			self.baselineBlockSizeDoubleSpinBox.setValue(float(model["blockSizeSec"]))

			# Manually enable baseline mean and SD controls
			self.baselineMeanLineEdit.setEnabled(True)
			self.baselineSDLineEdit.setEnabled(True)
	
		# Populate EventSegment parameters
		self._setThreshold(float(model["meanOpenCurr"]), float(model["sdOpenCurr"]), float(model["eventThreshold"]))
		
		# Populate misc parameters
		self.writeEventsCheckBox.setChecked(int(model["writeEventTS"]))

		# Populate plot widgets
		self.plotEventFitsCheckBox.setEnabled(int(model["writeEventTS"]))

		# check if parallel is available
		# try:
		# 	import zmq
			
		# 	self.parallelCheckBox.setChecked(int(model["parallelProc"]))				
		# 	self.parallelCoresSpinBox.setValue(multiprocessing.cpu_count()-int(model["reserveNCPU"]))
		# except ImportError:
		# 	self.parallelCheckBox.hide()
		# 	self.parallelCoresSpinBox.hide()
		# 	self.parallelCoresLabel.hide()
		# Parallel controls are unconditionally hidden in this variant.
		self.parallelCheckBox.hide()
		self.parallelCoresSpinBox.hide()
		self.parallelCoresLabel.hide()	

		# Select the model's current processing algorithm in its combo box.
		procidx= {}
		for v in self.analysisDataModel.eventProcessingAlgoKeys.keys():
			procidx[v]=self.processingAlgorithmComboBox.findText(v)
		
		self.processingAlgorithmComboBox.setCurrentIndex( procidx[self.analysisDataModel.EventProcessingAlgorithmLabel()] )

		# If an advanced mode dialog exists, update its settings
		if self.advancedSettingsDialog:
			if self.dataFilterDenoise:
				fltr=self.analysisDataModel["FilterAlgorithm"]
			else:
				fltr=None
			self.advancedSettingsDialog.updateSettingsString(
					model.GenerateSettingsView(
							eventPartitionAlgo=str(self.partitionAlgorithmComboBox.currentText()), 
							eventProcessingAlgo=str(self.processingAlgorithmComboBox.currentText()),
							dataFilterAlgo=fltr
						)
				)

		# If the trajviewer is initialized, update the denoising settings
		if self.trajViewerWindow:
			self.trajViewerWindow.waveletLevelSpinBox.setValue(int(self.analysisDataModel["level"]))

		# Hide Rfb and Cfb for QDF files
		[control.hide() for control in [self.RfbLabel, self.qdfRfbLineEdit, self.RfbUnitsLabel, self.CfbLabel, self.qdfCfbLineEdit, self.CfbUnitsLabel]]

		# Set ga toggle.
		# Any failure (missing/corrupt settings file) simply hides the menu action.
		try:
			with open(resource_path("mosaic/utilities/.ga"), "r") as garead:
				gac = json.load(garead)
		
			# NOTE(review): eval() on JSON string values — presumably
			# "True"/"False"; consider ast.literal_eval. Confirm file origin.
			if eval(gac["gauimode"]):
				self.actionAggregate_Usage.setVisible(True)
			else:
				self.actionAggregate_Usage.setVisible(False)

			if eval(gac["gaenable"]):
				self.actionAggregate_Usage.setChecked(True)
			else:
				self.actionAggregate_Usage.setChecked(False)
		except:
			self.actionAggregate_Usage.setVisible(False)

		# Re-enable dialog update notifications now that widgets reflect the model.
		self.updateDialogs=True
Пример #46
0
class metaTrajIO(object):
    """
			.. warning:: |metaclass|

			Initialize a TrajIO object. The object can load all the data in a directory,
			N files from a directory or from an explicit list of filenames. In addition 
			to the arguments defined below, implementations of this meta class may require 
			the definition of additional arguments. See the documentation of those classes
			for what those may be. For example, the qdfTrajIO implementation of metaTrajIO also requires
			the feedback resistance (Rfb) and feedback capacitance (Cfb) to be passed at initialization.

			:Parameters:

				- `dirname` :		all files from a directory ('<full path to data directory>')
				- `nfiles` :		if requesting N files (in addition to dirname) from a specified directory
				- `fnames` : 		explicit list of filenames ([file1, file2,...]). This argument cannot be used in conjuction with dirname/nfiles. The filter argument is ignored when used in combination with fnames. 
				- `filter` :		'<wildcard filter>' (optional, filter is '*' if not specified)
				- `start` : 		Data start point in seconds.
				- `end` : 			Data end point in seconds.
				- `datafilter` :	Handle to the algorithm to use to filter the data. If no algorithm is specified, datafilter	is None and no filtering is performed.
				- `dcOffset` :		Subtract a DC offset from the ionic current data.
		

			:Properties:

				- `FsHz` :					sampling frequency in Hz. If the data was decimated, this property will hold the sampling frequency after decimation.
				- `LastFileProcessed` :		return the data file that was last processed.
				- `ElapsedTimeSeconds` : 	return the analysis time in sec.
			

			:Errors:

				- `IncompatibleArgumentsError` : 	when conflicting arguments are used.
				- `EmptyDataPipeError` : 			when out of data.
				- `FileNotFoundError` : 			when data files do not exist in the specified path.
				- `InsufficientArgumentsError` : 	when incompatible arguments are passed
	"""
    __metaclass__ = ABCMeta

    def __init__(self, **kwargs):
        """
		"""
        self.CHUNKSIZE = 10000
        self.dataGenerator = None

        # start by setting all passed keyword arguments as class attributes
        for (k, v) in kwargs.iteritems():
            setattr(self, k, v)

        # Check if the passed arguments are sane
        if hasattr(self, 'dirname') and hasattr(self, 'fnames'):
            raise IncompatibleArgumentsError(
                "Incompatible arguments: expect either 'dirname' or 'fnames' when initializing class {0}."
                .format(type(self).__name__))

        # Check for the filter arg
        if not hasattr(self, 'filter'):
            self.filter = '*'

        if hasattr(self, 'fnames'):
            # set fnames here.
            self.dataFiles = self.fnames
            delattr(self, 'fnames')
        else:
            try:
                if hasattr(self, 'dirname') and hasattr(self, 'nfiles'):
                    # N files from a directory
                    self.dataFiles = self._buildFileList(
                        self.dirname, self.filter)[:int(self.nfiles)]
                    delattr(self, 'dirname')
                    delattr(self, 'nfiles')
                elif hasattr(self, 'dirname'):
                    # all files from a directory
                    self.dataFiles = self._buildFileList(
                        self.dirname, self.filter)
                    delattr(self, 'dirname')
                else:
                    raise IncompatibleArgumentsError(
                        "Missing arguments: 'dirname' or 'fnames' must be supplied to initialize {0}"
                        .format(type(self).__name__))
            except AttributeError, err:
                raise IncompatibleArgumentsError(err)

        # set additional meta-data
        self.nFiles = len(self.dataFiles)
        self.fileFormat = 'N/A'
        try:
            sep = path_separator()
            self.datPath = format_path(
                sep.join((self.dataFiles[0].split(sep))[:-1]))
        except IndexError, err:
            raise FileNotFoundError("Files not found.")
# Example #47 (score: 0)
	def OnStartAnalysis(self):
		"""Toggle the analysis run state from the Start/Stop control.

		When no analysis is running and a data path is configured: write the
		current settings to '<DataFilesPath>/.settings', disable the settings
		widgets, create a QThread hosting an analysisWorker, and queue the
		worker's 'startAnalysis' slot. When an analysis is already running:
		queue the worker's 'stopAnalysis' slot and show a transient
		"Stopping..." UI state. If no data files are found, a warning dialog
		is shown and the settings widgets are re-enabled.
		"""
		try:
			if not self.analysisRunning:
				if self.analysisDataModel["DataFilesPath"]:
					# Freeze the settings UI while the run is being set up.
					self._setEnableSettingsWidgets(False)
					self._setEnableDataSettingsWidgets(False)

					# Pass a filter algorithm only when denoising is enabled.
					if self.dataFilterDenoise:
						fltr=self.analysisDataModel["FilterAlgorithm"]
					else:
						fltr=None

					# Persist the effective settings next to the data files so the
					# run is reproducible.
					with open(format_path(self.analysisDataModel["DataFilesPath"]+"/.settings"), 'w') as f:
						f.write(
							self.analysisDataModel.GenerateSettingsView(
								eventPartitionAlgo=str(self.partitionAlgorithmComboBox.currentText()), 
								eventProcessingAlgo=str(self.processingAlgorithmComboBox.currentText()),
								dataFilterAlgo=fltr
							)
						)

					# Disable the start controls until the worker reports back.
					self.startAnalysisPushButton.setEnabled(False)
					self.actionStart_Analysis.setEnabled(False)

					self.startAnalysisPushButton.setStyleSheet("")
					self.startAnalysisPushButton.setText("Starting...")
					self.actionStart_Analysis.setText("Starting...")

					self.trajViewerWindow.hide()

					# Query the number of database files in the analysis directory
					self.nDBFiles=len(self._getdbfiles())	

					# setup the worker thread
					self.aThread=QtCore.QThread(parent=self)
					self.aWorker=analysisworker.analysisWorker(
						self.analysisDataModel.GenerateAnalysisObject(
								eventPartitionAlgo=str(self.partitionAlgorithmComboBox.currentText()), 
								eventProcessingAlgo=str(self.processingAlgorithmComboBox.currentText()),
								dataFilterAlgo=fltr
							)
						)
					# Move the worker to the background thread so queued slot
					# invocations below execute off the GUI thread.
					self.aWorker.moveToThread(self.aThread)

					self.aWorker.analysisFinished.connect(self.OnAnalysisFinished)
					
					# Handle the threads finished signal
					# self.aThread.finished.connect(self.OnThreadFinished)
					# self.aThread.finished.connect(self.aThread.quit)

					self.aThread.start()

					# Start the analysis (queued so it runs on the worker thread).
					QtCore.QMetaObject.invokeMethod(self.aWorker, 'startAnalysis', Qt.QueuedConnection)

					self.startButtonClicked=True	
			else:
				# Stop the analysis
				QtCore.QMetaObject.invokeMethod(self.aWorker, 'stopAnalysis', Qt.QueuedConnection)

				# Keep the controls disabled until the worker confirms the stop.
				self.startAnalysisPushButton.setEnabled(False)
				self.actionStart_Analysis.setEnabled(False)
				# self._setEnableSettingsWidgets(True)
				# self._setEnableDataSettingsWidgets(True)

				self.startAnalysisPushButton.setStyleSheet("")
				self.startAnalysisPushButton.setText("Stopping...")
				self.actionStart_Analysis.setText("Stopping...")

				# self.analysisRunning=False
		except FileNotFoundError:
			# NOTE(review): FileNotFoundError here is presumably the project's
			# custom error (this file is Python 2) -- confirm against imports.
			QtGui.QMessageBox.warning(self, "Data Error", "No data files found in " + str(self.analysisDataModel["DataFilesPath"]) )

			# Restore the UI to its idle state so the user can fix the path.
			self._setEnableSettingsWidgets(True)
			self._setEnableDataSettingsWidgets(True)

			self.startAnalysisPushButton.setStyleSheet("")
			self.startAnalysisPushButton.setText("Start Analysis")
			self.actionStart_Analysis.setText("Start Analysis")
# Example #48 (score: 0)
		log.info("Logs will be saved to: {0}".format(logdir))
	except AttributeError, err:
		if sys.platform.startswith('darwin'):
			logdir=format_path(os.path.expanduser('~')+"/Library/Logs/MOSAIC")
			if not os.path.exists(logdir):
				os.mkdir(logdir)
		elif sys.platform.startswith('linux'):
			if os.getuid()==0:
				logdir="/var/log/"
			else:
				log.info("MOSAIC log will be saved to ~/mosaic.log. Run MOSAIC with sudo to save logs to '/var/log/.")
				logdir=os.path.expanduser("~")
		else:
			logdir=os.path.expanduser("~")

	logname=format_path(logdir+"/mosaic.log")


	rfh=logging.handlers.RotatingFileHandler(filename=logname, maxBytes=mosaic.LogSizeBytes, backupCount=5)
	rfh.setFormatter(formatstr)
	if mosaic.DeveloperMode:
		rfh.setLevel(logging.DEBUG)
	else:
		rfh.setLevel(logging.INFO)
	
	log.addHandler(rfh)

	sh=None

	def __init__(self, *args, **kwargs):
		"""Accept and ignore all arguments; no per-instance setup is required."""