Example 1
def isReliable(filename):
	# Target file
	mesaid = int(filename[-4:])
	X,y = fs.load_csv(filename)
	criteria = []

	# MESA variables
	filter = ['ai_all5','overall5','slewake5',]
	df = datasetCsv[datasetCsv['mesaid'] == mesaid][filter].iloc[0]
	criteria += [
		df[0] >= ai_all5,	# low ai index (events per hour)
		df[1] >= overall5,	# low overall quality
		df[2] == slewake5,	# poor EEG (no sleep stage / arousal scoring)
		]

	# Double-check arousals
	criteria += [sum(y) > 0]

	# Mask thresholds
	X,_,_ = wake_removal_endpoints(X,None,None,settings.SAMPLE_RATE)
	masklist, mask = make_masks(X)
	criteria += [sum(m)/len(m) <= maskTreshhold_single for m in masklist]
	criteria += [sum(mask)/len(mask) <= maskThreshold_all]

	return criteria
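
The function returns the list of individual boolean criteria rather than a single verdict, so a caller can both filter files and inspect which check failed. A minimal usage sketch, assuming a file counts as reliable only when every criterion holds (an assumption, not stated in the source):

def filter_reliable(files):
	# Hypothetical helper: keep only files for which every criterion holds.
	reliable = []
	for filename in files:
		if all(isReliable(filename)):
			reliable.append(filename)
	return reliable
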
Example 2
def compile_epochs(files, save = True):
	'''
	Compiles a single list of all epochs from all the given files. The
	result can be stored in a file for later use in model training. Progress,
	errors and counts are logged for testing and evaluation purposes.
	'''
	# initialise log
	log = get_log('Epochs', True)
	log.print('Total files: {0}'.format(len(files)))
	log.printHL()

	# run through the list with try/except in case of errors
	epochs = []
	for i, filename in enumerate(files):
		try:
			X,y = fs.load_csv(filename)
			eps = epochs_from_prep(X, y)
			epochs.extend(eps)
			log.print('{0} created {1} epochs'.format(filename, len(eps)))
			# periodic backup saves if saving is enabled
			if save and i > 0 and i % max(1, len(files) // 14) == 0:
				epoch.save_epochs(epochs)
				log.printHL()
				log.print('Backup save of {0} epochs'.format(len(epochs)))
				log.printHL()
		except Exception as e:
			log.print('{0} Exception: {1}'.format(filename, str(e)))
	# optionally store the compiled epochs
	if save:
		epoch.save_epochs(epochs)
		log.printHL()
		log.print('Final save of {0} epochs'.format(len(epochs)))
		log.printHL()
	return epochs
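
A minimal usage sketch; the filenames below are placeholders, not taken from the source, and saving is disabled so that epoch.save_epochs is never triggered:

files = ['mesa-sleep-0001', 'mesa-sleep-0002', 'mesa-sleep-0003']	# placeholder names
epochs = compile_epochs(files, save=False)
print('Compiled {0} epochs in total'.format(len(epochs)))
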
Example 3
def hours_of_sleep_files(files):
	'''
	Calculates the hours of sleep for a list of files;
	the results are logged for testing purposes.
	'''
	total = 0.0
	log = get_log("SleepHour", echo=True)
	for file in files:
		X,_ = fs.load_csv(file)
		X,_,_ = wake_removal_endpoints(X,None,None,settings.SAMPLE_RATE)
		t = count_hours_of_sleep(transpose(X)[0])
		log.print(file + ' -- {0} hours'.format(t))
		total += t
	log.printHL()
	log.print('total -- {0} hours'.format(total))
	return total
def predict_file(filename,
                 model=None,
                 filter=False,
                 removal=True,
                 return_probabilities=False):
    '''
    Performs predictions on a given file using a given model.
    The loaded file is expected to be preprocessed already.
    If no model is specified, a model is loaded from the default path.
    '''
    X, y = fs.load_csv(filename)
    epochs = epochs_from_prep(X, y, settings.EPOCH_LENGTH,
                              settings.OVERLAP_FACTOR, settings.SAMPLE_RATE,
                              filter, removal)
    if model is None:
        model = gru(load_graph=True, path='gru.h5')
    epochs = dataset(epochs, shuffle=False, exclude_ptt=True).epochs
    epochs = model.predict(epochs, return_probabilities=return_probabilities)
    epochs.sort(key=lambda x: x.index_start, reverse=False)
    yhat, timecol = reconstruct(X, epochs, settings.PREDICT_THRESHOLD)
    return y, yhat, timecol
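
A minimal usage sketch comparing the reconstructed predictions with the reference scoring; the filename is a placeholder and the per-sample agreement measure is an assumption, not taken from the source:

# Hypothetical usage: predict one preprocessed file and report per-sample agreement.
y, yhat, timecol = predict_file('mesa-sleep-0001')	# placeholder filename
n = min(len(y), len(yhat))
agreement = sum(1 for a, b in zip(y, yhat) if a == b) / float(n)
print('Agreement with reference scoring: {0:.3f}'.format(agreement))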