def __get_mda_timestamps(nwb_content):
    try:
        timestamps = np.array(
            nwb_content.acquisition['e-series'].timestamps)
    except KeyError:
        raise MissingDataException('MDA timestamps are not found')
    # An empty or all-zero timestamp array is treated as missing data.
    if timestamps.any():
        return timestamps
    raise MissingDataException('MDA timestamps are not found')
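# These snippets assume the project's module-level imports (e.g. import os,
# import numpy as np) and a MissingDataException type defined elsewhere in the
# codebase. As a minimal sketch, assuming it is a plain exception subclass:

class MissingDataException(Exception):
    """Raised when required files, timestamps, or metadata are absent."""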
def __extract_data_for_single_dataset(self, dataset):
    data_from_current_dataset = self.__get_data_from_current_dataset(dataset)
    if not self.__data_exist(data_from_current_dataset, dataset):
        raise MissingDataException(
            'Incomplete data in dataset ' + str(dataset.name)
            + ', missing mda files')
    return (data_from_current_dataset,
            [dataset.get_mda_timestamps()],
            dataset.get_continuous_time())
def create_summary(self):
    missing_metadata = self.__get_missing_metadata()
    if missing_metadata:
        message = ''
        for missing_metadata_file in missing_metadata:
            message += missing_metadata_file + '\n'
        raise MissingDataException(message)
    return MetadataValidationSummary(missing_metadata)
def __build_fl_shanks(self, probe_metadata, shanks_electrodes):
    for shank in probe_metadata['shanks']:
        shanks_electrodes_in_shank = []
        # Pop one electrode from the shared flat list for every electrode
        # slot declared for this shank.
        for _ in shank['electrodes']:
            if shanks_electrodes:
                shanks_electrodes_in_shank.append(shanks_electrodes.pop(0))
            else:
                raise MissingDataException('Not enough shanks_electrodes')
        yield self.__build_single_fl_shank(
            shank['shank_id'], shanks_electrodes_in_shank)
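# __build_fl_shanks hands out electrodes from one shared flat list, popping
# one entry per electrode slot declared for each shank. A standalone sketch of
# the same partitioning idea, using made-up metadata rather than the project's
# real probe schema:

def partition_electrodes(shanks_metadata, flat_electrodes):
    """Yield (shank_id, electrodes) pairs, consuming one shared list."""
    for shank in shanks_metadata:
        taken = []
        for _ in shank['electrodes']:
            if not flat_electrodes:
                raise MissingDataException('Not enough shanks_electrodes')
            taken.append(flat_electrodes.pop(0))
        yield shank['shank_id'], taken

# Two shanks with two electrode slots each consume four electrodes:
# list(partition_electrodes(
#     [{'shank_id': 0, 'electrodes': [0, 1]},
#      {'shank_id': 1, 'electrodes': [2, 3]}],
#     ['e0', 'e1', 'e2', 'e3']))
# -> [(0, ['e0', 'e1']), (1, ['e2', 'e3'])]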
def __get_pos_timestamps(nwb_content):
    timestamps = [
        np.array(spatial_series.timestamps)
        for spatial_series in nwb_content.processing['behavior']
        .data_interfaces['position'].spatial_series.values()
    ]
    timestamp = np.hstack(timestamps)
    if timestamp.any():
        return timestamp
    raise MissingDataException('POS timestamps are not found!')
def create_summary(self): """Creates ValidationSummary object with the results of validation Returns: PreprocessingValidationSummary: missing preprocessing files """ missing_preprocessing_data = self.__get_missing_preprocessing_data() message = '' if missing_preprocessing_data: for missing_preprocessing_file in missing_preprocessing_data: message += missing_preprocessing_file[ 0] + ' from epoch ' + missing_preprocessing_file[1] + '\n' raise MissingDataException(message + "are missing") return PreprocessingValidationSummary(missing_preprocessing_data)
def __extract_data(self):
    all_pos = []
    continuous_time = []
    for dataset in self.datasets:
        # Assumes get_data_path_from_dataset already ends with a path separator.
        data_from_current_dataset = [
            dataset.get_data_path_from_dataset('pos') + pos_file
            for pos_file in dataset.get_all_data_from_dataset('pos')
            if pos_file.endswith('.pos_online.dat')]
        if dataset.get_continuous_time() is None:
            raise MissingDataException(
                'Incomplete data in dataset ' + str(dataset.name)
                + ', missing continuous time file')
        all_pos.append(data_from_current_dataset)
        continuous_time.append(dataset.get_continuous_time())
    return all_pos, continuous_time
def __extract_data(self): """Gets online position tracking file and corresponding continuous time file""" all_position_directories = [] continuous_time_directories = [] for dataset in self.datasets: pos_online_paths = [ os.path.join(dataset.get_data_path_from_dataset('pos'), pos_file) for pos_file in dataset.get_all_data_from_dataset('pos') if pos_file.endswith('.pos_online.dat') ] if dataset.get_continuous_time() is None: raise MissingDataException('Incomplete data in dataset ' + str(dataset.name) + 'missing continuous time file') all_position_directories.append(pos_online_paths) continuous_time_directories.append(dataset.get_continuous_time()) return all_position_directories, continuous_time_directories
def validate_sections(self):
    required_sections = [
        'experimenter_name', 'lab', 'institution', 'experiment_description',
        'session_description', 'session_id', 'subject', 'units',
        'data_acq_device', 'cameras', 'tasks', 'associated_files',
        'associated_video_files', 'times_period_multiplier',
        'behavioral_events', 'electrode_groups',
        'ntrode_electrode_group_channel_map',
    ]
    # Raise on the first required metadata section that is absent.
    for section in required_sections:
        if section not in self.metadata:
            raise MissingDataException('metadata is missing ' + section)
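# Usage sketch for validate_sections: it only needs an object exposing a
# dict-like .metadata attribute. SimpleNamespace stands in for the project's
# real validator class here, purely for illustration.

from types import SimpleNamespace

incomplete_metadata = SimpleNamespace(metadata={'lab': 'example lab'})
# validate_sections(incomplete_metadata)
# -> raises MissingDataException('metadata is missing experimenter_name')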
def __check_if_path_exists(self, path):
    if not os.path.exists(path):
        raise MissingDataException('missing ' + path + ' directory')
def create_summary(self):
    if not os.path.exists(self.path):
        raise MissingDataException(
            'xml file ' + self.path + ' does not exist!')
    return XmlFilesValidationSummary()