def _get_brainvision_encoding(vhdr_file):
    """Get the encoding of .vhdr and .vmrk files.

    Parameters
    ----------
    vhdr_file : str
        Path to the header file.

    Returns
    -------
    enc : str
        Encoding of the .vhdr file to pass it on to open() function
        either 'UTF-8' (default) or whatever encoding scheme is specified
        in the header.
    """
    with open(vhdr_file, 'rb') as header:
        raw_bytes = header.read()

    marker = b'Codepage='
    pos = raw_bytes.find(marker)
    if pos == -1:
        # No explicit codepage declared in the header: fall back to UTF-8.
        enc = 'UTF-8'
        src = '(default)'
    else:
        # Take the first whitespace-delimited token after 'Codepage='.
        enc = raw_bytes[pos + len(marker):].split()[0].decode()
        src = '(read from header)'
    logger.debug(f'Detected file encoding: {enc} {src}.')
    return enc
def map_ch_coords_to_mne(inst):
    """Transform sensors to MNE coordinates

    .. note:: operates in place

    .. warning:: For several reasons we do not use the MNE coordinates
                 for the inverse modeling. This however won't always play
                 nicely with visualization.

    Parameters
    ----------
    inst : MNE data containers
        Raw, Epochs, Evoked.
    """
    bti_dev_t = Transform('ctf_meg', 'meg', _get_bti_dev_t())
    dev_ctf_t = inst.info['dev_ctf_t']
    for ch in inst.info['chs']:
        # Test for a missing location *before* copying: the original code
        # sliced first (``ch['loc'][:]``), which would raise a TypeError on
        # None and made the ``is not None`` guard unreachable.
        loc = ch['loc']
        if loc is not None:
            logger.debug('converting %s' % ch['ch_name'])
            # Copy so the coil-trans round trip never aliases the stored loc.
            t = _loc_to_coil_trans(loc[:])
            t = _convert_coil_trans(t, dev_ctf_t, bti_dev_t)
            ch['loc'] = _coil_trans_to_loc(t)
def _handle_scans_reading(scans_fname, raw, bids_path, verbose=False):
    """Read associated scans.tsv and set meas_date.

    Parameters
    ----------
    scans_fname : path-like
        Path to the ``*_scans.tsv`` file to read.
    raw : mne.io.Raw
        The raw object whose measurement date is set in place.
    bids_path : mne_bids.BIDSPath
        Path object identifying the recording; its ``datatype``,
        ``basename`` and ``fpath`` are used to locate the matching row.
    verbose : bool
        If True, log the acquisition time that was applied.

    Returns
    -------
    raw : mne.io.Raw
        The same raw object, with ``meas_date`` updated when the scans
        file provides an acquisition time.
    """
    scans_tsv = _from_tsv(scans_fname)
    fname = bids_path.fpath.name

    if fname.endswith('.pdf'):
        # for BTI files, the scan is an entire directory
        fname = fname.split('.')[0]

    # get the row corresponding to the file
    # use string concatenation instead of os.path
    # to work nicely with windows
    data_fname = bids_path.datatype + '/' + fname
    fnames = scans_tsv['filename']
    acq_times = scans_tsv['acq_time']
    # Raises ValueError if the recording has no row in scans.tsv.
    row_ind = fnames.index(data_fname)

    # check whether all split files have the same acq_time
    # and throw an error if they don't
    if '_split-' in fname:
        split_idx = fname.find('split-')
        # Match every sibling split file of this recording, e.g.
        # '<datatype>/<basename-prefix>split-NN_<datatype><suffix>'.
        pattern = re.compile(bids_path.datatype + '/' +
                             bids_path.basename[:split_idx] +
                             r'split-\d+_' + bids_path.datatype +
                             bids_path.fpath.suffix)
        split_fnames = list(filter(pattern.match, fnames))
        split_acq_times = []
        for split_f in split_fnames:
            split_acq_times.append(acq_times[fnames.index(split_f)])
        if len(set(split_acq_times)) != 1:
            raise ValueError("Split files must have the same acq_time.")

    # extract the acquisition time from scans file
    acq_time = acq_times[row_ind]
    if acq_time != 'n/a':
        # microseconds in the acquisition time is optional
        if '.' not in acq_time:
            # acquisition time ends with '.%fZ' microseconds string
            acq_time += '.0Z'
        # BIDS acq_time is an ISO-8601 timestamp, interpreted as UTC.
        acq_time = datetime.strptime(acq_time, '%Y-%m-%dT%H:%M:%S.%fZ')
        acq_time = acq_time.replace(tzinfo=timezone.utc)

        if verbose:
            logger.debug(f'Loaded {scans_fname} scans file to set '
                         f'acq_time as {acq_time}.')
        raw.set_meas_date(acq_time)

    return raw
def transform_sensors_to_mne(inst):
    """Transform sensors to MNE coordinates.

    Operates in place on ``inst.info['chs']``.

    For several reasons we do not use the MNE coordinates for the inverse
    modeling. This however won't always play nicely with visualization.

    Parameters
    ----------
    inst : MNE data container
        Raw, Epochs, or Evoked instance whose channel locations are
        converted in place.
    """
    bti_dev_t = Transform('ctf_meg', 'meg', _get_bti_dev_t())
    dev_ctf_t = inst.info['dev_ctf_t']
    for ch in inst.info['chs']:
        # Test for a missing location *before* copying: the original code
        # sliced first (``ch['loc'][:]``), which would raise a TypeError on
        # None and made the ``is not None`` guard unreachable.
        loc = ch['loc']
        if loc is not None:
            logger.debug('converting %s' % ch['ch_name'])
            # Copy so the coil-trans round trip never aliases the stored loc.
            t = _loc_to_coil_trans(loc[:])
            t = _convert_coil_trans(t, dev_ctf_t, bti_dev_t)
            ch['loc'] = _coil_trans_to_loc(t)
def _handle_scans_reading(scans_fname, raw, bids_path, verbose=False):
    """Read associated scans.tsv and set meas_date.

    Parameters
    ----------
    scans_fname : path-like
        Path to the ``*_scans.tsv`` file to read.
    raw : mne.io.Raw
        The raw object whose measurement date is set in place.
    bids_path : mne_bids.BIDSPath
        Path object identifying the recording. It is not modified.
    verbose : bool
        If True, log the acquisition time that was applied.

    Returns
    -------
    raw : mne.io.Raw
        The same raw object, with ``meas_date`` updated when the scans
        file provides an acquisition time.
    """
    scans_tsv = _from_tsv(scans_fname)
    fname = bids_path.fpath.name

    if '_split-' in fname:
        # for split files, scans only stores the filename without ``split``
        extension = bids_path.fpath.suffix
        # Work on a copy: the original code called ``bids_path.update()``
        # directly, silently mutating the caller's BIDSPath object.
        bids_path = bids_path.copy()
        bids_path.update(split=None, extension=extension)
        fname = bids_path.basename
    elif fname.endswith('.pdf'):
        # for BTI files, the scan is an entire directory
        fname = fname.split('.')[0]

    # get the row corresponding to the file
    # use string concatenation instead of os.path
    # to work nicely with windows
    data_fname = bids_path.datatype + '/' + fname
    fnames = scans_tsv['filename']
    acq_times = scans_tsv['acq_time']
    # Raises ValueError if the recording has no row in scans.tsv.
    row_ind = fnames.index(data_fname)

    # extract the acquisition time from scans file
    acq_time = acq_times[row_ind]
    if acq_time != 'n/a':
        # microseconds in the acquisition time is optional
        if '.' not in acq_time:
            # acquisition time ends with '.%fZ' microseconds string
            acq_time += '.0Z'
        # BIDS acq_time is an ISO-8601 timestamp, interpreted as UTC.
        acq_time = datetime.strptime(acq_time, '%Y-%m-%dT%H:%M:%S.%fZ')
        acq_time = acq_time.replace(tzinfo=timezone.utc)

        if verbose:
            logger.debug(f'Loaded {scans_fname} scans file to set '
                         f'acq_time as {acq_time}.')
        raw.set_meas_date(acq_time)

    return raw
def _send_command(self, command):
    """Send a command to the server.

    Parameters
    ----------
    command : str
        The command to send.

    Returns
    -------
    resp : str
        The response from the server.
    """
    logger.debug('Sending command: %s' % command)
    command += '\n'
    self._cmd_sock.sendall(command.encode('utf-8'))

    buf = []  # collected bytes chunks from the socket
    begin = time.time()
    while True:
        # if we got some data, then break after wait sec
        if buf and time.time() - begin > self._timeout:
            break
        # if we got no data at all, wait a little longer
        elif time.time() - begin > self._timeout * 2:
            break
        try:
            chunk = self._cmd_sock.recv(8192)
            if chunk:
                buf.append(chunk)
                begin = time.time()
            else:
                time.sleep(0.1)
        except Exception:
            # best-effort polling: recv errors (e.g. would-block) just mean
            # no data was available this iteration
            pass
    # recv() returns bytes; the original ``''.join(buf)`` raised a TypeError
    # (str separator joining bytes). Join as bytes, then decode to match the
    # documented str return type.
    return b''.join(buf).decode('utf-8')
def _fun(verbose=None):
    """Log a fixed message at debug level; ``verbose`` is accepted unused."""
    logger.debug('Test')
def update_sidecar_json(bids_path, entries, verbose=True):
    """Update sidecar files using a dictionary or JSON file.

    Will update metadata fields inside the path defined by
    ``bids_path.fpath`` according to the ``entries``. If a field does not
    exist in the corresponding sidecar file, then that field will be
    created according to the ``entries``. If a field does exist in the
    corresponding sidecar file, then that field will be updated
    according to the ``entries``.

    For example, if ``InstitutionName`` is not defined in the sidecar json
    file, then trying to update ``InstitutionName`` to ``Martinos Center``
    will update the sidecar json file to have ``InstitutionName`` as
    ``Martinos Center``.

    Parameters
    ----------
    bids_path : BIDSPath
        The set of paths to update. The :class:`mne_bids.BIDSPath` instance
        passed here **must** have the ``.root`` attribute set. The
        ``.datatype`` attribute **may** be set. If ``.datatype`` is
        not set and only one data type (e.g., only EEG or MEG data)
        is present in the dataset, it will be selected automatically.
        This must uniquely identify an existing file path, else an error
        will be raised.
    entries : dict | str | pathlib.Path
        A dictionary, or JSON file that defines the
        sidecar fields and corresponding values to be updated to.
    verbose : bool
        The verbosity level.

    Notes
    -----
    This function can only update JSON files.

    Sidecar JSON files include files such as ``*_ieeg.json``,
    ``*_coordsystem.json``, ``*_scans.json``, etc.

    You should double check that your update dictionary is correct
    for the corresponding sidecar JSON file because it will perform
    a dictionary update of the sidecar fields according to
    the passed in dictionary overwriting any information that was
    previously there.

    Raises
    ------
    RuntimeError
        If the specified ``bids_path.fpath`` cannot be found in the
        dataset.
    RuntimeError
        If the ``bids_path.fpath`` does not have ``.json`` extension.

    Examples
    --------
    >>> # update sidecar json file
    >>> bids_path = BIDSPath(root='./', subject='001', session='001',
    ...                      task='test', run='01', suffix='ieeg',
    ...                      extension='.json')
    >>> entries = {'PowerLineFrequency': 50}
    >>> update_sidecar_json(bids_path, entries)
    >>> # update sidecar coordsystem json file
    >>> bids_path = BIDSPath(root='./', subject='001', session='001',
    ...                      suffix='coordsystem', extension='.json')
    >>> entries = {'iEEGCoordinateSystem,': 'Other'}
    >>> update_sidecar_json(bids_path, entries)
    """
    # get all matching json files
    bids_path = bids_path.copy()
    if bids_path.extension != '.json':
        raise RuntimeError('Only works for ".json" files. The '
                           'BIDSPath object passed in has '
                           f'{bids_path.extension} extension.')

    # get the file path
    fpath = bids_path.fpath
    if not fpath.exists():
        raise RuntimeError(f'Sidecar file does not '
                           f'exist for {fpath}.')

    # sidecar update either from file, or as dictionary
    if isinstance(entries, dict):
        sidecar_tmp = entries
    else:
        # BIDS mandates UTF-8 for JSON sidecars; be explicit instead of
        # relying on the platform's locale-dependent default encoding.
        with open(entries, 'r', encoding='utf-8') as tmp_f:
            sidecar_tmp = json.load(tmp_f,
                                    object_pairs_hook=OrderedDict)

    if verbose:
        logger.debug(sidecar_tmp)
        logger.debug(f'Updating {fpath}...')

    # load in sidecar filepath
    with open(fpath, 'r', encoding='utf-8') as tmp_f:
        sidecar_json = json.load(tmp_f, object_pairs_hook=OrderedDict)

    # update sidecar JSON file with the fields passed in
    sidecar_json.update(**sidecar_tmp)

    # write back the sidecar JSON
    _write_json(fpath, sidecar_json, overwrite=True, verbose=verbose)
def _handle_scans_reading(scans_fname, raw, bids_path):
    """Read associated scans.tsv and set meas_date.

    Parameters
    ----------
    scans_fname : path-like
        Path to the ``*_scans.tsv`` file to read.
    raw : mne.io.Raw
        The raw object whose measurement date is (re)set in place.
    bids_path : mne_bids.BIDSPath
        Path object identifying the recording; its ``datatype``,
        ``basename`` and ``fpath`` are used to locate the matching row.

    Returns
    -------
    raw : mne.io.Raw
        The same raw object, anonymized and with ``meas_date`` set when
        the scans file provides an acquisition time.
    """
    scans_tsv = _from_tsv(scans_fname)
    fname = bids_path.fpath.name

    if fname.endswith('.pdf'):
        # for BTI files, the scan is an entire directory
        fname = fname.split('.')[0]

    # get the row corresponding to the file
    # use string concatenation instead of os.path
    # to work nicely with windows
    data_fname = bids_path.datatype + '/' + fname
    fnames = scans_tsv['filename']
    # The acq_time column is optional in BIDS; treat a missing column as
    # all-'n/a' so the lookup below stays uniform.
    if 'acq_time' in scans_tsv:
        acq_times = scans_tsv['acq_time']
    else:
        acq_times = ['n/a'] * len(fnames)
    # Raises ValueError if the recording has no row in scans.tsv.
    row_ind = fnames.index(data_fname)

    # check whether all split files have the same acq_time
    # and throw an error if they don't
    if '_split-' in fname:
        split_idx = fname.find('split-')
        # Match every sibling split file of this recording, e.g.
        # '<datatype>/<basename-prefix>split-NN_<datatype><suffix>'.
        pattern = re.compile(bids_path.datatype + '/' +
                             bids_path.basename[:split_idx] +
                             r'split-\d+_' + bids_path.datatype +
                             bids_path.fpath.suffix)
        split_fnames = list(filter(pattern.match, fnames))
        split_acq_times = []
        for split_f in split_fnames:
            split_acq_times.append(acq_times[fnames.index(split_f)])
        if len(set(split_acq_times)) != 1:
            raise ValueError("Split files must have the same acq_time.")

    # extract the acquisition time from scans file
    acq_time = acq_times[row_ind]
    if acq_time != 'n/a':
        # microseconds in the acquisition time is optional
        if '.' not in acq_time:
            # acquisition time ends with '.%fZ' microseconds string
            acq_time += '.0Z'
        # BIDS acq_time is an ISO-8601 timestamp, interpreted as UTC.
        acq_time = datetime.strptime(acq_time, '%Y-%m-%dT%H:%M:%S.%fZ')
        acq_time = acq_time.replace(tzinfo=timezone.utc)

        logger.debug(f'Loaded {scans_fname} scans file to set '
                     f'acq_time as {acq_time}.')

        # First set measurement date to None and then call anonymize() to
        # remove any traces of the measurement date we wish
        # to replace - it might lurk out in more places than just
        # raw.info['meas_date'], e.g. in info['meas_id']['secs'] and in
        # info['file_id'], which are not affected by set_meas_date().
        # The combined use of set_meas_date(None) and anonymize() is
        # suggested by the MNE documentation, and in fact we cannot load
        # e.g. OpenNeuro ds003392 without this combination.
        raw.set_meas_date(None)
        with warnings.catch_warnings():
            # This is to silence a warning emitted by MNE-Python < 0.24.
            # The warnings filter can be safely removed once we drop
            # support for MNE-Python 0.23 and older.
            warnings.filterwarnings(
                action='ignore',
                message="Input info has 'meas_date' set to None",
                category=RuntimeWarning,
                module='mne')
            raw.anonymize(daysback=None, keep_his=True)
        raw.set_meas_date(acq_time)

    return raw