def get(silent=False):
    par = iopar.read(_PAR_ID_STR, {})
    if not par and not silent:
        setup()
    elif not par and silent:
        setup_silent()
    return iopar.read(_PAR_ID_STR, default=default())
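A minimal usage sketch for the accessor above; the import path `oneibl.params` is an assumption, and the field shown is one written by setup():

# Usage sketch (import path assumed; adjust to where this module lives)
from oneibl import params

par = params.get(silent=True)  # triggers setup_silent() on a fresh machine
print(par.CACHE_DIR)           # e.g. ~/Downloads/FlatIron once defaults are written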
def test_setup_silent(self):
    self.assertIsNone(iopar.read(params._PAR_ID_STR))
    params.setup_silent()
    par = iopar.read(params._PAR_ID_STR)
    self.assertIsNotNone(par)
    # now do another test to see if it preserves current values
    par = par.as_dict()
    par['ALYX_LOGIN'] = '******'
    iopar.write(params._PAR_ID_STR, par)
    params.setup_silent()
    par2 = iopar.read(params._PAR_ID_STR)
    self.assertEqual(par, par2.as_dict())
def default_data_root():
    """Returns the path to the integration data.

    The path is loaded from the '.ibl_ci' parameter file's 'data_root' parameter,
    or the current working directory.
    """
    return Path(params.read('ibl_ci', {'data_root': '.'}).data_root)
def setup_alyx_params():
    setup_silent()
    par = iopar.read(_PAR_ID_STR).as_dict()
    [usr, pwd] = login(title='Alyx credentials')
    par['ALYX_LOGIN'] = usr
    par['ALYX_PWD'] = pwd
    iopar.write(_PAR_ID_STR, par)
def make_graph(self, out_dir=None, show=True):
    if not out_dir:
        par = params.read('one_params')
        out_dir = par.CACHE_DIR
    m = Digraph('G', filename=str(Path(out_dir).joinpath(self.__module__ + '_graphs.gv')))
    m.attr(rankdir='TD')
    e = Digraph(name='cluster_' + self.label)
    e.attr('node', shape='box')
    e.node('root', label=self.label)
    e.attr('node', shape='ellipse')
    for k in self.tasks:
        j = self.tasks[k]
        if len(j.parents) == 0:
            e.edge('root', j.name)
        else:
            [e.edge(p.name, j.name) for p in j.parents]
    m.subgraph(e)
    m.attr(label=r'\n\nPre-processing\n')
    m.attr(fontsize='20')
    if show:
        m.view()
    return m
def setup():
    par_default = default()
    par_current = iopar.read(_PAR_ID_STR, par_default)
    par = iopar.as_dict(par_default)
    for k in par.keys():
        cpar = _get_current_par(k, par_current)
        if "PWD" not in k:
            par[k] = input("Param " + k + ", current value is [" + str(cpar) + "]:") or cpar

    cpar = _get_current_par("ALYX_PWD", par_current)
    prompt = "Enter the Alyx password for " + par["ALYX_LOGIN"] + " (leave empty to keep current):"
    par["ALYX_PWD"] = getpass(prompt) or cpar

    cpar = _get_current_par("HTTP_DATA_SERVER_PWD", par_current)
    prompt = ("Enter the FlatIron HTTP password for " + par["HTTP_DATA_SERVER_LOGIN"] +
              " (leave empty to keep current): ")
    par["HTTP_DATA_SERVER_PWD"] = getpass(prompt) or cpar

    # default to home dir if an empty dir somehow made it here
    if len(par['CACHE_DIR']) == 0:
        par['CACHE_DIR'] = str(Path.home() / "Downloads" / "FlatIron")

    par = iopar.from_dict(par)

    # create the cache directory if needed
    if par.CACHE_DIR and not os.path.isdir(par.CACHE_DIR):
        os.mkdir(par.CACHE_DIR)
    iopar.write(_PAR_ID_STR, par)
    print('ONE Parameter file location: ' + iopar.getfile(_PAR_ID_STR))
def test_params(self):
    # first go to and from dictionary
    par_dict = self.par_dict
    par = params.from_dict(par_dict)
    self.assertEqual(params.as_dict(par), par_dict)
    # next go to and from dictionary via json
    par2 = params.read('toto')
    self.assertEqual(par, par2)
def login_auto(globus_client_id, str_app='globus'):
    token = params.read(str_app)
    if not token:
        raise ValueError("Token file doesn't exist, run ibllib.io.globus.setup first")
    client = globus.NativeAppAuthClient(globus_client_id)
    client.oauth2_start_flow(refresh_tokens=True)
    authorizer = globus.RefreshTokenAuthorizer(token.transfer_rt, client)
    return globus.TransferClient(authorizer=authorizer)
def login_auto(globus_client_id, str_app='globus/default'):
    token = params.read(str_app)
    required_fields = {'refresh_token', 'access_token', 'expires_at_seconds'}
    if not (token and required_fields.issubset(token.as_dict())):
        raise ValueError("Token file doesn't exist, run ibllib.io.globus.setup first")
    client = globus.NativeAppAuthClient(globus_client_id)
    client.oauth2_start_flow(refresh_tokens=True)
    authorizer = globus.RefreshTokenAuthorizer(token.refresh_token, client)
    return globus.TransferClient(authorizer=authorizer)
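A hedged usage sketch for login_auto(); the client ID below is a placeholder, and `globus` aliases `globus_sdk` as in the snippets above:

# Usage sketch; the client ID is a placeholder, not a real credential
GLOBUS_CLIENT_ID = '00000000-0000-0000-0000-000000000000'

tc = login_auto(GLOBUS_CLIENT_ID, str_app='globus/default')
for ep in tc.endpoint_search('FlatIron'):  # any TransferClient call works once authorized
    print(ep['display_name'])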
def download_histology_data(subject, lab):
    if lab == 'hoferlab':
        lab_temp = 'mrsicflogellab'
    else:
        lab_temp = lab
    par = params.read('one_params')

    try:
        FLAT_IRON_HIST_REL_PATH = Path('histology', lab_temp, subject,
                                       'downsampledStacks_25', 'sample2ARA')
        baseurl = (par.HTTP_DATA_SERVER + '/' + '/'.join(FLAT_IRON_HIST_REL_PATH.parts))
        r = requests.get(baseurl, auth=(par.HTTP_DATA_SERVER_LOGIN, par.HTTP_DATA_SERVER_PWD))
        r.raise_for_status()
    except Exception as err:
        print(err)
        try:
            subject_rem = subject.replace("_", "")
            FLAT_IRON_HIST_REL_PATH = Path('histology', lab_temp, subject_rem,
                                           'downsampledStacks_25', 'sample2ARA')
            baseurl = (par.HTTP_DATA_SERVER + '/' + '/'.join(FLAT_IRON_HIST_REL_PATH.parts))
            r = requests.get(baseurl, auth=(par.HTTP_DATA_SERVER_LOGIN,
                                            par.HTTP_DATA_SERVER_PWD))
            r.raise_for_status()
        except Exception as err:
            print(err)
            path_to_nrrd = None
            return path_to_nrrd

    tif_files = []
    for line in r.text.splitlines():
        result = re.findall('href="(.*).tif"', line)
        if result:
            tif_files.append(result[0] + '.tif')

    CACHE_DIR = Path(Path.home(), 'Downloads', 'FlatIron', lab, 'Subjects', subject,
                     'histology')
    CACHE_DIR.mkdir(exist_ok=True, parents=True)
    path_to_files = []
    for file in tif_files:
        path_to_image = Path(CACHE_DIR, file)
        if not path_to_image.exists():
            url = (baseurl + '/' + file)
            http_download_file(url, cache_dir=CACHE_DIR,
                               username=par.HTTP_DATA_SERVER_LOGIN,
                               password=par.HTTP_DATA_SERVER_PWD)
        path_to_nrrd = tif2nrrd(path_to_image)
        path_to_files.append(path_to_nrrd)

    if len(path_to_files) > 3:
        path_to_files = path_to_files[1:3]
    return path_to_files
def test_new_default_param(self):
    # in this case an updated version of the code brings in a new parameter
    default = {'A': 'tata2',
               'O': 'toto2',
               'I': 'titi2',
               'E': 'tete2',
               'num': 15,
               'liste': [1, 'turlu']}
    expected_result = {'A': 'tata',
                       'O': 'toto',
                       'I': 'titi',
                       'num': 15,
                       'liste': [1, 'turlu'],
                       'apath': str(Path('/gna/gna/gna')),
                       'E': 'tete2',
                       }
    par2 = params.read('toto', default=default)
    self.assertEqual(par2, params.from_dict(expected_result))
    # on the next pass the parameter has been added to the param file
    par2 = params.read('toto', default=default)
    self.assertEqual(par2, params.from_dict(expected_result))
    # check that it doesn't break if a named tuple is given instead of a dict
    par3 = params.read('toto', default=par2)
    self.assertEqual(par2, par3)
    # check that a non-existing parfile returns None
    pstring = str(uuid.uuid4())
    par = params.read(pstring)
    self.assertIsNone(par)
    # check that a non-existing parfile with default returns default
    par = params.read(pstring, default=default)
    self.assertEqual(par, params.from_dict(default))
    # even if this default is a Params named tuple
    par = params.read(pstring, default=par)
    self.assertEqual(par, params.from_dict(default))
def __init__(self, res_um=25, par=None, scaling=np.array([1, 1, 1]), mock=False):
    """
    :param res_um: 10, 25 or 50 um
    :param par: dictionary of parameters to override system ones
    :param scaling:
    :param mock:
    :return: atlas.BrainAtlas
    """
    par = params.read('one_params')
    FILE_REGIONS = str(Path(__file__).parent.joinpath('allen_structure_tree.csv'))
    FLAT_IRON_ATLAS_REL_PATH = Path('histology', 'ATLAS', 'Needles', 'Allen')
    if mock:
        image, label = [np.zeros((528, 456, 320), dtype=np.bool) for _ in range(2)]
    else:
        path_atlas = Path(par.CACHE_DIR).joinpath(FLAT_IRON_ATLAS_REL_PATH)
        file_image = path_atlas.joinpath(f'average_template_{res_um}.nrrd')
        file_label = path_atlas.joinpath(f'annotation_{res_um}.nrrd')
        if not file_image.exists():
            _download_atlas_flatiron(file_image, FLAT_IRON_ATLAS_REL_PATH, par)
        if not file_label.exists():
            _download_atlas_flatiron(file_label, FLAT_IRON_ATLAS_REL_PATH, par)
        image, _ = nrrd.read(file_image, index_order='C')  # dv, ml, ap
        label, _ = nrrd.read(file_label, index_order='C')  # dv, ml, ap
        label = np.swapaxes(np.swapaxes(label, 2, 0), 1, 2)  # label[iap, iml, idv]
        image = np.swapaxes(np.swapaxes(image, 2, 0), 1, 2)  # image[iap, iml, idv]
    # resulting volumes origin: x right, y front, z top
    regions = _regions_from_allen_csv(FILE_REGIONS)
    xyz2dims = np.array([1, 0, 2])
    dims2xyz = np.array([1, 0, 2])
    dxyz = res_um * 1e-6 * np.array([1, -1, -1]) * scaling
    # we use Bregma as the origin
    ibregma = (ALLEN_CCF_LANDMARKS_MLAPDV_UM['bregma'] / res_um)
    self.res_um = res_um
    super().__init__(image, label, dxyz, regions, ibregma,
                     dims2xyz=dims2xyz, xyz2dims=xyz2dims)
def AllenAtlas(res_um=25, par=None):
    """
    Instantiates an atlas.BrainAtlas corresponding to the Allen CCF at the given resolution
    using the IBL Bregma and coordinate system
    :param res_um: 25 or 50 um
    :return: atlas.BrainAtlas
    """
    if par is None:
        # Bregma indices for the 10um Allen Brain Atlas, mlapdv
        pdefault = {
            'PATH_ATLAS': '/datadisk/BrainAtlas/ATLASES/Allen/',
            'FILE_REGIONS': str(Path(__file__).parent.joinpath('allen_structure_tree.csv')),
            'INDICES_BREGMA': list(np.array([1140 - (570 + 3.9), 540, 0 + 33.2]))
        }
        par = params.read('ibl_histology', default=pdefault)
        if not Path(par.PATH_ATLAS).exists():
            raise NotImplementedError("Atlas doesn't exist ! Mock option not implemented yet")
            # TODO: mock atlas to get only the coordinate framework
        params.write('ibl_histology', par)
    else:
        par = Bunch(par)
    # file_image = Path(path_atlas).joinpath(f'ara_nissl_{res_um}.nrrd')
    file_image = Path(par.PATH_ATLAS).joinpath(f'average_template_{res_um}.nrrd')
    file_label = Path(par.PATH_ATLAS).joinpath(f'annotation_{res_um}.nrrd')
    image, header = nrrd.read(file_image, index_order='C')  # dv, ml, ap
    image = np.swapaxes(np.swapaxes(image, 2, 0), 1, 2)  # image[iap, iml, idv]
    label, header = nrrd.read(file_label, index_order='C')  # dv, ml, ap
    label = np.swapaxes(np.swapaxes(label, 2, 0), 1, 2)  # label[iap, iml, idv]
    # resulting volumes origin: x right, y front, z top
    df_regions = pd.read_csv(par.FILE_REGIONS)
    regions = BrainRegions(id=df_regions.id.values,
                           name=df_regions.name.values,
                           acronym=df_regions.acronym.values)
    xyz2dims = np.array([1, 0, 2])
    dims2xyz = np.array([1, 0, 2])
    dxyz = res_um * 1e-6 * np.array([-1, -1, -1])
    ibregma = (np.array(par.INDICES_BREGMA) * 10 / res_um)
    return BrainAtlas(image, label, regions, dxyz, ibregma,
                      dims2xyz=dims2xyz, xyz2dims=xyz2dims)
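A brief usage sketch for the factory above, assuming the Allen nrrd volumes already exist under PATH_ATLAS (the `image` attribute name is inferred from the BrainAtlas constructor arguments, not confirmed by this snippet):

# Usage sketch; requires the nrrd volumes to be present under PATH_ATLAS
brain_atlas = AllenAtlas(res_um=25)
print(brain_atlas.image.shape)  # ap, ml, dv voxel counts at 25 um resolution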
def setup_silent():
    par_current = iopar.read(_PAR_ID_STR)
    par_default = default()
    if par_current is None:
        par = par_default
    else:
        par = iopar.as_dict(par_default)
        for k in par.keys():
            cpar = _get_current_par(k, par_current)
            par[k] = cpar
        par = iopar.from_dict(par)
    if par.CACHE_DIR:
        Path(par.CACHE_DIR).mkdir(parents=True, exist_ok=True)
    iopar.write(_PAR_ID_STR, par)
def test_new_default_param(self):
    # in this case an updated version of the code brings in a new parameter
    default = {'A': 'tata2',
               'O': 'toto2',
               'I': 'titi2',
               'E': 'tete2',
               'num': 15,
               'liste': [1, 'turlu']}
    expected_result = {'A': 'tata',
                       'O': 'toto',
                       'I': 'titi',
                       'num': 15,
                       'liste': [1, 'turlu'],
                       'apath': str(Path('/gna/gna/gna')),
                       'E': 'tete2',
                       }
    par2 = params.read('toto', default=default)
    self.assertCountEqual(par2.as_dict(), expected_result)
    # on the next pass the parameter has been added to the param file
    par2 = params.read('toto', default=default)
    self.assertCountEqual(par2.as_dict(), expected_result)
    # check that it doesn't break if a named tuple is given instead of a dict
    par3 = params.read('toto', default=par2)
    self.assertEqual(par2, par3)
    # check that a non-existing parfile raises an error
    pstring = str(uuid.uuid4())
    with self.assertRaises(FileNotFoundError):
        params.read(pstring)
    # check that a non-existing parfile with default returns default
    par = params.read(pstring, default=default)
    self.assertCountEqual(par, params.from_dict(default))
    # even if this default is a Params named tuple
    par = params.read(pstring, default=par)
    self.assertEqual(par, params.from_dict(default))
    # check default empty dict
    pstring = 'foobar'
    filename = Path(params.getfile(pstring))
    self.assertFalse(filename.exists())
    par = params.read(pstring, default={})
    self.assertIsNone(par)
    self.assertTrue(filename.exists())
def __init__(self, *args, data_path=None, **kwargs):
    """A base class for locating integration test data.

    Upon initialization, loads the path to the integration test data.  The path is loaded
    from the '.ibl_ci' parameter file's 'data_root' parameter, or the current working
    directory.  The data root may be overridden with the `data_path` keyword arg.  The
    data path must be an existing directory containing a 'Subjects_init' folder.

    :param data_path: The data root path to the integration data directory
    """
    super().__init__(*args, **kwargs)
    # Store the path to the integration data
    default_data_root = params.read('ibl_ci', {'data_root': '.'}).data_root
    self.data_path = Path(data_path or default_data_root)  # default to current directory
    data_present = (self.data_path.exists()
                    and self.data_path.is_dir()
                    and any(self.data_path.glob('Subjects_init')))
    if not data_present:
        raise FileNotFoundError(f'Invalid data root folder {self.data_path.absolute()}\n\t'
                                'must contain a "Subjects_init" folder')
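A minimal sketch of a test case built on this base class; the base class name `IntegrationTest` and the test body are assumptions for illustration:

import unittest


class TestDataLayout(IntegrationTest):  # base class name assumed
    def test_subjects_folder(self):
        # self.data_path was resolved and validated in the base __init__
        self.assertTrue((self.data_path / 'Subjects_init').exists())


if __name__ == '__main__':
    unittest.main()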
def setUp(self):
    self.one = _ONE
    remote_repo = '15f76c0c-10ee-11e8-a7ed-0a448319c2f8'  # flatiron
    self.par = params.read('globus')
    label = 'test_patcher'
    authorizer = globus_sdk.AccessTokenAuthorizer(self.par.TRANSFER_TOKEN)
    self.gtc = globus_sdk.TransferClient(authorizer=authorizer)
    globus_transfer = globus_sdk.TransferData(
        self.gtc, self.par.LOCAL_REPO, remote_repo,
        verify_checksum=True, sync_level='checksum', label=label)
    globus_delete = globus_sdk.DeleteData(
        self.gtc, remote_repo, verify_checksum=True, sync_level='checksum', label=label)
    self.patcher = GlobusPatcher(one=self.one, globus_delete=globus_delete,
                                 globus_transfer=globus_transfer)
def __init__(self, res_um=25, par=None, scaling=np.array([1, 1, 1]), mock=False,
             hist_path=None):
    """
    :param res_um: 10, 25 or 50 um
    :param par: dictionary of parameters to override system ones
    :param scaling:
    :param mock:
    :param hist_path:
    :return: atlas.BrainAtlas
    """
    par = params.read('one_params')
    FLAT_IRON_ATLAS_REL_PATH = Path('histology', 'ATLAS', 'Needles', 'Allen')
    if mock:
        image, label = [np.zeros((528, 456, 320), dtype=np.bool) for _ in range(2)]
        label[:, :, 100:105] = True
    else:
        path_atlas = Path(par.CACHE_DIR).joinpath(FLAT_IRON_ATLAS_REL_PATH)
        file_image = hist_path or path_atlas.joinpath(f'average_template_{res_um}.nrrd')
        file_label = path_atlas.joinpath(f'annotation_{res_um}.nrrd')
        if not file_image.exists():
            file_image = path_atlas.joinpath(f'average_template_{res_um}.npz')
        if not file_label.exists():
            file_label = path_atlas.joinpath(f'annotation_{res_um}.npz')
        if not file_image.exists():
            _download_atlas_flatiron(file_image, FLAT_IRON_ATLAS_REL_PATH, par)
        if not file_label.exists():
            _download_atlas_flatiron(file_label, FLAT_IRON_ATLAS_REL_PATH, par)
        # loads the files
        image = self._read_volume(file_image)
        label = self._read_volume(file_label)
    regions = regions_from_allen_csv(FILE_REGIONS)
    xyz2dims = np.array([1, 0, 2])  # this is the c-contiguous ordering
    dims2xyz = np.array([1, 0, 2])
    dxyz = res_um * 1e-6 * np.array([1, -1, -1]) * scaling
    # we use Bregma as the origin
    ibregma = (ALLEN_CCF_LANDMARKS_MLAPDV_UM['bregma'] / res_um)
    self.res_um = res_um
    super().__init__(image, label, dxyz, regions, ibregma,
                     dims2xyz=dims2xyz, xyz2dims=xyz2dims)
import time
from datetime import datetime, timedelta
import logging

from ibllib.io import params, globus
from ibllib.io.globus import as_globus_path
import oneibl.params
import globus_sdk
from globus_sdk.exc import TransferAPIError

GLOBUS_PARAM_STRING = 'globus/admin'
logger = logging.getLogger('ibllib')

# Read in parameters
p = params.read(GLOBUS_PARAM_STRING, {'local_endpoint': None, 'remote_endpoint': None})
LOCAL_REPO = p.local_endpoint  # Endpoint UUID from Website
SERVER_ID = p.remote_endpoint  # FlatIron
DST_DIR = params.read('ibl_ci', {'data_root': '.'}).data_root
GLOBUS_CLIENT_ID = oneibl.params.get().GLOBUS_CLIENT_ID

# Constants
SRC_DIR = '/integration'
POLL = (5, 60 * 60)  # min max seconds between pinging server
TIMEOUT = 24 * 60 * 60  # seconds before timeout
status_map = {
    'ACTIVE': ('QUEUED', 'ACTIVE'),
    'FAILED': ('ENDPOINT_ERROR', 'PERMISSION_DENIED', 'CONNECT_FAILED'),
    'INACTIVE': 'PAUSED_BY_ADMIN'
}

try:
def __init__(self, res_um=25, brainmap='Allen', scaling=np.array([1, 1, 1]), mock=False,
             hist_path=None):
    """
    :param res_um: 10, 25 or 50 um
    :param brainmap: defaults to 'Allen', see ibllib.atlas.BrainRegion for re-mappings
    :param scaling: scale factor along ml, ap, dv for squeeze and stretch ([1, 1, 1])
    :param mock: for testing purpose
    :param hist_path:
    :return: atlas.BrainAtlas
    """
    par = params.read('one_params')
    FLAT_IRON_ATLAS_REL_PATH = Path('histology', 'ATLAS', 'Needles', 'Allen')
    LUT_VERSION = "v01"  # version 01 is the lateralized version
    regions = BrainRegions()
    xyz2dims = np.array([1, 0, 2])  # this is the c-contiguous ordering
    dims2xyz = np.array([1, 0, 2])
    # we use Bregma as the origin
    self.res_um = res_um
    ibregma = (ALLEN_CCF_LANDMARKS_MLAPDV_UM['bregma'] / self.res_um)
    dxyz = self.res_um * 1e-6 * np.array([1, -1, -1]) * scaling
    if mock:
        image, label = [np.zeros((528, 456, 320), dtype=np.int16) for _ in range(2)]
        label[:, :, 100:105] = 1327  # lookup index for retina, id 304325711 (no id 1327)
    else:
        path_atlas = Path(par.CACHE_DIR).joinpath(FLAT_IRON_ATLAS_REL_PATH)
        file_image = hist_path or path_atlas.joinpath(f'average_template_{res_um}.nrrd')
        # get the image volume
        if not file_image.exists():
            _download_atlas_flatiron(file_image, FLAT_IRON_ATLAS_REL_PATH, par)
        # get the remapped label volume
        file_label = path_atlas.joinpath(f'annotation_{res_um}.nrrd')
        if not file_label.exists():
            _download_atlas_flatiron(file_label, FLAT_IRON_ATLAS_REL_PATH, par)
        file_label_remap = path_atlas.joinpath(f'annotation_{res_um}_lut_{LUT_VERSION}.npz')
        if not file_label_remap.exists():
            label = self._read_volume(file_label)
            _logger.info("computing brain atlas annotations lookup table")
            # lateralize atlas: for this the regions of the left hemisphere have primary
            # keys opposite to the normal ones
            lateral = np.zeros(label.shape[xyz2dims[0]])
            lateral[int(np.floor(ibregma[0]))] = 1
            lateral = np.sign(np.cumsum(lateral)[np.newaxis, :, np.newaxis] - 0.5)
            label = label * lateral
            _, im = ismember(label, regions.id)
            label = np.reshape(im.astype(np.uint16), label.shape)
            _logger.info(f"saving {file_label_remap} ...")
            np.savez_compressed(file_label_remap, label)
        # loads the files
        label = self._read_volume(file_label_remap)
        image = self._read_volume(file_image)
    super().__init__(image, label, dxyz, regions, ibregma,
                     dims2xyz=dims2xyz, xyz2dims=xyz2dims)
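A standalone sketch of the lateralization mask computed above: a single 1 placed at the ml index of bregma becomes a -1/+1 sign mask via cumsum (the sizes below are made up for illustration):

# Standalone demo of the lateralization trick (toy sizes)
import numpy as np

n_ml, i_bregma = 10, 4           # toy volume width and bregma index along ml
lateral = np.zeros(n_ml)
lateral[i_bregma] = 1
mask = np.sign(np.cumsum(lateral) - 0.5)
print(mask)  # [-1. -1. -1. -1.  1.  1.  1.  1.  1.  1.] -> flips left-hemisphere ids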
def load_videopc_params():
    if not VIDEOPC_PARAMS_FILE.exists():
        create_videopc_params()
    return params.read('videopc_params')
import logging

from ibllib.io import params
from ibllib.io.globus import as_globus_path
import globus_sdk
from globus_sdk.exc import TransferAPIError

GLOBUS_PARAM_STRING = 'globus/admin'
DEFAULT_PAR = {'local_endpoint': None, 'remote_endpoint': None, 'GLOBUS_CLIENT_ID': None}
logger = logging.getLogger('ibllib')
logger.setLevel(logging.DEBUG)  # For logging transferred files

# Read in parameters
p = params.read(GLOBUS_PARAM_STRING, DEFAULT_PAR)
LOCAL_REPO = p.local_endpoint  # Endpoint UUID from Website
SERVER_ID = p.remote_endpoint  # FlatIron
GLOBUS_CLIENT_ID = p.GLOBUS_CLIENT_ID
DST_DIR = params.read('ibl_ci', {'data_root': '.'}).data_root

# Constants
SRC_DIR = '/integration'
POLL = (5, 60 * 60)  # min max seconds between pinging server
TIMEOUT = 24 * 60 * 60  # seconds before timeout
status_map = {
    'ACTIVE': ('QUEUED', 'ACTIVE'),
    'FAILED': ('ENDPOINT_ERROR', 'PERMISSION_DENIED', 'CONNECT_FAILED'),
    'INACTIVE': 'PAUSED_BY_ADMIN'
}

try:
def load_ephyspc_params():
    if not EPHYSPC_PARAMS_FILE.exists():
        create_ephyspc_params()
    return params.read('ephyspc_params')
DEFAULT_PAR = {'local_endpoint': None, 'remote_endpoint': None, 'GLOBUS_CLIENT_ID': None}

print("""Setting up Globus
1. Login to the Globus Website (ask devs for the login credentials)
2. Go to Endpoints and create a new endpoint for the local device (the one that will run
   this script).
3. In the new endpoint's overview page, copy the 'Endpoint UUID' field.  This is the
   LOCAL_REPO_ID.
4. Go to the 'IBL Top Level' endpoint overview page and copy the 'Endpoint UUID' field.
   This is the REMOTE_REPO_ID.
5. Copy your GLOBUS_CLIENT_ID (ask the software devs for this).
""")

params_id = 'globus/admin'
pars = params.read(params_id, DEFAULT_PAR)

default = pars.local_endpoint
local_endpoint = input(f'Enter your LOCAL_REPO_ID ({default}):')
pars = pars.set('local_endpoint', local_endpoint.strip() or default)
params.write(params_id, pars)

default = pars.remote_endpoint
remote_endpoint = input(f'Enter your REMOTE_REPO_ID ({default}):')
pars = pars.set('remote_endpoint', remote_endpoint.strip() or default)
params.write(params_id, pars)

default = pars.GLOBUS_CLIENT_ID
globus_client_id = input(f'Enter your GLOBUS_CLIENT_ID ({default}):').strip()
pars = pars.set('GLOBUS_CLIENT_ID', globus_client_id or default)
params.write(params_id, pars)
def get():
    par = iopar.read(_PAR_ID_STR)
    if par is None:
        setup()
    return iopar.read(_PAR_ID_STR, default=default())
def setup_silent():
    par = iopar.read(_PAR_ID_STR, default())
    if par.CACHE_DIR:
        Path(par.CACHE_DIR).mkdir(parents=True, exist_ok=True)