def test_tnet_make_parcellation():
    """Parcellate the dummy fMRI data and check the hard-coded output shape."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tags = {'sub': '001', 'task': 'a', 'run': 'alpha'}
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='fmriprep',
                             bids_suffix='preproc', bids_tags=tags,
                             raw_data_exists=False)
    # Cortical atlas combined with a subcortical one
    tnet.make_parcellation('gordon2014_333+sub-maxprob-thr25-1mm')
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='fmriprep',
                             bids_suffix='preproc', bids_tags=tags,
                             raw_data_exists=False)
    tnet.make_parcellation('gordon2014_333')
    tnet.load_data('parcellation')
    # Hard coded facts about dummy data
    assert tnet.parcellation_data_[0].shape == (2, 333)
def test_tnet_make_parcellation_legacy():
    """Legacy keyword-API variant of the parcellation test.

    Renamed from ``test_tnet_make_parcellation`` (pyflakes F811): the file
    defines that name twice, so only one definition was being collected by
    pytest.  The rename makes both variants run.
    """
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='fmriprep',
                             last_analysis_step='preproc', subjects='001',
                             tasks='a', runs='alpha', raw_data_exists=False)
    tnet.make_parcellation('gordon2014_333+sub-maxprob-thr25-1mm')
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='fmriprep',
                             last_analysis_step='preproc', subjects='001',
                             tasks='a', runs='alpha', raw_data_exists=False)
    tnet.make_parcellation('gordon2014_333')
    tnet.load_parcellation_data()
    # Hard coded facts about dummy data
    assert tnet.parcellation_data_.max() == 1
    assert tnet.parcellation_data_.shape == (1, 333, 2)
    # NOTE(review): the original ended by constructing one more unused
    # TenetoBIDS object; removed as dead code.
def test_define():
    """File-selection counts for the dummy BIDS dataset."""
    dataset_path = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(dataset_path, pipeline='fmriprep',
                             raw_data_exists=False)
    # All preprocessed files in the pipeline
    assert len(tnet.get_selected_files(quiet=1)) == 6
    tnet = teneto.TenetoBIDS(dataset_path, pipeline='fmriprep',
                             raw_data_exists=False)
    # Restricted to one subject via a tag dict
    assert len(tnet.get_selected_files(quiet=1, forfile={'sub': '001'})) == 3
    # Restricted to a single named file
    target = 'sub-001_task-a_run-beta_bold_preproc.nii.gz'
    assert len(tnet.get_selected_files(quiet=1, forfile=target)) == 1
def test_get_pipeline_subdir_alternatives():
    """Pipeline-subdir discovery lists subdirs, and is None without a pipeline.

    Fixes the ``not x in``/``not x is None`` constructions to the idiomatic
    ``x not in`` / ``x is not None`` (behavior unchanged).
    """
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests',
                             bids_tags={'task': 'a'}, raw_data_exists=True)
    subdir = tnet.get_pipeline_subdir_alternatives()
    if 'parcellation' not in subdir:
        raise AssertionError()
    if 'tvc' not in subdir:
        raise AssertionError()
    # With no pipeline selected there are no subdir alternatives
    tnet = teneto.TenetoBIDS(bids_dir, bids_tags={'task': 'a'},
                             raw_data_exists=True)
    subdir = tnet.get_pipeline_subdir_alternatives()
    if subdir is not None:
        raise AssertionError()
def test_tnet_derive():
    """Jackknife-derived TVC matches a directly computed (negated) correlation."""
    # load parc file with data
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests',
                             pipeline_subdir='parcellation', bids_suffix='roi',
                             bids_tags={'sub': '001', 'task': 'a',
                                        'run': 'alpha'},
                             raw_data_exists=False)
    tnet.load_data('parcellation')
    tnet.set_confound_pipeline('fmriprep')
    # Turn the confound_corr_report to True once matplotlib works with concurrent
    tnet.derive({'method': 'jackknife', 'dimord': 'node,time'},
                update_pipeline=True, confound_corr_report=False)
    tnet.load_data('tvc')
    parcdata = tnet.parcellation_data_[0]
    parcdata.drop('0', axis=1, inplace=True)
    # Jackknife estimates are sign-flipped relative to the plain correlation
    R_jc = parcdata.transpose().corr().values[0, 1] * -1
    tvc = tnet.tvc_data_[0]
    edge = (tvc['i'] == 0) & (tvc['j'] == 1) & (tvc['t'] == 0)
    jc = float(tvc[edge]['weight'])
    assert np.round(R_jc, 12) == np.round(jc, 12)
def test_tnet_scrubbing_and_spline():
    """Scrubbing with cubic-spline replacement alters the flagged timepoints.

    Removes two unused locals from the original (the ``alt`` binding and the
    never-used ``targ`` array); the confound-alternatives lookup is kept as a
    smoke check since it exercises library code.
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='teneto-tests',
                             pipeline_subdir='parcellation', bids_suffix='roi',
                             bids_tags={'sub': '001', 'task': 'a',
                                        'run': 'alpha'},
                             raw_data_exists=False)
    # Set the confound pipeline in fmriprep
    tnet.load_data('parcellation')
    dat_orig = np.squeeze(tnet.parcellation_data_[0].values)
    tnet.set_confound_pipeline('fmriprep')
    # Smoke-check that confound alternatives can be listed
    tnet.get_confound_alternatives()
    tnet.set_exclusion_timepoint('confound1', '>1', replace_with='cubicspline')
    tnet.load_data('parcellation')
    dat_scrub = tnet.parcellation_data_[0].values
    # Make sure there is a difference
    assert np.sum(dat_scrub != dat_orig)
    # Show that the difference between the original data at scrubbed time
    # point is larger in dat_orig (the spline smooths the series)
    assert np.sum(
        np.abs(np.diff(dat_orig[0])) - np.abs(np.diff(dat_scrub[0]))) > 0
def test_halftests():
    """Smoke test for get_space_alternatives (these tests could be made better)."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests', tasks='a',
                             raw_data_exists=False)
    tnet.get_space_alternatives()
def test_tnet_derive_with_removeconfounds():
    """Confound regression records a 'confoundremoval' entry in the sidecar.

    Replaces the ``not x in`` constructions with the idiomatic ``x not in``
    (behavior unchanged).
    """
    # load parc file with data
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='teneto-tests',
                             pipeline_subdir='parcellation', bids_suffix='roi',
                             bids_tags={'sub': '001', 'task': 'a',
                                        'run': '01'},
                             raw_data_exists=False)
    # Set the confound pipeline in fmriprep
    tnet.set_confound_pipeline('fmriprep')
    alt = tnet.get_confound_alternatives()
    if 'confound1' not in alt:
        raise AssertionError()
    if 'confound2' not in alt:
        raise AssertionError()
    # Set the confounds
    tnet.set_confounds('confound1')
    # Remove confounds
    tnet.removeconfounds(transpose=True)
    f = tnet.get_selected_files()[0]
    f = f.replace('.tsv', '.json')
    with open(f) as fs:
        sidecar = json.load(fs)
    if 'confoundremoval' not in sidecar:
        raise AssertionError()
def test_tnet_checksidecar():
    """Both processing steps are recorded in the output JSON sidecar.

    Replaces ``not x in`` with the idiomatic ``x not in`` and drops the
    redundant ``.keys()`` in the membership tests (behavior unchanged).
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='fmriprep', bids_suffix='bold',
                             bids_tags={'sub': '001', 'task': 'a',
                                        'run': '01', 'desc': 'preproc'},
                             raw_data_exists=False)
    tnet.make_parcellation(atlas='Schaefer2018',
                           atlas_desc='400Parcels17Networks')
    tnet.load_data('parcellation')
    tnet.set_confound_pipeline('fmriprep')
    tnet.set_exclusion_timepoint('confound1', '<=0', replace_with='nan')
    sidecar_path = (
        teneto.__path__[0] + '/data/testdata/dummybids/derivatives/teneto_'
        + teneto.__version__
        + '/sub-001/func/parcellation/sub-001_task-a_run-01_desc-preproc_roi.json'
    )
    with open(sidecar_path) as fs:
        sidecar = json.load(fs)
    # Check both steps are in sidecar
    if 'parcellation' not in sidecar:
        raise AssertionError()
    if 'scrubbed_timepoints' not in sidecar:
        raise AssertionError()
def test_communitydetection_legacy():
    """Legacy-API temporal community detection with hard-coded expectations.

    Renamed from ``test_communitydetection`` (pyflakes F811): a later test
    with the same name otherwise replaces this one at pytest collection.
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='teneto-tests', pipeline_subdir='tvc',
                             last_analysis_step='tvc', subjects='001',
                             tasks='b', runs='alpha', raw_data_exists=False)
    community_detection_params = {
        'resolution_parameter': 1,
        'interslice_weight': 0,
        'quality_function': 'ReichardtBornholdt2006'
    }
    tnet.communitydetection(community_detection_params, 'temporal')
    # Compensating for data not being in a versioned directory
    tnet.set_pipeline('teneto_' + teneto.__version__)
    tnet.load_community_data('temporal')
    C = np.squeeze(tnet.community_data_)
    # Hard-coded community structure of the dummy data
    assert C[0, 0] == C[1, 0] == C[2, 0]
    assert C[3, 0] == C[4, 0] == C[5, 0]
    assert C[0, 2] == C[1, 2] == C[2, 2] == C[3, 2]
    assert C[4, 2] == C[5, 2]
    assert C[3, 0] != C[0, 0]
    assert C[4, 2] != C[0, 2]
def test_define_run_then_sub_legacy():
    """Legacy-API run-then-subject filtering.

    Renamed from ``test_define_run_then_sub`` (pyflakes F811): a later test
    with the same name otherwise replaces this one at pytest collection.
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='fmriprep', runs='alpha',
                             raw_data_exists=False)
    assert len(tnet.get_selected_files(quiet=1)) == 4
    tnet.set_subjects('001')
    assert len(tnet.get_selected_files(quiet=1)) == 2
def test_set_space_error():
    """set_space raises ValueError for an unknown space name."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests', tasks='a',
                             raw_data_exists=False, njobs=1)
    with pytest.raises(ValueError):
        tnet.set_space('bla')
def test_set_bad_subjects():
    """Bad subjects accumulate whether given as a string or as a list."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests', tasks='a',
                             raw_data_exists=False)
    tnet.set_bad_subjects('001')
    tnet.set_bad_subjects(['002'])
    assert len(tnet.bad_subjects) == 2
def test_define_task_then_run_legacy():
    """Legacy-API task-then-run filtering.

    Renamed from ``test_define_task_then_run`` (pyflakes F811): a later test
    with the same name otherwise replaces this one at pytest collection.
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='fmriprep', tasks='a',
                             raw_data_exists=False)
    assert len(tnet.get_selected_files(quiet=1)) == 4
    tnet.set_runs('beta')
    assert len(tnet.get_selected_files(quiet=1)) == 2
def test_define_run_then_sub():
    """Filtering by run tag, then by subject tag, narrows the selection."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='fmriprep',
                             bids_tags={'run': 'alpha'}, raw_data_exists=False)
    assert len(tnet.get_selected_files(quiet=1)) == 4
    tnet.set_bids_tags({'sub': '001'})
    assert len(tnet.get_selected_files(quiet=1)) == 2
def test_define_sub_then_task():
    """Filtering by subject, then by task, narrows the selection."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='fmriprep', subjects='001',
                             raw_data_exists=False)
    assert len(tnet.get_selected_files(quiet=1)) == 3
    tnet.set_tasks('a')
    assert len(tnet.get_selected_files(quiet=1)) == 2
def test_define_task_then_run():
    """Filtering by task tag, then by run tag, narrows the selection.

    Replaces ``if not a == b`` with the idiomatic ``if a != b`` (behavior
    unchanged).
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='fmriprep', bids_tags={'task': 'a'},
                             raw_data_exists=False)
    if len(tnet.get_selected_files(quiet=1)) != 4:
        raise AssertionError()
    tnet.set_bids_tags({'run': '02'})
    if len(tnet.get_selected_files(quiet=1)) != 2:
        raise AssertionError()
def test_get_pipeline_alternatives():
    """Both derivative pipelines of the dummy dataset are discoverable.

    Replaces ``not x in`` with the idiomatic ``x not in`` (behavior
    unchanged).
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='teneto-tests', bids_tags={'task': 'a'},
                             raw_data_exists=False)
    pipeline = tnet.get_pipeline_alternatives()
    if 'fmriprep' not in pipeline:
        raise AssertionError()
    if 'teneto-tests' not in pipeline:
        raise AssertionError()
def test_get_pipeline_alternatives_legacy():
    """Legacy-API pipeline discovery.

    Renamed from ``test_get_pipeline_alternatives`` (pyflakes F811): the file
    defines that name twice, so only one definition was being collected.
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='teneto-tests', tasks='a',
                             raw_data_exists=False)
    pipeline = tnet.get_pipeline_alternatives()
    assert 'fmriprep' in pipeline
    assert 'teneto-tests' in pipeline
def test_tnet_io():
    """A pickle round-trip preserves the selected-files list.

    Replaces ``if not a == b`` with the idiomatic ``if a != b`` (behavior
    unchanged).
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='fmriprep', bids_suffix='preproc',
                             bids_tags={'sub': '001', 'task': 'a',
                                        'run': 'alpha'},
                             raw_data_exists=False)
    tnet.save_aspickle(teneto.__path__[0]
                       + '/data/testdata/dummybids/teneosave.pkl')
    tnet2 = teneto.TenetoBIDS.load_frompickle(
        teneto.__path__[0] + '/data/testdata/dummybids/teneosave.pkl')
    if tnet2.get_selected_files() != tnet.get_selected_files():
        raise AssertionError()
def test_communitydetection():
    """Temporal community detection runs and its output can be reloaded."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests',
                             pipeline_subdir='tvc', bids_suffix='tvcconn',
                             bids_tags={'sub': '001', 'task': 'b',
                                        'run': 'alpha'},
                             raw_data_exists=False)
    params = {'resolution': 1, 'intersliceweight': 0}
    tnet.communitydetection(params, 'temporal')
    # Output is not written into a versioned directory; point at it explicitly
    tnet.set_pipeline('teneto_' + teneto.__version__)
    tnet.load_data('communities')
def test_make_fc_and_tvc():
    """Jackknife TVC under the different FC-based weighting options.

    Verifies the documented relationships between the raw (standardized)
    jackknife estimate JC, the subject FC value R, the group FC value r, and
    the weighted variants: JCw == JC*r + R, JCv == JC*r, JCm == JC + R.

    The repeated TenetoBIDS construction is factored into a local helper and
    ``if not a == b`` is replaced with ``if a != b`` (behavior unchanged).
    """
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tags = {'sub': '001', 'task': 'a', 'run': 'alpha'}

    def _fresh_tnet():
        # One parcellation-stage TenetoBIDS object per derivation run
        return teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests',
                                 pipeline_subdir='parcellation',
                                 bids_suffix='roi', bids_tags=tags,
                                 raw_data_exists=False)

    # Load parc data, make FC, JC method
    tnet = _fresh_tnet()
    tnet.load_data('parcellation')
    r = tnet.make_functional_connectivity(returngroup=True)[0, 1]
    fc_files = tnet.get_selected_files(pipeline='functionalconnectivity')
    if '_conn.tsv' not in fc_files[0]:
        raise AssertionError()
    if len(fc_files) != 1:
        raise AssertionError()
    R = tnet.parcellation_data_[0].transpose().corr().values[0, 1]
    tnet.derive_temporalnetwork({'method': 'jackknife',
                                 'dimord': 'node,time',
                                 'postpro': 'standardize'},
                                update_pipeline=True,
                                confound_corr_report=False)
    tnet.load_data('tvc')
    JC = tnet.tvc_data_[0].iloc[0].values[-1]
    # JC method with FC dual weighting
    tnet = _fresh_tnet()
    tnet.derive_temporalnetwork({'method': 'jackknife',
                                 'dimord': 'node,time',
                                 'weight-mean': 'from-subject-fc',
                                 'weight-var': 'from-subject-fc'},
                                update_pipeline=True,
                                confound_corr_report=False)
    tnet.load_data('tvc')
    JCw = tnet.tvc_data_[0].iloc[0].values[-1]
    # JC method with FC mean weighting
    tnet = _fresh_tnet()
    tnet.derive_temporalnetwork({'method': 'jackknife',
                                 'dimord': 'node,time',
                                 'weight-mean': 'from-subject-fc'},
                                update_pipeline=True,
                                confound_corr_report=False)
    tnet.load_data('tvc')
    JCm = tnet.tvc_data_[0].iloc[0].values[-1]
    # JC method with FC variance weighting
    tnet = _fresh_tnet()
    tnet.derive_temporalnetwork({'method': 'jackknife',
                                 'dimord': 'node,time',
                                 'weight-var': 'from-subject-fc'},
                                update_pipeline=True,
                                confound_corr_report=False)
    tnet.load_data('tvc')
    JCv = tnet.tvc_data_[0].iloc[0].values[-1]
    if np.round(JCw, 15) != np.round((JC * r) + R, 15):
        raise AssertionError()
    if np.round(JCv, 15) != np.round((JC * r), 15):
        raise AssertionError()
    if np.round(JCm, 15) != np.round((JC) + R, 15):
        raise AssertionError()
def test_tnet_scrubbing():
    """Scrubbing with 'nan' replacement blanks exactly the flagged timepoints."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests',
                             pipeline_subdir='parcellation', bids_suffix='roi',
                             bids_tags={'sub': '001', 'task': 'a',
                                        'run': 'alpha'},
                             raw_data_exists=False)
    # Set the confound pipeline in fmriprep
    tnet.set_confound_pipeline('fmriprep')
    tnet.set_exclusion_timepoint('confound1', '>1', replace_with='nan')
    tnet.load_data('parcellation')
    nan_locations = np.where(
        np.isnan(np.squeeze(tnet.parcellation_data_[0].values)))
    expected = np.array([[0, 0, 1, 1], [4, 5, 4, 5]])
    if not np.all(expected == nan_locations):
        raise AssertionError()
def test_savesnapshot():
    """A JSON snapshot reconstructs an equivalent TenetoBIDS object."""
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='teneto-tests',
                             pipeline_subdir='parcellation', bids_suffix='roi',
                             bids_tags={'sub': '001', 'task': 'a',
                                        'run': '01'},
                             raw_data_exists=False)
    tnet.save_tenetobids_snapshot(teneto.__path__[0])
    with open(teneto.__path__[0] + '/TenetoBIDS_snapshot.json') as f:
        params = json.load(f)
    tnet2 = teneto.TenetoBIDS(**params)
    # Every attribute of the reconstructed object matches the original
    for attr in tnet2.__dict__:
        if tnet.__dict__[attr] != tnet2.__dict__[attr]:
            raise AssertionError()
    # ...and no attributes were gained or lost
    if tnet2.__dict__.keys() != tnet.__dict__.keys():
        raise AssertionError()
def test_networkmeasure():
    """Compute and reload a per-timepoint volatility measure.

    Replaces ``if not a == b`` with the idiomatic ``if a != b`` (behavior
    unchanged).
    """
    # calculate and load a network measure
    bids_path = teneto.__path__[0] + '/data/testdata/dummybids/'
    pipeline = 'teneto_' + teneto.__version__
    tags = {'sub': '001', 'task': 'a', 'run': 'alpha'}
    tnet = teneto.TenetoBIDS(bids_path, pipeline=pipeline,
                             pipeline_subdir='tvc', bids_suffix='tvcconn',
                             bids_tags=tags, raw_data_exists=False)
    tnet.networkmeasures('volatility', {'calc': 'time'}, tag='time')
    tnet.load_data('temporalnetwork', measure='volatility', tag='time')
    if tnet.temporalnetwork_data_['volatility'][0].shape != (19, 1):
        raise AssertionError()
def test_setanalysisstep():
    """set_analysis_steps replaces the step list, or appends with add_step."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'
    tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests', tasks='a',
                             raw_data_exists=False)
    # Single strings: set, then append
    tnet.set_analysis_steps('a')
    tnet.set_analysis_steps('b', add_step=True)
    assert tnet.analysis_steps == ['a', 'b']
    # A list replaces the current steps outright
    tnet.set_analysis_steps(['a', 'b'])
    assert tnet.analysis_steps == ['a', 'b']
    # A list with add_step=True extends them
    tnet.set_analysis_steps(['c', 'd'], add_step=True)
    assert tnet.analysis_steps == ['a', 'b', 'c', 'd']
def test_tnet_scrubbing_and_exclusion_options():
    """Every comparison operator accepted by the exclusion API is exercised."""
    bids_dir = teneto.__path__[0] + '/data/testdata/dummybids/'

    def fresh_tnet():
        # A new parcellation-stage object with the confound pipeline set
        tnet = teneto.TenetoBIDS(bids_dir, pipeline='teneto-tests',
                                 pipeline_subdir='parcellation',
                                 bids_suffix='roi',
                                 bids_tags={'sub': '001', 'task': 'a',
                                            'run': 'alpha'},
                                 raw_data_exists=False)
        tnet.set_confound_pipeline('fmriprep')
        return tnet

    # <=
    fresh_tnet().set_exclusion_timepoint('confound1', '<=0',
                                         replace_with='nan')
    fresh_tnet().set_exclusion_file('confound2', '<=1')
    # <
    fresh_tnet().set_exclusion_timepoint('confound1', '<0',
                                         replace_with='nan')
    fresh_tnet().set_exclusion_file('confound2', '<1')
    # >=
    fresh_tnet().set_exclusion_timepoint('confound2', '>=2',
                                         replace_with='nan')
    fresh_tnet().set_exclusion_file('confound2', '>=1')
def test_tnet_io_legacy():
    """Legacy-API pickle round-trip test.

    Renamed from ``test_tnet_io`` (pyflakes F811): the file defines that name
    twice, so only one definition was being collected by pytest.
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='fmriprep',
                             last_analysis_step='preproc', subjects='001',
                             tasks='a', runs='alpha', raw_data_exists=False)
    tnet.save_aspickle(teneto.__path__[0]
                       + '/data/testdata/dummybids/teneosave.pkl')
    tnet2 = teneto.TenetoBIDS.load_frompickle(
        teneto.__path__[0] + '/data/testdata/dummybids/teneosave.pkl')
    assert tnet2.get_selected_files() == tnet.get_selected_files()
def test_networkmeasure_legacy():
    """Legacy-API volatility computation and reload.

    Renamed from ``test_networkmeasure`` (pyflakes F811): the file defines
    that name twice, so only one definition was being collected by pytest.
    """
    # calculate and load a network measure
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='teneto_' + teneto.__version__,
                             pipeline_subdir='tvc', last_analysis_step='tvc',
                             subjects='001', tasks='a', runs='alpha',
                             raw_data_exists=False)
    tnet.networkmeasures('volatility', {'calc': 'time'}, save_tag='time')
    tnet.load_network_measure('volatility', tag='time')
    assert tnet.networkmeasure_.shape == (1, 19)
def test_tnet_set_bad_files():
    """A file whose confound exceeds the threshold is listed in bad_files.

    Replaces ``if not a == b`` / ``if not len(...) == n`` with the idiomatic
    ``!=`` form (behavior unchanged).
    """
    tnet = teneto.TenetoBIDS(teneto.__path__[0] + '/data/testdata/dummybids/',
                             pipeline='teneto-tests',
                             pipeline_subdir='parcellation', bids_suffix='roi',
                             bids_tags={'sub': '001', 'task': 'a',
                                        'run': 'alpha'},
                             raw_data_exists=False)
    # Set the confound pipeline in fmriprep
    tnet.load_data('parcellation')
    tnet.set_confound_pipeline('fmriprep')
    tnet.set_exclusion_file('confound2', '>0.5')
    if len(tnet.bad_files) != 1:
        raise AssertionError()
    expected = (tnet.BIDS_dir + 'derivatives/' + tnet.pipeline
                + '/sub-001/func/' + tnet.pipeline_subdir
                + '/sub-001_task-a_run-alpha_roi.tsv')
    if tnet.bad_files[0] != expected:
        raise AssertionError()