def test_lochness_to_lochness_transfer_all(Lochness):
    """Compress newly created PROTECTED files, then extract and inspect them.

    Seeds ten dummy files under PHOENIX/PROTECTED, archives the new files
    with compress_new_files, and verifies the tarball round-trips by
    extracting it and showing the resulting PHOENIX tree.
    """
    print()
    protected_root = Path(Lochness['phoenix_root']) / 'PROTECTED'

    # seed PROTECTED with ten small throw-away files
    for _ in range(10):
        with tf.NamedTemporaryFile(suffix='tmp.text',
                                   delete=False,
                                   dir=protected_root) as tmp_file:
            with open(tmp_file.name, 'w') as fh:
                fh.write('ha')

    # compress everything new (general_only=False → PROTECTED included)
    with tf.NamedTemporaryFile(suffix='tmp.tar',
                               delete=False,
                               dir='.') as tmp_tar:
        compress_new_files(Lochness['lochness_sync_history_csv'],
                           Lochness['phoenix_root'],
                           tmp_tar.name,
                           False)

    show_tree_then_delete('tmp_lochness')

    # unpack the archive that compress_new_files produced and clean up
    tarball = list(Path('.').glob('tmp*tar'))[0]
    os.popen(f'tar -xf {tarball}').read()
    os.remove(str(tarball))
    show_tree_then_delete('PHOENIX')
def test_do_init(args):
    """Build a lochness template, load its config, and tear it down."""
    sync_args = SyncArgs('tmp_lochness')
    create_lochness_template(args)
    sync_args.config = args.outdir / 'config.yml'
    # set up the keyring / encryption fixture for the new template
    _ = KeyringAndEncrypt(args.outdir)
    loaded = config_load_test(sync_args.config, sync_args.archive_base)
    show_tree_then_delete('tmp_lochness')
def test_box_sync_module_protect_processed(args_and_Lochness):
    """Verify protect+processed actigraphy lands under PROTECTED only.

    Flags every actigraphy file pattern as protect=True and processed=True,
    runs a full sync, and asserts the files appear under the protected
    tree but not under the general tree.
    """
    args, Lochness = args_and_Lochness

    # mark every actigraphy pattern as protected + processed
    for study in args.studies:
        patched = []
        for pattern in Lochness['box'][study]['file_patterns']['actigraphy']:
            pattern['protect'] = True
            pattern['processed'] = True
            patched.append(pattern)
        Lochness['box'][study]['file_patterns']['actigraphy'] = patched

    for subject in lochness.read_phoenix_metadata(Lochness):
        sync(Lochness, subject, dry=False)

    for study in args.studies:
        # protected side: actigraphy must exist with multiple csv files
        protected_subject = protected_root / study / 'processed' / '1001'
        assert (protected_subject / 'actigraphy').is_dir()
        assert len(list((protected_subject / 'actigraphy').glob('*csv'))) > 1

        # general side: nothing should have been written
        general_subject = general_root / study / 'processed' / '1001'
        assert not (general_subject / 'actigraphy').is_dir()
        assert len(list(
            (general_subject / 'actigraphy/processed/').glob('*csv'))) == 0

    show_tree_then_delete('tmp_lochness')
def test_decompress_transferred_file_and_copy():
    """Decompress a previously transferred tarball into a DPACC phoenix root."""
    dest_phoenix_root = Path('DPACC_PHOENIX')
    # pick up the tarball left behind by an earlier transfer test
    transferred_tar = list(Path('.').glob('tmp*tar'))[0]
    decompress_transferred_file_and_copy(dest_phoenix_root, transferred_tar)
    show_tree_then_delete(dest_phoenix_root)
def test_sync_from_empty(Lochness):
    """Initialize StudyA metadata from scratch and run a non-dry sync.

    NOTE(review): another ``test_sync_from_empty`` is defined later in this
    file; pytest collects only the later definition, so this one is shadowed —
    consider renaming one of them.
    """
    study = 'StudyA'
    initialize_metadata(Lochness, study)
    for subject in lochness.read_phoenix_metadata(Lochness, studies=[study]):
        sync(Lochness, subject, False)
    show_tree_then_delete('tmp_lochness')
def test_create_lochness_template_for_documentation(args):
    """Create a template with documentation-style settings, then remove it."""
    # override the fixture's defaults with the documented example values
    overrides = {
        'outdir': 'lochness_root',
        'studies': ['BWH', 'McLean'],
        'sources': ['redcap', 'xnat', 'box', 'mindlamp'],
        'poll_interval': 43200,
        'det_csv': '/data/data_entry_trigger_db.csv',
        'pii_csv': '/data/personally_identifiable_process_mappings.csv',
    }
    for attr, value in overrides.items():
        setattr(args, attr, value)

    create_lochness_template(args)
    show_tree_then_delete('lochness_root')
def test_box_sync_module_missing_root(args_and_Lochness):
    """Sync with a bad box base path and assert nothing is downloaded.

    Points StudyA's box ``base`` at a non-existent location, runs a full
    sync, and verifies no actigraphy directory was created for the subject.
    """
    args, Lochness = args_and_Lochness

    # break StudyA's box root on purpose
    Lochness['box']['StudyA']['base'] = 'hahah'

    for subject in lochness.read_phoenix_metadata(Lochness):
        sync(Lochness, subject, dry=False)

    study = 'StudyA'
    subject_dir = protected_root / study / 'raw' / '1001'
    # idiom fix: `not x` instead of `x == False`
    assert not (subject_dir / 'actigraphy').is_dir()

    show_tree_then_delete('tmp_lochness')
def test_get_updated_files(Lochness):
    """Files modified within the last day must be reported by get_updated_files."""
    window_start = datetime.timestamp(
        datetime.fromtimestamp(time()) - timedelta(days=1))
    window_end = time()

    updated = get_updated_files(Lochness['phoenix_root'],
                                window_start,
                                window_end)

    # both study metadata files were (re)created during setup
    for study in ('StudyA', 'StudyB'):
        assert Path(f'PHOENIX/GENERAL/{study}/{study}_metadata.csv') in updated

    show_tree_then_delete('tmp_lochness')
def test_initialize_metadata_then_sync(args_and_Lochness):
    """Initialize metadata for every study, then sync StudyA subjects.

    Fix: the original loop also computed ``phoenix_path``, ``general_path``
    and ``metadata`` on every iteration but never used them — dead locals
    removed; the only effect of the loop is initialize_metadata.
    """
    args, Lochness = args_and_Lochness

    for study in args.studies:
        initialize_metadata(Lochness, study, 'record_id1', 'cons_date')

    for subject in lochness.read_phoenix_metadata(Lochness,
                                                  studies=['StudyA']):
        sync(Lochness, subject, False)

    show_tree_then_delete('tmp_lochness')
def test_create_lochness_template(Lochness): create_fake_rpms_repo() # create_lochness_template(args) study_name = 'StudyA' initialize_metadata(Lochness, study_name, 'record_id1', 'Consent') for subject in lochness.read_phoenix_metadata(Lochness, studies=['StudyA']): # print(subject) for module in subject.rpms: print(module) print(module) print(module) # break # break show_tree_then_delete('tmp_lochness')
def test_do_with_mindlamp(args):
    """Run the full `do` pipeline with mindlamp as the only source."""
    sync_args = SyncArgs(args.outdir)
    sync_args.update_source(['mindlamp'])

    create_lochness_template(args)
    sync_args.config = args.outdir / 'config.yml'
    _ = KeyringAndEncryptMindlamp(args.outdir)

    # register one mindlamp subject in the StudyA metadata
    phoenix_root = args.outdir / 'PHOENIX'
    mindlamp_info = {'mindlamp': {'subject_id': '1001',
                                  'source_id': 'U7045332804'}}
    initialize_metadata_test(phoenix_root, 'StudyA', mindlamp_info)

    do(sync_args)
    show_tree_then_delete('tmp_lochness')
def test_sync_from_empty(args):
    """Create an RPMS-backed template from nothing and sync StudyA.

    NOTE(review): this shadows an earlier ``test_sync_from_empty(Lochness)``
    in the same file — pytest runs only this definition; consider renaming.
    """
    args.outdir = 'tmp_lochness'
    create_lochness_template(args)
    KeyringAndEncryptRPMS(args.outdir)
    create_fake_rpms_repo()

    loaded = config_load_test(f'{args.outdir}/config.yml', '')
    study = 'StudyA'
    initialize_metadata(loaded, study, 'record_id1', 'Consent')

    for subject in lochness.read_phoenix_metadata(loaded, studies=[study]):
        sync(loaded, subject, False)

    show_tree_then_delete('tmp_lochness')
def test_compress_list_of_files(Lochness):
    """Tar up the last day's updated files and verify the archive extracts."""
    print()
    window_start = datetime.timestamp(
        datetime.fromtimestamp(time()) - timedelta(days=1))
    window_end = time()

    phoenix_root = Lochness['phoenix_root']
    recent_files = get_updated_files(phoenix_root, window_start, window_end)
    compress_list_of_files(phoenix_root, recent_files, 'prac.tar')
    show_tree_then_delete('tmp_lochness')

    # the tar must exist and extract into a PHOENIX tree
    assert Path('prac.tar').is_file()
    os.popen('tar -xf prac.tar').read()
    show_tree_then_delete('PHOENIX')
    os.remove('prac.tar')
def test_compress_new_files(Lochness):
    """compress_new_files must create both the tarball and the 'nodb' history file."""
    print()
    phoenix_root = Lochness['phoenix_root']
    compress_new_files('nodb', phoenix_root, 'prac.tar')
    shutil.rmtree('tmp_lochness')

    assert Path('prac.tar').is_file()
    assert Path('nodb').is_file()

    # show what got recorded in the fresh history file
    with open('nodb', 'r') as history:
        print(history.read())

    os.popen('tar -xf prac.tar').read()
    os.remove('nodb')
    os.remove('prac.tar')
    show_tree_then_delete('PHOENIX')
def test_box_sync_module_no_redownload(args_and_Lochness):
    """A second sync must not re-download an already-synced file.

    Compares the mtime of one actigraphy csv before and after a repeat
    sync; an unchanged mtime proves the file was not fetched again.
    """
    args, Lochness = args_and_Lochness

    for subject in lochness.read_phoenix_metadata(Lochness):
        sync(Lochness, subject, dry=False)

    sentinel = (general_root / 'StudyA' / 'raw' / '1001' / 'actigraphy'
                / 'BLS-F6VVM-GENEActivQC-day22to51.csv')
    mtime_first = sentinel.stat().st_mtime

    # run the exact same sync again
    for subject in lochness.read_phoenix_metadata(Lochness):
        sync(Lochness, subject, dry=False)
    mtime_second = sentinel.stat().st_mtime

    assert mtime_first == mtime_second
    show_tree_then_delete('tmp_lochness')
def test_box_sync_module_missing_subject(args_and_Lochness):
    """Sync with a box source id that does not exist; must not raise."""
    args, Lochness = args_and_Lochness

    keyring = KeyringAndEncrypt(args.outdir)
    box_info = {'box': {'subject_id': '1001',
                        'source_id': 'O12341234'}}

    # point every study's box metadata at the missing subject
    for study in args.studies:
        keyring.update_for_box(study)
        initialize_metadata_test('tmp_lochness/PHOENIX', study, box_info)

    for subject in lochness.read_phoenix_metadata(Lochness):
        sync(Lochness, subject, dry=False)

    show_tree_then_delete('tmp_lochness')
def test_initializing_based_on_rpms(Lochness):
    """Build metadata from a fake RPMS repo and check the row count.

    RPMS repo layout assumed by create_fake_rpms_repo::

        RPMS_PATH/
            subject01/subject01.csv
            subject02/subject02.csv
            subject03/subject03.csv
            ...
    """
    create_fake_rpms_repo()
    initialize_metadata(Lochness, 'StudyA', 'record_id1', 'Consent')

    metadata_df = pd.read_csv(
        'tmp_lochness/PHOENIX/GENERAL/StudyA/StudyA_metadata.csv')
    show_tree_then_delete('tmp_lochness')
    print(metadata_df)

    # one metadata row per fake RPMS subject
    assert len(metadata_df) == 5
def test_lochness_to_lochness_transfer(Lochness):
    """End-to-end lochness_to_lochness_transfer of fresh PROTECTED files."""
    print()
    protected_root = Path(Lochness['phoenix_root']) / 'PROTECTED'

    # seed ten dummy files for the transfer to pick up
    for _ in range(10):
        with tf.NamedTemporaryFile(suffix='tmp.text',
                                   delete=False,
                                   dir=protected_root) as tmp_file:
            with open(tmp_file.name, 'w') as fh:
                fh.write('ha')

    lochness_to_lochness_transfer(Lochness, False)

    print(os.popen('tree').read())
    shutil.rmtree('tmp_lochness')

    # confirm the produced tarball extracts into a PHOENIX tree
    tarball = list(Path('.').glob('tmp*tar'))[0]
    os.popen(f'tar -xf {tarball}').read()
    os.remove(str(tarball))
    show_tree_then_delete('PHOENIX')
def test_sync_mindlamp(args):
    """Sync a single mindlamp subject through the StudyA pipeline."""
    sync_args = SyncArgs(args.outdir)
    sync_args.studies = ['StudyA']
    sync_args.update_source(['mindlamp'])

    create_lochness_template(args)
    sync_args.config = args.outdir / 'config.yml'
    _ = KeyringAndEncryptMindlamp(args.outdir)

    # register the mindlamp subject in StudyA metadata
    phoenix_root = args.outdir / 'PHOENIX'
    mindlamp_info = {'mindlamp': {'subject_id': '1001',
                                  'source_id': 'U7045332804'}}
    initialize_metadata_test(phoenix_root, 'StudyA', mindlamp_info)

    loaded = config_load_test(sync_args.config)
    for subject in lochness.read_phoenix_metadata(loaded, sync_args.studies):
        sync(loaded, subject, False)

    show_tree_then_delete('tmp_lochness')
def test_lochness_to_lochness_transfer_receive(Lochness):
    """Produce a transfer tarball, then receive it on a DPACC-side template.

    Fix: the original bound ``config_load_test`` output to a local named
    ``lochness``, shadowing the ``lochness`` module used throughout this
    file — renamed to ``receiver_config`` for consistency.
    """
    print()
    protected_dir = Path(Lochness['phoenix_root']) / 'PROTECTED'

    # seed PROTECTED with ten throw-away files
    for _ in range(10):
        with tf.NamedTemporaryFile(suffix='tmp.text',
                                   delete=False,
                                   dir=protected_dir) as tmpfilename:
            with open(tmpfilename.name, 'w') as f:
                f.write('ha')

    # compress everything new (general_only=False → PROTECTED included)
    with tf.NamedTemporaryFile(suffix='tmp.tar',
                               delete=False,
                               dir='.') as tmpfilename:
        compress_new_files(Lochness['lochness_sync_history_csv'],
                           Lochness['phoenix_root'],
                           tmpfilename.name,
                           False)

    show_tree_then_delete('tmp_lochness')

    # sanity-check the tarball round-trips
    compressed_file = list(Path('.').glob('tmp*tar'))[0]
    os.popen(f'tar -xf {compressed_file}').read()
    os.remove(str(compressed_file))
    show_tree_then_delete('PHOENIX')

    # receiving side: fresh DPACC template picks up the transfer
    out_dir = 'DPACC'
    args = DpaccArgs(out_dir)
    create_lochness_template(args)
    update_keyring_and_encrypt_DPACC(args.outdir)
    receiver_config = config_load_test(f'{out_dir}/config.yml', '')
    lochness_to_lochness_transfer_receive(receiver_config)
    show_tree_then_delete('DPACC')
def test_create_lochness_template_multiple_study(args):
    """Template creation must handle more than one study."""
    args.studies = ['StudyA', 'StudyB']
    create_lochness_template(args)
    show_tree_then_delete('tmp_lochness')
def test_read_phoenix_data(Lochness):
    """Dump the keyring for the fixture config, then clean up the tree."""
    print_keyring(Lochness)
    show_tree_then_delete('tmp_lochness')
def test_create_lochness_template(args):
    """Smoke test: create a lochness template tree and remove it.

    NOTE(review): this re-uses the name of an earlier
    ``test_create_lochness_template(Lochness)`` in this file, so the earlier
    test is never collected by pytest — rename one of the two.
    """
    create_lochness_template(args)
    show_tree_then_delete('tmp_lochness')
def test_do_with_lochness_sync_receive(syncArgsForLochnessSync):
    """Run `do` in lochness-sync receive mode."""
    sync_args = syncArgsForLochnessSync
    sync_args.lochness_sync_receive = True
    do(sync_args)
    show_tree_then_delete('tmp_lochness')
def test_do_with_lochness_sync_send(syncArgsForLochnessSync):
    """Run `do` in lochness-sync send mode.

    This module fails outside BWH firewall.
    """
    sync_args = syncArgsForLochnessSync
    sync_args.lochness_sync_send = True
    do(sync_args)
    show_tree_then_delete('tmp_lochness')
def test_do_REDCap(syncArgsForLochnessSync):
    """Run `do` with redcap as the only configured source."""
    sync_args = syncArgsForLochnessSync
    sync_args.update_source(['redcap'])
    do(sync_args)
    show_tree_then_delete('tmp_lochness')
def test_using_sync_do_send(Lochness):
    """Drive `do` with a hand-built SyncArgs in send mode."""
    send_args = SyncArgs('tmp_lochness')
    send_args.lochness_sync_send = True
    do(send_args)
    show_tree_then_delete('tmp_lochness')