Example #1
        # save the cleaned run log next to the original file
        rlog.to_csv(file.parent / (file.stem + '_clean.csv'), index=False)
    
    return

#%%
if __name__ == '__main__':
    if PREPROCESSING_REQUIRED:
        robot_logs = list(ROOT_DIR.rglob('*runlog.csv'))
        
        preprocess_runlogs(robot_logs)
        update_sourceplate_files(SOURCEPLATE_FILES)
#%%
    for file in SOURCEPLATE_FILES:
        robot_metadata = merge_robot_metadata(file,
                                              saveto=None,
                                              del_if_exists=True,
                                              compact_drug_plate=True,
                                              drug_by_column=False)
        robot_metadata.sort_values(by=['source_plate_number',
                                       'destination_well'],
                                   ignore_index=True,
                                   inplace=True)
        # map each destination slot to the robot run it was imaged in
        robot_metadata['robot_run_number'] = (
            robot_metadata['destination_slot'].map(DSLOT_RUN_NUMBER_DICT))
        
        # add the unshuffled source plate as robot run number 4
        _p04 = pd.read_csv(file)
        _p04['robot_run_number'] = 4
        _p04.rename(columns={'source_well':'destination_well'},
                    inplace=True)
        robot_metadata = pd.concat([robot_metadata, _p04], axis=0)
        
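This listing begins mid-script, so the module-level configuration it relies on (ROOT_DIR, SOURCEPLATE_FILES, PREPROCESSING_REQUIRED, DSLOT_RUN_NUMBER_DICT) and the preprocessing helpers are not shown. The sketch below shows what that missing setup could look like; every path, flag and mapping in it is an illustrative assumption, not the original author's values, and update_sourceplate_files is omitted because nothing in the snippet reveals what it does.

# Hypothetical setup assumed by the snippet above (illustrative values only).
from pathlib import Path
import pandas as pd

ROOT_DIR = Path('/path/to/experiment/AuxiliaryFiles')    # assumption
SOURCEPLATE_FILES = sorted(ROOT_DIR.rglob('*_sourceplates.csv'))
PREPROCESSING_REQUIRED = True                            # assumption
DSLOT_RUN_NUMBER_DICT = {1: 1, 2: 2, 3: 3}               # assumption: slot -> run number

def preprocess_runlogs(runlog_files):
    """Sketch: tidy each robot run log and save a *_clean.csv copy next to it."""
    for file in runlog_files:
        rlog = pd.read_csv(file)
        # ...cleaning steps would go here...
        rlog.to_csv(file.parent / (file.stem + '_clean.csv'), index=False)
    return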
Example #2
"""
Created on Fri Nov 15 15:50:39 2019

@author: em812
"""

from tierpsytools.hydra.compile_metadata import merge_robot_metadata
from tierpsytools.hydra.compile_metadata import get_day_metadata
from tierpsytools import EXAMPLES_DIR
from pathlib import Path

if __name__ == "__main__":
    # Input
    data_dir = Path(EXAMPLES_DIR) / 'hydra_metadata' / 'data'
    day_root_dir = data_dir / 'AuxiliaryFiles' / 'day1'
    sourceplate_file = day_root_dir / '20191107_sourceplates.csv'
    manual_meta_file = day_root_dir / '20191108_manual_metadata.csv'
    
    # Save to
    #robot_metadata_file = day_root_dir.joinpath('20191107_robot_metadata.csv')
    metadata_file = day_root_dir / '{}_day_metadata.csv'.format(
        day_root_dir.stem)
    
    # Run
    robot_metadata = merge_robot_metadata(sourceplate_file, saveto=False)
    day_metadata = get_day_metadata(
        robot_metadata, manual_meta_file, saveto=metadata_file,
        del_if_exists=True, include_imgstore_name=True,
        raw_day_dir=data_dir / 'RawVideos' / 'day1')
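
The compiled table is saved to metadata_file, so a quick way to sanity-check the result of this example is to read the CSV back with pandas. A minimal sketch, meant to be run in the same session after the example above:

import pandas as pd

# Quick look at the compiled day metadata; metadata_file comes from the
# example above (run this in the same session).
day_metadata_check = pd.read_csv(metadata_file)
print(day_metadata_check.shape)
print(day_metadata_check.head())

Example #3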
        

        print(
            'Collating manual metadata files: {},'.format(sourceplates_file) +
            '\n'
            '{}'.format(wormsorter_file))

        bad_wells_df = convert_bad_wells_lut(bad_wells_file)

        plate_metadata = pd.merge(plate_metadata,
                                  bad_wells_df,
                                  on=['imaging_plate_id', 'well_name'],
                                  how='outer')

        # wells not listed in the bad-wells LUT are assumed good
        plate_metadata['is_bad_well'] = plate_metadata['is_bad_well'].fillna(False)

        robot_metadata = merge_robot_metadata(sourceplates_file,
                                              del_if_exists=True)
        metadata_file = day / '{}_day_metadata.csv'.format(exp_date)

        assert robot_metadata['drug_type'].isna().sum() == 0

        # plate_file = list(day.rglob("*plate_metadata.csv"))[0]
        # plate_metadata = pd.read_csv(plate_file)
        # robot_file = list(day.rglob("*robot_metadata.csv"))[0]
        # robot_metadata = pd.read_csv(robot_file)

        concat_metadata = merge_robot_wormsorter(day,
                                                 robot_metadata,
                                                 plate_metadata,
                                                 bad_wells_file,
                                                 del_if_exists=True)
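
The bad-well handling in this example reduces to an outer merge against a lookup table followed by filling unmatched rows with False. A self-contained pandas illustration of that pattern, using toy data rather than the tierpsytools file formats:

import pandas as pd

# Toy plate metadata and a bad-wells lookup table (illustrative only).
plate_metadata = pd.DataFrame({
    'imaging_plate_id': ['P01', 'P01', 'P02'],
    'well_name': ['A1', 'A2', 'A1'],
})
bad_wells_df = pd.DataFrame({
    'imaging_plate_id': ['P01'],
    'well_name': ['A2'],
    'is_bad_well': [True],
})

# The outer merge keeps every well; wells absent from the LUT get NaN,
# which is then filled with False (i.e. "not a bad well").
merged = pd.merge(plate_metadata, bad_wells_df,
                  on=['imaging_plate_id', 'well_name'], how='outer')
merged['is_bad_well'] = merged['is_bad_well'].fillna(False)
print(merged)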
Example #4
    sourceplate_files = [list(d.glob('*_sourceplates.csv'))
                         for d in day_root_dirs]

    manual_meta_files = [list(d.glob('*_manual_metadata.csv'))
                         for d in day_root_dirs]

    # Saveto
    metadata_files = [
        d.joinpath('{}_day_metadata.csv'.format(d.stem)) for d in day_root_dirs
    ]

    # Run compilation of day metadata for all the days
    for day, source, manual_meta, saveto in zip(day_root_dirs,
                                                sourceplate_files,
                                                manual_meta_files,
                                                metadata_files):
        if len(source) != 1:
            print('Did not find exactly one sourceplates file ' +
                  'in day {}. Metadata cannot be compiled.'.format(day))
            continue
        if len(manual_meta) != 1:
            print('Did not find exactly one manual_metadata file ' +
                  'in day {}. Metadata cannot be compiled.'.format(day))
            continue
        robot_metadata = merge_robot_metadata(source[0], saveto=False)
        day_metadata = get_day_metadata(robot_metadata,
                                        manual_meta[0],
                                        saveto=saveto)

    # Concatenate all metadata
    concatenate_days_metadata(aux_root_dir)
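
This example is also cut off at the top: the imports, aux_root_dir and the discovery of the per-day directories are missing. A plausible preamble is sketched below; the root path is a placeholder, the day-folder naming is an assumption, and it is assumed that concatenate_days_metadata lives alongside the other helpers in tierpsytools.hydra.compile_metadata, as merge_robot_metadata and get_day_metadata do in Example #2.

# Hypothetical preamble for Example #4 (paths and folder names are placeholders).
from pathlib import Path
from tierpsytools.hydra.compile_metadata import (
    merge_robot_metadata, get_day_metadata, concatenate_days_metadata)

aux_root_dir = Path('/path/to/experiment/AuxiliaryFiles')           # assumption
day_root_dirs = sorted(d for d in aux_root_dir.glob('day*') if d.is_dir())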