# --- Example 1 ---
def test_HDF_Datastore_Build_with_fiducialtracks():
    """The datastore build is performed successfully.
    
    """
    dbName = testDataRoot / Path('database_test_files/myDB_Build_Avg.h5')
    if dbName.exists():
        remove(str(dbName))

    parser = parsers.PositionParser(
        positionIDs={1: 'prefix', 3: 'channelID', 4: 'acqID'})
    readerDict = {
        'FiducialTracks': readers.CSVReader(),
        'AverageFiducial': readers.CSVReader()
    }

    # Directory to traverse for acquisition files
    searchDirectory = testDataRoot / Path('test_experiment_2')

    # Build datastore
    with db.HDFDatastore(dbName) as myDB:
        myDB.build(parser, searchDirectory,
                   filenameStrings={'FiducialTracks': '_Fids.dat',
                                    'AverageFiducial': '_AvgFid.dat'},
                   readers=readerDict,
                   dryRun=False)

    # Test for existence of the data
    datasetNames = ('FiducialTracks_ChannelA647',
                    'AverageFiducial_ChannelA647')
    groupKeys = ('Control/Control_1/', 'Control/Control_2/',
                 'shTRF2/shTRF2_1/', 'shTRF2/shTRF2_2/')
    idAttrs = ('SMLM_prefix', 'SMLM_acqID', 'SMLM_datasetType',
               'SMLM_channelID', 'SMLM_dateID', 'SMLM_posID',
               'SMLM_sliceID')

    with h5py.File(str(dbName), mode='r') as hdf:
        # Every acquisition group must contain both dataset types.
        for groupKey in groupKeys:
            for dsName in datasetNames:
                ok_(groupKey + dsName in hdf)

        # The ID attributes are verified on the first dataset only,
        # matching the original assertion set.
        firstKey = groupKeys[0] + datasetNames[0]
        for attrName in idAttrs:
            ok_(attrName in hdf[firstKey].attrs)

    # Remove test datastore file
    remove(str(dbName))
# --- Example 2 ---
def test_HDF_Datastore_Query_with_Localizations():
    """The datastore query is performed successfully with the datasetType.
    
    """
    dbPath = testDataRoot / Path('database_test_files/myDB_Build.h5')
    if dbPath.exists():
        remove(str(dbPath))

    parser = parsers.PositionParser(
        positionIDs={1: 'prefix', 3: 'channelID', 4: 'acqID'})

    # Directory to traverse for acquisition files
    acqDir = testDataRoot / Path('test_experiment_2')

    # Build datastore
    with db.HDFDatastore(dbPath) as myDB:
        myDB.build(parser, acqDir,
                   filenameStrings={'Localizations': '_DC.dat'},
                   dryRun=False)

    # Query after the context manager exits, exactly as the original did
    results = myDB.query(datasetType='Localizations')

    ok_(len(results) != 0, 'Error: No dataset types found in DB.')
    for ds in results:
        assert_equal(ds.datasetType, 'Localizations')

    # Remove test datastore file
    remove(str(dbPath))
# --- Example 3 ---
def test_PositionParser_ParseFilename():
    """PositionParser's full parseFilename() function works as expected.
    
    """
    parser = parsers.PositionParser(positionIDs={
        0: 'prefix',
        1: None,
        2: 'acqID'
    })

    # (filename, expected prefix, expected acqID). The middle field
    # ('Control') is dropped because its position ID is None.
    cases = [('HeLaL_Control_1.csv', 'HeLaL', 1),
             ('HeLaS_Control_2.csv', 'HeLaS', 2)]

    for fname, expPrefix, expAcqID in cases:
        inputFile = testDataRoot / Path('parsers_test_files') \
                                 / Path('SimpleParser/') / Path(fname)
        parser.parseFilename(inputFile)

        assert_equal(parser.dataset.datasetIDs['acqID'], expAcqID)
        assert_equal(parser.dataset.datasetIDs['prefix'], expPrefix)
        assert_equal(parser.dataset.datasetType, 'Localizations')
# --- Example 4 ---
def test_HDF_Datastore_WidefieldPixelSize_OMEXML_Only():
    """element_size_um is correct when only OME-XML metadata is present.
    
    """
    dbName   = testDataRoot / Path('database_test_files/myDB_Build.h5')
    if dbName.exists():
        remove(str(dbName))
    parser = parsers.PositionParser(positionIDs = {
        0 : 'prefix', 
        1 : 'channelID', 
        2 : 'acqID'})
    
    # Directory to traverse for acquisition files; this folder contains
    # OME-TIFFs without Micro-Manager metadata, so the pixel size can
    # only come from the OME-XML.
    searchDirectory = testDataRoot \
                    / Path('database_test_files/OME-TIFF_No_MM_Metadata')
    
    # Build datastore; readTiffTags makes the build parse the TIFF metadata
    with db.HDFDatastore(dbName) as myDB:
        myDB.build(parser, searchDirectory,
                   filenameStrings  = {'WidefieldImage' : '2_MMStack*.ome.tif'},
                   dryRun = False, readTiffTags = True)
    
    # Test for existence of the data
    with h5py.File(str(dbName), mode = 'r') as hdf:
        key1 = ('Cos7/Cos7_2/WidefieldImage_ChannelA647/image_data')
        ok_('Cos7/Cos7_2/WidefieldImage_ChannelA647' in hdf)
        ok_('element_size_um' in hdf[key1].attrs)
        # element_size_um is stored in zyx order (see the note in
        # test_Put_Data_kwarg_WidefieldPixelSize)
        assert_equal(hdf[key1].attrs['element_size_um'][0], 1)
        assert_equal(hdf[key1].attrs['element_size_um'][1], 0.1)
        assert_equal(hdf[key1].attrs['element_size_um'][2], 0.1)
    
    # Remove test datastore file
    remove(str(dbName))
# --- Example 5 ---
def test_PositionParser_parse():
    """PositionParser correctly parses a number of different example filenames.
    
    """
    # Each case is (filename stem, position-ID map, expected ID values).
    cases = [
        ('HeLa_2',
         {0: 'prefix', 1: 'acqID'},
         {'prefix': 'HeLa', 'acqID': 2}),
        ('HeLa_A647_2',
         {0: 'prefix', 1: 'channelID', 2: 'acqID'},
         {'prefix': 'HeLa', 'channelID': 'A647', 'acqID': 2}),
        ('2016-12-11_Cos7_A647_5_4_3',
         {0: 'dateID', 1: 'prefix', 2: 'channelID',
          3: 'posID', 4: 'sliceID', 5: 'acqID'},
         {'dateID': '2016-12-11', 'prefix': 'Cos7', 'channelID': 'A647',
          'posID': 5, 'sliceID': 4, 'acqID': 3}),
        ('HeLa_1_MMStack_0',
         {0: 'prefix', 1: 'acqID', 2: None, 3: 'posID'},
         {'prefix': 'HeLa', 'acqID': 1, 'posID': 0}),
    ]

    for filename, positionIDs, expected in cases:
        parser = parsers.PositionParser(positionIDs=positionIDs)
        parsedIDs = parser._parse(filename)

        # Every parsed ID must match its expected value.
        for key, value in parsedIDs.items():
            assert_equal(value, expected[key])
# --- Example 6 ---
def test_PositionParser_BadParse():
    """PositionParser correctly catches errors during parsing.
    
    """
    f = 'HeLaL.tif'  # No acqID; file shouldn't parse

    parser = parsers.PositionParser(positionIDs={
        0: 'prefix',
        1: None,
        2: 'acqID'
    })
    # Note: There are more position IDs than there are actual positions in f
    # NOTE(review): no exception is asserted here; presumably an
    # expected-failure decorator exists outside this view, or
    # parseFilename() is expected to handle the error internally — confirm.
    parser.parseFilename(f)
# --- Example 7 ---
def test_HDF_Datastore_Build():
    """The datastore build is performed successfully.
    
    Notes
    -----
    This also tests that the Micro-Manager metadata is read correctly to obtain
    the widefield image pixel size.
    
    """
    dbName = testDataRoot / Path('database_test_files/myDB_Build.h5')
    if dbName.exists():
        remove(str(dbName))

    parser = parsers.PositionParser(
        positionIDs={0: 'prefix', 2: 'channelID', 3: 'acqID'})

    # Directory to traverse for acquisition files
    searchDirectory = testDataRoot / Path('test_experiment')

    # Build datastore
    with db.HDFDatastore(dbName) as myDB:
        myDB.build(parser, searchDirectory,
                   filenameStrings={'WidefieldImage': '.ome.tif',
                                    'Localizations': 'locResults.dat'},
                   dryRun=False, readTiffTags=True)

    # Test for existence of the data.
    # Pixel sizes should have been obtained from Micro-Manager meta data.
    with h5py.File(str(dbName), mode='r') as hdf:
        for group in ('HeLaL/HeLaL_1', 'HeLaS/HeLaS_2'):
            ok_(group + '/Localizations_ChannelA647' in hdf)
            ok_(group + '/WidefieldImage_ChannelA647' in hdf)

            imgKey = group + '/WidefieldImage_ChannelA647/image_data'
            ok_('element_size_um' in hdf[imgKey].attrs)
            # element_size_um is stored in zyx order
            assert_equal(hdf[imgKey].attrs['element_size_um'][0], 1)
            assert_equal(hdf[imgKey].attrs['element_size_um'][1], 0.108)
            assert_equal(hdf[imgKey].attrs['element_size_um'][2], 0.108)

    # Remove test datastore file
    remove(str(dbName))
# --- Example 8 ---
def test_Put_Data():
    """The datasetType can put its own data and datasetIDs.
    
    Notes
    -----
    This also tests that the pixel size is correctly extracted from the
    Micro-Manager metadata.
    
    """
    imgPath = testDataRoot / Path('database_test_files') \
              / Path('Cos7_A647_WF1/') \
              / Path('Cos7_A647_1_MMStack_Pos0.ome.tif')

    # Bind the datastore path before the try block so the finally clause
    # cannot raise NameError (masking the real failure) if setup fails early.
    dbPath = testDataRoot / Path('test_db.h5')

    try:
        # Make up some dataset IDs and a dataset
        parser = parsers.PositionParser(positionIDs = {
                                            0 : 'prefix', 
                                            1 : 'channelID', 
                                            2 : 'acqID'})
        parser.parseFilename(str(imgPath), 'WidefieldImage')
        ds = parser.dataset
        ds.data = ds.readFromFile(str(imgPath), readTiffTags = True)

        # Remove datastore if it exists
        if exists(str(dbPath)):
            remove(str(dbPath))

        with db.HDFDatastore(dbPath) as myDB:
            myDB.put(ds)

        key = 'Cos7/Cos7_1/WidefieldImage_ChannelA647'
        with h5py.File(str(dbPath), 'r') as hdf:
            assert_equal(hdf[key].attrs['SMLM_datasetType'], 'WidefieldImage')
            # Dataset.value was deprecated and removed in h5py 3.0;
            # indexing with an empty tuple reads the full dataset instead.
            imgData = hdf[key + '/image_data'][()]

            assert_equal(hdf[key + '/image_data'].attrs['element_size_um'][0],
                         1)
            assert_equal(hdf[key + '/image_data'].attrs['element_size_um'][1],
                         0)
            assert_equal(hdf[key + '/image_data'].attrs['element_size_um'][2],
                         0)

        assert_equal(imgData.shape, (512, 512))
    finally:
        # Remove the test datastore; guard so cleanup itself cannot fail
        # when the datastore was never created.
        if exists(str(dbPath)):
            remove(str(dbPath))
# --- Example 9 ---
def test_WidefieldImage_DatasetID_Attributes():
    """Dataset IDs are written as attributes of the widefieldImage dataset.
    
    """
    # Remake the datastore
    dbName = testDataRoot / Path('database_test_files/myDB_WF_Metadata.h5')
    if dbName.exists():
        remove(str(dbName))

    # Load the widefield image and convert it to a dataset
    inputFile = testDataRoot / Path('database_test_files') \
              / Path('Cos7_A647_WF1/') \
              / Path('Cos7_A647_1_MMStack_Pos0.ome.tif')

    # Set the parser to read TiffTags
    parser = parsers.PositionParser(
        positionIDs={0: 'prefix', 1: 'channelID', 2: 'acqID'})
    parser.parseFilename(str(inputFile), 'WidefieldImage')
    ds = parser.dataset
    ds.data = ds.readFromFile(str(inputFile), readTiffTags=False)

    # Put the widefield image into the datastore
    with db.HDFDatastore(dbName) as myDB:
        myDB.put(parser.dataset)

    # Check that the dataset IDs were put correctly
    saveKey = 'Cos7/Cos7_1/WidefieldImage_ChannelA647'
    expectedIDs = {
        'SMLM_prefix': 'Cos7',
        'SMLM_acqID': 1,
        'SMLM_datasetType': 'WidefieldImage',
        'SMLM_channelID': 'A647',
        'SMLM_dateID': 'None',
        'SMLM_posID': 'None',
        'SMLM_sliceID': 'None',
    }
    with h5py.File(myDB._dsName, mode='r') as dbFile:
        # All ID attributes plus the version stamp must be present.
        for attrName in expectedIDs:
            ok_(attrName in dbFile[saveKey].attrs)
        ok_('SMLM_Version' in dbFile[saveKey].attrs)

        # Missing IDs are stored as the string 'None'.
        for attrName, expected in expectedIDs.items():
            assert_equal(dbFile[saveKey].attrs[attrName], expected)
# --- Example 10 ---
def test_Get_Data():
    """The datasetType can get its own data and datasetIDs.
    
    """
    dsID = db.DatasetID
    # Load the datastore
    imgPath = testDataRoot / Path('database_test_files') \
              / Path('Cos7_A647_WF1/') \
              / Path('Cos7_A647_1_MMStack_Pos0.ome.tif')
    img     = imread(str(imgPath))

    # Bind the datastore path before the try block so the finally clause
    # cannot raise NameError (masking the real failure) if setup fails early.
    dbPath = testDataRoot / Path('test_db.h5')

    try:
        # Make up some dataset IDs and a dataset
        parser = parsers.PositionParser(positionIDs = {
                                            0 : 'prefix', 
                                            1 : 'channelID', 
                                            2 : 'acqID'})
        parser.parseFilename(str(imgPath), 'WidefieldImage')
        ds = parser.dataset
        ds.data = ds.readFromFile(str(imgPath))

        # Remove datastore if it exists
        if exists(str(dbPath)):
            remove(str(dbPath))

        with db.HDFDatastore(dbPath) as myDB:
            myDB.put(ds, widefieldPixelSize = (0.13, 0.13))

        # Retrieve the dataset by its ID and verify every ID field
        myNewDSID = dsID('Cos7', 1, 'WidefieldImage', None,
                         'A647', None, None, None, None)
        imgDS = myDB.get(myNewDSID)
        ids   = imgDS.datasetIDs
        assert_equal(ids['prefix'], 'Cos7')
        assert_equal(ids['acqID'], 1)
        assert_equal(imgDS.datasetType, 'WidefieldImage')
        assert_equal(ids['channelID'], 'A647')
        assert_equal(ids['dateID'], None)
        assert_equal(ids['posID'], None)
        assert_equal(ids['sliceID'], None)
        assert_equal(ids['replicateID'], None)
        assert_equal(imgDS.data.shape, img.shape)
    finally:
        # Remove the test datastore; guard so cleanup itself cannot fail
        # when the datastore was never created.
        if exists(str(dbPath)):
            remove(str(dbPath))
# --- Example 11 ---
def test_Put_Data_kwarg_WidefieldPixelSize():
    """The WidefieldImage will write the correct pixel size if provided.
    
    """
    # TODO: Rewrite this test to ensure that we really overwrite the metadata
    # pixel size.
    imgPath = testDataRoot / Path('database_test_files') \
              / Path('Cos7_A647_WF1/') \
              / Path('Cos7_A647_1_MMStack_Pos0.ome.tif')

    # Bind the datastore path before the try block so the finally clause
    # cannot raise NameError (masking the real failure) if setup fails early.
    dbPath = testDataRoot / Path('test_db.h5')

    try:
        # Make up some dataset IDs and a dataset
        parser = parsers.PositionParser(positionIDs = {
                                            0 : 'prefix', 
                                            1 : 'channelID', 
                                            2 : 'acqID'})
        parser.parseFilename(str(imgPath), 'WidefieldImage')
        ds = parser.dataset
        ds.data = ds.readFromFile(str(imgPath), readTiffTags = False)

        # Remove datastore if it exists
        if exists(str(dbPath)):
            remove(str(dbPath))

        with db.HDFDatastore(dbPath) as myDB:
            myDB.put(ds, widefieldPixelSize = (0.13, 0.14))

        # Note that pixel sizes are saved in zyx order.
        # These values will be equal to 0.108, 0.108 if no widefieldPixelSize
        # is supplied because the default behavior is to read the MM or OME-XML
        # metadata.        
        key = 'Cos7/Cos7_1/WidefieldImage_ChannelA647'
        with h5py.File(str(dbPath), 'r') as hdf:
            assert_equal(hdf[key + '/image_data'].attrs['element_size_um'][0],
                         1)
            assert_equal(hdf[key + '/image_data'].attrs['element_size_um'][1],
                         0.14)
            assert_equal(hdf[key + '/image_data'].attrs['element_size_um'][2],
                         0.13)
    finally:
        # Remove the test datastore; guard so cleanup itself cannot fail
        # when the datastore was never created.
        if exists(str(dbPath)):
            remove(str(dbPath))
# --- Example 12 ---
def test_Put_WidefieldImage_TiffFile():
    """Insertion of widefield image data works when parsed as a TiffFile.
    
    """
    # Remake the datastore
    dbName = testDataRoot / Path('database_test_files/myDB_WF_Metadata.h5')
    if dbName.exists():
        remove(str(dbName))

    # Load the widefield image and convert it to an atom
    inputFile = testDataRoot / Path('database_test_files') \
              / Path('Cos7_A647_WF1/') \
              / Path('Cos7_A647_1_MMStack_Pos0.ome.tif')

    # Read TiffTags
    parser = parsers.PositionParser(
        positionIDs={0: 'prefix', 1: 'channelID', 2: 'acqID'})
    parser.parseFilename(str(inputFile), 'WidefieldImage')
    ds = parser.dataset
    ds.data = ds.readFromFile(str(inputFile), readTiffTags=True)

    # Put the widefield image into the datastore
    with db.HDFDatastore(dbName) as myDB:
        myDB.put(parser.dataset)

    # Check that the data and its metadata were put correctly
    saveKey = 'Cos7/Cos7_1/WidefieldImage_ChannelA647'
    expectedSubKeys = [
        ('/image_data',
         'Error: Could not find widefield image key.'),
        ('/OME-XML',
         'Error: Could not find OME-XML metadata.'),
        ('/MM_Metadata',
         'Error: Could not find Micro-Manager metadata.'),
        ('/MM_Summary_Metadata',
         'Error: Could not find Micro-Manager summary metadata.'),
    ]
    with h5py.File(myDB._dsName, mode='r') as dbFile:
        for subKey, errMsg in expectedSubKeys:
            ok_(saveKey + subKey in dbFile, errMsg)
# --- Example 13 ---
def test_PositionParser_GetDataset_NotInitialized():
    """PositionParser raises a not-initialized error.
    
    """
    parser = parsers.PositionParser()
    # Accessing .dataset before any file has been parsed should fail.
    # NOTE(review): no exception is asserted here; presumably an
    # expected-failure decorator exists outside this view — confirm.
    parser.dataset