Beispiel #1
0
def test_carina():
    """End-to-end check on the bundled Carina image.

    Loads the test TIFF, generates sliding-window cutouts, fingerprints
    them with the ResNet calculator, and computes a t-SNE similarity,
    asserting known values along the way.
    """

    # Load the test image that ships alongside this test module.
    data_dir = os.path.dirname(os.path.abspath(__file__))
    image_data = Data(location=os.path.join(data_dir, 'data/carina.tiff'),
                      radec=(10.7502222, -59.8677778),
                      meta={},
                      processing=[])
    image_data.get_data()

    # Register the image in a data collection.
    dc = DataCollection()
    dc.add(image_data)
    assert len(dc) == 1

    # Slide a 224x224 window across the image in 550-pixel steps.
    generator = BasicCutoutGenerator(output_size=224,
                                     step_size=550)
    cc = CutoutCollection()
    for cutout in generator.create_cutouts(image_data):
        cc.add(cutout)
    assert len(cc) == 35

    # Spot-check the top-left 2x2 pixel block of the first cutout.
    expected = np.array(
        [[[51, 66, 69], [50, 70, 78]], [[48, 66, 72], [49, 65, 72]]],
        dtype=np.uint8)
    assert np.allclose(cc[0].get_data()[:2, :2], expected)

    # Fingerprint every cutout with the ResNet calculator.
    fc = FingerprintCollection()
    fc_save = FingerprintCalculatorResnet().save()
    for fingerprint in fingerprint_calculate(cc, fc_save):
        fc.add(fingerprint)

    # The first cutout's top-3 prediction labels are known.
    top3 = [x[1] for x in fc[0].predictions[:3]]
    assert top3 == ['hammerhead', 'stingray', 'binder']

    # The similarity computation should complete without raising.
    similarity_tsne = similarity_calculate(fc, 'tsne')

    assert True
Beispiel #2
0
def test_end2end():
    """Round-trip check: a Similarity rebuilt from its saved form must
    serialize back to exactly the same JSON as the original."""

    # Load the bundled Carina test image.
    here = os.path.dirname(os.path.abspath(__file__))
    image_data = Data(location=os.path.join(here, 'data/carina.tiff'),
                      radec=(10.7502222, -59.8677778),
                      meta={},
                      processing=[])
    image_data.get_data()

    # Register the image in a data collection.
    dc = DataCollection()
    dc.add(image_data)

    # Generate sliding-window cutouts (224x224 window, 550-pixel step).
    cutout_gen = BasicCutoutGenerator(output_size=224,
                                      step_size=550)
    cc = CutoutCollection()
    for cutout in cutout_gen.create_cutouts(image_data):
        cc.add(cutout)

    # Fingerprint every cutout with the ResNet calculator.
    fc = FingerprintCollection()
    calculator_save = FingerprintCalculatorResnet().save()
    for fingerprint in fingerprint_calculate(cc, calculator_save):
        fc.add(fingerprint)

    # Compute the similarity, save it, and rebuild it via the factory.
    similarity_tsne = similarity_calculate(fc, 'tsne')
    new_similarity_tsne = Similarity.factory(similarity_tsne.save())

    # Both objects must serialize to identical JSON.
    original_json = json.dumps(similarity_tsne.save(), sort_keys=True)
    rebuilt_json = json.dumps(new_similarity_tsne.save(), sort_keys=True)
    assert rebuilt_json == original_json
Beispiel #3
0
from transfer_learning.fingerprint.processing import FingerprintCalculatorResnet
from transfer_learning.fingerprint.processing import calculate as fingerprint_calculate
from transfer_learning.similarity.similarity import calculate as similarity_calculate
from transfer_learning.data import Data, DataCollection
from transfer_learning.cutout import CutoutCollection
from transfer_learning.cutout.generators import BasicCutoutGenerator

# Read runtime settings from the local config file.
# NOTE(review): ConfigParser and glob are presumably imported earlier in the
# full script -- they are not visible in this chunk.
config = ConfigParser()
config.read('config.ini')

#
# Load the data
#

print('Going to load the HST Heritage data')
data = DataCollection()
for filename in glob.glob('../../data/heritage/*.???'):

    print('   adding data {}'.format(filename))
    # NOTE(review): the same radec (-32, 12) is applied to every image --
    # looks like a placeholder position; confirm it is intentional.
    image_data = Data(location=filename, radec=(-32, 12), meta={})
    data.add(image_data)

#
# Create the cutout generator.
#

print('Going to calculate the sliding window cutouts')
# 224x224 windows stepped every 112 pixels (i.e. 50% overlap).
sliding_window_cutouts = BasicCutoutGenerator(output_size=224, step_size=112)

#
#  Create the cutouts
Beispiel #4
0
print('Loading the Hubble meta data and location information')
# Use a context manager so the pickle file handle is closed promptly
# (the original `pickle.load(open(...))` never closed the file).
# NOTE: pickle is only safe here because the .pck file is a local,
# trusted artifact -- never unpickle untrusted data.
with open('../../data/hubble_acs.pck', 'rb') as fh:
    processing_dict = pickle.load(fh)

#
# Setup an image processing step on the data. This will convert
# any data that is not gray scale into gray scale.
#

gray_scale = image_processing.GrayScale()

#
#  Now create the actual data and add to the data collection.
#

print('Setting up the data structure required')
data_collection = DataCollection()
# Fixed seed -> the same reproducible 200-image sample on every run.
np.random.seed(12)
for fileinfo in np.random.choice(processing_dict, 200, replace=False):
    im = Data(location=fileinfo['location'],
              radec=fileinfo['radec'],
              meta=fileinfo['meta'])
    im.add_processing(gray_scale)

    # Add to the data collection
    data_collection.add(im)

#
#  Create cutout pre-processing steps, which for this,
#  is just crop and resize.
#
Beispiel #5
0
print('Loading the processing dict')
# Use a context manager so the pickle file handle is closed promptly
# (the original `pickle.load(open(...))` never closed the file).
with open('../../data/hubble_acs.pck', 'rb') as fh:
    processing_dict = pickle.load(fh)

#
# Create the data pre-processing.
#

resize_224 = Resize(output_size=(224, 224))

#
#  Create the datasets
#

print('Creating data objects')
data = DataCollection()
# Only the first 20 entries are used for this example.
for fileinfo in processing_dict[:20]:
    im = Data(location=fileinfo['location'],
              radec=fileinfo['radec'],
              meta=fileinfo['meta'])
    im.add_processing(resize_224)
    # BUGFIX: DataCollection is populated via ``add`` everywhere else in
    # these examples; ``append`` is not part of its interface.
    data.add(im)

#
#  Create cutout generator
#

print('Creating the cutout generator')
full_cutout = FullImageCutoutGenerator(output_size=(224, 224))

#
Beispiel #6
0
#
# Load the data
#

print('Going to load the carina data')
# Carina Nebula test image; radec gives its approximate sky position
# in decimal degrees.
image_data = Data(location='../../data/carina.tiff',
                  radec=(10.7502222, -59.8677778),
                  meta={},
                  processing=[])
# NOTE(review): presumably get_data() forces the pixels to be read from
# disk (and cached) up front -- confirm against the Data implementation.
image_data.get_data()

#
# Add to the data collection
#

dc = DataCollection()
dc.add(image_data)

#
#  Create the sliding window cutout generator.
#

print('Creating cutout generator')
# 224x224 windows stepped every 100 pixels.
sliding_window_cutouts = BasicCutoutGenerator(output_size=224, step_size=100)

#
#  Create the cutouts using a sliding window cutout generator.
#

print('Creating cutouts')
cutouts = sliding_window_cutouts.create_cutouts(image_data)