Esempio n. 1
0
                                 OUTER_ANNULUS_RADIUS), 'stars'),

    # shrink the images for viewing and make them normalized to 8bit
    'gray': (ip.image.NormDtype(np.uint8), 'stacked'),
    'enhanced': (ip.image.LinearHistEnhance(0, 0.1), 'gray'),
    'color_display': (ip.image.Gray2RGB(), 'enhanced'),
    'source_outlined':
    (ip.astro.DrawSourceOutlines(radius=20,
                                 thickness=4), 'color_display', 'stars'),
    'numbered_sources': (ip.image.NumberImage(), 'source_outlined'),
    'viewable': (ip.image.Resize(scale_w=0.5,
                                 scale_h=0.5), 'numbered_sources'),
    # display the sources in the image
    'null.1': (ip.image.QuickView(100), 'viewable'),
}
star_judge = ip.Pipeline(find_reliable_stars, name='StarJudge')
processed = star_judge.process(filenames)

aper_phot_tasks = {
    # inputs
    # 'headers': ip.Input(0),
    'images':
    ip.Input(0),
    'apertures':
    ip.Input(1),
    'annuli':
    ip.Input(2),
    #  integrate stellar counts
    ('counts', 'count_errs'):
    (ip.astro.AperturePhotometry(), 'images', 'apertures', 'annuli'),
    # calculate instrumental magnitudes
Esempio n. 2
0
import imagepypelines as ip
import cv2
import time
import os

# Assemble the individual processing stages:
# camera capture -> PNG encoding -> byte stream -> FTP upload.
capture_stage = ip.blocks.CameraBlock(device="/dev/video0", mode='count')
encode_stage = ip.blocks.PngCompress()
stream_stage = ip.blocks.Array2Stream()
# Credentials come from the environment; the username here is redacted.
upload_stage = ip.blocks.FTP(host="ftp.jeffmagg.io",
                             user="******",
                             passwd=os.environ["FTP_PASS"])

# Chain the stages into one linear pipeline.
pipeline = ip.Pipeline([capture_stage, encode_stage, stream_stage, upload_stage])

# Capture and upload one image, then wait five seconds — forever.
while True:
    pipeline.process([1])
    time.sleep(5)
Esempio n. 3
0
def testcore():
    """Smoke-test the core imagepypelines API.

    Exercises, in order:

    1. ``@ip.blockify`` decoration of plain functions,
    2. ``Pipeline`` construction from a raw task dict,
    3. re-construction from ``get_tasks()``,
    4. save/load round-tripping with a password + checksum,
    5. shallow copy (shared blocks) and deep copy (independent blocks).

    Each stage asserts that processed outputs are identical and that every
    new pipeline gets a fresh ``uuid``.

    NOTE: leaves ``pipeline.pck`` behind in the current working directory.
    """
    import imagepypelines as ip
    # ###############################################################################
    #                                 General Testing
    # ###############################################################################
    # Blockify testing — each block adds/subtracts a fixed value to both inputs.
    @ip.blockify(types={'a': int, 'b': int}, kwargs=dict(value=10))
    def add_val(a, b, value):
        return a + value, b + value

    @ip.blockify(types={'a': int, 'b': int}, kwargs=dict(value=5))
    def minus_val(a, b, value):
        return a - value, b - value

    # Task names encode the expected scalar results (e.g. 'ten' = 0 + 10).
    tasks = {
        # inputs
        'zero': ip.Input(0),
        'one': ip.Input(1),
        # operations
        ('ten', 'eleven'): (add_val, 'zero', 'one'),
        ('twenty', 'eleven2'): (add_val, 'ten', 'one'),
        ('fifteen', 'six'): (minus_val, 'twenty', 'eleven'),
        ('twentyfive', 'twentyone'): (add_val, 'fifteen', 'eleven2'),
        ('negativefour', 'negativefive'): (minus_val, 'one', 'zero'),
    }

    ################################################################################
    # PIPELINE CONSTRUCTION FROM TASKS
    print('RAW CONSTRUCTION')
    pipeline1 = ip.Pipeline(tasks, 'Pipeline1')
    # pipeline1.draw(show=True)

    processed1 = pipeline1.process([0, 0], [1, 1])
    # print(processed1)

    print('types:', pipeline1.types)
    print('shapes:', pipeline1.shapes)
    print('containers:', pipeline1.containers)

    ################################################################################
    # PIPELINE2 CONSTRUCTION FROM get_tasks()
    # (fixed typo: was 'CONSRUCTION')
    print('CONSTRUCTION FROM get_tasks()')
    static_constructor = pipeline1.get_tasks()

    pipeline2 = ip.Pipeline(static_constructor, name="Pipeline2")
    # second positional input can also be passed by task name as a kwarg
    processed2 = pipeline2.process([0, 0], one=[1, 1])

    assert processed1 == processed2
    # a re-constructed pipeline must get its own identity
    assert pipeline1.uuid != pipeline2.uuid

    ################################################################################
    # SAVING AND LOADING CHECK
    print("SAVING AND LOADING")
    checksum = pipeline2.save("pipeline.pck", "password")
    pipeline3 = ip.Pipeline.load("pipeline.pck",
                                 "password",
                                 checksum,
                                 name="Pipeline3")

    processed3 = pipeline3.process([0, 0], one=[1, 1])
    assert processed1 == processed3
    assert pipeline2.uuid != pipeline3.uuid

    ################################################################################
    # COPY CHECK
    print('SHALLOW COPY')
    pipeline4 = pipeline3.copy("Pipeline4")
    assert pipeline3.uuid != pipeline4.uuid

    # shallow copy shares the underlying block objects
    assert pipeline4.blocks == pipeline4.blocks.intersection(pipeline3.blocks)

    ################################################################################
    # DEEP COPY CHECK
    print('DEEP COPY')
    pipeline5 = pipeline4.deepcopy("Pipeline5")
    assert pipeline4.uuid != pipeline5.uuid

    # deep copy duplicates every block, so the sets must be disjoint
    assert len(pipeline5.blocks.intersection(pipeline4.blocks)) == 0
Esempio n. 4
0

# Imports hoisted from mid-script to the top of this section (PEP 8 / notebook
# hygiene: keep imports together instead of scattering them after the loop).
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches

# Pipeline: convolve two synthetic image profiles with a line-spread function.
tasks = {
    # inputs
    'f[x]_range': ip.Input(0),
    'lsf_range': ip.Input(1),
    'b': ip.Input(2),
    # processing
    'l[x]': (lsf, 'lsf_range'),
    'f[x]_1': (image1, 'f[x]_range'),
    'f[x]_2': (image2, 'f[x]_range', 'b'),
    'g[x]_1': (convolve1d, 'f[x]_1', 'l[x]'),
    'g[x]_2': (convolve1d, 'f[x]_2', 'l[x]'),
}

pipeline = ip.Pipeline(tasks)

# Sampling grids: LSF evaluated on [-10, 10], images on [0, 100].
lsf_range = np.arange(-10, 11, 1).astype(np.float64)
image_range = np.arange(0, 101, 1).astype(np.float64)

# Sweep the 'b' parameter and collect (f1, g1, f2, g2, b) for each run.
outs = []
for b in [0.01, 0.02, 0.03, 0.05, 0.07, 0.1]:
    out = pipeline.process([image_range], [lsf_range], [b])
    f1, g1, f2, g2 = out['f[x]_1'], out['g[x]_1'], out['f[x]_2'], out['g[x]_2']
    outs.append([f1, g1, f2, g2, b])

# Shared legend handles for the plots below.
red = mpatches.Patch(color='r', label='g[x]')
blue = mpatches.Patch(color='b', label='f[x]')
Esempio n. 5
0
# Pull in the optional image-processing plugin before using any ip.image blocks.
ip.require("image")

# Grab the package-level logger once instead of re-fetching it on every call;
# level 10 corresponds to logging.DEBUG.
logger = ip.get_master_logger()
logger.setLevel(10)
logger.debug("defining our tasks")

tasks = {
    'geckos': ip.Input(0),
    # normalize the inputs
    'float_geckos': (ip.image.CastTo(np.float64), 'geckos'),
    'normalized_geckos': (ip.image.NormAB(0, 255), 'float_geckos'),
    'display_safe': (ip.image.DisplaySafe(), 'normalized_geckos'),
    # split into RGB channels
    ('red', 'green', 'blue'): (ip.image.ChannelSplit(), 'display_safe'),
}

logger.debug("make our pipeline")
pipeline = ip.Pipeline(tasks, name='Lenna')

# Batch of ten sample gecko images (only used by the commented-out call below).
geckos = [ip.image.gecko() for _ in range(10)]
# processed = pipeline.process(geckos)

logger.debug("processing our data")
# A random single-channel float array presumably fails the pipeline's type
# enforcement; the BlockError is swallowed on purpose — the failure IS the demo.
try:
    bad_processed = pipeline.process([np.random.rand(512, 512)])
except ip.BlockError:
    pass

# ip.get_master_logger().debug("this will now not throw an error")
# processed = pipeline.process([np.random.rand(512,512)], skip_enforcement=True )