Example #1
 def test_12_00_load_version_0(self):
     data = ('eJzr9HBx4+WS4mIAAQ4OBhYGAQZk8B8KTiig8mHyCVCaEUp3QOkVTDBxRrCc'
             'BFRcEGo+urqQIFdXkOr/aABmzwNWCM3BMApGIvBwdQwA0RFQPiw9nWBCVeeb'
             'mlhcWpSam5pXUgziw9KlA5n24kq/HswQfgY0XRJKvy84IfRo+h2ZAD39wkrZ'
             'DlZUdZ65iempDIh060GhvcF+/i6gFAxLdwrMFBo4QgGuciCAHcIvgOZvQuXA'
             'B14IDTNnFIwsgF4OKEDpCZyo6sDlgF9pblJqEZiflp8PpmHlggWJ9qKXAxrs'
             'JBowCsAAVzkQwQ3hV0DzN6FywANqAGprexSMFICrPTuDF1VdSmJJIojOzEtJ'
             'rWCgvD0LKQeY4OWAATd+9TB3LZAi08JhChgZ2CBxAY0QVmCIgphMTExgEWZg'
             'zgYxBIABzQJWoQDVB8nxjIwwcyCAAxjSIDYzowe0SGBBsU8IGmMgNbzNhn6I'
             'HvTgAIwMCL+AaEsDVP4oGAWjYBSMglEwCoYfQG9XWvDjVw/r90yQpKWrhh4g'
             'v10JaakzwuddIAA23IdoX/pAg54HxV7k9qU8uH3ZQEVfkQ9w9bdPQPsjL2Qg'
             'NKH+doEchB7tb49MgKu/zSGLqo7a/W3a9RMjCPYTwwdhPxG9nkiQxascnl93'
             'yNPSVUMP0L6eiCFYT+QMonoCAIv/qlY=')
     data = zlib.decompress(base64.b64decode(data))
     fid, name = tempfile.mkstemp(".h5")
     m = None
     try:
         fd = os.fdopen(fid, "wb")
         fd.write(data)
         fd.close()
         m = cpmeas.load_measurements(name)
         self.assertEqual(tuple(m.get_image_numbers()), (1,))
         self.assertEqual(m.get_measurement(cpmeas.IMAGE, "foo",
                                            image_set_number = 1), 12345)
     finally:
         if m is not None:
             del m
         os.unlink(name)
 def test_12_01_load_version_1(self):
     data = (
         "eJzt3E9sFFUcwPH3Zrft0lIpDUGCBWqiUg8124ZAQQslUqwxtkSQlAvplu7a"
         "Ymlr/yQ9mWKIKZgoRw5GS2JMI2rqQSFeEE94MZyM8WBKwMDBIkFDMGDrzr73"
         "dneGzi4sq7vd/X44/Hbe/HZmOrx582aT+R1v27W7snxtubAFAsIvqkSyBW3/"
         "UelYNuu7dJQ6Tuo4bZl2GVu3Rrev1Nt35+17rbXVzl5wMfsJ+lUMCBSjttad"
         "e+zYqZdrdbxoOfP2h4dH+gYHxKvh0MjYcPhIeGB0xG43/bPpIfcrRan6rt5A"
         "SbQH2h8ty4q1+KL92f5QFe2Y/qQjk7qfS2m2owSiV4L92Se7dFf2O/ZXrXu4"
         "nfPr8Yb2xJWSH/a2d+ySwoqPEuMlqfPN9Vrr+y+PaunxGhen9Pn8rkzFdOPi"
         "nWoVGReLk9e4OFvmzGsMNjTUB7fUNzbVNzTaHzY1xtozHxcX779demCYqDZ5"
         "0THi4NwFWSnEcmvq7vzpucsT9+zWK+U75oX/czH7ijjstbUvdbf+Uc1P0l4N"
         "+5er6JzDoFi4rwbTn26UO/N6QqMhO/YN9ITHReIqaMlwv1KULT5L8Ik0swR1"
         "hFJPE8xkxvTriuh1aa+ReoUV264UI71jkUh/OD53kLq9JxzpD40m2uOzDcvq"
         "/FC3+ZOmHMmzDd8Je7aR6RlwMrMEc1faXZE631yvH1VmZfcFw3P2mbZf1alo"
         "OfuVmYSZ/mWWE/3EV9ut2/wqPbbZ+/vJUJb/0gcTu5Msi36wpq4vXH/72C+l"
         "0c9NWzrmRUl01cUnpQgGxMxL32+v+G3yxEarf/j8+qF9V2uit6UzpU81yLDn"
         "0+gnann1ChXd40hQx80rnMfTHRqOxZePhN4It48d6Q6r5Y7uw+FDo4kGM77U"
         "pfn7vI5v+lO1/I3H8Zn8n1zHl+1xzj7JDStFbUC0bH3vqwMnjz3z1sZ37n3w"
         "8bWr1u5vXz+7Ifji7+v+ERPnm5uX+VXfSnfHFo+rwB27OHn15KlqZ17sCovG"
         "9rFD/eG+R+/J6g4l43eoJh6gMuI1YvWuUsuTq1VMNw4EntDxfzpu5Bev59jp"
         "1c68yOBgLLrvuJk+x7pnqi2rUueb+9SNxx5yRwXOaxwY0g0n16qYbhzoWqci"
         "84Hi5DUfmFnrzMv2zNb9u/a7aW5EZnXbmpRpRWcJ/BJy84Buy8UvIXtqUueb"
         "/je9Piu7Lxg5+CXk3Abdlo+/hNj3UbnYfTSY+ns3n3U16CvA51wEAABAgZBJ"
         "0UqKAAAAAAAAAAAAAAAAAAAAAAAgf3i9N/q0R/4P7joXfkcAAAAFKNM6E539"
         "rgbqTAAAABQ06kwAAAAAAAAAAAAAAAAAAAAAAJD/vN4brfHI7wm4GqgzAQBA"
         "wbPnC2KxOhPaxWkV3dMEFId0/aP2jIpVuTpA5NTe9o5dUvji//+zf6TOr9Vx"
         "qEmmSouPN++veISDW0KkKFPvbuvTUhI9A7F3t32qxT7D9oeqgHk2q9XfW6Oi"
         "VF80z23mOq0QTbHvSf0SuBXbrhQjvWORSH84ni91e0840h8aTbQHotu3v++z"
         "rJlrus2f9HhYrf+n7JwdJxrahWjJyvlQ/cqK94Oqz1Lnm/7X+3lWdl8wpCjN"
         "sF/VqWipL5oaAqaOlOlfZjnRT3zjX+g2v0qPbfb+fjKU5b/0wWRaV2vmrKuB"
         "uloAAAAFzUqKpqYWcz4AAAAAAAAAAAAAAAAAAAAAAPKL13uj9R75z7kLIlBX"
         "CwCAgidj/4SuziLESl2hJHBTLdfdUjFdfaWZP1WkvlJxamvduceOnXrZ9Ke2"
         "W868ntBoyI59Az3hcREv95JxNZ4lUI+oJaiPLRf1iKZupUyPX693/srK7gtG"
         "DuoRBW7rtkKqR9QSv8I16hEBAAAUNOoRAQAAAAAAAAAAAAAAAAAAAACQ/7ze"
         "G13nkV9zdPH3RXlnAACAwuVVj+hUk5oXnNumYrp6RC0vqDzqERUnr3pEP29z"
         "zi+LsB7RpfEc1iMKPi9T5pvrdbI5dV6xyUE9olPbE/2kYOoRXWqjHhEAAEAx"
         "oR4RAAAAAAAAAAAAAAAAAAAAAAD572HrEW2lHhEAAEVn/ODcBVkpxHJr6u78"
         "6bnLE/fs+cCV8h3zwp+07s2FS7fvTGzeGl03+7Uc/FuU3bdyU7NeeVuU/Qv/"
         "J+C3"
     )
     data = zlib.decompress(base64.b64decode(data))
     fid, name = tempfile.mkstemp(".h5")
     m = None
     try:
         fd = os.fdopen(fid, "wb")
         fd.write(data)
         fd.close()
         m = cpmeas.load_measurements(name)
         self.assertEqual(tuple(m.get_image_numbers()), (1, 2))
         self.assertEqual(m.get_measurement(cpmeas.IMAGE, "foo", image_set_number=1), 12345)
         self.assertEqual(m.get_measurement(cpmeas.IMAGE, "foo", image_set_number=2), 23456)
         for image_number, expected in ((1, np.array([234567, 123456])), (2, np.array([123456, 234567]))):
             values = m.get_measurement("Nuclei", "bar", image_set_number=image_number)
             self.assertEqual(len(expected), len(values))
             self.assertTrue(np.all(expected == values))
     finally:
         if m is not None:
             del m
         os.unlink(name)
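The two tests above share one pattern: a zlib-compressed, base64-encoded HDF5 measurements file is embedded in the test, decoded to bytes, written to a temporary .h5 file, and handed to cpmeas.load_measurements. A minimal sketch of that round trip, assuming the same base64/zlib/tempfile/os/cpmeas imports as the tests; data stands in for one of the embedded blobs above:

    # Sketch only: decode the embedded blob, write it to a temporary .h5 file,
    # load it back as a Measurements object, then clean up the file.
    raw = zlib.decompress(base64.b64decode(data))
    fid, name = tempfile.mkstemp(".h5")
    try:
        fd = os.fdopen(fid, "wb")
        fd.write(raw)
        fd.close()
        m = cpmeas.load_measurements(name)
        print tuple(m.get_image_numbers())   # (1,) for the version-0 blob above
    finally:
        os.unlink(name)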
Example #4
    def run_with_yield(self):
        # this function acts like a CP pipeline object, allowing us to
        # use the same code path as a non-distributed computation for
        # tracking results and updating the GUI.

        # Returned results are combined as they are received in
        # an HDF5 dict via self.measurements instance
        jobs_finished = 0
        while True:
            finished_job = self.work_server.fetch_result()
            if finished_job is not None:
                if finished_job['pipeline_hash'][0] == self.pipeline_blob_hash:

                    #Read data, write to temp file, load into HDF5_dict instance
                    raw_dat = finished_job['measurements'][0]
                    meas_str = zlib.decompress(raw_dat)

                    temp_hdf5 = tempfile.NamedTemporaryFile(dir=os.path.dirname(self.output_file))
                    temp_hdf5.write(meas_str)
                    temp_hdf5.flush()

                    curr_meas = cpmeas.load_measurements(filename=temp_hdf5.name)
                    self.measurements.combine_measurements(curr_meas, can_overwrite=True)
                    del curr_meas

                    jobs_finished += 1

                    if self.status_callback:
                        self.status_callback(self.total_jobs, jobs_finished)
                else:
                    # out of date result?
                    print "ignored mismatched pipeline hash", finished_job['pipeline_hash'][0], self.pipeline_blob_hash
            else:
                # pretend to be busy
                time.sleep(0.1)

            if jobs_finished == self.total_jobs:
                # when finished, stop serving
                self.stop_serving()
                return

            # this is part of the pipeline mimicry
            if self.frame:
                post_module_runner_done_event(self.frame)

            # continue to yield None until the work is finished
            yield None
 def test_08_01_load(self):
     data = ('eJzt3M1LFGEcwPFnZtZ2VcSXS29Gewm8BJu9KHgxUtvAl6UX8GDpmmMWauIL'
             'SPQH6M2jl6Bb0qGgCK92M+rWRZAg6BJB4LFL2D7zPM/uztS4smtYu9/P5fH5'
             'zTPP7M7+5pnxML/lZFdPXc2JGiHFYiIiGkS+XW31jb9vto/o1tLtkm7XbBO3'
             'vG3HdLxRzx8cd/N6d7ccvRtgjvO5SrUxgUqU7L6cku2g7pt82rT94/rc9NzC'
             'rDvlTs/Pyb7Jy84ijxuWv0lH9Sd0XhbK36/VqiV/K1Mwf80qu1TlH3dtKn3P'
             'Fbm8TZZ43Bv9A10yg03exZ0SJ6xQYetAKqr6M/r6LrQO7NSp1syDyhJcB1p0'
             'u1LtH+etA/0LU6PurNfvc+fTY+n59HBqMj3v5taHxD6PG1wHWqLFfoPKFrYO'
             'DNaq/qK+vgutA0l9A/A/baNShD3Prtb5x8lrXrb3p8fcRVH686xaB+zsOpCo'
             '3Xu8+Vyx20UesExZIqp+C/2DVGXOqPzTdlTEyVzZ8o+GzImOeCPiej91Ri1L'
             '7Wgex8z6UCvavf0s/X+N7c1ribmJhfHxSVfPJbyVXMbH3HHvhmDiscz8cn/H'
             'trd6dSwisptFk/7l5Zi+nub+4jPJL5hX7fV7jzfr3krjgRy+bFjiSJF5pZ4k'
             'LNufV8H8Mv1cnjhPm3QsooZ70/6eJzMH/E33R95HrT/dRws8+Gw1BwL6CnD8'
             'XQAAAJQJK6+181onrwUAAAAAAAAAAAAAAAAAAAAAAIcr7L3RUyHjY7cCgYiv'
             'AQAAZWjxzve3Vl3mOcCJv/zZsf3FedWWiXauRx/+ENHMxs11S8RjIvF+Y/dk'
             '/c6zjtNr71aXZCmXje3W10d3lp9/04X8hh68+CRr6ix+sB49dj4e4lcCAAAF'
             'hNV1TOi6d6lh1Raq67g1olrqOlamsLqOM8P+cQdd17FwPUJV7+24pXKzITvU'
             '1HkrrS6hyfd91CWceKJjkbzLJL/e3JBXb669yDOhBOsRbg7vOTz7QeKjJR22'
             '7BxCPcLEXR0Lq0c49B/WI5yYCgSoRwgAAFDWqEcIAAAAAAAAAAAAAAAAAAAA'
             'AMC/L+y90Y6Q8WdC6hFSVwAAgPJ19cpAr/fOoL7hm/cHo7pNnT3Xev7CRWH/'
             'FpfhS9n3CXNxGW7Lzr9W/5c+OAAAAAAA2LdfZABKkA==')
     data = zlib.decompress(base64.b64decode(data))
     fd, filename = tempfile.mkstemp('.h5')
     try:
         f = os.fdopen(fd, "wb")
         f.write(data)
         f.close()
         m = cpmeas.load_measurements(filename)
         for i in range(1, 4):
             self.assertEqual(
                 m.get_measurement(cpmeas.IMAGE, 'ImageNumber', i), i)
         for i, plate in enumerate(('P-12345', 'P-23456', 'P-34567')):
             self.assertEqual(
                 m.get_measurement(cpmeas.IMAGE, 'Metadata_Plate', i + 1),
                 plate)
     finally:
         try:
             os.unlink(filename)
         except:
             print "Failed to remove file %s" % filename
Example #6
 def test_08_01_load(self):
     data = ('eJzt3M1LFGEcwPFnZtZ2VcSXS29Gewm8BJu9KHgxUtvAl6UX8GDpmmMWauIL'
             'SPQH6M2jl6Bb0qGgCK92M+rWRZAg6BJB4LFL2D7zPM/uztS4smtYu9/P5fH5'
             'zTPP7M7+5pnxML/lZFdPXc2JGiHFYiIiGkS+XW31jb9vto/o1tLtkm7XbBO3'
             'vG3HdLxRzx8cd/N6d7ccvRtgjvO5SrUxgUqU7L6cku2g7pt82rT94/rc9NzC'
             'rDvlTs/Pyb7Jy84ijxuWv0lH9Sd0XhbK36/VqiV/K1Mwf80qu1TlH3dtKn3P'
             'Fbm8TZZ43Bv9A10yg03exZ0SJ6xQYetAKqr6M/r6LrQO7NSp1syDyhJcB1p0'
             'u1LtH+etA/0LU6PurNfvc+fTY+n59HBqMj3v5taHxD6PG1wHWqLFfoPKFrYO'
             'DNaq/qK+vgutA0l9A/A/baNShD3Prtb5x8lrXrb3p8fcRVH686xaB+zsOpCo'
             '3Xu8+Vyx20UesExZIqp+C/2DVGXOqPzTdlTEyVzZ8o+GzImOeCPiej91Ri1L'
             '7Wgex8z6UCvavf0s/X+N7c1ribmJhfHxSVfPJbyVXMbH3HHvhmDiscz8cn/H'
             'trd6dSwisptFk/7l5Zi+nub+4jPJL5hX7fV7jzfr3krjgRy+bFjiSJF5pZ4k'
             'LNufV8H8Mv1cnjhPm3QsooZ70/6eJzMH/E33R95HrT/dRws8+Gw1BwL6CnD8'
             'XQAAAJQJK6+181onrwUAAAAAAAAAAAAAAAAAAAAAAIcr7L3RUyHjY7cCgYiv'
             'AQAAZWjxzve3Vl3mOcCJv/zZsf3FedWWiXauRx/+ENHMxs11S8RjIvF+Y/dk'
             '/c6zjtNr71aXZCmXje3W10d3lp9/04X8hh68+CRr6ix+sB49dj4e4lcCAAAF'
             'hNV1TOi6d6lh1Raq67g1olrqOlamsLqOM8P+cQdd17FwPUJV7+24pXKzITvU'
             '1HkrrS6hyfd91CWceKJjkbzLJL/e3JBXb669yDOhBOsRbg7vOTz7QeKjJR22'
             '7BxCPcLEXR0Lq0c49B/WI5yYCgSoRwgAAFDWqEcIAAAAAAAAAAAAAAAAAAAA'
             'AMC/L+y90Y6Q8WdC6hFSVwAAgPJ19cpAr/fOoL7hm/cHo7pNnT3Xev7CRWH/'
             'FpfhS9n3CXNxGW7Lzr9W/5c+OAAAAAAA2LdfZABKkA==')
     data = zlib.decompress(base64.b64decode(data))
     fd, filename = tempfile.mkstemp('.h5')
     try:
         f = os.fdopen(fd, "wb")
         f.write(data)
         f.close()
         m = cpmeas.load_measurements(filename)
         for i in range(1, 4):
             self.assertEqual(m.get_measurement(
                 cpmeas.IMAGE, 'ImageNumber', i), i)
         for i, plate in enumerate(('P-12345', 'P-23456', 'P-34567')):
             self.assertEqual(m.get_measurement(
                 cpmeas.IMAGE, 'Metadata_Plate', i+1), plate)
     finally:
         try:
             os.unlink(filename)
         except:
             print "Failed to remove file %s" % filename
 def execute_merge_files(self, mm):
     input_files = []
     output_fd, output_file = tempfile.mkstemp(".mat")
     pipeline = cpp.Pipeline()
     li = LoadImages()
     li.module_num = 1
     pipeline.add_module(li)
     
     for m in mm:
         input_fd, input_file = tempfile.mkstemp(".mat")
         pipeline.save_measurements(input_file, m)
         input_files.append((input_fd, input_file))
         
     M.MergeOutputFiles.merge_files(output_file, [ x[1] for x in input_files])
     m = cpmeas.load_measurements(output_file)
     gc.collect()
     os.close(output_fd)
     os.remove(output_file)
     for fd, filename in input_files:
         os.close(fd)
         os.remove(filename)
     return m
Example #9
 def load_measurements(self, measurements_file_name):
     self.measurements = cpmeas.load_measurements(
         measurements_file_name, can_overwrite = True)
     # Start on the first image
     self.measurements.next_image_set(1)
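A brief usage sketch for the loader above; the controller object and the file name are placeholders, not part of the original code:

    # Hypothetical usage: load an existing measurements file and walk the image sets.
    controller.load_measurements("analysis.h5")
    m = controller.measurements
    for image_number in m.get_image_numbers():
        m.next_image_set(image_number)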
Example #10
def run_pipeline_headless(options, args):
    '''Run a CellProfiler pipeline in headless mode'''
    
    if sys.platform == 'darwin':
        if options.start_awt:
            import bioformats
            from cellprofiler.utilities.jutil import activate_awt
            activate_awt()
        
    if not options.first_image_set is None:
        if not options.first_image_set.isdigit():
            raise ValueError("The --first-image-set option takes a numeric argument")
        else:
            image_set_start = int(options.first_image_set)
    else:
        image_set_start = None
    
    image_set_numbers = None
    if not options.last_image_set is None:
        if not options.last_image_set.isdigit():
            raise ValueError("The --last-image-set option takes a numeric argument")
        else:
            image_set_end = int(options.last_image_set)
            if image_set_start is None:
                image_set_numbers = np.arange(1, image_set_end+1)
            else:
                image_set_numbers = np.arange(image_set_start, image_set_end+1)
    else:
        image_set_end = None
    
    if ((options.pipeline_filename is not None) and 
        (not options.pipeline_filename.lower().startswith('http'))):
        options.pipeline_filename = os.path.expanduser(options.pipeline_filename)
    from cellprofiler.pipeline import Pipeline, EXIT_STATUS, M_PIPELINE
    import cellprofiler.measurements as cpmeas
    pipeline = Pipeline()
    initial_measurements = None
    try:
        if h5py.is_hdf5(options.pipeline_filename):
            initial_measurements = cpmeas.load_measurements(
                options.pipeline_filename,
                image_numbers=image_set_numbers)
    except:
        logging.root.info("Failed to load measurements from pipeline")
    if initial_measurements is not None:
        pipeline_text = \
            initial_measurements.get_experiment_measurement(
                M_PIPELINE)
        pipeline_text = pipeline_text.encode('us-ascii')
        pipeline.load(StringIO(pipeline_text))
        if not pipeline.in_batch_mode():
            #
            # Need file list in order to call prepare_run
            #
            from cellprofiler.utilities.hdf5_dict import HDF5FileList
            with h5py.File(options.pipeline_filename, "r") as src:
                if HDF5FileList.has_file_list(src):
                    HDF5FileList.copy(
                        src, initial_measurements.hdf5_dict.hdf5_file)
    else:
        pipeline.load(options.pipeline_filename)
    if options.groups is not None:
        kvs = [x.split('=') for x in options.groups.split(',')]
        groups = dict(kvs)
    else:
        groups = None
    use_hdf5 = len(args) > 0 and not args[0].lower().endswith(".mat")
    measurements = pipeline.run(
        image_set_start=image_set_start, 
        image_set_end=image_set_end,
        grouping=groups,
        measurements_filename = None if not use_hdf5 else args[0],
        initial_measurements = initial_measurements)
    if len(args) > 0 and not use_hdf5:
        pipeline.save_measurements(args[0], measurements)
    if options.done_file is not None:
        if (measurements is not None and 
            measurements.has_feature(cpmeas.EXPERIMENT, EXIT_STATUS)):
            done_text = measurements.get_experiment_measurement(EXIT_STATUS)
        else:
            done_text = "Failure"
        fd = open(options.done_file, "wt")
        fd.write("%s\n"%done_text)
        fd.close()
    if measurements is not None:
        measurements.close()
Example #11
 def load_measurements(self, measurements_file_name):
     self.measurements = cpmeas.load_measurements(measurements_file_name,
                                                  can_overwrite=True)
     # Start on the first image
     self.measurements.next_image_set(1)
Example #12
            options.pipeline_filename = os.path.expanduser(
                options.pipeline_filename)
        if options.worker_mode_URL is not None:
            last_success = time.time(
            )  # timeout checking for distributed workers.
        continue_looping = True  # for distributed workers
        while continue_looping:
            from cellprofiler.pipeline import Pipeline, EXIT_STATUS
            import cellprofiler.measurements as cpmeas
            continue_looping = False  # distributed workers reset this, below
            pipeline = Pipeline()
            measurements = None
            try:
                import h5py
                if h5py.is_hdf5(options.pipeline_filename):
                    measurements = cpmeas.load_measurements(
                        options.pipeline_filename)
            except:
                logging.root.info("Failed to load measurements from pipeline")
            if options.worker_mode_URL is None:
                # normal behavior
                pipeline.load(options.pipeline_filename)
            else:
                # distributed worker
                continue_looping = True
                if time.time() - last_success > worker_timeout:
                    logging.root.info("Worker timed out.  Exiting.")
                    break

                try:
                    jobinfo = cpdistributed.fetch_work(options.worker_mode_URL)
                except:
 def test_12_01_load_version_1(self):
     data = ('eJzt3E9sFFUcwPH3Zrft0lIpDUGCBWqiUg8124ZAQQslUqwxtkSQlAvplu7a'
             'Ymlr/yQ9mWKIKZgoRw5GS2JMI2rqQSFeEE94MZyM8WBKwMDBIkFDMGDrzr73'
             'dneGzi4sq7vd/X44/Hbe/HZmOrx582aT+R1v27W7snxtubAFAsIvqkSyBW3/'
             'UelYNuu7dJQ6Tuo4bZl2GVu3Rrev1Nt35+17rbXVzl5wMfsJ+lUMCBSjttad'
             'e+zYqZdrdbxoOfP2h4dH+gYHxKvh0MjYcPhIeGB0xG43/bPpIfcrRan6rt5A'
             'SbQH2h8ty4q1+KL92f5QFe2Y/qQjk7qfS2m2owSiV4L92Se7dFf2O/ZXrXu4'
             'nfPr8Yb2xJWSH/a2d+ySwoqPEuMlqfPN9Vrr+y+PaunxGhen9Pn8rkzFdOPi'
             'nWoVGReLk9e4OFvmzGsMNjTUB7fUNzbVNzTaHzY1xtozHxcX779demCYqDZ5'
             '0THi4NwFWSnEcmvq7vzpucsT9+zWK+U75oX/czH7ijjstbUvdbf+Uc1P0l4N'
             '+5er6JzDoFi4rwbTn26UO/N6QqMhO/YN9ITHReIqaMlwv1KULT5L8Ik0swR1'
             'hFJPE8xkxvTriuh1aa+ReoUV264UI71jkUh/OD53kLq9JxzpD40m2uOzDcvq'
             '/FC3+ZOmHMmzDd8Je7aR6RlwMrMEc1faXZE631yvH1VmZfcFw3P2mbZf1alo'
             'OfuVmYSZ/mWWE/3EV9ut2/wqPbbZ+/vJUJb/0gcTu5Msi36wpq4vXH/72C+l'
             '0c9NWzrmRUl01cUnpQgGxMxL32+v+G3yxEarf/j8+qF9V2uit6UzpU81yLDn'
             '0+gnann1ChXd40hQx80rnMfTHRqOxZePhN4It48d6Q6r5Y7uw+FDo4kGM77U'
             'pfn7vI5v+lO1/I3H8Zn8n1zHl+1xzj7JDStFbUC0bH3vqwMnjz3z1sZ37n3w'
             '8bWr1u5vXz+7Ifji7+v+ERPnm5uX+VXfSnfHFo+rwB27OHn15KlqZ17sCovG'
             '9rFD/eG+R+/J6g4l43eoJh6gMuI1YvWuUsuTq1VMNw4EntDxfzpu5Bev59jp'
             '1c68yOBgLLrvuJk+x7pnqi2rUueb+9SNxx5yRwXOaxwY0g0n16qYbhzoWqci'
             '84Hi5DUfmFnrzMv2zNb9u/a7aW5EZnXbmpRpRWcJ/BJy84Buy8UvIXtqUueb'
             '/je9Piu7Lxg5+CXk3Abdlo+/hNj3UbnYfTSY+ns3n3U16CvA51wEAABAgZBJ'
             '0UqKAAAAAAAAAAAAAAAAAAAAAAAgf3i9N/q0R/4P7joXfkcAAAAFKNM6E539'
             'rgbqTAAAABQ06kwAAAAAAAAAAAAAAAAAAAAAAJD/vN4brfHI7wm4GqgzAQBA'
             'wbPnC2KxOhPaxWkV3dMEFId0/aP2jIpVuTpA5NTe9o5dUvji//+zf6TOr9Vx'
             'qEmmSouPN++veISDW0KkKFPvbuvTUhI9A7F3t32qxT7D9oeqgHk2q9XfW6Oi'
             'VF80z23mOq0QTbHvSf0SuBXbrhQjvWORSH84ni91e0840h8aTbQHotu3v++z'
             'rJlrus2f9HhYrf+n7JwdJxrahWjJyvlQ/cqK94Oqz1Lnm/7X+3lWdl8wpCjN'
             'sF/VqWipL5oaAqaOlOlfZjnRT3zjX+g2v0qPbfb+fjKU5b/0wWRaV2vmrKuB'
             'uloAAAAFzUqKpqYWcz4AAAAAAAAAAAAAAAAAAAAAAPKL13uj9R75z7kLIlBX'
             'CwCAgidj/4SuziLESl2hJHBTLdfdUjFdfaWZP1WkvlJxamvduceOnXrZ9Ke2'
             'W868ntBoyI59Az3hcREv95JxNZ4lUI+oJaiPLRf1iKZupUyPX693/srK7gtG'
             'DuoRBW7rtkKqR9QSv8I16hEBAAAUNOoRAQAAAAAAAAAAAAAAAAAAAACQ/7ze'
             'G13nkV9zdPH3RXlnAACAwuVVj+hUk5oXnNumYrp6RC0vqDzqERUnr3pEP29z'
             'zi+LsB7RpfEc1iMKPi9T5pvrdbI5dV6xyUE9olPbE/2kYOoRXWqjHhEAAEAx'
             'oR4RAAAAAAAAAAAAAAAAAAAAAAD572HrEW2lHhEAAEVn/ODcBVkpxHJr6u78'
             '6bnLE/fs+cCV8h3zwp+07s2FS7fvTGzeGl03+7Uc/FuU3bdyU7NeeVuU/Qv/'
             'J+C3')
     data = zlib.decompress(base64.b64decode(data))
     fid, name = tempfile.mkstemp(".h5")
     m = None
     try:
         fd = os.fdopen(fid, "wb")
         fd.write(data)
         fd.close()
         m = cpmeas.load_measurements(name)
         self.assertEqual(tuple(m.get_image_numbers()), (1, 2))
         self.assertEqual(
             m.get_measurement(cpmeas.IMAGE, "foo", image_set_number=1),
             12345)
         self.assertEqual(
             m.get_measurement(cpmeas.IMAGE, "foo", image_set_number=2),
             23456)
         for image_number, expected in ((1, np.array([234567, 123456])),
                                        (2, np.array([123456, 234567]))):
             values = m.get_measurement("Nuclei",
                                        "bar",
                                        image_set_number=image_number)
             self.assertEqual(len(expected), len(values))
             self.assertTrue(np.all(expected == values))
     finally:
         if m is not None:
             del m
         os.unlink(name)
Example #14
    elif options.run_pipeline: # this includes distributed workers
        if (options.pipeline_filename is not None) and (not options.pipeline_filename.lower().startswith('http')):
            options.pipeline_filename = os.path.expanduser(options.pipeline_filename)
        if options.worker_mode_URL is not None:
            last_success = time.time() # timeout checking for distributed workers.
        continue_looping = True # for distributed workers
        while continue_looping:
            from cellprofiler.pipeline import Pipeline, EXIT_STATUS
            import cellprofiler.measurements as cpmeas
            continue_looping = False # distributed workers reset this, below
            pipeline = Pipeline()
            measurements = None
            try:
                import h5py
                if h5py.is_hdf5(options.pipeline_filename):
                    measurements = cpmeas.load_measurements(options.pipeline_filename)
            except:
                logging.root.info("Failed to load measurements from pipeline")
            if options.worker_mode_URL is None:
                # normal behavior
                pipeline.load(options.pipeline_filename)
            else:
                # distributed worker
                continue_looping = True
                if time.time() - last_success > worker_timeout:
                    logging.root.info("Worker timed out.  Exiting.")
                    break

                try:
                    jobinfo = cpdistributed.fetch_work(options.worker_mode_URL)
                except:
Example #15
    elif options.run_pipeline: # this includes distributed workers
        if (options.pipeline_filename is not None) and (not options.pipeline_filename.lower().startswith('http')):
            options.pipeline_filename = os.path.expanduser(options.pipeline_filename)
        if options.worker_mode_URL is not None:
            last_success = time.time() # timeout checking for distributed workers.
        continue_looping = True # for distributed workers
        while continue_looping:
            from cellprofiler.pipeline import Pipeline, EXIT_STATUS
            import cellprofiler.measurements as cpmeas
            continue_looping = False # distributed workers reset this, below
            pipeline = Pipeline()
            initial_measurements = None
            try:
                import h5py
                if h5py.is_hdf5(options.pipeline_filename):
                    initial_measurements = cpmeas.load_measurements(options.pipeline_filename)
            except:
                logging.root.info("Failed to load measurements from pipeline")
            if options.worker_mode_URL is None:
                # normal behavior
                pipeline.load(options.pipeline_filename)
            else:
                # distributed worker
                continue_looping = True
                if time.time() - last_success > worker_timeout:
                    logging.root.info("Worker timed out.  Exiting.")
                    break

                try:
                    jobinfo = cpdistributed.fetch_work(options.worker_mode_URL)
                except:
Example #16
 if (options.pipeline_filename is not None) and (not options.pipeline_filename.lower().startswith('http')):
     options.pipeline_filename = os.path.expanduser(options.pipeline_filename)
 if options.worker_mode_URL is not None:
     last_success = time.time() # timeout checking for distributed workers.
 continue_looping = True # for distributed workers
 while continue_looping:
     from cellprofiler.pipeline import Pipeline, EXIT_STATUS, M_PIPELINE
     import cellprofiler.measurements as cpmeas
     continue_looping = False # distributed workers reset this, below
     pipeline = Pipeline()
     initial_measurements = None
     try:
         import h5py
         if h5py.is_hdf5(options.pipeline_filename):
             initial_measurements = cpmeas.load_measurements(
                 options.pipeline_filename,
                 image_numbers=image_set_numbers)
     except:
         logging.root.info("Failed to load measurements from pipeline")
     if options.worker_mode_URL is None:
         # normal behavior
         if initial_measurements is not None:
             pipeline_text = \
                 initial_measurements.get_experiment_measurement(
                     M_PIPELINE)
             pipeline_text = pipeline_text.encode('us-ascii')
             pipeline.load(StringIO(pipeline_text))
         else:
             pipeline.load(options.pipeline_filename)
     else:
         # distributed worker
Example #17
            options.pipeline_filename = os.path.expanduser(
                options.pipeline_filename)
        if options.worker_mode_URL is not None:
            last_success = time.time(
            )  # timeout checking for distributed workers.
        continue_looping = True  # for distributed workers
        while continue_looping:
            from cellprofiler.pipeline import Pipeline, EXIT_STATUS
            import cellprofiler.measurements as cpmeas
            continue_looping = False  # distributed workers reset this, below
            pipeline = Pipeline()
            initial_measurements = None
            try:
                import h5py
                if h5py.is_hdf5(options.pipeline_filename):
                    initial_measurements = cpmeas.load_measurements(
                        options.pipeline_filename)
            except:
                logging.root.info("Failed to load measurements from pipeline")
            if options.worker_mode_URL is None:
                # normal behavior
                pipeline.load(options.pipeline_filename)
            else:
                # distributed worker
                continue_looping = True
                if time.time() - last_success > worker_timeout:
                    logging.root.info("Worker timed out.  Exiting.")
                    break

                try:
                    jobinfo = cpdistributed.fetch_work(options.worker_mode_URL)
                except:
Example #18
 def test_13_01_load_matlab(self):
     data = ('eJztHEtzHEd5pCghVkhi3gmVQ5NylS2XJe3KWlueCpFkSbYV9CpLsRMss+nd'
             '6d3teF7MQ7tLTBXHcM2JOweuUFw4pfIDuFHAkRTwA3wCDqSK7+vu2XnsrPYh'
             'aYuAxrX2dvf36u/VX/f0env1YGv1NinNFcj26sFsjZuM7Jk0qDmepRM7uEbW'
             'PEYDZhDH1slBIySrYZ0UFkmxoC8s6cUlslAoFrXRnonN7Vfw33c07QX450X4'
             'TKqh51V7IvH5Cnwa1DZM5sP4lPay6l+Ez1roecwOJPI2o37oMQs6fG3PYzUG'
             'Y1XA0vZZEHC77ndEQP73pgbjL/untO+o9hfw2QmtCvN2a5sWrTOgrijDt9sM'
             'GK3a1Gz/hBkKO4LedowQZ6GefXrEdsPADYN7TnO3FjBbdB9wi+0H1As6+BIa'
             'u4B2xDLZHbrr3GPVwPHaqnud1WhoBpJ8PKi6BY0EimgfOI7p3wFPsKklhex0'
             '32OmmzIh6m9Ji/X3Qh/9XVD96lk5DfxCAv+rA9hvUsk9LN6EwttL4E3l4H03'
             'gXdRtTFKZgtLs3Hk3Jy7daNUKBRiPY4qT1J/efK8qKXlwfaafhh4of1E+H8f'
             '/G9m8LFdBXxmuYcVahxyqz5bXFwqRPM4bXpugt5EDr2kXhB+pQ//lzL8sS08'
             'nATo+bn2yPpl9CT9MsLbe2O4fBLJ8z34bLRc5nHMXFIkCbUTVk3GE2wFH/Lq'
             'cHy+rtqXUEbuMpPbrBx9Ac48KEMSCUI/w+feK8Pp/7VXjtf/319O6x/ba8w0'
             '9zwH1x+PRDLppBEErj4/32w256oA4SqIOcerTz9gns9hVVqY3n+wc58dcdEq'
             'FhcLi9PTWw41hAJ9/ZElsm3ZDi29+NQ/sstHCvPwcrF4/Vbp8PLTI+pxWgEg'
             'T9FBaEjVQO6p33Ca5Sa3DaepH3ghe2o7AZA9bJVuw2f9aYUG1UbZB80xnXoe'
             'bV+Jhq4RI2i77Psht4OlmcfTBB7MqwR7wdlIhRETBGWGDuT5ETdCahIuxI6B'
             'fWZCegaZiMWChmPoB6wVzG60aDUgFvIWoHJpIU5N4RNuE0arDVL3nNBd1q8L'
             'qAPBuAEfoAFfaCBarFU1QxAjwm3AgoQEqo5lgZ7WHQJzJqHPBBG1qBFqmsQP'
             'KzXHNEChpMmDBuAgPSky0JNjy/qOY0vcTRuWIcmGiGLDdKoUJ6erFUlB3BGI'
             'h62bax3UtQarPlGoPqy0QNwjFvd9WAsJfDVC1+RATBL2l/X3lRbvogaiqVXa'
             'qEZq0ICiWAJgQ06fVJkXUJiCwlej+y6r8lqMBsPMNHy0n9AtkNQloJh1UiUA'
             'QqWyJITMMlUntAO9KA2StIPP8g1ArlSpz2Z9Zvs84EdsRvdhpeeGoLDnYCcA'
             'genBAL7SUMr8itlGK/CE13Rm4jkWaTagPEqY6D6rhyb1wClcj/kYDFLAGvio'
             'H+MqUwsjYqGg/+jK8t5bWDuyt+euzpSx9RBi9m2MhtXZPQwI/FqYvYVfP1r4'
             '6UzZR6B9DhiJkZm0p3pDidNRPnFp0NDnropYVM88kkeW60rI3MH7oY1jl5TT'
             'UdtmZspm0p+Qm+UccVCBh3VSx1/EMPqkCtdNO2CeycCiRiZSq5K4TyDnK0PJ'
             'MMXsJRhwEQzUj/wC3NypfAheBuw24zyxAwJI48t0IsH19Z3V/GFJQ5fLijK6'
             '8HwnDDDx+ihdCjaeniLGIljFa0tAC5l2FZGUAqN8qrj5VWri9ALp01G4Tm8a'
             'sPRBtO3BIki99q5knsrhC905/HrxuBx+65RTuIrz2D652mbElXOINKgSPpcz'
             '5JD0E/oHd4fcZRKDA3Yg3UPhXUPfdnkLlBjaHOhc2eb2tW3amtGLhWuLBYG/'
             'zkGjXsdaaBwfOAk5OjQ9atdZx5AHIBqIZDFPVj7EtzCjRxQwnRObUQ8yJsRf'
             'nXldrpBlGjhhtYGej1wrjmfIeQhFoYrilJxQYdCAsG5AzCZCZjfwQ3LXdCrU'
             'lKJGMBCHnqdWwxrFTYvyqC2nCcxgc0hCF6OpAvEK6UEkC4WrF+YK4rlWVF/k'
             'auZCVdEC+QIRydUol3KVq8HRIAoq7Xj2QEcy3RbSohIN7uOWLOR+g8BaYrlx'
             '4Ij4RydvZ3E82iRi1ceZy7CrsKDJmN2Hxj6H1RdE9C3HCYTCIQsHGF1ySvug'
             'A8yVYnE1oUjA6cmsST2Yk+n4oCJo2zIrWNzmVmjhgg5qNMRsKGyY9ZvRAshQ'
             'r6iEUKy2CObNAgvHDIW+uCqfRVZOsY1t3iNxRJGeTBpQ95gEbCbLmDhiYhtE'
             'RFfDwAHTYeyYbQJ/V0NcgYjfQ0X9MXspo2Ob/NltUxuLt5S3JV29wm3MBXLO'
             'ibW2K+2KaMmZsYq5XmKnQkmZCftEKMmwioXdwCWJbFHXpFVOhbvfpSEsrjQd'
             'LyUZfU1ntmpSXyw/OMxUMxm6yzqAEdEfaQQViWZAMZqM1xtYEYosjeoUtKD0'
             'tAPPcdvL+sMshJwuCFVX6c+PFnmLG0Zi9WgTJY4jKxIHSgYP4z/iUaHVJ7Jn'
             'Wb/TGe2rzsiHovRWU+Ty9Zb1L6Vj564a6aRhXWr1Hp5kidI15sBrWIpjlQPl'
             'n92pEzrD8dKx5sA3O2TK91rCZbsw9FKhkM23Vnw4JjUW+Qwm/Ngz4zwVKwUq'
             'LRfrTyKXUqEgWYaIr6nclIHF1BTtDxvacPvJkc43HMs1QduI/6/p4fbJl1X7'
             '00mkA3VfuXv/nXlEzVeWld0xYJ11rHwH0oEZN9MMttdL6xxqu6AMNUVvavdE'
             'xBwP05Ft0zZY6ziwWJgoEh+oQExJh1tSzOR92O5B9T0AWMx0P7R2axsiGUA1'
             'mFHIRotVxUKDR6LlQjHe3aeoPeRG0OirjwTTXY/Xexkhy3QhvzTV5Fnuhuc5'
             'Xm8gTXv3/lZ/yTRtH6INkkI/0BTTXurA56TxNtx57LNlxP9gSH6jnnuOY15/'
             '+vG/v3hV+7WY18+G5NfvnJho6byF7VK1xJboYmmpslC7sUhLxVKhyG5cv1Uq'
             'FW4slYzFceh3S+GdNZ9x2lFL4J/2efK0lrYjtuUBzZxr12M6rSH5nvY5+jj0'
             '/MmFvzyr/+H1z8Zm19//9aPSP341lrzz7hj1mMw7Y9Hje5+89ed//nYseozw'
             'Ph6Sz3va8fFwOQF/UbXxeFKfn59f0+cxLuYhLuajuJhPBOlY89Dnzso49fzZ'
             'xHD177dUe1mL3liXZR0TFbhb6sy+vMZwk1N+HzGzne91+J/1PMe5jvxwZueP'
             'P59aWxkXv998+2+//ME7q4Lfs+dGv6ewhxto3JtpqUe9dJHlZNftgEEvDUT7'
             '8DXHdLxtqm4IbNEKM9Nd+OwHHnc7bx3j7ifcFfVsuoZd5z5sttsgIHtAzTBC'
             'uAPb3+7ZRNAPxZYzReg06toV7fj8czGBf1F9ovfsh+rKx5fhfXuhD72pFL0p'
             're7R9iB4z6XwntM+ZMFA/LJ478M+ZxC8yRTepLbjnCy/DIs3rXX7XRZvsgsv'
             'TQ/xP780XNy/ptrP4PNAvRQRwZOIiKh/0645+KotGpJuuhNd/Ok8MvP7u7UI'
             'UQ33yCsd+tF7eYWv6Gd7O3zxtUyKzn7Dad7B92sZ+rfxjQ3eVsgMoL5+d7F3'
             'Xkb9vZnoHyR+vqal4wfbXe/rE/SG9Wfc64+aX7J3AQY5L3shQwfb0dnHIPJP'
             'pPAntOvaYOd0z2f4Ylu8wdJG32fFVxMG1/8geWGQvIdnpSfh1+984PXMfLGd'
             'd0viKQqiDT7/UfNpL7yVPnh5960Sb/gGlntUu2X9tXhCfqPki653j2r8JPfM'
             'hvVXfG95En4rffhdyMwb253acAi5e9nrtPiPkh/lVnE0vykWztZPs3g3B8Qb'
             'NZ/1ygONPng3M3rFdq9LQ49WZ/cePyrM3nqcuSgk+mYG8odXM/ywnXnbPNC5'
             'wqUMHWzPXX10eDj/OHmZqNMRXSDSBrTDqPl4VH85y3r6rNeNxGUqbVT9Qh48'
             'UZ0zalyNknei1zrjtE9Uj45q17OuP7vtWTqRflb64OX5YeKyg+gfpC74RoYO'
             'tnOuzQ1ML6/O6LpFocbPct3Nq8vjaxaD8z/tuvSs6qqsnko5eMPwG+1+g7z/'
             'cZbzzOq1VOjOm2dZr+bVI6dZr+b5bXybJV/u05xvrzrx48TvOiYyeHnnF+PU'
             'jzjsEFeVT85/lPwmLzbJe/fcNpiboHcadjrHO8c7xzvHO8f778VbSeANek4e'
             'r1ty2fgyzfccb7z+c9Z1z5fV7/9f67WVPvM+zzfneOd4/7t4FydivOx+PLuP'
             'R/gPEnzy8sVVLZ0vsJ363w3kj139OfwFrrxdMJe94o98Wn343M7wud2Lj/pd'
             'UVv9YFX9eGiu528ZcvR5IYd/Ui+T8OeNN0/nHtawfF+60C3vS33wpoa4p5q1'
             '/5Vj4KPnJPDDzn9iQuL9Ysh5HMcnK9fkCfH+A86LP/E=')
     data = zlib.decompress(base64.b64decode(data))
     fid, name = tempfile.mkstemp(".mat")
     m = None
     try:
         fd = os.fdopen(fid, "wb")
         fd.write(data)
         fd.close()
         m = cpmeas.load_measurements(name)
         self.assertEqual(tuple(m.get_image_numbers()), (1,))
         self.assertEqual(m.get_measurement(cpmeas.IMAGE, "Count_Nuclei",
                                            image_set_number = 1), 1)
         values = m.get_measurement("Nuclei", "Location_Center_X", 1)
         self.assertEqual(len(values), 1)
         self.assertAlmostEqual(values[0], 34.580433355219959)
     finally:
         gc.collect()
         if m is not None:
             del m
         os.unlink(name)
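The .mat blob above shows that cpmeas.load_measurements also reads CellProfiler's legacy MATLAB output, not only HDF5. Example #22 below uses h5py.is_hdf5 to pick a loader per file; a minimal sketch of that dispatch, assuming cpmeas is imported as in the tests (the path is a placeholder):

    # Sketch: open native HDF5 measurement files read-only, and fall back to
    # load_measurements for legacy .mat output files. The path is a placeholder.
    import h5py
    source = "DefaultOUT.mat"
    if h5py.is_hdf5(source):
        msource = cpmeas.Measurements(filename=source, mode="r", multithread=False)
    else:
        msource = cpmeas.load_measurements(source)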
Example #19
 def test_03_01_save_and_load(self):
     '''Save a pipeline to batch data, open it to check and load it'''
     data = ('eJztWW1PGkEQXhC1WtPYTzb9tB+llROoGiWNgi9NSYUSIbZGbbvCApvu7ZJ7'
             'UWlj0o/9Wf1J/QndxTs4tsoBRS3JHbkcMzfPPDOzs8uxl8uU9jPbcFWLw1ym'
             'FKsSimGBIqvKDT0FmbUEdwyMLFyBnKVgycYwY9dgIgET8dTqRmolCZPx+AYY'
             '7ghlc0/EJf4cgClxfSTOsHNr0pFDnlPKRWxZhNXMSRABzxz9L3EeIoOgM4oP'
             'EbWx2aFw9VlW5aVmo30rxys2xXmke43Fkbf1M2yY76su0LldIJeYFsk3rKTg'
             'mh3gc2ISzhy841/Vtnm5pfDKOhTmOnUIKXWQdVnw6KX9W9Cxj9xQt6ce+3lH'
             'JqxCzknFRhQSHdXaUbTGwcffRJe/CXAk0BKX9sHNK3HIs4QvrdjeJSpbUEdW'
             'uS79rPv4mVb8SLmcOrGw3ugr/lAXPgRe9Zl3uAsXBnkO+sp7VolXyrscMm5B'
             '28T91/02/lHgphSce7i4GdCJ0y/fOSVfKe9RE1/UsYE1TXP91H38rCh+pCzG'
             'uYwpbRhcLlHGiWXY7OuJaCC9ISZ3q5NdqbhdzLZb+yH4hpmXy3I2ioVtGTFE'
             'myYZxbwcdpzvu68eku8+cGmf/GZAdz9IeaeOGMM0ERtx3P30z24+c6d86jqc'
             'uOP8Il18EdE/DP8L3w8fvnegezyl/Glxq/BaPljhTe1l9LOUPoj15YBfbB5n'
             'YoXTqKvZ4dTW2eZxPLZx+j2xlLy6Ni4SgWwpozfmPUj8fuvhuhK/lGUMRxgZ'
             'TmArV9GYVOU4s+qOLunodlGzo3mgeZMcxbwZir9p8QZFpj4C/kHnUfKO+YJ5'
             'NJ7z6OPsYP8rxuV3NcAFuAAX4P43XNqD63c/pLUZUzO43YCEVXBjnPINcOON'
             'S4OgXwPc8DipvO35Ut2/kfZfQO9+ewG6+03K3s04TW9topsa5ahyvYut7Yuv'
             'Wc+Gdj/P52sKz9ptPOXWK5AzuU8tb5ja9TuRbal4Q1rvCNT6zdzA561DWHwW'
             'pnvXXa13Zxx+bw3DFwn9zffYBxdxKidxP8Fg47zYw97NbVj7P/nFW+E=')
     def callback(caller,event):
         self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
         self.assertFalse(isinstance(event, cpp.RunExceptionEvent))
     for windows_mode in ((False, True) if sys.platform.startswith("win")
                          else (False,)):
         pipeline = cpp.Pipeline()
         pipeline.add_listener(callback)
         pipeline.load(StringIO(zlib.decompress(base64.b64decode(data))))
         ipath = os.path.join(T.example_images_directory(),'ExampleSBSImages')
         bpath = tempfile.mkdtemp()
         bfile = os.path.join(bpath,C.F_BATCH_DATA)
         hfile = os.path.join(bpath, C.F_BATCH_DATA_H5)
         try:
             li = pipeline.modules()[0]
             self.assertTrue(isinstance(li, LI.LoadImages))
             module = pipeline.modules()[1]
             self.assertTrue(isinstance(module, C.CreateBatchFiles))
             li.location.dir_choice = LI.ABSOLUTE_FOLDER_NAME
             li.location.custom_path = ipath
             module.wants_default_output_directory.value = False
             module.custom_output_directory.value = bpath
             module.remote_host_is_windows.value = windows_mode
             self.assertEqual(len(module.mappings), 1)
             mapping = module.mappings[0]
             mapping.local_directory.value = ipath
             self.assertFalse(pipeline.in_batch_mode())
             measurements = cpmeas.Measurements()
             image_set_list = cpi.ImageSetList()
             result = pipeline.prepare_run(
                 cpw.Workspace(pipeline, None, None, None,
                               measurements, image_set_list))
             self.assertFalse(pipeline.in_batch_mode())
             self.assertFalse(result)
             self.assertFalse(module.batch_mode.value)
             pipeline = cpp.Pipeline()
             pipeline.add_listener(callback)
             fd = open(hfile,'rb')
             try:
                 pipeline.load(fd)
                 fd.seek(0)
             finally:
                 fd.close()
             
             measurements = cpmeas.load_measurements(hfile)
             image_set_list = cpi.ImageSetList()
             self.assertTrue(pipeline.in_batch_mode())
             module = pipeline.modules()[1]
             self.assertTrue(isinstance(module, C.CreateBatchFiles))
             self.assertTrue(module.batch_mode.value)
             image_numbers = measurements.get_image_numbers()
             self.assertTrue([x == i+1 for i, x in enumerate(image_numbers)])
             workspace = cpw.Workspace(pipeline, None, None, None,
                                       measurements, image_set_list)
             pipeline.prepare_run(workspace)
             pipeline.prepare_group(workspace, {}, range(1,97))
             for i in range(96):
                 image_set = image_set_list.get_image_set(i)
                 for image_name in ('DNA', 'Cytoplasm'):
                     pathname = measurements.get_measurement(
                         cpmeas.IMAGE, "PathName_"+image_name, i+1)
                     self.assertEqual(pathname, 
                                      '\\imaging\\analysis' if windows_mode
                                      else '/imaging/analysis')
             del measurements
         finally:
             if os.path.exists(bfile):
                 os.unlink(bfile)
             if os.path.exists(hfile):
                 os.unlink(hfile)
             os.rmdir(bpath)
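As the assertions above show, the Batch_data.h5 file written by CreateBatchFiles does double duty: pipeline.load reads the pipeline text out of it, and cpmeas.load_measurements reads the measurements. A condensed sketch of that reuse, assuming the same cpp/cpmeas imports and an hfile path like the one in the test:

    # Sketch only: one Batch_data.h5 file feeds both the pipeline and the measurements.
    pipeline = cpp.Pipeline()
    fd = open(hfile, "rb")
    try:
        pipeline.load(fd)
    finally:
        fd.close()
    measurements = cpmeas.load_measurements(hfile)
    assert pipeline.in_batch_mode()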
Example #20
def run_pipeline_headless(options, args):
    '''Run a CellProfiler pipeline in headless mode'''
    #
    # Start Ilastik's workers
    #
    try:
        from ilastik.core.jobMachine import GLOBAL_WM
        GLOBAL_WM.set_thread_count(1)
    except:
        logging.root.warn("Failed to stop Ilastik")

    if sys.platform == 'darwin':
        if options.start_awt:
            import bioformats
            from javabridge import activate_awt
            activate_awt()

    if not options.first_image_set is None:
        if not options.first_image_set.isdigit():
            raise ValueError(
                "The --first-image-set option takes a numeric argument")
        else:
            image_set_start = int(options.first_image_set)
    else:
        image_set_start = None

    image_set_numbers = None
    if not options.last_image_set is None:
        if not options.last_image_set.isdigit():
            raise ValueError(
                "The --last-image-set option takes a numeric argument")
        else:
            image_set_end = int(options.last_image_set)
            if image_set_start is None:
                image_set_numbers = np.arange(1, image_set_end + 1)
            else:
                image_set_numbers = np.arange(image_set_start,
                                              image_set_end + 1)
    else:
        image_set_end = None

    if ((options.pipeline_filename is not None)
            and (not options.pipeline_filename.lower().startswith('http'))):
        options.pipeline_filename = os.path.expanduser(
            options.pipeline_filename)
    from cellprofiler.pipeline import Pipeline, EXIT_STATUS, M_PIPELINE
    import cellprofiler.measurements as cpmeas
    import cellprofiler.preferences as cpprefs
    pipeline = Pipeline()
    initial_measurements = None
    try:
        if h5py.is_hdf5(options.pipeline_filename):
            initial_measurements = cpmeas.load_measurements(
                options.pipeline_filename, image_numbers=image_set_numbers)
    except:
        logging.root.info("Failed to load measurements from pipeline")
    if initial_measurements is not None:
        pipeline_text = \
            initial_measurements.get_experiment_measurement(
                M_PIPELINE)
        pipeline_text = pipeline_text.encode('us-ascii')
        pipeline.load(StringIO(pipeline_text))
        if not pipeline.in_batch_mode():
            #
            # Need file list in order to call prepare_run
            #
            from cellprofiler.utilities.hdf5_dict import HDF5FileList
            with h5py.File(options.pipeline_filename, "r") as src:
                if HDF5FileList.has_file_list(src):
                    HDF5FileList.copy(src,
                                      initial_measurements.hdf5_dict.hdf5_file)
    else:
        pipeline.load(options.pipeline_filename)
    if options.groups is not None:
        kvs = [x.split('=') for x in options.groups.split(',')]
        groups = dict(kvs)
    else:
        groups = None
    file_list = cpprefs.get_image_set_file()
    if file_list is not None:
        pipeline.read_file_list(file_list)
    #
    # Fixup CreateBatchFiles with any command-line input or output directories
    #
    if pipeline.in_batch_mode():
        create_batch_files = [
            m for m in pipeline.modules() if m.is_create_batch_module()
        ]
        if len(create_batch_files) > 0:
            create_batch_files = create_batch_files[0]
            if options.output_directory is not None:
                create_batch_files.custom_output_directory.value = \
                    options.output_directory
            if options.image_directory is not None:
                create_batch_files.default_image_directory.value = \
                    options.image_directory

    use_hdf5 = len(args) > 0 and not args[0].lower().endswith(".mat")
    measurements = pipeline.run(
        image_set_start=image_set_start,
        image_set_end=image_set_end,
        grouping=groups,
        measurements_filename=None if not use_hdf5 else args[0],
        initial_measurements=initial_measurements)
    if len(args) > 0 and not use_hdf5:
        pipeline.save_measurements(args[0], measurements)
    if options.done_file is not None:
        if (measurements is not None
                and measurements.has_feature(cpmeas.EXPERIMENT, EXIT_STATUS)):
            done_text = measurements.get_experiment_measurement(EXIT_STATUS)
        else:
            done_text = "Failure"
        fd = open(options.done_file, "wt")
        fd.write("%s\n" % done_text)
        fd.close()
    if measurements is not None:
        measurements.close()
Example #22
    def merge_files(destination, sources, force_headless=False):
        is_headless = force_headless or get_headless()
        if not is_headless:
            import wx
        if len(sources) == 0:
            return
        if not is_headless:
            progress = wx.ProgressDialog("Writing " + destination,
                                         "Loading " + sources[0],
                                         maximum = len(sources) * 4 + 1,
                                         style = wx.PD_CAN_ABORT | 
                                         wx.PD_APP_MODAL |
                                         wx.PD_ELAPSED_TIME |
                                         wx.PD_REMAINING_TIME)
        count = 0
        try:
            pipeline = cpp.Pipeline()
            has_error = [False]
            def callback(caller, event):
                if isinstance(event, cpp.LoadExceptionEvent):
                    wx.MessageBox(
                        message = "Could not load %s: %s" % (
                            sources[0], event.error),
                        caption = "Failed to load %s" % sources[0])
                    # has_error is a one-element list so this closure can set the flag
                    has_error[0] = True

            pipeline.add_listener(callback)

            pipeline.load(sources[0])
            if has_error[0]:
                return
            if destination.lower().endswith(".h5"):
                mdest = cpmeas.Measurements(filename=destination,
                                            multithread=False)
                h5_dest = True
            else:
                mdest = cpmeas.Measurements(multithread=False)
                h5_dest = False
            for source in sources:
                if not is_headless:
                    count += 1
                    keep_going, skip = progress.Update(count, "Loading " + source)
                    if not keep_going:
                        return
                if h5py.is_hdf5(source):
                    msource = cpmeas.Measurements(filename=source, 
                                                  mode="r",
                                                  multithread=False)
                else:
                    msource = cpmeas.load_measurements(source)
                dest_image_numbers = mdest.get_image_numbers()
                source_image_numbers = msource.get_image_numbers()
                if (len(dest_image_numbers) == 0 or 
                    len(source_image_numbers) == 0):
                    offset_source_image_numbers =  source_image_numbers
                else:
                    offset_source_image_numbers = (
                        np.max(dest_image_numbers) -
                        np.min(source_image_numbers) + source_image_numbers + 1)
                for object_name in msource.get_object_names():
                    if object_name in mdest.get_object_names():
                        destfeatures = mdest.get_feature_names(object_name)
                    else:
                        destfeatures = []
                    for feature in msource.get_feature_names(object_name):
                        if object_name == cpmeas.EXPERIMENT:
                            if not mdest.has_feature(object_name, feature):
                                src_value = msource.get_experiment_measurement(
                                    feature)
                                mdest.add_experiment_measurement(feature,
                                                                 src_value)
                            continue
                        src_values = msource.get_measurement(
                            object_name,
                            feature,
                            image_set_number = source_image_numbers)
                        mdest[object_name, 
                              feature, 
                              offset_source_image_numbers] = src_values
                    destset = set(destfeatures)
            if not is_headless:
                keep_going, skip = progress.Update(count+1, "Saving to "+destination)
                if not keep_going:
                    return
            if not h5_dest:
                pipeline.save_measurements(destination, mdest)
        finally:
            if not is_headless:
                progress.Destroy()
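A usage sketch for merge_files, modeled on execute_merge_files in Example #6 above (the alias M for the module comes from that example; the file names are placeholders):

    # Sketch: merge several per-batch measurement files into one output file,
    # then load the merged result. force_headless=True skips the wx progress dialog.
    sources = ["batch1.mat", "batch2.mat"]
    M.MergeOutputFiles.merge_files("merged.mat", sources, force_headless=True)
    merged = cpmeas.load_measurements("merged.mat")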
    def merge_files(destination, sources, force_headless=False):
        is_headless = force_headless or get_headless()
        if not is_headless:
            import wx
        if len(sources) == 0:
            return
        if not is_headless:
            progress = wx.ProgressDialog("Writing " + destination,
                                         "Loading " + sources[0],
                                         maximum=len(sources) * 4 + 1,
                                         style=wx.PD_CAN_ABORT
                                         | wx.PD_APP_MODAL | wx.PD_ELAPSED_TIME
                                         | wx.PD_REMAINING_TIME)
        count = 0
        try:
            pipeline = cpp.Pipeline()
            has_error = [False]

            def callback(caller, event):
                if isinstance(event, cpp.LoadExceptionEvent):
                    has_error = True
                    wx.MessageBox(message="Could not load %s: %s" %
                                  (sources[0], event.error),
                                  caption="Failed to load %s" % sources[0])
                    has_error[0] = True

            pipeline.add_listener(callback)

            pipeline.load(sources[0])
            if has_error[0]:
                return
            if destination.lower().endswith(".h5"):
                mdest = cpmeas.Measurements(filename=destination,
                                            multithread=False)
                h5_dest = True
            else:
                mdest = cpmeas.Measurements(multithread=False)
                h5_dest = False
            for source in sources:
                if not is_headless:
                    count += 1
                    keep_going, skip = progress.Update(count,
                                                       "Loading " + source)
                    if not keep_going:
                        return
                if h5py.is_hdf5(source):
                    msource = cpmeas.Measurements(filename=source,
                                                  mode="r",
                                                  multithread=False)
                else:
                    msource = cpmeas.load_measurements(source)
                dest_image_numbers = mdest.get_image_numbers()
                source_image_numbers = msource.get_image_numbers()
                if (len(dest_image_numbers) == 0
                        or len(source_image_numbers) == 0):
                    offset_source_image_numbers = source_image_numbers
                else:
                    offset_source_image_numbers = (
                        np.max(dest_image_numbers) -
                        np.min(source_image_numbers) + source_image_numbers +
                        1)
                for object_name in msource.get_object_names():
                    if object_name in mdest.get_object_names():
                        destfeatures = mdest.get_feature_names(object_name)
                    else:
                        destfeatures = []
                    for feature in msource.get_feature_names(object_name):
                        if object_name == cpmeas.EXPERIMENT:
                            if not mdest.has_feature(object_name, feature):
                                src_value = msource.get_experiment_measurement(
                                    feature)
                                mdest.add_experiment_measurement(
                                    feature, src_value)
                            continue
                        src_values = msource.get_measurement(
                            object_name,
                            feature,
                            image_set_number=source_image_numbers)
                        mdest[object_name, feature,
                              offset_source_image_numbers] = src_values
                    destset = set(destfeatures)
            if not is_headless:
                keep_going, skip = progress.Update(count + 1,
                                                   "Saving to " + destination)
                if not keep_going:
                    return
            if not h5_dest:
                pipeline.save_measurements(destination, mdest)
        finally:
            if not is_headless:
                progress.Destroy()
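The core of the merge above is the image-number offset: measurements from each new source are renumbered so they continue directly after the highest image number already present in the destination. A minimal sketch of just that arithmetic, with made-up image numbers (the dest/src arrays below are illustrative, not part of CellProfiler):

import numpy as np

# Image numbers already merged into the destination and the numbers
# used by the next source file (both 1-based, as in CellProfiler).
dest = np.array([1, 2, 3])
src = np.array([1, 2])

# Shift the source numbers so they start right after the destination's
# maximum; this mirrors np.max(dest) - np.min(src) + src + 1 above.
offset_src = np.max(dest) - np.min(src) + src + 1
print(offset_src)  # [4 5] -> these source image sets become 4 and 5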
Example #24
0
def run_pipeline_headless(options, args):
    '''Run a CellProfiler pipeline in headless mode'''
    #
    # Start Ilastik's workers
    #
    try:
        from ilastik.core.jobMachine import GLOBAL_WM
        GLOBAL_WM.set_thread_count(1)
    except Exception:
        logging.root.warn("Failed to stop Ilastik")
    
    if sys.platform == 'darwin':
        if options.start_awt:
            import bioformats
            from javabridge import activate_awt
            activate_awt()
        
    if options.first_image_set is not None:
        if not options.first_image_set.isdigit():
            raise ValueError("The --first-image-set option takes a numeric argument")
        else:
            image_set_start = int(options.first_image_set)
    else:
        image_set_start = None
    
    image_set_numbers = None
    if options.last_image_set is not None:
        if not options.last_image_set.isdigit():
            raise ValueError("The --last-image-set option takes a numeric argument")
        else:
            image_set_end = int(options.last_image_set)
            if image_set_start is None:
                image_set_numbers = np.arange(1, image_set_end+1)
            else:
                image_set_numbers = np.arange(image_set_start, image_set_end+1)
    else:
        image_set_end = None
    
    if ((options.pipeline_filename is not None) and 
        (not options.pipeline_filename.lower().startswith('http'))):
        options.pipeline_filename = os.path.expanduser(options.pipeline_filename)
    from cellprofiler.pipeline import Pipeline, EXIT_STATUS, M_PIPELINE
    import cellprofiler.measurements as cpmeas
    import cellprofiler.preferences as cpprefs
    pipeline = Pipeline()
    initial_measurements = None
    try:
        if h5py.is_hdf5(options.pipeline_filename):
            initial_measurements = cpmeas.load_measurements(
                options.pipeline_filename,
                image_numbers=image_set_numbers)
    except Exception:
        logging.root.info("Failed to load measurements from pipeline")
    if initial_measurements is not None:
        pipeline_text = \
            initial_measurements.get_experiment_measurement(
                M_PIPELINE)
        pipeline_text = pipeline_text.encode('us-ascii')
        pipeline.load(StringIO(pipeline_text))
        if not pipeline.in_batch_mode():
            #
            # Need file list in order to call prepare_run
            #
            from cellprofiler.utilities.hdf5_dict import HDF5FileList
            with h5py.File(options.pipeline_filename, "r") as src:
                if HDF5FileList.has_file_list(src):
                    HDF5FileList.copy(
                        src, initial_measurements.hdf5_dict.hdf5_file)
    else:
        pipeline.load(options.pipeline_filename)
    if options.groups is not None:
        kvs = [x.split('=') for x in options.groups.split(',')]
        groups = dict(kvs)
    else:
        groups = None
    file_list = cpprefs.get_image_set_file()
    if file_list is not None:
        pipeline.read_file_list(file_list)
    #
    # Fixup CreateBatchFiles with any command-line input or output directories
    #
    if pipeline.in_batch_mode():
        create_batch_files = [
            m for m in pipeline.modules()
            if m.is_create_batch_module()]
        if len(create_batch_files) > 0:
            create_batch_files = create_batch_files[0]
            if options.output_directory is not None:
                create_batch_files.custom_output_directory.value = \
                    options.output_directory
            if options.image_directory is not None:
                create_batch_files.default_image_directory.value = \
                    options.image_directory
        
    use_hdf5 = len(args) > 0 and not args[0].lower().endswith(".mat")
    measurements = pipeline.run(
        image_set_start=image_set_start,
        image_set_end=image_set_end,
        grouping=groups,
        measurements_filename=None if not use_hdf5 else args[0],
        initial_measurements=initial_measurements)
    if len(args) > 0 and not use_hdf5:
        pipeline.save_measurements(args[0], measurements)
    if options.done_file is not None:
        if (measurements is not None and 
            measurements.has_feature(cpmeas.EXPERIMENT, EXIT_STATUS)):
            done_text = measurements.get_experiment_measurement(EXIT_STATUS)
        else:
            done_text = "Failure"
        with open(options.done_file, "wt") as fd:
            fd.write("%s\n" % done_text)
    if measurements is not None:
        measurements.close()
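run_pipeline_headless expects the image-set bounds as strings (it calls .isdigit() on them) and treats args[0], when present, as the measurements output file (HDF5 unless the name ends in .mat). A hypothetical invocation, with a bare namespace standing in for the parsed command-line options and invented file names:

from argparse import Namespace

# Only the attributes read by run_pipeline_headless; the values are illustrative.
options = Namespace(
    pipeline_filename="analysis.cppipe",  # pipeline file or HDF5 measurements file
    first_image_set="1",                  # strings, because .isdigit() is checked
    last_image_set="10",
    groups=None,                          # or e.g. "Metadata_Plate=P1,Metadata_Well=A01"
    output_directory=None,
    image_directory=None,
    done_file="done.txt",                 # written with the pipeline's exit status
    start_awt=False)

# args[0] does not end in ".mat", so measurements are written as HDF5.
run_pipeline_headless(options, ["measurements.h5"])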
Example #25
0
    def merge_files(destination, sources, force_headless=False):
        is_headless = force_headless or get_headless()
        if not is_headless:
            import wx
        if len(sources) == 0:
            return
        if not is_headless:
            progress = wx.ProgressDialog("Writing " + destination,
                                         "Loading " + sources[0],
                                         maximum = len(sources) * 4 + 1,
                                         style = wx.PD_CAN_ABORT | 
                                         wx.PD_APP_MODAL |
                                         wx.PD_ELAPSED_TIME |
                                         wx.PD_REMAINING_TIME)
        count = 0
        try:
            pipeline = cpp.Pipeline()
            has_error = [False]
            def callback(caller, event):
                if isinstance(event, cpp.LoadExceptionEvent):
                    wx.MessageBox(
                        message = "Could not load %s: %s" % (
                            sources[0], event.error),
                        caption = "Failed to load %s" % sources[0])
                    has_error[0] = True

            pipeline.add_listener(callback)

            # distributed processing passes a list of functions, not a
            # list of filenames.
            pipeline.load(sources[0]() if callable(sources[0]) else sources[0])
            if has_error[0]:
                return
            mdest = cpmeas.Measurements()
            image_set_count = 0
            for source in sources:
                if not is_headless:
                    count += 1
                    keep_going, skip = progress.Update(count, "Loading " + source)
                    if not keep_going:
                        return
                # distributed processing passes a list of functions
                msource = cpmeas.load_measurements(source() if callable(source) else source)
                source_image_count = 0
                for object_name in msource.get_object_names():
                    if object_name in mdest.get_object_names():
                        destfeatures = mdest.get_feature_names(object_name)
                    else:
                        destfeatures = []
                    for feature in msource.get_feature_names(object_name):
                        src_values = msource.get_all_measurements(object_name,
                                                                  feature)
                        if np.isscalar(src_values):
                            # For something like "Experiment", there is a single
                            # value. Keep the first value seen among all sources.
                            #
                            if feature not in destfeatures:
                                mdest.add_all_measurements(object_name, feature,
                                                           [src_values])
                        else:
                            source_image_count = max(source_image_count,
                                                     len(src_values))
                            if feature in destfeatures:
                                dest_values = mdest.get_all_measurements(object_name,
                                                                         feature)
                                if not isinstance(dest_values, list):
                                    dest_values = dest_values.tolist()
                            else:
                                dest_values = [None] * image_set_count

                            if isinstance(src_values, list):
                                dest_values += src_values
                            else:
                                dest_values += src_values.tolist()
                            mdest.add_all_measurements(object_name, feature, dest_values)
                    destset = set(destfeatures)
                    #
                    # These are features that are in the destination, but not
                    # in the source. We have to add Nones to the destination.
                    #
                    for feature in destset.difference(
                        msource.get_feature_names(object_name)):
                        dest_values = mdest.get_all_measurements(
                            object_name, feature)
                        dest_values += [None] * source_image_count
                #
                # The source may not have all of the objects that are in
                # the destination. Add blanks here.
                #
                destobjects = set(mdest.get_object_names())
                for object_name in destobjects.difference(
                    msource.get_object_names()):
                    for feature in mdest.get_feature_names(object_name):
                        dest_values = mdest.get_all_measurements(
                            object_name, feature)
                        dest_values += [None] * source_image_count
                        mdest.add_all_measurements(object_name, feature, dest_values)
                image_set_count += source_image_count
            mdest.image_set_number = image_set_count
            if not is_headless:
                keep_going, skip = progress.Update(count+1, "Saving to "+destination)
                if not keep_going:
                    return
            pipeline.save_measurements(destination, mdest)
        finally:
            if not is_headless:
                progress.Destroy()
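This variant does the bookkeeping with plain lists: every feature list must end up with one entry per merged image set, so features missing from a source (and objects missing entirely) are padded with None. A toy illustration of that padding, independent of the cpmeas API:

# Feature values per object, one entry per image set merged so far.
dest = {"Nuclei": {"Area": [10, 12]}}               # two image sets already merged
source = {"Nuclei": {"Area": [9], "Perim": [31]}}   # one new image set
n_dest, n_src = 2, 1

for obj, feats in source.items():
    dest_feats = dest.setdefault(obj, {})
    for name, values in feats.items():
        # A feature unseen so far gets None for the earlier image sets.
        dest_feats.setdefault(name, [None] * n_dest)
        dest_feats[name] += values
    # A feature present before but absent from this source gets None padding.
    for name in set(dest_feats) - set(feats):
        dest_feats[name] += [None] * n_src

print(dest)  # {'Nuclei': {'Area': [10, 12, 9], 'Perim': [None, None, 31]}}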
Example #26
0
    def merge_files(destination, sources, force_headless=False):
        is_headless = force_headless or get_headless()
        if not is_headless:
            import wx
        if len(sources) == 0:
            return
        if not is_headless:
            progress = wx.ProgressDialog("Writing " + destination,
                                         "Loading " + sources[0],
                                         maximum = len(sources) * 4 + 1,
                                         style = wx.PD_CAN_ABORT | 
                                         wx.PD_APP_MODAL |
                                         wx.PD_ELAPSED_TIME |
                                         wx.PD_REMAINING_TIME)
        count = 0
        try:
            pipeline = cpp.Pipeline()
            has_error = [False]
            def callback(caller, event):
                if isinstance(event, cpp.LoadExceptionEvent):
                    wx.MessageBox(
                        message = "Could not load %s: %s" % (
                            sources[0], event.error),
                        caption = "Failed to load %s" % sources[0])
                    has_error[0] = True

            pipeline.add_listener(callback)

            # distributed processing passes a list of functions, not a
            # list of filenames.
            pipeline.load(sources[0]() if callable(sources[0]) else sources[0])
            if has_error[0]:
                return
            mdest = cpmeas.Measurements()
            for source in sources:
                if not is_headless:
                    count += 1
                    keep_going, skip = progress.Update(count, "Loading " + source)
                    if not keep_going:
                        return
                # distributed processing passes a list of functions
                msource = cpmeas.load_measurements(source() if callable(source) else source)
                dest_image_numbers = mdest.get_image_numbers()
                source_image_numbers = msource.get_image_numbers()
                if len(dest_image_numbers) == 0:
                    offset_source_image_numbers = source_image_numbers
                else:
                    offset_source_image_numbers = (
                        np.max(dest_image_numbers) + source_image_numbers)
                for object_name in msource.get_object_names():
                    if object_name in mdest.get_object_names():
                        destfeatures = mdest.get_feature_names(object_name)
                    else:
                        destfeatures = []
                    for feature in msource.get_feature_names(object_name):
                        if object_name == cpmeas.EXPERIMENT:
                            if not mdest.has_feature(object_name, feature):
                                src_value = msource.get_experiment_measurement(
                                    feature)
                                mdest.add_experiment_measurement(feature,
                                                                 src_value)
                            continue
                        src_values = msource.get_measurement(
                            object_name,
                            feature,
                            image_set_number = source_image_numbers)
                        for image_number, value in zip(
                            offset_source_image_numbers, src_values):
                            mdest.add_measurement(
                                object_name, feature, value,
                                image_set_number = image_number)
                    destset = set(destfeatures)
            if not is_headless:
                keep_going, skip = progress.Update(count+1, "Saving to "+destination)
                if not keep_going:
                    return
            pipeline.save_measurements(destination, mdest)
        finally:
            if not is_headless:
                progress.Destroy()
 def test_13_01_load_matlab(self):
     data = ('eJztHEtzHEd5pCghVkhi3gmVQ5NylS2XJe3KWlueCpFkSbYV9CpLsRMss+nd'
             '6d3teF7MQ7tLTBXHcM2JOweuUFw4pfIDuFHAkRTwA3wCDqSK7+vu2XnsrPYh'
             'aYuAxrX2dvf36u/VX/f0env1YGv1NinNFcj26sFsjZuM7Jk0qDmepRM7uEbW'
             'PEYDZhDH1slBIySrYZ0UFkmxoC8s6cUlslAoFrXRnonN7Vfw33c07QX450X4'
             'TKqh51V7IvH5Cnwa1DZM5sP4lPay6l+Ez1roecwOJPI2o37oMQs6fG3PYzUG'
             'Y1XA0vZZEHC77ndEQP73pgbjL/untO+o9hfw2QmtCvN2a5sWrTOgrijDt9sM'
             'GK3a1Gz/hBkKO4LedowQZ6GefXrEdsPADYN7TnO3FjBbdB9wi+0H1As6+BIa'
             'u4B2xDLZHbrr3GPVwPHaqnud1WhoBpJ8PKi6BY0EimgfOI7p3wFPsKklhex0'
             '32OmmzIh6m9Ji/X3Qh/9XVD96lk5DfxCAv+rA9hvUsk9LN6EwttL4E3l4H03'
             'gXdRtTFKZgtLs3Hk3Jy7daNUKBRiPY4qT1J/efK8qKXlwfaafhh4of1E+H8f'
             '/G9m8LFdBXxmuYcVahxyqz5bXFwqRPM4bXpugt5EDr2kXhB+pQ//lzL8sS08'
             'nATo+bn2yPpl9CT9MsLbe2O4fBLJ8z34bLRc5nHMXFIkCbUTVk3GE2wFH/Lq'
             'cHy+rtqXUEbuMpPbrBx9Ac48KEMSCUI/w+feK8Pp/7VXjtf/319O6x/ba8w0'
             '9zwH1x+PRDLppBEErj4/32w256oA4SqIOcerTz9gns9hVVqY3n+wc58dcdEq'
             'FhcLi9PTWw41hAJ9/ZElsm3ZDi29+NQ/sstHCvPwcrF4/Vbp8PLTI+pxWgEg'
             'T9FBaEjVQO6p33Ca5Sa3DaepH3ghe2o7AZA9bJVuw2f9aYUG1UbZB80xnXoe'
             'bV+Jhq4RI2i77Psht4OlmcfTBB7MqwR7wdlIhRETBGWGDuT5ETdCahIuxI6B'
             'fWZCegaZiMWChmPoB6wVzG60aDUgFvIWoHJpIU5N4RNuE0arDVL3nNBd1q8L'
             'qAPBuAEfoAFfaCBarFU1QxAjwm3AgoQEqo5lgZ7WHQJzJqHPBBG1qBFqmsQP'
             'KzXHNEChpMmDBuAgPSky0JNjy/qOY0vcTRuWIcmGiGLDdKoUJ6erFUlB3BGI'
             'h62bax3UtQarPlGoPqy0QNwjFvd9WAsJfDVC1+RATBL2l/X3lRbvogaiqVXa'
             'qEZq0ICiWAJgQ06fVJkXUJiCwlej+y6r8lqMBsPMNHy0n9AtkNQloJh1UiUA'
             'QqWyJITMMlUntAO9KA2StIPP8g1ArlSpz2Z9Zvs84EdsRvdhpeeGoLDnYCcA'
             'genBAL7SUMr8itlGK/CE13Rm4jkWaTagPEqY6D6rhyb1wClcj/kYDFLAGvio'
             'H+MqUwsjYqGg/+jK8t5bWDuyt+euzpSx9RBi9m2MhtXZPQwI/FqYvYVfP1r4'
             '6UzZR6B9DhiJkZm0p3pDidNRPnFp0NDnropYVM88kkeW60rI3MH7oY1jl5TT'
             'UdtmZspm0p+Qm+UccVCBh3VSx1/EMPqkCtdNO2CeycCiRiZSq5K4TyDnK0PJ'
             'MMXsJRhwEQzUj/wC3NypfAheBuw24zyxAwJI48t0IsH19Z3V/GFJQ5fLijK6'
             '8HwnDDDx+ihdCjaeniLGIljFa0tAC5l2FZGUAqN8qrj5VWri9ALp01G4Tm8a'
             'sPRBtO3BIki99q5knsrhC905/HrxuBx+65RTuIrz2D652mbElXOINKgSPpcz'
             '5JD0E/oHd4fcZRKDA3Yg3UPhXUPfdnkLlBjaHOhc2eb2tW3amtGLhWuLBYG/'
             'zkGjXsdaaBwfOAk5OjQ9atdZx5AHIBqIZDFPVj7EtzCjRxQwnRObUQ8yJsRf'
             'nXldrpBlGjhhtYGej1wrjmfIeQhFoYrilJxQYdCAsG5AzCZCZjfwQ3LXdCrU'
             'lKJGMBCHnqdWwxrFTYvyqC2nCcxgc0hCF6OpAvEK6UEkC4WrF+YK4rlWVF/k'
             'auZCVdEC+QIRydUol3KVq8HRIAoq7Xj2QEcy3RbSohIN7uOWLOR+g8BaYrlx'
             '4Ij4RydvZ3E82iRi1ceZy7CrsKDJmN2Hxj6H1RdE9C3HCYTCIQsHGF1ySvug'
             'A8yVYnE1oUjA6cmsST2Yk+n4oCJo2zIrWNzmVmjhgg5qNMRsKGyY9ZvRAshQ'
             'r6iEUKy2CObNAgvHDIW+uCqfRVZOsY1t3iNxRJGeTBpQ95gEbCbLmDhiYhtE'
             'RFfDwAHTYeyYbQJ/V0NcgYjfQ0X9MXspo2Ob/NltUxuLt5S3JV29wm3MBXLO'
             'ibW2K+2KaMmZsYq5XmKnQkmZCftEKMmwioXdwCWJbFHXpFVOhbvfpSEsrjQd'
             'LyUZfU1ntmpSXyw/OMxUMxm6yzqAEdEfaQQViWZAMZqM1xtYEYosjeoUtKD0'
             'tAPPcdvL+sMshJwuCFVX6c+PFnmLG0Zi9WgTJY4jKxIHSgYP4z/iUaHVJ7Jn'
             'Wb/TGe2rzsiHovRWU+Ty9Zb1L6Vj564a6aRhXWr1Hp5kidI15sBrWIpjlQPl'
             'n92pEzrD8dKx5sA3O2TK91rCZbsw9FKhkM23Vnw4JjUW+Qwm/Ngz4zwVKwUq'
             'LRfrTyKXUqEgWYaIr6nclIHF1BTtDxvacPvJkc43HMs1QduI/6/p4fbJl1X7'
             '00mkA3VfuXv/nXlEzVeWld0xYJ11rHwH0oEZN9MMttdL6xxqu6AMNUVvavdE'
             'xBwP05Ft0zZY6ziwWJgoEh+oQExJh1tSzOR92O5B9T0AWMx0P7R2axsiGUA1'
             'mFHIRotVxUKDR6LlQjHe3aeoPeRG0OirjwTTXY/Xexkhy3QhvzTV5Fnuhuc5'
             'Xm8gTXv3/lZ/yTRtH6INkkI/0BTTXurA56TxNtx57LNlxP9gSH6jnnuOY15/'
             '+vG/v3hV+7WY18+G5NfvnJho6byF7VK1xJboYmmpslC7sUhLxVKhyG5cv1Uq'
             'FW4slYzFceh3S+GdNZ9x2lFL4J/2efK0lrYjtuUBzZxr12M6rSH5nvY5+jj0'
             '/MmFvzyr/+H1z8Zm19//9aPSP341lrzz7hj1mMw7Y9Hje5+89ed//nYseozw'
             'Ph6Sz3va8fFwOQF/UbXxeFKfn59f0+cxLuYhLuajuJhPBOlY89Dnzso49fzZ'
             'xHD177dUe1mL3liXZR0TFbhb6sy+vMZwk1N+HzGzne91+J/1PMe5jvxwZueP'
             'P59aWxkXv998+2+//ME7q4Lfs+dGv6ewhxto3JtpqUe9dJHlZNftgEEvDUT7'
             '8DXHdLxtqm4IbNEKM9Nd+OwHHnc7bx3j7ifcFfVsuoZd5z5sttsgIHtAzTBC'
             'uAPb3+7ZRNAPxZYzReg06toV7fj8czGBf1F9ovfsh+rKx5fhfXuhD72pFL0p'
             're7R9iB4z6XwntM+ZMFA/LJ478M+ZxC8yRTepLbjnCy/DIs3rXX7XRZvsgsv'
             'TQ/xP780XNy/ptrP4PNAvRQRwZOIiKh/0645+KotGpJuuhNd/Ok8MvP7u7UI'
             'UQ33yCsd+tF7eYWv6Gd7O3zxtUyKzn7Dad7B92sZ+rfxjQ3eVsgMoL5+d7F3'
             'Xkb9vZnoHyR+vqal4wfbXe/rE/SG9Wfc64+aX7J3AQY5L3shQwfb0dnHIPJP'
             'pPAntOvaYOd0z2f4Ylu8wdJG32fFVxMG1/8geWGQvIdnpSfh1+984PXMfLGd'
             'd0viKQqiDT7/UfNpL7yVPnh5960Sb/gGlntUu2X9tXhCfqPki653j2r8JPfM'
             'hvVXfG95En4rffhdyMwb253acAi5e9nrtPiPkh/lVnE0vykWztZPs3g3B8Qb'
             'NZ/1ygONPng3M3rFdq9LQ49WZ/cePyrM3nqcuSgk+mYG8odXM/ywnXnbPNC5'
             'wqUMHWzPXX10eDj/OHmZqNMRXSDSBrTDqPl4VH85y3r6rNeNxGUqbVT9Qh48'
             'UZ0zalyNknei1zrjtE9Uj45q17OuP7vtWTqRflb64OX5YeKyg+gfpC74RoYO'
             'tnOuzQ1ML6/O6LpFocbPct3Nq8vjaxaD8z/tuvSs6qqsnko5eMPwG+1+g7z/'
             'cZbzzOq1VOjOm2dZr+bVI6dZr+b5bXybJV/u05xvrzrx48TvOiYyeHnnF+PU'
             'jzjsEFeVT85/lPwmLzbJe/fcNpiboHcadjrHO8c7xzvHO8f778VbSeANek4e'
             'r1ty2fgyzfccb7z+c9Z1z5fV7/9f67WVPvM+zzfneOd4/7t4FydivOx+PLuP'
             'R/gPEnzy8sVVLZ0vsJ363w3kj139OfwFrrxdMJe94o98Wn343M7wud2Lj/pd'
             'UVv9YFX9eGiu528ZcvR5IYd/Ui+T8OeNN0/nHtawfF+60C3vS33wpoa4p5q1'
             '/5Vj4KPnJPDDzn9iQuL9Ysh5HMcnK9fkCfH+A86LP/E=')
     data = zlib.decompress(base64.b64decode(data))
     fid, name = tempfile.mkstemp(".mat")
     m = None
     try:
         fd = os.fdopen(fid, "wb")
         fd.write(data)
         fd.close()
         m = cpmeas.load_measurements(name)
         self.assertEqual(tuple(m.get_image_numbers()), (1, ))
         self.assertEqual(
             m.get_measurement(cpmeas.IMAGE,
                               "Count_Nuclei",
                               image_set_number=1), 1)
         values = m.get_measurement("Nuclei", "Location_Center_X", 1)
         self.assertEqual(len(values), 1)
         self.assertAlmostEqual(values[0], 34.580433355219959)
     finally:
         gc.collect()
         if m is not None:
             del m
         os.unlink(name)
Example #28
0
    def merge_files(destination, sources, force_headless=False):
        is_headless = force_headless or get_headless()
        if not is_headless:
            import wx
        if len(sources) == 0:
            return
        if not is_headless:
            progress = wx.ProgressDialog("Writing " + destination,
                                         "Loading " + sources[0],
                                         maximum = len(sources) * 4 + 1,
                                         style = wx.PD_CAN_ABORT | 
                                         wx.PD_APP_MODAL |
                                         wx.PD_ELAPSED_TIME |
                                         wx.PD_REMAINING_TIME)
        count = 0
        try:
            pipeline = cpp.Pipeline()
            has_error = [False]
            def callback(caller, event):
                if isinstance(event, cpp.LoadExceptionEvent):
                    wx.MessageBox(
                        message = "Could not load %s: %s" % (
                            sources[0], event.error),
                        caption = "Failed to load %s" % sources[0])
                    has_error[0] = True

            pipeline.add_listener(callback)

            # distributed processing passes a list of functions, not a
            # list of filenames.
            pipeline.load(sources[0]() if callable(sources[0]) else sources[0])
            if has_error[0]:
                return
            mdest = cpmeas.Measurements()
            for source in sources:
                if not is_headless:
                    count += 1
                    keep_going, skip = progress.Update(count, "Loading " + source)
                    if not keep_going:
                        return
                # distributed processing passes a list of functions
                msource = cpmeas.load_measurements(source() if callable(source) else source)
                dest_image_numbers = mdest.get_image_numbers()
                source_image_numbers = msource.get_image_numbers()
                if len(dest_image_numbers) == 0:
                    offset_source_image_numbers = source_image_numbers
                else:
                    offset_source_image_numbers = (
                        np.max(dest_image_numbers) + source_image_numbers)
                for object_name in msource.get_object_names():
                    if object_name in mdest.get_object_names():
                        destfeatures = mdest.get_feature_names(object_name)
                    else:
                        destfeatures = []
                    for feature in msource.get_feature_names(object_name):
                        if object_name == cpmeas.EXPERIMENT:
                            if not mdest.has_feature(object_name, feature):
                                src_value = msource.get_experiment_measurement(
                                    feature)
                                mdest.add_experiment_measurement(feature,
                                                                 src_value)
                            continue
                        src_values = msource.get_measurement(
                            object_name,
                            feature,
                            image_set_number = source_image_numbers)
                        for image_number, value in zip(
                            offset_source_image_numbers, src_values):
                            mdest.add_measurement(
                                object_name, feature, value,
                                image_set_number = image_number)
                    destset = set(destfeatures)
            if not is_headless:
                keep_going, skip = progress.Update(count+1, "Saving to "+destination)
                if not keep_going:
                    return
            pipeline.save_measurements(destination, mdest)
        finally:
            if not is_headless:
                progress.Destroy()
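With the interface shown in these examples, merging is a single call with a destination path and a list of per-batch measurement files; a hypothetical headless invocation (file names invented for illustration):

# Combine per-batch measurement files without creating any wx dialogs.
merge_files("all_measurements.mat",
            ["batch_001.mat", "batch_002.mat", "batch_003.mat"],
            force_headless=True)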