def test_importing_rgb_does_not_set_large_format(self):
     # Round-trip a float image and an RGBA uint8 image through data
     # elements with the explicit "large_format" flag removed; the importer
     # must re-infer the flag from the dtype: float -> large, uint8 -> not.
     float_item = DataItem.DataItem(numpy.zeros((8, 8, 4), dtype=float))
     with contextlib.closing(float_item):
         rgb_item = DataItem.DataItem(numpy.zeros((8, 8, 4), dtype=numpy.uint8))
         with contextlib.closing(rgb_item):
             float_element = ImportExportManager.create_data_element_from_data_item(float_item, include_data=True)
             rgb_element = ImportExportManager.create_data_element_from_data_item(rgb_item, include_data=True)
             # drop the explicit flag so the importer has to decide on its own
             del float_element["large_format"]
             del rgb_element["large_format"]
             with contextlib.closing(ImportExportManager.create_data_item_from_data_element(float_element)) as imported_float:
                 with contextlib.closing(ImportExportManager.create_data_item_from_data_element(rgb_element)) as imported_rgb:
                     self.assertTrue(imported_float.large_format)
                     self.assertFalse(imported_rgb.large_format)
Exemplo n.º 2
0
 def test_time_zone_in_extended_data_to_data_element_to_data_item_conversion(
         self):
     """Round-trip xdata -> data element -> data item and verify that the
     time-zone metadata and the timestamp survive the whole path.

     Fixes: ``numpy.int`` was removed in NumPy 1.24 (plain ``int`` selects
     the same default integer dtype), and the created data item is now
     closed like in the sibling tests instead of being leaked.
     """
     data = numpy.ones((8, 6), int)
     metadata = {
         "description": {
             "time_zone": {
                 "tz": "+0300",
                 "dst": "+60"
             }
         },
         "hardware_source": {
             "one": 1,
             "two": "b"
         }
     }
     timestamp = datetime.datetime(2013, 11, 18, 14, 5, 4, 1)
     xdata = DataAndMetadata.new_data_and_metadata(data,
                                                   metadata=metadata,
                                                   timestamp=timestamp)
     data_element = ImportExportManager.create_data_element_from_extended_data(
         xdata)
     with contextlib.closing(
             ImportExportManager.create_data_item_from_data_element(
                 data_element)) as data_item:
         self.assertEqual(data_item.metadata["description"]["time_zone"]["tz"],
                          "+0300")
         self.assertEqual(data_item.metadata["description"]["time_zone"]["dst"],
                          "+60")
         self.assertEqual("2013-11-18 14:05:04.000001", str(data_item.created))
Exemplo n.º 3
0
 def test_data_element_date_gets_set_as_data_item_created_date(self):
     """The data element's ``datetime_modified`` must become the data item's
     created date, adjusted for the element's time-zone offset.

     Fixes: the created data item is now closed (the original leaked it,
     unlike the sibling tests), and the duplicated expected-datetime
     construction is factored into a single base value.
     """
     data_element = dict()
     data_element["version"] = 1
     data_element["data"] = numpy.zeros((16, 16), dtype=numpy.double)
     data_element["datetime_modified"] = {
         'tz': '+0300',
         'dst': '+60',
         'local_datetime': '2015-06-10T19:31:52.780511'
     }
     with contextlib.closing(
             ImportExportManager.create_data_item_from_data_element(
                 data_element)) as data_item:
         self.assertIsNotNone(data_item.created)
         self.assertEqual(data_item.timezone_offset, "+0300")
         local_offset_seconds = int(
             round((datetime.datetime.now() -
                    datetime.datetime.utcnow()).total_seconds()))
         # UTC instant: local 19:31:52 at +0300 -> 16:31:52 UTC.
         base = datetime.datetime(year=2015,
                                  month=6,
                                  day=10,
                                  hour=19 - 3,
                                  minute=31,
                                  second=52,
                                  microsecond=780511)
         # check both matches for DST: the local offset may or may not
         # already include a one-hour daylight-saving shift
         match1 = base + datetime.timedelta(seconds=local_offset_seconds)
         match2 = base + datetime.timedelta(seconds=local_offset_seconds + 3600)
         self.assertIn(data_item.created_local, (match1, match2))
Exemplo n.º 4
0
        def acquire_stack_and_sum(number_frames, energy_offset_per_frame,
                                  document_controller, final_layout_fn):
            """Acquire a spectrum stack, align/sum it, and queue UI updates.

            Acquires ``number_frames`` frames via ``acquire_series``, aligns
            and sums them with ``align_stack``, builds a data element for the
            summed image with calibrations taken from the stack, and queues
            display tasks (panels and a line profile) onto the main thread.

            Parameters:
                number_frames: number of frames to acquire.
                energy_offset_per_frame: energy shift applied per frame,
                    passed through to ``acquire_series``.
                document_controller: controller used for task context and
                    for queueing UI work to the main thread.
                final_layout_fn: callable queued to restore the workspace
                    layout once acquisition is done.
            """
            # grab the document model and workspace for convenience
            with document_controller.create_task_context_manager(
                    _("Multiple Shift EELS Acquire"), "table") as task:
                # acquire the stack. it will be added to the document by queueing to the main thread at the end of this method.
                stack_data_item = acquire_series(number_frames,
                                                 energy_offset_per_frame, task)
                stack_data_item.title = _("Spectrum Stack")

                # align and sum the stack
                data_element = dict()
                summed_image, shifts = align_stack(stack_data_item.data, task)
                # add the summed image to Swift
                data_element["data"] = summed_image
                data_element["title"] = "Aligned and summed spectra"
                # strip off the first dimension that we sum over
                for dimensional_calibration in stack_data_item.dimensional_calibrations[
                        1:]:
                    data_element.setdefault(
                        "spatial_calibrations", list()).append({
                            "origin":
                            dimensional_calibration.offset,  # TODO: fix me
                            "scale":
                            dimensional_calibration.scale,
                            "units":
                            dimensional_calibration.units
                        })
                # set the energy dispersive calibration so that the ZLP is at zero eV
                # NOTE(review): indexes calibration [1] — assumes the summed
                # image has at least two remaining dimensions; confirm.
                zlp_position_pixels = numpy.sum(summed_image, axis=0).argmax()
                zlp_position_calibrated_units = -zlp_position_pixels * data_element[
                    "spatial_calibrations"][1]["scale"]
                data_element["spatial_calibrations"][1][
                    "offset"] = zlp_position_calibrated_units
                sum_data_item = ImportExportManager.create_data_item_from_data_element(
                    data_element)

                # locate the dispersive peak edges from the derivative of the
                # summed profile; midpoint/width are expressed as fractions of
                # the profile length for the line-profile task below
                dispersive_sum = numpy.sum(summed_image, axis=1)
                differential = numpy.diff(dispersive_sum)
                top = numpy.argmax(differential)
                bottom = numpy.argmin(differential)
                _midpoint = numpy.mean([bottom, top]) / dispersive_sum.shape[0]
                _integration_width = float(
                    numpy.abs(bottom - top)) / dispersive_sum.shape[
                        0]  #* data_element["spatial_calibrations"][0]["scale"]

                # queue all UI work (layout, panels, line profile) to the
                # main thread
                document_controller.queue_task(final_layout_fn)
                document_controller.queue_task(
                    functools.partial(show_in_panel, stack_data_item,
                                      document_controller,
                                      "multiple_shift_eels_stack"))
                document_controller.queue_task(
                    functools.partial(
                        show_in_panel, sum_data_item, document_controller,
                        "multiple_shift_eels_aligned_summed_stack"))
                document_controller.queue_task(
                    functools.partial(add_line_profile, sum_data_item,
                                      document_controller,
                                      "multiple_shift_eels_spectrum",
                                      _midpoint, _integration_width))
 def test_data_item_to_data_element_and_back_keeps_large_format_flag(self):
     # A large-format data item must export its flag into the data element
     # and re-import with the flag still set.
     source_item = DataItem.DataItem(numpy.zeros((4, 4, 4)), large_format=True)
     with contextlib.closing(source_item):
         element = ImportExportManager.create_data_element_from_data_item(source_item, include_data=True)
         self.assertTrue(element.get("large_format"))
         round_tripped = ImportExportManager.create_data_item_from_data_element(element)
         with contextlib.closing(round_tripped):
             self.assertTrue(round_tripped.large_format)
 def test_data_element_to_data_item_includes_time_zone(self):
     # Time-zone info in "datetime_modified" should be recorded on the item
     # and the created time stored in UTC (19:31 at +0300 -> 16:31 UTC).
     data_element = {
         "version": 1,
         "data": numpy.zeros((16, 16), dtype=numpy.double),
         "datetime_modified": {'tz': '+0300', 'dst': '+60', 'local_datetime': '2015-06-10T19:31:52.780511'},
     }
     new_item = ImportExportManager.create_data_item_from_data_element(data_element)
     with contextlib.closing(new_item):
         self.assertEqual("+0300", new_item.timezone_offset)
         self.assertEqual("2015-06-10 16:31:52.780511", str(new_item.created))
 def test_convert_data_element_records_time_zone_in_data_item_metadata(self):
     # The tz recorded on the data element at creation time must end up on
     # the resulting data item.
     now_item = Utility.get_current_datetime_item()
     data_element = {
         "version": 1,
         "data": numpy.zeros((16, 16), dtype=numpy.double),
         "datetime_modified": now_item,
     }
     with contextlib.closing(ImportExportManager.create_data_item_from_data_element(data_element)) as converted:
         self.assertIsNotNone(converted.created)
         self.assertEqual(now_item["tz"], converted.timezone_offset)
 def test_data_element_with_uuid_assigns_uuid_to_data_item(self):
     # An explicit uuid on the data element must be adopted by the item.
     expected_uuid = uuid.uuid4()
     data_element = {
         "version": 1,
         "data": numpy.zeros((16, 16), dtype=numpy.double),
         "uuid": str(expected_uuid),
     }
     with contextlib.closing(ImportExportManager.create_data_item_from_data_element(data_element)) as new_item:
         self.assertEqual(expected_uuid, new_item.uuid)
 def test_convert_data_element_sets_timezone_and_timezone_offset_if_present(self):
     # Both the named timezone and the numeric offset should transfer from
     # the data element to the data item.
     data_element = {
         "version": 1,
         "data": numpy.zeros((16, 16), dtype=numpy.double),
         "datetime_modified": {'tz': '+0300', 'dst': '+60', 'local_datetime': '2015-06-10T19:31:52.780511', 'timezone': 'Europe/Athens'},
     }
     new_item = ImportExportManager.create_data_item_from_data_element(data_element)
     with contextlib.closing(new_item):
         self.assertIsNotNone(new_item.created)
         self.assertEqual("Europe/Athens", new_item.timezone)
         self.assertEqual("+0300", new_item.timezone_offset)
Exemplo n.º 10
0
 def test_creating_data_element_with_sequence_and_implicit_datum_size_data_makes_correct_data_item(self):
     # With only is_sequence given, the remaining two dimensions should be
     # treated as datum dimensions (no collection dimensions) on both the
     # data item and its xdata.
     data_element = {
         "version": 1,
         "data": numpy.zeros((4, 16, 16), dtype=numpy.double),
         "is_sequence": True,
     }
     with contextlib.closing(ImportExportManager.create_data_item_from_data_element(data_element)) as seq_item:
         for descriptor in (seq_item, seq_item.xdata):
             self.assertEqual(True, descriptor.is_sequence)
             self.assertEqual(0, descriptor.collection_dimension_count)
             self.assertEqual(2, descriptor.datum_dimension_count)
Exemplo n.º 11
0
 def test_creating_data_element_with_sequence_data_makes_correct_data_item(self):
     """Explicit sequence/collection/datum counts on the data element must be
     reflected on the created data item and its xdata.

     Fixes: the created data item is now closed (the original leaked it,
     unlike the otherwise-parallel implicit-datum-size sibling test).
     """
     data_element = dict()
     data_element["version"] = 1
     data_element["data"] = numpy.zeros((4, 16, 16), dtype=numpy.double)
     data_element["is_sequence"] = True
     data_element["collection_dimension_count"] = 0
     data_element["datum_dimension_count"] = 2
     with contextlib.closing(ImportExportManager.create_data_item_from_data_element(data_element)) as data_item:
         self.assertEqual(data_item.is_sequence, True)
         self.assertEqual(data_item.collection_dimension_count, 0)
         self.assertEqual(data_item.datum_dimension_count, 2)
         self.assertEqual(data_item.xdata.is_sequence, True)
         self.assertEqual(data_item.xdata.collection_dimension_count, 0)
         self.assertEqual(data_item.xdata.datum_dimension_count, 2)
Exemplo n.º 12
0
 def test_sub_area_size_change(self):
     # Updating a data item from a data element that carries a sub_area must
     # still adopt the element's full data shape and dtype.
     with TestContext.create_memory_context() as test_context:
         document_model = test_context.create_document_model()
         data_element = {"version": 1, "data": numpy.zeros((16, 16), dtype=numpy.double)}
         data_item = ImportExportManager.create_data_item_from_data_element(data_element)
         document_model.append_data_item(data_item)
         self.assertEqual((16, 16), data_item.dimensional_shape)
         self.assertEqual(numpy.double, data_item.data_dtype)
         # exercise the update twice with equivalent float64 dtype spellings
         for new_dtype in (numpy.double, float):
             data_element["data"] = numpy.zeros((8, 8), dtype=new_dtype)
             data_element["sub_area"] = ((0, 0), (4, 8))
             ImportExportManager.update_data_item_from_data_element(data_item, data_element)
             self.assertEqual((8, 8), data_item.dimensional_shape)
             self.assertEqual(new_dtype, data_item.data_dtype)
Exemplo n.º 13
0
 def test_sub_area_size_change(self):
     """Updating a data item from a data element with a sub_area must adopt
     the element's full data shape and dtype.

     Fixes: ``numpy.float`` was removed in NumPy 1.24; the builtin ``float``
     selects the same float64 dtype and is used instead.
     """
     document_model = DocumentModel.DocumentModel()
     with contextlib.closing(document_model):
         data_element = dict()
         data_element["version"] = 1
         data_element["data"] = numpy.zeros((16, 16), dtype=numpy.double)
         data_item = ImportExportManager.create_data_item_from_data_element(data_element)
         document_model.append_data_item(data_item)
         self.assertEqual(data_item.dimensional_shape, (16, 16))
         self.assertEqual(data_item.data_dtype, numpy.double)
         data_element["data"] = numpy.zeros((8, 8), dtype=numpy.double)
         data_element["sub_area"] = ((0,0), (4, 8))
         ImportExportManager.update_data_item_from_data_element(data_item, data_element)
         self.assertEqual(data_item.dimensional_shape, (8, 8))
         self.assertEqual(data_item.data_dtype, numpy.double)
         data_element["data"] = numpy.zeros((8, 8), dtype=float)
         data_element["sub_area"] = ((0,0), (4, 8))
         ImportExportManager.update_data_item_from_data_element(data_item, data_element)
         self.assertEqual(data_item.dimensional_shape, (8, 8))
         self.assertEqual(data_item.data_dtype, float)
Exemplo n.º 14
0
        def acquire_stack_and_sum(
                number_frames: int, energy_offset: float,
                dark_ref_enabled: bool,
                dark_ref_data: typing.Optional[_NDArray], cross_cor: bool,
                document_controller: DocumentController.DocumentController,
                final_layout_fn: typing.Callable[[], None]) -> None:
            """Acquire a spectrum stack, align/sum it, and queue UI updates.

            Acquires ``number_frames`` frames via ``acquire_series``,
            optionally aligns them by cross-correlation before summing,
            builds a data element for the summed image with calibrations
            taken from the stack, and queues display tasks (panels and a
            line profile) onto the main thread.

            Fixes: previously, when ``stack_data_item.data`` was ``None``,
            ``summed_image`` was never assigned but was still referenced
            below the guard, raising ``NameError``; we now bail out early
            (after restoring the layout) in that case.
            """
            # grab the document model and workspace for convenience
            with document_controller.create_task_context_manager(
                    _("Multiple Shift EELS Acquire"), "table") as task:
                # acquire the stack. it will be added to the document by
                # queueing to the main thread at the end of this method.
                stack_data_item = acquire_series(number_frames, energy_offset,
                                                 dark_ref_enabled,
                                                 dark_ref_data, task)
                stack_data_item.title = _("Spectrum Stack")

                stack_data = stack_data_item.data
                if stack_data is None:
                    # nothing was acquired; restore the layout and stop
                    # instead of falling through to an unbound summed_image
                    document_controller.queue_task(final_layout_fn)
                    return

                # align and sum the stack
                data_element: typing.Dict[str, typing.Any] = dict()
                if cross_cor:
                    # Apply cross-correlation between subsequent acquired
                    # images and align the image stack
                    summed_image, _1 = align_stack(stack_data, task)
                else:
                    # If user does not desire the cross-correlation to happen
                    # then simply sum the stack (eg, when acquiring dark data)
                    summed_image = numpy.sum(stack_data, axis=0)
                # add the summed image to Swift
                data_element["data"] = summed_image
                data_element["title"] = "Aligned and summed spectra"
                # strip off the first dimension that we sum over
                for dimensional_calibration in (
                        stack_data_item.dimensional_calibrations[1:]):
                    data_element.setdefault(
                        "spatial_calibrations", list()).append({
                            "origin":
                            dimensional_calibration.offset,  # TODO: fix me
                            "scale":
                            dimensional_calibration.scale,
                            "units":
                            dimensional_calibration.units
                        })
                # set the energy dispersive calibration so that the ZLP is at
                # zero eV
                zlp_position_pixels = numpy.sum(summed_image, axis=0).argmax()
                zlp_position_calibrated_units = (
                    -zlp_position_pixels *
                    data_element["spatial_calibrations"][1]["scale"])
                data_element["spatial_calibrations"][1]["offset"] = (
                    zlp_position_calibrated_units)
                sum_data_item = (
                    ImportExportManager.create_data_item_from_data_element(
                        data_element))

                # locate the dispersive peak edges from the derivative of the
                # summed profile; midpoint/width are fractions of the profile
                # length, consumed by the add_line_profile task below
                dispersive_sum = numpy.sum(summed_image, axis=1)
                differential = numpy.diff(dispersive_sum)  # type: ignore
                top = numpy.argmax(differential)
                bottom = numpy.argmin(differential)
                _midpoint = numpy.mean([bottom, top]) / dispersive_sum.shape[0]
                _integration_width = (float(numpy.abs(bottom - top)) /
                                      dispersive_sum.shape[0])

                # queue all UI work (layout, panels, line profile) to the
                # main thread
                document_controller.queue_task(final_layout_fn)
                document_controller.queue_task(
                    functools.partial(show_in_panel, stack_data_item,
                                      document_controller,
                                      "multiple_shift_eels_stack"))
                document_controller.queue_task(
                    functools.partial(
                        show_in_panel, sum_data_item, document_controller,
                        "multiple_shift_eels_aligned_summed_stack"))
                document_controller.queue_task(
                    functools.partial(add_line_profile, sum_data_item,
                                      document_controller,
                                      "multiple_shift_eels_spectrum",
                                      _midpoint, _integration_width))