Example 1
def test_image_summary_data():
    """ test_image_summary_data """
    dataset = get_dataset()

    test_data_list = []
    i = 1
    for next_element in dataset:
        tag = "image_" + str(i) + "[:Image]"
        dct = {}
        dct["name"] = tag
        dct["data"] = Tensor(next_element[0])
        test_data_list.append(dct)
        i += 1

    log.debug("begin test_image_summary_sample")
    # step 0: create the thread
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_IMAGE") as test_writer:

        # step 1: cache the test data and create the event
        _cache_summary_tensor_data(test_data_list)
        test_writer.record(1)

        log.debug("finished test_image_summary_sample")
Example 2
def test_scalar_summary_sample_with_shape_1():
    """ test_scalar_summary_sample_with_shape_1 """
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR") as test_writer:
        for i in range(1, 100):
            test_data = get_test_data_shape_1(i)
            _cache_summary_tensor_data(test_data)
            test_writer.record(i)
Example 3
def test_image_summary_train():
    """ test_image_summary_train """
    dataset = get_dataset()
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_IMAGE") as test_writer:
        model = get_model()
        callback = ImageSummaryCallback(test_writer)
        model.train(2, dataset, callbacks=[callback])
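Examples 3 and 21 pass an ImageSummaryCallback that is never defined in these snippets. Below is a minimal sketch of what such a callback might look like, assuming MindSpore's standard Callback API; the class body is an illustration, not the original implementation.

from mindspore.train.callback import Callback

class ImageSummaryCallback(Callback):
    """Record cached summary data at the end of every training step."""

    def __init__(self, summary_record):
        super().__init__()
        self._summary_record = summary_record

    def step_end(self, run_context):
        # cur_step_num is the 1-based global step counter maintained by Model.train
        cb_params = run_context.original_args()
        self._summary_record.record(cb_params.cur_step_num)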
Example 4

    def run(self):
        """
        Run the explain job and save the result as a summary in summary_dir.

        Note:
            Users should call register_saliency() once before running this function.

        Raises:
            ValueError: Raised for any problem with the data or settings' values.
            TypeError: Raised for any problem with the data or settings' types.
            RuntimeError: Raised for any runtime problem.
        """
        self._verify_data_n_settings(check_all=True)

        with SummaryRecord(self._summary_dir) as summary:
            print("Start running and writing......")
            begin = time()

            self._summary_timestamp = self._extract_timestamp(
                summary.event_file_name)
            if self._summary_timestamp is None:
                raise RuntimeError(
                    "Cannot extract timestamp from summary filename!"
                    " It should contain a timestamp after 'summary.'.")

            self._save_metadata(summary)

            imageid_labels = self._run_inference(summary)
            if self._is_saliency_registered:
                self._run_saliency(summary, imageid_labels)

            print("Finish running and writing. Total time elapsed: {:.3f} s".
                  format(time() - begin))
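This run method appears to belong to an explain-job runner such as MindSpore's ImageClassificationRunner. A hedged usage sketch follows; the constructor arguments, dataset, and explainer choice are assumptions for illustration.

from mindspore import nn
from mindspore.explainer import ImageClassificationRunner
from mindspore.explainer.explanation import GradCAM

# Assumed setup: `net` is a trained classifier, `dataset` yields image batches,
# and `classes` is the list of label names.
runner = ImageClassificationRunner(summary_dir="./summary_dir",
                                   data=(dataset, classes),
                                   network=net,
                                   activation_fn=nn.Sigmoid())
runner.register_saliency(explainers=[GradCAM(net)])  # must be called once before run()
runner.run()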
Example 5
    def test_raise_exception_with_type_error(self, raise_exception):
        summary_dir = tempfile.mkdtemp(dir=self.base_summary_dir)
        with pytest.raises(TypeError) as exc:
            with SummaryRecord(log_dir=summary_dir,
                               raise_exception=raise_exception):
                pass

        assert "raise_exception" in str(exc.value)
Example 6

    def run_case(cls, net):
        """ run_case """
        net.set_train()
        steps = 10
        with SummaryRecord(cls.summary_dir) as test_writer:
            for i in range(1, steps):
                net()
                test_writer.record(i)
Example 7
def test_image_summary_sample():
    """ test_image_summary_sample """
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_IMAGE") as test_writer:

        for i in range(1, 5):
            test_data = get_test_data(i)
            _cache_summary_tensor_data(test_data)
            test_writer.record(i)
            test_writer.flush()
Example 8

def test_summaryrecord_input_invalid_type_dir():
    log.debug("begin test_summaryrecord_input_invalid_type_dir")
    # step 0: create the thread
    try:
        SummaryRecord(32)
    except:
        assert True
    else:
        assert False
    log.debug("finished test_summaryrecord_input_invalid_type_dir")
Example 9

def test_multi_layer_directory():
    log.debug("begin test_multi_layer_directory")
    # step 0: create the thread
    try:
        SummaryRecord("./test_temp_summary_event_file/test/t1/")
    except:
        assert False
    else:
        assert True
    log.debug("finished test_mulit_layer_directory")
Example 10

def test_summaryrecord_input_null_string():
    log.debug("begin test_summaryrecord_input_null_string")
    # step 0: create the thread
    try:
        SummaryRecord("")
    except:
        assert True
    else:
        assert False
    log.debug("finished test_summaryrecord_input_null_string")
Example 11

def test_summaryrecord_input_None():
    log.debug("begin test_summaryrecord_input_None")
    # step 0: create the thread
    try:
        SummaryRecord(None)
    except:
        assert True
    else:
        assert False
    log.debug("finished test_summaryrecord_input_None")
Example 12

def test_summaryrecord_input_relative_dir_1():
    log.debug("begin test_summaryrecord_input_relative_dir_1")
    # step 0: create the thread
    try:
        SummaryRecord("./test_temp_summary_event_file/")
    except:
        assert False
    else:
        assert True
    log.debug("finished test_summaryrecord_input_relative_dir_1")
Example 13

def test_summaryrecord_input_relative_dir_2():
    log.debug("begin test_summaryrecord_input_relative_dir_2")
    # step 0: create the thread
    try:
        SummaryRecord("../summary/")
    except:
        assert False
    else:
        assert True
    log.debug("finished test_summaryrecord_input_relative_dir_2")
Example 14
def me_scalar_summary(steps, tag=None, value=None):
    with SummaryRecord(SUMMARY_DIR_ME_TEMP) as test_writer:

        x = Tensor(np.array([1.1]).astype(np.float32))
        y = Tensor(np.array([1.2]).astype(np.float32))

        out_me_dict = train_summary_record_scalar_for_1(
            test_writer, steps, x, y)

        return out_me_dict
Example 15
def test_graph_summary_callback2():
    dataset = get_dataset()
    net = Net()
    loss = nn.SoftmaxCrossEntropyWithLogits()
    optim = Momentum(net.trainable_params(), 0.1, 0.9)
    context.set_context(mode=context.GRAPH_MODE)
    model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_GRAPH",
                       network=net) as test_writer:
        summary_cb = SummaryStep(test_writer, 1)
        model.train(2, dataset, callbacks=summary_cb)
Example 16
def test_summary():
    with tempfile.TemporaryDirectory() as tmp_dir:
        steps = 2
        with SummaryRecord(tmp_dir) as test_writer:
            train_summary_record(test_writer, steps=steps)

            file_name = os.path.realpath(test_writer.full_file_name)
        with SummaryReader(file_name) as summary_reader:
            for _ in range(steps):
                event = summary_reader.read_event()
                tags = set(value.tag for value in event.summary.value)
                assert tags == {'tensor', 'histogram', 'scalar', 'image'}
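The helper train_summary_record is not shown in any of these examples. A plausible minimal version follows, assuming it reuses the tag-suffix convention from Example 1 and the _cache_summary_tensor_data helper; the tag names and tensor shapes below are assumptions.

import numpy as np

def train_summary_record(test_writer, steps):
    """Cache one value of each summary type, then record every step."""
    for i in range(1, steps + 1):
        test_data = [
            {"name": "tensor[:Tensor]", "data": Tensor(np.random.randn(2, 2).astype(np.float32))},
            {"name": "histogram[:Histogram]", "data": Tensor(np.random.randn(10).astype(np.float32))},
            {"name": "scalar[:Scalar]", "data": Tensor(np.array([1.1], dtype=np.float32))},
            {"name": "image[:Image]", "data": Tensor(np.random.rand(1, 3, 8, 8).astype(np.float32))},
        ]
        _cache_summary_tensor_data(test_data)
        test_writer.record(i)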
Example 17
def test_histogram_summary_empty_tensor():
    """Test histogram summary, input is an empty tensor."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        with SummaryRecord(tmp_dir, file_suffix="_MS_HISTOGRAM") as test_writer:
            test_data = _wrap_test_data(Tensor([]))
            _cache_summary_tensor_data(test_data)
            test_writer.record(step=1)

        file_name = os.path.join(tmp_dir, test_writer.event_file_name)
        with SummaryReader(file_name) as reader:
            event = reader.read_event()
            assert event.summary.value[0].histogram.count == 0
Example 18
def test_scalar_summary_with_ge_2():
    """ test_scalar_summary_with_ge_2 """
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR") as test_writer:
        net = SummaryDemo()
        net.set_train()

        steps = 100
        for i in range(1, steps):
            x = Tensor(np.array([1.1]).astype(np.float32))
            y = Tensor(np.array([1.2]).astype(np.float32))
            net(x, y)
            test_writer.record(i)
Example 19

    def run(self):
        """
        Run the explain job and save the result as a summary in summary_dir.

        Note:
            Users should call register_saliency() once before running this function.

        Raises:
            ValueError: Raised for any problem with the data or settings' values.
            TypeError: Raised for any problem with the data or settings' types.
            RuntimeError: Raised for any runtime problem.
        """
        self._verify_data_n_settings(check_all=True)
        self._manifest = {
            "saliency_map": False,
            "benchmark": False,
            "uncertainty": False,
            "hierarchical_occlusion": False
        }
        with SummaryRecord(self._summary_dir, raise_exception=True) as summary:
            print("Start running and writing......")
            begin = time()

            self._summary_timestamp = self._extract_timestamp(
                summary.event_file_name)
            if self._summary_timestamp is None:
                raise RuntimeError(
                    "Cannot extract timestamp from summary filename!"
                    " It should contain a timestamp after 'summary.'.")

            self._save_metadata(summary)

            imageid_labels = self._run_inference(summary)
            sample_count = self._sample_index
            if self._is_saliency_registered:
                self._run_saliency(summary, imageid_labels)
                if not self._manifest["saliency_map"]:
                    raise RuntimeError(
                        f"No saliency map was generated in {sample_count} samples. "
                        f"Please make sure the dataset, labels, activation function and network are properly trained "
                        f"and configured.")

            if self._is_hoc_registered and not self._manifest[
                    "hierarchical_occlusion"]:
                raise RuntimeError(
                    f"No Hierarchical Occlusion result was found in {sample_count} samples. "
                    f"Please make sure the dataset, labels, activation function and network are properly trained "
                    f"and configured.")

            self._save_manifest()

            print("Finish running and writing. Total time elapsed: {:.3f} s".
                  format(time() - begin))
Example 20
def test_scalar_summary_with_ge_2():
    """ test_scalar_summary_with_ge_2 """
    log.debug("begin test_scalar_summary_with_ge_2")

    # step 0: create the thread
    test_writer = SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR")

    # step 1: create the network for summary
    x = Tensor(np.array([1.1]).astype(np.float32))
    y = Tensor(np.array([1.2]).astype(np.float32))
    net = SummaryDemo()
    net.set_train()

    # step 2: create the Event
    steps = 100
    for i in range(1, steps):
        x = Tensor(np.array([1.1]).astype(np.float32))
        y = Tensor(np.array([1.2]).astype(np.float32))
        net(x, y)
        test_writer.record(i)

    # step 3: close the writer
    test_writer.close()

    log.debug("finished test_scalar_summary_with_ge_2")
Example 21
def test_image_summary_train():
    """ test_image_summary_train """
    dataset = get_dataset()

    log.debug("begin test_image_summary_sample")
    # step 0: create the thread
    test_writer = SummaryRecord(SUMMARY_DIR, file_suffix="_MS_IMAGE")

    # step 1: create the model and the summary callback
    model = get_model()
    fn = ImageSummaryCallback(test_writer)
    summary_record = SummaryStep(fn, 1)

    # step 2: train and record the events
    model.train(2, dataset, callbacks=summary_record)

    # step 3: close the writer and flush the events to file
    test_writer.close()

    log.debug("finished test_image_summary_sample")
Example 22
def test_tensor_summary_sample():
    """ test_tensor_summary_sample """
    log.debug("begin test_tensor_summary_sample")
    # step 0: create the thread
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_TENSOR") as test_writer:
        # step 1: create the Event
        for i in range(1, 100):
            test_data = get_test_data(i)

            _cache_summary_tensor_data(test_data)
            test_writer.record(i)

        log.debug("finished test_tensor_summary_sample")
Example 23
def test_scalar_summary_with_ge():
    """ test_scalar_summary_with_ge """
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR") as test_writer:
        net = SummaryDemo()
        net.set_train()

        # step 2: create the Event
        steps = 100
        for i in range(1, steps):
            x = Tensor(
                np.array([1.1 + random.uniform(1, 10)]).astype(np.float32))
            y = Tensor(
                np.array([1.2 + random.uniform(1, 10)]).astype(np.float32))
            net(x, y)
            test_writer.record(i)
Example 24
def test_validate():
    with SummaryRecord(SUMMARY_DIR) as sr:
        sr.record(1)
        with pytest.raises(ValueError):
            sr.record(False)
        with pytest.raises(ValueError):
            sr.record(2.0)
        with pytest.raises(ValueError):
            sr.record((1, 3))
        with pytest.raises(ValueError):
            sr.record([2, 3])
        with pytest.raises(ValueError):
            sr.record("str")
        with pytest.raises(ValueError):
            sr.record(sr)
Example 25
def me_train_tensor(net, input_np, label_np, epoch_size=2):
    context.set_context(mode=context.GRAPH_MODE)
    loss = SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
    opt = ApplyMomentum(Tensor(np.array([0.1])), Tensor(np.array([0.9])),
                        filter(lambda x: x.requires_grad, net.get_parameters()))
    Model(net, loss, opt)
    _network = wrap.WithLossCell(net, loss)
    _train_net = MsWrapper(wrap.TrainOneStepCell(_network, opt))
    _train_net.set_train()
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_GRAPH", network=_train_net) as summary_writer:
        for epoch in range(epoch_size):
            print(f"epoch {epoch}")
            output = _train_net(Tensor(input_np), Tensor(label_np))
            summary_writer.record(epoch + 1)  # record using the 1-based step number
            print("********output***********")
            print(output.asnumpy())
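A hypothetical invocation of me_train_tensor; the network and array shapes are illustrative assumptions, not part of the source.

# LeNet5 stands in for any small classification Cell taking 1x32x32 inputs.
input_np = np.random.randn(32, 1, 32, 32).astype(np.float32)
label_np = np.random.randint(0, 10, size=32).astype(np.int32)
me_train_tensor(LeNet5(num_classes=10), input_np, label_np, epoch_size=2)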
Example 26
def test_summary_step2_summary_record1():
    """Test record 10 step summary."""
    if platform.system() == "Windows":
        # Summary does not support windows currently.
        return

    with tempfile.TemporaryDirectory() as tmp_dir:
        steps = 2
        with SummaryRecord(tmp_dir) as test_writer:
            train_summary_record(test_writer, steps=steps)

            file_name = os.path.realpath(test_writer.full_file_name)
        with SummaryReader(file_name) as summary_reader:
            for _ in range(steps):
                event = summary_reader.read_event()
                tags = set(value.tag for value in event.summary.value)
                assert tags == {'tensor', 'histogram', 'scalar', 'image'}
Example 27
def test_histogram_summary_same_value():
    """Test histogram summary, input is an ones tensor."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        with SummaryRecord(tmp_dir, file_suffix="_MS_HISTOGRAM") as test_writer:
            dim1 = 100
            dim2 = 100

            test_data = _wrap_test_data(Tensor(np.ones([dim1, dim2])))
            _cache_summary_tensor_data(test_data)
            test_writer.record(step=1)

        file_name = os.path.join(tmp_dir, test_writer.event_file_name)
        with SummaryReader(file_name) as reader:
            event = reader.read_event()
            LOG.debug(event)

            assert len(event.summary.value[0].histogram.buckets) == _calc_histogram_bins(dim1 * dim2)
Example 28
def test_histogram_summary_all_nan_inf():
    """Test histogram summary, input tensor has no valid number."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        with SummaryRecord(tmp_dir, file_suffix="_MS_HISTOGRAM") as test_writer:
            test_data = _wrap_test_data(Tensor(np.array([np.nan, np.nan, np.nan, np.inf, -np.inf])))
            _cache_summary_tensor_data(test_data)
            test_writer.record(step=1)

        file_name = os.path.join(tmp_dir, test_writer.event_file_name)
        with SummaryReader(file_name) as reader:
            event = reader.read_event()
            LOG.debug(event)

            histogram = event.summary.value[0].histogram
            assert histogram.nan_count == 3
            assert histogram.pos_inf_count == 1
            assert histogram.neg_inf_count == 1
Example 29
def test_image_summary_data():
    """ test_image_summary_data """
    dataset = get_dataset()

    test_data_list = []
    i = 1
    for next_element in dataset:
        tag = "image_" + str(i) + "[:Image]"
        dct = {}
        dct["name"] = tag
        dct["data"] = Tensor(next_element[0])
        test_data_list.append(dct)
        i += 1

    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_IMAGE") as test_writer:
        _cache_summary_tensor_data(test_data_list)
        test_writer.record(1)
Example 30
def test_scalar_summary_sample_with_shape_1():
    """ test_scalar_summary_sample_with_shape_1 """
    log.debug("begin test_scalar_summary_sample_with_shape_1")
    # step 0: create the thread
    with SummaryRecord(SUMMARY_DIR, file_suffix="_MS_SCALAR") as test_writer:
        # step 1: create the test data and record the events
        for i in range(1, 100):
            test_data = get_test_data_shape_1(i)
            _cache_summary_tensor_data(test_data)
            test_writer.record(i)

        log.debug("finished test_scalar_summary_sample_with_shape_1")