Example #1
    def testNamedInfeedsDataSerializationStep(self):
        with self.session() as sess:
            num_elements = 1000
            shape = (224, 224, 3)
            shape_2 = (224, 3, 224)
            dataset = tu.create_single_increasing_dataset(num_elements,
                                                          shape=shape)

            def dataset_parser(value):
                image_1 = value
                image_2 = (value + 10.) / 2.0
                return {"a": image_1, "b": array_ops.reshape(image_2, shape_2)}

            dataset = dataset.map(dataset_parser)
            infeed_queue = ipu.ipu_infeed_queue.IPUInfeedQueue(
                dataset, FeedId.Next("infeed"))

            with tempfile.TemporaryDirectory() as tmp_folder:
                output_folder = self._create_tmp_symlink(tmp_folder)
                output_file = os.path.join(output_folder, "infeed.bin")

                sess.run(infeed_queue.initializer)

                utils.export_dataset_to_file(infeed_queue, output_file,
                                             num_elements)

                files = filesInFolder(output_folder)
                self.assertEqual(
                    len(files), 1,
                    "Expected 1 file containing both feed 'a', and feed 'b', found: %s"
                    % files)
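
Example #1 exports an infeed whose dataset yields a dict of two named feeds ("a" and "b") and expects a single serialized file containing both. The snippets in this listing are test methods lifted out of their module, so they rely on imports and small helpers that are not shown: `tu` is the suite's test-utilities module (tu.create_single_increasing_dataset is assumed to yield the values 0..n-1 broadcast to the requested shape), and self.session(), self._create_tmp_symlink(), self._configureIPU() and _configure_replicated_ipu_system() belong to the enclosing test class/module. The sketch below is a minimal, hypothetical reconstruction of the rest of that context, assuming a Graphcore TensorFlow build that ships tensorflow.python.ipu; FeedId, next_feed_id and filesInFolder are stand-ins that only need to produce unique feed names and list regular files.

import itertools
import json
import os
import re
import tempfile

import numpy as np

from tensorflow.python import ipu
from tensorflow.python.ipu import utils
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test

# Hypothetical stand-ins for the suite helpers referenced by the snippets.
_feed_counter = itertools.count()


def next_feed_id():
    # Feed names only need to be unique within the process.
    return "feed%d" % next(_feed_counter)


class FeedId:
    @staticmethod
    def Next(prefix):
        return "%s%d" % (prefix, next(_feed_counter))


def filesInFolder(folder):
    # Regular files only; sub-directories are ignored.
    return [
        f for f in os.listdir(folder)
        if os.path.isfile(os.path.join(folder, f))
    ]
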
Example #2
    def testCreateSimpleReplicatedInfeedWrongReplicationFactor(self):
        with self.session() as sess:
            shape = [2]
            dataset = tu.create_single_increasing_dataset(3, shape)

            infeed_queue = ipu.ipu_infeed_queue.IPUInfeedQueue(
                dataset, feed_name=next_feed_id(), replication_factor=4)

            def body(v, x):
                v = ipu.ops.cross_replica_ops.cross_replica_sum(v + x)
                return v

            def my_net():
                v = constant_op.constant(0.0, shape=shape, dtype=np.float32)
                r = ipu.loops.repeat(5, body, [v], infeed_queue)
                return r

            with ipu.scopes.ipu_scope("/device:IPU:0"):
                res = ipu.ipu_compiler.compile(my_net, inputs=[])

            _configure_replicated_ipu_system()

            sess.run(infeed_queue.initializer)
            with self.assertRaisesRegex(
                    errors.FailedPreconditionError,
                    'Current program has been created with replication_factor 2'
            ):
                sess.run(res)
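
Example #2 deliberately mismatches the replication factors: _configure_replicated_ipu_system() (a suite helper not shown here) sets the IPU system up for 2 replicas, so the compiled program runs with replication_factor 2, while the infeed was built with replication_factor=4; dequeuing therefore fails with the FailedPreconditionError asserted above. Under the configuration API of that era the helper plausibly looks like the following sketch (the replica count of 2 is an assumption, inferred from the expected error message).

def _configure_replicated_ipu_system():
    # Acquire two IPUs so compiled programs run with replication_factor 2.
    cfg = ipu.utils.create_ipu_config()
    cfg = ipu.utils.auto_select_ipus(cfg, 2)
    ipu.utils.configure_ipu_system(cfg)
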
Example #3
    def testCreateSimpleReplicatedInfeedOutfeed(self):
        with self.session() as sess:
            shape = [2]
            dataset = tu.create_single_increasing_dataset(3, shape)

            infeed_queue = ipu.ipu_infeed_queue.IPUInfeedQueue(
                dataset, feed_name=next_feed_id(), replication_factor=2)
            outfeed_queue = ipu.ipu_outfeed_queue.IPUOutfeedQueue(
                feed_name=next_feed_id(), replication_factor=2)

            def body(v, x):
                v = ipu.ops.cross_replica_ops.cross_replica_sum(v + x)
                outfeed = outfeed_queue.enqueue(v)
                return (v, outfeed)

            def my_net():
                v = constant_op.constant(0.0, shape=shape, dtype=np.float32)
                r = ipu.loops.repeat(5, body, [v], infeed_queue)
                return r

            with ipu.scopes.ipu_scope("/device:IPU:0"):
                res = ipu.ipu_compiler.compile(my_net, inputs=[])

            outfed = outfeed_queue.dequeue()

            _configure_replicated_ipu_system()

            sess.run(infeed_queue.initializer)
            result = sess.run(res)
            self.assertAllClose(result[0], np.broadcast_to(48, shape))
            outfed_result = sess.run(outfed)

            # Each dequeued iteration carries one entry per replica.
            self.assertEqual(outfed_result.shape[1], 2)
            self.assertAllClose(outfed_result[0][0], outfed_result[0][1])
            self.assertAllClose(outfed_result[0][0], np.broadcast_to(1, shape))

            self.assertAllClose(outfed_result[1][0], outfed_result[1][1])
            self.assertAllClose(outfed_result[1][0], np.broadcast_to(4, shape))

            self.assertAllClose(outfed_result[2][0], outfed_result[2][1])
            self.assertAllClose(outfed_result[2][0],
                                np.broadcast_to(11, shape))

            self.assertAllClose(outfed_result[3][0], outfed_result[3][1])
            self.assertAllClose(outfed_result[3][0],
                                np.broadcast_to(23, shape))

            self.assertAllClose(outfed_result[4][0], outfed_result[4][1])
            self.assertAllClose(outfed_result[4][0],
                                np.broadcast_to(48, shape))
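
The asserted values 1, 4, 11, 23 and 48 can be reproduced on the host, assuming tu.create_single_increasing_dataset(3, shape) cycles through 0, 1, 2 (broadcast to shape) and the two replicas dequeue consecutive elements on each iteration; cross_replica_sum then adds the two per-replica results of v + x. A minimal numpy check of that recurrence:

import numpy as np

# Assumed dataset semantics: 0, 1, 2, 0, 1, 2, ... with the two replicas
# consuming consecutive elements on every iteration.
values = [0., 1., 2.] * 4
v = np.zeros(2, dtype=np.float32)
expected = []
for i in range(5):
    x0, x1 = values[2 * i], values[2 * i + 1]
    # cross_replica_sum adds the per-replica results of v + x.
    v = (v + x0) + (v + x1)
    expected.append(float(v[0]))
print(expected)  # [1.0, 4.0, 11.0, 23.0, 48.0]
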
Example #4
    def testSimpleDatasetDataSerialization(self):
        num_elements = 10
        shape = (3, 5)
        dataset = tu.create_single_increasing_dataset(num_elements,
                                                      shape=shape)

        with tempfile.TemporaryDirectory() as tmp_folder:
            output_folder = self._create_tmp_symlink(tmp_folder)
            output_file = os.path.join(output_folder, "dataset.json")

            utils.export_dataset_to_file(dataset, output_file, num_elements)

            files = filesInFolder(output_folder)
            self.assertEqual(len(files), 1,
                             "Expected 1 file, found: %s" % files)
Example #5
    def testSimpleInfeedsDataSerialization(self):
        with self.session() as sess:
            num_elements = 10
            shape = (3, 5)
            dataset = tu.create_single_increasing_dataset(num_elements,
                                                          shape=shape)
            infeed_queue = ipu.ipu_infeed_queue.IPUInfeedQueue(
                dataset, FeedId.Next("infeed"))

            with tempfile.TemporaryDirectory() as tmp_folder:
                output_folder = self._create_tmp_symlink(tmp_folder)
                output_file = os.path.join(output_folder, "infeed.json")

                sess.run(infeed_queue.initializer)

                utils.export_dataset_to_file(infeed_queue, output_file,
                                             num_elements)

                files = filesInFolder(output_folder)
                self.assertEqual(len(files), 1,
                                 "Expected 1 file, found: %s" % files)
Example #6
    def testInfeedsOutfeedInfoSerialization(self):
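        # Run with TF_POPLAR_FLAGS stripped of --use_ipu_model for this test.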
        poplar_flags = os.environ.get("TF_POPLAR_FLAGS",
                                      "").replace("--use_ipu_model", "")
        with test.mock.patch.dict(
                "os.environ",
            {"TF_POPLAR_FLAGS": poplar_flags}), self.session() as sess:
            dataset = tu.create_single_increasing_dataset(2, shape=[3, 3])
            infeed_name = FeedId.Next("feed")
            outfeed_name = FeedId.Next("feed")
            infeed_spec = dataset.element_spec[0]
            infeed_queue = ipu.ipu_infeed_queue.IPUInfeedQueue(
                dataset, infeed_name)
            outfeed_queue = ipu.ipu_outfeed_queue.IPUOutfeedQueue(outfeed_name)

            def body(const, inp):
                with variable_scope.variable_scope("vs", use_resource=True):
                    inp2 = variable_scope.get_variable("input_2", [3, 3])
                    v = inp * inp2 + const
                    outfeed = outfeed_queue.enqueue(v)
                    return (const, outfeed)

            def my_graph(const):
                return ipu.loops.repeat(4, body, [const], infeed_queue)

            with ops.device("cpu"):
                const = array_ops.placeholder(np.float32, [],
                                              name="my/test/constant/0")

            with ipu.scopes.ipu_scope("/device:IPU:0"):
                output = ipu.ipu_compiler.compile(my_graph, inputs=[const])

            outfed = outfeed_queue.dequeue()
            with tempfile.TemporaryDirectory() as tmp_folder:
                tmp = self._create_tmp_symlink(tmp_folder)
                folder = os.path.join(tmp, "saved")

                self._configureIPU(folder)

                tu.move_variable_initialization_to_cpu()

                sess.run(infeed_queue.initializer)
                sess.run(variables.global_variables_initializer())
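                # Execution is expected to fail because the system was
                # configured for compilation only; the compiled executable and
                # its stream metadata end up in `folder` (checked below).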
                with self.assertRaisesRegex(errors.InvalidArgumentError,
                                            "compilation only"):
                    sess.run(output, {const: np.ones(const.shape)})
                outfed_result = sess.run(outfed)

                with variable_scope.variable_scope("vs",
                                                   use_resource=True,
                                                   reuse=True):
                    inp2 = variable_scope.get_variable("input_2")
                module_hash = None

                self.assertTrue(os.path.isdir(folder))
                files = filesInFolder(folder)
                self.assertEqual(len(files), 2,
                                 "Expected 2 files, found: %s" % files)
                for name in files:
                    if not module_hash:
                        m = re.match(r"([0-9a-f]+)\..*", name)
                        self.assertTrue(
                            m,
                            "Failed to identify module hash from filename %s" %
                            name)
                        module_hash = m.group(1)
                    if name == module_hash + ".json":
                        with open(os.path.join(folder, name),
                                  "r") as metadata_file:
                            metadata = json.load(metadata_file)
                        self._validateStreams(
                            metadata, [(const, "input_data"),
                                       (inp2, "parameter")],
                            [(tensor_spec.TensorSpec(shape=[],
                                                     dtype=dtypes.float32,
                                                     name="XLA_Retvals:0"),
                              "output_data")], [(infeed_spec, infeed_name)],
                            [(outfed_result, outfeed_name)])
                    else:
                        self.assertEqual(name, "%s.ipu_bin" % module_hash)