Example #1
    def testPipelineIterationsNotMultiple(self):
        dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
        dataset = dataset.batch(batch_size=2, drop_remainder=True)

        def dataset_parser(value):
            a = value
            b = (value + 10.) / 2.0
            return {"a": a, "b": b}

        dataset = dataset.map(dataset_parser)
        infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed1")
        outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed1")

        def stage1(c, **kwargs):
            with variable_scope.variable_scope("vs", use_resource=True):
                y = layers.Conv2D(
                    2,
                    1,
                    use_bias=True,
                    kernel_initializer=init_ops.ones_initializer(),
                    name='conv1')(kwargs["a"])
                return y + kwargs["b"], c

        def stage2(x, c):
            return math_ops.reduce_sum(x) + c

        def stage3(x):
            return x

        def my_net(c):
            return pipelining_ops.pipeline(
                [stage1, stage2, stage3],
                10,
                inputs=[c],
                infeed_queue=infeed_queue,
                outfeed_queue=outfeed_queue,
                pipeline_schedule=pipelining_ops.PipelineSchedule.Grouped)

        with ops.device('cpu'):
            c = array_ops.placeholder(np.float32, shape=[])

        with tu.ipu_session() as sess:

            with ops.device("/device:IPU:0"):
                r = ipu_compiler.compile(my_net, inputs=[c])

            cfg = utils.create_ipu_config(profiling=True,
                                          profile_execution=True)
            cfg = utils.auto_select_ipus(cfg, 4)
            utils.configure_ipu_system(cfg)
            utils.move_variable_initialization_to_cpu()

            sess.run(variables.global_variables_initializer())
            sess.run(infeed_queue.initializer)
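            # The pipeline has 3 stages, so running it for 10 iterations
            # must fail with the precondition error asserted below.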
            with self.assertRaisesRegex(
                    errors.FailedPreconditionError,
                    'The pipeline depth of the pipeline must be a multiple of 3'
            ):
                sess.run(r, {c: 10.01})
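All of these snippets use the test-utility helper tu.create_single_increasing_dataset, whose definition is not shown on this page. The following is a minimal sketch of what the helper plausibly does, assuming (as the assertions in the examples suggest) that it yields tensors of the requested shape filled with a counter that runs from 0 to n-1 and then repeats:

import numpy as np
import tensorflow as tf

def create_single_increasing_dataset(n, shape, dtype=np.float32):
    # Each element is a tensor of `shape` filled with the current counter
    # value; Dataset.range(n).repeat() makes the counter wrap around, so
    # the dataset is infinite. This reconstruction is an assumption, not
    # the library's actual source.
    def _broadcast(value):
        return tf.cast(tf.broadcast_to(value, shape), dtype)

    return tf.data.Dataset.range(n).repeat().map(_broadcast)

Under that assumption, the pre-batch dataset in Example #1 cycles through the values 0..4 as [4, 4, 2] tensors, which is consistent with the repeating loss sequence asserted in Example #7 below.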
Example #2
        def dataset_fn():
            dataset = tu.create_single_increasing_dataset(7, shape=[4, 4, 2])
            dataset = dataset.batch(batch_size=2, drop_remainder=True)

            def dataset_parser(value):
                img = value / 7
                label = value[0][0][0][0]
                return img, label

            return dataset.map(dataset_parser)
Example #3
        def dataset_fn():
            dataset = tu.create_single_increasing_dataset(10, shape=[4])
            dataset = dataset.batch(batch_size=2, drop_remainder=True)

            def dataset_parser(value):
                label = math_ops.reduce_mean(value, axis=[1])
                return math_ops.cast(value, np.int32), math_ops.cast(
                    label / 10, np.int32)

            return dataset.map(dataset_parser)
Example #4
    def dataset_fn():
      dataset = tu.create_single_increasing_dataset(7, shape=[4, 4])

      def dataset_parser(value):
        img = value
        label = value[0][0] % 4
        return img, math_ops.cast(label, np.int32)

      dataset = dataset.map(dataset_parser)

      return dataset.batch(batch_size=2, drop_remainder=True)
Example #5
        def dataset_fn():
            dataset = tu.create_single_increasing_dataset(100, shape=[4])
            dataset = dataset.batch(batch_size=32, drop_remainder=True)
            dataset = dataset.batch(batch_size=32, drop_remainder=True)
            dataset = dataset.batch(batch_size=2, drop_remainder=True)
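            # After the three nested batch calls each element has shape
            # [2, 32, 32, 4], so reduce_mean over axes [1, 2, 3] below
            # yields a label vector of shape [2].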

            def dataset_parser(value):
                img = value
                label = math_ops.reduce_mean(img, axis=[1, 2, 3])
                return img, math_ops.cast(label, np.int32)

            return dataset.map(dataset_parser)
Example #6
    def testBufferDataset(self):
        dataset = tu.create_single_increasing_dataset(10, shape=[4, 4])
        dataset = dataset.take(3)
        dataset = ipu.data.ops.dataset_ops.BufferDataset(dataset, 2)
        itr = compat_v1_data.make_one_shot_iterator(dataset)

        next_data = itr.get_next()
        with self.session() as sess:
            self.assertAllEqual(sess.run(next_data)[0], np.zeros([4, 4]))
            self.assertAllEqual(sess.run(next_data)[0], np.ones([4, 4]))
            with self.assertRaises(errors.OutOfRangeError):
                sess.run(next_data)
Example #7
  def testPipelineWithInfeedsKwargs(self):
    with tu.ipu_session() as sess:
      dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
      dataset = dataset.batch(batch_size=2, drop_remainder=True)

      def dataset_parser(value):
        a = value
        b = (value + 10.) / 2.0
        return {"a": a, "b": b}

      dataset = dataset.map(dataset_parser)
      infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed6")
      outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed6")

      def stage1(c, **kwargs):
        with variable_scope.variable_scope("vs", use_resource=True):
          y = layers.Conv2D(2,
                            1,
                            use_bias=True,
                            kernel_initializer=init_ops.ones_initializer(),
                            name='conv1')(kwargs["a"])
          return y + kwargs["b"], c

      def stage2(x, c):
        return math_ops.reduce_sum(x) + c

      def stage3(x):
        return x

      def my_net(c):
        return pipelining_ops.pipeline(
            [stage1, stage2, stage3],
            12,
            inputs=[c],
            infeed_queue=infeed_queue,
            outfeed_queue=outfeed_queue,
            pipeline_schedule=pipelining_ops.PipelineSchedule.Sequential)

      with ops.device('cpu'):
        c = array_ops.placeholder(np.float32, shape=[])

      with ops.device("/device:IPU:0"):
        r = ipu_compiler.compile(my_net, inputs=[c])

      cfg = utils.create_ipu_config(profiling=True, profile_execution=True)
      cfg = utils.auto_select_ipus(cfg, 4)
      utils.configure_ipu_system(cfg)
      utils.move_variable_initialization_to_cpu()

      outfeed_op = outfeed_queue.dequeue()

      report = tu.ReportJSON(self, sess, configure_device=False)
      report.reset()
      sess.run(variables.global_variables_initializer())
      sess.run(infeed_queue.initializer)
      sess.run(r, {c: 10.01})
      losses_pipeline = sess.run(outfeed_op)
      self.assertAllClose(losses_pipeline, [[
          410.01, 730.01, 650.01, 570.01, 890.01, 410.01, 730.01, 650.01,
          570.01, 890.01, 410.01, 730.01
      ]])
      report.parse_log()
      report.assert_pipeline_stages_on_expected_ipu((0, 1, 3))
Example #8
  def testPipelineInvalidDeviceMapping(self):
    dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
    dataset = dataset.batch(batch_size=2, drop_remainder=True)

    def dataset_parser(value):
      a = value
      b = (value + 10.) / 2.0
      return {"a": a, "b": b}

    dataset = dataset.map(dataset_parser)
    infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed3")
    outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed3")

    def stage1(c, **kwargs):
      with variable_scope.variable_scope("vs", use_resource=True):
        y = layers.Conv2D(2,
                          1,
                          use_bias=True,
                          kernel_initializer=init_ops.ones_initializer(),
                          name='conv1')(kwargs["a"])
        return y + kwargs["b"], c

    def stage2(x, c):
      return math_ops.reduce_sum(x) + c

    def stage3(x):
      return x

    with ops.device('cpu'):
      c = array_ops.placeholder(np.float32, shape=[])

    # Wrong type:
    with self.assertRaisesRegex(
        TypeError, 'device_mapping argument needs to be a list or a tuple'):
      pipelining_ops.pipeline(
          [stage1, stage2, stage3],
          3,
          inputs=[c],
          infeed_queue=infeed_queue,
          outfeed_queue=outfeed_queue,
          device_mapping=1,
          pipeline_schedule=pipelining_ops.PipelineSchedule.Sequential)

    # Too many values:
    with self.assertRaisesRegex(ValueError,
                                'Each stage must be mapped to an IPU'):
      pipelining_ops.pipeline(
          [stage1, stage2, stage3],
          3,
          inputs=[c],
          infeed_queue=infeed_queue,
          outfeed_queue=outfeed_queue,
          device_mapping=list(range(4)),
          pipeline_schedule=pipelining_ops.PipelineSchedule.Sequential)

    # Not enough values:
    with self.assertRaisesRegex(ValueError,
                                'Each stage must be mapped to an IPU'):
      pipelining_ops.pipeline(
          [stage1, stage2, stage3],
          3,
          inputs=[c],
          infeed_queue=infeed_queue,
          outfeed_queue=outfeed_queue,
          device_mapping=tuple(range(1)),
          pipeline_schedule=pipelining_ops.PipelineSchedule.Sequential)
Example #9
def dataset_fn():
    dataset = tu.create_single_increasing_dataset(7, shape=[1, 10])
    return dataset.repeat().batch(4, drop_remainder=True)
Example #10
    def testPipelineInvalidDeviceMapping(self):
        dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
        dataset = dataset.batch(batch_size=2, drop_remainder=True)

        def dataset_parser(value):
            a = value
            b = (value + 10.) / 2.0
            return {"a": a, "b": b}

        dataset = dataset.map(dataset_parser)
        infeed_queue = ipu_infeed_queue.IPUInfeedQueue(dataset, "__feed3")
        outfeed_queue = ipu_outfeed_queue.IPUOutfeedQueue("__feed3")

        def stage1(c, **kwargs):
            with variable_scope.variable_scope("vs", use_resource=True):
                y = layers.Conv2D(
                    2,
                    1,
                    use_bias=True,
                    kernel_initializer=init_ops.ones_initializer(),
                    name='conv1')(kwargs["a"])
                return y + kwargs["b"], c

        def stage2(x, c):
            return math_ops.reduce_sum(x) + c

        def stage3(x):
            return x

        with ops.device('cpu'):
            c = array_ops.placeholder(np.float32, shape=[])

        # Batch serialization requires all stages to be mapped to a single IPU:
        with self.assertRaisesRegex(
                NotImplementedError,
                'When using batch serialization, all the pipeline '
                'stages need to be mapped to a single IPU.'):
            pipelining_ops.pipeline(
                [stage1, stage2, stage3],
                3,
                inputs=[c],
                infeed_queue=infeed_queue,
                outfeed_queue=outfeed_queue,
                device_mapping=[0, 1, 0],
                pipeline_schedule=pipelining_ops.PipelineSchedule.Sequential,
                batch_serialization_iterations=4)

        # Batch serialization only supports the Sequential schedule:
        with self.assertRaisesRegex(
                NotImplementedError,
                'Batch serialization is only supported with the '
                '`Sequential` schedule'):
            pipelining_ops.pipeline(
                [stage1, stage2, stage3],
                3,
                inputs=[c],
                infeed_queue=infeed_queue,
                outfeed_queue=outfeed_queue,
                device_mapping=[0, 0, 0],
                pipeline_schedule=pipelining_ops.PipelineSchedule.Grouped,
                batch_serialization_iterations=4)
Example #11
def dataset_fn():
    dataset = tu.create_single_increasing_dataset(5, shape=[4, 4, 2])
    dataset = dataset.batch(batch_size=2, drop_remainder=True)

    def dataset_parser(value):  # inlined from the sibling examples above
        return {"a": value, "b": (value + 10.) / 2.0}

    return dataset.map(dataset_parser)