Example #1
  def test_save_load_checkpoint(self):
    model = delay_model_pb2.DelayModel()
    # Maps an op name to the set of bit configurations we've run that op with.
    data_points = collections.defaultdict(set)

    # Set up some dummy data.
    ops = ["op_a", "op_b", "op_c", "op_d", "op_e"]
    bit_configs = ["3, 1, 1", "4, 1, 2", "5, 2, 1", "6, 2, 2"]
    for op in ops:
      data_points[op] = set()
      for bit_config in bit_configs:
        data_points[op].add(bit_config)
        result = delay_model_pb2.DataPoint()
        result.operation.op = op
        for elem in bit_config.split(",")[1:]:
          operand = delay_model_pb2.Operation.Operand()
          operand.bit_count = int(elem)
          result.operation.operands.append(operand)
        result.operation.bit_count = int(bit_config.split(",")[0])
        result.delay = 5
        model.data_points.append(result)
    tf = tempfile.NamedTemporaryFile()
    client.save_checkpoint(model, tf.name)
    loaded_data_points, loaded_model = client.init_data(tf.name)

    self.assertEqual(model, loaded_model)
    # Fancy equality checking so we get clearer error messages on
    # mismatch.
    for op in ops:
      self.assertIn(op, loaded_data_points)
      loaded_op = loaded_data_points[op]
      for bit_config in bit_configs:
        self.assertIn(bit_config, loaded_op)
        self.assertIn(bit_config, data_points[op])
    self.assertEqual(data_points, loaded_data_points)
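For context alongside this round-trip test: a minimal, hypothetical sketch of what client.save_checkpoint could look like, assuming it simply writes the DelayModel proto as a text proto (the real helper may use project-specific filesystem utilities, as the init_data counterpart shown later in this section does):

from google.protobuf import text_format

def save_checkpoint(model, checkpoint_path):
    """Hypothetical sketch: serialize the DelayModel proto to a text-proto file."""
    with open(checkpoint_path, 'w') as f:
        f.write(text_format.MessageToString(model))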
Example #2
    def test_regression_estimator_cross_validation_passes(self):
        def gen_operation(result_bit_count, operand_bit_count):
            return 'op: "kFoo" bit_count: %d operands { } operands { bit_count: %d }' % (
                result_bit_count, operand_bit_count)

        def gen_data_point(result_bit_count, operand_bit_count, delay):
            return 'data_points{{ operation {{ {} }} delay: {} delay_offset: 0}}'.format(
                gen_operation(result_bit_count, operand_bit_count), delay)

        data_points_str = [
            gen_data_point(1, 2, 1),
            gen_data_point(2, 2, 2),
            gen_data_point(4, 1, 4),
            gen_data_point(5, 111, 5),
            gen_data_point(7, 13, 7),
            gen_data_point(8, 2, 8),
            gen_data_point(10, 12, 10),
            gen_data_point(15, 6, 15),
            gen_data_point(20, 40, 20),
            gen_data_point(30, 15, 30),
            gen_data_point(31, 2, 31),
            gen_data_point(35, 2, 35),
            gen_data_point(40, 30, 40),
            gen_data_point(45, 9, 45),
            gen_data_point(50, 4, 50),
            gen_data_point(55, 400, 55),
            gen_data_point(70, 10, 70),
            gen_data_point(100, 50, 100),
            gen_data_point(125, 15, 125),
            gen_data_point(150, 100, 150),
        ]
        proto_text = """
    op_models {
      op: "kFoo"
      estimator {
        regression {
          expressions {
            factor {
              source: RESULT_BIT_COUNT
            }
          }
          kfold_validator {
            max_data_point_error: 0.15
            max_fold_geomean_error: 0.075
          }
        }
      }
    }
    """
        proto_text = proto_text + '\n'.join(data_points_str)

        delay_model.DelayModel(
            text_format.Parse(proto_text, delay_model_pb2.DelayModel()))
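The kfold_validator settings above bound the error of each held-out data point (max_data_point_error) and the geometric mean of the per-fold errors (max_fold_geomean_error). As a rough, hedged illustration of a geometric-mean error computation, not the library's actual implementation:

import math

def geomean(errors):
    """Geometric mean of positive relative errors (illustrative only)."""
    return math.exp(sum(math.log(e) for e in errors) / len(errors))

print(geomean([0.05, 0.10, 0.20]))  # ~0.1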
Example #3
def run_characterization(
        stub: synthesis_service_pb2_grpc.SynthesisServiceStub) -> None:
    """Run characterization with the given synthesis service."""
    model = delay_model_pb2.DelayModel()

    for ops, runner in OPS_RUNNERS:
        for op in ops:
            runner(op, model, stub)

    print('# proto-file: xls/delay_model/delay_model.proto')
    print('# proto-message: xls.delay_model.DelayModel')
    print(model)
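OPS_RUNNERS is defined outside this snippet; from the loop above it is presumably an iterable pairing collections of op names with a runner callable taking (op, model, stub). A hedged sketch of the implied shape, where the names are placeholders rather than the module's real helpers:

# Hypothetical sketch of the structure implied by the loop above: each entry
# pairs a collection of op names with a runner callable taking (op, model, stub).
def _run_placeholder_ops(op, model, stub):
    del op, model, stub  # Real runners synthesize the op and record data points.

OPS_RUNNERS = [
    (('kOpA', 'kOpB'), _run_placeholder_ops),  # Placeholder names only.
]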
Example #4
    def test_regression_estimator_cross_validation_insufficient_data_for_folds(
            self):
        def gen_operation(result_bit_count, operand_bit_count):
            return 'op: "kFoo" bit_count: %d operands { } operands { bit_count: %d }' % (
                result_bit_count, operand_bit_count)

        def gen_data_point(result_bit_count, operand_bit_count, delay):
            return 'data_points{{ operation {{ {} }} delay: {} delay_offset: 0}}'.format(
                gen_operation(result_bit_count, operand_bit_count), delay)

        data_points_str = [
            gen_data_point(1, 2, 100),
            gen_data_point(4, 1, 125),
            gen_data_point(4, 6, 150),
            gen_data_point(7, 13, 175),
            gen_data_point(10, 12, 200),
            gen_data_point(30, 15, 400),
        ]
        proto_text = """
    op_models {
      op: "kFoo"
      estimator {
        regression {
          expressions {
            factor {
              source: OPERAND_BIT_COUNT
              operand_number: 1
            }
          }
          expressions {
            factor {
              source: RESULT_BIT_COUNT
            }
          }
          kfold_validator {
            num_cross_validation_folds: 8
            max_data_point_error: 99.0
            max_fold_geomean_error: 99.0
          }
        }
      }
    }
    """
        proto_text = proto_text + '\n'.join(data_points_str)

        with self.assertRaises(delay_model.Error) as e:
            delay_model.DelayModel(
                text_format.Parse(proto_text, delay_model_pb2.DelayModel()))
        self.assertEqualIgnoringWhitespaceAndFloats(
            'kFoo: Too few data points to cross '
            'validate: 6 data points, 8 folds', str(e.exception))
Example #5
def init_data(
    checkpoint_path: str
) -> Tuple[Dict[str, Set[str]], delay_model_pb2.DelayModel]:
    """Return new state, loading data from a checkpoint, if available."""
    data_points = {}
    model = delay_model_pb2.DelayModel()
    if checkpoint_path:
        filesystem.parse_text_proto_file(checkpoint_path, model)
        for data_point in model.data_points:
            op = data_point.operation
            if op.op not in data_points:
                data_points[op.op] = set()
            types_str = ', '.join([str(op.bit_count)] +
                                  [str(x.bit_count) for x in op.operands])
            data_points[op.op].add(types_str)
    return data_points, model
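A small illustration of the key format built above: the result bit count first, then each operand's bit count, matching the "3, 1, 1"-style strings used in Example #1.

op = delay_model_pb2.Operation(op='kFoo', bit_count=3)
op.operands.add(bit_count=1)
op.operands.add(bit_count=1)
types_str = ', '.join([str(op.bit_count)] + [str(x.bit_count) for x in op.operands])
assert types_str == '3, 1, 1'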
Example #6
def main(argv):
    if len(argv) > 2:
        raise app.UsageError('Too many command-line arguments.')

    with open(argv[1], 'rb') as f:
        contents = f.read()

    dm = delay_model.DelayModel(
        text_format.Parse(contents, delay_model_pb2.DelayModel()))

    for op in dm.ops():
        op_model = dm.op_model(op)
        maybe_plot_op_model(op_model.estimator)

        for specialization_kind, estimator in op_model.specializations.items():
            maybe_plot_op_model(
                estimator,
                delay_model_pb2.SpecializationKind.Name(specialization_kind))
Example #7
def main(argv):
  if len(argv) > 2:
    raise app.UsageError('Too many command-line arguments.')

  with open(argv[1], 'rb') as f:
    contents = f.read()

  dm = delay_model.DelayModel(
      text_format.Parse(contents, delay_model_pb2.DelayModel()))

  env = jinja2.Environment(undefined=jinja2.StrictUndefined)
  tmpl_text = runfiles.get_contents_as_text(
      'xls/delay_model/generate_delay_lookup.tmpl')
  template = env.from_string(tmpl_text)
  rendered = template.render(
      delay_model=dm,
      name=FLAGS.model_name,
      precedence=FLAGS.precedence,
      camel_case_name=''.join(
          s.capitalize() for s in FLAGS.model_name.split('_')))
  print('// DO NOT EDIT: this file is AUTOMATICALLY GENERATED and should not '
        'be changed.')
  print(rendered)
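The camel_case_name passed to the template is simply the snake_case model name with each underscore-separated segment capitalized, for example (the model name below is made up):

name = 'sky130_delay_model'  # Hypothetical model name.
camel = ''.join(s.capitalize() for s in name.split('_'))
assert camel == 'Sky130DelayModel'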
Example #8
    def test_regression_estimator_cross_validation_data_point_exceeds_geomean_error(
            self):
        def gen_operation(result_bit_count, operand_bit_count):
            return 'op: "kFoo" bit_count: %d operands { } operands { bit_count: %d }' % (
                result_bit_count, operand_bit_count)

        def gen_data_point(result_bit_count, operand_bit_count, delay):
            return 'data_points{{ operation {{ {} }} delay: {} delay_offset: 0}}'.format(
                gen_operation(result_bit_count, operand_bit_count), delay)

        data_points_str = [
            gen_data_point(1, 2, 1),
            gen_data_point(2, 2, 2),
            gen_data_point(4, 1, 4),
            gen_data_point(5, 111, 5),
            gen_data_point(7, 13, 7),
            gen_data_point(8, 2, 8),
            gen_data_point(10, 12, 10),
            gen_data_point(15, 6, 15),
            gen_data_point(20, 40, 20),
            gen_data_point(30, 15, 30),
            gen_data_point(31, 2, 31),
            gen_data_point(35, 2, 35),
            gen_data_point(40, 30, 40),
            gen_data_point(45, 9, 45),
            gen_data_point(50, 4, 50),
            gen_data_point(55, 400, 55),
            gen_data_point(70, 10, 70),
            gen_data_point(100, 50, 100),
            gen_data_point(125, 15, 125),
            gen_data_point(150, 100, 150),
        ]
        proto_text = """
    op_models {
      op: "kFoo"
      estimator {
        regression {
          expressions {
            factor {
              source: OPERAND_BIT_COUNT
              operand_number: 1
            }
          }
          kfold_validator {
            max_fold_geomean_error: 0.1
          }
        }
      }
    }
    """
        proto_text = proto_text + '\n'.join(data_points_str)

        # The regression model above uses operand_bit_count (uncorrelated with
        # delay) as its only factor, so k-fold cross validation should fail.

        with self.assertRaises(delay_model.Error) as e:
            delay_model.DelayModel(
                text_format.Parse(proto_text, delay_model_pb2.DelayModel()))

        self.assertEqualIgnoringWhitespaceAndFloats(
            'kFoo: Regression model failed '
            'k-fold cross validation for test set with geomean error 0.0 > max 0.0',
            str(e.exception))
        self.assertIn('> max 0.1', str(e.exception))
Example #9
    def test_regression_estimator_cross_validation_data_point_exceeds_max_error(
            self):
        def gen_operation(result_bit_count, operand_bit_count):
            return 'op: "kFoo" bit_count: %d operands { } operands { bit_count: %d }' % (
                result_bit_count, operand_bit_count)

        def gen_data_point(result_bit_count, operand_bit_count, delay):
            return 'data_points{{ operation {{ {} }} delay: {} delay_offset: 0}}'.format(
                gen_operation(result_bit_count, operand_bit_count), delay)

        data_points_str = [
            gen_data_point(1, 2, 1),
            gen_data_point(2, 2, 2),
            gen_data_point(4, 1, 4),
            gen_data_point(5, 111, 5),
            gen_data_point(7, 13, 7),
            gen_data_point(8, 2, 8),
            gen_data_point(10, 12, 10),
            gen_data_point(15, 6, 15),
            gen_data_point(20, 40, 20),
            # Outlier
            gen_data_point(30, 15, 50),
            #
            gen_data_point(31, 2, 31),
            gen_data_point(35, 2, 35),
            gen_data_point(40, 30, 40),
            gen_data_point(45, 9, 45),
            gen_data_point(50, 4, 50),
            gen_data_point(55, 400, 55),
            gen_data_point(70, 10, 70),
            gen_data_point(100, 50, 100),
            gen_data_point(125, 15, 125),
            gen_data_point(150, 100, 150),
        ]
        proto_text = """
    op_models {
      op: "kFoo"
      estimator {
        regression {
          expressions {
            factor {
              source: RESULT_BIT_COUNT
            }
          }
          kfold_validator {
            max_data_point_error: 0.3
          }
        }
      }
    }
    """
        proto_text = proto_text + '\n'.join(data_points_str)

        with self.assertRaises(delay_model.Error) as e:
            delay_model.DelayModel(
                text_format.Parse(proto_text, delay_model_pb2.DelayModel()))
        self.assertEqualIgnoringWhitespaceAndFloats(
            'kFoo: Regression model failed k-fold '
            'cross validation for data point (30, 50) with absolute error 0.0'
            ' > max 0.0', str(e.exception))
        self.assertIn('> max 0.3', str(e.exception))
Example #10
def run_characterization(
        stub: synthesis_service_pb2_grpc.SynthesisServiceStub) -> None:
    """Runs characterization via 'stub'; prints the DelayModel to stdout as prototext."""
    model = delay_model_pb2.DelayModel()

    # Bin ops
    _run_linear_bin_op_and_add('add', 'kAdd', model, stub)
    _run_linear_bin_op_and_add('sub', 'kSub', model, stub)
    # Observed shift data is noisy.
    _run_linear_bin_op_and_add('shll', 'kShll', model, stub)
    _run_linear_bin_op_and_add('shrl', 'kShrl', model, stub)
    _run_linear_bin_op_and_add('shra', 'kShra', model, stub)

    _run_quadratic_bin_op_and_add('sdiv', 'kSDiv', model, stub, signed=True)
    _run_quadratic_bin_op_and_add('smod', 'kSMod', model, stub, signed=True)
    _run_quadratic_bin_op_and_add('udiv', 'kUDiv', model, stub)
    _run_quadratic_bin_op_and_add('umod', 'kUMod', model, stub)

    # Unary ops
    _run_unary_op_and_add('neg', 'kNeg', model, stub, signed=True)
    _run_unary_op_and_add('not', 'kNot', model, stub)

    # Nary ops
    _run_nary_op_and_add('and', 'kAnd', model, stub)
    _run_nary_op_and_add('nand', 'kNand', model, stub)
    _run_nary_op_and_add('nor', 'kNor', model, stub)
    _run_nary_op_and_add('or', 'kOr', model, stub)
    _run_nary_op_and_add('xor', 'kXor', model, stub)

    # Reduction ops
    _run_reduction_op_and_add('and_reduce', 'kAndReduce', model, stub)
    _run_reduction_op_and_add('or_reduce', 'kOrReduce', model, stub)
    _run_reduction_op_and_add('xor_reduce', 'kXorReduce', model, stub)

    # Comparison ops
    _run_comparison_op_and_add('eq', 'kEq', model, stub)
    _run_comparison_op_and_add('ne', 'kNe', model, stub)
    # Note: We could specialize on sign, but accuracy gains from sign have
    # been marginal so far, and these ops also cost less than smul / sdiv
    # anyway.
    _run_comparison_op_and_add('sge', 'kSGe', model, stub)
    _run_comparison_op_and_add('sgt', 'kSGt', model, stub)
    _run_comparison_op_and_add('sle', 'kSLe', model, stub)
    _run_comparison_op_and_add('slt', 'kSLt', model, stub)
    _run_comparison_op_and_add('uge', 'kUGe', model, stub)
    _run_comparison_op_and_add('ugt', 'kUGt', model, stub)
    _run_comparison_op_and_add('ule', 'kULe', model, stub)
    _run_comparison_op_and_add('ult', 'kULt', model, stub)

    # Select ops
    # For functions called for only one op, we could hard-code op and kOp into
    # the function; however, we prefer the consistency and readability of
    # passing them in as args.
    # Note: Select op observed data is really weird, see _run_select_op_and_add
    _run_select_op_and_add('sel', 'kSel', model, stub)
    _run_one_hot_select_op_and_add('one_hot_sel', 'kOneHotSel', model, stub)

    # Encode ops
    _run_encode_op_and_add('encode', 'kEncode', model, stub)
    _run_decode_op_and_add('decode', 'kDecode', model, stub)

    # Dynamic bit slice op
    _run_dynamic_bit_slice_op_and_add('dynamic_bit_slice', 'kDynamicBitSlice',
                                      model, stub)

    # One hot op
    _run_one_hot_op_and_add('one_hot', 'kOneHot', model, stub)

    # Mul ops
    # Note: Modeling smul w/ sign bit as with sdiv decreases accuracy.
    _run_mul_op_and_add('smul', 'kSMul', model, stub)
    _run_mul_op_and_add('umul', 'kUMul', model, stub)

    # Array ops
    _run_array_index_op_and_add('array_index', 'kArrayIndex', model, stub)
    _run_array_update_op_and_add('array_update', 'kArrayUpdate', model, stub)

    # Add free ops.
    for free_op in FREE_OPS:
        entry = model.op_models.add(op=free_op)
        entry.estimator.fixed = 0

    # Final validation
    delay_model.DelayModel(model)

    print('# proto-file: xls/delay_model/delay_model.proto')
    print('# proto-message: xls.delay_model.DelayModel')
    print(model)
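FREE_OPS is defined elsewhere in the module; judging from the loop above, it is a sequence of op names that are modeled as free, each receiving a fixed delay estimator of 0. A placeholder sketch (these names are not the project's actual list):

FREE_OPS = ('kFreeOpA', 'kFreeOpB')  # Placeholder names for illustration only.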