Example #1
0
def test_SymmetricalTokenDepthCriterion_depth_increase_token():
    """Test that error is raised if depth_decrease_token is invalid.

    Only depth_increase_token is set on the config; the constructor must
    reject both a missing and an explicitly empty depth_decrease_token.
    """
    # Field is missing: only the increase token was set.
    config = sampler_pb2.SymmetricalTokenDepth(depth_increase_token="a")
    with test.Raises(errors.UserError) as e_info:
        samplers.SymmetricalTokenDepthCriterion(config)
    assert "SymmetricalTokenDepth.depth_decrease_token must be a string" == str(
        e_info.value)
    # Value is empty: proto treats "" the same as unset for scalar strings,
    # so the same error is expected.
    config.depth_decrease_token = ""
    with test.Raises(errors.UserError) as e_info:
        samplers.SymmetricalTokenDepthCriterion(config)
    assert "SymmetricalTokenDepth.depth_decrease_token must be a string" == str(
        e_info.value)
Example #2
0
def test_SymmetrcalTokenDepthCriterion_SampleIsComplete_reverse_order():
    """Test that sample is not complete if right token appears before left."""
    criterion = samplers.SymmetricalTokenDepthCriterion(
        sampler_pb2.SymmetricalTokenDepth(
            depth_increase_token="+", depth_decrease_token="-"
        )
    )
    # A leading decrease token alone does not close the sample.
    assert not criterion.SampleIsComplete(list("-+"))
    assert not criterion.SampleIsComplete(list("-abc+"))
    # Once the net depth returns to balance, the sample is complete.
    assert criterion.SampleIsComplete(list("-abc++-"))
Example #3
0
def test_SymmetricalTokenDepthCriterion_same_tokens():
    """test that error is raised if depth tokens are the same."""
    duplicate_token_config = sampler_pb2.SymmetricalTokenDepth(
        depth_increase_token="a",
        depth_decrease_token="a",
    )
    with test.Raises(errors.UserError) as e_info:
        samplers.SymmetricalTokenDepthCriterion(duplicate_token_config)
    # The constructor must refuse identical open/close tokens.
    assert str(e_info.value) == "SymmetricalTokenDepth tokens must be different"
Example #4
0
def test_SymmetrcalTokenDepthCriterion_SampleIsComplete_reverse_order():
    """Test that sample is not complete if right token appears before left."""
    depth_config = sampler_pb2.SymmetricalTokenDepth(
        depth_increase_token='+', depth_decrease_token='-')
    checker = samplers.SymmetricalTokenDepthCriterion(depth_config)
    # Decrease-before-increase leaves the depth unbalanced.
    incomplete_samples = [
        ['-', '+'],
        ['-', 'a', 'b', 'c', '+'],
    ]
    for sample in incomplete_samples:
        assert not checker.SampleIsComplete(sample)
    # Balanced net depth marks the sample complete.
    assert checker.SampleIsComplete(['-', 'a', 'b', 'c', '+', '+', '-'])
Example #5
0
def test_SymmetricalTokenDepthCriterion_SampleIsComplete():
    """Test SampleIsComplete() returns expected values."""
    criterion = samplers.SymmetricalTokenDepthCriterion(
        sampler_pb2.SymmetricalTokenDepth(
            depth_increase_token="+", depth_decrease_token="-"
        )
    )
    # (sample, expected-completeness) pairs covering depths -1, 0 and 1.
    cases = [
        ([], False),                                      # Depth 0, incomplete.
        (["+"], False),                                   # Depth 1, incomplete.
        (["-"], True),                                    # Depth -1, complete.
        (["+", "-"], True),                               # Depth 0, complete.
        (["a", "+", "b", "c"], False),                    # Depth 1, incomplete.
        (["a", "+", "+", "b", "c", "-"], False),          # Depth 1, incomplete.
        (["a", "+", "-", "+", "b", "c", "-"], True),      # Depth 0, complete.
    ]
    for sample, expected in cases:
        assert bool(criterion.SampleIsComplete(sample)) == expected
Example #6
0
def test_SymmetricalTokenDepthCriterion_SampleIsComplete():
    """Test SampleIsComplete() returns expected values."""
    config = sampler_pb2.SymmetricalTokenDepth(
        depth_increase_token='+', depth_decrease_token='-')
    tracker = samplers.SymmetricalTokenDepthCriterion(config)
    # An empty sample has depth 0 but is not yet complete.
    assert not tracker.SampleIsComplete([])
    # A lone increase token leaves depth 1: incomplete.
    assert not tracker.SampleIsComplete(['+'])
    # A lone decrease token drops depth below zero: complete.
    assert tracker.SampleIsComplete(['-'])
    # A matched pair returns to depth 0: complete.
    assert tracker.SampleIsComplete(['+', '-'])
    # Non-token characters do not affect depth; still at 1: incomplete.
    assert not tracker.SampleIsComplete(['a', '+', 'b', 'c'])
    # Two increases, one decrease: depth 1, incomplete.
    assert not tracker.SampleIsComplete(['a', '+', '+', 'b', 'c', '-'])
    # Increases and decreases balance out: complete.
    assert tracker.SampleIsComplete(['a', '+', '-', '+', 'b', 'c', '-'])
Example #7
0
    def __init__(
        self,
        atomizer: atomizers.AtomizerBase,
        target_features: typing.Optional[np.array],
    ):
        """Initialize the backtracking sampler state.

        Args:
          atomizer: Atomizer used to specialize the token-depth termination
            criterion.
          target_features: Optional feature vector to hill-climb towards; when
            None, no feature-distance state is initialized.
        """
        # Temporary working directory is used to write files that the Grewe feature
        # extractor can use.
        self.working_dir = pathlib.Path(
            tempfile.mkdtemp(prefix="phd_clgen_backtracking_"))
        # Track '{' / '}' nesting so we can tell when a sampled body closes.
        self.symtok = samplers.SymmetricalTokenDepthCriterion(
            sampler_pb2.SymmetricalTokenDepth(depth_increase_token="{",
                                              depth_decrease_token="}"))
        self.symtok.Specialize(atomizer)

        # Feature hill climbing state.
        self._previous_src = ""
        self._target_features = target_features
        if self._target_features is not None:
            # Use the builtin `int` dtype: the `np.int` alias was deprecated in
            # NumPy 1.20 and removed in 1.24, and was always equivalent to `int`.
            self._previous_features = np.array([0, 0, 0, 0], dtype=int)
            self._init_feature_distance = scipy.spatial.distance.euclidean(
                self._previous_features, self._target_features)
            self._previous_feature_distance = self._init_feature_distance