def test_get_absolute(self):
    """Resolving a relative path against a base: each leading extra dot strips
    one component from the base; stripping past the root raises ValueError."""
    cases = [
        (".", "1.2", "1.2"),
        (".x.y", "1.2", "1.2.x.y"),
        ("..x.y", "1.2", "1.x.y"),
        ("...x.y", "1.2", "x.y"),
    ]
    for relative, base, expected in cases:
        self.assertEqual(Path(relative).get_absolute(Path(base)), Path(expected))
    # More leading dots than the base has components is an error.
    with self.assertRaises(ValueError):
        Path("....x.y").get_absolute(Path("1.2"))
def test_ancestors(self):
    """ancestors() returns the path itself plus every prefix, including the root."""
    cases = [
        ("", {""}),
        ("a", {"", "a"}),
        ("one.two.three", {"", "one", "one.two", "one.two.three"}),
    ]
    for text, expected_strs in cases:
        expected = {Path(s) for s in expected_strs}
        self.assertEqual(Path(text).ancestors(), expected)
def test_add_path(self):
    """add_path joins a relative suffix onto a base path; an absolute
    (dot-prefixed) suffix is not supported and raises NotImplementedError."""
    cases = [
        ("one", "2", "one.2"),
        ("one", "2.3", "one.2.3"),
        ("", "2.3", "2.3"),
        ("one.2", "", "one.2"),
        ("", "", ""),
        (".", "", "."),
        (".", "one.two", ".one.two"),
        (".xy", "one.two", ".xy.one.two"),
    ]
    for base, suffix, expected in cases:
        joined = Path(base).add_path(Path(suffix))
        self.assertEqual(str(joined), expected)
    with self.assertRaises(NotImplementedError):
        Path("one").add_path(Path(".2.3"))
def test_set(self):
    """Paths are hashable: equal paths collapse to one set entry."""
    paths = {Path("one.2"), Path("one.1.3"), Path("one.1.3")}
    self.assertIn(Path("one.2"), paths)
    # The duplicate "one.1.3" must have been deduplicated.
    self.assertEqual(len(paths), 2)
def test_str(self):
    """str(Path(s)) round-trips the original string, including the empty path."""
    for text in ("one.2", ""):
        self.assertEqual(str(Path(text)), text)
def test_eq(self):
    """Equality is by path value; the empty path and the root "." differ."""
    equal_pairs = [
        (Path(""), Path("")),
        (Path(".."), Path("..")),
        (Path("one.2"), Path("one.2")),
        (Path("one.2"), Path("one.2.3").parent()),
    ]
    for left, right in equal_pairs:
        self.assertEqual(left, right)
    self.assertNotEqual(Path("one.2"), Path("one.2.3"))
    self.assertNotEqual(Path(""), Path("."))
def __init__(
        self,
        layers: numbers.Integral = 1,
        input_dim: numbers.Integral = Ref("exp_global.default_layer_dim"),
        hidden_dim: numbers.Integral = Ref("exp_global.default_layer_dim"),
        dropout: numbers.Real = Ref("exp_global.dropout", default=0.0),
        weightnoise_std: numbers.Real = Ref("exp_global.weight_noise", default=0.0),
        param_init: param_initializers.ParamInitializer = Ref(
            "exp_global.param_init",
            default=bare(param_initializers.GlorotInitializer)),
        bias_init: param_initializers.ParamInitializer = Ref(
            "exp_global.bias_init",
            default=bare(param_initializers.ZeroInitializer)),
        yaml_path: Path = Path(),
        decoder_input_dim: Optional[numbers.Integral] = Ref(
            "exp_global.default_layer_dim", default=None),
        decoder_input_feeding: bool = True) -> None:
    """Create the per-layer LSTM parameters for this transducer.

    Registers, per layer, an input weight matrix ``Wx``, a recurrent weight
    matrix ``Wh``, and a bias vector ``b``, each stacked 4x along the first
    dimension (one slice per LSTM gate, see the ``[i; f; o; g]`` comment
    below).

    Args:
      layers: number of stacked LSTM layers.
      input_dim: dimensionality of the inputs to the first layer.
      hidden_dim: dimensionality of the hidden state (per layer).
      dropout: dropout rate (stored; applied elsewhere via dropout masks).
      weightnoise_std: std-dev of weight noise (stored; applied elsewhere).
      param_init: initializer for the weight matrices; either a single
        initializer (replicated across layers) or a sequence of one per layer.
      bias_init: initializer for the biases; same single-or-sequence handling.
      yaml_path: this component's position in the config tree; used to detect
        whether the transducer lives inside a decoder.
      decoder_input_dim: extra input width added when used with input feeding
        inside a decoder.
      decoder_input_feeding: whether decoder input feeding is enabled.
    """
    self.num_layers = layers
    # Parameter collection for this component; managed by the global
    # ParamManager registry.
    model = param_collections.ParamManager.my_params(self)
    # When this transducer sits under a decoder and input feeding is on, the
    # previous output is concatenated to the input, widening the first layer.
    # NOTE(review): if decoder_input_feeding is True but decoder_input_dim
    # resolves to None (its Ref default), the += below would raise a
    # TypeError — presumably the config framework guarantees a value in the
    # decoder case; confirm.
    if yaml_path is not None and "decoder" in yaml_path:
        if decoder_input_feeding:
            input_dim += decoder_input_dim
    self.hidden_dim = hidden_dim
    self.dropout_rate = dropout
    self.weightnoise_std = weightnoise_std
    self.input_dim = input_dim

    # A single initializer is replicated to one per layer; a sequence is
    # assumed to already have one entry per layer.
    if not isinstance(param_init, collections.abc.Sequence):
        param_init = [param_init] * layers
    if not isinstance(bias_init, collections.abc.Sequence):
        bias_init = [bias_init] * layers

    # [i; f; o; g] — each parameter stacks the four LSTM gate blocks along
    # the first axis (hence hidden_dim * 4); num_shared=4 tells the
    # initializer the matrix is 4 stacked sub-matrices.
    # Input-to-hidden weights: layer 0 maps from input_dim, deeper layers
    # map from the previous layer's hidden_dim.
    self.p_Wx = [
        model.add_parameters(dim=(hidden_dim * 4, input_dim),
                             init=param_init[0].initializer(
                                 (hidden_dim * 4, input_dim), num_shared=4))
    ]
    self.p_Wx += [
        model.add_parameters(dim=(hidden_dim * 4, hidden_dim),
                             init=param_init[i].initializer(
                                 (hidden_dim * 4, hidden_dim), num_shared=4))
        for i in range(1, layers)
    ]
    # Hidden-to-hidden (recurrent) weights, one per layer.
    self.p_Wh = [
        model.add_parameters(dim=(hidden_dim * 4, hidden_dim),
                             init=param_init[i].initializer(
                                 (hidden_dim * 4, hidden_dim), num_shared=4))
        for i in range(layers)
    ]
    # Gate biases, one per layer.
    self.p_b = [
        model.add_parameters(dim=(hidden_dim * 4, ),
                             init=bias_init[i].initializer(
                                 (hidden_dim * 4, ), num_shared=4))
        for i in range(layers)
    ]
    # Dropout masks are created lazily elsewhere (variational dropout style —
    # TODO confirm); None means "not yet initialized".
    self.dropout_mask_x = None
    self.dropout_mask_h = None