Example #1
0
    def __init__(self,
                 name,
                 input_spec=None,
                 summary_labels=None,
                 l2_regularization=None):
        """
        Layer constructor: normalize the input spec, derive the output spec,
        and register the layer in the global layer registry.

        Args:
            input_spec (specification): Input tensor specification, unified
                with the layer's default spec when given.
        """
        super().__init__(name=name,
                         summary_labels=summary_labels,
                         l2_regularization=l2_regularization)

        # Start from the layer's own default input spec, normalized.
        self.input_spec = self.default_input_spec()
        self.input_spec = util.valid_value_spec(
            value_spec=self.input_spec,
            accept_underspecified=True,
            return_normalized=True,
        )

        if input_spec is not None:
            # Normalize the caller-provided spec, then unify it with the default.
            given = util.valid_value_spec(
                value_spec=input_spec,
                accept_underspecified=True,
                return_normalized=True,
            )
            self.input_spec = util.unify_value_specs(
                value_spec1=self.input_spec, value_spec2=given
            )

        # Copy so that spec can be modified
        derived = self.get_output_spec(input_spec=dict(self.input_spec))
        self.output_spec = util.valid_value_spec(
            value_spec=derived,
            accept_underspecified=True,
            return_normalized=True,
        )

        # Register layer globally (registry is created lazily on first use).
        if Layer.layers is None:
            Layer.layers = OrderedDict()
        # if self.name in Layer.layers:
        #     raise TensorforceError.unexpected()
        Layer.layers[self.name] = self
    def __init__(self,
                 name,
                 input_spec=None,
                 l2_regularization=None,
                 summary_labels=None):
        """
        Layer constructor.

        Args:
            input_spec (specification): Input tensor specification.
        """
        super().__init__(name=name,
                         l2_regularization=l2_regularization,
                         summary_labels=summary_labels)

        # Normalize the layer's default input spec.
        spec = self.default_input_spec()
        spec = util.valid_value_spec(
            value_spec=spec, accept_underspecified=True, return_normalized=True
        )
        self.input_spec = spec

        if input_spec is not None:
            # Unify the normalized caller-supplied spec with the default one.
            normalized = util.valid_value_spec(
                value_spec=input_spec,
                accept_underspecified=True,
                return_normalized=True,
            )
            self.input_spec = util.unify_value_specs(
                value_spec1=self.input_spec, value_spec2=normalized
            )

        # Copy so that spec can be modified
        self.output_spec = util.valid_value_spec(
            value_spec=self.get_output_spec(input_spec=dict(self.input_spec)),
            accept_underspecified=True,
            return_normalized=True,
        )
Example #3
0
    def register_tensor(name, spec, batched):
        """
        Register a tensor specification globally under the given name.

        Args:
            name (str): Tensor name; must not contain '/'.
            spec (specification): Value specification (copied, not mutated).
            batched (bool): Whether the tensor carries a batch dimension;
                recorded inside the stored spec under the 'batched' key.

        Raises:
            TensorforceError: if the name contains '/', the global scope is
                not set up, the spec conflicts with an already-registered
                tensor of the same name, or the spec is invalid.
        """
        if '/' in name:
            raise TensorforceError.value(
                name='Module.register_tensor', argument='name', value=name, hint='contains /'
            )

        # Registration is only meaningful once the global scope exists.
        if Module.global_scope is None:
            raise TensorforceError.unexpected()

        scoped_name = name

        # if scoped_name in Module.global_tensors_spec:
        #     raise TensorforceError("Global tensor already exists: {}.".format(scoped_name))

        # Copy so the caller's dict is not mutated; record batching in the spec.
        spec = dict(spec)
        spec['batched'] = batched

        # Re-registering the same name is only allowed with an identical spec.
        if scoped_name in Module.global_tensors_spec and \
                spec != Module.global_tensors_spec[scoped_name]:
            raise TensorforceError.mismatch(
                name='Module.register_tensor', argument='spec', value1=spec, value2=Module.global_tensors_spec[scoped_name]
            )

        if not util.valid_value_spec(value_spec=spec):
            raise TensorforceError.unexpected()

        # NOTE: removed dead check `spec['batched'] != batched` -- 'batched'
        # was unconditionally set to `batched` above, so it could never differ.

        Module.global_tensors_spec[scoped_name] = spec
Example #4
0
    def __init__(self,
                 name,
                 dtype,
                 unit=None,
                 shape=(),
                 min_value=None,
                 max_value=None,
                 summary_labels=None):
        """
        Constructor: validate dtype/shape/bounds and register the tensor globally.

        Args:
            name (str): Tensor name.
            dtype (str): Value type ('bool', 'int', 'long' or 'float').
            unit (str): Optional unit, one of None / 'timesteps' / 'episodes' /
                'updates'.
            shape (tuple): Tensor shape.
            min_value: Required lower bound on the values this object can
                produce (checked against self.min_value()).
            max_value: Required upper bound (checked against self.max_value()).
            summary_labels: Forwarded to the superclass constructor.
        """
        super().__init__(name=name, summary_labels=summary_labels)

        assert unit in (None, 'timesteps', 'episodes', 'updates')
        self.unit = unit

        # Normalize the (type, shape) specification.
        spec = dict(type=dtype, shape=shape)
        spec = util.valid_value_spec(value_spec=spec, return_normalized=True)
        self.dtype = spec['type']
        self.shape = spec['shape']

        # Bounds must be consistent and type-appropriate for the dtype:
        # bool admits no bounds; int/long require int bounds; float requires
        # float bounds. NOTE(review): isinstance(True, int) is True in Python,
        # so a bool passed as an int bound would slip through -- confirm intended.
        assert min_value is None or max_value is None or min_value < max_value
        if self.dtype == 'bool':
            if min_value is not None or max_value is not None:
                raise TensorforceError.unexpected()
        elif self.dtype in ('int', 'long'):
            if (min_value is not None and not isinstance(min_value, int)) or \
                    (max_value is not None and not isinstance(max_value, int)):
                raise TensorforceError.unexpected()
        elif self.dtype == 'float':
            if (min_value is not None and not isinstance(min_value, float)) or \
                    (max_value is not None and not isinstance(max_value, float)):
                raise TensorforceError.unexpected()
        else:
            # Unreachable after spec normalization above.
            assert False

        # self.min_value()/self.max_value() are provided by the subclass and
        # describe the achievable value range; it must satisfy the requested
        # bounds: min_value() >= min_value and max_value() <= max_value, with
        # None meaning "unbounded" (which fails a requested bound).
        assert self.min_value() is None or self.max_value() is None or \
            self.min_value() <= self.max_value()
        if min_value is not None:
            if self.min_value() is None:
                raise TensorforceError.value(name=self.name,
                                             argument='lower bound',
                                             value=self.min_value(),
                                             hint=('not >= ' + str(min_value)))
            elif self.min_value() < min_value:
                raise TensorforceError.value(name=self.name,
                                             argument='lower bound',
                                             value=self.min_value(),
                                             hint=('< ' + str(min_value)))
        if max_value is not None:
            if self.max_value() is None:
                raise TensorforceError.value(name=self.name,
                                             argument='upper bound',
                                             value=self.max_value(),
                                             hint=('not <= ' + str(max_value)))
            elif self.max_value() > max_value:
                raise TensorforceError.value(name=self.name,
                                             argument='upper bound',
                                             value=self.max_value(),
                                             hint=('> ' + str(max_value)))

        # Expose the tensor in the global registry (unbatched).
        Module.register_tensor(name=self.name, spec=spec, batched=False)
Example #5
0
    def __init__(self, name, dtype, shape=(), summary_labels=None):
        """
        Constructor: validate the (dtype, shape) specification and register
        this tensor globally under its name.
        """
        super().__init__(name=name, summary_labels=summary_labels)

        # Normalize the value specification and record its components.
        value_spec = util.valid_value_spec(
            value_spec=dict(type=dtype, shape=shape), return_normalized=True
        )
        self.dtype = value_spec['type']
        self.shape = value_spec['shape']

        # Expose the tensor in the global registry (unbatched).
        Module.register_tensor(name=self.name, spec=value_spec, batched=False)
Example #6
0
    def __init__(self, name, dtype, shape=(), unit=None, summary_labels=None):
        """
        Constructor: validate the (dtype, shape) specification, record the
        optional unit, and register this tensor globally under its name.
        """
        super().__init__(name=name, summary_labels=summary_labels)

        # Only these counting units are supported.
        assert unit in (None, 'timesteps', 'episodes', 'updates')

        # Normalize the value specification and record its components.
        value_spec = util.valid_value_spec(
            value_spec=dict(type=dtype, shape=shape), return_normalized=True
        )
        self.dtype = value_spec['type']
        self.shape = value_spec['shape']
        self.unit = unit

        # Expose the tensor in the global registry (unbatched).
        Module.register_tensor(name=self.name, spec=value_spec, batched=False)