def __init__(
    self,
    size,
    window=3,
    stride=1,
    padding='SAME',
    bias=True,
    activation='relu',
    l2_regularization=0.0,
    l1_regularization=0.0,
    scope='conv1d',
    summary_labels=()
):
    """
    1D convolutional layer.

    Args:
        size: Number of filters
        window: Convolution window size
        stride: Convolution stride
        padding: Convolution padding, one of 'VALID' or 'SAME'
        bias: If true, a bias is added
        activation: Type of nonlinearity, or dict with name & arguments
        l2_regularization: L2 regularization weight
        l1_regularization: L1 regularization weight
    """
    self.size = size
    self.window = window
    self.stride = stride
    self.padding = padding
    self.bias = bias
    self.l2_regularization = l2_regularization
    self.l1_regularization = l1_regularization
    self.nonlinearity = Nonlinearity(summary_labels=summary_labels, **util.prepare_kwargs(activation))

    super(Conv1d, self).__init__(scope=scope, summary_labels=summary_labels)
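
# A minimal usage sketch (assumption: the list-of-dicts network specification accepted by
# TensorForce agents in this version, where layer types are registered under names such as
# 'conv1d'; keys mirror the constructor arguments above):
example_conv1d_spec = dict(
    type='conv1d',      # layer type name
    size=32,            # number of filters
    window=5,           # convolution window size
    stride=2,           # convolution stride
    padding='SAME',     # 'VALID' or 'SAME'
    activation='relu',  # nonlinearity name or dict with name & arguments
)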
def __init__(
    self,
    size=None,
    weights=None,
    bias=True,
    activation='relu',
    l2_regularization=0.0,
    l1_regularization=0.0,
    skip=False,
    trainable=True,
    named_tensors=None,
    scope='dense',
    summary_labels=(),
):
    """
    Dense layer.

    Args:
        size: Layer size; if None, the output size matches the input size of the layer
        weights: Weight initialization, random if None.
        bias: If true, bias is added.
        activation: Type of nonlinearity, or dict with name & arguments
        l2_regularization: L2 regularization weight.
        l1_regularization: L1 regularization weight.
        skip: If true, adds a skip connection as in ResNet
            (https://arxiv.org/pdf/1512.03385.pdf): the layer is doubled and a shortcut
            connects the input to the output
    """
    self.skip = skip
    if self.skip and size is not None:
        raise TensorForceError(
            'Dense layer with skip connection requires size=None; the input shape '
            'determines the sizes of the skip connection network, please remove the "size" argument.'
        )

    self.linear = Linear(
        size=size,
        weights=weights,
        bias=bias,
        l2_regularization=l2_regularization,
        l1_regularization=l1_regularization,
        summary_labels=summary_labels,
        trainable=trainable
    )
    if self.skip:
        self.linear_skip = Linear(
            size=size,
            bias=bias,
            l2_regularization=l2_regularization,
            l1_regularization=l1_regularization,
            summary_labels=summary_labels,
            trainable=trainable
        )

    # TODO: Consider creating two nonlinearity variables when skip is used and learning beta.
    # Right now, only a single beta can be learned.
    self.nonlinearity = Nonlinearity(summary_labels=summary_labels, **util.prepare_kwargs(activation))

    super(Dense, self).__init__(named_tensors=named_tensors, scope=scope, summary_labels=summary_labels)
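
# A minimal usage sketch (assumption: direct construction outside an agent, which is
# illustrative only; in practice the layer is usually configured through a network spec).
# With skip=True, size must stay None so the residual shortcut can match the input shape:
example_dense = Dense(
    size=None,          # None: output size follows the input size (required when skip=True)
    bias=True,
    activation='relu',
    skip=True,          # ResNet-style shortcut from input to output
)
example_dense_spec = dict(type='dense', size=64, activation='relu')  # plain dense layer via spec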
def __init__(
    self,
    size,
    bias=False,
    activation='none',
    l2_regularization=0.0,
    l1_regularization=0.0,
    output=None,
    named_tensors=None,
    scope='dueling',
    summary_labels=()
):
    """
    Dueling layer.

    [Dueling Networks](https://arxiv.org/pdf/1511.06581.pdf)
    Implements Y = Expectation[x] + (Advantage[x] - Mean(Advantage[x]))

    Args:
        size: Layer size.
        bias: If true, bias is added.
        activation: Type of nonlinearity, or dict with name & arguments
        l2_regularization: L2 regularization weight.
        l1_regularization: L1 regularization weight.
        output: None or tuple of output names for ('expectation', 'advantage', 'mean_advantage')
    """
    # The expectation is broadcast back over the advantage values, so its output size is 1.
    self.expectation_layer = Linear(
        size=1,
        bias=bias,
        l2_regularization=l2_regularization,
        l1_regularization=l1_regularization,
        summary_labels=summary_labels,
    )
    self.advantage_layer = Linear(
        size=size,
        bias=bias,
        l2_regularization=l2_regularization,
        l1_regularization=l1_regularization,
        summary_labels=summary_labels,
    )
    self.output = output
    self.nonlinearity = Nonlinearity(summary_labels=summary_labels, **util.prepare_kwargs(activation))

    super(Dueling, self).__init__(named_tensors=named_tensors, scope=scope, summary_labels=summary_labels)
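
# A worked sketch of the combination described in the docstring above, using NumPy instead
# of the actual TensorFlow graph (illustrative only; the real layer computes expectation and
# advantage via the two Linear sub-layers):
import numpy as np

advantage = np.array([[1.0, 2.0, 3.0]])          # advantage values A(x, a), shape (batch, size)
expectation = np.array([[0.5]])                  # expectation E[x], shape (batch, 1), broadcast over actions
mean_advantage = advantage.mean(axis=1, keepdims=True)
y = expectation + (advantage - mean_advantage)   # Y = Expectation[x] + (Advantage[x] - Mean(Advantage[x]))
# y == [[-0.5, 0.5, 1.5]]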
def __init__(
    self,
    size,
    window=3,
    stride=1,
    padding='SAME',
    bias=True,
    activation='relu',
    l2_regularization=0.0,
    l1_regularization=0.0,
    named_tensors=None,
    scope='conv2d',
    summary_labels=()
):
    """
    2D convolutional layer.

    Args:
        size: Number of filters
        window: Convolution window size, either an integer or pair of integers.
        stride: Convolution stride, either an integer or pair of integers.
        padding: Convolution padding, one of 'VALID' or 'SAME'
        bias: If true, a bias is added
        activation: Type of nonlinearity, or dict with name & arguments
        l2_regularization: L2 regularization weight
        l1_regularization: L1 regularization weight
    """
    self.size = size
    if isinstance(window, int):
        self.window = (window, window)
    elif len(window) == 2:
        self.window = tuple(window)
    else:
        raise TensorForceError(
            'Invalid window {} for conv2d layer, must be of size 2'.format(window)
        )
    self.stride = stride
    self.padding = padding
    self.bias = bias
    self.l2_regularization = l2_regularization
    self.l1_regularization = l1_regularization
    self.nonlinearity = Nonlinearity(summary_labels=summary_labels, **util.prepare_kwargs(activation))

    super(Conv2d, self).__init__(named_tensors=named_tensors, scope=scope, summary_labels=summary_labels)
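
# A minimal usage sketch (assumption: the list-of-dicts network specification accepted by
# TensorForce agents in this version). As in the constructor above, `window` and `stride`
# may each be a single integer or a pair of integers:
example_conv2d_spec = dict(
    type='conv2d',
    size=64,             # number of filters
    window=(3, 5),       # asymmetric window; a single int such as 3 is also accepted
    stride=1,
    padding='VALID',
    activation='relu',
)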