Example #1
    def __init__(self,
                 degree,
                 depth,
                 mark=None,
                 max_volterra_order=3,
                 **kwargs):
        # Check parameters
        if degree < 1: raise ValueError('!! Degree must be a positive integer')
        if depth < 1: raise ValueError('!! Depth must be a positive integer')

        # Call parent's constructor
        Model.__init__(self, mark)

        # Initialize fields
        self.degree = degree
        self.depth = depth
        self._max_volterra_order = min(max_volterra_order, degree)
        self.T = {}
        self._input = Input([depth], name='input')
        self._output = None
        self._target = None
        self._alpha = 1.1
        self._outputs = {}

        # Initialize operators in each degree
        orders = kwargs.get('orders', None)
        if orders is None: orders = list(range(1, self.degree + 1))
        self.orders = orders
        self._init_T()
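The constructor above validates its hyperparameters before delegating to the parent class. A minimal, self-contained sketch of that pattern follows; the Model stub and the PolynomialModel name are assumptions made for illustration, not part of the library.

    class Model:
        """Hypothetical stand-in for the framework's base Model class."""
        def __init__(self, mark=None):
            self.mark = mark

    class PolynomialModel(Model):
        def __init__(self, degree, depth, mark=None):
            # Validate hyperparameters before touching any other state
            if degree < 1: raise ValueError('!! Degree must be a positive integer')
            if depth < 1: raise ValueError('!! Depth must be a positive integer')
            Model.__init__(self, mark)
            self.degree, self.depth = degree, depth

    model = PolynomialModel(degree=3, depth=5, mark='poly')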
Example #2
 def __init__(self, mark=None):
     Model.__init__(self, mark)
     RNet.__init__(self, 'RecurrentNet')
     self.superior = self
     self._default_net = self
     # Attributes
     self._state = NestedTensorSlot(self, 'State')
     # mascot will be initialized as a placeholder with no shape specified
     # .. and passed as the initializer argument of tf.scan
     self._mascot = None
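Examples #2 and #9 mix Model with RNet by calling each parent's initializer explicitly rather than relying on super(), which keeps the initialization order obvious. Below is a self-contained sketch of the same pattern with stub base classes; every class name here is an illustrative assumption, not the library's real definition.

    class Model:
        def __init__(self, mark=None):
            self.mark = mark

    class RNet:
        def __init__(self, name):
            self.name = name

    class Recurrent(Model, RNet):
        def __init__(self, mark=None):
            # Initialize each base class explicitly, in a fixed order
            Model.__init__(self, mark)
            RNet.__init__(self, 'RecurrentNet')
            self.superior = self
            self._default_net = self

    net = Recurrent(mark='rnn')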
Example #3
 def advanced_one_step(self, *args, lr_list=None, **kwargs):
     if lr_list is None: lr_list = [0.000088] * self.branches_num
     for i in range(self.branches_num):
         self.set_branch_index(i)
         if i > 0:
             FLAGS.overwrite = False
             FLAGS.save_best = True
         self._optimizer_lr_modify(lr_list[i])
         Model.train(self, *args, **kwargs)
     lr_list = [0.000088, 0.00088, 0.000088]
     self.train(branch_index=self.branches_num, lr_list=lr_list, **kwargs)
Example #4
 def build(self,
           optimizer=None,
           loss='euclid',
           metric=None,
           metric_is_like_loss=True,
           metric_name='Metric',
           **kwargs):
     Model.build(self,
                 optimizer=optimizer,
                 loss=loss,
                 metric=metric,
                 metric_name=metric_name,
                 metric_is_like_loss=metric_is_like_loss)
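The build method above only forwards its arguments to Model.build. A hypothetical call is sketched below; the model instance, the Adam optimizer choice and the learning rate are assumptions, while 'euclid' is the default loss shown above (TensorFlow 1.x API, as used throughout these examples).

    import tensorflow as tf

    # `model` is assumed to be an instance of a class defining the build method above
    model.build(optimizer=tf.train.AdamOptimizer(learning_rate=1e-3),
                loss='euclid',
                metric_is_like_loss=True,
                metric_name='Metric')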
Example #5
 def __init__(self, mark=None, **kwargs):
     # Call parent's initializer
     Model.__init__(self, mark)
     Net.__init__(self, 'Bamboo_Broad_Net', inter_type=pedia.fork)
     assert self._inter_type == pedia.fork
     self.outputs = None
     # Private fields
     self._losses = []
     self._metrics = []
     self._train_ops = []
     self._var_list = []
     self._output_list = []
     self._branch_index = 0
     self._identity_initial = kwargs.get('identity', False)
Example #6
    def train(self, *args, branch_index=0, lr_list=None, **kwargs):
        if lr_list is None: lr_list = [0.000088] * self.branches_num
        self.set_branch_index(branch_index)
        freeze = kwargs.get('freeze', True)
        if not freeze:
            train_step = []
            for i in range(branch_index + 1):
                self._optimizer_lr_modify(lr_list[i])
                train_step.append(
                    self._optimizer.minimize(loss=self._loss,
                                             var_list=self._var_list[i]))
            self._train_step = train_step

        Model.train(self, *args, **kwargs)
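The freeze keyword above decides whether each branch up to branch_index gets a fresh minimize op with its own learning rate, or the existing train step is reused. A hypothetical call follows; the model instance, the data argument and the concrete learning rates are assumptions for illustration.

    # `model` and `train_set` are assumed to exist; lr_list holds one rate per branch
    model.train(train_set,
                branch_index=2,
                lr_list=[8.8e-5, 8.8e-4, 8.8e-5],
                freeze=False)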
Example #7
    def __init__(self,
                 z_dim=None,
                 sample_shape=None,
                 output_shape=None,
                 mark=None,
                 classes=0):
        # Call parent's constructor
        Model.__init__(self, mark)

        self._targets = None
        self._conditional = classes > 0
        self._classes = classes
        if self._conditional:
            with self._graph.as_default():
                self._targets = tf.placeholder(dtype=tf.float32,
                                               shape=[None, classes],
                                               name='one_hot_labels')

        # Define generator and discriminator
        self.Generator = Net(pedia.Generator)
        self.Discriminator = Net(pedia.Discriminator)
        # Alias
        self.G = self.Generator
        self.D = self.Discriminator

        # If z_dim/sample_shape is provided, define the input for
        #   generator/discriminator accordingly
        if z_dim is not None:
            self.G.add(Input(sample_shape=[None, z_dim], name='z'))
        if sample_shape is not None:
            if not isinstance(sample_shape, (list, tuple)):
                raise TypeError('sample shape must be a list or a tuple')
            self.D.add(
                Input(sample_shape=[None] + list(sample_shape),
                      name='samples'))

        self._z_dim = z_dim
        self._sample_shape = sample_shape
        self._output_shape = output_shape
        self._sample_num = None

        # Private tensors and ops
        self._G, self._outputs = None, None
        self._Dr, self._Df = None, None
        self._logits_Dr, self._logits_Df = None, None
        self._loss_G, self._loss_D = None, None
        self._loss_Dr, self._loss_Df = None, None
        self._train_step_G, self._train_step_D = None, None
        self._merged_summary_G, self._merged_summary_D = None, None
Example #8
 def build(self,
           optimizer=None,
           loss='euclid',
           metric=None,
           batch_metric=None,
           eval_metric=None,
           **kwargs):
     context.metric_name = 'unknown'  # TODO: to be deprecated
     Model.build(self,
                 optimizer=optimizer,
                 loss=loss,
                 metric=metric,
                 batch_metric=batch_metric,
                 eval_metric=eval_metric,
                 **kwargs)
Example #9
  def __init__(self, mark=None):
    Model.__init__(self, mark)
    RNet.__init__(self, 'RecurrentNet')
    self.superior = self
    self._default_net = self
    # Attributes
    self._state_slot = NestedTensorSlot(self, 'State')
    # mascot will be initialized as a placeholder with no shape specified
    # .. and passed as the initializer argument of tf.scan
    self._mascot = None
    self._while_loop_free_output = None

    # TODO: BETA
    self.last_scan_output = None
    self.grad_delta_slot = NestedTensorSlot(self, 'GradDelta')
    self._grad_buffer_slot = NestedTensorSlot(self, 'GradBuffer')
Example #10
    def __init__(self,
                 z_dim=None,
                 sample_shape=None,
                 output_shape=None,
                 mark=None,
                 classes=0):
        # Call parent's constructor
        Model.__init__(self, mark)

        # Fields
        self._output_shape = output_shape

        # Define encoder and decoder
        self.Encoder = Net(pedia.Encoder)
        self.Decoder = Net(pedia.Decoder)

        self.Q = self.Encoder
        self.P = self.Decoder

        # If z_dim/sample_shape is provided, define the input for
        #   decoder/encoder accordingly
        if z_dim is not None:
            self.P.add(Input(sample_shape=[None, z_dim], name='z'))
        if sample_shape is not None:
            if not isinstance(sample_shape, (list, tuple)):
                raise TypeError('sample shape must be a list or a tuple')
            self.Q.add(
                Input(sample_shape=[None] + list(sample_shape),
                      name='samples'))

        # Placeholders
        self._sample_num = None
        self._classes = classes
        self._conditional = classes > 0
        if self._conditional:
            self._targets = tf.placeholder(dtype=tf.float32,
                                           shape=[None, classes],
                                           name='one_hot_labels')

        self._P, self._outputs = None, None

        # ...
        pass
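As in Example #7, a sample_shape of, say, (28, 28, 1) becomes an encoder input of shape [None, 28, 28, 1] once the batch dimension is prepended. The list arithmetic is plain Python and needs no framework; the concrete shape is an assumption for illustration.

    sample_shape = (28, 28, 1)
    if not isinstance(sample_shape, (list, tuple)):
        raise TypeError('sample shape must be a list or a tuple')
    input_shape = [None] + list(sample_shape)   # -> [None, 28, 28, 1]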
Example #11
 def __init__(self, mark=None):
     Model.__init__(self, mark)
     Net.__init__(self, 'FeedforwardNet')
     self.superior = self
     self._default_net = self
Example #12
 def __init__(self, mark=None):
     Model.__init__(self, mark)
     Net.__init__(self, 'FeedforwardNet')
     self.outputs = None