def initialize(self, num_kpts, temp_length, input_dimension=3, output_dimension=3, output_pts=None):
    """Build a temporal 1-D conv network mapping a keypoint sequence to output points.

    Args:
        num_kpts: number of input keypoints per frame (stored on the instance).
        temp_length: temporal window length (stored on the instance).
        input_dimension: per-keypoint input dimensionality (stored only; not
            read elsewhere in this method).
        output_dimension: dimensionality of each predicted point.
        output_pts: number of points to predict; defaults to num_kpts.
    """
    self.num_kpts = num_kpts
    self.temp_length = temp_length
    self.input_dimension = input_dimension
    self.output_dimension = output_dimension
    self.output_pts = output_pts if output_pts is not None else num_kpts
    # Stem: valid-padded conv with stride 3 that downsamples the temporal axis.
    self.c1 = M.ConvLayer1D(
        3, 1024,
        stride=3,
        pad='VALID',
        activation=M.PARAM_PRELU,
        batch_norm=True,
        usebias=False,
    )
    # Four residual refinement blocks.
    self.r1 = ResBlock1D(k=3)
    self.r2 = ResBlock1D(k=3)
    self.r3 = ResBlock1D(k=3)
    self.r4 = ResBlock1D(k=3)
    # Head: kernel-1 conv emitting output_pts * output_dimension channels.
    self.c4 = M.ConvLayer1D(1, self.output_pts * self.output_dimension)
def initialize(self, outchn=512, dilation=1, k=3):
    """Set up a 1-D residual block: a dilated PReLU conv followed by a plain conv.

    Args:
        outchn: output channel count of both convolutions.
        dilation: dilation rate of the first convolution.
        k: kernel size of the first convolution.
    """
    self.bn = M.BatchNorm()
    self.c1 = M.ConvLayer1D(
        k, outchn,
        dilation_rate=dilation,
        pad='VALID',
        activation=M.PARAM_PRELU,
        batch_norm=True,
        usebias=False,
    )
    # NOTE(review): the second conv uses a fixed kernel of 3 (not k) and carries
    # no activation/batch-norm — confirm this asymmetry is intentional.
    self.c2 = M.ConvLayer1D(3, outchn, pad='VALID')
def initialize(self, num_kpts, temp_length):
    """Build a temporal 1-D conv network regressing 3 values per keypoint.

    Args:
        num_kpts: number of keypoints; the head emits num_kpts * 3 channels.
        temp_length: temporal window length (stored on the instance).
    """
    self.num_kpts = num_kpts
    self.temp_length = temp_length
    # Stem: valid-padded conv with stride 3 that downsamples the temporal axis.
    self.c1 = M.ConvLayer1D(
        3, 1024,
        stride=3,
        pad='VALID',
        activation=M.PARAM_PRELU,
        batch_norm=True,
        usebias=False,
    )
    # Four residual refinement blocks.
    self.r1 = ResBlock1D(k=3)
    self.r2 = ResBlock1D(k=3)
    self.r3 = ResBlock1D(k=3)
    self.r4 = ResBlock1D(k=3)
    # Head: kernel-1 conv, 3 output channels per keypoint.
    self.c4 = M.ConvLayer1D(1, num_kpts * 3)
def initialize(self, outchn=1024, k=3):
    """Set up a downsampling block: a stride-k conv followed by a kernel-1 conv.

    Args:
        outchn: output channel count of both convolutions.
        k: kernel size AND stride of the first convolution (stored as self.k).
    """
    self.k = k
    # Non-overlapping windows: kernel size equals stride.
    self.c1 = M.ConvLayer1D(
        k, outchn,
        stride=k,
        pad='VALID',
        activation=M.PARAM_PRELU,
        batch_norm=True,
        usebias=False,
    )
    self.c2 = M.ConvLayer1D(
        1, outchn,
        pad='VALID',
        activation=M.PARAM_PRELU,
        batch_norm=True,
        usebias=False,
    )
def initialize(self):
    """Set up a kernel-1 conv stack (1024 -> 256 -> 256 -> 1) with batch norm."""
    self.c1 = M.ConvLayer1D(1, 1024, batch_norm=True, usebias=False, activation=M.PARAM_PRELU)
    self.c2 = M.ConvLayer1D(1, 256, batch_norm=True, usebias=False, activation=M.PARAM_PRELU)
    self.c3 = M.ConvLayer1D(1, 256, batch_norm=True, usebias=False, activation=M.PARAM_PRELU)
    # Final layer: single linear output channel, no normalization/activation.
    self.c4 = M.ConvLayer1D(1, 1)
def initialize(self, num_pts, temp_length, pt_dim):
    """Build a dilated temporal conv network predicting num_pts points of pt_dim dims.

    Args:
        num_pts: number of points; the head emits num_pts * pt_dim channels.
        temp_length: temporal window length (stored on the instance).
        pt_dim: dimensionality of each predicted point.
    """
    self.num_pts = num_pts
    self.pt_dim = pt_dim
    self.temp_length = temp_length
    self.c1 = M.ConvLayer1D(
        5, 512,
        pad='VALID',
        activation=M.PARAM_PRELU,
        batch_norm=True,
        usebias=False,
    )
    # Residual blocks with exponentially growing dilation: 2, 4, 8, 16.
    self.r1 = ResBlock1D(k=3, dilation=2)
    self.r2 = ResBlock1D(k=3, dilation=4)
    self.r3 = ResBlock1D(k=5, dilation=8)
    self.r4 = ResBlock1D(k=5, dilation=16)
    self.c5 = M.ConvLayer1D(
        9, 512,
        pad='VALID',
        activation=M.PARAM_PRELU,
        batch_norm=True,
        usebias=False,
    )
    # NOTE(review): the head is named c4 yet created after c5 — order/name kept
    # exactly as in the original; confirm against the forward pass.
    self.c4 = M.ConvLayer1D(1, num_pts * pt_dim)
def initialize(self):
    """Set up a stack of valid-padded LReLU convs ending in a 62-channel head.

    Dilation rates grow through the middle of the stack (1, 2, 4, 4, 8, 1);
    the final kernel-1 layer is linear with no bias.
    """
    self.c1 = M.ConvLayer1D(5, 512, pad='VALID', batch_norm=True, usebias=False, activation=M.PARAM_LRELU)
    self.c2 = M.ConvLayer1D(5, 512, dilation_rate=2, pad='VALID', batch_norm=True, usebias=False, activation=M.PARAM_LRELU)
    self.c3 = M.ConvLayer1D(3, 512, dilation_rate=4, pad='VALID', batch_norm=True, usebias=False, activation=M.PARAM_LRELU)
    self.c4 = M.ConvLayer1D(3, 512, dilation_rate=4, pad='VALID', batch_norm=True, usebias=False, activation=M.PARAM_LRELU)
    self.c5 = M.ConvLayer1D(5, 512, dilation_rate=8, pad='VALID', batch_norm=True, usebias=False, activation=M.PARAM_LRELU)
    self.c6 = M.ConvLayer1D(5, 512, pad='VALID', batch_norm=True, usebias=False, activation=M.PARAM_LRELU)
    # Head: 62 output channels — meaning of 62 not visible here; see callers.
    self.c7 = M.ConvLayer1D(1, 62, usebias=False)
def initialize(self):
    """Set up a kernel-1 conv stack (1024 -> 256 -> 256 -> 1) without batch norm."""
    self.c1 = M.ConvLayer1D(1, 1024, activation=M.PARAM_PRELU)
    self.c2 = M.ConvLayer1D(1, 256, activation=M.PARAM_PRELU)
    self.c3 = M.ConvLayer1D(1, 256, activation=M.PARAM_PRELU)
    # Final layer: single linear output channel, no activation.
    self.c4 = M.ConvLayer1D(1, 1)