Example #1
class Model(nn.Module):
    def __init__(self, gz, dim):
        super(Model, self).__init__()
        self.gz = gz
        self.conv_model = nn.Sequential()
        self.act = nn.ELU
        self.conv = nn.Conv2d
        self.bn = nn.BatchNorm2d
        if dim == 3:
            self.conv = nn.Conv3d
            self.bn = nn.BatchNorm3d
        bn = True
        self.conv_model.add_module("conv1",
                                   self.conv(4, 32, 5, stride=2, padding=2))
        if bn:
            self.conv_model.add_module("bn1", self.bn(32))
        self.conv_model.add_module("act1", self.act())
        for x in range(16):
            self.conv_model.add_module("conv%d" % (x + 2),
                                       self.conv(32, 32, 5, stride=1, padding=2))
            if bn:
                self.conv_model.add_module("bn%d" % (x + 2), self.bn(32))
            self.conv_model.add_module("act%d" % (x + 2), self.act())

        print(self.conv_model)

        #create the distances / norm generator
        self.distyNorm = DistyNorm(dim, norm="euc")
        self.disty = Disty(dim, gz, self.distyNorm)
        #create the aggregator
        self.aggMax = AggMax()
        self.agg = Agg(dim, gz, self.aggMax)
        #create the render function
        self.R = torch.nn.Sequential(torch.nn.Linear(2, 16), torch.nn.ELU(),
                                     torch.nn.Linear(16, 16), torch.nn.ELU(),
                                     torch.nn.Linear(16, 4))
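
All five variants assume the same surrounding scaffolding, which the snippets do not show. A sketch of the imports they appear to rely on (the module providing the project-specific DistyNorm / Disty / Agg / AggMax / AggSum helpers is unknown, so that line is a placeholder):

import numpy as np                   # only the commented-out np.concatenate(...) needs it
import torch
import torch.nn as nn
from torch.autograd import Variable  # legacy wrapper, still used in Example #5
# from <project helpers> import DistyNorm, Disty, Agg, AggMax, AggSum  # placeholder
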
Example #2
class Model(nn.Module):
    def __init__(self, gz, dim):
        super(Model, self).__init__()
        self.gz = gz
        self.conv_model = nn.Sequential()
        self.act = nn.ELU  # alternative tried: nn.Softplus
        self.conv = nn.Conv2d
        self.bn = nn.BatchNorm2d
        if dim == 3:
            self.conv = nn.Conv3d
            self.bn = nn.BatchNorm3d
        bn = False

        #self.conv_model.add_module("conv1",self.conv(4, 8, 5, stride=2,padding=2))
        self.conv_model.add_module("conv1",
                                   self.conv(2, 8, 5, stride=2, padding=2))
        if bn:
            self.conv_model.add_module("bn1", self.bn(8))
        self.conv_model.add_module("act1", self.act())
        for x in range(3):
            self.conv_model.add_module("conv%d" % (x + 2),
                                       self.conv(8, 8, 5, stride=1, padding=2))
            if bn:
                self.conv_model.add_module("bn%d" % (x + 2), self.bn(8))
            self.conv_model.add_module("act%d" % (x + 2), self.act())

        print(self.conv_model)

        #create the distances / norm generator
        self.distyNorm = DistyNorm(dim, norm="euc")
        self.disty = Disty(dim, gz, self.distyNorm)
        #create the aggregator
        self.aggMax = AggMax()
        self.aggSum = AggSum()
        self.agg = Agg(dim, gz, self.aggSum)
        #create the render function
        self.R = torch.nn.Sequential(torch.nn.Linear(2, 4), torch.nn.ELU(),
                                     torch.nn.Linear(4, 4), torch.nn.ELU(),
                                     torch.nn.Linear(4, 2))

    def forward(self, goals, inputs):
        tmp = self.disty.forward(inputs)
        R_tmp = self.R.forward(tmp)
        #tmp=tmp.prod(1,True)
        inputs_grid = self.agg.forward(R_tmp, inputs)
        output = self.conv_model(inputs_grid)
        # per-example scalar energy (note: unused below; output.sum() is what
        # gets differentiated)
        energy = output.mean(3, False).mean(2, False).mean(1, False)

        #iterate...
        loss = 0
        for idx in range(len(inputs)):
            dE_dx, = torch.autograd.grad(output.sum(),
                                         inputs[idx]['points'],
                                         create_graph=True)
            inputs[idx]['pred'] = dE_dx
            loss += (((dE_dx - goals[idx]['points']) /
                      inputs[idx]['points'].size()[0])**2).sum()

        return loss, 0, 0  #g_sum,g_abs_sum
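
The pattern all of these variants share is differentiating a scalar network output with respect to its input points (create_graph=True) and then training that gradient to match a target field. A minimal, self-contained illustration of just that pattern, with purely illustrative names and shapes:

# Standalone sketch of the gradient-matching idea used above (illustrative only;
# energy_net, the shapes, and the random targets are not from the snippets).
points = torch.rand(100, 2, requires_grad=True)
target = torch.rand(100, 2)
energy_net = nn.Sequential(nn.Linear(2, 16), nn.ELU(), nn.Linear(16, 1))
energy = energy_net(points).sum()
dE_dx, = torch.autograd.grad(energy, points, create_graph=True)
loss = ((dE_dx - target)**2).mean()
loss.backward()  # create_graph=True lets gradients reach energy_net's weights
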
Example #3
class Model(nn.Module):
    def __init__(self, gz, dim):
        super(Model, self).__init__()
        self.gz = gz
        self.conv_model = nn.Sequential()
        self.act = nn.ELU
        self.conv = nn.Conv2d
        self.bn = nn.BatchNorm2d
        if dim == 3:
            self.conv = nn.Conv3d
            self.bn = nn.BatchNorm3d
        bn = True
        self.conv_model.add_module("conv1",
                                   self.conv(12, 8, 5, stride=2, padding=2))
        if bn:
            self.conv_model.add_module("bn1", self.bn(8))
        self.conv_model.add_module("act1", self.act())
        for x in range(6):
            self.conv_model.add_module("conv%d" % (x + 2),
                                       self.conv(8, 8, 5, stride=1, padding=2))
            if bn:
                self.conv_model.add_module("bn%d" % (x + 2), self.bn(8))
            self.conv_model.add_module("act%d" % (x + 2), self.act())

        print(self.conv_model)

        #create the distances / norm generator
        self.distyNorm = DistyNorm(dim, norm="euc")
        self.disty = Disty(dim, gz, self.distyNorm)
        #create the aggregator
        self.aggMax = AggMax()
        self.agg = Agg(dim, gz, self.aggMax)
        #create the render function
        self.R = torch.nn.Sequential(torch.nn.Linear(12, 16), torch.nn.ELU(),
                                     torch.nn.Linear(16, 16), torch.nn.ELU(),
                                     torch.nn.Linear(16, 4))

    def forward(self, goals, inputs, backgrounds):
        inputs_grid = self.agg.forward(
            self.R.forward(self.disty.forward(inputs)), inputs)
        goals_grid = self.agg.forward(
            self.R.forward(self.disty.forward(goals)), goals)
        backgrounds_grid = self.agg.forward(
            self.R.forward(self.disty.forward(backgrounds)), backgrounds)
        print("I", inputs_grid.sum(), "G", goals_grid.sum(), "B",
              backgrounds_grid.sum())
        #inputs_grid=self.agg.forward(self.disty.forward(inputs),inputs)
        #goals_grid=self.agg.forward(self.disty.forward(goals),goals)
        scenes = torch.cat((inputs_grid, goals_grid, backgrounds_grid), 1)

        output = self.conv_model(scenes)
        print("O", output.sum())
        #inputs_tensor = torch.cat([ input['points'] for input in inputs ],0) #TODO WHY CANT I DO THIS?!?!?!
        #d_output_to_input = torch.cat(torch.autograd.grad(output,inputs_tensor,create_graph=True),0)
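        # The batched attempt above fails because torch.autograd.grad can only
        # differentiate `output` with respect to tensors that were actually used
        # to compute it; a fresh torch.cat of the per-example point tensors is a
        # new tensor that never entered the graph, so autograd reports it as
        # unused instead of returning a gradient.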

        #iterate...
        if True:
            cost = 0
            g_sum = 0
            g_abs_sum = 0
            for idx in range(len(inputs)):
                d_output_to_input = torch.autograd.grad(output[idx].mean(),
                                                        inputs[idx]['points'],
                                                        create_graph=True)[0]
                print("D", d_output_to_input.sum())
                cost += ((d_output_to_input - goals[idx]['points'])**2).mean()
                g_sum += d_output_to_input.sum()
                g_abs_sum += d_output_to_input.abs().sum()
            cost /= len(inputs)
            g_sum /= len(inputs)
            g_abs_sum /= len(inputs)

        #do it all at once
        #goals_tensor = torch.cat([ goal['points'] for goal in goals ],0)
        #d_output_to_input = torch.autograd.grad(output.mean(),inputs['points'],create_graph=True)
        #d_output_to_input = torch.cat(torch.autograd.grad(output.mean(),inputs['points'],create_graph=True),1)
        #cost = ((d_output_to_input-goals_tensor)**2).mean()
        #g_sum = d_output_to_input.sum()
        #g_abs_sum = d_output_to_input.abs().sum()

        return cost, g_sum, g_abs_sum
Example #4
class Model(nn.Module):
    def __init__(self, gz, dim):
        super(Model, self).__init__()
        self.gz = gz
        self.conv_model = nn.Sequential()
        self.act = nn.ELU
        self.conv = nn.Conv2d
        self.bn = nn.BatchNorm2d
        if dim == 3:
            self.conv = nn.Conv3d
            self.bn = nn.BatchNorm3d
        bn = True
        self.conv_model.add_module("conv1",
                                   self.conv(4, 32, 5, stride=2, padding=2))
        if bn:
            self.conv_model.add_module("bn1", self.bn(32))
        self.conv_model.add_module("act1", self.act())
        for x in range(16):
            self.conv_model.add_module("conv%d" % (x + 2),
                                       self.conv(32, 32, 5, stride=1, padding=2))
            if bn:
                self.conv_model.add_module("bn%d" % (x + 2), self.bn(32))
            self.conv_model.add_module("act%d" % (x + 2), self.act())

        print(self.conv_model)

        #create the distances / norm generator
        self.distyNorm = DistyNorm(dim, norm="euc")
        self.disty = Disty(dim, gz, self.distyNorm)
        #create the aggregator
        self.aggMax = AggMax()
        self.agg = Agg(dim, gz, self.aggMax)
        #create the render function
        self.R = torch.nn.Sequential(torch.nn.Linear(2, 16), torch.nn.ELU(),
                                     torch.nn.Linear(16, 16), torch.nn.ELU(),
                                     torch.nn.Linear(16, 4))

    def forward(self, goals, inputs):
        inputs_grid = self.agg.forward(
            self.R.forward(self.disty.forward(inputs)), inputs)
        scenes = inputs_grid  #torch.cat((inputs_grid,goals_grid),1)

        output = self.conv_model(scenes)

        #inputs_tensor = torch.cat([ input['points'] for input in inputs ],0) #TODO WHY CANT I DO THIS?!?!?!
        #d_output_to_input = torch.cat(torch.autograd.grad(output,inputs_tensor,create_graph=True),0)


        #iterate...
        dpoints = []
        dattrs = []
        if True:
            cost = 0
            g_sum = 0
            g_abs_sum = 0
            for idx in range(len(inputs)):
                if False:
                    # Disabled attrs-only branch; note it appends to a `doutputs`
                    # list that is never initialized, so it would need fixing
                    # before being re-enabled.
                    d_output_to_input = torch.autograd.grad(
                        output[idx].mean(), inputs[idx]['attrs'],
                        create_graph=True)[0]
                    doutputs.append(d_output_to_input.data.numpy().reshape(1, -1))
                    cost += ((d_output_to_input - goals[idx]['attrs'])**2).mean()
                else:
                    d_output_to_points, d_output_to_attrs = torch.autograd.grad(
                        output[idx].mean(),
                        [inputs[idx]['points'], inputs[idx]['attrs']],
                        create_graph=True)
                    dpoints.append(d_output_to_points.data.numpy())
                    dattrs.append(d_output_to_attrs.data.numpy())
                    cost += (((d_output_to_points - goals[idx]['force'])**2).mean() +
                             ((d_output_to_attrs - goals[idx]['attrs'])**2).mean())
                g_sum += d_output_to_points.sum() + d_output_to_attrs.sum()
                g_abs_sum += (d_output_to_points.abs().sum() +
                              d_output_to_attrs.abs().sum())
            cost /= len(inputs)
            g_sum /= len(inputs)
            g_abs_sum /= len(inputs)


        #do it all at once
        #goals_tensor = torch.cat([ goal['points'] for goal in goals ],0)
        #d_output_to_input = torch.autograd.grad(output.mean(),inputs['points'],create_graph=True)
        #d_output_to_input = torch.cat(torch.autograd.grad(output.mean(),inputs['points'],create_graph=True),1)
        #cost = ((d_output_to_input-goals_tensor)**2).mean()
        #g_sum = d_output_to_input.sum()
        #g_abs_sum = d_output_to_input.abs().sum()

        return cost, g_sum, g_abs_sum, dpoints, dattrs  #np.concatenate(doutputs, axis=1)
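
A hypothetical call sketch for this variant, showing the dict keys its forward pass reads ('points' and 'attrs' on each input, 'force' and 'attrs' on each goal); the batch size, point count, and attribute width are assumptions, and it presumes Disty/Agg accept lists of such dicts:

# Hypothetical smoke test for Example #4 (all shapes are assumptions).
model = Model(gz=32, dim=2)
inputs = [{'points': torch.rand(100, 2, requires_grad=True),
           'attrs': torch.rand(100, 3, requires_grad=True)} for _ in range(4)]
goals = [{'force': torch.rand(100, 2),
          'attrs': torch.rand(100, 3)} for _ in range(4)]
cost, g_sum, g_abs_sum, dpoints, dattrs = model(goals, inputs)
cost.backward()  # dpoints / dattrs come back as per-example numpy arrays
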
Example #5
class Model(nn.Module):
    def __init__(self, gz, dim):
        super(Model, self).__init__()
        self.gz = gz
        self.conv_model = nn.Sequential()
        self.act = nn.ELU
        self.conv = nn.Conv2d
        self.bn = nn.BatchNorm2d
        if dim == 3:
            self.conv = nn.Conv3d
            self.bn = nn.BatchNorm3d
        bn = True
        self.conv_model.add_module("conv1",
                                   self.conv(2, 1, 5, stride=1, padding=2))
        if bn:
            self.conv_model.add_module("bn1", self.bn(1))
        self.conv_model.add_module("act1", self.act())
        for x in range(2):
            self.conv_model.add_module("conv%d" % (x + 2),
                                       self.conv(1, 1, 5, stride=1, padding=2))
            if bn:
                self.conv_model.add_module("bn%d" % (x + 2), self.bn(1))
            self.conv_model.add_module("act%d" % (x + 2), self.act())

        print(self.conv_model)

        #create the distances / norm generator
        self.distyNorm = DistyNorm(dim, norm="euc")
        self.disty = Disty(dim, gz, self.distyNorm)
        #create the aggregator
        self.aggMax = AggMax()
        self.agg = Agg(dim, gz, self.aggMax)
        #create the render function
        self.R = torch.nn.Linear(1, 1)

    def forward(self, goals, inputs):
        inputs_grid = self.agg.forward(
            self.R.forward(self.disty.forward(inputs)), inputs)
        goals_grid = self.agg.forward(
            self.R.forward(self.disty.forward(goals)), goals)
        #inputs_grid=self.agg.forward(self.disty.forward(inputs),inputs)
        #goals_grid=self.agg.forward(self.disty.forward(goals),goals)
        scenes = torch.cat((inputs_grid, goals_grid), 1)

        output = self.conv_model(scenes)

        #inputs_tensor = torch.cat([ input['points'] for input in inputs ],0) #TODO WHY CANT I DO THIS?!?!?!
        #d_output_to_input = torch.cat(torch.autograd.grad(output,inputs_tensor,create_graph=True),0)

        #goals_tensor = torch.cat([ goal['points'] for goal in goals ],0)

        cost = 0
        g_sum = 0
        g_abs_sum = 0
        for idx in range(len(inputs)):
            d_output_to_input = torch.autograd.grad(output[idx].mean(),
                                                    inputs[idx]['points'],
                                                    create_graph=True)[0]
            # target is the goal-to-input displacement, held constant
            # (Variable is the legacy pre-0.4 autograd wrapper)
            diff = Variable(goals[idx]['points'] - inputs[idx]['points'],
                            requires_grad=False)
            #cost+=((d_output_to_input-goals[idx]['points'])**2).mean()
            cost += ((d_output_to_input - diff)**2).mean()
            g_sum += d_output_to_input.sum()
            g_abs_sum += d_output_to_input.abs().sum()
        cost /= len(inputs)
        g_sum /= len(inputs)
        g_abs_sum /= len(inputs)

        return cost, g_sum, g_abs_sum
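
Because this variant trains dE/dx to match the goal-minus-input displacement, one plausible way to use the trained model is to step the points along the predicted gradient. Everything below (step size of 1.0, iteration count, point construction) is an assumption, and the grid construction simply mirrors forward() above:

# Hypothetical rollout sketch for Example #5 (assumptions throughout).
model = Model(gz=32, dim=2)
points = torch.rand(100, 2, requires_grad=True)
goals = [{'points': torch.rand(100, 2)}]
for _ in range(10):
    inputs = [{'points': points}]
    inputs_grid = model.agg.forward(
        model.R.forward(model.disty.forward(inputs)), inputs)
    goals_grid = model.agg.forward(
        model.R.forward(model.disty.forward(goals)), goals)
    output = model.conv_model(torch.cat((inputs_grid, goals_grid), 1))
    dE_dx = torch.autograd.grad(output[0].mean(), points)[0]
    # dE/dx was trained to approximate (goal - input), so apply it as a full step
    points = (points + dE_dx).detach().requires_grad_(True)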