def do_train(cur_step, optimizer, sim, model):
    # Variant that trains a network: run_sim is called with `model` and returns
    # the loss, the final displacement `dis`, and the applied control `vext`.
    model.train()
    while True:
        steps = 4 * 25 * spf
        reset_sim(sim)
        st = time.time()
        loss, dis, vext = run_sim(steps, sim, model)
        en0 = time.time()
        # loss = get_loss(steps,sim)
        optimizer.zero_grad()
        print('step={}'.format(cur_step))
        print('forward time={}'.format(en0 - st))
        print('loss={}'.format(loss.data))
        f.write('step {}: loss={}\n'.format(cur_step, loss.data))
        print('step {}: loss={}\n'.format(cur_step, loss.data))
        loss.backward()
        nn.utils.clip_grad_value_(model.parameters(), 10)
        # Stop once the final displacement is within tolerance in the x-y plane
        # and its z component lies in (0, 0.5).
        if dis[:2].norm() < 0.5 and dis[2] > 0 and dis[2] < 0.5:
            break
        en1 = time.time()
        print('backward time={}'.format(en1 - en0))
        optimizer.step()
        # f.write('{}\n'.format(vext.grad))
        f.write('{}\n'.format(vext * scalev))
        arcsim.delete_mesh(sim.cloths[0].mesh)
        # break
    torch.save(model.state_dict(), sys.argv[1] + "/3.pt")

def do_train(cur_step, optimizer, sim, vext):
    # Variant that optimizes the external control `vext` directly, over a
    # longer rollout (7 * 25 * spf simulation steps).
    while True:
        steps = 7 * 25 * spf
        reset_sim(sim)
        st = time.time()
        loss, dis = run_sim(steps, sim, vext)
        # loss = loss + regularization(vext)
        en0 = time.time()
        optimizer.zero_grad()
        print('step={}'.format(cur_step))
        print('forward time={}'.format(en0 - st))
        print('loss={}'.format(loss.data))
        f.write('step {}: loss={}\n'.format(cur_step, loss.data))
        print('step {}: loss={}\n'.format(cur_step, loss.data))
        loss.backward()
        vext.grad.data.clamp_(-10, 10)
        if dis[:2].norm() < 0.5 and dis[2] > 0 and dis[2] < 0.5:
            break
        en1 = time.time()
        print('backward time={}'.format(en1 - en0))
        print(vext.grad)
        optimizer.step()
        print(vext * scalev)
        f.write('{}\n'.format(vext * scalev))
        arcsim.delete_mesh(sim.cloths[0].mesh)

def do_train(cur_step, optimizer, sim, vext):
    # Same direct optimization of `vext`, but over the shorter 4 * 25 * spf rollout.
    while True:
        steps = 4 * 25 * spf
        reset_sim(sim)
        st = time.time()
        loss, dis = run_sim(steps, sim, vext)
        en0 = time.time()
        # loss = get_loss(steps,sim)
        optimizer.zero_grad()
        print('step={}'.format(cur_step))
        print('forward time={}'.format(en0 - st))
        print('loss={}'.format(loss.data))
        f.write('step {}: loss={}\n'.format(cur_step, loss.data))
        print('step {}: loss={}\n'.format(cur_step, loss.data))
        loss.backward()
        vext.grad.data.clamp_(-10, 10)
        if dis[:2].norm() < 0.5 and dis[2] > 0 and dis[2] < 0.5:  # loss < 1e-3:
            break
        # dgrad, stgrad, begrad = torch.autograd.grad(loss, [density, stretch, bend])
        en1 = time.time()
        print('backward time={}'.format(en1 - en0))
        print(vext.grad)
        optimizer.step()
        print(vext * scalev)
        # f.write('{}\n'.format(vext.grad))
        f.write('{}\n'.format(vext * scalev))
        arcsim.delete_mesh(sim.cloths[0].mesh)

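
# Note: the do_train variants above clip gradients in two functionally similar ways:
# the network version calls nn.utils.clip_grad_value_ on model.parameters(), while the
# vext versions clamp the gradient tensor in place. The helper below is an illustrative,
# self-contained sketch of both calls on toy tensors; it is not part of the original
# training code (call _clip_demo() to run it).
def _clip_demo():
    import torch
    import torch.nn as nn

    # Direct in-place clamp of a leaf tensor's gradient, as in do_train(..., vext).
    v_demo = torch.zeros(3, dtype=torch.float64, requires_grad=True)
    (100.0 * v_demo.sum()).backward()        # deliberately large gradient
    v_demo.grad.data.clamp_(-10, 10)
    print(v_demo.grad)                       # all entries clipped to 10

    # Value clipping over all parameters, as in do_train(..., model).
    lin_demo = nn.Linear(3, 1)
    (100.0 * lin_demo(torch.ones(1, 3)).sum()).backward()
    nn.utils.clip_grad_value_(lin_demo.parameters(), 10)
    print(max(p.grad.abs().max().item() for p in lin_demo.parameters()))  # <= 10
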
def naive_guess(density, stretchori, bend):
    # Early return: the heuristic estimation below is currently disabled and the
    # inputs are passed through unchanged.
    return density, stretchori, bend

    # Estimate the density from a finite-difference force balance over frames 5-24:
    # per-node acceleration a = dv / dt (with gravity added back on z), mass m = f_ext / a.
    M = []
    for i in range(5, 25):
        fstd = sys.argv[2] + '/%04d_00.obj' % (i)
        mesh = arcsim.Mesh()
        arcsim.load_obj(mesh, fstd)
        fstd = sys.argv[2] + '/%04d_00.obj' % (i + 1)
        arcsim.load_obj(sim.cloths[0].mesh, fstd)
        arcsim.compute_ms_data(sim.cloths[0].mesh)
        # Zero the node masses before accumulating external forces, so that the
        # mass-proportional terms should not enter fext.
        for n0 in sim.cloths[0].mesh.nodes:
            n0.m = torch.zeros([], dtype=torch.float64)
        n = len(sim.cloths[0].mesh.nodes)
        fext = torch.zeros([n, 3], dtype=torch.float64)
        Jext = torch.zeros([n, 3], dtype=torch.float64)
        arcsim.add_external_forces(sim.cloths[0], sim.gravity, sim.wind, fext, Jext)
        m = []
        j = 0  # node counter (renamed from `i` to avoid shadowing the frame index)
        for n0, n1 in zip(mesh.nodes, sim.cloths[0].mesh.nodes):
            a = (n1.v - n0.v) / config['frame_time'] * config['frame_steps']
            a[2] += 9.8
            # Skip nodes 2 and 3 (the original `i != 2 or i != 3` was always true).
            if j != 2 and j != 3:
                m.append((fext[j].sum() / a.sum()))
            j += 1
        arcsim.delete_mesh(mesh)
        M.append(torch.relu(torch.stack(m).sum()))
    print('density={}'.format(torch.stack(M).mean()))
    density = torch.stack(M).mean() / scaleden

    # Estimate the stretching stiffness from the static sag of frame 4.
    mesh = arcsim.Mesh()
    fstd = sys.argv[2] + '/%04d_00.obj' % (4)
    arcsim.load_obj(mesh, fstd)
    mini = 1e10
    maxi = -1e10
    for n0 in mesh.nodes:
        mini = min(mini, n0.x[2])
        maxi = max(maxi, n0.x[2])
    defo = maxi - mini - 1
    stre = density * 9.8 / defo * 2
    arcsim.delete_mesh(mesh)
    print(stre)
    stretch = torch.tensor([[1, 0, 1, 0]], dtype=torch.float64).repeat([6, 1]) * stre / scalestr
    # stretch[:,0] = stretchori[:,0]
    stretch[:, 1] = stretchori[:, 1]
    # stretch[:,2] = stretchori[:,2]
    stretch[:, 3] = stretchori[:, 3]
    print(stretch)
    bend = bend  # bending stiffness is left at its input value

    matconfig['density'] = density.detach().numpy().tolist()
    matconfig['stretching'] = stretch.detach().numpy().tolist()
    matconfig['bending'] = bend.detach().numpy().tolist()
    save_config(matconfig, sys.argv[1] + '/mat.json')
    return density, stretch, bend

def get_loss_eval(steps, sim):
    fstd = sys.argv[2] + '/%04d_00.obj' % (steps / 2)
    mesh = arcsim.Mesh()
    arcsim.load_obj(mesh, fstd)
    diffs = []
    for node0, node1 in zip(mesh.nodes, sim.cloths[0].mesh.nodes):
        diffs.append((node0.x - node1.x).norm())
    arcsim.delete_mesh(mesh)
    return torch.stack(diffs).mean()

def get_loss(steps, sim):
    # numpy counterpart of get_loss_eval: mean squared per-node distance,
    # detached from the autograd graph.
    fstd = sys.argv[2] + '/%04d_00.obj' % (steps / 2)
    mesh = arcsim.Mesh()
    arcsim.load_obj(mesh, fstd)
    diffs = []
    for node0, node1 in zip(mesh.nodes, sim.cloths[0].mesh.nodes):
        diffs.append((np.array(arcsim.tovec(node0.x)) - np.array(arcsim.tovec(node1.x))) ** 2)
    arcsim.delete_mesh(mesh)
    return np.stack(diffs).sum(axis=1).mean()

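
# get_loss_eval and get_loss reduce the per-node position error differently: the first
# averages Euclidean distances on torch tensors (so it stays differentiable), the second
# averages squared distances in numpy, detached from the autograd graph. The helper below
# is an illustrative, self-contained comparison on made-up per-node differences; it is not
# part of the original code (call _loss_reduction_demo() to run it).
def _loss_reduction_demo():
    import numpy as np
    import torch

    diff_demo = torch.tensor([[0.1, 0.0, 0.0],
                              [0.0, 0.2, 0.0],
                              [0.0, 0.0, 0.3]], dtype=torch.float64)  # node0.x - node1.x for 3 nodes

    # Same reduction as get_loss_eval: mean per-node distance.
    loss_eval = torch.stack([d.norm() for d in diff_demo]).mean()
    # Same reduction as get_loss: mean per-node squared distance.
    loss_np = np.stack([d.numpy() ** 2 for d in diff_demo]).sum(axis=1).mean()

    print(loss_eval.item())  # 0.2
    print(loss_np)           # (0.01 + 0.04 + 0.09) / 3 ≈ 0.0467
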
def renew_loss():
    print('renew', steps)
    matconfig['density'] = density.detach().numpy().tolist()
    matconfig['stretching'] = stretch.detach().numpy().tolist()
    matconfig['bending'] = bend.detach().numpy().tolist()
    save_config(matconfig, sys.argv[1] + '/mat.json')
    arcsim.delete_mesh(sim.cloths[0].mesh)
    reset_sim(sim)
    loss = run_sim(steps, sim)
    get_grad(steps, sim, loss, density, stretch, bend)
    return loss

def do_train(cur_step, sim):
    while True:
        steps = 4 * 25 * spf
        reset_sim(sim)
        loss, dis = run_sim(steps, sim)
        # loss = get_loss(steps,sim)
        print('step={}'.format(cur_step))
        print('loss={}'.format(loss.data))
        f.write('step {}: loss={}\n'.format(cur_step, loss.data))
        print('step {}: loss={}\n'.format(cur_step, loss.data))
        # f.write('{}\n'.format(vext.grad))
        arcsim.delete_mesh(sim.cloths[0].mesh)
        break

def do_train(cur_step, optimizer, sim):
    # Material-estimation variant: reset_sim returns the current (density, stretch,
    # bend) parameters and optimizer.step is given the renew_loss closure.
    global steps
    while True:
        steps = min(50, cur_step * 2 + 2)
        density, stretch, bend = reset_sim(sim)
        st = time.time()
        loss = run_sim(steps, sim)
        en0 = time.time()
        # loss = get_loss(steps,sim)
        optimizer.zero_grad()
        print('step={}'.format(cur_step))
        print('forward time={}'.format(en0 - st))
        print('loss={}'.format(loss.data))
        f.write('step {}: d={} loss={}\n'.format(cur_step, density.data * scaleden, loss.data))
        print('step {}: d={} loss={}\n'.format(cur_step, density.data * scaleden, loss.data))
        if loss < 1e-4:
            break
        # loss.backward()
        # dgrad, stgrad, begrad = torch.autograd.grad(loss, [density, stretch, bend])
        # density.grad.data.clamp_(-1,1)
        # stretch.grad.data.clamp_(-1,1)
        # bend.grad.data.clamp_(-1e-3,1e-3)
        # print(density.grad)
        # print(stretch.grad)
        # print(bend.grad)
        optimizer.step(renew_loss)
        en1 = time.time()
        print('backward time={}'.format((en1 - st) / steps))
        print(density * scaleden)
        print(stretch * scalestr)
        print(bend * scaleben)
        # Distance of the current estimate from the reference material matstd.
        ld = (torch.tensor(matstd['density'], dtype=torch.float64) - density * scaleden).norm().data
        ls = (torch.tensor(matstd['stretching'], dtype=torch.float64) - stretch * scalestr).norm().data
        lb = (torch.tensor(matstd['bending'], dtype=torch.float64) - bend * scaleben).norm().data
        print('dis={} {} {}\n'.format(ld, ls, lb))
        matconfig['density'] = density.detach().numpy().tolist()
        matconfig['stretching'] = stretch.detach().numpy().tolist()
        matconfig['bending'] = bend.detach().numpy().tolist()
        save_config(matconfig, sys.argv[1] + '/mat.json')
        arcsim.delete_mesh(sim.cloths[0].mesh)
        get_dis(density, stretch, bend)

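
# optimizer.step(renew_loss) passes a closure, which is the calling convention of
# closure-based optimizers such as torch.optim.LBFGS; the optimizer construction is not
# shown in this listing, so LBFGS is only an assumption here. The helper below is an
# illustrative, self-contained sketch of that closure pattern on a toy quadratic, with
# loss.backward() standing in for the get_grad call used by renew_loss; it is not part
# of the original code (call _closure_demo() to run it).
def _closure_demo():
    import torch

    param_demo = torch.tensor(5.0, requires_grad=True)
    opt_demo = torch.optim.LBFGS([param_demo], lr=0.5, max_iter=10)

    def closure():
        # Like renew_loss: re-run the forward pass, populate gradients, return the loss.
        opt_demo.zero_grad()
        loss = (param_demo - 2.0) ** 2
        loss.backward()
        return loss

    for _ in range(5):
        opt_demo.step(closure)
    print(param_demo)  # converges toward 2.0
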