def run(self):
    """Main control loop: poll sensors, apply received commands, publish state.

    Loops until self.running is cleared (either locally or by the remote
    command stream), then shuts down the sensor and comm channels.
    NOTE(review): 'recieve' is a misspelled attribute on the comm object;
    it cannot be renamed here without changing the comm class as well.
    """
    # count only feeds the commented-out debug logging below.
    count = 0
    while self.running:
        # Update sensor values
        util.update()
        self.encoders.update()
        front, rear = util.sensors()
        # r is read but never used — presumably kept for debugging. TODO confirm.
        r = self.sensor.right
        #if count < 5000:
        #    if front not in [-1, 255]:
        #        print str(count) + "," + str(abs(int(self.encoders.actual[0])/2)) + "," + str(front)
        #        count += 1
        # Dead-reckoning state update from encoder ticks.
        self.state.update(self.encoders.actual)
        # Get recieved commands
        sensing = self.comm.recieve.sensing
        power = self.comm.recieve.power
        rotate = self.comm.recieve.rotate
        # Remote side can stop this loop by clearing 'running'.
        self.running = self.comm.recieve.running
        # Update robot based on commands: pause/resume the sensor to match
        # the requested 'sensing' flag.
        if sensing and self.sensor.paused:
            self.sensor.resume()
        elif (not sensing) and (not self.sensor.paused):
            self.sensor.pause()
        util.move(power, rotate)
        # Send data: pose estimate plus raw encoder/range readings.
        self.comm.send(self.state.x, self.state.y, self.state.heading,
                       self.encoders.actual[0], front, rear)
        # Sleep so sensors have time to update
        time.sleep(.02)
    # Loop exited: release hardware and communications.
    self.sensor.stop()
    self.comm.stop()
def collect_directory(self):
    """Flatten all supported images under self.image_path into that directory.

    Walks the tree, moves every image file up to self.image_path, records the
    resulting paths in self.image_list (sorted), and removes directories left
    empty by the moves.  Errors out via util.perror if nothing was found.
    """
    # Initialize Process Counter
    curr = 0
    # Initialize Image Paths List
    self.image_list = []
    # Congregate Image Files
    for path, subdirs, files in os.walk(str(self.image_path)):
        for file in files:
            # Select Files With Supported File Types
            if (file.endswith((".jpg", ".jpeg", ".png", ".tiff",
                               ".JPG", ".JPEG", ".PNG", ".TIFF"))):
                # Create Local Image Data Map
                image_data = {
                    'dir': util.absolute(path),
                    'name': file,
                    'path': util.form_path([path, file])
                }
                # Move File To Opened Path (skip files already at the top level)
                if (self.image_path != image_data['dir']):
                    util.move(
                        image_data['path'],
                        util.absolute(
                            util.form_path(
                                [self.image_path, image_data['name']])))
                # Add File To Image List (its post-move location)
                self.image_list.append(
                    util.form_path([self.image_path, image_data['name']]))
                # Update Prompt (carriage return keeps it on one line)
                print("\rLoaded {} Images - {}".format(
                    curr, self.image_path), end="")
                curr += 1
    # Delete Empty Directories left behind by the moves above.
    for file in os.listdir(str(self.image_path)):
        # Create Normalized Path
        dir_path = util.form_path([self.image_path, file])
        # Check Directory Existence
        if (util.exists(dir_path)):
            try:
                # Remove Empty Directory — rmdir only succeeds when empty.
                os.rmdir(dir_path)
            except OSError:
                # Non-empty or non-directory entries are intentionally skipped.
                pass
    # Update Prompt
    print("\rLoaded All Images - {}".format(self.image_path))
    # Verify Image List Length
    if (not (self.image_list)):
        util.perror("spectra: No images found")
    # Sort Image Paths in Lexicographical Order
    self.image_list.sort()
def control_speed(self):
    """Line-following speed controller (Python 2).

    Uses the centroid of the white mask as the line position and steers with
    a PID law (w2_Kp/w2_Ki/w2_Kd) on the horizontal pixel error; a red mask
    sighting during work step 3 triggers a short forward nudge.
    """
    global white_mask, red_mask, twist_pub, image_width, current_work
    # Image moments give mask area (m00) and centroid (m10/m00).
    M_white = cv2.moments(white_mask)
    M_red = cv2.moments(red_mask)
    if M_white['m00'] > 0:
        cx= int(M_white['m10'] / M_white['m00'])
        # BEGIN CONTROL
        # Error is centroid offset from the image centre line.
        err = cx - image_width / 2
        self.w2_twist.linear.x = 0.5 # and <= 1.7
        # PID terms; 0.05 is presumably the control period in seconds — TODO confirm.
        self.w2_integral = self.w2_integral + err * 0.05
        self.w2_derivative = (err - self.w2_previous_error) / 0.05
        self.w2_twist.angular.z = float(err) * self.w2_Kp + (self.w2_Ki * float(self.w2_integral)) + (
            self.w2_Kd * float(self.w2_derivative))
        self.w2_previous_error = err
        twist_pub.publish(self.w2_twist)
        print 'saw white', self.w2_twist.angular.z
    else:
        print 'no white'
        # Losing the line during work step 3 means stop.
        if current_work == 3:
            time.sleep(1)
            self.w2_stop = True
            twist_pub.publish(Twist())
    if M_red['m00'] > 0 and current_work == 3:
        print 'saw red'
        util.move(0.1, linear_scale = 0.1, max_error = 0.02)
def versionthis(filetoversion):
    """Move a file into the repository, replace it with a symlink, record it.

    Refuses files with ACL 'deny' entries unless --ignoreacl is set, in which
    case the ACL is stripped before the move and re-applied to the repo copy.
    Any failure triggers undo.rollback() and re-raises.
    """
    global options
    try:
        if accesscontrollist.hasacl(filetoversion) and not options.ignoreacl:
            err = "filetoversion has a 'deny' in ACL permissions (ls -lde %s: %s) \n \
This program is currently not clever enough to check if you have permission to move/delete this file. \n \
To avoid this problem remove deny permissions from the access control entries \n \
or rerun this command with --ignoreacl" % (filetoversion, accesscontrollist.getacl(filetoversion))
            raise SyncherException(err)
        # TODO: verify that this file is not already added
        logging.info("should: check for dups")
        filetoversionpath, repospathofversionedfile, repospathtoputnewfilein = settings.getFileToVersionPathes(filetoversion)
        util.makedirs(repospathtoputnewfilein)
        acl = None
        if options.ignoreacl:
            # Strip the ACL so the move below cannot be blocked by it.
            acl = accesscontrollist.removeacl(filetoversion)
        util.move(filetoversionpath, repospathofversionedfile)  # repospathtoputnewfilein)
        if acl is not None:
            # Re-apply the preserved ACL on the repository copy.
            accesscontrollist.setacl(repospathofversionedfile, acl)
        # Leave a symlink where the original file was.
        util.symlink(repospathofversionedfile, filetoversionpath)
        syncdb.add(filetoversionpath)
    except Exception as e:
        logging.warn("ROLLING BACK because of %s" % e)
        undo.rollback()
        raise
def removesymlinks(repospath):
    """Undo repository symlinks listed in the syncher DB file.

    For each recorded path: verify it is still a symlink into this repo,
    delete it (unless --dry), restore any '-beforesyncher' backup, and
    optionally prune now-empty parent directories.  Failures roll back.
    """
    global options
    reposfilepath = os.path.abspath(repospath)
    with open(os.path.join(repospath, SYNCHER_DB_FILENAME)) as db:
        try:
            for line in db:
                # Each DB line is an absolute path that was symlinked.
                line = line.strip()
                if not os.path.islink(line):
                    logging.warn("file is not linked to repo: %s" % (line))
                elif not os.path.samefile(os.path.realpath(line), reposfilepath + line):
                    # Symlink exists but points somewhere else — leave it alone.
                    logging.warn("%s is a symbolic link to %s not %s. it will not be removed" % (line, os.path.realpath(line), reposfilepath + line))
                elif not options.dry:
                    logging.info("deleting symlink at %s", reposfilepath + line)
                    util.removesymlink(line)
                    if os.path.exists(line + "-beforesyncher"):
                        # Restore the pre-syncher backup into place.
                        util.move(line + "-beforesyncher", line)
                    else:
                        logging.info("no original file to restore for %s", line)
                if not options.dry and not os.path.exists(line) and options.deleteemptydirs:
                    util.deleteemptydirs(os.path.dirname(line))
        except Exception as e:
            logging.warn("ROLLING BACK because of %s" % e)
            undo.rollback()
            raise
def test_move2():
    """Every move adjacent to (but not touching) the ring must be rejected.

    BUG FIX: the original wrapped the whole loop in a single
    ``pytest.raises`` block, so the context manager exited as soon as the
    first candidate raised ValueError and the remaining nine moves were
    never exercised.  The ``raises`` context must wrap each call.
    """
    disks = {(2, 2): "x",
             (1, 1): "o", (2, 1): "o", (3, 1): "o",
             (1, 2): "o", (1, 3): "o", (2, 3): "o",
             (3, 2): "o", (3, 3): "o"}
    # Coordinates that do not flank any opponent run — all illegal.
    tries = [(1, 0), (3, 0), (5, 0), (0, 1), (0, 3),
             (0, 5), (4, 1), (4, 3), (1, 4), (3, 4)]
    for attempt in tries:
        with pytest.raises(ValueError):
            util.move(disks, attempt, "x")
def organize_directory(self):
    """Sort detected images into numbered per-scene folders.

    Requires self.scenes (a list of image-path lists) to be populated;
    creates one SCENE-named folder per scene under self.image_path and
    moves each scene's images into it.
    """
    if (self.scenes):
        # Iterate Over Scenes List (1-based scene numbering)
        for curr, scene in enumerate(self.scenes, 1):
            # Create Scene Folder
            util.mkdir(
                util.form_path([self.image_path, SCENE.format(curr)]))
            # Move Images To Scene Folder
            for image in scene:
                try:
                    # Generate Source and Destination Paths
                    src = util.absolute(image)
                    dst = util.normalize(
                        util.form_path([
                            util.dirname(image),
                            SCENE.format(curr),
                            util.basename(image)
                        ]))
                    # Move Images To Scene Folder
                    util.move(src, dst)
                except FileNotFoundError:
                    # Already-moved or deleted images are skipped silently.
                    pass
        # Update Prompt
        print("Organized All Images ")
    else:
        util.perror("spectra: No scenes found to analyze")
def blend1():
    """Build a circular/hyperbola/circular blended revolution surface.

    Creates two circular tangent-revolution caps, stretches and offsets them
    along y (+65 / -65), stacks a hyperbola section between them, and returns
    the combined RawSurface named 'cir-hyper-cir'.
    """
    # NOTE(review): order_u, order_v and u are computed but never used here —
    # possibly consumed elsewhere or leftovers; confirm before removing.
    order_u = 3
    order_v = 3
    u = cos(pi/4)
    revol_type1 = 'circular'
    tang_alpha1 = 0.3
    blend_num1 = 1
    (baseCP1, v1) = Revol_Tangent(revol_type1, tang_alpha1, blend_num1)
    # NOTE(review): this second call overwrites v1 — likely intended v2 or
    # a throwaway; confirm against Revol_Tangent's return contract.
    (baseCP2, v1) = Revol_Tangent(revol_type1, tang_alpha1, blend_num1)
    rawCP1 = RawSurface(baseCP1, 'circular')
    rawCP2 = RawSurface(baseCP2, 'circular')
    # Scale both caps, then push them apart along y.
    rawCP1.blendCP = stretch(rawCP1.blendCP, 20)
    rawCP1.blendCP = move(rawCP1.blendCP, 'y', 65)
    rawCP2.blendCP = stretch(rawCP2.blendCP, 20)
    rawCP2.blendCP = move(rawCP2.blendCP, 'y', -65)
    # Middle section: hyperbola revolution between the two caps.
    revol_type2 = 'hyperbola'
    tang_alpha2 = 0.3
    blend_num2 = 2
    baseCP3, v2 = Revol_Tangent(revol_type2, tang_alpha2, blend_num2)
    # Stack control points: top cap, middle, bottom cap.
    rawCP1.blendCP = vstack( (rawCP1.blendCP, baseCP3, rawCP2.blendCP) )
    rawCP1.name = 'cir-hyper-cir'
    rawCP1.setSharpEdges([])
    return rawCP1
def execute(self, userdata):
    """State-machine step: back up, observe, return, then hand off to rotate.

    Nudges the robot back 5 cm so the observation isn't blocked, calls
    self.observe(), moves forward again, marks the work item done, and
    advances the global work counter.  Always transitions to 'rotate'.
    """
    global work, current_work
    # Back off slightly before observing.
    util.move(-0.05, linear_scale = 0.1, max_error = 0.01)
    self.observe()
    # Return to the original position.
    util.move(0.05, linear_scale = 0.1, max_error = 0.01)
    work = False
    current_work += 1
    # -1 rotate_turns: meaning defined by the consuming state — TODO confirm.
    userdata.rotate_turns = -1
    time.sleep(0.5)
    return 'rotate'
def execute(self, userdata):
    """State-machine step at location 3: observe, count red lines, advance.

    Same back-off/observe/return pattern as the other observe states, but
    only advances current_work after the third red line at this location.
    Always transitions to 'rotate'.
    """
    global work, current_work, redline_count_loc3
    redline_count_loc3 += 1
    # Back off slightly before observing.
    util.move(-0.05, linear_scale = 0.1, max_error = 0.01)
    self.observe()
    # Return to the original position.
    util.move(0.05, linear_scale = 0.1, max_error = 0.01)
    work = False
    # Location 3 is visited repeatedly; only the third visit moves work on.
    if redline_count_loc3 >= 3:
        current_work += 1
    userdata.rotate_turns = -1
    return 'rotate'
def updatePos(self):
    """Moves foe toward its goal, re-rolling the goal on arrival.

    While within (1 + speed) of the goal and alwaysMove is set, picks a new
    random goal; otherwise steers velocity toward the current goal at
    constant speed, then delegates the actual motion to util.move.
    """
    # Arrived (or started) at goal: keep rolling new goals until one is
    # far enough away to move toward.
    while dist(self.goalPos, self.rect.center) <= 1 + self.speed and self.alwaysMove:
        self.goalPos = (randint(20, 1200), randint(1, 450))
    if dist(self.goalPos, self.rect.center) > 1 + self.speed:
        # Unit direction vector toward the goal, scaled by speed.
        dx = self.goalPos[0] - self.rect.centerx
        dy = self.goalPos[1] - self.rect.centery
        mag = sqrt(dx**2 + dy**2)
        self.velocity = [self.speed * dx / mag, self.speed * dy / mag]
    util.move(self)
def check_possible_move(disks, symbol, size):
    """Checks if the player has possible moves.

    Returns True (there is at least one possible move) or False (there is
    not possible move for the symbol).

    FIX: the original set a flag and used ``break``, which only exits the
    inner ``y`` loop — the outer ``x`` loop kept probing (and calling
    ``move``) after an answer was already known.  Returning directly stops
    at the first legal move.
    """
    for x in range(size):
        for y in range(size):
            coordinate = x, y
            if coordinate not in disks:
                try:
                    # move() raises ValueError for an illegal placement;
                    # a non-raising call means at least one move exists.
                    move(disks, coordinate, symbol)
                    return True
                except ValueError:
                    pass
    return False
def assign_alt_id(file, outfile):
    '''assign alt id if it is wrong!

    Python 2.  Finds atom pairs whose distances flag them as alternate
    conformers, groups the involved atoms by residue (first four fields of
    the underscore-separated atom key), derives corrected alt-ids per group,
    and writes an updated CIF/PDB file (to outfile if given).
    '''
    info = get_atominfo(file)
    netdis = check_dis(info, 1)  # dist for alt conformers
    # Collect both atoms of every flagged pair.
    atoms = []
    for x in netdis:
        atoms.append(x[1])
        atoms.append(x[2])
    atoms.sort()
    # De-duplicate the sorted atom list (adjacent duplicates only).
    uatoms = []
    for i, x in enumerate(atoms):
        if i > 0 and x == atoms[i - 1]:
            continue
        uatoms.append(x)
    satom, altidd = [], []
    for i, x in enumerate(uatoms):
        # Atom key fields are underscore-separated; t[:4] identifies the
        # residue, t[7] indexes into the occupancy table — TODO confirm layout.
        t = x.split('_')
        if i > 0:
            t0 = uatoms[i - 1].split('_')
            if t[:4] != t0[:4]:
                # Residue changed: finalize the previous group.
                util.gsort(satom, 1, -1)
                altid = check_alt(satom)
                if altid:
                    altidd.append(altid)
                #print satom
                satom = []
        satom.append([x, info['occ'][int(t[7])]])
    # NOTE(review): the final residue group is never flushed through
    # check_alt after the loop — confirm whether that is intentional.
    if len(altidd):
        if util.is_cif(file):
            newfile = update_ciffile(file, altidd)
        else:
            newfile = update_pdbfile(file, altidd)
        if outfile:
            util.move(newfile, outfile)
            print '\nThe updated new file = %s\n' % outfile
        else:
            print '\nThe updated new file = %s\n' % newfile
def insertion_sort(array):
    """Insertion sort using util.move to relocate elements.

    For each element, scans the sorted prefix right-to-left for the first
    smaller element and moves the current element just after it (index 0
    when none is smaller).  Returns the sorted array as produced by
    util.move.

    FIX: the original tested ``curr_unsorted > array[index_sorted]`` BEFORE
    ``index_sorted == -1``, so when the scan ran off the front it compared
    against ``array[-1]`` (the last element) via Python's negative indexing.
    The result happened to be the same, but only by accident of
    short-circuit evaluation; guarding the sentinel first makes the
    out-of-range read impossible.
    """
    for i in range(1, len(array)):
        curr_unsorted = array[i]
        index_sorted = i - 1
        while True:
            # Sentinel check first: insert at the front when the scan is
            # exhausted, otherwise only stop at a strictly smaller element.
            if index_sorted == -1 or curr_unsorted > array[index_sorted]:
                array = util.move(array, index_sorted + 1, i)
                break
            index_sorted -= 1
    return array
def execute(self, userdata):
    """Approach the target AR tag; dock via a fixed maneuver when close.

    While the tag is visible and farther than 0.7 m, drives straight at it
    and stays in 'approach'.  Within 0.7 m it performs a hard-coded
    rotate/move square path and returns 'arrived'.  If the tag is not
    currently tracked it stops and retries 'approach'.
    """
    global g_tags, g_target_ARtag_id, g_cmd_vel_pub
    if rospy.is_shutdown():
        return 'end'
    else:
        if g_target_ARtag_id in g_tags:
            tag_pose = g_tags[g_target_ARtag_id]
            if tag_pose.position.x > 0.7:
                #move with cmd_vel_pub
                print(tag_pose.position.x)
                twist = Twist()
                twist.linear.x = 0.3
                g_cmd_vel_pub.publish(twist)
                return 'approach'
            else:
                # # move with move base
                # goal = MoveBaseGoal()
                # goal.target_pose.header.frame_id = str(g_target_ARtag_id)
                # goal.target_pose.pose.position = Point(0, 0, -0.7)
                # goal.target_pose.pose.orientation = Quaternion(0, 0, 0, 1)
                # self.client.send_goal(goal)
                # self.client.wait_for_result()
                # Open-loop square maneuver around the tag (-87 ~= 90 deg
                # with overshoot compensation — TODO confirm calibration).
                util.rotate()
                util.move(0.5, linear_scale=0.3)
                util.rotate(-87)
                util.move(1.2, linear_scale=0.3)
                util.rotate(-87)
                util.move(0.5, linear_scale=0.3)
                util.rotate(-87)
                return 'arrived'
        # Tag not tracked this cycle: stop and try again.
        print("lost target")
        g_cmd_vel_pub.publish(Twist())
        return 'approach'
def makesymlinks(repospath):
    """Create symlinks from recorded paths into the repository copy.

    For each path in the syncher DB: skip (with a warning) files guarded by
    ACL 'deny' entries unless --ignoreacl; otherwise back up any existing
    file to '<path>-beforesyncher' and symlink the path to the repo copy.
    Already-correct symlinks are left alone; mismatched ones are warned
    about.  Failures roll back and re-raise.
    """
    global options
    reposfilepath = os.path.abspath(repospath)
    with open(os.path.join(repospath, SYNCHER_DB_FILENAME)) as db:
        try:
            for line in db:
                line = line.strip()
                if not os.path.islink(line) and accesscontrollist.hasacl(line) and not options.ignoreacl:
                    err = "filetoversion has a 'deny' in ACL permissions (ls -lde %s: %s) \n \
This program is currently not clever enough to check if you have permission to move/delete this file. \n \
To avoid this problem remove deny permissions from the access control entries \n \
or rerun this command with --ignoreacl" % (line, accesscontrollist.getacl(line))
                    logging.warn(err)
                elif not os.path.islink(line):
                    acl = None
                    if not options.dry:
                        logging.info("creating symlink from %s to %s", reposfilepath + line, line)
                        if os.path.exists(line):
                            if options.ignoreacl:
                                # Strip the ACL so the backup move can't be blocked.
                                acl = accesscontrollist.removeacl(line)
                            util.move(line, line + "-beforesyncher")  # repospathtoputnewfilein)
                        elif not os.path.exists(os.path.dirname(line)):
                            util.makedirs(os.path.dirname(line))
                        util.symlink(reposfilepath + line, line)
                        if acl is not None:
                            # Re-apply the preserved ACL to the new symlink path.
                            accesscontrollist.setacl(line, acl)
                else:
                    # if not os.path.realpath(line) == reposfilepath + line:
                    if not os.path.samefile(os.path.realpath(line), reposfilepath + line):
                        logging.warn("%s is already a symbolic link to %s not %s. it will not be followed and linked properly to repository" % (line, os.path.realpath(line), reposfilepath + line))
        except Exception as e:
            logging.warn("ROLLING BACK because of %s" % e)
            undo.rollback()
            logging.warn("ROLLED BACK because of %s" % e)
            raise
def player_move(disks, symbol, size):
    """Realizes a player move.

    Looks up possible moves, asks for new coordinates, finds all changing
    discs, and reverses them.  If there is no possible move, then it prints
    a notice and skips the move.
    """
    # Guard clause: nothing to do when the player has no legal move.
    if not check_possible_move(disks, symbol, size):
        print("You cannot play.")
        return
    # Keep prompting until a legal move is entered and applied.
    while True:
        try:
            coordinate = ask_for_coordinates(disks, symbol)
            coordinate = check_coordinates(disks, coordinate, size)
            flipped = move(disks, coordinate, symbol)
            reversal(disks, coordinate, symbol, flipped)
            return
        except ValueError as err:
            print(err)
def click_play(x, y, b, m):
    """Mouse-click handler for one othello turn.

    Ends the game (writing scores and deleting the save file) when the board
    is finished; otherwise translates the click into board coordinates,
    applies the move for the current player, flips the captured disks, and
    passes the turn.  An illegal click is reported and the turn is retried;
    a player with no legal move is skipped.
    """
    # Game over: persist the score, drop the save, and exit.
    if othello.end(disks, size) or othello.check_all_moves(disks, size):
        othello.write_score(disks)
        os.remove("othello_disks.json")
        raise SystemExit("End")
    symbol = get_symbol()
    if othello.check_possible_move(disks, symbol, size):
        try:
            # Pixel click -> board coordinate, validated before applying.
            new_coordinate = get_coordinates(x, y)
            new_coordinate = othello.check_coordinates(disks, new_coordinate, size)
            changes = util.move(disks, new_coordinate, symbol)
            util.reversal(disks, new_coordinate, symbol, changes)
            change_player()
        except ValueError as e:
            # Illegal click: report and let the same player try again.
            print(e)
    else:
        # No legal move: announce and skip the turn.
        print("You cannot play.")
        change_player()
def move(p1, p2):
    """Move files or directories under normal paths or zpaths.

    Thin wrapper around util.move: p1 is the source, p2 the destination.
    util.move's return value is intentionally discarded.
    """
    util.move(p1, p2)
def test_move():
    """Exercise util.move with force=True and report its return value."""
    result = util.move('d1/a.txt', 'd2', force=True)
    return 'move OK: ' + str(result)
or reading_type_id == -1): print("ERROR: Some IDs not found!") tryNext() return 1 l = [description, unit_id, commodity_id, source_system_id, reading_type_id] meter_id = load_ids(l, mycursor) if (meter_id == -1): return 1 print("Meter ID: %d" % (meter_id)) if not load_values(meter_id, data_file, mycursor): print("NOTE! Meter ID (%d) created for '%s'" % (meter_id, data_file)) return 1 else: util.move(data_file, process_dir) return 0 def get_id(item, table, field, mycursor): """ Returns the ID from table TABLE whose FIELD is "ilike" to ITEM. Uses cursor MYCURSOR. If no ID is found, returns -1. """ sql = "SELECT id FROM %s WHERE %s ILIKE '%s' LIMIT 1" % (table, field, item) print("Getting %s ID ..." % (table)), mycursor.execute(sql) result = mycursor.fetchone()
print (curr_inbox) # get gz files # files will be sorted, it ensures : # 1."buy"s are picked up b4 "sold"s # 2. files are orderred by date for file_inbox in sorted(glob.glob(curr_inbox)): try: # get file capture date. This will be inserted for most tables. date_time = file_inbox.split("/")[3] # mv to progress file_progress = file_inbox.replace(p_inbox,p_progress) if util.move(file_inbox,file_progress) < 0: continue print ("moved " + file_inbox + " to " + file_progress) # get parser by mode p_parser = get_parser(file_inbox.split("/")[4]) # process if p_parser.process(file_progress, date_time) < 0: print("failed to process " + file_progress) file_errbox = file_inbox.replace(p_inbox,p_errbox) if util.move(file_progress,file_errbox): print ("moved " + file_progress + " to " + file_errbox) continue
def pred(self, rdmols, values, BATCH_SIZE=32, debug=False):
    """Run the trained net over molecules and collect per-atom predictions.

    Builds a MoleculeDatasetMulti from rdmols/values using the feature
    arguments stored in self.meta, batches it through self.net, and returns
    a DataFrame with one row per masked atom/nucleus: true value, estimate,
    and (when the net also emits an uncertainty tuple) a std estimate.

    FIXES vs. original:
    - ``print(... masks.shape)`` raised NameError when debug=True; the
      variable is ``mask``.
    - the inner loop variable ``std_est`` shadowed the batch-level
      ``std_est`` array it was being unpacked from; renamed to ``v_std``.
    - removed unused locals (allres/alltrue accumulators, mask_t/vals_t).
    """
    data_feat = self.meta['data_feat']
    MAX_N = self.meta.get('MAX_N', 32)
    USE_CUDA = self.USE_CUDA
    COMBINE_MAT_VECT = 'row'
    feat_vect_args = self.meta['feat_vect_args']
    feat_mat_args = self.meta['feat_mat_args']
    adj_args = self.meta['adj_args']

    ds = netdataio.MoleculeDatasetMulti(rdmols, values, MAX_N,
                                        len(self.meta['tgt_nucs']),
                                        feat_vect_args, feat_mat_args,
                                        adj_args,
                                        combine_mat_vect=COMBINE_MAT_VECT)
    dl = torch.utils.data.DataLoader(ds, batch_size=BATCH_SIZE, shuffle=False)

    results_df = []
    m_pos = 0  # running molecule position across all batches
    for i_batch, (adj, vect_feat, mat_feat, vals, mask) in enumerate(tqdm(dl)):
        if debug:
            print(adj.shape, vect_feat.shape, mat_feat.shape,
                  vals.shape, mask.shape)

        # Assemble net inputs on the right device; which feature tensors
        # are passed depends on the model's data_feat configuration.
        args = [move(adj, USE_CUDA)]
        if 'vect' in data_feat:
            args.append(move(vect_feat, USE_CUDA))
        if 'mat' in data_feat:
            args.append(move(mat_feat, USE_CUDA))

        res = self.net(args)
        # A tuple result means the net also predicts a std/uncertainty head.
        std_out = isinstance(res, tuple)
        if std_out:
            y_est = res[0].detach().cpu().numpy()
            std_est = res[1].detach().cpu().numpy()
        else:
            y_est = res.detach().cpu().numpy()
            std_est = np.zeros_like(y_est)

        for m, v, v_est, v_std in zip(mask.numpy(), vals.numpy(),
                                      y_est, std_est):
            for nuc_i, nuc in enumerate(self.meta['tgt_nucs']):
                m_n = m[:, nuc_i]
                v_n = v[:, nuc_i]
                v_est_n = v_est[:, nuc_i]
                std_est_n = v_std[:, nuc_i]
                # Only masked (present) atoms contribute rows.
                for i in np.argwhere(m_n > 0).flatten():
                    results_df.append({
                        'i_batch': i_batch,
                        'atom_idx': i,
                        'm_pos': m_pos,
                        'nuc_i': nuc_i,
                        'nuc': nuc,
                        'std_out': std_out,
                        'value': v_n[i],
                        'est': float(v_est_n[i]),
                        'std': float(std_est_n[i]),
                    })
            m_pos += 1
    results_df = pd.DataFrame(results_df)
    return results_df
def fine_tune(self, box_is_left, ori_push_dist):
    """Final box-position correction after the main push (Python 2).

    Backs off by the (clamped) original push distance, spins until the box
    tag is re-acquired (15 s timeout -> 'lost_box'), measures the box-center
    error against the goal stall waypoint, and converts it into up to three
    corrective pushes (left / right / forward) executed from the matching
    side of the box.
    """
    # Clamp the back-off so we never retreat more than two stall widths.
    if ori_push_dist > SQUARE_DIST * 2:
        ori_push_dist = SQUARE_DIST * 2
    util.move(-ori_push_dist, linear_scale = 0.3)
    # Spin toward the box until its AR tag is seen again.
    tmp_time = time.time()
    while self.box_tag_saw == False:
        direction = -1 if box_is_left else 1
        twist = Twist()
        twist.angular.z = direction * 0.4
        self.twist_pub.publish(twist)
        if time.time() - tmp_time > 15:
            return 'lost_box'
    self.twist_pub.publish(Twist())
    box_middle_point, _ = self.look_up_box('box_middle', 'map')
    print box_middle_point, PARK_SPOT_WAYPOINTS[str(self.goal_stall_id)][0]
    # Position error of the box center relative to the goal stall waypoint.
    error_x = box_middle_point.x - PARK_SPOT_WAYPOINTS[str(self.goal_stall_id)][0].x
    if self.goal_stall_id == 1:
        # Stall 1 waypoint offset compensation — TODO confirm the 0.3 value.
        error_x += 0.3
    error_y = box_middle_point.y - PARK_SPOT_WAYPOINTS[str(self.goal_stall_id)][0].y
    # Convert errors into push distances; 5 cm dead-band on lateral error.
    push_right_dist = error_y if error_y > 0 and abs(error_y) >= 0.05 else 0
    push_left_dist = -error_y if error_y < 0 and abs(error_y) >= 0.05 else 0
    push_forward = - error_x if error_x < 0 else 0
    # Edge stalls cannot be pushed outward.
    if self.goal_stall_id == 5:
        push_left_dist = 0
    if self.goal_stall_id == 1:
        push_right_dist = 0
    if not box_is_left:
        # robot is right to the box
        print "fine_tune:", "push_right ", push_right_dist, "push_left", push_left_dist, "push_forward", push_forward
        if push_left_dist != 0:
            self.go_to_side('box_front')
            util.move(AMCL_APPROACH_BOX, linear_scale= 0.2)
            util.move(push_left_dist)
            util.move(-1, linear_scale=0.3)
        if push_forward != 0:
            self.go_to_side('box_left')
            util.move(AMCL_APPROACH_BOX - 0.2, linear_scale=0.2)
            util.move(push_forward)
            util.move(-0.5, linear_scale= 0.3)
        if push_right_dist != 0:
            # Extra retreat when no forward push preceded this one.
            if push_forward == 0:
                self.go_to_side('box_left')
                util.move(-0.5, linear_scale= 0.3)
            self.go_to_side('box_left')
            util.move(AMCL_APPROACH_BOX, linear_scale=0.2)
            util.move(push_right_dist)
    else:
        # box is right to the goal, robot is left to the box
        print "fine_tune:", "push_right ", push_right_dist, "push_left", push_left_dist, "push_forward", push_forward
        if push_right_dist != 0:
            self.go_to_side('box_front')
            util.move(AMCL_APPROACH_BOX, linear_scale= 0.2)
            util.move(push_right_dist)
            util.move(-1, linear_scale=0.3)
        if push_forward != 0:
            self.go_to_side('box_right')
            util.move(AMCL_APPROACH_BOX - 0.2, linear_scale=0.2)
            util.move(push_forward)
            util.move(-0.5, linear_scale= 0.3)
        if push_left_dist != 0:
            # Extra retreat when no forward push preceded this one.
            if push_forward == 0:
                self.go_to_side('box_right')
                util.move(-0.5, linear_scale= 0.3)
            self.go_to_side('box_right')
            util.move(AMCL_APPROACH_BOX, linear_scale=0.2)
            util.move(push_left_dist)
def post(self, user_id, song_id, displacement):
    """HTTP POST handler: move a song within a user's queue.

    Coerces the path arguments to ints, delegates to util.move, and writes
    its result as the response body.
    """
    response = util.move(int(user_id), int(song_id), int(displacement))
    self.write(response)
def train(infile, outfile):
    """Train a GraphEdgeVertPred relnet on a pickled train/test split.

    Loads {train_df, test_df} from infile, builds feature/adjacency configs
    and DataLoaders, constructs the net, writes the run's hyperparameters to
    a .meta file under CHECKPOINT_DIR, and hands off to generic_runner.
    NOTE(review): outfile is accepted but never used here — confirm intent.
    """
    d = pickle.load(open(infile, 'rb'))
    train_df = d['train_df']
    test_df = d['test_df']
    USE_CUDA = True
    # Element list depends on dataset flavor (adds P/S/Cl for hconfspcl).
    if 'hconfspcl' in DATASET_NAME:
        atomicno = [1, 6, 7, 8, 9, 15, 16, 17]
    else:
        atomicno = [1, 6, 7, 8, 9]
    ### Create datasets and data loaders
    feat_vect_args = dict(feat_atomicno_onehot=atomicno,
                          feat_pos=False, feat_atomicno=True,
                          feat_valence=True, aromatic=True,
                          hybridization=True, partial_charge=False,
                          formal_charge=True,  # WE SHOULD REALLY USE THIS
                          r_covalent=False,
                          total_valence_onehot=True,
                          r_vanderwals=False, default_valence=True,
                          rings=True)
    feat_mat_args = dict(feat_distances = False, feat_r_pow = None)  #[-1, -2, -3])
    split_weights = [1, 1.5, 2, 3]
    adj_args = dict(edge_weighted=False,
                    #norm_adj=True,
                    add_identity=True, split_weights=split_weights)
    BATCH_SIZE = 64  # 16
    COMBINE_MAT_VECT='row'
    INIT_NOISE = 1e-3
    print("len(train_df)=", len(train_df))
    ds_train = MoleculeDatasetNew(train_df.rdmol.tolist(),
                                  train_df.value.tolist(), MAX_N,
                                  feat_vect_args, feat_mat_args, adj_args,
                                  combine_mat_vect=COMBINE_MAT_VECT)
    print("len(ds_train)=", len(ds_train))
    dl_train = torch.utils.data.DataLoader(ds_train, batch_size=BATCH_SIZE,
                                           shuffle=True, pin_memory=True)
    ds_test = MoleculeDatasetNew(test_df.rdmol.tolist(),
                                 test_df.value.tolist(), MAX_N,
                                 feat_vect_args, feat_mat_args, adj_args,
                                 combine_mat_vect=COMBINE_MAT_VECT)
    # Test loader uses half the batch size.
    dl_test = torch.utils.data.DataLoader(ds_test, batch_size=BATCH_SIZE//2,
                                          shuffle=True, pin_memory=True)
    # Model hyperparameters.
    USE_RESNET=True
    D_VERT = 128
    D_EDGE = 4
    LAYER_N = 5
    g_feature_n = 29 + len(atomicno)  # + 3
    net = relnets.GraphEdgeVertPred(g_feature_n, len(split_weights), MAX_N,
                                    layer_n=LAYER_N,
                                    internal_d_vert=D_VERT,
                                    internal_d_edge=D_EDGE,
                                    resnet=USE_RESNET,
                                    INIT_NOISE=INIT_NOISE)
    data_feat =['vect']
    # Place the net on GPU when USE_CUDA (project-local move helper).
    net = move(net, USE_CUDA)
    #new_net.load_state_dict(new_state)
    for n, p in net.named_parameters():
        print(n, p.shape)
    WEIGHT_DECAY=1e-7
    criterion = nn.MSELoss()
    #criterion = lambda x, y : torch.mean((x-y)**4)
    amsgrad = False
    LR = 1e-2
    # Encode the full hyperparameter set plus a time-based suffix in the name.
    MODEL_NAME = "{}_{}_relnet_{}_{}_{}_{}_{}_{}_{}_{}_{}.{:08d}".format(
        NUC, DATASET_NAME, LAYER_N, D_VERT, D_EDGE, amsgrad, BATCH_SIZE, LR,
        INIT_NOISE, WEIGHT_DECAY, g_feature_n, int(time.time() % 1e7))
    print("MODEL_NAME=", MODEL_NAME)
    writer = SummaryWriter("logs/{}".format(MODEL_NAME))
    # weight_decay appears to help a bit
    optimizer = torch.optim.Adam(net.parameters(), lr=LR, amsgrad=amsgrad,
                                 eps=1e-9, weight_decay=WEIGHT_DECAY)  #, momentum=0.9)
    scheduler = None
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5,
                                                gamma=0.95)
    # Persist the run configuration next to the checkpoints.
    pickle.dump({"feat_vect_args" : feat_vect_args,
                 "feat_mat_args" : feat_mat_args,
                 "adj_args" : adj_args,
                 "init_noise" : INIT_NOISE,
                 "lr" : LR,
                 'BATCH_SIZE' : BATCH_SIZE,
                 'USE_RESNET' : USE_RESNET,
                 'INIT_NOISE' : INIT_NOISE,
                 'D_VERT' : D_VERT,
                 'D_EDGE' : D_EDGE,
                 'MAX_N' : MAX_N,
                 'data_feat' : data_feat,
                 'train_filename' : infile,
                 'g_feature_n' : g_feature_n,
                 'LAYER_N' : LAYER_N,
                 },
                open(os.path.join(CHECKPOINT_DIR, MODEL_NAME + ".meta"), 'wb'))
    generic_runner(net, optimizer, scheduler, criterion,
                   dl_train, dl_test, MODEL_NAME,
                   MAX_EPOCHS=10000, CHECKPOINT_EVERY=10,
                   data_feat=data_feat, USE_CUDA=USE_CUDA)
given_path="tmp_file.msa" dl_gdrive.download_file_from_google_drive( data_googleid, tmp_file ) # if remote was gzipped was_gz = False if is_gz_file( given_path ): was_gz = True print("Input was zipped. unzipping!") suffix="." + given_path.split('.')[-1] cmd = [] cmd.append("gunzip") cmd.append("--keep") cmd.append("--suffix") cmd.append(suffix) cmd.append( given_path ) #print((" ".join(cmd))) subprocess.call(cmd) given_path=given_path[:-len(suffix)] if was_gz or not local: print("Moving the data over:\n " + given_path + " -> " + p.raw_sequences) util.move( given_path, p.raw_sequences ) else: print("Making a copy of the data:\n " + given_path + " -> " + p.raw_sequences) util.copy( given_path, p.raw_sequences ) print("") print("Version string: " + p.version)
def move(p1, p2):
    """Move files or directories under normal paths or zpaths.

    Thin wrapper around util.move: p1 is the source, p2 the destination.
    util.move's return value is intentionally discarded.
    """
    util.move(p1, p2)
or reading_type_id == -1): print("ERROR: Some IDs not found!") tryNext() return 1 l = [description, unit_id, commodity_id, source_system_id, reading_type_id] meter_id = load_ids(l, mycursor) if (meter_id == -1): return 1 print("Meter ID: %d" % (meter_id)) if not load_values(meter_id, data_file, mycursor): print("NOTE! Meter ID (%d) created for '%s'" % (meter_id, data_file)) return 1 else: util.move(data_file, process_dir) return 0 def get_id(item, table, field, mycursor): """ Returns the ID from table TABLE whose FIELD is "ilike" to ITEM. Uses cursor MYCURSOR. If no ID is found, returns -1. """ sql = "SELECT id FROM %s WHERE %s ILIKE '%s' LIMIT 1" % (table, field, item) print("Getting %s ID ..." % (table)), mycursor.execute(sql) result = mycursor.fetchone() if not result: util.fail()
def execute(self, userdata):
    """Locate the misplaced box via AR tags and push it to the goal stall
    (Python 2).

    After two failed trials it drives to the on-ramp and returns 'completed'.
    Otherwise it spins to find the box tag (with move_base fallbacks after
    5 s and 20 s), derives the box's current stall, then repeatedly lines up
    in front of the box and pushes it stall-by-stall left or right toward
    the goal stall.
    """
    if self.trial == 0:
        self.set_init_map_pose()
    self.trial += 1
    print "trial: ", self.trial
    # Give up after two attempts: drive onto the ramp and finish.
    if self.trial > 2:
        goal_pose = util.goal_pose('map', ON_RAMP_WAYPOINT[0], ON_RAMP_WAYPOINT[1])
        self.client.send_goal(goal_pose)
        self.client.wait_for_result()
        return 'completed'
    ar_tag_sub = rospy.Subscriber('/ar_pose_marker', AlvarMarkers,
                                  self.ar_tag_sub_callback)
    print "Waiting for /ar_pose_marker message..."
    rospy.wait_for_message('/ar_pose_marker', AlvarMarkers)
    self.twist_pub = rospy.Publisher("/cmd_vel_mux/input/teleop", Twist,
                                     queue_size=1)
    current_box_stall_id = None
    tmp_time = time.time()
    # Spin in place until both the box tag and goal tag have been seen and
    # the box's stall has been resolved from TF.
    while True:
        if rospy.is_shutdown():
            ar_tag_sub.unregister()
            return 'end'
        if self.box_tag_id != None and self.goal_tag_id != None:
            try:
                (trans, _) = self.listener.lookupTransform(
                    '/map', '/ar_marker_' + str(self.box_tag_id),
                    rospy.Time(0))
                box_tag_point = Point(trans[0], trans[1], trans[2])
                current_box_stall_id = get_cloest_stall(box_tag_point)
            except (tf.LookupException, tf.ConnectivityException,
                    tf.ExtrapolationException):
                print "TF look up exception when look up goal tag"
            if current_box_stall_id != None and self.goal_stall_id != None:
                # The box must sit in an interior stall distinct from the goal.
                assert current_box_stall_id > 1 and current_box_stall_id < 5, \
                    "current_box_stall_id = {0}".format(str(current_box_stall_id))
                assert current_box_stall_id != self.goal_stall_id, \
                    "current_box_stall_id = {0}, self.goal_stall_id = {1}".format(
                        str(current_box_stall_id), str(self.goal_stall_id))
                break
        twist = Twist()
        twist.angular.z = -0.4
        self.twist_pub.publish(twist)
        # Recovery waypoints when spinning hasn't found the tags in time.
        if time.time() - tmp_time > 5:
            goal_pose = util.goal_pose('map', PARK_SPOT_WAYPOINTS['6'][0],
                                       Quaternion(0, 0, 0, 1))
            self.client.send_goal(goal_pose)
            print "waiting for result ", goal_pose.target_pose.header.frame_id
            self.client.wait_for_result()
        if time.time() - tmp_time > 20:
            goal_pose = util.goal_pose('map', PARK_SPOT_WAYPOINTS['7'][0],
                                       Quaternion(0, 0, 0, 1))
            self.client.send_goal(goal_pose)
            print "waiting for result ", goal_pose.target_pose.header.frame_id
            self.client.wait_for_result()
    # Push direction: box left of goal means push to the right, and vice versa.
    box_is_left = True
    if int(current_box_stall_id) > int(self.goal_stall_id):
        box_is_left = False
    if box_is_left:
        if current_box_stall_id == 2:
            # Stage at waypoint 6 first when starting from stall 2.
            point = PARK_SPOT_WAYPOINTS['6'][0]
            quaternion = PARK_SPOT_WAYPOINTS['6'][1]
            goal_pose = util.goal_pose('map', point, quaternion)
            self.client.send_goal(goal_pose)
            print "waiting for result ", goal_pose.target_pose.header.frame_id
            self.client.wait_for_result()
        # Line up one stall to the left of the box.
        point = PARK_SPOT_WAYPOINTS[str(current_box_stall_id - 1)][0]
        quaternion = PARK_SPOT_WAYPOINTS[str(current_box_stall_id - 1)][1]
        goal_pose = util.goal_pose('map', point, quaternion)
        self.client.send_goal(goal_pose)
        print "waiting for result ", goal_pose.target_pose.header.frame_id
        self.client.wait_for_result()
        #util.signal(1, onColor=Led.BLACK) #debug
        util.rotate(-92)
        # One push per stall between the box and the goal.
        for i in range(abs(current_box_stall_id - self.goal_stall_id)):
            if i == 0 and current_box_stall_id == 2:
                # First push from stall 2 uses geometry-derived distance.
                push_dist = SQUARE_DIST * 2 - BOX_EDGE_LENGTH / 2 - ROBOT_LENGTH / 2 - 0.1
                util.move(push_dist, linear_scale=0.1)
            else:
                util.move(-0.6, linear_scale=0.3)
            # NOTE(review): original indentation is ambiguous here; this
            # reading (re-approach and push every iteration, mirroring the
            # right-push branch below) should be confirmed.
            self.go_to_side('box_front')
            # util.move(AMCL_APPROACH_BOX)
            # rospy.sleep(1)
            push_dist = SQUARE_DIST + AMCL_APPROACH_BOX - 0.05
            util.move(push_dist, linear_scale=0.2)
    else:
        # Line up one stall to the right of the box.
        point = PARK_SPOT_WAYPOINTS[str(int(current_box_stall_id) + 1)][0]
        quaternion = PARK_SPOT_WAYPOINTS[str(int(current_box_stall_id) + 1)][1]
        goal_pose = util.goal_pose('map', point, quaternion)
        self.client.send_goal(goal_pose)
        print "waiting for result ", goal_pose.target_pose.header.frame_id
        self.client.wait_for_result()
        #util.signal(1, onColor=Led.BLACK) #debug
        util.rotate(90)
        for i in range(abs(current_box_stall_id - self.goal_stall_id)):
            if i == 0:
                util.move(-0.3, linear_scale=0.3)
            else:
                util.move(-0.6, linear_scale=0.3)
            self.go_to_side('box_front')
            if i == 0:
                # Small heading correction on the first push.
                util.rotate(6, max_error=2, anglular_scale=0.5)
            push_dist = SQUARE_DIST + AMCL_APPROACH_BOX - 0.07
            util.move(push_dist, linear_scale=0.2)
    # if self.fine_tune(box_is_left, push_dist) == 'lost_box':
    #     goal_pose = util.goal_pose('map', OFF_RAMP_WAYPOINT[0], OFF_RAMP_WAYPOINT[1])
    #     self.client.send_goal(goal_pose)
    #     self.client.wait_for_result()
    #     return 'restart'
    util.signal(2, onColor=Led.GREEN, onColor2=Led.RED)
    util.move(-0.2)
    ar_tag_sub.unregister()
    return 'completed'
_, dx, dy = dd[i] nx = x + dx ny = y + dy dfs(nx, ny) if cnt > 0: res.append((x, y, 0)) dfs0(sx, sy) dfs(sx, sy) #print(*res, sep='') prv = None R = [] px = py = -1 md = 1 for x, y, m in res: if m: if not md: ps = util.move(MP, X, Y, px, py, x, y) R.extend(ps) md = 1 else: R.append((x, y)) px = x py = y else: md = 0 #print(R) ans = util.pos_to_command(R) print(*ans, sep='')
def test_move3():
    """A corner move by 'o' must capture every bracketed 'x' run."""
    disks = {(0, 1): "x", (0, 2): "x", (0, 3): "x",
             (1, 0): "x", (2, 0): "x", (3, 0): "x",
             (1, 1): "x", (2, 1): "x", (1, 2): "x",
             (2, 2): "o", (4, 0): "o", (0, 4): "o"}
    expected = [(0, 1), (0, 2), (0, 3), (1, 0), (2, 0), (3, 0), (1, 1)]
    captured = util.move(disks, (0, 0), "o")
    assert sorted(captured) == sorted(expected)
def test_move1():
    """Each move around the 'o' ring captures exactly one adjacent disk."""
    disks = {(2, 2): "x",
             (1, 1): "o", (2, 1): "o", (3, 1): "o",
             (1, 2): "o", (1, 3): "o", (2, 3): "o",
             (3, 2): "o", (3, 3): "o"}
    cases = {(0, 0): [(1, 1)], (2, 0): [(2, 1)], (4, 0): [(3, 1)],
             (0, 2): [(1, 2)], (0, 4): [(1, 3)], (2, 4): [(2, 3)],
             (4, 2): [(3, 2)], (4, 4): [(3, 3)]}
    for attempt, expected in cases.items():
        assert util.move(disks, attempt, "x") == expected
def generic_runner(net, optimizer, scheduler, criterion, dl_train, dl_test,
                   MODEL_NAME,
                   MAX_EPOCHS=1000, CHECKPOINT_EVERY=20,
                   data_feat = ['mat'], USE_CUDA=True):
    """Generic train/eval loop with TensorBoard logging and checkpointing.

    Trains for MAX_EPOCHS, logging masked-MSE train loss each epoch,
    evaluating on dl_test every 5 epochs (std/mean-abs/max error of the
    residuals), and saving state+model checkpoints every CHECKPOINT_EVERY
    epochs under CHECKPOINT_DIR.

    NOTE(review): data_feat=['mat'] is a mutable default argument; it is
    never mutated here so behavior is correct, but a None sentinel would be
    the safer idiom.
    """
    writer = SummaryWriter(f"{TENSORBOARD_DIR}/{MODEL_NAME}")
    for epoch_i in tqdm(range(MAX_EPOCHS)):
        if scheduler is not None:
            scheduler.step()
        running_loss = 0.0
        total_points = 0.0
        total_compute_time = 0.0
        t1_total = time.time()
        net.train()
        for i_batch, (adj, vect_feat, mat_feat, vals, mask) in \
                tqdm(enumerate(dl_train), total=len(dl_train)):
            t1 = time.time()
            optimizer.zero_grad()
            # Assemble device-placed inputs; the feature tensors passed
            # depend on the data_feat configuration.
            adj_t = move(adj, USE_CUDA)
            args = [adj_t]
            if 'vect' in data_feat:
                vect_feat_t = move(vect_feat, USE_CUDA)
                args.append(vect_feat_t)
            if 'mat' in data_feat:
                mat_feat_t = move(mat_feat, USE_CUDA)
                args.append(mat_feat_t)
            mask_t = move(mask, USE_CUDA)
            vals_t = move(vals, USE_CUDA)
            res = net(args)
            #print(mask_t.shape, vals_t.shape, res.shape)
            # Loss only over masked (present) entries.
            true_val = vals_t[mask_t>0].reshape(-1, 1)
            loss = criterion(res[mask_t > 0], true_val)
            loss.backward()
            optimizer.step()
            # Weight the running loss by the number of contributing points.
            running_loss += loss.item() * len(true_val)
            total_points += len(true_val)
            t2 = time.time()
            total_compute_time += (t2-t1)
        #ksjndask
        t2_total = time.time()
        print("{} {:3.3f} compute={:3.1f}s total={:3.1f}s".format(
            epoch_i, running_loss/total_points,
            total_compute_time, t2_total-t1_total))
        writer.add_scalar("train_loss", running_loss/total_points, epoch_i)
        # Periodic evaluation on the test loader.
        if epoch_i % 5 == 0:
            net.eval()
            optimizer.zero_grad()
            allres = []
            alltrue = []
            for i_batch, (adj, vect_feat, mat_feat, vals, mask) in enumerate(dl_test):
                adj_t = move(adj, USE_CUDA)
                args = [adj_t]
                if 'vect' in data_feat:
                    vect_feat_t = move(vect_feat, USE_CUDA)
                    args.append(vect_feat_t)
                if 'mat' in data_feat:
                    mat_feat_t = move(mat_feat, USE_CUDA)
                    args.append(mat_feat_t)
                mask_t = move(mask, USE_CUDA)
                vals_t = move(vals, USE_CUDA)
                res = net(args)
                y_est = res[mask_t > 0].squeeze()
                y = vals_t[mask_t>0]
                allres.append(y_est.squeeze().detach().cpu().numpy())
                alltrue.append(y.cpu().detach().numpy())
            allres = np.concatenate(allres)
            alltrue = np.concatenate(alltrue)
            delta = allres - alltrue
            writer.add_scalar("test_std_err", np.std(delta), epoch_i)
            writer.add_scalar("test_mean_abs_err", np.mean(np.abs(delta)), epoch_i)
            writer.add_scalar("test_max_error", np.max(np.abs(delta)), epoch_i)
            print(epoch_i, np.std(delta))
        # Periodic checkpointing: both state_dict and the whole model object.
        if epoch_i % CHECKPOINT_EVERY == 0:
            checkpoint_filename = os.path.join(
                CHECKPOINT_DIR, "{}.{:08d}".format(MODEL_NAME, epoch_i))
            torch.save(net.state_dict(), checkpoint_filename + ".state")
            torch.save(net, checkpoint_filename + ".model")
def post(self, user_id, song_id, displacement):
    """HTTP POST handler: move a song within a user's queue.

    Coerces the path arguments to ints, delegates to util.move, and writes
    its result as the response body.
    """
    response = util.move(int(user_id), int(song_id), int(displacement))
    self.write(response)
def execute(self, userdata):
    """Locate the box and goal AR tags, then push the box between stalls.

    Returns 'end' when ROS shuts down, 'completed' after the push sequence.
    NOTE(review): indentation reconstructed from a collapsed source line --
    confirm the block nesting against version control.
    """
    self.set_init_map_pose()
    ar_tag_sub = rospy.Subscriber('/ar_pose_marker', AlvarMarkers,
                                  self.ar_tag_sub_callback)
    print "Waiting for /ar_pose_marker message..."
    rospy.wait_for_message('/ar_pose_marker', AlvarMarkers)
    twist_pub = rospy.Publisher("/cmd_vel_mux/input/teleop", Twist,
                                queue_size=1)
    tmp_time = time.time()
    # Spin in place until the subscriber callback has identified both the
    # box tag and the goal tag.
    while True:
        if rospy.is_shutdown():
            return 'end'
        if self.box_tag_id != None and self.goal_tag_id != None:
            break
        twist = Twist()
        twist.angular.z = -0.2  # slow clockwise scan
        twist_pub.publish(twist)
        # Fallback after 5 s of scanning: drive to the stall-6 waypoint and
        # rotate to get a better view of the tags.
        if time.time() - tmp_time > 5:
            goal_pose = util.goal_pose('map', PARK_SPOT_WAYPOINTS['6'][0],
                                       PARK_SPOT_WAYPOINTS['6'][1])
            self.client.send_goal(goal_pose)
            self.client.wait_for_result()
            util.rotate(87)
    # Resolve both tag poses in the map frame via TF; retry until both
    # waypoints are available.
    while True:
        if rospy.is_shutdown():
            ar_tag_sub.unregister()
            return 'end'
        try:
            (trans, rots) = self.listener.lookupTransform(
                'map', '/ar_marker_' + str(self.box_tag_id), rospy.Time(0))
            point = Point(trans[0], trans[1], trans[2])
            # The tag's own orientation is ignored; a fixed quaternion is
            # used instead.
            #quaternion = Quaternion(rots[0],rots[1],rots[2],rots[3])
            quaternion = Quaternion(0, 0, 0.118, 0.993)
            self.box_waypoint = util.goal_pose('map', point, quaternion)
            print "self.box_waypoint", self.box_waypoint
            (trans, rots) = self.listener.lookupTransform(
                '/map', '/ar_marker_' + str(self.goal_tag_id), rospy.Time(0))
            # z is pinned near the floor for the goal waypoint.
            point = Point(trans[0], trans[1], 0.010)
            # quaternion = Quaternion(rots[0],rots[1],rots[2],rots[3])
            quaternion = Quaternion(0, 0, 0.118, 0.993)
            self.goal_waypoint = util.goal_pose('map', point, quaternion)
            print "self.goal_waypoint", self.goal_waypoint
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            # Transient TF failure -- try again.
            continue
        finally:
            # break in finally overrides the except's continue once both
            # waypoints have been obtained.
            if self.box_waypoint != None and self.goal_waypoint != None:
                break
    print "Try to get waypoints:", self.box_waypoint, self.goal_waypoint
    print get_cloest_stall(self.goal_waypoint.target_pose.pose.position)
    assert self.box_waypoint != None and self.goal_waypoint != None
    # Map both waypoints to their nearest parking stalls.
    self.box_stall_id = get_cloest_stall(
        self.box_waypoint.target_pose.pose.position)
    self.goal_stall_id = get_cloest_stall(
        self.goal_waypoint.target_pose.pose.position)
    print self.box_stall_id, self.goal_stall_id
    assert self.box_stall_id != self.goal_stall_id
    # Decide push direction from the relative stall ordering.
    box_is_left = True
    if int(self.box_stall_id) > int(self.goal_stall_id):
        box_is_left = False
    square_dist = 0.825  # presumably stall pitch in metres -- TODO confirm
    if box_is_left:
        # Approach from the stall left of the box, face it, and push right.
        point = PARK_SPOT_WAYPOINTS[str(int(self.box_stall_id) - 1)][0]
        quaternion = PARK_SPOT_WAYPOINTS[str(int(self.box_stall_id) - 1)][1]
        goal_pose = util.goal_pose('map', point, quaternion)
        self.client.send_goal(goal_pose)
        self.client.wait_for_result()
        util.signal(1, onColor=Led.BLACK)  #debug
        util.rotate(-87)
        push_dist = (abs(int(self.box_stall_id) - int(self.goal_stall_id))
                     + 1) * square_dist - 0.4
        util.move(push_dist)
        util.signal(2, onColor=Led.GREEN)
        util.rotate(87)
    else:
        # Approach from the stall right of the box, face it, and push left.
        point = PARK_SPOT_WAYPOINTS[str(int(self.box_stall_id) + 1)][0]
        quaternion = PARK_SPOT_WAYPOINTS[str(int(self.box_stall_id) + 1)][1]
        goal_pose = util.goal_pose('map', point, quaternion)
        self.client.send_goal(goal_pose)
        self.client.wait_for_result()
        util.signal(1, onColor=Led.BLACK)  #debug
        util.rotate(87)
        push_dist = abs(
            int(self.box_stall_id) - int(self.goal_stall_id) + 1) * square_dist
        util.move(push_dist)
        util.signal(2, onColor=Led.GREEN)
        util.rotate(-87)
    ar_tag_sub.unregister()
    return 'completed'
def execute(self, userdata):
    """Find the box's current stall via its AR tag and push it to the goal
    stall one square at a time, re-approaching the box between pushes.

    Returns 'end' on shutdown, 'completed' when the box reaches the goal
    stall (or a terminal stall 1/5, or a re-approach fails).
    NOTE(review): indentation reconstructed from a collapsed source line --
    confirm the block nesting against version control.
    """
    # set to None for each loop
    self.box_tag_id = None
    self.goal_tag_id = None
    self.goal_stall_id = None
    self.box_tag_saw = None
    # `or True` forces re-initialization every call -- flag is effectively dead.
    if self.is_initial_pose_set == False or True:
        self.set_init_map_pose()
        self.is_initial_pose_set = True
    ar_tag_sub = rospy.Subscriber('/ar_pose_marker', AlvarMarkers,
                                  self.ar_tag_sub_callback)
    print "Waiting for /ar_pose_marker message..."
    rospy.wait_for_message('/ar_pose_marker', AlvarMarkers)
    self.twist_pub = rospy.Publisher("/cmd_vel_mux/input/teleop", Twist,
                                     queue_size=1)
    current_box_stall_id = None
    tmp_time = time.time()
    Is_in_stall_5 = False
    # Scan for the tags; once both are known, resolve the box's stall.
    while True:
        if rospy.is_shutdown():
            ar_tag_sub.unregister()
            return 'end'
        if self.box_tag_id != None and self.goal_tag_id != None:
            try:
                (trans, _) = self.listener.lookupTransform(
                    '/map', '/ar_marker_' + str(self.box_tag_id),
                    rospy.Time(0))
                box_tag_point = Point(trans[0], trans[1], trans[2])
                current_box_stall_id = get_cloest_stall(box_tag_point)
            except (tf.LookupException, tf.ConnectivityException,
                    tf.ExtrapolationException):
                print "TF look up exception when look up goal tag"
            if current_box_stall_id != None and self.goal_stall_id != None:
                print "current_box_stall_id", current_box_stall_id, \
                    'self.goal_stall_id', self.goal_stall_id
                # Stalls 1 and 5 are treated as terminal positions.
                if current_box_stall_id == 1 or current_box_stall_id == 5:
                    return 'completed'
                # Already at the goal -- nothing to push.
                if current_box_stall_id == self.goal_stall_id:
                    return 'completed'
                break
        twist = Twist()
        twist.angular.z = - 0.4  # clockwise scan
        self.twist_pub.publish(twist)
        # One-time fallback after 5 s: reposition at the stall-6 waypoint.
        if (time.time() - tmp_time) > 5 and not Is_in_stall_5:
            goal_pose = util.goal_pose('map', PARK_SPOT_WAYPOINTS['6'][0],
                                       Quaternion(0, 0, 0, 1))
            self.client.send_goal(goal_pose)
            print "waiting for result ", goal_pose.target_pose.header.frame_id
            self.client.wait_for_result()
            Is_in_stall_5 = True
    # Push direction from relative stall ordering.
    box_is_left = True
    if int(current_box_stall_id) > int(self.goal_stall_id):
        box_is_left = False
    if box_is_left:
        # if current_box_stall_id == 2:
        #     point = PARK_SPOT_WAYPOINTS['6'][0]
        #     quaternion = PARK_SPOT_WAYPOINTS['6'][1]
        #     goal_pose = util.goal_pose('map', point, quaternion)
        #     self.client.send_goal(goal_pose)
        #     print "waiting for result ", goal_pose.target_pose.header.frame_id
        #     self.client.wait_for_result()
        # Approach from the stall left of the box, turn to face it.
        point = PARK_SPOT_WAYPOINTS[str(current_box_stall_id - 1)][0]
        quaternion = PARK_SPOT_WAYPOINTS[str(current_box_stall_id - 1)][1]
        goal_pose = util.goal_pose('map', point, quaternion)
        self.client.send_goal(goal_pose)
        print "waiting for result ", goal_pose.target_pose.header.frame_id
        self.client.wait_for_result()
        util.rotate(-92)
        # Push one square per iteration toward the goal stall.
        for i in range(abs(current_box_stall_id - self.goal_stall_id)):
            if i == 0 and current_box_stall_id == 2:
                # First push from stall 2: longer travel, compensated for
                # box/robot half-lengths.
                push_dist = (SQUARE_DIST * 2 - BOX_EDGE_LENGTH / 2
                             - ROBOT_LENGTH / 2 - 0.1)
                util.move(push_dist, linear_scale=0.1)
            else:
                # Back off, re-approach the box face, then push one square.
                util.move(-0.7, linear_scale=0.3)
                if not self.go_to_side('box_front'):
                    return 'completed'
                push_dist = SQUARE_DIST + AMCL_APPROACH_BOX - 0.05
                util.move(push_dist, linear_scale=0.1)
    else:
        # Mirror image: approach from the stall right of the box.
        point = PARK_SPOT_WAYPOINTS[str(int(current_box_stall_id) + 1)][0]
        quaternion = PARK_SPOT_WAYPOINTS[str(int(current_box_stall_id) + 1)][1]
        goal_pose = util.goal_pose('map', point, quaternion)
        self.client.send_goal(goal_pose)
        print "waiting for result ", goal_pose.target_pose.header.frame_id
        self.client.wait_for_result()
        # Presumably hand-tuned turn angles per stall -- TODO confirm.
        if current_box_stall_id == 3:
            util.rotate(81)
        else:
            util.rotate(89)
        for i in range(abs(current_box_stall_id - self.goal_stall_id)):
            if i == 0 and current_box_stall_id == 4:
                push_dist = (SQUARE_DIST * 2 - BOX_EDGE_LENGTH / 2
                             - ROBOT_LENGTH / 2 - 0.1)
                util.move(push_dist, linear_scale=0.1)
            else:
                util.move(-0.7, linear_scale=0.3)
                if not self.go_to_side('box_front'):
                    return 'completed'
                # if i == 0:
                #     util.rotate(6, max_error=2, anglular_scale=0.5)
                push_dist = SQUARE_DIST + AMCL_APPROACH_BOX - 0.07
                util.move(push_dist, linear_scale=0.1)
    util.signal(2, onColor1=Led.GREEN, onColor2=Led.RED)
    # Back away from the box before finishing.
    util.move(-0.2, linear_scale=0.3)
    ar_tag_sub.unregister()
    return 'completed'
def train(infile, outfile):
    """Build datasets, a graph model, and run the full training loop.

    Args:
        infile: path to a pickle holding {'train_df': ..., 'test_df': ...}
            dataframes with `rdmol` and `value` columns.
        outfile: unused here; kept for CLI/caller compatibility.

    Side effects: writes a ``.meta`` pickle under CHECKPOINT_DIR and
    delegates logging/checkpointing to ``generic_runner``.
    """
    # FIX: use context managers so the pickle file handles are closed
    # (the original leaked both the load and dump handles).
    with open(infile, 'rb') as fid:
        d = pickle.load(fid)
    train_df = d['train_df']
    test_df = d['test_df']

    USE_CUDA = True
    # Atomic numbers one-hot encoded as vertex features: H C N O F P S Cl.
    atomicno = [1, 6, 7, 8, 9, 15, 16, 17]

    ### Create datasets and data loaders
    feat_vect_args = dict(feat_atomicno_onehot=atomicno, feat_pos=False,
                          feat_atomicno=True, feat_valence=True,
                          aromatic=True, hybridization=True,
                          partial_charge=False, formal_charge=True,
                          r_covalent=False, total_valence_onehot=True,
                          r_vanderwals=False, default_valence=True,
                          rings=True)
    feat_mat_args = dict(feat_distances=False, feat_r_pow=None)
    split_weights = [1, 1.5, 2, 3]
    adj_args = dict(edge_weighted=False, norm_adj=True, add_identity=True,
                    split_weights=split_weights)

    BATCH_SIZE = 32
    MASK_ZEROOUT_PROB = 0.0
    COMBINE_MAT_VECT = 'row'
    INIT_NOISE = 1.0e-2

    print("len(train_df)=", len(train_df))
    ds_train = MoleculeDatasetNew(train_df.rdmol.tolist(),
                                  train_df.value.tolist(),
                                  MAX_N, feat_vect_args, feat_mat_args,
                                  adj_args,
                                  combine_mat_vect=COMBINE_MAT_VECT,
                                  mask_zeroout_prob=MASK_ZEROOUT_PROB)
    print("len(ds_train)=", len(ds_train))
    dl_train = torch.utils.data.DataLoader(ds_train, batch_size=BATCH_SIZE,
                                           shuffle=True, pin_memory=True)

    ds_test = MoleculeDatasetNew(test_df.rdmol.tolist(),
                                 test_df.value.tolist(),
                                 MAX_N, feat_vect_args, feat_mat_args,
                                 adj_args,
                                 combine_mat_vect=COMBINE_MAT_VECT)
    dl_test = torch.utils.data.DataLoader(ds_test,
                                          batch_size=BATCH_SIZE // 2,
                                          shuffle=True, pin_memory=True)

    ### Model configuration
    USE_RESNET = True
    INT_D = 1024          # hidden width per layer
    LAYER_N = 7
    g_feature_n = 29 + len(atomicno)  # vertex feature dimension
    USE_GRAPH_MAT = False
    GS = 1                # number of adjacency splits fed to the model
    AGG_FUNC = nets.goodmax
    if USE_GRAPH_MAT:
        net = nets.GraphMatModel(g_feature_n, [INT_D] * LAYER_N,
                                 resnet=USE_RESNET, noise=INIT_NOISE, GS=GS)
        data_feat = ['mat']
    else:
        net = nets.GraphVertModel(g_feature_n, [INT_D] * LAYER_N,
                                  resnet=USE_RESNET, noise=INIT_NOISE,
                                  agg_func=AGG_FUNC, GS=GS)
        data_feat = ['vect']
    net = move(net, USE_CUDA)

    for n, p in net.named_parameters():
        print(n, p.shape)

    criterion = nn.MSELoss()

    ### Optimizer / schedule configuration
    amsgrad = False
    LR = 1e-3
    SCHEDULER = True
    GAMMA = 0.90
    MODEL_NAME = "{}_{}_graph_conv_{}_{}_{}_{}_{}_{}_{}_{}_{}_{}.{:08d}".format(
        NUC, DATASET_NAME, LAYER_N, INT_D, amsgrad, MASK_ZEROOUT_PROB,
        BATCH_SIZE, LR, INIT_NOISE, SCHEDULER, GAMMA, g_feature_n,
        int(time.time() % 1e7))
    print("MODEL_NAME=", MODEL_NAME)
    # FIX: dropped the unused local SummaryWriter("logs/<name>") -- it was
    # never written to or closed; generic_runner creates its own writer.

    optimizer = torch.optim.Adam(net.parameters(), lr=LR,
                                 amsgrad=amsgrad, eps=1e-6)
    if SCHEDULER:
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                    step_size=10,
                                                    gamma=GAMMA)
    else:
        scheduler = None

    # Persist the run configuration alongside the checkpoints.
    meta = {
        "feat_vect_args": feat_vect_args,
        "feat_mat_args": feat_mat_args,
        "adj_args": adj_args,
        "init_noise": INIT_NOISE,
        "lr": LR,
        'BATCH_SIZE': BATCH_SIZE,
        'USE_RESNET': USE_RESNET,
        'INIT_NOISE': INIT_NOISE,
        'INT_D': INT_D,
        'AGG_FUNC': AGG_FUNC,
        'data_feat': data_feat,
        'train_filename': infile,
        'g_feature_n': g_feature_n,
        'USE_GRAPH_MAT': USE_GRAPH_MAT,
        'LAYER_N': LAYER_N,
    }
    with open(os.path.join(CHECKPOINT_DIR, MODEL_NAME + ".meta"), 'wb') as fid:
        pickle.dump(meta, fid)

    generic_runner(net, optimizer, scheduler, criterion,
                   dl_train, dl_test, MODEL_NAME,
                   MAX_EPOCHS=10000, CHECKPOINT_EVERY=50,
                   data_feat=data_feat, USE_CUDA=USE_CUDA)