return output if __name__ == "__main__": conversion_factor = 1440 # Conversion factor for Tinker units to Mv/cm. ########################################################################### # # Handle user arguments # ########################################################################### start = time.time() parser = create_parser() args = parser.parse_args() nengines = args.nengines equil = args.equil stride = args.stride # Process args for MDI mdi.MDI_Init(args.mdi, mpi_world) if use_mpi4py: mpi_world = mdi.MDI_Get_Intra_Code_MPI_Comm() world_rank = mpi_world.Get_rank() snapshot_filename = args.snap probes = [int(x) for x in args.probes.split()]
import cv2 import math import argparse import os import random import torch import torch.optim as optim import ngransac from network import CNNet from dataset import SparseDataset import util parser = util.create_parser( 'NG-RANSAC demo for a user defined image pair. Fits an essential matrix (default) or fundamental matrix (-fmat) using OpenCV RANSAC vs. NG-RANSAC.' ) parser.add_argument('--image1', '-img1', default='images/demo1.jpg', help='path to image 1') parser.add_argument('--image2', '-img2', default='images/demo2.jpg', help='path to image 2') parser.add_argument( '--outimg', '-out',
import numpy as np import cv2 import random import torch import torch.optim as optim import ngransac from network import CNNet from dataset import SparseDataset import util # parse command line arguments parser = util.create_parser( description="Train a neural guidance network end-to-end using a task loss." ) parser.add_argument( '--datasets', '-ds', default= 'brown_bm_3---brown_bm_3-maxpairs-10000-random---skip-10-dilate-25,st_peters_square', help='which datasets to use, separate multiple datasets by comma') parser.add_argument('--variant', '-v', default='train', help='subfolder of the dataset to use') parser.add_argument( '--hyps',
# NG-RANSAC evaluation on pre-computed correspondences: imports and options.
import numpy as np
import cv2
import random
import os
import time

import torch

import ngransac
from network import CNNet
from dataset import SparseDataset
import util

# Shared NG-RANSAC parser, extended with test-specific options below.
parser = util.create_parser(
    description="Test NG-RANSAC on pre-calculated correspondences.")

parser.add_argument('--dataset', '-ds',
                    default='reichstag',
                    help='which dataset to use')

parser.add_argument('--batchmode', '-bm',
                    action='store_true',
                    help='loop over all test datasets defined in util.py')

parser.add_argument('--variant', '-v',
                    default='test',
                    help='subfolder of the dataset to use')
import numpy as np import math import torch import torch.optim as optim import ngransac from network import CNNet from dataset import SparseDataset import util # parse command line arguments parser = util.create_parser( description= "Train a neural guidance network using correspondence distance to a ground truth model to calculate target probabilities." ) parser.add_argument( '--datasets', '-ds', default= 'brown_bm_3---brown_bm_3-maxpairs-10000-random---skip-10-dilate-25,st_peters_square', help='which datasets to use, separate multiple datasets by comma') parser.add_argument('--variant', '-v', default='train', help='subfolder of the dataset to use') parser.add_argument('--learningrate', '-lr',