def prepareShapeModel(datasetDirectory, shape):
    """Load the dataset for a single shape and build its ShapeModeler.

    Args:
        datasetDirectory: directory containing ``<shape>.dat`` dataset files.
        shape: name of the shape (e.g. a letter) whose dataset to load.

    Returns:
        A ShapeModeler fitted on the first matching dataset file, using the
        module-level ``numParams`` principal components.

    Raises:
        Exception: if no ``<shape>.dat`` file exists under datasetDirectory.
    """
    import glob
    datasetFiles_shape = glob.glob(datasetDirectory + '/' + shape + '.dat')
    if not datasetFiles_shape:
        raise Exception("Dataset not available at " + datasetDirectory + " for shape " + shape)
    # init_filename= matches the keyword used by the other ShapeModeler
    # constructions in this module (prepareShapesModel and the per-letter loop).
    shapeModeler = ShapeModeler(init_filename=datasetFiles_shape[0],
                                num_principle_components=numParams)
    return shapeModeler
def prepareShapesModel(datasetDirectory, num_params):
    """Build a ShapeModeler for every ``.dat`` dataset in *datasetDirectory*.

    Only datasets whose base name matches the module-level ``regexp`` filter
    are included.

    Args:
        datasetDirectory: directory to scan for ``*.dat`` dataset files.
        num_params: number of principal components for each ShapeModeler.

    Returns:
        Dict mapping shape name (file base name) -> ShapeModeler.
    """
    shapes = {}
    nameFilter = re.compile(regexp)
    for dataset in glob.glob(datasetDirectory + '/*.dat'):
        name = os.path.splitext(os.path.basename(dataset))[0]
        if not nameFilter.match(name):
            continue
        shapes[name] = ShapeModeler(init_filename=dataset,
                                    num_principle_components=num_params)
    return shapes
# --- Module-level state ------------------------------------------------------
settings_shapeLearners = []  # presumably per-shape learner settings — populated elsewhere; verify
userInputCaptures = []  # presumably collected user input traces — populated elsewhere; verify
nb_param = 10  # number of principal components used for each letter's shape model
spaces = {}  # letter -> ShapeModeler, filled by the loop below
abc = [
    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
    'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'
]
# Build one shape model per lower-case letter. generateSettings (defined
# elsewhere) supplies the dataset/parameter file paths for each letter.
for letter in abc:
    (shapeType, init_datasetFile, update_datasetFiles, datasetParam) = generateSettings(letter)
    spaces[letter] = ShapeModeler(init_filename=init_datasetFile,
                                  update_filenames=update_datasetFiles,
                                  param_filename=datasetParam,
                                  num_principle_components=nb_param)


def downsampleShape(shape, numDesiredPoints, xyxyFormat=False):
    # NOTE(review): this function is truncated in the visible source — its body
    # continues beyond this chunk; only the prefix is documented here.
    # NOTE(review): `/` yields a float under Python 3, yet numPointsInShape is
    # used as a slice index below — this looks like Python 2 integer-division
    # code; confirm the target interpreter.
    numPointsInShape = len(shape) / 2
    if (xyxyFormat):
        #make xyxy format
        # Interleaved [x0, y0, x1, y1, ...]: de-interleave by striding.
        x_shape = shape[0::2]
        y_shape = shape[1::2]
    else:
        # Stacked [x0..xN, y0..yN]: split at the midpoint.
        x_shape = shape[0:numPointsInShape]
        y_shape = shape[numPointsInShape:]
    if isinstance(x_shape, np.ndarray):
        #convert arrays to lists for interp1d
        # Assumes a column vector so .T.tolist() yields one row — TODO confirm.
        x_shape = (x_shape.T).tolist()[0]