Example #1
def main():

    p = argparse.ArgumentParser(description="Generate QC pages based "
                                "on a realistic head model .msh "
                                "file and a centroid for "
                                "projection")

    p.add_argument('msh', type=str, help="Path to .msh realistic head model")
    p.add_argument('centroid',
                   type=str,
                   help="Path to .txt centroid coordinates")
    p.add_argument('dscalar',
                   type=str,
                   help="Path to .dscalar file for brain texture")
    p.add_argument('out_html',
                   type=str,
                   help="Path to HTML output file for QC")

    args = p.parse_args()

    msh = args.msh
    centroid = args.centroid
    texture = np.load(args.dscalar)
    out = args.out_html

    # Load in mesh surfaces
    logging.info(f"Loading in brain surfaces from {msh}...")
    brain = decompose_gmsh(msh, GM_ENTITIES)

    # Construct dummy field object
    f = FieldFunc(mesh_file=msh,
                  initial_centroid=np.genfromtxt(centroid),
                  tet_weights=None,
                  coil='',
                  field_dir=None)

    logging.info("Discretizing parameteric mesh...")
    C, iR, bounds = f.C, f.iR, f.bounds
    param_mesh = construct_parameteric_mesh(C, iR, bounds)
    gen_mshplot_html(brain, texture, param_mesh, out)
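Both this script and the ones below read the centroid with np.genfromtxt, so the input is just a whitespace-delimited text file holding a single x, y, z coordinate. A minimal sketch (file name and coordinate values hypothetical):

import numpy as np

# Write one RAS coordinate to disk, then read it back the way main() does
np.savetxt("centroid.txt", np.array([-46.0, 45.0, 38.0]))
centroid = np.genfromtxt("centroid.txt")  # -> array([-46., 45., 38.])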
Example #2
def main():

    parser = argparse.ArgumentParser(
        description="Run grid optimization on a single subject")
    parser.add_argument('msh',
                        type=str,
                        help="Subject Gmsh .msh realistic head model")
    parser.add_argument('weights',
                        type=str,
                        help=".npy binary containing a weight for each "
                        "tetrahedron")
    parser.add_argument('centroid',
                        type=str,
                        help="Coordinates in T1w space for a centroid "
                        "to the weight function to optimize over")
    parser.add_argument('coil', type=str, help="Path to SimNIBS coil file")
    parser.add_argument('output_file',
                        type=str,
                        help="Output file storing optimal coordinates")
    parser.add_argument('locdim',
                        type=int,
                        help="Number of points to evaluate along each "
                        "spatial dimension")
    parser.add_argument('rotdim',
                        type=int,
                        help="Number of points to evaluate along each "
                        "rotational dimension")
    parser.add_argument('--history',
                        type=str,
                        help="Output file to store history of scores"
                        " into for convergence/visualization")
    parser.add_argument('--workdir',
                        type=str,
                        help="Working directory to run simulations in")
    parser.add_argument('--ncpus',
                        type=int,
                        help="Number of threads to use for each batch "
                        "of simulations. Default = 8")
    parser.add_argument('--batchsize',
                        type=int,
                        help="Number of simulations to run simultaneously, "
                        "will default to half the number of cpus if not "
                        "specified.")
    parser.add_argument('--options',
                        type=str,
                        help="ADVANCED: Modify defaults for FEM evaluation "
                        "function.")

    args = parser.parse_args()
    msh = args.msh
    wf = np.load(args.weights)
    centroid = np.genfromtxt(args.centroid)
    coil = args.coil
    ncpus = args.ncpus or 8
    batch_size = args.batchsize or (ncpus // 2 - 1)
    history = args.history
    workdir = args.workdir or "/tmp/"
    loc_dim = args.locdim
    rot_dim = args.rotdim
    output_file = args.output_file
    options = args.options

    if options:
        with open(options, 'r') as f:
            opts = json.load(f)
    else:
        opts = {}

    # Construct objective function object
    logging.info(f"Using {ncpus} cpus")
    femfunc = FieldFunc(mesh_file=msh,
                        initial_centroid=centroid,
                        tet_weights=wf,
                        coil=coil,
                        field_dir=workdir,
                        cpus=ncpus,
                        **opts)

    # Set up grid for evaluation
    x_in = np.linspace(femfunc.bounds[0, 0], femfunc.bounds[0, 1], loc_dim)
    y_in = np.linspace(femfunc.bounds[1, 0], femfunc.bounds[1, 1], loc_dim)
    rot_in = np.linspace(0, 180, rot_dim)
    input_array = cartesian([x_in, y_in, rot_in])

    score_list = []
    if history and os.path.exists(history):

        # Clip evaluation array based on number previously evaluated
        logging.info(f"{history} already exists! Skipping previous runs")
        with open(history, 'r') as f:
            num_previous = sum(1 for _ in f) - 1
        input_array = input_array[num_previous:, :]
        score_list.append(np.genfromtxt(history, skip_header=1, delimiter=','))

    elif history:
        header = "x,y,r,score\n"
        with open(history, 'w') as f:
            f.write(header)

    # Split input array into chunks
    divisions = np.arange(batch_size, input_array.shape[0], batch_size)
    input_arrays = np.split(input_array, divisions)

    logging.info(f"Running {len(input_arrays)} iterations...")
    logging.info(f"Evaluating {batch_size} simulations per iteration...")

    for i, a in enumerate(input_arrays):

        logging.info(f"Iteration number {i} of {len(input_arrays)}")
        scores = femfunc.evaluate(a)

        score_arr = np.c_[a, scores]
        score_list.append(score_arr)
        if history:
            with open(history, "a") as h_file:
                logging.debug(f"Writing to {history}...")
                np.savetxt(h_file, np.c_[a, scores], delimiter=',')

        logging.info(f"Completed iteration {i}")

    logging.info("Finished computing evaluation grid!")

    # Find the best value
    all_scores = np.vstack(score_list)
    best_row = np.argmax(all_scores[:, -1])
    best_input = all_scores[best_row, :-1]
    np.savetxt(output_file, best_input)
    logging.info(f"Saved optimal coordinates to {output_file}")
Example #3
def main():

    args = docopt(__doc__)

    # Parse arguments
    mesh = args['<mesh>']
    weights = np.load(args['<weightfile>'])
    init_centroid = np.genfromtxt(args['<init_centroid>'])
    coil = args['<coil>']
    output_file = args['<output_file>']
    cpus = int(args['--cpus'] or 8)
    tmpdir = args['--tmp-dir'] or os.getenv('TMPDIR') or "/tmp/"
    num_iters = int(args['--n-iters'] or 50)
    min_samps = int(args['--min-var-samps'] or 10)
    tol = float(args['--convergence'] or 0.001)
    history = args['--history']
    skip_convergence = args['--skip-convergence']
    options = args['--options']

    if options:
        with open(options, 'r') as f:
            opts = json.load(f)
        logging.info("Using custom options file {}".format(options))
        logging.info("{}".format('\''.join(
            [f"{k}:{v}" for k, v in opts.items()])))
    else:
        opts = {}

    logging.info('Using {} cpus'.format(cpus))

    f = FieldFunc(mesh_file=mesh,
                  initial_centroid=init_centroid,
                  tet_weights=weights,
                  coil=coil,
                  field_dir=tmpdir,
                  cpus=cpus,
                  **opts)

    # Make search domain
    search_domain = TensorProductDomain([
        ClosedInterval(f.bounds[0, 0], f.bounds[0, 1]),
        ClosedInterval(f.bounds[1, 0], f.bounds[1, 1]),
        ClosedInterval(0, 180)
    ])

    c_search_domain = cTensorProductDomain([
        ClosedInterval(f.bounds[0, 0], f.bounds[0, 1]),
        ClosedInterval(f.bounds[1, 0], f.bounds[1, 1]),
        ClosedInterval(0, 180)
    ])

    # Generate historical points
    prior = DefaultPrior(n_dims=3 + 2, num_noise=1)
    prior.tophat = TophatPrior(-2, 5)
    prior.ln_prior = NormalPrior(12.5, 1.6)
    hist_pts = cpus
    init_pts = search_domain.generate_uniform_random_points_in_domain(hist_pts)
    observations = -f.evaluate(init_pts)
    hist_data = HistoricalData(dim=3, num_derivatives=0)
    hist_data.append_sample_points(
        [SamplePoint(inp, o, 0.0) for o, inp in zip(observations, init_pts)])

    # Train GP model
    gp_ll = GaussianProcessLogLikelihoodMCMC(historical_data=hist_data,
                                             derivatives=[],
                                             prior=prior,
                                             chain_length=1000,
                                             burnin_steps=2000,
                                             n_hypers=2**4,
                                             noisy=False)
    gp_ll.train()

    # Initialize grad desc params
    sgd_params = cGDParams(num_multistarts=200,
                           max_num_steps=50,
                           max_num_restarts=5,
                           num_steps_averaged=4,
                           gamma=0.7,
                           pre_mult=1.0,
                           max_relative_change=0.5,
                           tolerance=1.0e-10)

    num_samples = int(cpus * 1.3)
    best_point_history = []

    # Rolling buffer of recent best values for the convergence check
    var_buffer = deque(maxlen=min_samps)
    for i in np.arange(0, num_iters):

        # Optimize qEI and pick samples
        points_to_sample, ei = gen_sample_from_qei(gp_ll.models[0],
                                                   c_search_domain,
                                                   sgd_params=sgd_params,
                                                   num_samples=num_samples,
                                                   num_mc=2**10)

        # Collect observations
        sampled_points = -f.evaluate(points_to_sample)
        evidence = [
            SamplePoint(c, v, 0.0)
            for c, v in zip(points_to_sample, sampled_points)
        ]

        # Update model
        gp_ll.add_sampled_points(evidence)
        gp_ll.train()

        # Pull model and extract values
        gp = gp_ll.models[0]
        min_point = np.argmin(gp._points_sampled_value)
        min_val = np.min(gp._points_sampled_value)
        best_coord = gp.get_historical_data_copy().points_sampled[min_point]

        logging.info('Iteration {} of {}'.format(i + 1, num_iters))
        logging.info('Recommended Points:')
        logging.info(points_to_sample)
        logging.info('Expected Improvement: {}'.format(ei))
        logging.info('Current Best:')
        logging.info(f'f(x*)= {min_val}')
        logging.info(f'Coord: {best_coord}')
        best_point_history.append(str(min_val))

        if history:
            with open(history, 'w') as buf:
                buf.write('\n'.join(best_point_history))

        # Convergence check
        if (len(var_buffer) == var_buffer.maxlen) and not skip_convergence:
            deviation = sum([abs(x - min_val) for x in var_buffer])
            if deviation < tol:
                logging.info('Convergence reached!')
                logging.info('Deviation: {}'.format(deviation))
                logging.info('History length: {}'.format(var_buffer.maxlen))
                logging.info('Tolerance: {}'.format(tol))
                break

        var_buffer.append(min_val)

    # Save position and orientation matrix
    np.savetxt(output_file, best_coord)
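The convergence test keeps the last --min-var-samps best values in a bounded deque and stops once the summed absolute deviation from the current best drops below the tolerance. A standalone sketch of that window logic with made-up values:

from collections import deque

min_samps = 10
tol = 0.001
var_buffer = deque(maxlen=min_samps)

for it, min_val in enumerate([0.50, 0.20, 0.11] + [0.10] * 12):
    if len(var_buffer) == var_buffer.maxlen:
        deviation = sum(abs(x - min_val) for x in var_buffer)
        if deviation < tol:
            print(f"Converged at iteration {it}: deviation={deviation}")
            break
    var_buffer.append(min_val)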
Example #4
def main():

    p = argparse.ArgumentParser(description="Run a SimNIBS simulation "
                                "at the coordinates specified at a given "
                                "orientation")
    p.add_argument("mesh", type=str, help="Realistic head model " ".msh file")
    p.add_argument("orientation",
                   type=str,
                   help="Input parameters to evaluate "
                   "objective function on")
    p.add_argument("centroid",
                   type=str,
                   help="Coordinate .txt file pointing "
                   "to centroid for parameteric mesh seeding")
    p.add_argument("weights",
                   type=str,
                   help="Weight function used to "
                   "evaluate the objective function")
    p.add_argument("coil",
                   type=str,
                   help="Coil to use for running a "
                   "simulation, .ccd physical model or .nii.gz dA/dt file")
    p.add_argument("out_fields",
                   type=str,
                   help="Output gmsh simulation .msh file")
    p.add_argument("out_coil", type=str, help="Output coil position .geo file")
    p.add_argument("out_coords",
                   type=str,
                   help="Output transformed coil coordinate file in RAS")

    args = p.parse_args()

    # Parse arguments
    msh = args.mesh
    orientation = np.genfromtxt(args.orientation)
    centroid = np.genfromtxt(args.centroid)
    wf = np.load(args.weights)
    coil = args.coil
    out_fields = args.out_fields
    out_coil = args.out_coil
    out_coords = args.out_coords

    # Construct the objective function
    fem = FieldFunc(msh,
                    initial_centroid=centroid,
                    tet_weights=wf,
                    coil=coil,
                    field_dir=os.getcwd(),
                    cpus=2)

    # Get position and anterior facing direction of coil
    matsimnibs = fem._transform_input(*orientation)
    a = matsimnibs[:3, 1]

    # Coil adjustment methodology
    # We never want anterior facing coil orientations
    if a[1] < 0:
        orientation[2] = (orientation[2] + 180) % 360
    _, matsimnibs = fem.run_simulation(orientation, out_fields, out_coil)

    # Save matsimnibs matrix
    np.save(out_coords, matsimnibs)

    # Write in weight function
    M = mesh_io.read_msh(out_fields)
    gm = np.where(M.elm.tag1 == 2)
    wf_field = np.zeros_like(M.elmdata[1].value)
    wf_field[gm] = wf
    M.add_element_field(wf_field, 'weightfunction')
    M.write(out_fields)
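The flip guard reads the second column of the 4x4 matsimnibs placement matrix (the coil's facing axis) and adds 180 degrees to the twist parameter whenever that axis has a negative y component. A toy reproduction with a hand-built matrix rather than a real FieldFunc:

import numpy as np

# Hypothetical placement matrix: identity rotation, coil y axis = +Y
matsimnibs = np.eye(4)
orientation = np.array([10.0, -5.0, 30.0])  # (x, y, twist) parameters

a = matsimnibs[:3, 1]  # facing direction of the coil
if a[1] < 0:           # negative y component -> flip the twist angle
    orientation[2] = (orientation[2] + 180) % 360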
Example #5
def main():

    args = docopt(__doc__)

    # Parse arguments
    mesh = args['<mesh>']
    weights = np.load(args['<weightfile>'])
    C = np.load(args['<quad_const>'])
    b = np.load(args['<bounds>'])
    R = np.load(args['<affine>'])
    coil = args['<coil>']
    loc_out = args['<loc_out>']
    rot_out = args['<rot_out>']
    cpus = int(args['--cpus'] or 8)
    tmpdir = args['--tmp-dir'] or os.getenv('TMPDIR') or "/tmp/"
    num_iters = int(args['--n-iters'] or 50)


    # Make search domain
    search_domain = TensorProductDomain([
        ClosedInterval(b[0, 0], b[0, 1]),  # X coord on quadratic surface
        ClosedInterval(b[1, 0], b[1, 1]),  # Y coord on quadratic surface
        ClosedInterval(0, 180)             # Rotational angle
    ])

    c_search_domain = cTensorProductDomain([
        ClosedInterval(b[0, 0], b[0, 1]),
        ClosedInterval(b[1, 0], b[1, 1]),
        ClosedInterval(0, 180)
    ])

    # Make objective function
    f = FieldFunc(mesh_file=mesh, quad_surf_consts=C,
                  surf_to_mesh_matrix=R, tet_weights=weights,
                  field_dir=tmpdir, coil=coil, cpus=cpus)


    # Generate historical points
    hist_pts = int(cpus * 1.5)
    init_pts = search_domain.generate_uniform_random_points_in_domain(hist_pts)
    observations = -f.evaluate(init_pts)
    hist_data = HistoricalData(dim=3, num_derivatives=0)
    hist_data.append_sample_points([
        SamplePoint(inp, o, 0.0) for o, inp in zip(observations, init_pts)
    ])

    # Set up model specifications
    prior = DefaultPrior(n_dims=3 + 2, num_noise=1)
    gp_ll = GaussianProcessLogLikelihoodMCMC(historical_data=hist_data,
                                             derivatives=[],
                                             prior=prior,
                                             chain_length=1000,
                                             burnin_steps=2000,
                                             n_hypers=2**4,
                                             noisy=False)
    gp_ll.train()

    # Initialize grad desc params
    sgd_params = cGDParams(num_multistarts=200, max_num_steps=50,
                           max_num_restarts=2, num_steps_averaged=4,
                           gamma=0.7, pre_mult=1.0, max_relative_change=0.5,
                           tolerance=1.0e-10)

    num_samples = int(cpus * 1.3)
    best_point_history = []
    for i in np.arange(0, num_iters):

        # Optimize qEI and pick samples
        points_to_sample, ei = gen_sample_from_qei(gp_ll.models[0],
                                                   c_search_domain,
                                                   sgd_params=sgd_params,
                                                   num_samples=num_samples,
                                                   num_mc=2**10)

        # Collect observations
        sampled_points = -f.evaluate(points_to_sample)
        evidence = [
            SamplePoint(c, v, 0.0)
            for c, v in zip(points_to_sample, sampled_points)
        ]

        # Update model
        gp_ll.add_sampled_points(evidence)
        gp_ll.train()

        # Pull model and extract values
        gp = gp_ll.models[0]
        min_point = np.argmin(gp._points_sampled_value)
        min_val = np.min(gp._points_sampled_value)
        best_coord = gp.get_historical_data_copy().points_sampled[min_point]

        print('Recommended Points:')
        print(points_to_sample)
        print('Expected Improvement: {}'.format(ei))
        print('Current Best:')
        print('f(x*) =', min_val)
        print('Coord:', best_coord)

        best_point_history.append(min_val)

    # Once sampling is done, take the best point and transform it back
    # into native space
    preaff_loc = geolib.map_param_2_surf(best_coord[0], best_coord[1], C)
    preaff_rot, _ = geolib.map_rot_2_surf(best_coord[0], best_coord[1],
                                          best_coord[2], C)
    loc = np.matmul(R, preaff_loc)
    rot = np.matmul(R, preaff_rot)
    np.savetxt(loc_out, loc)
    np.savetxt(rot_out, rot)
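The initial design is a uniform draw from the box domain. Plain numpy can mimic what generate_uniform_random_points_in_domain returns, assuming b holds per-axis [lower, upper] bounds and the sampling is nothing fancier than uniform (bounds values hypothetical):

import numpy as np

rng = np.random.default_rng(0)
b = np.array([[-50.0, 50.0],   # X bounds on the quadratic surface
              [-50.0, 50.0]])  # Y bounds

n_pts = 12  # e.g. int(cpus * 1.5)
init_pts = np.column_stack([
    rng.uniform(b[0, 0], b[0, 1], n_pts),  # x
    rng.uniform(b[1, 0], b[1, 1], n_pts),  # y
    rng.uniform(0, 180, n_pts),            # rotation angle
])  # shape (12, 3), one candidate per row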
Example #6
def main():

    parser = argparse.ArgumentParser(
        description="Run grid optimization on a single subject")
    parser.add_argument('msh',
                        type=str,
                        help="Subject Gmsh .msh realistic head model")
    parser.add_argument('weights',
                        type=str,
                        help=".npy binary containing a weight for each "
                        "tetrahedron")
    parser.add_argument('centroid',
                        type=str,
                        help="Coordinates in T1w space for a centroid "
                        "to the weight function to optimize over")
    parser.add_argument('coil', type=str, help="Path to SimNIBS coil file")
    parser.add_argument('output_file',
                        type=str,
                        help="Output file storing optimal coordinates")
    parser.add_argument('--history',
                        type=str,
                        help="Output file to store history of scores"
                        " into for convergence/visualization")
    parser.add_argument('--workdir',
                        type=str,
                        help="Working directory to run simulations in")
    parser.add_argument('--ncpus',
                        type=int,
                        help="Number of threads to use for each batch "
                        "of simulations. Default = 8")
    parser.add_argument('--batchsize',
                        type=int,
                        help="Number of simulations to run simultaneously, "
                        "will default to half the number of cpus if not "
                        "specified.")
    parser.add_argument('--solver',
                        type=str,
                        help="Optunity solver to use; "
                        "defaults to particle swarm",
                        choices=optunity.available_solvers())

    args = parser.parse_args()
    msh = args.msh
    wf = np.load(args.weights)
    centroid = np.genfromtxt(args.centroid)
    coil = args.coil
    ncpus = args.ncpus or 8
    batch_size = args.batchsize or (ncpus // 2 - 1)
    history = args.history
    workdir = args.workdir or "/tmp/"
    output_file = args.output_file
    solver = args.solver or "particle swarm"

    # Construct objective function object
    f = FieldFunc(mesh_file=msh,
                  initial_centroid=centroid,
                  tet_weights=wf,
                  coil=coil,
                  field_dir=workdir,
                  cpus=ncpus)

    # Set up optunity optimization
    # Can we feed a list of inputs here?
    pars, details, _ = optunity.minimize(f.evaluate,
                                         num_evals=100,
                                         x=[f.bounds[0, 0], f.bounds[0, 1]],
                                         y=[f.bounds[1, 0], f.bounds[1, 1]],
                                         theta=[0, 180],
                                         solver_name=solver)
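optunity.minimize passes each box-constrained variable to the objective as a separate keyword argument, so the array-based f.evaluate above (the inline question) would likely need a small adapter. A hedged sketch of such a wrapper, assuming evaluate returns one score per input row:

import numpy as np

def make_objective(f):
    # Adapt an array-based evaluate() to optunity's keyword-argument
    # calling convention (sketch only)
    def objective(x, y, theta):
        scores = f.evaluate(np.array([[x, y, theta]]))
        return float(scores[0])
    return objective

Note that the other examples treat higher scores as better (they negate scores before minimizing), so the wrapper would probably also need to negate its return value for optunity.minimize to optimize in the intended direction.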