Example 1
def project(f3d,
            p,
            s,
            sx,
            sy,
            a,
            apply_ctf=False,
            size=None,
            flip_phase=False):
    orient = util.euler2rot(np.deg2rad(p[star.Relion.ANGLEROT]),
                            np.deg2rad(p[star.Relion.ANGLETILT]),
                            np.deg2rad(p[star.Relion.ANGLEPSI]))
    pshift = np.exp(
        -2 * np.pi * 1j *
        (-p[star.Relion.ORIGINX] * sx + -p[star.Relion.ORIGINY] * sy))
    f2d = vop.interpolate_slice_numba(f3d, orient, size=size)
    f2d *= pshift
    if apply_ctf or flip_phase:
        apix = star.calculate_apix(p)
        c = ctf.eval_ctf(s / apix,
                         a,
                         p[star.Relion.DEFOCUSU],
                         p[star.Relion.DEFOCUSV],
                         p[star.Relion.DEFOCUSANGLE],
                         p[star.Relion.PHASESHIFT],
                         p[star.Relion.VOLTAGE],
                         p[star.Relion.AC],
                         p[star.Relion.CS],
                         bf=0,
                         lp=2 * apix)
        if flip_phase:
            c = np.sign(c)
        f2d *= c
    return f2d
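Example 1's project() consumes precomputed Fourier-space grids: the radial frequency magnitude s, its Cartesian components sx and sy, and the in-plane angle a, all sized to match the slice being interpolated. A minimal numpy sketch of that setup (the box size is hypothetical; it mirrors the grid construction that appears in Example 6):

import numpy as np

sz = 256  # hypothetical unpadded box size
sx, sy = np.meshgrid(np.fft.rfftfreq(sz), np.fft.fftfreq(sz))
s = np.sqrt(sx ** 2 + sy ** 2)  # radial spatial frequency in cycles/pixel
a = np.arctan2(sy, sx)          # azimuthal angle of each frequency sample
# project(f3d, particle_row, s, sx, sy, a, ...) is then called once per particle.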
Example 2
def main(args):
    log = logging.getLogger('root')
    hdlr = logging.StreamHandler(sys.stdout)
    log.addHandler(hdlr)
    log.setLevel(logging.getLevelName(args.loglevel.upper()))
    if args.boxsize is None:
        log.error("Please specify box size")
        return 1
    df = star.parse_star(args.input, keep_index=False)
    if args.cls is not None:
        df = star.select_classes(df, args.cls)
    if args.apix is None:
        args.apix = star.calculate_apix(df)
    nside = 2**args.healpix_order
    angular_sampling = np.sqrt(3 / np.pi) * 60 / nside
    theta, phi = pix2ang(nside, np.arange(12 * nside**2))
    phi = np.pi - phi
    hp = np.column_stack((np.sin(theta) * np.cos(phi),
                          np.sin(theta) * np.sin(phi), np.cos(theta)))
    kdtree = cKDTree(hp)
    st = np.sin(np.deg2rad(df[star.Relion.ANGLETILT]))
    ct = np.cos(np.deg2rad(df[star.Relion.ANGLETILT]))
    sp = np.sin(np.deg2rad(df[star.Relion.ANGLEROT]))
    cp = np.cos(np.deg2rad(df[star.Relion.ANGLEROT]))
    ptcls = np.column_stack((st * cp, st * sp, ct))
    _, idx = kdtree.query(ptcls)
    cnts = np.bincount(idx, minlength=theta.size)
    frac = cnts / np.max(cnts).astype(np.float64)
    mu = np.mean(frac)
    sigma = np.std(frac)
    color_scale = (frac - mu) / sigma
    color_scale[color_scale > 5] = 5
    color_scale[color_scale < -1] = -1
    color_scale /= 6
    color_scale += 1 / 6.
    r = args.boxsize * args.apix / 2
    rp = np.reshape(r + r * frac * args.height_scale, (-1, 1))
    base1 = hp * r
    base2 = hp * rp
    base1 = base1[:, [0, 1, 2]] + np.array([r] * 3)
    base2 = base2[:, [0, 1, 2]] + np.array([r] * 3)
    height = np.squeeze(np.abs(rp - r))
    idx = np.where(height >= 0.01)[0]
    width = args.width_scale * np.pi * r * angular_sampling / 360
    bild = np.hstack((base1, base2, np.ones((base1.shape[0], 1)) * width))
    fmt_color = ".color %f 0 %f\n"
    fmt_cyl = ".cylinder %f %f %f %f %f %f %f\n"
    with open(args.output, "w") as f:
        for i in idx:
            f.write(fmt_color % (color_scale[i], 1 - color_scale[i]))
            f.write(fmt_cyl % tuple(bild[i]))
    return 0
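The orientation density above is built by turning each particle's first two Euler angles into a unit vector on the sphere before querying the HEALPix grid. A self-contained numpy sketch of that angle-to-vector step, with hypothetical rot/tilt values in degrees:

import numpy as np

rot = np.array([0.0, 45.0, 120.0])   # hypothetical rlnAngleRot values
tilt = np.array([90.0, 30.0, 60.0])  # hypothetical rlnAngleTilt values
st, ct = np.sin(np.deg2rad(tilt)), np.cos(np.deg2rad(tilt))
sp, cp = np.sin(np.deg2rad(rot)), np.cos(np.deg2rad(rot))
vecs = np.column_stack((st * cp, st * sp, ct))  # unit projection directions
print(np.linalg.norm(vecs, axis=1))             # all approximately 1.0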
Example 3
def main(args):
    """ Main denoising CNN function """

    # Load STAR file and neural network
    star_file = load_star(args.input_micrographs)
    num_mics = len(star_file)
    apix = star.calculate_apix(star_file)
    cutoff_frequency = 1. / args.max_resolution
    nn = load_trained_model(args.model)
    suffix = args.output_suffix
    phaseflip = args.phaseflip
    flipback = args.flipback
    merge_noisy = args.merge_noisy
    merge_freq1 = 1. / (args.merge_resolution + args.merge_width)
    merge_freq2 = 1. / args.merge_resolution

    # Main denoising loop
    for i, metadata in tqdm(star_file.iterrows(),
                            desc="Denoising",
                            total=num_mics):

        mic_file = metadata[star.Relion.MICROGRAPH_NAME]

        # Pre-calculate frequencies, angles, and soft mask
        if not i:
            first_mic = load_mic(mic_file)
            freqs, angles = get_mic_freqs(first_mic, apix, angles=True)
            softmask = 1. - smoothstep(merge_freq1, merge_freq2, freqs)
            merge_band = (softmask < 1) * (softmask > 0)

        new_mic = process(nn,
                          mic_file,
                          metadata,
                          freqs,
                          angles,
                          apix,
                          cutoff_frequency,
                          softmask,
                          merge_band,
                          phaseflip=phaseflip,
                          flipback=flipback,
                          merge_noisy=merge_noisy)

        new_mic_file = mic_file.replace(".mrc", "{0}.mrc".format(suffix))
        save_mic(new_mic, new_mic_file)

    return
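The soft merging mask above comes from a smoothstep helper evaluated between the two merge frequencies. A minimal sketch of the standard cubic Hermite smoothstep, assuming the module's own smoothstep behaves similarly:

import numpy as np

def smoothstep_sketch(edge0, edge1, x):
    # 0 below edge0, 1 above edge1, smooth cubic ramp in between.
    t = np.clip((x - edge0) / (edge1 - edge0), 0.0, 1.0)
    return t * t * (3.0 - 2.0 * t)

freqs = np.linspace(0.0, 0.5, 6)                     # hypothetical frequency grid
softmask = 1.0 - smoothstep_sketch(0.2, 0.3, freqs)  # low-pass style soft mask
merge_band = (softmask < 1) * (softmask > 0)         # transition band, as above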
Example 4
File: sort.py  Project: wwangat/pyem
def main(args):
    pyfftw.interfaces.cache.enable()

    refmap = mrc.read(args.key, compat="relion")
    df = star.parse_star(args.input, keep_index=False)
    star.augment_star_ucsf(df)
    refmap_ft = vop.vol_ft(refmap, threads=args.threads)

    apix = star.calculate_apix(df)
    sz = refmap_ft.shape[0] // 2 - 1
    sx, sy = np.meshgrid(rfftfreq(sz), fftfreq(sz))
    s = np.sqrt(sx**2 + sy**2)
    r = s * sz
    r = np.round(r).astype(np.int64)
    r[r > sz // 2] = sz // 2 + 1
    a = np.arctan2(sy, sx)

    def1 = df["rlnDefocusU"].values
    def2 = df["rlnDefocusV"].values
    angast = df["rlnDefocusAngle"].values
    phase = df["rlnPhaseShift"].values
    kv = df["rlnVoltage"].values
    ac = df["rlnAmplitudeContrast"].values
    cs = df["rlnSphericalAberration"].values
    xshift = df["rlnOriginX"].values
    yshift = df["rlnOriginY"].values

    score = np.zeros(df.shape[0])

    # TODO parallelize
    for i, row in df.iterrows():
        score[i] = particle_xcorr(row, refmap_ft)

    if args.top is None:
        args.top = df.shape[0]

    top = df.iloc[np.argsort(score)][:args.top]
    star.simplify_star_ucsf(top)
    star.write_star(args.output, top)
    return 0
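The loop marked "# TODO parallelize" scores each particle independently, so it maps naturally onto a process pool. A sketch of that idea with concurrent.futures; score_particle here is a hypothetical stand-in for particle_xcorr(row, refmap_ft), not pyem code:

from concurrent.futures import ProcessPoolExecutor
import numpy as np

def score_particle(row_values):
    return float(np.sum(row_values))  # placeholder for a real correlation score

if __name__ == "__main__":
    rows = [np.random.rand(4) for _ in range(8)]  # stand-in for DataFrame rows
    with ProcessPoolExecutor() as ex:
        score = np.fromiter(ex.map(score_particle, rows), dtype=np.float64)
    print(score)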
Example 5
def main(args):
    log = logging.getLogger(__name__)
    log.setLevel(logging.INFO)
    hdlr = logging.StreamHandler(sys.stdout)
    if args.quiet:
        hdlr.setLevel(logging.WARNING)
    else:
        hdlr.setLevel(logging.INFO)
    log.addHandler(hdlr)

    if args.target is None and args.sym is None:
        log.error(
            "At least a target or symmetry group must be provided via --target or --sym"
        )
        return 1
    elif args.target is not None and args.boxsize is None and args.origin is None:
        log.error("An origin must be provided via --boxsize or --origin")
        return 1

    if args.target is not None:
        try:
            args.target = np.array(
                [np.double(tok) for tok in args.target.split(",")])
        except:
            log.error(
                "Target must be comma-separated list of x,y,z coordinates")
            return 1

    if args.origin is not None:
        if args.boxsize is not None:
            log.warn("--origin supersedes --boxsize")
        try:
            args.origin = np.array(
                [np.double(tok) for tok in args.origin.split(",")])
        except:
            log.error(
                "Origin must be comma-separated list of x,y,z coordinates")
            return 1

    if args.sym is not None:
        args.sym = util.relion_symmetry_group(args.sym)

    df = star.parse_star(args.input)

    if args.apix is None:
        args.apix = star.calculate_apix(df)
        if args.apix is None:
            log.warn(
                "Could not compute pixel size, default is 1.0 Angstroms per pixel"
            )
            args.apix = 1.0
            df[star.Relion.MAGNIFICATION] = 10000
            df[star.Relion.DETECTORPIXELSIZE] = 1.0

    if args.cls is not None:
        df = star.select_classes(df, args.cls)

    if args.target is not None:
        if args.origin is not None:
            args.origin /= args.apix
        elif args.boxsize is not None:
            args.origin = np.ones(3) * args.boxsize / 2
        args.target /= args.apix
        c = args.target - args.origin
        c = np.where(np.abs(c) < 1, 0, c)  # Ignore very small coordinates.
        d = np.linalg.norm(c)
        ax = c / d
        cm = util.euler2rot(*np.array(
            [np.arctan2(ax[1], ax[0]),
             np.arccos(ax[2]),
             np.deg2rad(args.psi)]))
        ops = [op.dot(cm) for op in args.sym] if args.sym is not None else [cm]
        dfs = [
            star.transform_star(df,
                                op.T,
                                -d,
                                rotate=args.shift_only,
                                invert=args.target_invert,
                                adjust_defocus=args.adjust_defocus)
            for op in ops
        ]
    elif args.sym is not None:
        dfs = list(
            subparticle_expansion(df, args.sym,
                                  -args.displacement / args.apix))
    else:
        log.error(
            "At least a target or symmetry group must be provided via --target or --sym"
        )
        return 1

    if args.recenter:
        for s in dfs:
            star.recenter(s, inplace=True)

    if args.suffix is None and not args.skip_join:
        if len(dfs) > 1:
            df = util.interleave(dfs)
        else:
            df = dfs[0]
        star.write_star(args.output, df)
    else:
        for i, s in enumerate(dfs):
            star.write_star(os.path.join(args.output, args.suffix + "_%d" % i),
                            s)
    return 0
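util.euler2rot above builds a rotation matrix from three angles (azimuth, polar angle, and the in-plane psi). For reference, a generic ZYZ composition is sketched below; pyem's actual axis order and sign conventions may differ, so treat this only as an illustration:

import numpy as np

def rot_z(t):
    c, s = np.cos(t), np.sin(t)
    return np.array([[c, -s, 0.0], [s, c, 0.0], [0.0, 0.0, 1.0]])

def rot_y(t):
    c, s = np.cos(t), np.sin(t)
    return np.array([[c, 0.0, s], [0.0, 1.0, 0.0], [-s, 0.0, c]])

def euler2rot_zyz(alpha, beta, gamma):
    # One common ZYZ composition; not necessarily pyem's exact convention.
    return rot_z(gamma) @ rot_y(beta) @ rot_z(alpha)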
Example 6
def main(args):
    log = logging.getLogger('root')
    hdlr = logging.StreamHandler(sys.stdout)
    log.addHandler(hdlr)
    log.setLevel(logging.getLevelName(args.loglevel.upper()))
    df = star.parse_star(args.input, keep_index=False)
    star.augment_star_ucsf(df)
    maxshift = np.round(np.max(np.abs(df[star.Relion.ORIGINS].values)))

    if args.map is not None:
        if args.map.endswith(".npy"):
            log.info("Reading precomputed 3D FFT of volume")
            f3d = np.load(args.map)
            log.info("Finished reading 3D FFT of volume")
            if args.size is None:
                args.size = (f3d.shape[0] - 3) // args.pfac
        else:
            vol = mrc.read(args.map, inc_header=False, compat="relion")
            if args.mask is not None:
                mask = mrc.read(args.mask, inc_header=False, compat="relion")
                vol *= mask
            if args.size is None:
                args.size = vol.shape[0]
            if args.crop is not None and args.size // 2 < maxshift + args.crop // 2:
                log.error(
                    "Some shifts are too large to crop (maximum crop is %d)" %
                    (args.size - 2 * maxshift))
                return 1
            log.info("Preparing 3D FFT of volume")
            f3d = vop.vol_ft(vol, pfac=args.pfac, threads=args.threads)
            log.info("Finished 3D FFT of volume")
    else:
        log.error("Please supply a map")
        return 1

    sz = (f3d.shape[0] - 3) // args.pfac
    apix = star.calculate_apix(df) * np.double(args.size) / sz
    sx, sy = np.meshgrid(np.fft.rfftfreq(sz), np.fft.fftfreq(sz))
    s = np.sqrt(sx**2 + sy**2)
    a = np.arctan2(sy, sx)
    log.info("Projection size is %d, unpadded volume size is %d" %
             (args.size, sz))
    log.info("Effective pixel size is %f A/px" % apix)

    if args.subtract and args.size != sz:
        log.error("Volume and projections must be same size when subtracting")
        return 1

    if args.crop is not None and args.size // 2 < maxshift + args.crop // 2:
        log.error("Some shifts are too large to crop (maximum crop is %d)" %
                  (args.size - 2 * maxshift))
        return 1

    ift = None

    with mrc.ZSliceWriter(args.output, psz=apix) as zsw:
        for i, p in df.iterrows():
            f2d = project(f3d,
                          p,
                          s,
                          sx,
                          sy,
                          a,
                          pfac=args.pfac,
                          apply_ctf=args.ctf,
                          size=args.size,
                          flip_phase=args.flip)
            if ift is None:
                ift = irfft2(f2d.copy(),
                             threads=args.threads,
                             planner_effort="FFTW_ESTIMATE",
                             auto_align_input=True,
                             auto_contiguous=True)
            proj = fftshift(
                ift(f2d.copy(),
                    np.zeros(ift.output_shape, dtype=ift.output_dtype)))
            log.debug("%f +/- %f" % (np.mean(proj), np.std(proj)))
            if args.subtract:
                with mrc.ZSliceReader(p["ucsfImagePath"]) as zsr:
                    img = zsr.read(p["ucsfImageIndex"])
                log.debug("%f +/- %f" % (np.mean(img), np.std(img)))
                proj = img - proj
            if args.crop is not None:
                orihalf = args.size // 2
                newhalf = args.crop // 2
                x = orihalf - np.int(np.round(p[star.Relion.ORIGINX]))
                y = orihalf - np.int(np.round(p[star.Relion.ORIGINY]))
                proj = proj[y - newhalf:y + newhalf, x - newhalf:x + newhalf]
            zsw.write(proj)
            log.debug(
                "%d@%s: %d/%d" %
                (p["ucsfImageIndex"], p["ucsfImagePath"], i + 1, df.shape[0]))

    if args.star is not None:
        log.info("Writing output .star file")
        if args.crop is not None:
            df = star.recenter(df, inplace=True)
        if args.subtract:
            df[star.UCSF.IMAGE_ORIGINAL_PATH] = df[star.UCSF.IMAGE_PATH]
            df[star.UCSF.IMAGE_ORIGINAL_INDEX] = df[star.UCSF.IMAGE_INDEX]
        df[star.UCSF.IMAGE_PATH] = args.output
        df[star.UCSF.IMAGE_INDEX] = np.arange(df.shape[0])
        star.simplify_star_ucsf(df)
        star.write_star(args.star, df)
    return 0
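The .npy branch above shows that the padded 3D FFT of the map can be computed once and reused across runs. A sketch of producing that cache with the same pyem calls used in this example (the file paths are hypothetical, and pyem is assumed to be installed and to expose mrc/vop as used above):

import numpy as np
from pyem import mrc, vop  # assumed import path for the modules used above

vol = mrc.read("map.mrc", inc_header=False, compat="relion")  # hypothetical path
f3d = vop.vol_ft(vol, pfac=2, threads=4)
np.save("map_ft.npy", f3d)  # later runs can pass this .npy file as the map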
Example 7
def main(args):
    if args.info:
        args.input.append(args.output)

    df = pd.concat(
        (star.parse_star(inp, augment=args.augment) for inp in args.input),
        join="inner")

    dfaux = None

    if args.cls is not None:
        df = star.select_classes(df, args.cls)

    if args.info:
        if star.is_particle_star(df) and star.Relion.CLASS in df.columns:
            c = df[star.Relion.CLASS].value_counts()
            print("%s particles in %d classes" %
                  ("{:,}".format(df.shape[0]), len(c)))
            print("    ".join([
                '%d: %s (%.2f %%)' % (i, "{:,}".format(s), 100. * s / c.sum())
                for i, s in iteritems(c.sort_index())
            ]))
        elif star.is_particle_star(df):
            print("%s particles" % "{:,}".format(df.shape[0]))
        if star.Relion.MICROGRAPH_NAME in df.columns:
            mgraphcnt = df[star.Relion.MICROGRAPH_NAME].value_counts()
            print(
                "%s micrographs, %s +/- %s particles per micrograph" %
                ("{:,}".format(len(mgraphcnt)), "{:,.3f}".format(
                    np.mean(mgraphcnt)), "{:,.3f}".format(np.std(mgraphcnt))))
        try:
            print("%f A/px (%sX magnification)" %
                  (star.calculate_apix(df), "{:,.0f}".format(
                      df[star.Relion.MAGNIFICATION][0])))
        except KeyError:
            pass
        if len(df.columns.intersection(star.Relion.ORIGINS3D)) > 0:
            print("Largest shift is %f pixels" % np.max(
                np.abs(df[df.columns.intersection(
                    star.Relion.ORIGINS3D)].values)))
        return 0

    if args.drop_angles:
        df.drop(star.Relion.ANGLES, axis=1, inplace=True, errors="ignore")

    if args.drop_containing is not None:
        containing_fields = [
            f for q in args.drop_containing for f in df.columns if q in f
        ]
        if args.invert:
            containing_fields = df.columns.difference(containing_fields)
        df.drop(containing_fields, axis=1, inplace=True, errors="ignore")

    if args.offset_group is not None:
        df[star.Relion.GROUPNUMBER] += args.offset_group

    if args.restack is not None:
        if not args.augment:
            star.augment_star_ucsf(df, inplace=True)
        star.set_original_fields(df, inplace=True)
        df[star.UCSF.IMAGE_PATH] = args.restack
        df[star.UCSF.IMAGE_INDEX] = np.arange(df.shape[0])

    if args.subsample_micrographs is not None:
        if args.bootstrap is not None:
            print("Only particle sampling allows bootstrapping")
            return 1
        mgraphs = df[star.Relion.MICROGRAPH_NAME].unique()
        if args.subsample_micrographs < 1:
            args.subsample_micrographs = np.int(
                max(np.round(args.subsample_micrographs * len(mgraphs)), 1))
        else:
            args.subsample_micrographs = np.int(args.subsample_micrographs)
        ind = np.random.choice(len(mgraphs),
                               size=args.subsample_micrographs,
                               replace=False)
        mask = df[star.Relion.MICROGRAPH_NAME].isin(mgraphs[ind])
        if args.auxout is not None:
            dfaux = df.loc[~mask]
        df = df.loc[mask]

    if args.subsample is not None and args.suffix == "":
        if args.subsample < 1:
            args.subsample = np.int(
                max(np.round(args.subsample * df.shape[0]), 1))
        else:
            args.subsample = np.int(args.subsample)
        ind = np.random.choice(df.shape[0], size=args.subsample, replace=False)
        mask = df.index.isin(ind)
        if args.auxout is not None:
            dfaux = df.loc[~mask]
        df = df.loc[mask]

    if args.copy_angles is not None:
        angle_star = star.parse_star(args.copy_angles, augment=args.augment)
        df = star.smart_merge(df,
                              angle_star,
                              fields=star.Relion.ANGLES,
                              key=args.merge_key)

    if args.copy_alignments is not None:
        align_star = star.parse_star(args.copy_alignments,
                                     augment=args.augment)
        df = star.smart_merge(df,
                              align_star,
                              fields=star.Relion.ALIGNMENTS,
                              key=args.merge_key)

    if args.copy_reconstruct_images is not None:
        recon_star = star.parse_star(args.copy_reconstruct_images,
                                     augment=args.augment)
        df[star.Relion.RECONSTRUCT_IMAGE_NAME] = recon_star[
            star.Relion.IMAGE_NAME]

    if args.transform is not None:
        if args.transform.count(",") == 2:
            r = geom.euler2rot(
                *np.deg2rad([np.double(s) for s in args.transform.split(",")]))
        else:
            r = np.array(json.loads(args.transform))
        df = star.transform_star(df, r, inplace=True)

    if args.invert_hand:
        df = star.invert_hand(df, inplace=True)

    if args.copy_paths is not None:
        path_star = star.parse_star(args.copy_paths)
        star.set_original_fields(df, inplace=True)
        df[star.Relion.IMAGE_NAME] = path_star[star.Relion.IMAGE_NAME]

    if args.copy_ctf is not None:
        ctf_star = pd.concat((star.parse_star(inp, augment=args.augment)
                              for inp in glob.glob(args.copy_ctf)),
                             join="inner")
        df = star.smart_merge(df,
                              ctf_star,
                              star.Relion.CTF_PARAMS,
                              key=args.merge_key)

    if args.copy_micrograph_coordinates is not None:
        coord_star = pd.concat(
            (star.parse_star(inp, augment=args.augment)
             for inp in glob.glob(args.copy_micrograph_coordinates)),
            join="inner")
        df = star.smart_merge(df,
                              coord_star,
                              fields=star.Relion.MICROGRAPH_COORDS,
                              key=args.merge_key)

    if args.scale is not None:
        star.scale_coordinates(df, args.scale, inplace=True)
        star.scale_origins(df, args.scale, inplace=True)
        star.scale_magnification(df, args.scale, inplace=True)

    if args.scale_particles is not None:
        star.scale_origins(df, args.scale_particles, inplace=True)
        star.scale_magnification(df, args.scale_particles, inplace=True)

    if args.scale_coordinates is not None:
        star.scale_coordinates(df, args.scale_coordinates, inplace=True)

    if args.scale_origins is not None:
        star.scale_origins(df, args.scale_origins, inplace=True)

    if args.scale_magnification is not None:
        star.scale_magnification(df, args.scale_magnification, inplace=True)

    if args.scale_apix is not None:
        star.scale_apix(df, args.scale_apix, inplace=True)

    if args.recenter:
        df = star.recenter(df, inplace=True)

    if args.zero_origins:
        df = star.zero_origins(df, inplace=True)

    if args.pick:
        df.drop(df.columns.difference(star.Relion.PICK_PARAMS),
                axis=1,
                inplace=True,
                errors="ignore")

    if args.subsample is not None and args.suffix != "":
        if args.subsample < 1:
            print("Specific integer sample size")
            return 1
        nsamplings = args.bootstrap if args.bootstrap is not None else df.shape[
            0] // np.int(args.subsample)
        inds = np.random.choice(df.shape[0],
                                size=(nsamplings, np.int(args.subsample)),
                                replace=args.bootstrap is not None)
        for i, ind in enumerate(inds):
            star.write_star(
                os.path.join(
                    args.output,
                    os.path.basename(args.input[0])[:-5] + args.suffix +
                    "_%d" % (i + 1)), df.iloc[ind])

    if args.to_micrographs:
        df = star.to_micrographs(df)

    if args.micrograph_range:
        df.set_index(star.Relion.MICROGRAPH_NAME, inplace=True)
        m, n = [int(tok) for tok in args.micrograph_range.split(",")]
        mg = df.index.unique().sort_values()
        outside = list(range(0, m)) + list(range(n, len(mg)))
        dfaux = df.loc[mg[outside]].reset_index()
        df = df.loc[mg[m:n]].reset_index()

    if args.micrograph_path is not None:
        df = star.replace_micrograph_path(df,
                                          args.micrograph_path,
                                          inplace=True)

    if args.min_separation is not None:
        gb = df.groupby(star.Relion.MICROGRAPH_NAME)
        dupes = []
        for n, g in gb:
            nb = algo.query_connected(
                g[star.Relion.COORDS].values - g[star.Relion.ORIGINS],
                args.min_separation / star.calculate_apix(df))
            dupes.extend(g.index[~np.isnan(nb)])
        dfaux = df.loc[dupes]
        df.drop(dupes, inplace=True)

    if args.merge_source is not None:
        if args.merge_fields is not None:
            if "," in args.merge_fields:
                args.merge_fields = args.merge_fields.split(",")
            else:
                args.merge_fields = [args.merge_fields]
        else:
            print("Merge fields must be specified using --merge-fields")
            return 1
        if args.merge_key is not None:
            if "," in args.merge_key:
                args.merge_key = args.merge_key.split(",")
        if args.by_original:
            args.by_original = star.original_field(args.merge_key)
        else:
            args.by_original = args.merge_key
        merge_star = star.parse_star(args.merge_source, augment=args.augment)
        df = star.smart_merge(df,
                              merge_star,
                              fields=args.merge_fields,
                              key=args.merge_key,
                              left_key=args.by_original)

    if args.revert_original:
        df = star.revert_original(df, inplace=True)

    if args.set_optics is not None:
        tok = args.set_optics.split(",")
        df = star.set_optics_groups(df,
                                    sep=tok[0],
                                    idx=int(tok[1]),
                                    inplace=True)
        df.dropna(axis=0, how="any", inplace=True)

    if args.drop_optics_group is not None:
        idx = df[star.Relion.OPTICSGROUP].isin(args.drop_optics_group)
        if not np.any(idx):
            idx = df[star.Relion.OPTICSGROUPNAME].isin(args.drop_optics_group)
        if not np.any(idx):
            print("No group found to drop")
            return 1
        df = df.loc[~idx]

    if args.split_micrographs:
        dfs = star.split_micrographs(df)
        for mg in dfs:
            star.write_star(
                os.path.join(args.output,
                             os.path.basename(mg)[:-4]) + args.suffix, dfs[mg])
        return 0

    if args.auxout is not None and dfaux is not None:
        if not args.relion2:
            df = star.remove_deprecated_relion2(dfaux, inplace=True)
            star.write_star(args.output,
                            df,
                            resort_records=args.sort,
                            simplify=args.augment_output,
                            optics=True)
        else:
            df = star.remove_new_relion31(dfaux, inplace=True)
            star.write_star(args.output,
                            df,
                            resort_records=args.sort,
                            simplify=args.augment_output,
                            optics=False)

    if args.output is not None:
        if not args.relion2:  # Relion 3.1 style output.
            df = star.remove_deprecated_relion2(df, inplace=True)
            star.write_star(args.output,
                            df,
                            resort_records=args.sort,
                            simplify=args.augment_output,
                            optics=True)
        else:
            df = star.remove_new_relion31(df, inplace=True)
            star.write_star(args.output,
                            df,
                            resort_records=args.sort,
                            simplify=args.augment_output,
                            optics=False)
    return 0
Example 8
def main(args):
    log = logging.getLogger('root')
    hdlr = logging.StreamHandler(sys.stdout)
    log.addHandler(hdlr)
    log.setLevel(logging.getLevelName(args.loglevel.upper()))

    dfs = [metadata.parse_fx_par(fn) for fn in args.input]
    n = dfs[0].shape[0]
    if not np.all(np.array([df.shape[0] for df in dfs]) == n):
        log.error("Input files are not aligned!")
        return 1
    df = pd.concat(dfs, axis=0, ignore_index=True)
    df["CLASS"] = np.repeat(np.arange(1, len(dfs) + 1), n)

    if args.min_occ:
        df = df[df["OCC"] >= args.min_occ]

    df = df.sort_values(by="OCC")
    df = df.drop_duplicates("C", keep="last")
    df = df.sort_values(by="C")
    df.reset_index(inplace=True)

    if args.min_score is not None:
        if args.min_score < 1:
            args.min_score = np.percentile(df["SCORE"],
                                           (1 - args.min_score) * 100)
        df = df.loc[df["SCORE"] >= args.min_score]

    if args.merge is not None:
        dfo = star.parse_star(args.merge)
        args.apix = star.calculate_apix(dfo)
        args.cs = dfo.iloc[0][star.Relion.CS]
        args.ac = dfo.iloc[0][star.Relion.AC]
        args.voltage = dfo.iloc[0][star.Relion.VOLTAGE]
        df = metadata.par2star(df,
                               data_path=args.stack,
                               apix=args.apix,
                               cs=args.cs,
                               ac=args.ac,
                               kv=args.voltage,
                               invert_eulers=args.invert_eulers)
        if args.stack is None:
            df[star.UCSF.IMAGE_INDEX] = dfo[star.UCSF.IMAGE_INDEX]
            df[star.UCSF.IMAGE_PATH] = dfo[star.UCSF.IMAGE_PATH]
        key = [star.UCSF.IMAGE_INDEX, star.UCSF.IMAGE_PATH]
        fields = star.Relion.MICROGRAPH_COORDS + [
            star.UCSF.IMAGE_ORIGINAL_INDEX, star.UCSF.IMAGE_ORIGINAL_PATH
        ] + [star.Relion.OPTICSGROUP
             ] + star.Relion.OPTICSGROUPTABLE + [star.Relion.RANDOMSUBSET]
        df = star.smart_merge(df, dfo, fields=fields, key=key)
        if args.revert_original:
            df = star.revert_original(df, inplace=True)
    else:
        df = metadata.par2star(df,
                               data_path=args.stack,
                               apix=args.apix,
                               cs=args.cs,
                               ac=args.ac,
                               kv=args.voltage,
                               invert_eulers=args.invert_eulers)

    if args.cls is not None:
        df = star.select_classes(df, args.cls)

    df = star.check_defaults(df, inplace=True)
    df = star.compatible(df, relion2=args.relion2, inplace=True)
    star.write_star(args.output, df, optics=(not args.relion2))
    return 0
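When --min-score is given as a fraction, the code above converts it into a percentile cut on the SCORE column. A tiny numpy illustration of that conversion with hypothetical scores:

import numpy as np

scores = np.random.normal(20.0, 5.0, size=1000)  # hypothetical SCORE values
min_score = 0.25                                 # keep roughly the top 25%
threshold = np.percentile(scores, (1 - min_score) * 100)
kept = scores[scores >= threshold]
print(threshold, kept.size)                      # size is roughly 250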
Example 9
File: star.py  Project: dzyla/pyem
def main(args):
    if args.info:
        args.input.append(args.output)

    df = pd.concat(
        (star.parse_star(inp, augment=args.augment) for inp in args.input),
        join="inner")

    dfaux = None

    if args.cls is not None:
        df = star.select_classes(df, args.cls)

    if args.info:
        if star.is_particle_star(df) and star.Relion.CLASS in df.columns:
            c = df[star.Relion.CLASS].value_counts()
            print("%s particles in %d classes" %
                  ("{:,}".format(df.shape[0]), len(c)))
            print("    ".join([
                '%d: %s (%.2f %%)' % (i, "{:,}".format(s), 100. * s / c.sum())
                for i, s in c.sort_index().iteritems()
            ]))
        elif star.is_particle_star(df):
            print("%s particles" % "{:,}".format(df.shape[0]))
        if star.Relion.MICROGRAPH_NAME in df.columns:
            mgraphcnt = df[star.Relion.MICROGRAPH_NAME].value_counts()
            print(
                "%s micrographs, %s +/- %s particles per micrograph" %
                ("{:,}".format(len(mgraphcnt)), "{:,.3f}".format(
                    np.mean(mgraphcnt)), "{:,.3f}".format(np.std(mgraphcnt))))
        try:
            print("%f A/px (%sX magnification)" %
                  (star.calculate_apix(df), "{:,.0f}".format(
                      df[star.Relion.MAGNIFICATION][0])))
        except KeyError:
            pass
        return 0

    if args.drop_angles:
        df.drop(star.Relion.ANGLES, axis=1, inplace=True, errors="ignore")

    if args.drop_containing is not None:
        containing_fields = [
            f for q in args.drop_containing for f in df.columns if q in f
        ]
        if args.invert:
            containing_fields = df.columns.difference(containing_fields)
        df.drop(containing_fields, axis=1, inplace=True, errors="ignore")

    if args.offset_group is not None:
        df[star.Relion.GROUPNUMBER] += args.offset_group

    if args.subsample_micrographs is not None:
        if args.bootstrap is not None:
            print("Only particle sampling allows bootstrapping")
            return 1
        mgraphs = df[star.Relion.MICROGRAPH_NAME].unique()
        if args.subsample_micrographs < 1:
            args.subsample_micrographs = np.int(
                max(np.round(args.subsample_micrographs * len(mgraphs)), 1))
        else:
            args.subsample_micrographs = np.int(args.subsample_micrographs)
        ind = np.random.choice(len(mgraphs),
                               size=args.subsample_micrographs,
                               replace=False)
        mask = df[star.Relion.MICROGRAPH_NAME].isin(mgraphs[ind])
        if args.auxout is not None:
            dfaux = df.loc[~mask]
        df = df.loc[mask]

    if args.subsample is not None and args.suffix == "":
        if args.subsample < 1:
            args.subsample = np.int(
                max(np.round(args.subsample * df.shape[0]), 1))
        else:
            args.subsample = np.int(args.subsample)
        ind = np.random.choice(df.shape[0], size=args.subsample, replace=False)
        mask = df.index.isin(ind)
        if args.auxout is not None:
            dfaux = df.loc[~mask]
        df = df.loc[mask]

    if args.copy_angles is not None:
        angle_star = star.parse_star(args.copy_angles, augment=args.augment)
        df = star.smart_merge(df, angle_star, fields=star.Relion.ANGLES)

    if args.transform is not None:
        if args.transform.count(",") == 2:
            r = star.euler2rot(
                *np.deg2rad([np.double(s) for s in args.transform.split(",")]))
        else:
            r = np.array(json.loads(args.transform))
        df = star.transform_star(df, r, inplace=True)

    if args.invert_hand:
        df[star.Relion.ANGLEROT] = -df[star.Relion.ANGLEROT]
        df[star.Relion.ANGLETILT] = 180 - df[star.Relion.ANGLETILT]

    if args.copy_paths is not None:
        path_star = star.parse_star(args.copy_paths)
        df[star.Relion.IMAGE_NAME] = path_star[star.Relion.IMAGE_NAME]

    if args.copy_ctf is not None:
        ctf_star = pd.concat((star.parse_star(inp, augment=args.augment)
                              for inp in glob.glob(args.copy_ctf)),
                             join="inner")
        df = star.smart_merge(df, ctf_star, star.Relion.CTF_PARAMS)

    if args.copy_micrograph_coordinates is not None:
        coord_star = pd.concat(
            (star.parse_star(inp, augment=args.augment)
             for inp in glob.glob(args.copy_micrograph_coordinates)),
            join="inner")
        df = star.smart_merge(df,
                              coord_star,
                              fields=star.Relion.MICROGRAPH_COORDS)

    if args.scale is not None:
        star.scale_coordinates(df, args.scale, inplace=True)
        star.scale_origins(df, args.scale, inplace=True)
        star.scale_magnification(df, args.scale, inplace=True)

    if args.scale_particles is not None:
        star.scale_origins(df, args.scale_particles, inplace=True)
        star.scale_magnification(df, args.scale_particles, inplace=True)

    if args.scale_coordinates is not None:
        star.scale_coordinates(df, args.scale_coordinates, inplace=True)

    if args.scale_origins is not None:
        star.scale_origins(df, args.scale_origins, inplace=True)

    if args.scale_magnification is not None:
        star.scale_magnification(df, args.scale_magnification, inplace=True)

    if args.recenter:
        df = star.recenter(df, inplace=True)

    if args.zero_origins:
        df = star.zero_origins(df, inplace=True)

    if args.pick:
        df.drop(df.columns.difference(star.Relion.PICK_PARAMS),
                axis=1,
                inplace=True,
                errors="ignore")

    if args.subsample is not None and args.suffix != "":
        if args.subsample < 1:
            print("Specific integer sample size")
            return 1
        nsamplings = args.bootstrap if args.bootstrap is not None else df.shape[
            0] // np.int(args.subsample)
        inds = np.random.choice(df.shape[0],
                                size=(nsamplings, np.int(args.subsample)),
                                replace=args.bootstrap is not None)
        for i, ind in enumerate(inds):
            star.write_star(
                os.path.join(
                    args.output,
                    os.path.basename(args.input[0])[:-5] + args.suffix +
                    "_%d" % (i + 1)), df.iloc[ind])

    if args.to_micrographs:
        gb = df.groupby(star.Relion.MICROGRAPH_NAME)
        mu = gb.mean()
        df = mu[[
            c for c in star.Relion.CTF_PARAMS + star.Relion.MICROSCOPE_PARAMS +
            [star.Relion.MICROGRAPH_NAME] if c in mu
        ]].reset_index()

    if args.micrograph_range:
        df.set_index(star.Relion.MICROGRAPH_NAME, inplace=True)
        m, n = [int(tok) for tok in args.micrograph_range.split(",")]
        mg = df.index.unique().sort_values()
        outside = list(range(0, m)) + list(range(n, len(mg)))
        dfaux = df.loc[mg[outside]].reset_index()
        df = df.loc[mg[m:n]].reset_index()

    if args.micrograph_path is not None:
        df = star.replace_micrograph_path(df,
                                          args.micrograph_path,
                                          inplace=True)

    if args.min_separation is not None:
        gb = df.groupby(star.Relion.MICROGRAPH_NAME)
        dupes = []
        for n, g in gb:
            nb = algo.query_connected(
                g[star.Relion.COORDS],
                args.min_separation / star.calculate_apix(df))
            dupes.extend(g.index[~np.isnan(nb)])
        dfaux = df.loc[dupes]
        df.drop(dupes, inplace=True)

    if args.merge_source is not None:
        if args.merge_fields is not None:
            if "," in args.merge_fields:
                args.merge_fields = args.merge_fields.split(",")
            else:
                args.merge_fields = [args.merge_fields]
        else:
            print("Merge fields must be specified using --merge-fields")
            return 1
        if args.merge_key is not None:
            if "," in args.merge_key:
                args.merge_key = args.merge_key.split(",")
        merge_star = star.parse_star(args.merge_source, augment=args.augment)
        df = star.smart_merge(df,
                              merge_star,
                              fields=args.merge_fields,
                              key=args.merge_key)

    if args.split_micrographs:
        dfs = star.split_micrographs(df)
        for mg in dfs:
            star.write_star(
                os.path.join(args.output,
                             os.path.basename(mg)[:-4]) + args.suffix, dfs[mg])
        return 0

    if args.auxout is not None and dfaux is not None:
        star.write_star(args.auxout, dfaux, simplify=args.augment)

    if args.output is not None:
        star.write_star(args.output, df, simplify=args.augment)
    return 0
Example 10
def main(args):
    """
    Projection subtraction program entry point.
    :param args: Command-line arguments parsed by ArgumentParser.parse_args()
    :return: Exit status
    """
    log = logging.getLogger('root')
    hdlr = logging.StreamHandler(sys.stdout)
    log.addHandler(hdlr)
    log.setLevel(logging.getLevelName(args.loglevel.upper()))

    if args.dest is None and args.suffix == "":
        args.dest = ""
        args.suffix = "_subtracted"

    log.info("Reading particle .star file")
    df = star.parse_star(args.input, keep_index=False)
    star.augment_star_ucsf(df)
    if not args.original:
        df[star.UCSF.IMAGE_ORIGINAL_PATH] = df[star.UCSF.IMAGE_PATH]
        df[star.UCSF.IMAGE_ORIGINAL_INDEX] = df[star.UCSF.IMAGE_INDEX]
    df.sort_values(star.UCSF.IMAGE_ORIGINAL_PATH,
                   inplace=True,
                   kind="mergesort")
    gb = df.groupby(star.UCSF.IMAGE_ORIGINAL_PATH)
    df[star.UCSF.IMAGE_INDEX] = gb.cumcount()
    df[star.UCSF.IMAGE_PATH] = df[star.UCSF.IMAGE_ORIGINAL_PATH].map(
        lambda x: os.path.join(
            args.dest, args.prefix + os.path.basename(x).replace(
                ".mrcs", args.suffix + ".mrcs")))

    if args.submap_ft is None:
        log.info("Reading volume")
        submap = mrc.read(args.submap, inc_header=False, compat="relion")
        if args.submask is not None:
            log.info("Masking volume")
            submask = mrc.read(args.submask, inc_header=False, compat="relion")
            submap *= submask
        log.info("Preparing 3D FFT of volume")
        submap_ft = vop.vol_ft(submap,
                               pfac=args.pfac,
                               threads=min(args.threads, cpu_count()))
        log.info("Finished 3D FFT of volume")
    else:
        log.info("Loading 3D FFT from %s" % args.submap_ft)
        submap_ft = np.load(args.submap_ft)
        log.info("Loaded 3D FFT from %s" % args.submap_ft)

    sz = (submap_ft.shape[0] - 3) // args.pfac

    maxshift = np.round(np.max(np.abs(df[star.Relion.ORIGINS].values)))
    if args.crop is not None and sz < 2 * maxshift + args.crop:
        log.error("Some shifts are too large to crop (maximum crop is %d)" %
                  (sz - 2 * maxshift))
        return 1

    sx, sy = np.meshgrid(np.fft.rfftfreq(sz), np.fft.fftfreq(sz))
    s = np.sqrt(sx**2 + sy**2)
    r = s * sz
    r = np.round(r).astype(np.int64)
    r[r > sz // 2] = sz // 2 + 1
    nr = np.max(r) + 1
    a = np.arctan2(sy, sx)

    if args.refmap is not None:
        coefs_method = 1
        if args.refmap_ft is None:
            refmap = mrc.read(args.refmap, inc_header=False, compat="relion")
            refmap_ft = vop.vol_ft(refmap,
                                   pfac=args.pfac,
                                   threads=min(args.threads, cpu_count()))
        else:
            log.info("Loading 3D FFT from %s" % args.refmap_ft)
            refmap_ft = np.load(args.refmap_ft)
            log.info("Loaded 3D FFT from %s" % args.refmap_ft)
    else:
        coefs_method = 0
        refmap_ft = np.empty(submap_ft.shape, dtype=submap_ft.dtype)

    apix = star.calculate_apix(df)
    log.info("Computed pixel size is %f A" % apix)

    log.debug("Grouping particles by output stack")
    gb = df.groupby(star.UCSF.IMAGE_PATH)

    iothreads = threading.BoundedSemaphore(args.io_thread_pairs)
    qsize = args.io_queue_length
    fftthreads = args.fft_threads

    def init():
        global tls
        tls = threading.local()

    log.info("Instantiating thread pool with %d workers" % args.threads)
    pool = Pool(processes=args.threads, initializer=init)
    threads = []

    log.info("Performing projection subtraction")

    try:
        for fname, particles in gb:
            log.debug("Instantiating queue")
            queue = Queue.Queue(maxsize=qsize)
            log.debug("Create producer for %s" % fname)
            prod = threading.Thread(target=producer,
                                    args=(pool, queue, submap_ft, refmap_ft,
                                          fname, particles, sx, sy, s, a, apix,
                                          coefs_method, r, nr, fftthreads,
                                          args.crop, args.pfac))
            log.debug("Create consumer for %s" % fname)
            cons = threading.Thread(target=consumer,
                                    args=(queue, fname, apix, iothreads))
            threads.append((prod, cons))
            iothreads.acquire()
            log.debug("iotheads at %d" % iothreads._Semaphore__value)
            log.debug("Start consumer for %s" % fname)
            cons.start()
            log.debug("Start producer for %s" % fname)
            prod.start()
    except KeyboardInterrupt:
        log.debug("Main thread wants out!")

    for pair in threads:
        for thread in pair:
            try:
                thread.join()
            except RuntimeError as e:
                log.debug(e)

    pool.close()
    pool.join()
    pool.terminate()

    log.info("Finished projection subtraction")

    log.info("Writing output .star file")
    if args.crop is not None:
        df = star.recenter(df, inplace=True)
    star.simplify_star_ucsf(df)
    star.write_star(args.output, df)

    return 0
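Examples 10 and 15 hand each output stack to a producer/consumer thread pair that communicates through a bounded queue. The producer and consumer referenced above are pyem helpers not shown here; the sketch below is only a self-contained illustration of the pattern, using a sentinel value for shutdown:

import queue
import threading

def produce(q, items):
    for item in items:
        q.put(item)  # blocks when the queue is full, providing back-pressure
    q.put(None)      # sentinel: no more work

def consume(q, results):
    while True:
        item = q.get()
        if item is None:
            break
        results.append(item * item)  # stand-in for writing one particle image

q = queue.Queue(maxsize=4)
results = []
threads = [threading.Thread(target=produce, args=(q, range(10))),
           threading.Thread(target=consume, args=(q, results))]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(results)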
Example 11
def main(args):
    """ Main SNR-measuring function """

    # Load STAR file and neural network
    star_file = load_star(args.input_micrographs)
    num_mics = len(star_file)
    apix = star.calculate_apix(star_file)
    cutoff_frequency = 1. / args.max_resolution
    nn = load_trained_model(args.model)
    orig, even, odd = args.even_odd_suffix.split(",")
    phaseflip = args.phaseflip
    augment = args.augment

    SNR_df = pd.DataFrame(columns=[
        "MicrographName", "var_S", "var_N_noisy", "var_N_denoised", "var_B",
        "frequencies", "svar_S", "svar_N_noisy", "svar_N_denoised", "svar_B"
    ])

    # Main loop: denoise the even/odd micrograph pairs and measure SNR
    for i, metadata in tqdm(star_file.iterrows(),
                            desc="Denoising",
                            total=num_mics):

        mic_file = metadata[star.Relion.MICROGRAPH_NAME]

        # Pre-calculate frequencies and angles
        if not i:
            first_mic = load_mic(mic_file)
            freqs, angles = get_mic_freqs(first_mic, apix, angles=True)

        # Bin and denoise the even and odd micrographs
        even_mic_file = mic_file.replace(orig, even)
        Re, De = process_snr(nn,
                             even_mic_file,
                             metadata,
                             freqs,
                             angles,
                             apix,
                             cutoff_frequency,
                             phaseflip=phaseflip,
                             augment=augment)

        odd_mic_file = mic_file.replace(orig, odd)
        Ro, Do = process_snr(nn,
                             odd_mic_file,
                             metadata,
                             freqs,
                             angles,
                             apix,
                             cutoff_frequency,
                             phaseflip=phaseflip,
                             augment=augment)

        # Calculate variances and spectral variances for plotting
        var_S, var_N_noisy, var_N_denoised, var_B = get_variances(
            Re, Ro, De, Do)

        frequencies, svar_S, svar_N_noisy, svar_N_denoised, svar_B = \
            get_spectral_variances(Re, Ro, De, Do, apix=apix)

        SNR_df.loc[i] = [
            mic_file, var_S, var_N_noisy, var_N_denoised, var_B, frequencies,
            svar_S, svar_N_noisy, svar_N_denoised, svar_B
        ]

    SNR_df.to_pickle(args.output_dataframe)
    return
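get_variances above works on raw and denoised even/odd half-micrographs. One common way to split such half-pairs into signal and noise terms is to take the even/odd covariance as the shared signal variance and the remainder as noise; whether pyem's get_variances does exactly this is an assumption, so the sketch below is only illustrative:

import numpy as np

rng = np.random.default_rng(0)
signal = rng.normal(size=(64, 64))
even = signal + rng.normal(scale=0.5, size=signal.shape)  # independent noise
odd = signal + rng.normal(scale=0.5, size=signal.shape)   # independent noise

cov = np.mean((even - even.mean()) * (odd - odd.mean()))
var_S = cov               # shared (signal) variance estimate, roughly 1.0
var_N = even.var() - cov  # residual (noise) variance estimate, roughly 0.25
print(var_S, var_N)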
Example 12
def generate_training_data(training_mics,
                           cutoff,
                           training_data,
                           suffixes,
                           window=192,
                           phaseflip=True):
    """ Generate the training data given micrographs and their CTF information

    Keyword arguments:
    training_mics -- Micrograph STAR file with CTF information for each image
    cutoff -- Spatial frequency for Fourier cropping an image
    training_data -- Filename for the HDF file that is created 

    It is presumed that all images have the same shape and pixel size. 
    By default, phase-flipping is performed to correct for the CTF.
    """

    star_file = load_star(training_mics)
    apix = star.calculate_apix(star_file)
    n_mics = len(star_file)

    dset_file = File(training_data, "w")
    dset_shape, n_patches, mic_freqs, mic_angles = get_dset_shape(
        star_file, window, apix, cutoff)

    even_dset = dset_file.create_dataset("even", dset_shape, dtype="float32")
    odd_dset = dset_file.create_dataset("odd", dset_shape, dtype="float32")

    if len(suffixes.split(",")) != 3:
        raise Exception("Improperly formatted suffixes for even/odd mics!")
    orig, even, odd = suffixes.split(",")

    for i, metadata in tqdm(star_file.iterrows(),
                            desc="Pre-processing",
                            total=n_mics):

        mic_file = metadata[star.Relion.MICROGRAPH_NAME]
        even_file = mic_file.replace(orig, even)
        odd_file = mic_file.replace(orig, odd)

        mic_even_patches, apix_bin = process(metadata,
                                             cutoff,
                                             window,
                                             even_file,
                                             mic_freqs,
                                             mic_angles,
                                             phaseflip=phaseflip)

        mic_odd_patches, apix_bin = process(metadata,
                                            cutoff,
                                            window,
                                            odd_file,
                                            mic_freqs,
                                            mic_angles,
                                            phaseflip=phaseflip)

        even_dset[i * n_patches:(i + 1) * n_patches] = mic_even_patches
        odd_dset[i * n_patches:(i + 1) * n_patches] = mic_odd_patches

    even_dset.attrs['apix'] = apix_bin
    even_dset.attrs['phaseflip'] = phaseflip

    odd_dset.attrs['apix'] = apix_bin
    odd_dset.attrs['phaseflip'] = phaseflip

    dset_file.close()
    return
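The training file written above contains "even" and "odd" datasets, each carrying apix and phaseflip attributes. Reading it back with h5py is straightforward; the filename here is hypothetical:

import h5py

with h5py.File("training_data.h5", "r") as f:  # hypothetical filename
    even = f["even"][:]
    odd = f["odd"][:]
    apix = f["even"].attrs["apix"]
    phaseflip = f["even"].attrs["phaseflip"]
print(even.shape, odd.shape, apix, phaseflip)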
Example 13
def main(args):
    log = logging.getLogger(__name__)
    log.setLevel(logging.INFO)
    hdlr = logging.StreamHandler(sys.stdout)
    if args.quiet:
        hdlr.setLevel(logging.WARNING)
    else:
        hdlr.setLevel(logging.INFO)
    log.addHandler(hdlr)

    if args.markers is None and args.target is None and args.sym is None:
        log.error(
            "A marker or symmetry group must be provided via --target, --markers, or --sym"
        )
        return 1
    elif args.sym is None and args.markers is None and args.boxsize is None and args.origin is None:
        log.error(
            "An origin must be provided via --boxsize, --origin, or --markers")
        return 1
    elif args.sym is not None and args.markers is None and args.target is None and \
            (args.boxsize is not None or args.origin is not None):
        log.warn("Symmetry expansion alone will ignore --target or --origin")

    if args.target is not None:
        try:
            args.target = np.array(
                [np.double(tok) for tok in args.target.split(",")])
        except:
            log.error(
                "Target must be comma-separated list of x,y,z coordinates")
            return 1

    if args.origin is not None:
        if args.boxsize is not None:
            logger.warn("--origin supersedes --boxsize")
        try:
            args.origin = np.array(
                [np.double(tok) for tok in args.origin.split(",")])
        except:
            log.error(
                "Origin must be comma-separated list of x,y,z coordinates")
            return 1

    if args.marker_sym is not None:
        args.marker_sym = relion_symmetry_group(args.marker_sym)

    star = parse_star(args.input, keep_index=False)

    if args.apix is None:
        args.apix = calculate_apix(star)
        if args.apix is None:
            log.warn(
                "Could not compute pixel size, default is 1.0 Angstroms per pixel"
            )
            args.apix = 1.0

    if args.cls is not None:
        star = select_classes(star, args.cls)

    cmms = []

    if args.markers is not None:
        cmmfiles = glob.glob(args.markers)
        for cmmfile in cmmfiles:
            for cmm in parse_cmm(cmmfile):
                cmms.append(cmm / args.apix)

    if args.target is not None:
        cmms.append(args.target / args.apix)

    stars = []

    if len(cmms) > 0:
        if args.origin is not None:
            args.origin /= args.apix
        elif args.boxsize is not None:
            args.origin = np.ones(3) * args.boxsize / 2
        else:
            log.warn("Using first marker as origin")
            if len(cmms) == 1:
                log.error(
                    "Using first marker as origin, expected at least two markers"
                )
                return 1
            args.origin = cmms[0]
            cmms = cmms[1:]

        markers = [cmm - args.origin for cmm in cmms]

        if args.marker_sym is not None and len(markers) == 1:
            markers = [op.dot(markers[0]) for op in args.marker_sym]
        elif args.marker_sym is not None:
            log.error(
                "Exactly one marker is required for symmetry-derived subparticles"
            )
            return 1

        rots = [euler2rot(*np.deg2rad(r[1])) for r in star[ANGLES].iterrows()]
        #origins = star[ORIGINS].copy()
        for m in markers:
            d = np.linalg.norm(m)
            ax = m / d
            op = euler2rot(
                *np.array([np.arctan2(ax[1], ax[0]),
                           np.arccos(ax[2]), 0.]))
            stars.append(transform_star(star, op.T, -d, rots=rots))

    if args.sym is not None:
        args.sym = relion_symmetry_group(args.sym)
        if len(stars) > 0:
            stars = [
                se for se in subparticle_expansion(
                    s, args.sym, -args.displacement / args.apix) for s in stars
            ]
        else:
            stars = list(
                subparticle_expansion(star, args.sym,
                                      -args.displacement / args.apix))

    if args.recenter:
        for s in stars:
            recenter(s, inplace=True)

    if args.suffix is None and not args.skip_join:
        if len(stars) > 1:
            star = interleave(stars)
        else:
            star = stars[0]
        write_star(args.output, star)
    else:
        for i, star in enumerate(stars):
            write_star(os.path.join(args.output, args.suffix + "_%d" % i),
                       star)
    return 0
Example 14
def main(args):
    log = logging.getLogger(__name__)
    hdlr = logging.StreamHandler(sys.stdout)
    log.addHandler(hdlr)
    log.setLevel(logging.getLevelName(args.loglevel.upper()))

    if args.target is None and args.sym is None and args.transform is None and args.euler is None:
        log.error("At least a target, transformation matrix, Euler angles, or a symmetry group must be provided")
        return 1
    elif (args.target is not None or args.transform is not None) and args.boxsize is None and args.origin is None:
        log.error("An origin must be provided via --boxsize or --origin")
        return 1

    if args.apix is None:
        df = star.parse_star(args.input, nrows=1)
        args.apix = star.calculate_apix(df)
        if args.apix is None:
            log.warn("Could not compute pixel size, default is 1.0 Angstroms per pixel")
            args.apix = 1.0
            df[star.Relion.MAGNIFICATION] = 10000
            df[star.Relion.DETECTORPIXELSIZE] = 1.0

    if args.target is not None:
        try:
            args.target = np.array([np.double(tok) for tok in args.target.split(",")])
        except:
            log.error("Target must be comma-separated list of x,y,z coordinates")
            return 1

    if args.euler is not None:
        try:
            args.euler = np.deg2rad(np.array([np.double(tok) for tok in args.euler.split(",")]))
            args.transform = np.zeros((3, 4))
            args.transform[:, :3] = geom.euler2rot(*args.euler)
            if args.target is not None:
                args.transform[:, -1] = args.target
        except:
            log.error("Euler angles must be comma-separated list of rotation, tilt, skew in degrees")
            return 1

    if args.transform is not None and not hasattr(args.transform, "dtype"):
        if args.target is not None:
            log.warn("--target supersedes --transform")
        try:
            args.transform = np.array(json.loads(args.transform))
        except:
            log.error("Transformation matrix must be in JSON/Numpy format")
            return 1

    if args.origin is not None:
        if args.boxsize is not None:
            log.warn("--origin supersedes --boxsize")
        try:
            args.origin = np.array([np.double(tok) for tok in args.origin.split(",")])
            args.origin /= args.apix
        except:
            log.error("Origin must be comma-separated list of x,y,z coordinates")
            return 1
    elif args.boxsize is not None:
        args.origin = np.ones(3) * args.boxsize / 2
    
    if args.sym is not None:
        args.sym = util.relion_symmetry_group(args.sym)

    df = star.parse_star(args.input)

    if star.calculate_apix(df) != args.apix:
        log.warn("Using specified pixel size of %f instead of calculated size %f" %
                 (args.apix, star.calculate_apix(df)))

    if args.cls is not None:
        df = star.select_classes(df, args.cls)

    if args.target is not None:
        args.target /= args.apix
        c = args.target - args.origin
        c = np.where(np.abs(c) < 1, 0, c)  # Ignore very small coordinates.
        d = np.linalg.norm(c)
        ax = c / d
        r = geom.euler2rot(*np.array([np.arctan2(ax[1], ax[0]), np.arccos(ax[2]), np.deg2rad(args.psi)]))
        d = -d
    elif args.transform is not None:
        r = args.transform[:, :3]
        if args.transform.shape[1] == 4:
            d = args.transform[:, -1] / args.apix
            d = r.dot(args.origin) + d - args.origin
        else:
            d = 0
    elif args.sym is not None:
        r = np.identity(3)
        d = -args.displacement / args.apix
    else:
        log.error("At least a target or symmetry group must be provided via --target or --sym")
        return 1

    log.debug("Final rotation: %s" % str(r).replace("\n", "\n" + " " * 16))
    ops = [op.dot(r.T) for op in args.sym] if args.sym is not None else [r.T]
    log.debug("Final translation: %s (%f px)" % (str(d), np.linalg.norm(d)))
    dfs = list(subparticle_expansion(df, ops, d, rotate=args.shift_only, invert=args.invert, adjust_defocus=args.adjust_defocus))
 
    if args.recenter:
        for s in dfs:
            star.recenter(s, inplace=True)
    
    if args.suffix is None and not args.skip_join:
        if len(dfs) > 1:
            df = util.interleave(dfs)
        else:
            df = dfs[0]
        df = star.compatible(df, relion2=args.relion2, inplace=True)
        star.write_star(args.output, df, optics=(not args.relion2))
    else:
        for i, s in enumerate(dfs):
            s = star.compatible(s, relion2=args.relion2, inplace=True)
            star.write_star(os.path.join(args.output, args.suffix + "_%d" % i), s, optics=(not args.relion2))
    return 0
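Example 14 also accepts a full transform as JSON: a 3x3 rotation, optionally extended to 3x4 with a translation column (in Angstroms, since the code divides that column by the pixel size). A short sketch of building such a JSON string; the rotation and translation values are purely illustrative:

import json
import numpy as np

rot = np.identity(3)                          # illustrative rotation
shift = np.array([10.0, 0.0, 5.0])            # illustrative translation, Angstroms
transform = np.hstack([rot, shift[:, None]])  # 3x4 matrix as the script expects
print(json.dumps(transform.tolist()))         # pass this string via --transform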
Example 15
def main(args):
    """
    Projection subtraction program entry point.
    :param args: Command-line arguments parsed by ArgumentParser.parse_args()
    :return: Exit status
    """
    log = logging.getLogger('root')
    hdlr = logging.StreamHandler(sys.stdout)
    log.addHandler(hdlr)
    log.setLevel(logging.getLevelName(args.loglevel.upper()))

    log.debug("Reading particle .star file")
    df = parse_star(args.input, keep_index=False)
    df.reset_index(inplace=True)
    df["rlnImageOriginalName"] = df["rlnImageName"]
    df["ucsfOriginalParticleIndex"], df["ucsfOriginalImagePath"] = \
        df["rlnImageOriginalName"].str.split("@").str
    df["ucsfOriginalParticleIndex"] = pd.to_numeric(
        df["ucsfOriginalParticleIndex"])
    df.sort_values("rlnImageOriginalName", inplace=True, kind="mergesort")
    gb = df.groupby("ucsfOriginalImagePath")
    df["ucsfParticleIndex"] = gb.cumcount() + 1
    df["ucsfImagePath"] = df["ucsfOriginalImagePath"].map(
        lambda x: os.path.join(
            args.dest, args.prefix + os.path.basename(x).replace(
                ".mrcs", args.suffix + ".mrcs")))
    df["rlnImageName"] = df["ucsfParticleIndex"].map(
        lambda x: "%.6d" % x).str.cat(df["ucsfImagePath"], sep="@")
    log.debug("Read particle .star file")

    if args.submap_ft is None:
        submap = mrc.read(args.submap, inc_header=False, compat="relion")
        submap_ft = vol_ft(submap, threads=min(args.threads, cpu_count()))
    else:
        log.debug("Loading %s" % args.submap_ft)
        submap_ft = np.load(args.submap_ft)
        log.debug("Loaded %s" % args.submap_ft)

    sz = submap_ft.shape[0] // 2 - 1
    sx, sy = np.meshgrid(np.fft.rfftfreq(sz), np.fft.fftfreq(sz))
    s = np.sqrt(sx**2 + sy**2)
    r = s * sz
    r = np.round(r).astype(np.int64)
    r[r > sz // 2] = sz // 2 + 1
    nr = np.max(r) + 1
    a = np.arctan2(sy, sx)

    if args.refmap is not None:
        coefs_method = 1
        if args.refmap_ft is None:
            refmap = mrc.read(args.refmap, inc_header=False, compat="relion")
            refmap_ft = vol_ft(refmap, threads=min(args.threads, cpu_count()))
        else:
            log.debug("Loading %s" % args.refmap_ft)
            refmap_ft = np.load(args.refmap_ft)
            log.debug("Loaded %s" % args.refmap_ft)
    else:
        coefs_method = 0
        refmap_ft = np.empty(submap_ft.shape, dtype=submap_ft.dtype)
    apix = calculate_apix(df)

    log.debug("Constructing particle metadata references")
    # npart = df.shape[0]
    idx = df["ucsfOriginalParticleIndex"].values
    stack = df["ucsfOriginalImagePath"].values.astype(np.str, copy=False)
    def1 = df["rlnDefocusU"].values
    def2 = df["rlnDefocusV"].values
    angast = df["rlnDefocusAngle"].values
    phase = df["rlnPhaseShift"].values
    kv = df["rlnVoltage"].values
    ac = df["rlnAmplitudeContrast"].values
    cs = df["rlnSphericalAberration"].values
    az = df["rlnAngleRot"].values
    el = df["rlnAngleTilt"].values
    sk = df["rlnAnglePsi"].values
    xshift = df["rlnOriginX"].values
    yshift = df["rlnOriginY"].values
    new_idx = df["ucsfParticleIndex"].values
    new_stack = df["ucsfImagePath"].values.astype(np.str, copy=False)

    log.debug("Grouping particles by output stack")
    gb = df.groupby("ucsfImagePath")

    iothreads = threading.BoundedSemaphore(args.io_thread_pairs)
    qsize = args.io_queue_length
    fftthreads = args.fft_threads
    # pyfftw.interfaces.cache.enable()

    log.debug("Instantiating worker pool")
    pool = Pool(processes=args.threads)
    threads = []

    try:
        for fname, particles in gb.indices.iteritems():
            log.debug("Instantiating queue")
            queue = Queue.Queue(maxsize=qsize)
            log.debug("Create producer for %s" % fname)
            prod = threading.Thread(
                target=producer,
                args=(pool, queue, submap_ft, refmap_ft, fname, particles, idx,
                      stack, sx, sy, s, a, apix, def1, def2, angast, phase, kv,
                      ac, cs, az, el, sk, xshift, yshift, new_idx, new_stack,
                      coefs_method, r, nr, fftthreads))
            log.debug("Create consumer for %s" % fname)
            cons = threading.Thread(target=consumer,
                                    args=(queue, fname, apix, fftthreads,
                                          iothreads))
            threads.append((prod, cons))
            iothreads.acquire()
            log.debug("iotheads at %d" % iothreads._Semaphore__value)
            log.debug("Start consumer for %s" % fname)
            cons.start()
            log.debug("Start producer for %s" % fname)
            prod.start()
    except KeyboardInterrupt:
        log.debug("Main thread wants out!")

    for pair in threads:
        for thread in pair:
            try:
                thread.join()
            except RuntimeError as e:
                log.debug(e)

    pool.close()
    pool.join()
    pool.terminate()

    df.drop([c for c in df.columns if "ucsf" in c or "eman" in c],
            axis=1,
            inplace=True)

    df.set_index("index", inplace=True)
    df.sort_index(inplace=True, kind="mergesort")

    write_star(args.output, df, reindex=True)

    return 0