def ssnr_sequential___individual_data_collect(self, r, op):
    """Load one subtomogram record r, align it, optionally apply template guided
    segmentation, and return its masked Fourier transform together with the
    correspondingly rotated mask. Can be called with self=None to bypass the cache."""
    if self is None:
        get_mrc_func = IV.get_mrc
    else:
        get_mrc_func = self.cache.get_mrc

    v = get_mrc_func(r['subtomogram'])
    if 'angle' in r:
        v = GR.rotate_pad_mean(v, angle=N.array(r['angle'], dtype=float), loc_r=N.array(r['loc'], dtype=float))

    if (op is not None) and ('segmentation_tg' in op) and ('template' in r) and ('segmentation' in r['template']):
        # segment the aligned subtomogram using the template's segmentation mask,
        # rotated back into the subtomogram's frame
        phi = IV.read_mrc_vol(r['template']['segmentation'])
        phi_m = (phi > 0.5)
        del phi

        ang_inv, loc_inv = AAL.reverse_transform_ang_loc(r['angle'], r['loc'])
        phi_mr = GR.rotate(phi_m, angle=ang_inv, loc_r=loc_inv, default_val=0)
        del phi_m
        del ang_inv, loc_inv

        import aitom.tomominer.pursuit.multi.util as PMU
        v_s = PMU.template_guided_segmentation(v=v, m=phi_mr, op=op['segmentation_tg'])
        del phi_mr

        if v_s is not None:
            # keep the segmented volume only if it contains finite voxels;
            # fill the non-finite voxels with the mean of the finite ones
            v_f = N.isfinite(v_s)
            if v_f.sum() > 0:
                v_s[N.logical_not(v_f)] = v_s[v_f].mean()
                v = v_s
            del v_s

    v = NF.fftshift(NF.fftn(v))

    m = get_mrc_func(r['mask'])
    if 'angle' in r:
        m = GR.rotate_mask(m, angle=N.array(r['angle'], dtype=float))

    v[m < op['mask_cutoff']] = 0.0

    return {'v': v, 'm': m}
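# Hypothetical usage sketch (not part of the source): collect the masked Fourier
# transform of one aligned subtomogram for SSNR accumulation. The record keys
# ('subtomogram', 'mask', 'angle', 'loc') and the 'mask_cutoff' option mirror what
# ssnr_sequential___individual_data_collect() reads above; the file paths are
# placeholders, and the module aliases (IV, GR, N, NF, AAL) are assumed to be
# imported at module level.
r_example = {'subtomogram': './subtomograms/0000.mrc',
             'mask': './masks/wedge-mask.mrc',
             'angle': [0.1, 0.2, 0.3],
             'loc': [1.0, -2.0, 0.5]}
op_ssnr = {'mask_cutoff': 0.5}
ssnr_re = ssnr_sequential___individual_data_collect(None, r_example, op_ssnr)
v_fft, m_rot = ssnr_re['v'], ssnr_re['m']  # masked FFT of the aligned subtomogram and its rotated mask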
def average(dj, mask_count_threshold):
    """Compute a Fourier-space average of the aligned subtomograms listed in dj,
    dividing each frequency component by its mask coverage and keeping only
    components covered by at least mask_count_threshold masks."""
    vol_sum = None
    mask_sum = None
    for d in dj:
        v = IF.read_mrc_vol(d['subtomogram'])
        if not N.all(N.isfinite(v)):
            raise Exception('error loading', d['subtomogram'])
        vm = IF.read_mrc_vol(d['mask'])

        v_r = GR.rotate_pad_mean(v, angle=d['angle'], loc_r=d['loc'])
        assert N.all(N.isfinite(v_r))
        vm_r = GR.rotate_mask(vm, angle=d['angle'])
        assert N.all(N.isfinite(vm_r))

        if vol_sum is None:
            vol_sum = N.zeros(v_r.shape, dtype=N.float64, order='F')
        vol_sum += v_r

        if mask_sum is None:
            mask_sum = N.zeros(vm_r.shape, dtype=N.float64, order='F')
        mask_sum += vm_r

    ind = (mask_sum >= mask_count_threshold)
    vol_sum_fft = NF.fftshift(NF.fftn(vol_sum))
    avg = N.zeros(vol_sum_fft.shape, dtype=complex)
    avg[ind] = vol_sum_fft[ind] / mask_sum[ind]
    avg = N.real(NF.ifftn(NF.ifftshift(avg)))

    return {'v': avg, 'm': (mask_sum / len(dj))}
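# Hypothetical usage sketch: average a small set of already-aligned subtomograms.
# Each entry of dj mirrors the fields average() reads above ('subtomogram', 'mask',
# 'angle', 'loc'); the paths and the mask count threshold are placeholders.
dj_example = [{'subtomogram': './subtomograms/%04d.mrc' % i,
               'mask': './masks/%04d.mrc' % i,
               'angle': [0.0, 0.0, 0.0],
               'loc': [0.0, 0.0, 0.0]} for i in range(10)]
avg_re = average(dj_example, mask_count_threshold=2)
avg_vol, avg_mask = avg_re['v'], avg_re['m']  # real-space average and mean mask coverage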
def transform(phi, A_fourier, n, inv=False):
    """Apply the rigid-body transform encoded in phi (rotation q_rot, q_tilt, q_psi
    and translation q_x, q_y, q_z) to the Fourier-space volume A_fourier; if inv is
    True, apply the inverse transform instead."""
    A_real = inv_fourier_transform(A_fourier)
    ang = [phi['q_rot'], phi['q_tilt'], phi['q_psi']]
    loc = [phi['q_x'], phi['q_y'], phi['q_z']]
    if inv:
        ang, loc = ang_loc.reverse_transform_ang_loc(ang, loc)
    A_real_rot = rotate.rotate_pad_mean(A_real, angle=ang, loc_r=loc)
    result = fourier_transform(A_real_rot)
    return result
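# Hypothetical usage sketch: apply, then undo, the rigid-body transform encoded in
# phi (Euler angles q_rot / q_tilt / q_psi and shifts q_x / q_y / q_z).
# fourier_transform() / inv_fourier_transform() are the helpers already used by
# transform() above; the random volume is a placeholder, and numpy is assumed to be
# imported as np, as in get_correlation_score() below.
vol_example = np.random.rand(32, 32, 32)
phi_example = {'q_rot': 0.1, 'q_tilt': 0.2, 'q_psi': 0.3, 'q_x': 1.0, 'q_y': 0.0, 'q_z': -1.0}
A_f = fourier_transform(vol_example)
A_f_t = transform(phi_example, A_f, n=vol_example.shape[0])
A_f_back = transform(phi_example, A_f_t, n=vol_example.shape[0], inv=True)  # approximately recovers A_f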
def var__local(self, data_json, labels=None, mask_cutoff=0.5, return_key=True, segmentation_tg_op=None):
    """Accumulate per-label Fourier-space sums, sums of squared magnitudes and mask
    counts over the subtomograms in data_json, for later variance / SSNR style
    statistics. If return_key is True the result is written through self.cache and
    only the key is returned."""
    if labels is None:
        labels = [0] * len(data_json)

    sum_v = {}
    prod_sum_v = {}
    mask_sum = {}
    for i, r in enumerate(data_json):
        if (self is not None) and (self.work_queue is not None) and self.work_queue.done_tasks_contains(self.task.task_id):
            raise Exception('Duplicated task')

        v = IV.read_mrc_vol(r['subtomogram'])
        v = GR.rotate_pad_mean(v, angle=N.array(r['angle'], dtype=float), loc_r=N.array(r['loc'], dtype=float))

        m = IV.read_mrc_vol(r['mask'])
        m = GR.rotate_mask(m, N.array(r['angle'], dtype=float))

        if (segmentation_tg_op is not None) and ('template' in r) and ('segmentation' in r['template']):
            # template guided segmentation of the aligned subtomogram
            phi = IV.read_mrc(r['template']['segmentation'])['value']
            import aitom.tomominer.pursuit.multi.util as PMU
            v_s = PMU.template_guided_segmentation(v=v, m=(phi > 0.5), op=segmentation_tg_op)
            if v_s is not None:
                v = v_s
                del v_s
                # replace non-finite voxels with the mean of the finite ones
                v_t = N.zeros(v.shape)
                v_f = N.isfinite(v)
                v_t[v_f] = v[v_f]
                v_t[N.logical_not(v_f)] = v[v_f].mean()
                v = v_t
                del v_f, v_t

        v = NF.fftshift(NF.fftn(v))
        v[m < mask_cutoff] = 0.0

        if labels[i] not in sum_v:
            sum_v[labels[i]] = v
        else:
            sum_v[labels[i]] += v

        if labels[i] not in prod_sum_v:
            prod_sum_v[labels[i]] = v * N.conj(v)
        else:
            prod_sum_v[labels[i]] += v * N.conj(v)

        if labels[i] not in mask_sum:
            mask_sum[labels[i]] = N.zeros(m.shape, dtype=int)
        mask_sum[labels[i]][m >= mask_cutoff] += 1

    re = {'sum': sum_v, 'prod_sum': prod_sum_v, 'mask_sum': mask_sum}

    if return_key:
        re_key = self.cache.save_tmp_data(re, fn_id=self.task.task_id)
        assert re_key is not None
        return {'key': re_key}
    else:
        return re
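# Hypothetical usage sketch: accumulate per-label Fourier statistics outside the
# distributed work queue by passing self=None; return_key must then be False, since
# writing the result through self.cache is not possible. Paths, angles and labels
# are placeholders.
data_json_example = [{'subtomogram': './subtomograms/%04d.mrc' % i,
                      'mask': './masks/%04d.mrc' % i,
                      'angle': [0.0, 0.0, 0.0],
                      'loc': [0.0, 0.0, 0.0]} for i in range(4)]
var_re = var__local(None, data_json_example, labels=[0, 0, 1, 1], return_key=False)
# var_re['sum'][l], var_re['prod_sum'][l] and var_re['mask_sum'][l] hold the
# accumulated Fourier sum, sum of squared magnitudes and mask counts for label l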
def get_correlation_score(theta, img_db_path, d, k=None):
    """Align the data volume referenced by d against the model average(s) in theta
    and return the model name, the predicted class k and the Fourier shell
    correlation between the data volume and the aligned average. If k is None,
    all K averages are tried and the best-scoring one is used."""
    X = get_image_db(img_db_path)
    n = theta['n']
    J = theta['J']
    K = theta['K']

    v1 = inv_fourier_transform(X[d['v']])
    m1 = X[d['m']]

    if k is not None:
        # align against the single requested model average
        v2 = inv_fourier_transform(theta['A'][k])
        m2 = np.ones((n, n, n))
        item = align.fast_align(v1, m1, v2, m2)[0]
        best_ang = item['ang']
        best_loc = item['loc']
        A_real_pred = v2
        k_pred = k
    else:
        # try every model average and keep the best alignment score
        best_ang = None
        best_loc = None
        best_score = None
        A_real_pred = None
        k_pred = None
        for k in range(K):
            v2 = inv_fourier_transform(theta['A'][k])
            m2 = np.ones((n, n, n))
            transforms = align.fast_align(v1, m1, v2, m2)
            item = transforms[0]
            score = item['score']
            if best_score is None or score > best_score:
                best_score = score
                best_ang = item['ang']
                best_loc = item['loc']
                A_real_pred = v2
                k_pred = k

    A_aligned = rotate.rotate_pad_mean(A_real_pred, angle=best_ang, loc_r=best_loc)
    return ('Model%d' % d['v'], k_pred, stats.fsc(v1, A_aligned))
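# Hypothetical usage sketch: assuming theta (with K Fourier-space model averages in
# theta['A'] and volume size theta['n']) and a data item d (keys 'v' and 'm'
# indexing the image database at img_db_path) have been built elsewhere:
#
#     model_name, k_pred, fsc_curve = get_correlation_score(theta, img_db_path, d)
#
# fsc_curve is the Fourier shell correlation between the data volume and the
# best-aligned model average, and k_pred is the index of that average; passing k
# pins the comparison to one particular average instead of searching all K.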
def normalize(record, op):
    """Pose-normalize one subtomogram: segment it with a level set, centre it at the
    segmentation's centre of mass and rotate it into its PCA frame. Returns None
    when segmentation fails or the centre of mass is displaced too far."""
    if os.path.isfile(record['pose']['subtomogram']):
        # the pose normalized volume already exists, nothing to do
        return {'record': record}

    ls = level_set(record=record, op=op['segmentation'])
    if ls is None:
        return

    phi = N.zeros(ls['phi'].shape)
    phi[ls['phi'] > 0] = ls['phi'][ls['phi'] > 0]

    c = PNU.center_mass(phi)
    mid_co = N.array(phi.shape) / 2
    if N.sqrt(N.square(c - mid_co).sum()) > (N.min(phi.shape) * op['center_mass_max_displacement_proportion']):
        # the segmented structure is too far off centre, reject this record
        return

    rm = PNU.pca(v=phi, c=c)['v']
    record['pose']['c'] = c.tolist()
    record['pose']['rm'] = rm.tolist()

    phi_pn = GR.rotate(phi, rm=rm, c1=c, default_val=0)
    v_org_pn = GR.rotate_pad_mean(ls['v_org'], rm=rm, c1=c)

    return {'ls': ls, 'phi': phi, 'phi_pn': phi_pn, 'v_org_pn': v_org_pn, 'record': record}
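# Hypothetical usage sketch: pose-normalize a single subtomogram record. The
# 'segmentation' options depend on the level_set() implementation and are left
# empty here as a placeholder; record['pose']['subtomogram'] is where the
# normalized volume would eventually be stored.
record_example = {'subtomogram': './subtomograms/0000.mrc',
                  'pose': {'subtomogram': './pose/0000.mrc'}}
op_pose = {'segmentation': {}, 'center_mass_max_displacement_proportion': 0.4}
norm_re = normalize(record_example, op_pose)
if norm_re is not None and 'v_org_pn' in norm_re:
    v_pose_normalized = norm_re['v_org_pn']  # original volume rotated into its PCA frame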
def process(op):
    """Cluster a set of subtomograms: load and align them according to op['mode'],
    optionally reduce dimensionality with weighted EM-PCA, run k-means, then write
    the cluster labels, per-cluster averages and cluster statistics to disk."""
    with open(op['input data json file']) as f:
        dj = json.load(f)

    if 'test' in op:
        if ('sample_num' in op['test']) and (op['test']['sample_num'] > 0) and (len(dj) > op['test']['sample_num']):
            print('testing the procedure using a subsample of %d subtomograms' % op['test']['sample_num'])
            dj = random.sample(dj, op['test']['sample_num'])

    # load every subtomogram, align it, and flatten it into one row of mat
    mat = None
    for i, d in enumerate(dj):
        print('\rloading', i, ' ', end=' ')
        sys.stdout.flush()

        v = IF.read_mrc_vol(d['subtomogram'])
        if op['mode'] == 'pose':
            vr = GR.rotate_pad_mean(v, rm=N.array(d['pose']['rm']), c1=N.array(d['pose']['c']))
        elif op['mode'] == 'template':
            vr = GR.rotate_pad_mean(v, angle=N.array(d['angle']), loc_r=N.array(d['loc']))
        else:
            raise Exception('op[mode]')

        if mat is None:
            mat = N.zeros((len(dj), vr.size))
        mat[i, :] = vr.flatten()

    if 'PCA' in op:
        # weighted EM-PCA for dimension reduction before clustering
        import aitom.tomominer.dimension_reduction.empca as drempca
        pca = drempca.empca(data=mat, weights=N.ones(mat.shape), nvec=op['PCA']['n_dims'], niter=op['PCA']['n_iter'])
        mat_km = pca.coeff
    else:
        mat_km = mat

    km = SC.KMeans(n_clusters=op['kmeans']['cluster num'], n_init=op['kmeans']['n_init'],
                   n_jobs=(op['kmeans']['n_jobs'] if ('n_jobs' in op['kmeans']) else (-1)),
                   verbose=op['kmeans']['verbose'])
    lbl = km.fit_predict(mat_km)

    # write the per-subtomogram cluster labels
    dj_new = []
    for i, d in enumerate(dj):
        dn = {}
        if 'id' in d:
            dn['id'] = d['id']
        dn['subtomogram'] = d['subtomogram']
        dn['cluster_label'] = int(lbl[i])
        dj_new.append(dn)

    op['output data json file'] = os.path.abspath(op['output data json file'])
    if not os.path.isdir(os.path.dirname(op['output data json file'])):
        os.makedirs(os.path.dirname(op['output data json file']))
    with open(op['output data json file'], 'w') as f:
        json.dump(dj_new, f, indent=2)

    # compute and save a summed volume per cluster
    clus_dir = os.path.join(op['out dir'], 'vol-avg')
    if not os.path.isdir(clus_dir):
        os.makedirs(clus_dir)

    clus_stat = []
    for l in set(lbl.tolist()):
        avg_file_name = os.path.abspath(os.path.join(clus_dir, '%03d.mrc' % (l,)))
        v_avg = mat[lbl == l, :].sum(axis=0).reshape(v.shape)
        IF.put_mrc(mrc=v_avg, path=avg_file_name, overwrite=True)
        clus_stat.append({'cluster_label': l,
                          'size': len([_ for _ in lbl if _ == l]),
                          'subtomogram': avg_file_name})

    op['output cluster stat file'] = os.path.abspath(op['output cluster stat file'])
    if not os.path.isdir(os.path.dirname(op['output cluster stat file'])):
        os.makedirs(os.path.dirname(op['output cluster stat file']))
    with open(op['output cluster stat file'], 'w') as f:
        json.dump(clus_stat, f, indent=2)
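# Hypothetical configuration sketch for process(): the keys mirror the ones the
# function reads above, the values are placeholders. In 'template' mode each entry
# of the input data json must carry 'angle' and 'loc'; in 'pose' mode it must carry
# the 'pose' record produced by normalize(). Omitting the optional 'PCA' key makes
# k-means run directly on the flattened volumes.
op_cluster = {'input data json file': './data.json',
              'output data json file': './cluster/data_label.json',
              'output cluster stat file': './cluster/cluster_stat.json',
              'out dir': './cluster',
              'mode': 'template',
              'kmeans': {'cluster num': 10, 'n_init': 10, 'verbose': 0},
              'PCA': {'n_dims': 50, 'n_iter': 10}}
process(op_cluster)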