Example #1
0
def analyze_diamond(data_func,
                    N,
                    omega_larmor,
                    kernel="rbf",
                    verbose=False,
                    plots=False):
    tau = choose_tau_params(N)
    data = data_func(N, tau)
    dip_inds, windows = find_resonances(data, fit_dips_below=None)
    lower_ind_cutoff = 3220  # corresponds to 15 microseconds
    if verbose:
        print "number of dips to measure: ", sum(dip_inds >= lower_ind_cutoff)
    spin_dict = {}
    for dip_ind in dip_inds:
        if dip_ind >= lower_ind_cutoff:  # 15 microseconds and on
            spin_dict = analyze_dip(dip_ind,
                                    tau,
                                    data_func,
                                    omega_larmor,
                                    spin_dict,
                                    N_vals=np.arange(0, 256, 2),
                                    error_tol=.1 / 64,
                                    verbose=verbose,
                                    plots=plots)
    # load the classifiers
    guess_scaler = learning.load_obj("classifiers/scaler_svm_" + kernel +
                                     "_di3220_29diamonds_cxABe")
    guess_clf = learning.load_obj("classifiers/clf_svm_" + kernel +
                                  "_di3220_29diamonds_cxABe")

    def guess_classifier(features):
        return guess_clf.predict(guess_scaler.transform([features]))

    guess_As, guess_Bs, dataerrs, all_guess_As, all_guess_Bs, select_As, select_Bs = choose_spin_guesses(
        spin_dict,
        N,
        omega_larmor,
        tau,
        data,
        guess_classifier,
        x_min=1,
        error_fun=squared_error)
    cluster_As, cluster_Bs, cluster_dataerrs = cluster_spin_guesses(
        guess_As, guess_Bs, dataerrs, eps=.075, min_samples=1)
    As, Bs, num_remove = cluster_As, cluster_Bs, 1
    while num_remove <= 2:
        As, Bs, num_remove = remove_spins(As,
                                          Bs,
                                          N,
                                          omega_larmor,
                                          tau,
                                          data,
                                          num_remove=num_remove,
                                          error_fun=squared_error,
                                          verbose=verbose)
    return As, Bs, all_guess_As, all_guess_Bs, select_As, select_Bs, guess_As, guess_Bs, cluster_As, cluster_Bs
Example #2
0
def analyze_diamond(data_func, N, omega_larmor, kernel = "rbf", verbose = False, plots = False):
	# Full pipeline for one diamond: measure resonance dips, classify (A, B)
	# spin guesses with a stored SVM, cluster them, then prune the spin list
	# against the data.
	#   data_func:    callable mapping (N, tau) -> measured data
	#   N:            sequence parameter forwarded to choose_tau_params / data_func
	#   omega_larmor: Larmor frequency forwarded to the dip/guess routines
	#   kernel:       name fragment selecting which stored classifier files to load
	tau = choose_tau_params(N)
	data = data_func(N, tau)
	dip_inds, windows = find_resonances(data, fit_dips_below = None)
	lower_ind_cutoff = 3220 # corresponds to 15 microseconds
	if verbose:
		print "number of dips to measure: ", sum(dip_inds >= lower_ind_cutoff)
	# fit every dip past the cutoff, accumulating results into spin_dict
	spin_dict = {}
	for dip_ind in dip_inds:
		if dip_ind >= lower_ind_cutoff: # 15 microseconds and on
			spin_dict = analyze_dip(dip_ind, tau, data_func, omega_larmor, spin_dict, N_vals = np.arange(0,256,2),
				error_tol = .1/64, verbose = verbose, plots = plots)
	# load the classifiers (feature scaler + SVM pair chosen by `kernel`)
	guess_scaler = learning.load_obj("classifiers/scaler_svm_" + kernel + "_di3220_29diamonds_cxABe")
	guess_clf = learning.load_obj("classifiers/clf_svm_" + kernel + "_di3220_29diamonds_cxABe")
	# wrap scaler + classifier into a single feature-vector -> label callable
	def guess_classifier(features):
		return guess_clf.predict(guess_scaler.transform([features]))
	guess_As, guess_Bs, dataerrs, all_guess_As, all_guess_Bs, select_As, select_Bs = choose_spin_guesses(spin_dict, N, omega_larmor, tau, data, guess_classifier, x_min = 1, error_fun = squared_error)
	cluster_As, cluster_Bs, cluster_dataerrs = cluster_spin_guesses(guess_As, guess_Bs, dataerrs, eps = .075, min_samples = 1)
	# iteratively try dropping spin subsets (size 1, then 2) that improve the fit
	As, Bs, num_remove = cluster_As, cluster_Bs, 1
	while num_remove <= 2:
		As, Bs, num_remove = remove_spins(As, Bs, N, omega_larmor, tau, data, num_remove = num_remove, error_fun = squared_error, verbose=verbose)
	return As, Bs, all_guess_As, all_guess_Bs, select_As, select_Bs, guess_As, guess_Bs, cluster_As, cluster_Bs
Example #3
0
        if k >= 0:
            label_inds = np.where(labels == k)[0]
            best_guess = np.argmin(dataerrs[label_inds])
            cluster_As.append(guess_As[label_inds][best_guess])
            cluster_Bs.append(guess_Bs[label_inds][best_guess])
            cluster_dataerrs.append(dataerrs[label_inds][best_guess])
    return np.array(cluster_As), np.array(cluster_Bs), np.array(
        cluster_dataerrs)


# an approximation of the background due to the weakly coupled spins
#A_background = 4 * mag * (np.random.rand(400) - .5)
#B_background = 2 * mag * (np.random.rand(400))
#background_dict = {"A_background" : A_background, "B_background" : B_background}
#learning.store_obj(background_dict, "background_A_B")
# restore the precomputed weak-spin background (recipe in the comments above)
background_dict = learning.load_obj("background_A_B")
A_background = background_dict["A_background"]
B_background = background_dict["B_background"]


# Given guess_As and guess_Bs, this function considers every way of removing num_remove spins from the guess list.
# It compares each candidate subset (including the option of removing nothing) by the error between that subset and the data.
# The error function evaluates the spins together with 400 weakly coupled spins that approximate the background.
# It returns the optimal subset, plus the number to remove on the next call: the same number if a spin was removed this time,
# otherwise this time's number plus one (so that removing several spins at once is eventually considered).
def remove_spins(guess_As,
                 guess_Bs,
                 N,
                 omega_larmor,
                 tau,
                 data,
Example #4
0
	cluster_As, cluster_Bs, cluster_dataerrs = [], [], []
	for k in np.unique(labels):
		if k >= 0:
			label_inds = np.where(labels == k)[0]
			best_guess = np.argmin(dataerrs[label_inds])
			cluster_As.append(guess_As[label_inds][best_guess])
			cluster_Bs.append(guess_Bs[label_inds][best_guess])
			cluster_dataerrs.append(dataerrs[label_inds][best_guess])
	return np.array(cluster_As), np.array(cluster_Bs), np.array(cluster_dataerrs)

# an approximation of the background due to the weakly coupled spins
#A_background = 4 * mag * (np.random.rand(400) - .5)
#B_background = 2 * mag * (np.random.rand(400))
#background_dict = {"A_background" : A_background, "B_background" : B_background}
#learning.store_obj(background_dict, "background_A_B")
# restore the precomputed weak-spin background (recipe in the comments above)
background_dict = learning.load_obj("background_A_B")
A_background = background_dict["A_background"]
B_background = background_dict["B_background"]

# Given guess_As and guess_Bs, this function considers every way of removing num_remove spins from the guess list.
# It compares each candidate subset (including the option of removing nothing) by the error between that subset and the data.
# The error function evaluates the spins together with 400 weakly coupled spins that approximate the background.
# It returns the optimal subset, plus the number to remove on the next call: the same number if a spin was removed this time,
# otherwise this time's number plus one (so that removing several spins at once is eventually considered).
def remove_spins(guess_As, guess_Bs, N, omega_larmor, tau, data, num_remove = 1, error_fun = squared_error, verbose = False):
	# creates all subsets of s with size between lower and upper
	def subset_size_range(s, lower, upper):
		ans = []
		for i in range(lower, upper+1):
			for j in itertools.combinations(s, i):
				ans.append(j)
		return ans