def _assemble_cpmg_data(self, spin_id=None):
    """Assemble the CPMG data.

    @keyword spin_id:   The spin ID string to restrict data to.
    @type spin_id:      str
    """

    # Spin loop.
    for spin, mol_name, res_num, res_name, id in spin_loop(full_info=True, selection=spin_id, return_id=True, skip_desel=True):
        # The residue index.
        res_index = res_num - 1

        # Sanity checks.
        if res_index < 0:
            raise RelaxError("A residue number of less than 1 is not supported in NESSY.")
        elif res_index > 699:
            raise RelaxError("A residue number of greater than 700 is not supported in NESSY.")

        # Loop over all spectrometer frequencies.
        for exp_type, frq, offset, ei, mi, oi in loop_exp_frq_offset(return_indices=True):
            # Loop over all dispersion points.
            di_new = 0
            for point, di in loop_point(exp_type=exp_type, frq=frq, offset=offset, skip_ref=False, return_indices=True):
                # The keys.
                keys = find_intensity_keys(exp_type=exp_type, frq=frq, point=point, time=cdp.relax_time_list[0])

                # Convert the reference point for NESSY input.
                if point is None or isNaN(point):
                    point = 0

                # Loop over the keys.
                for key in keys:
                    # Another check.
                    if self.cpmg_data[mi][di_new][res_index] != '':
                        raise RelaxError("Only one spin system per residue is supported in NESSY.")

                    # Store the data (if it exists).
                    if key in spin.peak_intensity:
                        self.cpmg_data[mi][di_new][res_index] = str(spin.peak_intensity[key])

                # The CPMG frequency.
                self.cpmg_frqs[mi][di_new] = str(point)

                # Increment the field index.
                di_new += 1
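# A minimal sketch of the grid structures filled in above.  This is an
# assumption for illustration only, not the actual NESSY exporter set up: the
# empty string cells simply match the `!= ''` occupancy check, and the 700
# residue limit matches the sanity checks in _assemble_cpmg_data().
num_frq = 2         # Hypothetical number of spectrometer frequencies.
num_points = 20     # Hypothetical number of CPMG dispersion points.
cpmg_data = [[['' for _ in range(700)] for _ in range(num_points)] for _ in range(num_frq)]
cpmg_frqs = [['' for _ in range(num_points)] for _ in range(num_frq)]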
## Now try to do a line of best fit by least squares.

# The peak intensities, errors and times.
values = []
errors = []
times = []
for time, ti in loop_time(exp_type=exp_type, frq=frq, offset=offset, point=point, return_indices=True):
    value = average_intensity(spin=cur_spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, sim_index=None)
    values.append(value)

    error = average_intensity(spin=cur_spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, error=True)
    errors.append(error)
    times.append(time)

    # Find the intensity keys.
    int_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)

# Convert to numpy arrays.
values = asarray(values)
errors = asarray(errors)
times = asarray(times)

# Initialise the function to minimise.
E.setup_data(values=values, errors=errors, times=times)

# Initial guess for minimisation.  Solved by linear least squares.
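# A minimal sketch of the linear least squares initial guess referred to in
# the final comment above, assuming the two-parameter exponential decay
# I(t) = I0 * exp(-R * t).  Taking the natural log linearises the model to
# ln(I) = ln(I0) - R*t, so a first order polynomial fit yields starting
# values.  The function name is illustrative and not part of the relax API.
from numpy import asarray, exp, log, polyfit

def linear_ls_initial_guess(values, times):
    """Return (R, I0) starting values from a log-linear fit of positive intensities."""
    values = asarray(values, dtype=float)
    times = asarray(times, dtype=float)

    # Fit ln(I) = ln(I0) - R*t, giving the slope -R and the intercept ln(I0).
    slope, intercept = polyfit(times, log(values), 1)
    return -slope, exp(intercept)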
def test_find_intensity_keys_r1rho(self):
    """Unit test of the find_intensity_keys() function.

    This uses the data of the saved state attached to U{bug #21344<https://gna.org/bugs/?21344>}.
    """

    # Load the state.
    statefile = status.install_path + sep+'test_suite'+sep+'shared_data'+sep+'dispersion'+sep+'bug_21344_trunc.bz2'
    state.load_state(statefile, force=True)

    # Original data (spectrum id: exp_type, frq, omega_rf_ppm, spin_lock_field_strength, time_spin_lock).
    data = dict()
    data['118_431.00_0.00'] = ['46_0_35_0', 'R1rho', 799777399.1, 118.078, 431.0, 0.0]
    data['118_431.00_0.04'] = ['48_0_35_4', 'R1rho', 799777399.1, 118.078, 431.0, 0.04]
    data['118_431.00_0.10'] = ['47_0_35_10', 'R1rho', 799777399.1, 118.078, 431.0, 0.1]
    data['118_431.00_0.20'] = ['49_0_35_20', 'R1rho', 799777399.1, 118.078, 431.0, 0.2]
    data['118_651.20_0.00'] = ['36_0_39_0', 'R1rho', 799777399.1, 118.078, 651.2, 0.0]
    data['118_651.20_0.04'] = ['39_0_39_4', 'R1rho', 799777399.1, 118.078, 651.2, 0.04]
    data['118_651.20_0.10'] = ['37_0_39_10', 'R1rho', 799777399.1, 118.078, 651.2, 0.1]
    data['118_651.20_0.20'] = ['40_0_39_20', 'R1rho', 799777399.1, 118.078, 651.2, 0.2]
    data['118_651.20_0.40'] = ['38_0_39_40', 'R1rho', 799777399.1, 118.078, 651.2, 0.4]
    data['118_800.50_0.00'] = ['41_0_41_0', 'R1rho', 799777399.1, 118.078, 800.5, 0.0]
    data['118_800.50_0.04'] = ['44_0_41_4', 'R1rho', 799777399.1, 118.078, 800.5, 0.04]
    data['118_800.50_0.10'] = ['42_0_41_10', 'R1rho', 799777399.1, 118.078, 800.5, 0.1]
    data['118_800.50_0.20'] = ['45_0_41_20', 'R1rho', 799777399.1, 118.078, 800.5, 0.2]
    data['118_800.50_0.40'] = ['43_0_41_40', 'R1rho', 799777399.1, 118.078, 800.5, 0.4]
    data['118_984.00_0.00'] = ['31_0_43_0', 'R1rho', 799777399.1, 118.078, 984.0, 0.0]
    data['118_984.00_0.04'] = ['34_0_43_4', 'R1rho', 799777399.1, 118.078, 984.0, 0.04]
    data['118_984.00_0.10'] = ['32_0_43_10', 'R1rho', 799777399.1, 118.078, 984.0, 0.1]
    data['118_984.00_0.20'] = ['35_0_43_20', 'R1rho', 799777399.1, 118.078, 984.0, 0.2]
    data['118_984.00_0.40'] = ['33_0_43_40', 'R1rho', 799777399.1, 118.078, 984.0, 0.4]
    data['118_1341.11_0.00'] = ['1_0_46_0', 'R1rho', 799777399.1, 118.078, 1341.11, 0.0]
    data['118_1341.11_0.04'] = ['4_0_46_4', 'R1rho', 799777399.1, 118.078, 1341.11, 0.04]
    data['118_1341.11_0.10'] = ['2_0_46_10', 'R1rho', 799777399.1, 118.078, 1341.11, 0.1]
    data['118_1341.11_0.20'] = ['5_0_46_20', 'R1rho', 799777399.1, 118.078, 1341.11, 0.2]
    data['118_1341.11_0.40'] = ['3_0_46_40', 'R1rho', 799777399.1, 118.078, 1341.11, 0.4]
    data['118_1648.50_0.00'] = ['60_0_48_0', 'R1rho', 799777399.1, 118.078, 1648.5, 0.0]
    data['118_1648.50_0.04'] = ['63_0_48_4', 'R1rho', 799777399.1, 118.078, 1648.5, 0.04]
    data['118_1648.50_0.10'] = ['61_0_48_10', 'R1rho', 799777399.1, 118.078, 1648.5, 0.1]
    data['118_1648.50_0.14'] = ['62_0_48_14', 'R1rho', 799777399.1, 118.078, 1648.5, 0.14]
    data['118_1648.50_0.20'] = ['64_0_48_20', 'R1rho', 799777399.1, 118.078, 1648.5, 0.2]
    data['124_1341.11_0.00'] = ['11_500_46_0', 'R1rho', 799777399.1, 124.247031462, 1341.11, 0.0]
    data['124_1341.11_0.04'] = ['14_500_46_4', 'R1rho', 799777399.1, 124.247031462, 1341.11, 0.04]
    data['124_1341.11_0.10'] = ['12_500_46_10', 'R1rho', 799777399.1, 124.247031462, 1341.11, 0.1]
    data['124_1341.11_0.20'] = ['15_500_46_20', 'R1rho', 799777399.1, 124.247031462, 1341.11, 0.2]
    data['124_1341.11_0.40'] = ['13_500_46_40', 'R1rho', 799777399.1, 124.247031462, 1341.11, 0.4]
    data['130_800.50_0.00'] = ['50_1000_41_0', 'R1rho', 799777399.1, 130.416062924, 800.5, 0.0]
    data['130_800.50_0.04'] = ['53_1000_41_4', 'R1rho', 799777399.1, 130.416062924, 800.5, 0.04]
    data['130_800.50_0.10'] = ['51_1000_41_10', 'R1rho', 799777399.1, 130.416062924, 800.5, 0.1]
    data['130_800.50_0.20'] = ['54_1000_41_20', 'R1rho', 799777399.1, 130.416062924, 800.5, 0.2]
    data['130_800.50_0.40'] = ['52_1000_41_40', 'R1rho', 799777399.1, 130.416062924, 800.5, 0.4]
    data['130_1341.11_0.00'] = ['21_1000_46_0', 'R1rho', 799777399.1, 130.416062924, 1341.11, 0.0]
    data['130_1341.11_0.04'] = ['24_1000_46_4', 'R1rho', 799777399.1, 130.416062924, 1341.11, 0.04]
    data['130_1341.11_0.10'] = ['22_1000_46_10', 'R1rho', 799777399.1, 130.416062924, 1341.11, 0.1]
    data['130_1341.11_0.20'] = ['25_1000_46_20', 'R1rho', 799777399.1, 130.416062924, 1341.11, 0.2]
    data['130_1341.11_0.40'] = ['23_1000_46_40', 'R1rho', 799777399.1, 130.416062924, 1341.11, 0.4]
    data['130_1648.50_0.00'] = ['65_1000_48_0', 'R1rho', 799777399.1, 130.416062924, 1648.5, 0.0]
    data['130_1648.50_0.04'] = ['68_1000_48_4', 'R1rho', 799777399.1, 130.416062924, 1648.5, 0.04]
    data['130_1648.50_0.10'] = ['66_1000_48_10', 'R1rho', 799777399.1, 130.416062924, 1648.5, 0.1]
    data['130_1648.50_0.14'] = ['67_1000_48_14', 'R1rho', 799777399.1, 130.416062924, 1648.5, 0.14]
    data['130_1648.50_0.20'] = ['69_1000_48_20', 'R1rho', 799777399.1, 130.416062924, 1648.5, 0.2]
    data['143_800.50_0.00'] = ['55_2000_41_0', 'R1rho', 799777399.1, 142.754125848, 800.5, 0.0]
    data['143_800.50_0.04'] = ['58_2000_41_4', 'R1rho', 799777399.1, 142.754125848, 800.5, 0.04]
    data['143_800.50_0.10'] = ['56_2000_41_10', 'R1rho', 799777399.1, 142.754125848, 800.5, 0.1]
    data['143_800.50_0.20'] = ['59_2000_41_20', 'R1rho', 799777399.1, 142.754125848, 800.5, 0.2]
    data['143_800.50_0.40'] = ['57_2000_41_40', 'R1rho', 799777399.1, 142.754125848, 800.5, 0.4]
    data['143_1341.11_0.00'] = ['6_2000_46_0', 'R1rho', 799777399.1, 142.754125848, 1341.11, 0.0]
    data['143_1341.11_0.04'] = ['9_2000_46_4', 'R1rho', 799777399.1, 142.754125848, 1341.11, 0.04]
    data['143_1341.11_0.10'] = ['7_2000_46_10', 'R1rho', 799777399.1, 142.754125848, 1341.11, 0.1]
    data['143_1341.11_0.20'] = ['10_2000_46_20', 'R1rho', 799777399.1, 142.754125848, 1341.11, 0.2]
    data['143_1341.11_0.40'] = ['8_2000_46_40', 'R1rho', 799777399.1, 142.754125848, 1341.11, 0.4]
    data['180_1341.11_0.00'] = ['16_5000_46_0', 'R1rho', 799777399.1, 179.768314621, 1341.11, 0.0]
    data['180_1341.11_0.04'] = ['19_5000_46_4', 'R1rho', 799777399.1, 179.768314621, 1341.11, 0.04]
    data['180_1341.11_0.10'] = ['17_5000_46_10', 'R1rho', 799777399.1, 179.768314621, 1341.11, 0.1]
    data['180_1341.11_0.20'] = ['20_5000_46_20', 'R1rho', 799777399.1, 179.768314621, 1341.11, 0.2]
    data['180_1341.11_0.40'] = ['18_5000_46_40', 'R1rho', 799777399.1, 179.768314621, 1341.11, 0.4]
    data['241_1341.11_0.00'] = ['26_10000_46_0', 'R1rho', 799777399.1, 241.458629241, 1341.11, 0.0]
    data['241_1341.11_0.04'] = ['29_10000_46_4', 'R1rho', 799777399.1, 241.458629241, 1341.11, 0.04]
    data['241_1341.11_0.10'] = ['27_10000_46_10', 'R1rho', 799777399.1, 241.458629241, 1341.11, 0.1]
    data['241_1341.11_0.20'] = ['30_10000_46_20', 'R1rho', 799777399.1, 241.458629241, 1341.11, 0.2]
    data['241_1341.11_0.40'] = ['28_10000_46_40', 'R1rho', 799777399.1, 241.458629241, 1341.11, 0.4]

    # Check the ids and time points returned for each experiment.
    print("Checking the id return experiment.")
    for id in cdp.exp_type:
        exp_type = cdp.exp_type[id]
        frq = cdp.spectrometer_frq[id]
        offset = cdp.spin_lock_offset[id]
        point = cdp.spin_lock_nu1[id]

        # Loop over the time points.
        for time in loop_time(exp_type=exp_type, frq=frq, offset=offset, point=point):
            ids = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
            print(exp_type, frq, offset, point, time, data["%3.0f_%3.2f_%1.2f" % (offset, point, time)][5], id, ids)

            # Test the id return.
            self.assertEqual(len(ids), 1)

            # Test the time point.
            self.assertEqual(time, data["%3.0f_%3.2f_%1.2f" % (offset, point, time)][5])
            self.assertEqual(ids[0], data["%3.0f_%3.2f_%1.2f" % (offset, point, time)][0])
def calculate_r2eff():
    """Calculate the R2eff values for fixed relaxation time period data."""

    # Data checks.
    check_exp_type()
    check_disp_points()
    check_exp_type_fixed_time()

    # Printouts.
    print("Calculating the R2eff/R1rho values for fixed relaxation time period data.")

    # Loop over the spins.
    for spin, mol_name, resi, resn, spin_id in spin_loop(full_info=True, return_id=True, skip_desel=True):
        # Spin ID printout.
        print("Spin '%s'." % spin_id)

        # Skip spins which have no data.
        if not hasattr(spin, 'peak_intensity'):
            continue

        # Initialise the data structures.
        if not hasattr(spin, 'r2eff'):
            spin.r2eff = {}
        if not hasattr(spin, 'r2eff_err'):
            spin.r2eff_err = {}

        # Loop over all the data.
        for exp_type, frq, offset, point, time in loop_exp_frq_offset_point_time():
            # The three keys.
            ref_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=None, time=time)
            int_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
            param_key = return_param_key_from_data(exp_type=exp_type, frq=frq, offset=offset, point=point)

            # Check for missing data.
            missing = False
            for i in range(len(ref_keys)):
                if ref_keys[i] not in spin.peak_intensity:
                    missing = True
            for i in range(len(int_keys)):
                if int_keys[i] not in spin.peak_intensity:
                    missing = True
            if missing:
                continue

            # Average the reference intensity data and errors.
            ref_intensity = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=None, time=time)
            ref_intensity_err = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=None, time=time, error=True)

            # Average the intensity data and errors.
            intensity = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
            intensity_err = average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, error=True)

            # Check for math domain errors, i.e. taking the log of a value less than or equal to 0.0.
            if ref_intensity == 0.0:
                skip_data = True
            elif float(intensity) / ref_intensity <= 0.0:
                skip_data = True
            else:
                skip_data = False

            if skip_data:
                spin_string = generate_spin_string(spin=spin, mol_name=mol_name, res_num=resi, res_name=resn)
                msg = "Math log(I / I_ref) domain error for spin '%s' in R2eff value calculation for fixed relaxation time period data.  I=%3.3f, I_ref=%3.3f.  The point is skipped." % (spin_string, intensity, ref_intensity)
                warn(RelaxWarning("%s" % msg))
                point_info = "This happened for '%s' at %3.1f MHz, for offset=%3.1f ppm and dispersion point %3.1f Hz and time %1.2f s.\n" % (exp_type, frq/1E6, offset, point, time)
                print(point_info)
            else:
                # Calculate the R2eff value.
                spin.r2eff[param_key] = calc_two_point_r2eff(relax_time=time, I_ref=ref_intensity, I=intensity)

                # Calculate the R2eff error.
                spin.r2eff_err[param_key] = calc_two_point_r2eff_err(relax_time=time, I_ref=ref_intensity, I=intensity, I_ref_err=ref_intensity_err, I_err=intensity_err)
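# For reference, the two-point R2eff calculation and its error propagation
# used above reduce to simple closed-form expressions.  This is a minimal
# sketch under the standard fixed relaxation time period relations; the
# function names are illustrative, not the relax library functions themselves.
from math import log, sqrt

def two_point_r2eff(relax_time, I_ref, I):
    """R2eff = -ln(I / I_ref) / T, where T is the fixed relaxation time period."""
    return -1.0 / relax_time * log(float(I) / I_ref)

def two_point_r2eff_err(relax_time, I_ref, I, I_ref_err, I_err):
    """Propagate the averaged intensity errors into the R2eff error."""
    return 1.0 / relax_time * sqrt((I_ref_err / I_ref)**2 + (I_err / I)**2)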
def back_calc_peak_intensities(spin=None, spin_id=None, exp_type=None, frq=None, offset=None, point=None):
    """Back-calculation of peak intensity for the given relaxation time.

    @keyword spin:      The specific spin data container.
    @type spin:         SpinContainer instance
    @keyword spin_id:   The optional spin ID string for use in warning messages.
    @type spin_id:      str or None
    @keyword exp_type:  The experiment type.
    @type exp_type:     str
    @keyword frq:       The spectrometer frequency.
    @type frq:          float
    @keyword offset:    For R1rho-type data, the spin-lock offset value in ppm.
    @type offset:       None or float
    @keyword point:     The dispersion point data (either the spin-lock field strength in Hz or the nu_CPMG frequency in Hz).
    @type point:        float
    @return:            The back-calculated peak intensities for the given exponential curve.
    @rtype:             numpy rank-1 float array
    """

    # Check.
    if not has_exponential_exp_type():
        raise RelaxError("Back-calculation is not allowed for the fixed time experiment types.")

    # The key.
    param_key = return_param_key_from_data(exp_type=exp_type, frq=frq, offset=offset, point=point)

    # Create the initial parameter vector.
    param_vector = assemble_param_vector(spins=[spin], key=param_key)

    # The peak intensities and times.
    values = []
    errors = []
    times = []
    for time in loop_time(exp_type=exp_type, frq=frq, offset=offset, point=point):
        # Check the peak intensity keys.
        int_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
        for i in range(len(int_keys)):
            if int_keys[i] not in spin.peak_intensity:
                if spin_id:
                    warn(RelaxWarning("The spin %s peak intensity key '%s' is not present, skipping the back-calculation." % (spin_id, int_keys[i])))
                else:
                    warn(RelaxWarning("The peak intensity key '%s' is not present, skipping the back-calculation." % int_keys[i]))
                return

        # The data.
        values.append(average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time))
        errors.append(average_intensity(spin=spin, exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, error=True))
        times.append(time)

    # The scaling matrix in a diagonalised list form.
    scaling_list = []
    for i in range(len(param_vector)):
        scaling_list.append(1.0)

    # Initialise the relaxation fit functions.
    model = Relax_fit_opt(model='exp', num_params=len(spin.params), values=values, errors=errors, relax_times=times, scaling_matrix=scaling_list)

    # Make a single function call.  This will cause back calculation and the data will be stored in the C module.
    model.func(param_vector)

    # Get the data back.
    results = model.back_calc_data()

    # Return the correct peak height.
    return results
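# A pure Python sketch of what the single model.func() call back-calculates
# above, assuming the two-parameter 'exp' model with the parameter vector
# ordered as [R, I0].  This illustrates the exponential curve only and is not
# the compiled C module used by Relax_fit_opt.
from numpy import asarray, exp

def back_calc_exp(param_vector, times):
    """Return I0 * exp(-R * t) at each time point as a numpy rank-1 float array."""
    rate, i0 = param_vector
    return i0 * exp(-rate * asarray(times, dtype=float))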
def minimise_r2eff(spins=None, spin_ids=None, min_algor=None, min_options=None, func_tol=None, grad_tol=None, max_iterations=None, constraints=False, scaling_matrix=None, verbosity=0, sim_index=None, lower=None, upper=None, inc=None):
    """Optimise the R2eff model by fitting the 2-parameter exponential curves.

    This mimics the R1 and R2 relax_fit analysis.

    @keyword spins:             The list of spins for the cluster.
    @type spins:                list of SpinContainer instances
    @keyword spin_ids:          The list of spin IDs for the cluster.
    @type spin_ids:             list of str
    @keyword min_algor:         The minimisation algorithm to use.
    @type min_algor:            str
    @keyword min_options:       An array of options to be used by the minimisation algorithm.
    @type min_options:          array of str
    @keyword func_tol:          The function tolerance which, when reached, terminates optimisation.  Setting this to None turns off the check.
    @type func_tol:             None or float
    @keyword grad_tol:          The gradient tolerance which, when reached, terminates optimisation.  Setting this to None turns off the check.
    @type grad_tol:             None or float
    @keyword max_iterations:    The maximum number of iterations for the algorithm.
    @type max_iterations:       int
    @keyword constraints:       If True, constraints are used during optimisation.
    @type constraints:          bool
    @keyword scaling_matrix:    The diagonal and square scaling matrix.
    @type scaling_matrix:       numpy rank-2, float64 array or None
    @keyword verbosity:         The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:            int
    @keyword sim_index:         The index of the simulation to optimise.  This should be None if normal optimisation is desired.
    @type sim_index:            None or int
    @keyword lower:             The model specific lower bounds of the grid search which must be equal in length to the number of parameters in the model.  This optional argument is only used when doing a grid search.
    @type lower:                list of numbers
    @keyword upper:             The model specific upper bounds of the grid search which must be equal in length to the number of parameters in the model.  This optional argument is only used when doing a grid search.
    @type upper:                list of numbers
    @keyword inc:               The model specific increments for each dimension of the space for the grid search.  The number of elements in the array must equal the number of parameters in the model.  This argument is only used when doing a grid search.
    @type inc:                  list of int
    """

    # Check that the C modules have been compiled.
    if not C_module_exp_fn:
        raise RelaxError("Relaxation curve fitting is not available.  Try compiling the C modules on your platform.")

    # Loop over the spins.
    for si in range(len(spins)):
        # Skip deselected spins.
        if not spins[si].select:
            continue

        # Loop over each spectrometer frequency and dispersion point.
        for exp_type, frq, offset, point in loop_exp_frq_offset_point():
            # The parameter key.
            param_key = return_param_key_from_data(exp_type=exp_type, frq=frq, offset=offset, point=point)

            # The initial parameter vector.
            param_vector = assemble_param_vector(spins=[spins[si]], key=param_key, sim_index=sim_index)

            # Diagonal scaling.
            if scaling_matrix is not None:
                param_vector = dot(inv(scaling_matrix), param_vector)

            # Linear constraints.
            A, b = None, None
            if constraints:
                A, b = linear_constraints(spins=[spins[si]], scaling_matrix=scaling_matrix)

            # Print out.
            if verbosity >= 1:
                # Individual spin section.
                top = 2
                if verbosity >= 2:
                    top += 2
                text = "Fitting to spin %s, frequency %s and dispersion point %s" % (spin_ids[si], frq, point)
                subsection(file=sys.stdout, text=text, prespace=top)

                # Grid search printout.
                if match('^[Gg]rid', min_algor):
                    result = 1
                    for x in inc:
                        result = mul(result, x)
                    print("Unconstrained grid search size: %s (constraints may decrease this size).\n" % result)

            # The peak intensities, errors and times.
            values = []
            errors = []
            times = []
            data_flag = True
            for time in loop_time(exp_type=exp_type, frq=frq, offset=offset, point=point):
                # Check the peak intensity keys.
                int_keys = find_intensity_keys(exp_type=exp_type, frq=frq, offset=offset, point=point, time=time)
                peak_intensities = spins[si].peak_intensity
                if sim_index is not None:
                    peak_intensities = spins[si].peak_intensity_sim
                for i in range(len(int_keys)):
                    if int_keys[i] not in peak_intensities:
                        if verbosity:
                            warn(RelaxWarning("The spin %s peak intensity key '%s' is not present, skipping the optimisation." % (spin_ids[si], int_keys[i])))
                        data_flag = False
                        break

                if data_flag:
                    values.append(average_intensity(spin=spins[si], exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, sim_index=sim_index))
                    errors.append(average_intensity(spin=spins[si], exp_type=exp_type, frq=frq, offset=offset, point=point, time=time, error=True))
                    times.append(time)
            if not data_flag:
                continue

            # Raise an error if the number of time points is less than 3.
            if len(times) < 3:
                subsection(file=sys.stdout, text="Exponential curve fitting error for point:", prespace=2)
                point_info = "%s at %3.1f MHz, for offset=%3.3f ppm and dispersion point %-5.1f, with %i time points." % (exp_type, frq/1E6, offset, point, len(times))
                print(point_info)
                raise RelaxError("The data setup points to exponential curve fitting, but only %i time points were found, where 3 time points is the minimum.  If calculating R2eff values for fixed relaxation time period data, check that a reference intensity has been specified for each offset value." % len(times))

            # The scaling matrix in a diagonalised list form.
            scaling_list = []
            if scaling_matrix is None:
                for i in range(len(param_vector)):
                    scaling_list.append(1.0)
            else:
                for i in range(len(scaling_matrix)):
                    scaling_list.append(scaling_matrix[i, i])

            # Initialise the function to minimise.
            model = Relax_fit_opt(model='exp', num_params=len(param_vector), values=values, errors=errors, relax_times=times, scaling_matrix=scaling_list)

            # Grid search.
            if search('^[Gg]rid', min_algor):
                results = grid(func=model.func, args=(), num_incs=inc, lower=lower, upper=upper, A=A, b=b, verbosity=verbosity)

                # Unpack the results.
                param_vector, chi2, iter_count, warning = results
                f_count = iter_count
                g_count = 0.0
                h_count = 0.0

            # Minimisation.
            else:
                results = generic_minimise(func=model.func, dfunc=model.dfunc, d2func=model.d2func, args=(), x0=param_vector, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, maxiter=max_iterations, A=A, b=b, full_output=True, print_flag=verbosity)

                # Unpack the results.
                if results is None:
                    return
                param_vector, chi2, iter_count, f_count, g_count, h_count, warning = results

            # Scaling.
            if scaling_matrix is not None:
                param_vector = dot(scaling_matrix, param_vector)

            # Disassemble the parameter vector.
            disassemble_param_vector(param_vector=param_vector, spins=[spins[si]], key=param_key, sim_index=sim_index)

            # Monte Carlo minimisation statistics.
            if sim_index is not None:
                # Chi-squared statistic.
                spins[si].chi2_sim[sim_index] = chi2

                # Iterations.
                spins[si].iter_sim[sim_index] = iter_count

                # Function evaluations.
                spins[si].f_count_sim[sim_index] = f_count

                # Gradient evaluations.
                spins[si].g_count_sim[sim_index] = g_count

                # Hessian evaluations.
                spins[si].h_count_sim[sim_index] = h_count

                # Warning.
                spins[si].warning_sim[sim_index] = warning

            # Normal statistics.
            else:
                # Chi-squared statistic.
                spins[si].chi2 = chi2

                # Iterations.
                spins[si].iter = iter_count

                # Function evaluations.
                spins[si].f_count = f_count

                # Gradient evaluations.
                spins[si].g_count = g_count

                # Hessian evaluations.
                spins[si].h_count = h_count

                # Warning.
                spins[si].warning = warning
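# A minimal sketch of the diagonal scaling applied above, assuming a
# hypothetical [R, I0] parameter pair where the intensity is orders of
# magnitude larger than the rate.  Scaling into and out of the optimisation
# space keeps the search dimensions comparable; the values are illustrative.
from numpy import array, diag, dot
from numpy.linalg import inv

scaling_matrix = diag([1.0, 1e5])          # Rough parameter magnitudes (assumed).
param_vector = array([12.5, 250000.0])     # [R, I0] (illustrative values).

scaled = dot(inv(scaling_matrix), param_vector)    # Into the optimisation space.
unscaled = dot(scaling_matrix, scaled)             # Back out after minimisation.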