def generate_energy_data_global_minimum():
    """Sweep U for the s-case model and save the total-energy curve.

    Reads the job definition, canonical s-band model, and input density
    files, backs them up for the duration of the run (they are mutated by
    the sweep), then for each U in [0.005, 10] updates the input density
    and records the resulting total energy to
    ``<results_dir>/total_energy_array_global_minimum.csv``.

    Side effects only; returns None.
    """
    jobdef_file = "scripts/JobDef.json"
    jobdef = JobDef(jobdef_file)
    modelfile = "models/TBcanonical_s.json"
    model = Model(modelfile)
    input_density_file = "scripts/rho.json"
    input_density = InputDensity(input_density_file)

    # BackupFiles restores the mutated config files when the sweep finishes.
    with BackupFiles(input_density_file, jobdef_file, modelfile):
        jobdef.write_total_energy()
        jobdef.update_hamiltonian("scase")
        jobdef.update_model("TBcanonical_s")
        jobdef.update_input_rho(input_density_file)

        results_dir = jobdef['results_dir']
        # Consistency fix: use the local results_dir rather than re-reading
        # jobdef['results_dir'] (same value, one lookup).
        energy_file = os.path.join(results_dir, "energy.txt")
        energy_array_filename = os.path.join(
            results_dir, "total_energy_array_global_minimum.csv")
        execution_args = ['pylato/main.py', jobdef_file]

        U_array = np.linspace(0.005, 10, num=100)
        energy_array = []
        for U in U_array:
            # update_U rewrites rho.json, which the simulation then reads.
            input_density.update_U(U)
            energy_array.append(calculate_energy_result(
                U, 0, 0, model, energy_file, execution_args))

        x_label = "U/|t|"
        y_label = "energy/eV"
        save_1D_raw_data(U_array, energy_array, x_label, y_label,
                         energy_array_filename)
def generate_mag_mom_corr_scase_global_minimum():
    """Sweep U for the s-case model and save the average magnetic-moment
    correlation together with the groundstate classification per U point.

    Config files are backed up for the duration of the run because the
    sweep rewrites them. Results go to
    ``<results_dir>/mag_mom_corr_with_class_scase_global_minimum.csv``.
    """
    jobdef_path = "scripts/JobDef.json"
    model_path = "models/TBcanonical_s.json"
    density_path = "scripts/rho.json"

    jobdef = JobDef(jobdef_path)
    model = Model(model_path)
    input_density = InputDensity(density_path)

    with BackupFiles(density_path, jobdef_path, model_path):
        # Configure the job for the s-case magnetic-correlation sweep.
        jobdef.write_magnetic_correlation()
        jobdef.write_groundstate_classification()
        jobdef.update_hamiltonian("scase")
        jobdef.update_model("TBcanonical_s")
        jobdef.update_input_rho(density_path)

        out_dir = jobdef['results_dir']
        mag_corr_file = os.path.join(out_dir, "mag_corr.txt")
        classification_file = os.path.join(out_dir, "classification.txt")
        output_csv = os.path.join(
            out_dir, "mag_mom_corr_with_class_scase_global_minimum.csv")
        execution_args = ['pylato/main.py', jobdef_path]

        u_values = np.linspace(0.005, 10, num=100)
        correlations = []
        classifications = []
        for u in u_values:
            input_density.update_U(u)
            corr, label = calculate_mag_corr_result(
                u, 0, 0, model, mag_corr_file, execution_args,
                classification_file=classification_file)
            correlations.append(corr)
            classifications.append(label)

        save_1D_with_extra_info_raw_data(
            x_vals=u_values,
            results=correlations,
            extra_info=classifications,
            labels=["U/|t|", "C_avg", "classification"],
            filename=output_csv)
def generate_mag_mom_corr_scase_global_minimum():
    """Sweep U for the s-case model, saving magnetic-moment correlation
    plus groundstate classification for each U value.

    NOTE(review): this function is defined twice in this module with an
    identical body; this later definition shadows the earlier one at import
    time. One of the two copies should be deleted.
    """
    jobdef_file = "scripts/JobDef.json"
    jobdef = JobDef(jobdef_file)
    modelfile = "models/TBcanonical_s.json"
    model = Model(modelfile)
    input_density_file = "scripts/rho.json"
    input_density = InputDensity(input_density_file)

    # Back up the config files — the sweep below rewrites them.
    with BackupFiles(input_density_file, jobdef_file, modelfile):
        jobdef.write_magnetic_correlation()
        jobdef.write_groundstate_classification()
        jobdef.update_hamiltonian("scase")
        jobdef.update_model("TBcanonical_s")
        jobdef.update_input_rho(input_density_file)

        results_dir = jobdef['results_dir']
        mag_corr_file = os.path.join(results_dir, "mag_corr.txt")
        classification_file = os.path.join(results_dir, "classification.txt")
        mag_corr_array_filename = os.path.join(
            results_dir,
            "mag_mom_corr_with_class_scase_global_minimum.csv")
        execution_args = ['pylato/main.py', jobdef_file]

        U_array = np.linspace(0.005, 10, num=100)

        # Collect (correlation, classification) pairs, then split them.
        pairs = []
        for U in U_array:
            input_density.update_U(U)
            pairs.append(calculate_mag_corr_result(
                U, 0, 0, model, mag_corr_file, execution_args,
                classification_file=classification_file))

        mag_corr_array = [pair[0] for pair in pairs]
        classification_array = [pair[1] for pair in pairs]

        save_1D_with_extra_info_raw_data(
            x_vals=U_array,
            results=mag_corr_array,
            extra_info=classification_array,
            labels=["U/|t|", "C_avg", "classification"],
            filename=mag_corr_array_filename)