def extract_complete_matrix(MminL,
                            MmaxL,
                            edge_states,
                            N,
                            lz_val,
                            matrix_label,
                            matrix_name,
                            run_on_cluster=1):
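    """Return the complete matrix 'matrix_name' for the given system as a
    scipy.sparse CSR matrix of dimension equal to the Hilbert-space size.

    The matrix is read from file if it exists; otherwise it is built locally
    (only when run_on_cluster is falsy, else 0 is returned and a bug is
    reported).
    """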
    args = [MminL, MmaxL, edge_states, N, lz_val, matrix_label]
    matrix_filename = FM.filename_complete_matrix(matrix_name, args)
    hilbert_space_dim = GA.size_of_hilbert_space(MminL - edge_states,
                                                 MmaxL + edge_states, N,
                                                 lz_val)
    if EC.does_file_really_exist(matrix_filename):
        row, col, mat_elements = FM.read_complete_matrix(matrix_name, args)
    else:
        if run_on_cluster:
            print("matrix should have been created by now. you have a bug!")
            return 0
        row, col, mat_elements = create_complete_matrix(
            MminL, MmaxL, edge_states, N, lz_val, matrix_label, matrix_name)

    matrix = sparse.coo_matrix((mat_elements, (row, col)),
                               shape=(hilbert_space_dim, hilbert_space_dim),
                               dtype=complex)
    matrix = matrix.tocsr()
    return matrix
def calc_hamiltonian_pieces(MminL,
                            MmaxL,
                            edge_states,
                            N,
                            lz_val,
                            hamiltonian_labels,
                            params_filename,
                            send_jobs=True):
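    """Ensure every Hamiltonian term whose label is not 'None' has a
    complete-matrix file, submitting piece-creation cluster jobs for missing
    terms when send_jobs is True, and throttling while the queue is full.

    Returns the list of complete-matrix filenames for the requested terms.
    """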
    params = ParametersAnnulus(params_filename)
    hamiltonian_terms_names = [
        'interactions', 'confining_potential', 'SC_term', 'FM_term'
    ]
    all_matrices_filenames = []
    for i in range(len(hamiltonian_labels)):
        if hamiltonian_labels[i] != 'None':
            while (EC.how_many_jobs() + params.speeding_parameter >
                   params.max_jobs_in_queue_S):
                print("too many jobs in queue!")
                sleep(100)
            args = [
                MminL, MmaxL, edge_states, N, lz_val, hamiltonian_labels[i]
            ]
            matrix_filename = FM.filename_complete_matrix(
                hamiltonian_terms_names[i], args)
            all_matrices_filenames.append(matrix_filename)
            if not EC.does_file_really_exist(matrix_filename) and send_jobs:
                create_matrix_pieces(MminL, MmaxL, edge_states, N, lz_val,
                                     hamiltonian_labels[i],
                                     hamiltonian_terms_names[i],
                                     params_filename)
    return all_matrices_filenames
def calc_lz_vals_for_spectrum_from_scratch(params_filename,
                                           lz_val_external=None,
                                           parameters_external=None):
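    """Compute the total angular momentum (Lz) values of the low-lying
    spectrum, starting from scratch if needed.

    Builds the spectrum and the 'total_angular_momentum' matrix (blocking
    until their files exist), then submits a cluster job that performs the
    'spectrum_lz_calc' step. Returns the filename of the Lz spectrum.
    """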
    params = ParametersAnnulus(params_filename)
    if lz_val_external is not None:
        lz_val = lz_val_external
    else:
        lz_val = params.lz_val

    if parameters_external is not None:
        parameters = parameters_external
    else:
        parameters = params.h_parameters

    filename_lz_spectrum = FM.filename_spectrum_lz_total_vals(
        params.MminLaughlin, params.MmaxLaughlin, params.edge_states, params.N,
        params.hamiltonian_labels, parameters)
    if EC.does_file_really_exist(filename_lz_spectrum):
        print("already created " + filename_lz_spectrum)
        return filename_lz_spectrum

    filename_spectrum = calc_low_lying_spectrum_from_scratch(
        params_filename, lz_val_external, parameters_external)
    while not EC.does_file_really_exist(filename_spectrum):
        sleep(60)
    matrix_name = 'total_angular_momentum'
    matrix_label = params.total_ang_momentum_label
    args = [
        params.MminLaughlin, params.MmaxLaughlin, params.edge_states, params.N,
        lz_val, matrix_label
    ]
    AMAS.create_matrix_pieces(params.MminLaughlin, params.MmaxLaughlin,
                              params.edge_states, params.N, lz_val,
                              matrix_label, matrix_name, params_filename)

    filename_lz_matrix = FM.filename_complete_matrix(matrix_name, args)
    while not EC.does_file_really_exist(filename_lz_matrix):
        sleep(60)

    queue = params.lz_spectrum_queue
    mem, vmem = JS.get_mem_vmem_vals(queue, params.lz_spectrum_mem,
                                     params.lz_spectrum_vmem)
    args = [params_filename]
    if lz_val_external is not None:
        args = args + [lz_val_external]
    if parameters_external is not None:
        args = args + parameters_external
    args = args + ['spectrum_lz_calc']
    jobname = 'calc_lz_spectrum_' + '_'.join([str(a) for a in args])
    jobname = EC.make_job_name_short_again(jobname)
    JS.send_job(scriptNames.multiFile,
                pbs_filename=jobname,
                script_args=args,
                queue=queue,
                mem=mem,
                vmem=vmem)

    return filename_lz_spectrum
def delete_excess_pieces(MminL, MmaxL, edge_states, N, lz_val, matrix_label,
                         matrix_name):
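    """Remove the matrix-pieces directory once the complete matrix file
    exists; otherwise leave the pieces in place."""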
    args = [MminL, MmaxL, edge_states, N, lz_val, matrix_label]
    directory = FM.filename_matrix_pieces_directory(matrix_name, args)
    filename_full = FM.filename_complete_matrix(matrix_name, args)
    if EC.does_file_really_exist(filename_full):
        shutil.rmtree(directory)
        print("full matrix is present so pieces files were deleted")
        return 0
    print("files were NOT deleted since full matrix file is NOT present")
    return 0
def extract_Hamiltonian(MminL,
                        MmaxL,
                        edge_states,
                        N,
                        lz_val,
                        hamiltonian_labels,
                        parameters,
                        run_on_cluster=1):
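    """Assemble the many-body Hamiltonian as the weighted sum of its terms,
    parameters[i] * matrix(hamiltonian_labels[i]), skipping terms labeled
    'None'. Returns a sparse CSR matrix, or 0 when run_on_cluster is set and
    a required term file is missing."""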
    # hamiltonian_labels = [interactions_label, confining_potential_label, SC_label, FM_label]
    # parameters = [interactions_parameter, confining_potential_parameter, SC_parameter, FM_parameter]

    hamiltonian_terms_names = [
        'interactions', 'confining_potential', 'SC_term', 'FM_term'
    ]

    Mmin = MminL - edge_states
    Mmax = MmaxL + edge_states
    hilbert_space_dim = GA.size_of_hilbert_space(Mmin, Mmax, N, lz_val)

    hamiltonian = sparse.csr_matrix((hilbert_space_dim, hilbert_space_dim),
                                    dtype=complex)

    for i in range(len(hamiltonian_labels)):
        if hamiltonian_labels[i] != 'None':
            args = [
                MminL, MmaxL, edge_states, N, lz_val, hamiltonian_labels[i]
            ]
            filename_ham_term = FM.filename_complete_matrix(
                hamiltonian_terms_names[i], args)
            if EC.does_file_really_exist(filename_ham_term):
                row, col, mat_elements = FM.read_complete_matrix(
                    hamiltonian_terms_names[i], args)
            else:
                if run_on_cluster:
                    print(
                        "matrix should have been created by now. you have a bug!"
                    )
                    return 0
                row, col, mat_elements = create_complete_matrix(
                    MminL, MmaxL, edge_states, N, lz_val,
                    hamiltonian_labels[i], hamiltonian_terms_names[i])

            mat_elements = parameters[i] * mat_elements

            ham_term = sparse.coo_matrix(
                (mat_elements, (row, col)),
                shape=(hilbert_space_dim, hilbert_space_dim),
                dtype=complex)
            ham_term = ham_term.tocsr()

            hamiltonian = hamiltonian + ham_term
    return hamiltonian
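# Minimal usage sketch (not from the original project): the labels and
# coupling constants below are hypothetical placeholders. The Hamiltonian is
# assembled locally (run_on_cluster=0) and diagonalized with scipy's sparse
# Hermitian eigensolver.
#
#     from scipy.sparse.linalg import eigsh
#
#     labels = ['toy_flux_0.5', 'linear_confinement', 'None', 'None']
#     coeffs = [1.0, 0.1, 0.0, 0.0]
#     H = extract_Hamiltonian(0, 9, 2, 4, 'not_fixed', labels, coeffs,
#                             run_on_cluster=0)
#     energies, vectors = eigsh(H, k=6, which='SA')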
def create_complete_matrix(MminL,
                           MmaxL,
                           edge_states,
                           N,
                           lz_val,
                           matrix_label,
                           matrix_name,
                           output='matrix'):
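    """Build the complete matrix 'matrix_name' for the given system: a
    two-particle interaction matrix lifted to the N-particle space, or a
    bilinear single-particle operator. The result is written to file and its
    COO data (row, col, mat_elements) is returned when output is truthy.
    """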
    args = [MminL, MmaxL, edge_states, N, lz_val, matrix_label]
    filename_matrix = FM.filename_complete_matrix(matrix_name, args)
    if EC.does_file_really_exist(filename_matrix):
        if output:
            row, col, mat_elements = FM.read_complete_matrix(matrix_name, args)
            return row, col, mat_elements
        return 0

    Mmin = MminL - edge_states
    Mmax = MmaxL + edge_states
    if matrix_name == 'interactions':
        if matrix_label.startswith('toy_flux'):
            potential_type = 'toy_flux'
            magnetic_flux = float(matrix_label[9:])
            H_2_particles = LCA.extract_two_particle_hamiltonian(
                Mmin, Mmax, potential_type, magnetic_flux)
        else:
            potential_type = matrix_label
            H_2_particles = LCA.extract_two_particle_hamiltonian(
                Mmin, Mmax, potential_type)

        if lz_val == 'not_fixed':
            row, col, mat_elements = LCA.multi_particle_interaction_energy_matrix(
                Mmin, Mmax, N, H_2_particles)
        else:
            row, col, mat_elements = LCA.multi_particle_interaction_energy_matrix_const_lz(
                Mmin, Mmax, N, H_2_particles, lz_val)

    else:
        single_particle_operator = SPA.create_single_particle_operator(
            MminL, MmaxL, edge_states, matrix_label, matrix_name)
        if lz_val == 'not_fixed':
            row, col, mat_elements = GA.bilinear_operator_N_particle_subspace(
                Mmin, Mmax, N, single_particle_operator)
        else:
            row, col, mat_elements = GA.bilinear_operator_N_particle_subspace_fixed_lz(
                Mmin, Mmax, N, single_particle_operator, lz_val)
    FM.write_complete_matrix(matrix_name, args, row, col, mat_elements)
    if output:
        return row, col, mat_elements
    return 0
def unite_and_write_full_matrix(MminL, MmaxL, edge_states, N, lz_val,
                                matrix_label, matrix_name, params_filename):
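    """Submit a cluster job that unites the matrix pieces into a single
    complete-matrix file, unless that file already exists."""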
    params = ParametersAnnulus(params_filename)
    queue = params.write_unite_matrix_queue
    mem, vmem = JS.get_mem_vmem_vals(queue, params.write_unite_matrix_mem,
                                     params.write_unite_matrix_vmem)
    args = [MminL, MmaxL, edge_states, N, lz_val, matrix_label]
    filename_complete_matrix = FM.filename_complete_matrix(matrix_name, args)
    if EC.does_file_really_exist(filename_complete_matrix):
        print("matrix already written")
        return 0

    args = [matrix_name, MminL, MmaxL, edge_states, N, lz_val, matrix_label]
    filename = 'writing_matrix_' + "_".join([str(a) for a in args])
    filename = EC.make_job_name_short_again(filename)
    JS.send_job(scriptNames.uniteMatrixPieces,
                queue=queue,
                mem=mem,
                vmem=vmem,
                pbs_filename=filename,
                script_args=args)
    return 0
import os
import sys

# 'docs' is assumed to be the project's configuration dictionary, loaded
# earlier in the original script (not shown here).
if os.getcwd()[0] == '/':
    working_dir = docs['directories']['working_dir']
    sys.path.append(working_dir)

from AnnulusFQH import MatricesAndSpectra as AMAS
from DataManaging import fileManaging as FM
from ATLASClusterInterface import errorCorrectionsAndTests as EC, JobSender as JS

JS.limit_num_threads()

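# Command-line arguments:
# matrix_name MminL MmaxL edge_states N lz_val matrix_label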
matrix_name = sys.argv[1]
MminL = int(sys.argv[2])
MmaxL = int(sys.argv[3])
edge_states = int(sys.argv[4])
N = int(sys.argv[5])
lz_val = sys.argv[6]
matrix_label = sys.argv[7]

if lz_val != 'not_fixed':
    lz_val = int(float(lz_val))

args = [MminL, MmaxL, edge_states, N, lz_val, matrix_label]
filename_complete_matrix = FM.filename_complete_matrix(matrix_name, args)
if EC.does_file_really_exist(filename_complete_matrix):
    print("matrix already written")
else:
    AMAS.unite_matrix_pieces(MminL, MmaxL, edge_states, N, lz_val,
                             matrix_label, matrix_name)
    AMAS.delete_excess_pieces(MminL, MmaxL, edge_states, N, lz_val,
                              matrix_label, matrix_name)
def create_matrix_pieces(MminL, MmaxL, edge_states, N, lz_val, matrix_label,
                         matrix_name, params_filename):
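    """Submit cluster jobs that build the matrix 'matrix_name', either in a
    single job (Hilbert-space dimension below 10000) or as
    params.speeding_parameter slices plus a remainder slice. If no job needs
    to be sent, the unite-and-write step is triggered directly."""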
    params = ParametersAnnulus(params_filename)
    common_args = [MminL, MmaxL, edge_states, N, lz_val, matrix_label]

    filename_complete_matrix = FM.filename_complete_matrix(
        matrix_name, common_args)
    if EC.does_file_really_exist(filename_complete_matrix):
        print("already created " + filename_complete_matrix)
        return 0

    queue = params.matrix_pieces_queue
    mem, vmem = JS.get_mem_vmem_vals(queue, params.matrix_pieces_mem,
                                     params.matrix_pieces_vmem)

    Mmin = MminL - edge_states
    Mmax = MmaxL + edge_states
    hilbert_space_size = GA.size_of_hilbert_space(Mmin, Mmax, N, lz_val)
    if hilbert_space_size < 10000:
        args = [matrix_name] + common_args
        str_args = [str(a) for a in args]
        str_args = '-'.join(str_args)
        filename_job = str_args
        JS.send_job(scriptNames.fullMBMatrix,
                    queue,
                    mem=mem,
                    vmem=vmem,
                    script_args=args,
                    pbs_filename=filename_job)
        return 0

    slice_size = int(hilbert_space_size / params.speeding_parameter)
    last_slice_size = hilbert_space_size - params.speeding_parameter * slice_size

    slice_start = 0
    slice_end = slice_size
    count_num_jobs_sent = 0
    for i in range(params.speeding_parameter):
        filename_args = [MminL, MmaxL, edge_states, N, lz_val, matrix_label]
        filename_matrix_piece = FM.filename_matrix_piece(
            matrix_name, filename_args, [slice_start, slice_end - 1])
        if EC.does_file_really_exist(filename_matrix_piece):
            print("matrix piece " + filename_matrix_piece + " already exists!")
        else:
            count_num_jobs_sent = count_num_jobs_sent + 1
            args = [matrix_name] + common_args + [slice_start, slice_end] + [
                params.speeding_parameter, params_filename
            ]
            str_args = [str(a) for a in args]
            str_args = '-'.join(str_args[:-1])
            filename_job = str_args
            JS.send_job(scriptNames.piecesMBMatrix,
                        queue,
                        mem=mem,
                        vmem=vmem,
                        script_args=args,
                        pbs_filename=filename_job)
            sleep(2)
        slice_start = slice_start + slice_size
        slice_end = slice_end + slice_size
    # handle the remainder slice when hilbert_space_size is not divisible by speeding_parameter
    if last_slice_size > 0:
        slice_end = slice_start + last_slice_size
        filename_args = [MminL, MmaxL, edge_states, N, lz_val, matrix_label]
        filename_matrix_piece = FM.filename_matrix_piece(
            matrix_name, filename_args, [slice_start, slice_end - 1])
        if EC.does_file_really_exist(filename_matrix_piece):
            print("matrix piece " + filename_matrix_piece + " already exists!")
        else:
            count_num_jobs_sent = count_num_jobs_sent + 1
            args = [matrix_name] + common_args + [slice_start, slice_end] + [
                params.speeding_parameter, params_filename
            ]
            str_args = [str(a) for a in args]
            str_args = '-'.join(str_args[:-1])
            filename = str_args
            JS.send_job(scriptNames.piecesMBMatrix,
                        queue,
                        mem=mem,
                        vmem=vmem,
                        script_args=args,
                        pbs_filename=filename)

    if count_num_jobs_sent == 0:
        AMASW.unite_and_write_full_matrix(MminL, MmaxL, edge_states, N, lz_val,
                                          matrix_label, matrix_name,
                                          params_filename)
    return 0