def create_corr_matrix_sinh(matrix_index, filepath, fileprefix, filesuffix,
                            complex=True, verbose=0):
    """Create a symmetrized correlation-function matrix (sinh-type data).

    Builds filenames of the form
    ``<filepath>/<fileprefix><i><j><filesuffix>`` for every pair (i, j)
    drawn from ``matrix_index``, reads each correlator and inserts it into
    a square operator matrix. The matrix is then symmetrized element-wise:
    C[i, j] <- (C[i, j] + C[j, i]) / 2.

    Args:
        matrix_index: Sequence of operator indices; its length gives the
            dimension of the (square) correlator matrix.
        filepath: Path to the data directory.
        fileprefix: Common prefix of the data file names.
        filesuffix: Common suffix of the data file names.
        complex: If False and the data are complex, only the real part is
            returned. (The name shadows the builtin ``complex``; kept for
            backward compatibility with existing callers.)
        verbose: Currently unused; kept for interface compatibility.

    Returns:
        A numpy array with four axes (sample, time, row, column) holding
        the symmetrized correlation-function matrix.
    """
    # Read the header of the (0, 0) element to learn the common data layout
    # (number of samples, time extent, and the real/complex flag).
    fname = "%s/%s%i%i%s" % (filepath, fileprefix, 0, 0, filesuffix)
    datashape = io.read_header(fname)
    nsample = datashape[0]
    Lt = datashape[1]
    # Header flag: 1 means complex data, anything else is treated as real.
    # (Renamed from `type`, which shadowed the builtin.)
    dtype_flag = datashape[2]
    dim = len(matrix_index)
    dtype = "complex" if dtype_flag == 1 else "float"
    data_matrix = np.zeros((nsample, Lt, dim, dim), dtype=dtype)
    for i in range(dim):
        for j in range(dim):
            fname = "%s/%s%i%i%s" % (filepath, fileprefix, matrix_index[i],
                                     matrix_index[j], filesuffix)
            if dtype_flag == 0:
                # Real data: drop any spurious imaginary part on read.
                data = np.real(io.read_data_ascii(fname))
            else:
                data = io.read_data_ascii(fname)
            if data.shape[0] != nsample or data.shape[1] != Lt:
                # NOTE(review): incompatible files are only warned about and
                # skipped, leaving zeros at (i, j). This mirrors the original
                # best-effort behavior; consider aborting like the cosh
                # variant does.
                print("incompatible correlators")
            else:
                data_matrix[:, :, i, j] = data
    # Symmetrize the operator matrix at every sample and time slice.
    corr_mat_symm = np.zeros_like(data_matrix)
    for _s in range(nsample):
        for _t in range(Lt):
            corr_mat_symm[_s, _t] = (data_matrix[_s, _t]
                                     + data_matrix[_s, _t].T) / 2.
    if not complex and dtype_flag == 1:
        return np.real(corr_mat_symm)
    return corr_mat_symm
def create_corr_matrix_cosh(filename_matrix, complex=False):
    """Create a symmetrized correlation-function matrix (cosh-type data).

    Reads one correlator per entry of ``filename_matrix``, symmetrizes each
    correlator in time via ``bootstrap.sym`` and inserts it into a square
    operator matrix. The operator matrix is then symmetrized element-wise:
    C[i, j] <- (C[i, j] + C[j, i]) / 2.

    Args:
        filename_matrix: Square 2d array of file names; entry (i, j) names
            the file holding correlator C_ij.
        complex: If False and the data are complex, only the real part is
            returned. (The name shadows the builtin ``complex``; kept for
            backward compatibility with existing callers.)

    Returns:
        A numpy array with four axes (sample, time, row, column) holding
        the symmetrized correlation-function matrix.

    Note:
        Terminates the process (exit code -1) if ``filename_matrix`` is not
        square or if any correlator has an incompatible shape.
    """
    matrix_dim_col = filename_matrix.shape[0]
    matrix_dim_row = filename_matrix.shape[1]
    if matrix_dim_col != matrix_dim_row:
        print("must be a square matrix\n")
        os.sys.exit(-1)
    print("dimension of the matrix: %i\n" % matrix_dim_col)
    # Read the header of the (0, 0) element to learn the common data layout.
    datashape = io.read_header(filename_matrix[0, 0])
    nsample = datashape[0]
    Lt = datashape[1]
    # Header flag: 1 means complex data, anything else is treated as real.
    # (Renamed from `type`, which shadowed the builtin.)
    dtype_flag = datashape[2]
    dtype = "complex" if dtype_flag == 1 else "float"
    data_matrix = np.zeros((nsample, Lt, matrix_dim_col, matrix_dim_col),
                           dtype=dtype)
    for i in range(matrix_dim_col):
        for j in range(matrix_dim_col):
            data = io.read_data_ascii(filename_matrix[i, j])
            if data.shape[0] != nsample or data.shape[1] != Lt:
                print("incompatible correlators")
                os.sys.exit(-1)
            else:
                # Symmetrize the correlator in time before insertion.
                data_sym = bootstrap.sym(data)
                data_matrix[:, :, i, j] = data_sym
    # Symmetrize the operator matrix at every sample and time slice.
    corr_mat_symm = np.zeros_like(data_matrix)
    for _s in range(nsample):
        for _t in range(Lt):
            corr_mat_symm[_s, _t] = (data_matrix[_s, _t]
                                     + data_matrix[_s, _t].T) / 2.
    if not complex and dtype_flag == 1:
        return np.real(corr_mat_symm)
    return corr_mat_symm
def create_corr_matrix(nbsamples, filepath, filestring, filesuffix=".dat",
                       column=(1, ), verbose=0):
    """Create a bootstrapped, symmetrized correlation-function matrix.

    Reads ``len(filestring)`` correlators (which must form a perfect square,
    filled row-major), bootstraps each via ``bootstrap.sym_and_boot`` and
    symmetrizes the resulting operator matrix element-wise:
    C[i, j] <- (C[i, j] + C[j, i]) / 2.

    WARNING: A maximum matrix size of 20x20 operators is implemented.

    Args:
        nbsamples: Number of bootstrap samples created.
        filepath: The path to the data, including the file prefix.
        filestring: A list of the changing parts of the filenames. The
            length of the list gives the (squared) size of the matrix.
        filesuffix: The suffix of the data files.
        column: The column of the input file to be read. The same column is
            read from every file!
        verbose: Changes the amount of information printed.

    Returns:
        A numpy array with four axes (sample, time, row, column) holding
        the symmetrized correlation-function matrix; the time axis has
        extent (T/2)+1 where T is the original correlator length.

    Note:
        Terminates the process (exit code -4) if the matrix size cannot be
        determined from ``len(filestring)``.
    """
    # Determine the matrix dimension from the number of filename parts.
    # BUGFIX: range(1, 20) capped the search at 19x19 although the docstring
    # promises up to 20x20 — the upper bound must be 21 (exclusive).
    _nbops = 0
    for _i in range(1, 21):
        if len(filestring) == _i * _i:
            _nbops = _i
            break
    # If the size could not be determined then abort.
    if _nbops == 0:
        print("ERROR: size of the correlation matrix could not be determined")
        os.sys.exit(-4)
    # Treat the first element differently so we can create an array of the
    # correct size. This also lets us check that the other files have the
    # same number of configurations and the same time extent.
    _name = "".join((filepath, filestring[0], filesuffix))
    if verbose:
        print("filename " + _name)
    _data1 = io.read_data_ascii(_name, column, verbose)
    _nbcfg = _data1.shape[0]
    _T = _data1.shape[1]
    _boot1 = bootstrap.sym_and_boot(_data1, nbsamples)
    # Create the correlation-function matrix and place the first correlator.
    corr_mat = np.zeros(_boot1.shape[0:2] + (_nbops, ) * 2)
    corr_mat[:, :, 0, 0] = _boot1
    # Read all remaining correlators, bootstrap them and insert them at
    # their row-major position in the matrix.
    for _nb, _sub in enumerate(filestring[1:], start=1):
        _name = "".join((filepath, _sub, filesuffix))
        if verbose:
            print("filename " + _name)
        _data = io.read_data_ascii(_name, column)
        _nbcfg1 = _data.shape[0]
        _T1 = _data.shape[1]
        # Check that the size matches the first operator.
        if _nbcfg != _nbcfg1 or _T != _T1:
            # NOTE(review): a mismatching file is only warned about and
            # skipped, leaving zeros at its matrix position (original
            # behavior preserved).
            print("ERROR while reading file " + _name)
            print("\tnumber of configurations or time extent is wrong")
        else:
            _boot = bootstrap.sym_and_boot(_data, nbsamples)
            _row, _col = divmod(_nb, _nbops)
            corr_mat[:, :, _row, _col] = _boot
    # Symmetrize the operator matrix at every sample and (halved) time slice.
    corr_mat_symm = np.zeros_like(corr_mat)
    for _s in range(nbsamples):
        for _t in range(int(_T / 2) + 1):
            corr_mat_symm[_s, _t] = (corr_mat[_s, _t]
                                     + corr_mat[_s, _t].T) / 2.
    return corr_mat_symm