def main():
    """Convert the eBOSS target FITS files to CSV and COPY them into
    ``catalogdb.ebosstarget_v5``.

    Reads each catalogue with astropy, lowercases the column names, pads the
    std file with the ``has_wise_phot`` column so both files share a schema,
    writes a CSV next to the output directory, and streams it to Postgres
    via ``COPY ... FROM STDIN``.
    """

    path = '/uufs/chpc.utah.edu/common/home/sdss10/sdss5/target/catalogs/ebosstarget/v0005/'  # noqa
    files = ['ebosstarget-v0005-qso.fits', 'ebosstarget-v0005-std.fits']
    out = '/uufs/chpc.utah.edu/common/home/sdss10/sdss5/target/catalogs/'

    for file_ in files:

        print(f'Converting {file_}')

        data = astropy.table.Table.read(path + file_)
        data.meta = {}
        data.rename_columns(data.colnames,
                            list(map(lambda x: x.lower(), data.colnames)))

        # The std catalogue lacks this column; add it so both CSVs match
        # the target table schema.
        if 'std' in file_:
            data['has_wise_phot'] = '0'

        to_csv(data, out + file_ + '.csv', header=True, overwrite=True,
               convert_arrays=True)
        del data  # release the (large) table before the COPY step

        print(f'Copying {file_}')

        cursor = database.cursor()
        # BUG FIX: the CSV handle was opened without ever being closed;
        # use a context manager so it is released even if COPY fails.
        with open(out + file_ + '.csv') as fileobj:
            cursor.copy_expert(
                'COPY catalogdb.ebosstarget_v5 FROM STDIN '
                'WITH DELIMITER \',\' NULL \'\\N\' CSV HEADER;',
                fileobj)
        database.commit()
def main():
    """Convert the Gaia/unWISE AGN FITS catalogue to CSV and COPY it into
    ``catalogdb.gaia_unwise_agn``.

    The column names are lowercased to match the database schema; the CSV
    header row is skipped before streaming, since ``copy_from`` expects
    data rows only.
    """

    file_ = '/uufs/chpc.utah.edu/common/home/sdss10/sdss5/target/catalogs/gaia_unwise_agn/v1/Gaia_unWISE_AGNs.fits'  # noqa

    data = astropy.table.Table.read(file_)
    data.meta = {}
    data.rename_columns(data.colnames,
                        list(map(lambda x: x.lower(), data.colnames)))

    to_csv(data, file_ + '.csv', header=True, overwrite=True)
    del data  # release the (large) table before the COPY step

    cursor = database.cursor()
    # BUG FIX: the CSV handle was opened without ever being closed; a
    # context manager guarantees release even if COPY fails.
    with open(file_ + '.csv') as fileobj:
        fileobj.readline()  # Skip the header row.
        cursor.copy_from(fileobj, 'catalogdb.gaia_unwise_agn', sep=',')
    database.commit()
def main():
    """Convert the SDSS DR16 quasar FITS catalogue to CSV and COPY it into
    ``catalogdb.sdss_dr16_qso``.

    The CSV is written without a header (``header=False``), so the whole
    file is streamed to ``copy_from`` directly. Requires admin privileges
    on the database.
    """

    database.become_admin()

    file_ = os.environ['CATALOGDB_DIR'] + '/sdss_qso/dr16q/DR16Q_v4.fits'

    data = astropy.table.Table.read(file_)
    data.meta = {}
    data.rename_columns(data.colnames,
                        list(map(lambda x: x.lower(), data.colnames)))

    to_csv(data, file_ + '.csv', header=False, delimiter=',')
    del data  # release the (large) table before the COPY step

    cursor = database.cursor()
    # BUG FIX: the CSV handle was opened without ever being closed; a
    # context manager guarantees release even if COPY fails.
    with open(file_ + '.csv') as fileobj:
        cursor.copy_from(fileobj, 'catalogdb.sdss_dr16_qso', sep=',')
    database.commit()
def main():
    """Load the GLIMPSE ``.tbl.gz`` tables into ``catalogdb.glimpse``.

    Each file is read with pandas in one-million-row chunks. Per chunk:
    columns 0 and 1 are concatenated into a single designation column,
    zeros in column 3 are turned into NULLs, and the result is streamed
    to Postgres via ``copy_from``.
    """

    assert database.connected

    # Number of preamble lines to skip, per input file.
    skiprows = {
        'GLMIA.tbl.gz': 9,
        'GLMIIA.tbl.gz': 13,
        'GLM3D_jan2009_Archive.tbl.gz': 13,
        'GLM3DA_l330+02.tbl.gz': 14,
        'GLM3DA_l330-02.tbl.gz': 14,
        'GLM3DA_l335-02.tbl.gz': 14,
    }

    for file_ in glob.glob(os.environ['CATALOGDB_DIR'] + '/GLIMPSE/*.tbl.gz'):

        print(file_)

        nrows = skiprows[os.path.basename(file_)]

        chunks = pandas.read_csv(file_,
                                 skipinitialspace=True,
                                 delimiter=' ',
                                 chunksize=1000000,
                                 header=None,
                                 skiprows=nrows)

        cursor = database.cursor()

        for ii, chunk in enumerate(chunks):

            print(f'Chunk {ii+1} ...')

            # Merge the split designation (columns 0 and 1) into column 0.
            chunk[0] = chunk[0] + chunk[1]
            chunk = chunk.drop(columns=1)

            # A zero in column 3 means "no value"; store it as NULL.
            chunk[3] = chunk[3].replace({0: pandas.NA})

            buffer = io.StringIO()
            chunk.to_csv(buffer, header=False, index=False, na_rep='\\0')
            buffer.seek(0)

            cursor.copy_from(buffer, 'catalogdb.glimpse', sep=',', null='\\0')
            database.commit()
def main():
    """Load the PanSTARRS1 g18 FITS files into ``catalogdb.ps1_g18``.

    Each file is read with astropy, serialised to CSV in memory, and
    streamed to Postgres via ``copy_from`` after discarding the header
    row.
    """

    assert database.connected

    for file_ in glob.glob(os.environ['CATALOGDB_DIR'] + '/PS1/g18/*.fit'):

        print(file_)

        data = astropy.table.Table.read(file_)
        data.meta = {}

        # Serialise to CSV entirely in memory; no temporary file needed.
        buffer = io.StringIO()
        data.write(buffer, format='csv', fast_writer=True)

        buffer.seek(0)
        buffer.readline()  # Discard the header row.

        cursor = database.cursor()
        cursor.copy_from(buffer, 'catalogdb.ps1_g18', sep=',')
        database.commit()
def main():
    """Convert the APOGEE allStarMerge FITS file to CSV and COPY it into
    ``catalogdb.sdss_apogeeAllStarMerge_r13``.

    Column names are lowercased to match the database schema; the CSV
    header row is skipped before streaming. Requires admin privileges on
    the database.
    """

    file_ = os.environ[
        'CATALOGDB_DIR'] + '/sdssApogeeAllStarMerge/r13/allStarMerge-r13-l33-58932beta.fits'  # noqa

    data = astropy.table.Table.read(file_)
    data.meta = {}
    data.rename_columns(data.colnames,
                        list(map(lambda x: x.lower(), data.colnames)))

    to_csv(data, file_ + '.csv', header=True)
    del data  # release the (large) table before the COPY step

    database.become_admin()

    cursor = database.cursor()
    # BUG FIX: the CSV handle was opened without ever being closed; a
    # context manager guarantees release even if COPY fails.
    # NOTE(review): copy_from uses sep='\t' here while the other loaders
    # use ','; presumably to_csv writes tab-delimited output for this
    # call — confirm against the to_csv helper's default delimiter.
    with open(file_ + '.csv') as fileobj:
        fileobj.readline()  # Skip the header row.
        cursor.copy_from(fileobj, 'catalogdb.sdss_apogeeAllStarMerge_r13',
                         sep='\t')
    database.commit()