############################# Fourier_Quad Option ################################################

# assumed preamble, mirroring the other examples in this collection
import os
import time
import h5py
from sys import path, argv
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
path.append("%s/work/mylib/" % my_home)
import tool_box
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()

cmd = argv[1]
area_num = 4   # CFHTLenS has four survey areas, w1..w4

logger = tool_box.get_logger("%s/work/test/log/%d.dat" % (my_home, rank))

############################# Fourier_Quad data collection #######################################
# combine the data of each field into one big catalog
if cmd == "collect":

    t1 = time.time()

    h5f_path = fourier_cata_path + "fourier_cata.hdf5"
    if rank == 0:
        h5f = h5py.File(h5f_path, "w")
        h5f.close()
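    # rank 0 creates a fresh, empty file; the later writes presumably reopen
    # it to add one dataset ("w_i") per area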

    c_dicts, c_fields = tool_box.field_dict(cfht_cata_path + "nname.dat")
    f_dicts, f_fields = tool_box.field_dict(fourier_cata_path + "nname.dat")

    # choose the fields that exist in both catalogs
    pre_fields = []
    for field in c_fields:
        if field in f_fields:
            pre_fields.append(field)

    # loop over the areas;
    # the catalog of each area will be stored in "w_i"
    num = []
    for area_id in range(1, area_num + 1):
        # distribute the files
        field_tar = []
        for field in pre_fields:
            if "w%d" % area_id in field:
                field_tar.append(field)

############################# Example #2 #########################################################

import os
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
from sys import path
path.append("%s/work/mylib/" % my_home)
import tool_box
from subprocess import Popen
from mpi4py import MPI
import time

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()

ts = time.time()

data_path = "/mw/w1234/original/"
nm_path = data_path + "nname.dat"
fields = tool_box.field_dict(nm_path)[1]

missions = tool_box.allot(fields, cpus)[rank]

store_path = "/mnt/ddnfs/data_users/hkli/CFHT/catalog/"

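# copy each field's result directories to the remote host with scp;
# Popen(...).wait() keeps the transfers sequential within each rank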
for dirs in ["result", "result_int", "result_ext"]:
    for field in missions:
        try:
            cmd = "scp -r /mw/w1234/original/%s/%s/ " \
                  "[email protected]:/mnt/ddnfs/data_users/hkli/CFHT/catalog/%s/"\
                  %(field, dirs, field)
            # print(cmd)
            a = Popen(cmd, shell=True)
            a.wait()
        except Exception:
            # skip fields that fail to copy and continue with the rest
            pass

############################# Example #3 #########################################################

# assumed preamble, mirroring the other examples in this collection
import os
from sys import path
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
path.append("%s/work/mylib/" % my_home)
import tool_box
from mpi4py import MPI

# The new Fourier_Quad catalog differs from the old version!!!
# collect: collect the data from the files of each field. It creates "fourier_cata.hdf5" in
#          the parent directory of the one containing the field catalogs.
#          If the catalog file doesn't exist, run this first!
#          It will add the redshift parameters from the CFHT catalog into the final catalog.
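#
# a minimal sketch of the stacking idea, with hypothetical names and an assumed
# ASCII layout (illustration only, not this script's actual reader):
#
#   def stack_field_catalogs(dat_paths):
#       # row-stack every field's .dat catalog into one big array
#       return numpy.row_stack([numpy.loadtxt(p) for p in dat_paths])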


comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()

data_path = "/mnt/perc/hklee/CFHT/catalog/fourier_cata_new/"
raw_cata_path = data_path + "raw_cata_new/"

dicts, fields = tool_box.field_dict(data_path + "nname.dat")

my_field = tool_box.allot(fields, cpus)[rank]

chip_num = 36

for field_nm in my_field:
    field_path = raw_cata_path + "%s/"%field_nm
    files = os.listdir(field_path)

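    # chip files are assumed to be named "<exposure>p<chip>.dat";
    # the text before the first "p" identifies the exposure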
    chip_exps = []
    for nm in files:
        if ".dat" in nm:
            exp_nm = nm.split("p")[0]
            if exp_nm not in chip_exps:
                chip_exps.append(exp_nm)

############################# Example #4 #########################################################

# assumed preamble, mirroring the other examples in this collection
import os
from sys import path
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
path.append("%s/work/mylib/" % my_home)
import tool_box
from Fourier_Quad import Fourier_Quad   # assumed import path for the Fourier_Quad class
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()

# assumed path, matching the envs file read in Example #9
with open("/home/hkli/work/envs/envs.dat", "r") as f:
    contents = f.readlines()
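
# envs.dat is assumed to hold "key = value" lines, e.g.
#   cfht_data_path = /mnt/.../CFHT/data/
#   cfht_res_path  = /mnt/.../CFHT/result/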
for path in contents:
    if "cfht_data_path" in path:
        data_path = path.split("=")[1].strip()    # strip() drops spaces and the trailing newline
    elif "cfht_res_path" in path:
        result_path = path.split("=")[1].strip()
    elif "cfht_pic_path" in path:
        pic_path = path.split("=")[1].strip()
    elif "cfht_field_path" in path:
        field_path = path.split("=")[1].strip()

size = 48
fq = Fourier_Quad(size, 123)

nname_path = data_path + "nname.dat"
field_dict, fields = tool_box.field_dict(nname_path)
r_fields = tool_box.allot(fields, cpus)[rank]

# for the stacking process
count = 0

# the location of each galaxy is labeled by the field_label and the exposure_label
# counting from the left: the first, third and fifth figures encode the digits of "w_m(p)_(m)p_",
# the second and fourth figures encode "m" or "p" (1=m, 0=p),
# and the last two figures encode the chip number
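#
# a hedged sketch of that encoding (hypothetical helper, not necessarily
# identical to tool_box.cfht_label), e.g. field "w1m2p3" with chip 5 -> 1120305:
def encode_field_label(field_nm, chip_no):
    area = field_nm[1]                            # the digit after "w"
    sign_1 = "1" if field_nm[2] == "m" else "0"   # 1=m, 0=p
    off_1 = field_nm[3]
    sign_2 = "1" if field_nm[4] == "m" else "0"   # 1=m, 0=p
    off_2 = field_nm[5]
    return int(area + sign_1 + off_1 + sign_2 + off_2 + "%02d" % int(chip_no))
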
for field in r_fields:
    expos = list(field_dict[field].keys())
    field_label = tool_box.cfht_label(field)
    for expo in expos:
        expo_label = int(expo.split("p")[0])
        chips = field_dict[field][expo]

############################# Example #5 #########################################################

            #                     if os.path.exists(chip_path):
            #                         all_files.append(chip_nm + "\n")
            #                         # print(chip_nm)
            #                     else:
            #                         print("Can't find %s"%chip_path)

        with open(total_path + "/cat_inform/nname_field_chips.dat", "w") as f:
            f.writelines(all_files)
        with open(total_path + "/cat_inform/nname_field.dat", "w") as f:
            f.writelines(fields)
        print(len(fields))

elif mode == "hdf5_cata":
    # convert the .dat to .hdf5

    fields, field_name = tool_box.field_dict(
        total_path + "/cat_inform/nname_field_chips.dat")
    # if rank == 0:
    #     print(len(field_name))

    field_name_sub = tool_box.alloc(field_name, cpus, "seq")[rank]

    fields_expo_sub_avail = []
    fields_expo_sub_raw_avail = []
    fields_sub_avail = []
    fields_raw_sub_avail = []
    exception_sub = []
    for fns in field_name_sub:
        # read the field data
        field_src_path = total_path + "/%s/result" % fns

        try:

############################# Example #6 #########################################################

# assumed preamble, mirroring the other examples in this collection
import os
import time
import h5py
from sys import path
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
path.append("%s/work/mylib/" % my_home)
import tool_box
from mpi4py import MPI
# stack the CFHT shear catalog files into one big .hdf5 file.

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()

ts = time.time()

envs_path = "%s/work/envs/envs.dat" % my_home
get_contents = [['cfht', "cfht_path_catalog", '1'],
                ['cfht', "cfht_path_result", '1']]
path_items = tool_box.config(envs_path, ['get', 'get'], get_contents)
total_cata_path, result_path = path_items
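# tool_box.config presumably looks each [section, key] entry up in envs.dat
# and returns the corresponding values in order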

field_dict, fields = tool_box.field_dict(total_cata_path +
                                         "cfht_cata/nname.dat")

area_num = 4
if rank == 0:
    h5f = h5py.File(total_cata_path + "cfht_cata/cata.hdf5", "w")
    h5f.close()
    gal_count = 0

num_in_field = []

for area_id in range(1, 1 + area_num):
    field_paths = []
    field_count = 0
    for field in fields:
        if "w%d" % area_id in field:
            field_path = total_cata_path + "cfht_cata/%s.dat" % field

############################# Example #7 #########################################################

# assumed preamble, mirroring the other examples in this collection
import os
from sys import path, argv
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
path.append("%s/work/mylib/" % my_home)
import tool_box
import numpy
from mpi4py import MPI
import h5py
from astropy.io import fits
import matplotlib.pyplot as plt

cmd = argv[1]

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()
# cpus = 1
# rank = 0
log_path = "./log_%d.dat" % rank
logger = tool_box.get_logger(log_path)

nm_path = "/mw/w1234/original/nname.dat"
all_expos, all_fields = tool_box.field_dict(nm_path)

fields = tool_box.allot(all_fields, cpus)[rank]

chip_data_path = "/mw/w1234/original/"
result_path = "/lmc/cfht/para_fit/"

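# pixel-index grids for one chip (4644 x 2112 presumably matches the CFHT
# chip size); the flattened copies give every pixel a running tag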
my, mx = numpy.mgrid[0:4644, 0:2112]
myf = my.flatten()
mxf = mx.flatten()
tags = numpy.arange(0, len(myf))
if cmd == "files":
    if rank == 0:
        for field_name in all_fields:
            pic_field_path = result_path + "pic/" + '%s/' % field_name
            if not os.path.exists(pic_field_path):
                # create the per-field picture directory if it is missing
                os.makedirs(pic_field_path)

############################# Example #8 #########################################################

import os
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
from sys import path
path.append('%s/work/mylib/' % my_home)
import tool_box
from mpi4py import MPI
import numpy
import time

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()

dfy_path = "/home/dfy/data/CFHTLens/split-point/"
my_path = "/mnt/perc/hklee/CFHT/catalog/cfht_cata/field_dat/"

pre_fields = tool_box.field_dict(
    "/mnt/perc/hklee/CFHT/catalog/cfht_cata/nname.dat")[1]

my_field = tool_box.allot(pre_fields, cpus)[rank]

nms = ["w1p4p0", "w1p3p0"]
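# these two fields are assumed to carry "p" in this catalog but "m" in the
# dfy catalog, hence the renaming below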

for field_nm in my_field:

    my_cata = numpy.loadtxt(my_path + field_nm + ".dat")

    if field_nm == "w1p4p0":
        dfy_field_nm = "w1p4m0"
    elif field_nm == "w1p3p0":
        dfy_field_nm = "w1p3m0"
    else:
        dfy_field_nm = field_nm

############################# Example #9 #########################################################

# assumed preamble, mirroring the other examples in this collection
import os
from sys import path, argv
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
path.append("%s/work/mylib/" % my_home)
import tool_box
import numpy
from Fourier_Quad import Fourier_Quad   # assumed import path for the Fourier_Quad class

data_name = argv[1]
g1num, g2num, bin_num = int(argv[2]), int(argv[3]), int(argv[4])
thresh = float(argv[5])

with open("/home/hkli/work/envs/envs.dat", "r") as f:
    contents = f.readlines()
for path in contents:
    if "cfht_data_path" in path:
        total_path = path.split("=")[1].strip()   # strip() drops spaces and the trailing newline
    elif "cfht_res_path" in path:
        result_path = path.split("=")[1].strip()

field_path = result_path + "field/"

nname_path = total_path + "nname.dat"
all_fields = tool_box.field_dict(nname_path)[1]

filter_path = result_path + "field/filtered.dat"
filter_exist = os.path.exists(filter_path)

fq = Fourier_Quad(48, 123)
cache_path = result_path + data_name
print(cache_path)
arr = numpy.load(cache_path)["arr_1"]
est_g1 = arr[:g1num, 0]
fd_g1 = arr[:g1num, 6]
est_g2 = arr[:g2num, 3]
fd_g2 = arr[:g2num, 7]

dg1 = fd_g1[1] - fd_g1[0]
dg2 = fd_g2[1] - fd_g2[0]

############################# Example #10 ########################################################

# assumed preamble, mirroring the other examples in this collection
import os
import time
import h5py
from sys import path, argv
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
path.append("%s/work/mylib/" % my_home)
import tool_box
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()

cmd = argv[1]
area_num = 4   # CFHTLenS has four survey areas, w1..w4
logger = tool_box.get_logger("%s/work/test/log/%d.dat" % (my_home, rank))

############################# CFHTLenS catalog collection ########################################
# combine the data of each field into one big catalog
if cmd == "collect":

    t1 = time.time()

    logger.info("RANK: %03d. Begin..." % rank)

    h5f_path = cfht_cata_path + "cfht_cata.hdf5"
    if rank == 0:
        h5f = h5py.File(h5f_path, "w")
        h5f.close()

    pre_fields = tool_box.field_dict(cfht_cata_path + "nname.dat")[1]

    # loop over the areas;
    # the catalog of each area will be stored in "w_i"
    num = []
    for area_id in range(1, area_num + 1):
        # distribute the files
        field_tar = []
        for field in pre_fields:
            if "w%d" % area_id in field:
                field_tar.append(field)

        # if there are fewer fields than threads,
        # some threads will get an empty "field_pool"
        field_pool = tool_box.allot(field_tar, cpus)[rank]
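
        # a hedged sketch of what tool_box.allot presumably does (hypothetical
        # re-implementation for illustration):
        #
        #   def allot_sketch(items, num_workers):
        #       pools = [[] for _ in range(num_workers)]
        #       for i, item in enumerate(items):
        #           pools[i % num_workers].append(item)
        #       return pools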