Code Example #1
def calc_delta_delta(event, mask):
    # transform the Crab Nebula position into the telescope's
    # horizontal frame at the event time
    frame = AltAz(location=LOCATION,
                  obstime=to_astropy_time(
                      pd.to_datetime(event.observation_info.time)))

    crab_altaz = crab.transform_to(frame)

    source_position_zd_phs = crab_altaz.zen.deg
    source_position_az_phs = crab_altaz.az.deg

    source_x, source_y = horizontal_to_camera(
        source_position_zd_phs,
        source_position_az_phs,
        event.zd,
        event.az,
    )

    # save x, y and t components of the photons, shape = (#photons, 3)
    xyt = event.photon_stream.point_cloud
    lol = event.photon_stream.list_of_lists
    x, y, t = xyt.T
    x = np.rad2deg(x) / camera_distance_mm_to_deg(1)
    y = np.rad2deg(y) / camera_distance_mm_to_deg(1)

    # mark photons whose pixel survives the cleaning
    # (mask is per pixel, the photons in lol are ordered pixel by pixel)
    cleaned_photons = np.zeros(len(x), dtype=bool)
    k = 0
    for i in range(len(lol)):
        for j in range(len(lol[i])):
            if mask[i]:
                cleaned_photons[k] = True
            k += 1

    cog_x = np.mean(x[cleaned_photons])
    cog_y = np.mean(y[cleaned_photons])

    true_delta = np.arctan2(cog_y - source_y, cog_x - source_x)

    delta = calc_delta(phs2image(event.photon_stream.list_of_lists), mask)
    delta_delta = true_delta - delta
    # wrap into a single interval of width 2 pi starting at -pi / 2
    if delta_delta < -np.pi / 2:
        delta_delta += 2 * np.pi

    return delta_delta
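
The wrap-around at the end maps delta_delta into a single interval of width 2π starting at -π/2; this kind of difference between the true orientation (center of gravity towards the source position) and the Hillas delta is typically used to resolve the head-tail ambiguity of the reconstructed shower axis.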
Code Example #2
File: fact_to_dl3.py  Project: LukasBeiske/aict-tools
def calc_random_source(pointing_zd, pointing_az, wobble_distance):
    # draw a uniform random position angle for each event
    phi = np.random.uniform(0, 2 * np.pi, len(pointing_zd))

    # convert the wobble distance from degrees to mm in the camera frame
    r = wobble_distance / camera_distance_mm_to_deg(1)
    x = r * np.cos(phi)
    y = r * np.sin(phi)

    zd, az = camera_to_horizontal(x, y, pointing_zd, pointing_az)

    return zd, az
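
A minimal usage sketch for this function; the array length, the pointing values, and the 0.6° wobble distance below are illustrative assumptions, not values from the source:

import numpy as np

# illustrative pointing for 1000 events; 0.6 deg is an assumed wobble distance
pointing_zd = np.full(1000, 20.0)
pointing_az = np.full(1000, 80.0)
off_zd, off_az = calc_random_source(pointing_zd, pointing_az, wobble_distance=0.6)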
Code Example #3
def add_theta_deg_columns(df):
    # convert theta and the theta_off_1 ... theta_off_5 columns from mm to degrees
    for i in range(6):
        incol = 'theta' if i == 0 else 'theta_off_{}'.format(i)
        outcol = 'theta_deg' if i == 0 else 'theta_deg_off_{}'.format(i)
        if incol in df.columns:
            df[outcol] = camera_distance_mm_to_deg(df[incol])
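
For illustration, applied to a hypothetical DataFrame (the values are made up, in mm):

import pandas as pd

df = pd.DataFrame({'theta': [11.4, 23.0], 'theta_off_1': [40.2, 35.7]})
add_theta_deg_columns(df)
# df now additionally contains 'theta_deg' and 'theta_deg_off_1'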
Code Example #4
def calc_hillas_features_phs(phs, clustering):
    """
    Saves Hillas features from a photon-stream clustering to the dict ev

    Inputs:
    -----------------------------------------
    phs:            Photon Stream of an event
    clustering:     Photon Stream cluster

    Returns:
    -----------------------------------------
    ev:             dictionary with Hillas features
    """

    ev = {}

    # save x, y and t components of the photons, shape = (#photons, 3)
    xyt = phs.point_cloud
    x, y, t = xyt.T
    x = np.rad2deg(x) / camera_distance_mm_to_deg(1)
    y = np.rad2deg(y) / camera_distance_mm_to_deg(1)

    # biggest cluster:
    biggest_cluster = np.argmax(np.bincount(clustering.labels[clustering.labels != -1]))
    mask = clustering.labels == biggest_cluster

    # ratio of photons in all clusters to photons in the biggest cluster
    ev['cluster_size_ratio'] = (clustering.labels != -1).sum() / mask.sum()

    ev['n_pixel'] = len(np.unique(np.column_stack([x[mask], y[mask]]), axis=0))

    # Leakage
    image = phs2image(phs.list_of_lists)
    cleaned_pix = np.zeros(len(image), dtype=bool)

    border_pix = get_border_pixel_mask()
    # count cleaned photons per pixel; the photons in list_of_lists are
    # ordered pixel by pixel, so k is the running photon index
    k = 0
    cleaned_img = np.zeros(len(image))
    for i in range(len(phs.list_of_lists)):
        for j in range(len(phs.list_of_lists[i])):
            if mask[k]:
                cleaned_pix[i] = True
                cleaned_img[i] += 1
            k += 1

    border_ph = border_pix & cleaned_pix
    ev['leakage'] = cleaned_img[border_ph].sum() / mask.sum()

    # covariance and eigenvalues/vectors for later calculations
    cov = np.cov(x[mask], y[mask])
    eig_vals, eig_vecs = np.linalg.eigh(cov)

    # Descriptive statistics: mean, std dev, kurtosis, skewness
    ev['kurtosis_x'] = scipy.stats.kurtosis(x[mask])
    ev['kurtosis_y'] = scipy.stats.kurtosis(y[mask])
    ev['skewness_x'] = scipy.stats.skew(x[mask])
    ev['skewness_y'] = scipy.stats.skew(y[mask])

    # means of cluster
    ev['cog_x'] = np.mean(x[mask])
    ev['cog_y'] = np.mean(y[mask])

    # width and length from the eigenvalues of the covariance matrix;
    # for delta, only pixels with more than 1.5 % of the cluster photons
    # are used, weighted by their photon counts
    delta_mask = cleaned_img > mask.sum() * 1.5 / 100
    if delta_mask.sum() == 0:
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            ev['width'], ev['length'] = np.sqrt(eig_vals)
            delta = np.arctan(eig_vecs[1, 1] / eig_vecs[0, 1])
    else:
        # pix_x, pix_y: per-pixel camera coordinates in mm, assumed to be
        # defined at module level (they are not part of this snippet)
        cov_d = np.cov(
            pix_x[delta_mask],
            pix_y[delta_mask],
            fweights=cleaned_img[delta_mask],
        )
        eig_vals, eig_vecs = np.linalg.eigh(cov_d)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            ev['width'], ev['length'] = np.sqrt(eig_vals)
            delta = np.arctan(eig_vecs[1, 1] / eig_vecs[0, 1])
    ev['delta'] = delta

    # rotate into main component system
    delta_x = x[mask] - ev['cog_x']
    delta_y = y[mask] - ev['cog_y']
    long = np.cos(delta) * delta_x + np.sin(delta) * delta_y
    trans = - np.sin(delta) * delta_x + np.cos(delta) * delta_y

    # higher order weights in cluster coordinates
    ev['kurtosis_long'] = scipy.stats.kurtosis(long)
    ev['kurtosis_trans'] = scipy.stats.kurtosis(trans)
    ev['skewness_long'] = scipy.stats.skew(long)
    ev['skewness_trans'] = scipy.stats.skew(trans)

    # number of photons in biggest cluster
    ev['size'] = mask.sum()

    # number of clusters
    ev['clusters'] = clustering.number

    return ev
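
A usage sketch, assuming the photon_stream package's event reader and DBSCAN clustering; the file name is a placeholder:

import photon_stream as ps

reader = ps.EventListReader('observation.phs.jsonl.gz')  # placeholder file name
event = next(reader)
clustering = ps.PhotonStreamCluster(event.photon_stream)
features = calc_hillas_features_phs(event.photon_stream, clustering)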
Code Example #5
File: hdf_utils.py  Project: fact-project/erna
def write_fits_to_hdf5(outputfile,
                       inputfiles,
                       mode='a',
                       compression='gzip',
                       progress=True,
                       key='events'):
    """
    Merge the event tables of several FACT-Tools FITS files into a single
    HDF5 file, adding theta columns converted from mm to degrees.
    """
    initialized = False

    version = None
    with h5py.File(outputfile, mode) as hdf_file:

        for inputfile in tqdm(inputfiles, disable=not progress):
            with fits.open(inputfile) as f:

                if version is None:
                    version = f[0].header['VERSION']
                    hdf_file.attrs['fact_tools_version'] = version
                else:
                    if version != f[0].header['VERSION']:
                        raise ValueError(
                            'Merging output of different FACT-Tools versions not allowed'
                        )

                # skip files without an events table extension
                if len(f) < 2:
                    continue

                array = np.array(f[1].data[:])

                # convert all names to snake case
                array.dtype.names = rename_columns(array.dtype.names)

                # add columns with theta in degrees
                arrays = []
                names = []
                for in_col, out_col in zip(theta_columns, theta_deg_columns):
                    if in_col in array.dtype.names:
                        arrays.append(camera_distance_mm_to_deg(array[in_col]))
                        names.append(out_col)

                if len(names) > 0:
                    array = recfunctions.append_fields(
                        array,
                        names=names,
                        data=arrays,
                        usemask=False,
                    )

                if not initialized:
                    initialize_h5py(
                        hdf_file,
                        array,
                        key=key,
                        compression=compression,
                    )
                    initialized = True

                append_to_h5py(hdf_file, array, key=key)

            if 'timestamp' in array.dtype.names:
                hdf_file[key]['timestamp'].attrs['timeformat'] = 'iso'
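
A usage sketch with placeholder file names:

write_fits_to_hdf5(
    'merged_events.hdf5',              # placeholder output file
    ['run_001.fits', 'run_002.fits'],  # placeholder FACT-Tools output files
    mode='w',
)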