Example no. 1
def get_log_spirals(subject_id,
                    gal=None,
                    angle=None,
                    pic_array=None,
                    bar_length=0):
    drawn_arms = gu.get_drawn_arms(subject_id, gu.classifications)
    if gal is None or angle is None:
        gal, angle = gu.get_galaxy_and_angle(subject_id)
    if pic_array is None:
        pic_array, deprojected_image = gu.get_image(gal, subject_id, angle)

    path_to_subject = './lib/distances/subject-{}.npy'.format(subject_id)

    distances = gu.get_distances(subject_id)
    if distances is None or distances.shape[0] != len(
            drawn_arms) or not os.path.exists(path_to_subject):
        # print('\t- Calculating distances')
        distances = metric.calculate_distance_matrix(drawn_arms)
        np.save(path_to_subject, distances)

    p = Pipeline(drawn_arms,
                 phi=angle,
                 ba=gal['PETRO_BA90'],
                 image_size=pic_array.shape[0],
                 distances=distances)

    arms = p.get_arms(clean_points=True, bar_length=bar_length)
    # print('Identified {} spiral arms'.format(len(arms)))
    return [arm.reprojected_log_spiral for arm in arms]
def make_arm_plots():
    outfile = 'lib/duplicate_comb_spirals'
    bar = Bar('Plotting arms',
              max=len(dr8ids), suffix='%(percent).1f%% - %(eta)ds')
    arm_loc = 'lib/duplicate_spiral_arms'
    for i in range(len(dr8ids)):
        original_id = ss_ids[i]
        gal, angle = gu.get_galaxy_and_angle(original_id)
        pic_array, _ = gu.get_image(gal, original_id, angle)
        arms = [
            Arm.load(os.path.join(arm_loc, f))
            for f in os.listdir(arm_loc)
            if re.match(r'^{}-[0-9]+\.pickle$'.format(dr8ids[i]), f)
        ]
        plt.figure(figsize=(8, 8))
        plt.imshow(pic_array, cmap='gray')
        for arm in arms:
            plt.plot(
                *arm.reprojected_log_spiral.T,
                c=('C2' if not arm.FLAGGED_AS_BAD else 'C1')
            )
        plt.savefig(os.path.join(outfile, '{}.png'.format(original_id)))
        plt.close()
        bar.next()
    bar.finish()
def get_gal_pa(subject_id):
    try:
        p = Pipeline.load('lib/pipelines/{}.json'.format(subject_id))
    except FileNotFoundError:
        drawn_arms = gu.get_drawn_arms(subject_id, gu.classifications)
        gal, angle = gu.get_galaxy_and_angle(subject_id)
        pic_array, deprojected_image = gu.get_image(gal, subject_id, angle)
        p = Pipeline(drawn_arms,
                     phi=angle,
                     ba=gal['PETRO_BA90'],
                     image_size=pic_array.shape[0])
    arms = (Arm.load(os.path.join('lib/spiral_arms', f))
            for f in os.listdir('lib/spiral_arms')
            if re.match(r'^{}-[0-9]+\.pickle$'.format(subject_id), f))
    arms = [arm for arm in arms if not arm.FLAGGED_AS_BAD]

    pa = np.zeros(len(arms))
    sigma_pa = np.zeros(pa.shape)
    length = np.zeros(pa.shape)
    for i, arm in enumerate(arms):
        pa[i] = arm.pa
        length[i] = arm.length
        sigma_pa[i] = arm.sigma_pa
    if len(arms) == 0:
        return (np.nan, np.nan,
                np.stack((np.tile(subject_id, len(pa)), pa, sigma_pa, length),
                         axis=1))
    combined_pa = (pa * length).sum() / length.sum()
    combined_sigma_pa = np.sqrt((length**2 * sigma_pa**2).sum()) / length.sum()
    return (
        combined_pa,
        combined_sigma_pa,
        np.stack((np.tile(subject_id, len(pa)), pa, sigma_pa, length), axis=1),
    )
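A quick sanity check of the length-weighted pitch-angle combination above (toy values only; nothing here comes from the pipeline):

import numpy as np

# two illustrative arms: the second is twice as long, so it dominates the mean
pa = np.array([15.0, 25.0])          # pitch angles [deg]
sigma_pa = np.array([1.0, 2.0])      # per-arm uncertainties [deg]
length = np.array([1.0, 2.0])        # arm lengths used as weights

combined_pa = (pa * length).sum() / length.sum()
# standard error propagation for the weighted mean
combined_sigma_pa = np.sqrt((length**2 * sigma_pa**2).sum()) / length.sum()
print(combined_pa, combined_sigma_pa)  # ~21.67, ~1.37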
def make_models():
    bar = Bar('Obtaining model fits',
              max=len(dr8ids), suffix='%(percent).1f%% - %(eta)ds')
    arm_loc = 'lib/duplicate_spiral_arms'
    df = []
    arm_count = []
    for i in range(len(dr8ids)):
        original_id = ss_ids[i]
        # validation_id = validation_ids[i]
        gal, angle = gu.get_galaxy_and_angle(original_id)
        mass = float(gal['SERSIC_MASS'])

        arms = [
            Arm.load(os.path.join(arm_loc, f))
            for f in os.listdir(arm_loc)
            if re.match(r'^{}-[0-9]+\.pickle$'.format(dr8ids[i]), f)
        ]
        # do not shadow the outer loop index: dr8ids[i] below must refer to
        # the current galaxy, not to the arm index
        for arm in arms:
            if not arm.FLAGGED_AS_BAD:
                arm_count.append(len(arm.arms))
                models, scores = arm.fit_polynomials(
                    n_splits=8,  # score=r2_score, lower_better=False
                )
                s = {
                    **{k: v.mean() for k, v in scores.items()},
                    **{'{}_std'.format(k): v.std() for k, v in scores.items()}
                }
                s['mass'] = mass
                s['dr8objid'] = str(dr8ids[i])
                df.append(s)
        bar.next()
    bar.finish()
    arm_count = np.array(arm_count)
    df = pd.DataFrame(df)
    df.to_pickle('model-comparison-results.pkl')
def get_splits_df(ss_id, val_id, dr8id):
    gal, angle = gu.get_galaxy_and_angle(ss_id)
    cls_for_gal = gu.classifications.query(
        '(subject_ids == {}) or (subject_ids == {})'.format(ss_id, val_id)
    )
    results = []
    for i in range(N_SPLITS):
        cls_sample = cls_for_gal.sample(30)
        results.append(
            gzbuilderaggregation.make_model(
                cls_sample,
                gal, angle,
            )
        )
    disk_df = pd.DataFrame([
        i[0]['disk'] for i in results if i[0]['disk'] is not None
    ])
    disk_df.columns = 'disk_' + disk_df.columns
    bulge_df = pd.DataFrame([
        i[0]['bulge'] for i in results if i[0]['bulge'] is not None
    ])
    bulge_df.columns = 'bulge_' + bulge_df.columns
    bar_df = pd.DataFrame([
        i[0]['bar'] for i in results if i[0]['bar'] is not None
    ])
    bar_df.columns = 'bar_' + bar_df.columns
    pa_df = pd.DataFrame(
        [get_pa_from_arms(i[-1]) for i in results],
        columns=('pa', 'sigma_pa')
    )

    gal_df = pd.concat((disk_df, bulge_df, bar_df, pa_df), axis=1, sort=False)
    return gal_df
Example no. 6
def get_best_classification(subject_id, should_plot=False, should_save=False):
    # grab all the required metadata for this galaxy
    psf = gu.get_psf(subject_id)
    diff_data = gu.get_image_data(subject_id)
    pixel_mask = 1 - np.array(diff_data['mask'])[::-1]
    galaxy_data = np.array(diff_data['imageData'])[::-1]
    size_diff = diff_data['width'] / diff_data['imageWidth']

    def _lf(rendered_model, y=galaxy_data):
        Y = rg.convolve2d(rendered_model, psf, mode='same',
                          boundary='symm') * pixel_mask
        return mean_squared_error(Y.flatten(),
                                  0.8 * (y * pixel_mask).flatten())

    classifications = gu.classifications.query(
        'subject_ids == {}'.format(subject_id))
    annotations = classifications['annotations'].apply(json.loads)
    models = annotations.apply(pa.parse_annotation, size_diff=size_diff)
    rendered_models = models.apply(rg.calculate_model,
                                   args=(diff_data['width'], ))
    scores = rendered_models.apply(_lf)
    best_index = scores.idxmin()
    best_cls = classifications.loc[best_index]
    best_model = models.loc[best_index]
    best_rendered_model = rendered_models.loc[best_index]

    if should_plot:
        gal, angle = gu.get_galaxy_and_angle(subject_id)
        pic_array, deprojected_image = gu.get_image(gal, subject_id, angle)
        # pixels per arcsecond for the zooniverse image
        pix_size = pic_array.shape[0] / (gal['PETRO_THETA'].iloc[0] * 4)
        # pixels per arcsecond for the galaxy data
        pix_size2 = galaxy_data.shape[0] / (gal['PETRO_THETA'].iloc[0] * 4)
        imshow_kwargs = {
            'cmap': 'gray_r',
            'origin': 'lower',
            'extent': (
                # left of image in arcseconds from centre
                -pic_array.shape[0] / 2 / pix_size,
                pic_array.shape[0] / 2 / pix_size,   # right...
                -pic_array.shape[1] / 2 / pix_size,  # bottom...
                pic_array.shape[1] / 2 / pix_size,   # top...
            ),
        }
        tc, tp = make_transforms(galaxy_data, pix_size2)
        plot_model(best_rendered_model, galaxy_data, psf, best_model,
                   pixel_mask, imshow_kwargs, tc, tp, best_cls)
        plt.savefig('best_residual/{}.pdf'.format(subject_id))
        plt.close()
    if should_save:
        with open('best_annotation/{}.json'.format(subject_id), 'w') as f:
            f.write(json.dumps(pa.make_json(best_model)))

    return best_cls
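The scoring inside get_best_classification follows a render, PSF-convolve, mask, mean-squared-error pattern. A standalone sketch of that idea with toy arrays (scipy's convolve2d is used here on the assumption that rg.convolve2d behaves the same way; the 0.8 scaling above is pipeline-specific and omitted):

import numpy as np
from scipy.signal import convolve2d
from sklearn.metrics import mean_squared_error

rng = np.random.default_rng(0)
galaxy_data = rng.normal(size=(50, 50))      # toy "observed" image
rendered_model = rng.normal(size=(50, 50))   # toy rendered model
psf = np.full((5, 5), 1 / 25)                # flat toy PSF
pixel_mask = np.ones((50, 50))
pixel_mask[:5] = 0                           # pretend the top rows are masked

blurred = convolve2d(rendered_model, psf, mode='same',
                     boundary='symm') * pixel_mask
score = mean_squared_error(blurred.flatten(),
                           (galaxy_data * pixel_mask).flatten())
print(score)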
Example no. 7
def make_map():
    sid_list = sorted(np.loadtxt('lib/subject-id-list.csv', dtype='u8'))
    to_iter = sid_list[:]
    coords = []
    for subject_id in tqdm(to_iter):
        gal, angle = gu.get_galaxy_and_angle(subject_id)
        coords.append((subject_id, gal['RA'].iloc[0], gal['DEC'].iloc[0]))

    df = pd.DataFrame(coords, columns=('subject_id', 'Ra', 'Dec'))

    df.to_pickle('lib/wcs-coord-map.pkl')
    return df
Example no. 8
def get_optimized_model(subject_id, mode='best'):
    gal, angle = gu.get_galaxy_and_angle(subject_id)
    sep = coords.separation(
        SkyCoord(ra=gal['RA'] * u.degree, dec=gal['DEC'] * u.degree))
    idxmin_sep = np.argmin(sep)
    if sep[idxmin_sep] >= 1 * u.arcsec:
        return None
    pic_array, deprojected_image = gu.get_image(gal, subject_id, angle)
    psf = gu.get_psf(subject_id)
    diff_data = gu.get_image_data(subject_id)
    pixel_mask = 1 - np.array(diff_data['mask'])[::-1]
    galaxy_data = np.array(diff_data['imageData'])[::-1]
    size_diff = diff_data['width'] / diff_data['imageWidth']
    # pixels per arcsecond for the zooniverse image
    pix_size = pic_array.shape[0] / (gal['PETRO_THETA'].iloc[0] * 4)
    # pixels per arcsecond for the galaxy data
    pix_size2 = galaxy_data.shape[0] / (gal['PETRO_THETA'].iloc[0] * 4)
    try:
        if mode == 'agg':
            agg_fname = os.path.join('..', 'component-clustering',
                                     'cluster-output',
                                     '{}.json'.format(subject_id))
            with open(agg_fname) as f:
                model = pa.parse_aggregate_model(json.load(f),
                                                 size_diff=size_diff)
        elif mode == 'best':
            c = gu.classifications.query('classification_id == {}'.format(
                best_cls[str(subject_id)])).iloc[0]
            a = json.loads(c['annotations'])
            model = pa.parse_annotation(a, size_diff=size_diff)
        else:
            raise ValueError('Invalid value for "mode"')
    except KeyError:
        print('\nFailed: {}'.format(subject_id))
        return None

    no_spiral_model = deepcopy(model)
    no_spiral_model['spiral'] = []

    mf_nosp = ModelFitter(no_spiral_model, galaxy_data, psf, pixel_mask)
    md_nosp = mf_nosp.model
    try:
        new_nosp_model, res = mf_nosp.fit(options={'maxiter': 100})
    except ValueError:
        print('\nCould not fit: {}'.format(subject_id))
        return None
    m0_nosp = Model(no_spiral_model, galaxy_data, psf, pixel_mask)
    m1_nosp = Model(new_nosp_model, galaxy_data, psf, pixel_mask)
    return (subject_id, m0_nosp, m1_nosp, sd.iloc[idxmin_sep], pix_size2)
def make_combined_arms():
    bar = Bar('Obtaining combined spirals',
              max=len(dr8ids), suffix='%(percent).1f%% - %(eta)ds')
    for i in range(len(dr8ids)):
        original_id = ss_ids[i]
        validation_id = validation_ids[i]
        gal, angle = gu.get_galaxy_and_angle(original_id)
        original_drawn_arms = gu.get_drawn_arms(original_id)
        validation_drawn_arms = gu.get_drawn_arms(validation_id)
        drawn_arms = np.array(
            list(original_drawn_arms) + list(validation_drawn_arms),
        )
        p = Pipeline(drawn_arms, phi=angle, ba=gal['PETRO_BA90'],
                     image_size=512, distances=None, parallel=True)
        arms = p.get_arms()
        for j, arm in enumerate(arms):
            arm.save('lib/duplicate_spiral_arms/{}-{}'.format(dr8ids[i], j))
        bar.next()
    bar.finish()
Example no. 10
def get_spiral_arms(subject_id, should_recreate=True):
    if (
        (not os.path.exists('lib/pipelines/{}.json'.format(subject_id)))
        or should_recreate
    ):
        gal, angle = gu.get_galaxy_and_angle(subject_id)
        drawn_arms = gu.get_drawn_arms(subject_id, gu.classifications)
        p = Pipeline(drawn_arms, phi=angle, ba=gal['PETRO_BA90'],
                     image_size=512, parallel=True)
        p.save('lib/pipelines/{}.json'.format(subject_id))
        arms = p.get_arms()
        for i, arm in enumerate(arms):
            arm.save('lib/spiral_arms/{}-{}'.format(subject_id, i))
        return arms
    else:
        arm_files = [
            os.path.join('lib/spiral_arms', i)
            for i in os.listdir('lib/spiral_arms')
            if str(subject_id) in i
        ]
        return [Arm.load(a) for a in arm_files]
def make_comparison(dr8id, ss_id, val_id):
    comp_file = 'model-variances/{}_components.pickle'.format(dr8id)
    pa_file = 'model-variances/{}_pa.pickle'.format(dr8id)
    if os.path.isfile(comp_file) and os.path.isfile(pa_file):
        return
    all_cls = gu.classifications.query(
        '(subject_ids == {}) or (subject_ids == {})'.format(ss_id, val_id))
    all_models = all_cls['annotations'].apply(json.loads).apply(
        ash.remove_scaling).apply(pa.parse_annotation)
    all_geoms = pd.DataFrame(all_models.apply(gas.get_geoms).values.tolist(),
                             columns=('disk', 'bulge', 'bar'))

    ss = ShuffleSplit(n_splits=20, test_size=0.5, random_state=0)
    split_models = []
    pas = []

    gal, angle = gu.get_galaxy_and_angle(ss_id)

    bar = Bar(str(dr8id), max=ss.n_splits, suffix='%(percent).1f%% - %(eta)ds')
    for i, (train_index, _) in enumerate(ss.split(all_geoms)):
        models = all_models.iloc[train_index]
        drawn_arms = get_drawn_arms((ss_id, val_id), all_cls.iloc[train_index])
        if len(drawn_arms) > 1:
            p = Pipeline(drawn_arms,
                         phi=angle,
                         ba=gal['PETRO_BA90'],
                         image_size=512,
                         parallel=True)
            pas.append(p.get_pitch_angle(p.get_arms()))
        else:
            pas.append((np.nan, np.nan))
        geoms = all_geoms.iloc[train_index]
        labels = list(map(np.array, gas.cluster_components(geoms)))
        comps = gas.get_aggregate_components(geoms, models, labels)
        aggregate_disk, aggregate_bulge, aggregate_bar = comps
        split_models.append({
            'disk': aggregate_disk if aggregate_disk else None,
            'bulge': aggregate_bulge if aggregate_bulge else None,
            'bar': aggregate_bar if aggregate_bar else None,
        })
        bar.next()
    bar.finish()
    splits_df = []
    for model in split_models:
        model_comps = {}
        for key in ('disk', 'bulge', 'bar'):
            if model.get(key, None) is None:
                model[key] = {}
            mu = model[key].get('mu', (np.nan, np.nan))
            model_comps['{}-mux'.format(key)] = mu[0]
            model_comps['{}-muy'.format(key)] = mu[1]
            for param in ('roll', 'rEff', 'axRatio', 'i0', 'n', 'c'):
                model_comps['{}-{}'.format(key, param)] = (model[key].get(
                    param, np.nan))
        splits_df.append(model_comps)
    splits_df = pd.DataFrame(splits_df)

    pas = pd.DataFrame(pas,
                       columns=('pa', 'sigma_pa'),
                       index=pd.Series(range(len(pas)), name='split_index'))
    splits_df.to_pickle(comp_file)
    pas.to_pickle(pa_file)
Example no. 12
    plt.figure(figsize=(8, 8))
    plt.title('Combined galaxy')
    plt.imshow(pic_array, origin='lower', cmap='gray_r')
    ax = plt.gca()
    for p in patches:
        ax.add_patch(p)
    plt.axis('off')
    if outfile is not None:
        plt.savefig(outfile)


if __name__ == "__main__":
    sid_list = sorted(np.loadtxt('lib/subject-id-list.csv', dtype='u8'))
    to_iter = sid_list
    for subject_id in tqdm(to_iter):
        gal, angle = gu.get_galaxy_and_angle(subject_id)
        pic_array, deprojected_image = gu.get_image(gal, subject_id, angle)
        # pixels per arcsecond
        pix_size = pic_array.shape[0] / (gal['PETRO_THETA'].iloc[0] * 4)

        disk_res, bulge_res, bar_res = cluster_components(subject_id)

        spirals = get_log_spirals(subject_id,
                                  gal=gal,
                                  angle=angle,
                                  pic_array=pic_array,
                                  bar_length=10)
        xtick_labels = np.linspace(-100, 100, 11).astype(int)
        xtick_positions = xtick_labels * pix_size + pic_array.shape[0] / 2
        xtick_mask = (xtick_positions > 0) & (xtick_positions <
                                              pic_array.shape[0])
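A worked example of the tick conversion above with illustrative numbers (a 512-pixel image assumed to span 100 arcseconds of sky; not taken from any real galaxy):

import numpy as np

image_size = 512                      # pixels
pix_size = image_size / 100.0         # 5.12 pixels per arcsecond
xtick_labels = np.linspace(-100, 100, 11).astype(int)
xtick_positions = xtick_labels * pix_size + image_size / 2
# keep only the ticks that actually land inside the image
xtick_mask = (xtick_positions > 0) & (xtick_positions < image_size)
print(xtick_labels[xtick_mask], xtick_positions[xtick_mask])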
Example no. 13
def main(mangaid, subject_id):
    gal, angle = gu.get_galaxy_and_angle(subject_id)
    unit_converter = convert_arcsec_to_km(gal)
    df = read_file(mangaid)
    invalid_mask = df.values == -9999.0
    mask = np.any(invalid_mask, axis=1)
    df.iloc[mask] = np.nan
    df = df.dropna()
    df['R-arcsec'] = df['R']
    df['R'] = unit_converter(df['R'])
    scale = 4 * float(gal['PETRO_THETA'])
    zoo_coords_r = df['R-arcsec'].values / scale
    keys = (
        'GAS_IC-V',
        'GAS___-V',
        'BTH_IC-V',
        'BTH___-V',
    )
    labels = (
        r'$H_\alpha$ velocity, fixed centre & inclination',
        r'$H_\alpha$ velocity, varying centre & inclination',
        r'$H_\alpha$ and stellar velocity, fixed centre & inclination',
        r'$H_\alpha$ and stellar velocity, varying centre & inclination',
    )
    drawn_arms = gu.get_drawn_arms(subject_id, gu.classifications)
    arm_pipeline = Pipeline(drawn_arms,
                            phi=angle,
                            ba=gal['PETRO_BA90'],
                            image_size=512,
                            parallel=True)
    arms = arm_pipeline.get_arms()
    gzb_pa, gzb_sigma_pa = arm_pipeline.get_pitch_angle(arms)
    arm_details = [{
        'pa': arm.pa,
        'sigma_pa': arm.sigma_pa,
        'min_r': unit_converter(
            np.linalg.norm(arm.log_spiral - (256, 256), axis=1).min() *
            float(gal['PETRO_THETA']) * 4 / 512),
        'max_r': unit_converter(
            np.linalg.norm(arm.log_spiral - (256, 256), axis=1).max() *
            float(gal['PETRO_THETA']) * 4 / 512),
    } for arm in arms]
    min_r = min(a['min_r'] for a in arm_details)
    max_r = max(a['max_r'] for a in arm_details)
    fitted = {}
    fig, ax = plt.subplots(figsize=(8, 6))
    sa_pas = []
    sa_pa_datas = []
    for i, (key, label) in enumerate(zip(keys, labels)):
        f = tanh_model(df['R'].values, df[key].values)
        p = least_squares(f, (160, 1E-17), x_scale=(10, 1E-17))['x']
        fitted[key] = f(p) + df[key].values
        # Calculate shear from analytic solve of dln(Ω)/dln(R)
        shear = shear_from_tanh(p[1], df['R'].values)
        omega = df[key] / (2 * np.pi * df['R'])
        shear_data = get_shear(omega[:-1], df['R'].values[:-1])

        plt.plot(df['R'], shear, c='C{}'.format(i % 10), label=label)
        plt.plot(np.stack((df['R'][:-1], df['R'][1:])).mean(axis=0),
                 shear_data,
                 '--',
                 c='C{}'.format(i % 10))

        sa_pa = np.rad2deg(get_predicted_pa(shear))
        sa_pa_data = np.rad2deg(get_predicted_pa(shear_data))
        sa_pas.append(sa_pa)
        sa_pa_datas.append(sa_pa_data)
        print('For key: {}'.format(key))
        msk = (df['R'] > min_r) & (df['R'] < max_r)
        print('\tRotation-predicted: {:.4f}°'.format(sa_pa[msk].mean()))
        print('\tGZB measured PA: {:.4f} ± {:.4f}°'.format(
            gzb_pa, gzb_sigma_pa))

    plt.plot([], [], 'k-', label=r'Analytic differentiation')
    plt.plot([], [], 'k--', label='Numerical differentiation')

    plt.xlabel('Distance from galaxy centre [km]')
    plt.ylabel(r'Shear rate, $\Gamma$')
    plt.legend()
    plt.savefig('{}_shear.pdf'.format(mangaid), bbox_inches='tight')
    plt.close()

    np.save('pavr', np.stack((zoo_coords_r, sa_pas[0]), axis=1))

    imshow_kwargs = {
        'cmap': 'gray',
        'origin': 'lower',
        'extent': [-0.5 * scale, 0.5 * scale] * 2,
    }
    pic_array, _ = gu.get_image(gal, subject_id, angle)
    fig, ax = plt.subplots(ncols=1, figsize=(5, 5))
    plt.imshow(pic_array, **imshow_kwargs)
    for i, arm in enumerate(arms):
        varying_arm_t = fit_varying_pa(arm, zoo_coords_r,
                                       np.stack(sa_pas).mean(axis=0))
        t_predict = np.linspace(varying_arm_t.min(), varying_arm_t.max(), 100)
        f = interp1d(varying_arm_t, zoo_coords_r)
        varying_arm = xy_from_r_theta(f(t_predict), t_predict)

        log_spiral = xy_from_r_theta(*np.flipud(arm.polar_logsp))
        plt.plot(*arm.deprojected_coords.T * scale, '.', markersize=1, alpha=1)
        plt.plot(*log_spiral * scale, c='r', linewidth=3, alpha=0.8)
        plt.plot(*varying_arm * scale, c='g', linewidth=3, alpha=0.8)
    # plots for legend
    plt.plot([], [],
             c='g',
             linewidth=3,
             alpha=0.8,
             label='Swing-amplified spiral')
    plt.plot([], [], c='r', linewidth=3, alpha=0.8, label='Logarithmic spiral')
    plt.axis('equal')
    plt.xlabel('Arcseconds from galaxy centre')
    plt.ylabel('Arcseconds from galaxy centre')
    plt.xlim(-25, 25)
    plt.ylim(-25, 25)
    plt.legend()
    plt.savefig('{}_varying-pa.pdf'.format(mangaid), bbox_inches='tight')
    plt.close()
    return  # NOTE: the plotting code below this early return is currently unreachable

    fig, ax = plt.subplots(figsize=(8, 6))
    for sa_pa, label in zip(sa_pas, labels):
        plt.plot(df['R'], sa_pa, label=label)
    for row in arm_details:
        plt.hlines(row['pa'], row['min_r'], row['max_r'])
        plt.fill_between(
            np.linspace(row['min_r'], row['max_r'], 2),
            row['pa'] - row['sigma_pa'],
            row['pa'] + row['sigma_pa'],
            color='k',
            alpha=0.2,
        )
    plt.legend()
    plt.xlabel('Distance from galaxy centre [km]')
    plt.ylabel('Pitch angle [degrees]')
    plt.savefig('{}_pa.pdf'.format(mangaid), bbox_inches='tight')
    plt.close()

    fig, ax = plt.subplots(figsize=(8, 6))
    # df.plot('R', keys, label=labels, ax=ax)
    for i, key in enumerate(keys):
        plt.fill_between(
            df['R'].values,
            df[key].values - df[key + 'e'].values,
            df[key].values + df[key + 'e'].values,
            color='C{}'.format(i % 10),
            alpha=0.1,
        )
        plt.plot(df['R'].values, df[key].values, '--', c='C{}'.format(i % 10))
        plt.plot(df['R'].values, fitted[key], c='C{}'.format(i % 10))
    for i, label in enumerate(labels):
        plt.plot([], [], c='C{}'.format(i % 10), label=label)
    plt.plot([], [], 'k-', label=r'$A\tanh(bR)$ model')
    plt.plot([], [], 'k--', label='Data')
    plt.legend()
    plt.xlabel('Distance from galaxy centre [km]')
    plt.ylabel(r'Rotational velocity [$\mathrm{km}\mathrm{s}^{-1}$]')
    plt.savefig('{}_rotational-velocity_2.pdf'.format(mangaid),
                bbox_inches='tight')
    plt.close()
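A sketch of the numerical shear estimate used in main above: the shear rate is Gamma = -dln(Omega)/dln(R), which get_shear presumably approximates with finite differences between adjacent radial bins. The helper and toy values below are illustrative only:

import numpy as np

def finite_difference_shear(omega, R):
    # Gamma = -dln(Omega)/dln(R), estimated between adjacent radial bins
    return -np.diff(np.log(omega)) / np.diff(np.log(R))

# a flat rotation curve (v = const) has Omega ~ 1/R and therefore Gamma = 1
toy_R = np.linspace(1.0, 10.0, 50)
toy_omega = 200.0 / toy_R
print(finite_difference_shear(toy_omega, toy_R))  # ~1 everywhere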
Example no. 14
def make_galfit_feedme(subject_id,
                       classification,
                       output_dir='',
                       output_fname='output.fits',
                       overwrite=True):
    sid_loc = os.path.join(output_dir, str(subject_id))
    loc = os.path.join(sid_loc, str(classification['classification_id']))
    if overwrite and os.path.isdir(loc):
        shutil.rmtree(loc)
    if not os.path.isdir(sid_loc):
        os.mkdir(sid_loc)
    if not os.path.isdir(loc):
        os.mkdir(loc)

    # Grab galaxy information
    gal, angle = gu.get_galaxy_and_angle(subject_id)
    coord = np.array((gal['RA'].values[0], gal['DEC'].values[0]))
    diff_data = gu.get_image_data(subject_id)
    size_diff = diff_data['width'] / diff_data['imageWidth']
    bad_pixel_mask = np.array(diff_data['mask']).T[::-1]

    cutout_size = 4 * np.tile(gal['PETRO_THETA'].values[0], 2)
    frame = scg.queryFromRaDec(gal['RA'], gal['DEC'])[0]

    original_image_loc = get_original_fits(frame)
    image_file = fits.open(original_image_loc)
    image_file_loc = os.path.join(loc, 'image_cutout.fits')

    # create the cutout and sigma image
    hdu = image_file[0]
    im_cutout, sigma_cutout = scg.cutFits(image_file,
                                          *coord,
                                          cutout_size,
                                          sigma=True)
    if im_cutout is False:
        return False
    hdu.data = im_cutout.data
    # Update the FITS header with the cutout WCS
    hdu.header.update(im_cutout.wcs.to_header())
    # Write the cutout to a new FITS file
    hdu.writeto(image_file_loc, overwrite=True)

    image_file = fits.open(image_file_loc)

    if image_file[0].data.shape != bad_pixel_mask.shape:
        print(image_file[0].data.shape, bad_pixel_mask.shape)
        raise BoundsError(
            'Could not make a {0:.2f}" by {0:.2f}"'.format(cutout_size[0]) +
            ' cutout from this fits file')

    # get galaxy wcs (not needed?)
    image_wcs = WCS(image_file_loc)

    # make fits file for pixel mask
    bad_pixel_loc = os.path.join(loc, 'bad_pixel_image.fits')
    fits.HDUList([fits.PrimaryHDU(bad_pixel_mask)]).writeto(bad_pixel_loc)

    # get sigma image
    sigma_loc = os.path.join(loc, 'sigma.fits')
    fits.HDUList([fits.PrimaryHDU(sigma_cutout.data)]).writeto(sigma_loc,
                                                               overwrite=True)

    # get PSF
    psf_loc = os.path.join(loc, 'PSF.fits')
    psf = scg.getPSF(coord, frame, image_file, fname=psf_loc)
    fits.HDUList([fits.PrimaryHDU(psf)]).writeto(psf_loc)

    # Parse the volunteer's classification into a model
    model = pa.parse_annotation(json.loads(classification['annotations']),
                                size_diff=size_diff)
    object_list = create_object_list(model, gal, image_wcs)
    output_loc = os.path.join(loc, 'imgblock.fits')
    feedme = galfit_formatter.substitute({
        'input_fits': os.path.abspath(image_file_loc),
        'output_fits': os.path.abspath(output_loc),
        'sigma_image': os.path.abspath(sigma_loc),
        'psf_fits': os.path.abspath(psf_loc),
        'psf_fine_sampling': 1,
        'bad_pixel_mask': os.path.abspath(bad_pixel_loc),
        'param_constraint_file': 'none',
        'region_xmin': 1,
        'region_xmax': image_file[0].data.shape[1],  # x is columns
        'region_ymin': 1,
        'region_ymax': image_file[0].data.shape[0],  # y is rows
        'convolution_box_width': 100,   # something magical in GALFIT
        'convolution_box_height': 100,  # something magical in GALFIT
        'photomag_zero': 26.563,        # sdss r-band photomag zeropoint
        'plate_scale_dy': 0.396,        # arcseconds per pixel
        'plate_scale_dx': 0.396,
        'display_type': None,
        'object_list': object_list,
    })
    feedme_loc = os.path.join(loc, 'galfit.feedme')
    with open(feedme_loc, 'w') as f:
        f.write(feedme)
    return {
        'base': loc,
        'feedme': feedme_loc,
        'image': image_file_loc,
        'output': output_loc,
        'psf': psf_loc,
        'mask': bad_pixel_loc,
    }
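One way to consume the dictionary of paths returned above is to run GALFIT on the generated feedme file. A minimal sketch, assuming a galfit binary is available on the PATH (the helper name run_galfit is illustrative and not part of the original code):

import os
import subprocess

def run_galfit(paths):
    # GALFIT is invoked as `galfit <feedme>`; run from the model directory so
    # GALFIT's own log and output files land next to the generated inputs
    return subprocess.run(['galfit', os.path.abspath(paths['feedme'])],
                          cwd=paths['base'], check=False)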
Example no. 15
            'original_disk_ba': np.nan,
            'validation_disk_ba': np.nan,
            'original_disk_reff': np.nan,
            'validation_disk_reff': np.nan,
            'original_bulge_ba': np.nan,
            'validation_bulge_ba': np.nan,
            'original_bulge_reff': np.nan,
            'validation_bulge_reff': np.nan,
            'original_bar_ba': np.nan,
            'validation_bar_ba': np.nan,
            'original_bar_reff': np.nan,
            'validation_bar_reff': np.nan,
            'original_pa': np.nan,
            'validation_pa': np.nan,
        }
        gal, angle = gu.get_galaxy_and_angle(ss_ids[i])
        gal_v, angle_v = gu.get_galaxy_and_angle(validation_ids[i])
        original_id = ss_ids[i]
        validation_id = validation_ids[i]
        try:
            with open('cluster-output/{}.json'.format(original_id)) as f:
                original_components = json.load(f)
            with open('cluster-output/{}.json'.format(validation_id)) as f:
                validation_components = json.load(f)
        except OSError:
            continue
        res['original_model_string'] = get_model_string(original_components)
        res['validation_model_string'] = get_model_string(
            validation_components)

        both_have_disks = (original_components['disk'] is not None
                           and validation_components['disk'] is not None)
        X_test = np.vander(R_test, degree)
        clf.fit(X[:, :-1], t_train, sample_weight=point_weights[train])
        s = clf.score(
            X_test[:, :-1],
            t_test,
            sample_weight=point_weights[test]
        )
        params.append(clf.coef_)
        score += s / n_splits
    return score, params
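A standalone sketch of the weighted polynomial fit in the fragment above: build a Vandermonde design matrix, drop its constant column (as X[:, :-1] does), and fit with per-point weights. All names and values below are illustrative:

import numpy as np
from sklearn.linear_model import LinearRegression

rng = np.random.default_rng(0)
R = np.linspace(0.1, 1.0, 100)
t = 2.0 * R**2 - 0.5 * R + rng.normal(scale=0.01, size=R.size)
point_weights = np.ones_like(R)

degree = 3                     # np.vander gives columns R^2, R^1, R^0
X = np.vander(R, degree)
clf = LinearRegression()
clf.fit(X[:, :-1], t, sample_weight=point_weights)  # drop the constant column
print(clf.score(X[:, :-1], t, sample_weight=point_weights), clf.coef_)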


if __name__ == '__main__':
    chosenId = 21097008
    # chosenId = 21686558
    gal, angle = gu.get_galaxy_and_angle(chosenId)
    pic_array, deprojected_image = gu.get_image(
        gal, chosenId, angle
    )

    drawn_arms = gu.get_drawn_arms(chosenId, gu.classifications)

    galaxy_object = GalaxySpirals(
        drawn_arms,
        ba=gal['SERSIC_BA'].iloc[0],
        phi=-angle
    )
    # only compute the (expensive) distance matrix if it is not already
    # defined, e.g. from a previous run in an interactive session
    try:
        distances
    except NameError:
        distances = galaxy_object.calculate_distances()
Example no. 17
def plot_aggregation(subject_id, model=None, cluster_masks=None, arms=None):
    if model is None or cluster_masks is None or arms is None:
        print(model)
        model_path = os.path.join(
            'cluster-output', '{}.json'.format(subject_id)
        )
        masks_path = os.path.join('cluster_masks', '{}.npy'.format(subject_id))
        if not (os.path.exists(model_path) and os.path.exists(masks_path)):
            return
        with open(model_path) as f:
            model = json.load(f)
        with open(masks_path, 'rb') as f:
            cluster_masks = np.load(f)
        arms = get_spiral_arms(subject_id, should_recreate=False)

    annotations = gu.classifications[
        gu.classifications['subject_ids'] == subject_id
    ]['annotations'].apply(json.loads)
    models = annotations\
        .apply(ash.remove_scaling)\
        .apply(pa.parse_annotation)\
        .apply(sanitize_model)
    spirals = models.apply(lambda d: d.get('spiral', None))
    geoms = pd.DataFrame(
        models.apply(get_geoms).values.tolist(),
        columns=('disk', 'bulge', 'bar')
    )

    logsps = [arm.reprojected_log_spiral for arm in arms]

    disk_cluster_geoms = geoms['disk'][cluster_masks[0]]
    bulge_cluster_geoms = geoms['bulge'][cluster_masks[1]]
    bar_cluster_geoms = geoms['bar'][cluster_masks[2]]

    aggregate_disk_geom = ash.make_ellipse(model['disk'])
    aggregate_bulge_geom = ash.make_ellipse(model['bulge'])
    aggregate_bar_geom = ash.make_box(model['bar'])

    gal, angle = gu.get_galaxy_and_angle(subject_id)
    pic_array, _ = gu.get_image(gal, subject_id, angle)

    def ts(s):
        return ash.transform_shape(s, pic_array.shape[0],
                                   gal['PETRO_THETA'].iloc[0])

    def tv(v):
        return ash.transform_val(v, pic_array.shape[0],
                                 gal['PETRO_THETA'].iloc[0])

    imshow_kwargs = {
        'cmap': 'gray',
        'origin': 'lower',
        'extent': [tv(0), tv(pic_array.shape[0])]*2,
    }
    fig, ((ax0, ax1), (ax2, ax3)) = plt.subplots(
        ncols=2, nrows=2,
        figsize=(10, 10),
        sharex=True, sharey=True
    )
    ax0.imshow(pic_array, **imshow_kwargs)
    for comp in geoms['disk'].values:
        if comp:
            ax0.add_patch(
                PolygonPatch(ts(comp), fc='C0', ec='k',
                             alpha=0.2, zorder=3)
            )
    ax1.imshow(pic_array, **imshow_kwargs)
    for comp in geoms['bulge'].values:
        if comp:
            ax1.add_patch(
                PolygonPatch(ts(comp), fc='C1', ec='k',
                             alpha=0.5, zorder=3)
            )
    ax2.imshow(pic_array, **imshow_kwargs)
    for comp in geoms['bar'].values:
        if comp:
            ax2.add_patch(
                PolygonPatch(ts(comp), fc='C2', ec='k',
                             alpha=0.2, zorder=3)
            )
    ax3.imshow(pic_array, **imshow_kwargs)
    for arm in arms:
        for a in arm.arms:
            ax3.plot(*tv(a).T)

    for i, ax in enumerate((ax0, ax1, ax2, ax3)):
        ax.set_xlim(imshow_kwargs['extent'][:2])
        ax.set_ylim(imshow_kwargs['extent'][2:])
        if i % 2 == 0:
            ax.set_ylabel('Arcseconds from center')
        if i > 1:
            ax.set_xlabel('Arcseconds from center')
    fig.subplots_adjust(wspace=0.05, hspace=0.05)
    plt.savefig('drawn_shapes/{}.pdf'.format(subject_id), bbox_inches='tight')
    plt.close()

    fig, ((ax0, ax1), (ax2, ax3)) = plt.subplots(
        ncols=2, nrows=2,
        figsize=(10, 10),
        sharex=True, sharey=True
    )
    ax0.imshow(pic_array, **imshow_kwargs)
    for comp in disk_cluster_geoms.values:
        ax0.add_patch(
            PolygonPatch(ts(comp), fc='C0', ec='k', alpha=0.1, zorder=3)
        )
    if model['disk'] is not None:
        aggregate_disk_geom = ash.make_ellipse(model['disk'])
        ax0.add_patch(
            PolygonPatch(ts(aggregate_disk_geom), fc='C1', ec='k', alpha=0.5,
                         zorder=3)
        )
    ax1.imshow(pic_array, **imshow_kwargs)
    for comp in bulge_cluster_geoms.values:
        ax1.add_patch(
            PolygonPatch(ts(comp), fc='C1', ec='k', alpha=0.1, zorder=3)
        )
    if aggregate_bulge_geom is not None:
        ax1.add_patch(
            PolygonPatch(ts(aggregate_bulge_geom), fc='C2', ec='k', alpha=0.5,
                         zorder=3)
        )
    ax2.imshow(pic_array, **imshow_kwargs)
    for comp in bar_cluster_geoms.values:
        ax2.add_patch(
            PolygonPatch(ts(comp), fc='C2', ec='k', alpha=0.1, zorder=3)
        )
    if aggregate_bar_geom is not None:
        ax2.add_patch(
            PolygonPatch(ts(aggregate_bar_geom), fc='C3', ec='k', alpha=0.5,
                         zorder=3)
        )
    ax3.imshow(pic_array, **imshow_kwargs)
    for arm in arms:
        ax3.plot(*tv(arm.coords).T, '.', alpha=0.5, markersize=0.5)
    for arm in logsps:
        ax3.plot(*tv(arm).T)

    for i, ax in enumerate((ax0, ax1, ax2, ax3)):
        ax.set_xlim(imshow_kwargs['extent'][:2])
        ax.set_ylim(imshow_kwargs['extent'][2:])
        if i % 2 == 0:
            ax.set_ylabel('Arcseconds from center')
        if i > 1:
            ax.set_xlabel('Arcseconds from center')
    fig.subplots_adjust(wspace=0.05, hspace=0.05)
    plt.savefig('clustered_shapes/{}.pdf'.format(subject_id),
                bbox_inches='tight')
    plt.close()

    fig = plt.figure(figsize=(10, 10))
    ax = plt.gca()
    ax.imshow(pic_array, **imshow_kwargs)
    if aggregate_disk_geom is not None:
        ax.add_patch(
            PolygonPatch(ts(aggregate_disk_geom), fc='C0', ec='k', alpha=0.25,
                         zorder=3)
        )
    if aggregate_bulge_geom is not None:
        ax.add_patch(
            PolygonPatch(ts(aggregate_bulge_geom), fc='C1', ec='k', alpha=0.25,
                         zorder=3)
        )
    if aggregate_bar_geom is not None:
        ax.add_patch(
            PolygonPatch(ts(aggregate_bar_geom), fc='C2', ec='k', alpha=0.25,
                         zorder=3)
        )
    for arm in logsps:
        plt.plot(*tv(arm).T, c='C3')

    ax.set_xlim(imshow_kwargs['extent'][:2])
    ax.set_ylim(imshow_kwargs['extent'][2:])
    ax.set_ylabel('Arcseconds from center')
    ax.set_xlabel('Arcseconds from center')
    plt.savefig('aggregate_model/{}.pdf'.format(subject_id),
                bbox_inches='tight')
    plt.close()