예제 #1
0
def table() -> pylatex.Table:
    """Build a table of distortion-model coefficients for the x' and y' axes.

    Returns a :class:`pylatex.Table` with one row per coefficient of the
    as-designed single-channel distortion model: coefficient name, its unit,
    and the relative-model values for x' and y'.
    """
    optics_single = optics.as_designed_single_channel()
    model_distortion = optics_single.rays_output.distortion.model()
    model_distortion_relative = optics_single.rays_output_relative.distortion.model()

    def fmt_coeff(coeff: u.Quantity) -> str:
        # Strip the unit so every coefficient is rendered as a plain number
        # in scientific notation with two digits after the decimal point.
        return kgpy.format.quantity(
            a=coeff.value * u.dimensionless_unscaled,
            scientific_notation=True,
            digits_after_decimal=2,
        )

    result = pylatex.Table()
    with result.create(pylatex.Center()) as centering:
        with centering.create(pylatex.Tabular('ll|rr')) as tabular:
            tabular.escape = False
            # Raw string avoids the invalid '\m' escape sequence the previous
            # non-raw literal relied on; the emitted LaTeX is identical.
            tabular.append(
                r"\multicolumn{2}{l}{Coefficient} & $x'$ & $y'$\\")
            tabular.add_hline()
            for c, name in enumerate(model_distortion.x.coefficient_names):
                tabular.add_row([
                    f'{name}',
                    f'({model_distortion.x.coefficients[c].unit:latex_inline})',
                    fmt_coeff(
                        model_distortion_relative.x.coefficients[c].squeeze()),
                    fmt_coeff(
                        model_distortion_relative.y.coefficients[c].squeeze()),
                ])
    return result
예제 #2
0
def table() -> pylatex.Table:
    """Build the ESIS instrument requirements vs. capabilities table (table:scireq)."""
    header = ['Parameter', 'Requirement', 'Science Driver', 'Capabilities']
    body_rows = [
        [
            r'Spectral line',
            r'\OV',
            r'\EEs',
            r'\OVion, \MgXion, \HeIion, Figure~\ref{fig:bunch}',
        ],
        [
            r'Spectral sampling',
            r'\spectralResolutionRequirement',
            r'Broadening from \MHD\ waves',
            r'\dispersionDoppler, Table~\ref{table:prescription}',
        ],
        [
            r'Spatial resolution',
            r'\angularResolutionRequirement (\spatialResolutionRequirement)',
            r'\EEs',
            r'\spatialResolutionTotal, Table~\ref{table:errorBudget}',
        ],
        [
            r'\SNRShort',
            r'\snrRequirement\ (\CHShort)',
            r'\MHD\ waves in \CHShort',
            r'\StackedCoronalHoleSNR\ ($\NumExpInStack \times \text{\detectorExposureLength}$ exp.), '
            r'Table~\ref{table:counts}',
        ],
        [
            r'Cadence',
            r'\cadenceRequirement',
            r'Torsional waves',
            r'\detectorExposureLength\ eff., Section~\ref{subsec:SensitivityandCadence}',
        ],
        [
            r'Observing time',
            r'\observingTimeRequirement',
            r'\EEs',
            r'\SI{270}{\second}, Section~\ref{sec:MissionProfile}',
        ],
        [
            r'\FOV\ diameter',
            r'\fovRequirement',
            r'Span \QSShort, \ARShort, and limb',
            r'\fov, Table~\ref{table:prescription}',
        ],
    ]

    result = pylatex.Table(position='!htb')
    # Render as table* so the table spans both columns in a two-column layout.
    result._star_latex_name = True
    with result.create(pylatex.Center()) as centering:
        with centering.create(pylatex.Tabular(table_spec='llll', )) as tabular:
            tabular.escape = False
            tabular.add_row(header)
            tabular.add_hline()
            for row in body_rows:
                tabular.add_row(row)
    result.add_caption(
        pylatex.NoEscape(
            r"""\ESIS\ instrument requirements and capabilties. Note that MTF exceeds the Rayleigh criterion of 0.109."""
        ))
    result.append(kgpy.latex.Label('table:scireq'))
    return result
예제 #3
0
def df_to_pdf(df,
              out_file,
              print_index=True,
              debug=False,
              digit_round=1,
              caption=None,
              comma_separated_columns=None,
              gen_latex=False):
    """
    Convert data frame to pdf/latex. Used to create tables for paper/thesis.

    Parameters
    ----------
    df : pandas.DataFrame
        Table data to render.
    out_file : str
        Output file name (no extension); written under the module-level
        ``table_path`` (``.tex`` mode) or ``output_path`` (``.pdf`` mode).
    print_index : bool
        Include the data-frame index as the first column.
    debug : bool
        If True, return the LaTeX document source instead of compiling a PDF.
    digit_round : int or None
        Decimal digits to round numeric columns to (None disables rounding).
    caption : str or None
        Optional table caption (PDF mode only).
    comma_separated_columns : list of str, optional
        Columns to reformat with thousands separators.
    gen_latex : bool
        If True, write a stand-alone ``.tex`` table and return; otherwise
        build and compile a PDF document.

    Notes
    -----
    Fixes over the previous revision: ``comma_separated_columns`` no longer
    uses a shared mutable default list, and ``gen_latex`` defaults to the
    boolean ``False`` — the old default, the *string* ``'False'``, was truthy,
    so ``if gen_latex:`` always fired and the PDF branch was unreachable with
    default arguments.
    """
    if digit_round is not None:
        df = df.round(digit_round)

    for column in comma_separated_columns or []:
        df[column] = df[column].map('{:,}'.format)

    # One extra column for the index when it is printed.
    table_columns = len(df.columns) + 1 if print_index else len(df.columns)

    if gen_latex:
        # NOTE(review): ``table_path`` is assumed to be a module-level
        # directory prefix defined elsewhere in this file — confirm.
        with open(table_path + out_file + '.tex', 'w') as f:
            f.write(
                df.to_latex(escape=False,
                            index=print_index,
                            column_format='c' * table_columns))
        return

    # 'standalone'/'varwidth' produces a PDF cropped to the table itself.
    doc = pl.Document(documentclass='standalone', document_options='varwidth')
    doc.packages.append(pl.Package('booktabs'))

    with doc.create(pl.MiniPage()):
        with doc.create(pl.Table(position='htbp')) as table:
            table.append(pl.Command('centering'))
            table.append(
                pl.NoEscape(
                    df.to_latex(escape=False,
                                index=print_index,
                                column_format='c' * table_columns)))
            if caption is not None:
                table.add_caption(caption)
    if debug:
        return doc.dumps()

    # NOTE(review): ``output_path`` is assumed to be a module-level directory
    # prefix defined elsewhere in this file — confirm.
    doc.generate_pdf(output_path + out_file)
예제 #4
0
def table() -> pylatex.Table:
    """Tabulate the as-measured ESIS camera properties, one row per quadrant."""
    optics_all = esis.flight.optics.as_measured()
    detector = optics_all.detector
    result = pylatex.Table()
    with result.create(pylatex.Center()) as centering:
        with centering.create(pylatex.Tabular('ccccc')) as tabular:
            tabular.escape = False
            # Header: column titles, then a second row with the units.
            tabular.add_row([
                r'Channel',
                r'Quad.',
                r'Gain',
                r'Read noise',
                r'Dark current',
            ])
            tabular.add_row([
                '', '', f'({detector.gain.unit:latex_inline})',
                f'({detector.readout_noise.unit:latex_inline})',
                f'({detector.dark_current.unit:latex_inline})'
            ])
            tabular.add_hline()
            num_channels = detector.gain.shape[0]
            num_quadrants = detector.gain.shape[1]
            for chan in range(num_channels):
                for quad in range(num_quadrants):
                    # Only the first quadrant row carries the channel label
                    # and serial number; later rows get a blank label cell.
                    if quad == 0:
                        label = (f'{optics_all.channel_name[chan]} '
                                 f'({detector.serial_number[chan]})')
                    else:
                        label = ' '
                    tabular.add_row([
                        label,
                        quad + 1,
                        detector.gain[chan, quad].value,
                        detector.readout_noise[chan, quad].value,
                        f'{detector.dark_current[chan, quad].value:0.3f}',
                    ])
                tabular.add_hline()
    result.add_caption(pylatex.NoEscape(r'\ESIS\ camera properties'))
    # NOTE(review): 'tabel' looks like a typo, but other files may \ref this
    # label, so it is left unchanged.
    result.append(kgpy.latex.Label('tabel:cameraProperties'))
    return result
예제 #5
0
def _add_keyboard_notes(document: pylatex.Document, texts: dict,
                        images: dict) -> None:
    """Append the keyboard subsection: intro text, the three-zone table, and
    the closing paragraphs.

    ``texts["keyboard"]`` supplies all strings; ``images["zone_N"]`` supplies
    the per-zone pictures rendered in the first table column.
    """
    document.append(
        pylatex.Subsection(
            title=texts["keyboard"]["title"],
            label=False,
            numbering=False,
        ))

    document.append(texts["keyboard"]["text0"])
    document.append(pylatex.NoEscape(r"\vspace{5mm}"))

    table = pylatex.Tabular(r" l | l | p{9cm} ")

    # Header row with emphasized column titles (German labels are part of the
    # rendered document and must stay as-is).
    header_titles = (
        "Bereich",
        "Beschreibung der Klänge",
        "verwendete Lautsprecher",
    )
    table.add_row(*(pylatex.MediumText(title) for title in header_titles))

    for zone in (0, 1, 2):
        table.add_hline()
        zone_image = _make_img(images["zone_{}".format(zone)],
                               width=0.22,
                               add_figure=False)
        table.add_row(
            zone_image,
            texts["keyboard"]["zone{}sound".format(zone)],
            texts["keyboard"]["zone{}speaker".format(zone)],
        )

    document.append(pylatex.Table(data=table, position="h!"))
    document.append(texts["keyboard"]["text1"])
    document.append(texts["keyboard"]["text2"])
예제 #6
0
def table_old() -> pylatex.Table:
    """Older, hard-coded figure/roughness requirements table (table:error);
    the 'Measured' column is left blank."""
    # Groups of rows; each group is followed by a horizontal rule.
    row_groups = [
        [
            [r'Primary', r'RMS slope error ($\mu$rad)', r'$<1.0$', r''],
            [r'', r'Integration length (mm)', r'4.0', r''],
            [r'', r'Sample length (mm)', r'2.0', r''],
        ],
        [
            [r'Primary', r'RMS roughness (nm)', r'$<2.5$', r''],
            [r'', r'Periods (mm)', r'0.1-6', r''],
        ],
        [
            [r'Grating', r'RMS slope error ($\mu$rad)', r'$<3.0$', r''],
            [
                r'', r'Integration length (mm)', r'2 \roy{why fewer sigfigs?}',
                r''
            ],
            [r'', r'Sample length (mm)', r'1', r''],
        ],
        [
            [r'Grating', r'RMS roughness (nm)', r'$<2.3$', r''],
            [r'', r'Periods (mm)', r'0.02-2', r''],
        ],
    ]

    result = pylatex.Table()
    result._star_latex_name = True
    with result.create(pylatex.Center()) as centering:
        with centering.create(pylatex.Tabular('llrr')) as tabular:
            tabular.escape = False
            tabular.add_row(
                [r'Element', r'Parameter', r'Requirement', r'Measured'])
            tabular.add_hline()
            for group in row_groups:
                for row in group:
                    tabular.add_row(row)
                tabular.add_hline()
    result.add_caption(
        pylatex.NoEscape(r"""
Figure and surface roughness requirements compared to metrology for the \ESIS\ optics.
Slope error (both the numerical estimates and the measurements) is worked out with integration length and sample length 
defined per ISO 10110."""))
    result.append(kgpy.latex.Label('table:error'))
    return result
예제 #7
0
def _add_data(doc: pl.Document, ds: Dataset, nr: NonRedundantization,
              meth: MLMethod):
    """Append one method's results section (summary, stats tables, graphs).

    Reads the pre-generated ``.jpg`` graphs and ``.csv`` statistics files for
    the (dataset, non-redundantization, ML-method) combination from the
    directory named after the dataset, and appends a section describing them
    to *doc*.
    """
    # All artifact file names share the '<dataset>_<nr>_<method>' stem.
    name = f'{ds.name}_{nr.name}_{meth.name}'
    directory = ds.name

    aVp_graph = f'{name}.jpg'
    angle_dist_graph = f'{name}_angledistribution.jpg'
    error_dist_graph = f'{name}_errordistribution.jpg'
    sqerror_graph = f'{name}_sqerror_vs_actual.jpg'
    stats_csv_all = f'{name}_stats_all.csv'
    stats_csv_out = f'{name}_stats_out.csv'

    actualVpred_file = os.path.join(directory, aVp_graph)
    ang_dist_file = os.path.join(directory, angle_dist_graph)
    error_dist_file = os.path.join(directory, error_dist_graph)
    sqerror_file = os.path.join(directory, sqerror_graph)

    # Summary statistics for the full dataset and for the outliers only.
    df_all = pd.read_csv(os.path.join(directory, stats_csv_all))
    df_out = pd.read_csv(os.path.join(directory, stats_csv_out))

    with doc.create(pl.Section(f'Method: {ds.name}, {nr.name}, {meth.name}')):
        with doc.create(pl.Subsection('Summary of method:')):
            doc.append(f'Dataset: {ds.name}')
            doc.append(f'\nNon-redundantization: {nr.name}')
            doc.append(f'\nType of machine learning used: {meth.name}')

    # NOTE(review): this subsection sits *outside* the Section context above,
    # so it is appended directly to the document rather than nested inside the
    # section object. The rendered LaTeX order is the same, but confirm the
    # dedent is intentional.
    with doc.create(pl.Subsection('Summary of the data:')):
        with doc.create(pl.Figure(position='!htbp')) as actualVpred:
            actualVpred.add_image(actualVpred_file, width='300px')
            actualVpred.add_caption(
                'Graph showing the predicted packing angle against the actual packing angle, when using the above specified methods of non-redundetization and machine learning.'
            )

        with doc.create(pl.Table(position='!htbp')) as table:
            table.add_caption('Summary of results for all data')
            table.append(pl.Command('centering'))
            table.append(pl.NoEscape(df_all.to_latex(escape=False)))

        with doc.create(pl.Table(position='!htbp')) as table:
            table.add_caption('Summary of results for outliers.')
            table.append(pl.Command('centering'))
            table.append(pl.NoEscape(df_out.to_latex(escape=False)))

        # Three side-by-side subfigures with further diagnostic plots.
        with doc.create(pl.Figure(position='!htbp')) as graphs:
            with doc.create(
                    pl.SubFigure(position='!htbp',
                                 width=pl.NoEscape(
                                     r'0.30\linewidth'))) as ang_dist_graph:
                ang_dist_graph.add_image(ang_dist_file,
                                         width=pl.NoEscape(r'\linewidth'))
                ang_dist_graph.add_caption(
                    'Frequency distribution of the packing angle.')
            with doc.create(
                    pl.SubFigure(position='!htbp',
                                 width=pl.NoEscape(
                                     r'0.33\linewidth'))) as error_dist_graph:
                error_dist_graph.add_image(error_dist_file,
                                           width=pl.NoEscape(r'\linewidth'))
                error_dist_graph.add_caption(
                    'Distribution of errors calculated as the difference between the predicted and actual interface angle.'
                )
            with doc.create(
                    pl.SubFigure(position='!htbp',
                                 width=pl.NoEscape(
                                     r'0.33\linewidth'))) as sqerror_graph:
                sqerror_graph.add_image(sqerror_file,
                                        width=pl.NoEscape(r'\linewidth'))
                sqerror_graph.add_caption(
                    'Squared error in predicted packing angle against actual packing angle.'
                )
            graphs.add_caption('Graphs for further metrics.')
예제 #8
0
def generate_latex(output):
    """Build the full results PDF.

    Reads the pre-computed ranking CSVs (``top10.csv``, ``top10_out.csv``),
    emits a summary section with both ranking tables, then appends one
    detailed section per (test/train split, non-redundantization, ML-method)
    combination via ``_add_data``. The compiled PDF is written to *output*
    (the ``.tex`` is kept for debugging via ``clean_tex=False``).
    """
    # Ranking tables produced by an earlier pipeline step.
    top10 = 'top10.csv'
    top10_df = pd.read_csv(top10)
    top10_o = 'top10_out.csv'
    top10_o_df = pd.read_csv(top10_o)

    doc = pl.Document(page_numbers=True,
                      geometry_options={
                          "tmargin": "1cm",
                          "lmargin": "1cm"
                      })

    # booktabs is required by pandas' to_latex output.
    doc.packages.append(pl.Package('booktabs'))
    doc.preamble.append(
        pl.Command('title', 'VH/VL Packing Angle Pipeline Results'))
    doc.preamble.append(pl.Command('author', 'Veronica A. Boron'))

    doc.append(pl.NoEscape(r'\maketitle'))
    doc.append(
        'This document summarizes the results obtained by running `packing_angle_pipeline.py` on various datasets.'
    )
    # TODO table of contents is broken and doesn't show all sections
    # doc.append(pl.NoEscape(r'\tableofcontents'))
    doc.append(pl.NoEscape(r'\newpage'))
    with doc.create(pl.Section(f'Summary of Results')):
        with doc.create(pl.Table(position='!htbp')) as table:
            table.add_caption(
                'Rakings of the top 20 combinations of methods, datasets, non-redundatizing, \
            and correction factors. They were ranked according to a combination parameter which was calcuated \
            in the following way: Combined-parameter = |(1/Pearsons coefficient)| + |mean-error| + |RMSE| + \
            |RELRMSE|. The smallest combined-para value indicates a combination of low errors and \
            high correlation coefficient.')
            table.append(pl.Command('centering'))
            # Wrap the (wide) table in \resizebox so it fits the text width.
            table.append(pl.NoEscape(r'\resizebox{\textwidth}{!}{'))
            table.append(pl.NoEscape(top10_df.to_latex(escape=False)))
            table.append(pl.NoEscape(r'}'))

        with doc.create(pl.Table(position='!htbp')) as table:
            table.add_caption(
                'Rakings of the top 20 combinations of methods, datasets, non-redundatizing, \
            and correction factors for outlier prediction. They were ranked in according to a combination parameter.'
            )
            table.append(pl.Command('centering'))
            table.append(pl.NoEscape(r'\resizebox{\textwidth}{!}{'))
            table.append(pl.NoEscape(top10_o_df.to_latex(escape=False)))
            table.append(pl.NoEscape(r'}'))

    # for ds, nr, cr in itertools.product(Dataset, NonRedundantization, Correction):
    # for ds, nr, in itertools.product(Dataset, NonRedundantization):
    #     _add_data(doc, ds, nr, MLMethod.XvalWeka, cr)
    #     doc.append(pl.NoEscape(r'\newpage'))

    # One detail section per combination, each on its own page.
    # for tt, nr, meth, cr in itertools.product(get_all_testtrain(), NonRedundantization, MLMethod, Correction):
    for tt, nr, meth in itertools.product(get_all_testtrain(),
                                          NonRedundantization, MLMethod):
        # if meth is not MLMethod.XvalWeka:
        #  _add_data(doc, tt.testing, nr, meth, cr)
        _add_data(doc, tt.testing, nr, meth)
        doc.append(pl.NoEscape(r'\newpage'))

    print('Generating PDF...')
    doc.generate_pdf(output, clean_tex=False)
예제 #9
0
def table() -> pylatex.Table:
    """Build the figure/roughness requirements vs. metrology table (table:error).

    One group of rows per optical element (primary, then grating); the
    'Requirement' column comes from the as-designed model and the 'Measured'
    column from metrology. Grating metrology is stored per-grating and is
    reported as a mean.

    Fixes over the previous revision: the microroughness rows now convert with
    ``unit_microroughness`` instead of ``unit_ripple`` (both are nm, so the
    rendered values are unchanged, but the intent is now correct), and all
    LaTeX commands in f-strings use properly escaped backslashes instead of
    relying on invalid escape sequences like ``'\\quad'``/``'\\,'``.
    """
    optics_single = optics.as_designed_single_channel()
    optics_all = esis.flight.optics.as_measured()

    # Display units for each quantity family.
    unit_length_integration = u.mm
    unit_length_sample = u.mm
    unit_slope_error = u.urad
    unit_ripple_period = u.mm
    unit_ripple = u.nm
    unit_microroughness_period = u.um
    unit_microroughness = u.nm

    def fmt_value(quantity, unit, average: bool) -> str:
        # Convert to the display unit; metrology stored per-grating is
        # reported as a mean when `average` is set.
        value = quantity.value.to(unit).value
        if average:
            value = value.mean()
        return f'{value:0.1f}'

    def add_element_rows(tabular, label, designed, measured, average):
        # Emit the seven rows describing one optical element: slope error
        # (with its integration/sample lengths), ripple, and microroughness
        # (each with their period band).
        tabular.add_row([
            label,
            f'RMS slope error ({unit_slope_error:latex_inline})',
            fmt_value(designed.slope_error, unit_slope_error, average=False),
            fmt_value(measured.slope_error, unit_slope_error, average=average),
        ])
        tabular.add_row([
            r'',
            f'\\quad Integration length = {designed.slope_error.length_integration.to(unit_length_integration).value:0.1f}\\,{unit_length_integration:latex_inline}',
            r'',
            r'',
        ])
        tabular.add_row([
            r'',
            f'\\quad Sample length = {designed.slope_error.length_sample.to(unit_length_sample).value:0.1f}\\,{unit_length_sample:latex_inline}',
            r'',
            r'',
        ])
        tabular.add_row([
            r'',
            f'RMS roughness ({unit_ripple:latex_inline})',
            fmt_value(designed.ripple, unit_ripple, average=False),
            fmt_value(measured.ripple, unit_ripple, average=average),
        ])
        tabular.add_row([
            r'',
            f'\\quad Periods = ${designed.ripple.periods_min.to(unit_ripple_period).value:0.2f}-{designed.ripple.periods_max.to(unit_ripple_period).value:0.1f}$\\,{unit_ripple_period:latex_inline}',
            r'',
            r'',
        ])
        tabular.add_row([
            r'',
            f'RMS microroughness ({unit_microroughness:latex_inline})',
            fmt_value(designed.microroughness, unit_microroughness,
                      average=False),
            fmt_value(measured.microroughness, unit_microroughness,
                      average=average),
        ])
        tabular.add_row([
            r'',
            f'\\quad Periods = ${designed.microroughness.periods_min.to(unit_microroughness_period).value:0.2f}-{designed.microroughness.periods_max.to(unit_microroughness_period).value:0.1f}$\\,{unit_microroughness_period:latex_inline}',
            r'',
            r'',
        ])

    result = pylatex.Table()
    result._star_latex_name = True
    with result.create(pylatex.Center()) as centering:
        with centering.create(pylatex.Tabular('llrr')) as tabular:
            tabular.escape = False
            tabular.add_row([
                r'Element',
                r'Parameter',
                r'Requirement',
                r'Measured',
            ])
            tabular.add_hline()
            add_element_rows(tabular, r'Primary', optics_single.primary,
                             optics_all.primary, average=False)
            tabular.add_hline()
            # Grating metrology covers several gratings, hence the mean.
            add_element_rows(tabular, r'Grating', optics_single.grating,
                             optics_all.grating, average=True)
            tabular.add_hline()

    result.add_caption(
        pylatex.NoEscape(r"""
Figure and surface roughness requirements compared to metrology for the \ESIS\ optics.
Slope error (both the numerical estimates and the measurements) is worked out with integration length and sample length 
defined per ISO 10110."""))
    result.append(kgpy.latex.Label('table:error'))
    return result
예제 #10
0
def table(doc: kgpy.latex.Document) -> pylatex.Table:
    """Build the per-channel signal-statistics table (table:counts).

    Estimates photon counts, shot/read noise, and SNR for the O V and Mg X
    lines in four source/solar-context columns, for a single exposure and for
    a stack of exposures. Also sets the document variables ``NumExpInStack``
    and ``StackedCoronalHoleSNR`` referenced elsewhere in the text.
    """
    result = pylatex.Table()

    # Locate the O V line and the *second* Mg X line in the modeled bunch.
    optics_single = optics.as_designed_single_channel()
    wavelength = optics_single.bunch.wavelength
    index_o5 = np.nonzero(optics_single.bunch.ion == 'o_5')[0][0]
    wavelength_o5 = wavelength[index_o5]
    index_mg10_2 = np.nonzero(optics_single.bunch.ion == 'mg_10')[0][1]
    wavelength_mg10_2 = wavelength[index_mg10_2]

    # Line intensities for the four columns (QS, CH, AR from \VR; AR from
    # \CDS -- see the table's two header rows).
    intensity_o5 = [334.97, 285.77, 1018.65, 519.534
                    ] * u.erg / u.cm**2 / u.sr / u.s
    intensity_mg10 = [51.43, 2.62, 397.64, 239.249
                      ] * u.erg / u.cm**2 / u.sr / u.s

    # Photon energies, used to convert erg-based intensities to photon counts.
    energy_o5 = wavelength_o5.to(u.erg, equivalencies=u.spectral()) / u.photon
    energy_mg10 = wavelength_mg10_2.to(u.erg,
                                       equivalencies=u.spectral()) / u.photon

    optics_single_measured = optics.as_measured_single_channel()
    rays = optics_single_measured.rays_output

    # Effective area: sum the masked ray intensity over the pupil and
    # LOS-velocity axes, then average over the field; cells with zero area
    # are excluded via NaN so they don't bias the mean.
    area = rays.intensity.copy()
    area[~rays.mask] = np.nan
    area = np.nansum(
        area, (rays.axis.pupil_x, rays.axis.pupil_y, rays.axis.velocity_los),
        keepdims=True)
    area[area == 0] = np.nan
    area = np.nanmean(area, (rays.axis.field_x, rays.axis.field_y)).squeeze()
    # Indices 0 and 2 select the O V and Mg X wavelengths respectively
    # (presumably matching the bunch ordering -- confirm).
    area_o5 = area[0]
    area_mg10 = area[2]

    # Solid angle subtended by a single pixel.
    pixel_subtent = (optics_single.plate_scale.x *
                     optics_single.plate_scale.y * u.pix * u.pix).to(u.sr)
    time_integration = optics_single.detector.exposure_length

    # Expected photon counts per exposure for each line, and their total.
    counts_o5 = (intensity_o5 * area_o5 * pixel_subtent * time_integration /
                 energy_o5).to(u.photon)
    counts_mg10 = (intensity_mg10 * area_mg10 * pixel_subtent *
                   time_integration / energy_mg10).to(u.photon)
    counts_total = counts_o5 + counts_mg10

    # Number of exposures co-added ("stacked") to boost SNR.
    stack_num = 12
    counts_total_stacked = counts_total * stack_num

    # Shot noise is Poissonian: sqrt of the expected counts.
    noise_shot = np.sqrt(counts_total.value) * counts_total.unit
    noise_shot_stacked = np.sqrt(
        counts_total_stacked.value) * counts_total.unit

    # Read noise (mean over quadrants, scaled by gain) converted to photon
    # units at the O V energy using 3.6 eV per electron -- presumably the Si
    # electron-hole pair-creation energy; confirm.
    noise_read = optics_single_measured.detector.readout_noise.mean()
    noise_read = noise_read * optics_single_measured.detector.gain.mean()
    noise_read_o5 = (noise_read / (energy_o5 / (3.6 * u.eV / u.electron))).to(
        u.photon)
    # NOTE(review): stacked read noise scales linearly with stack_num here
    # rather than as sqrt(stack_num) -- confirm this is intended.
    noise_read_o5_stacked = stack_num * noise_read_o5

    # Total noise: shot and read noise added in quadrature.
    noise_total = np.sqrt(np.square(noise_shot) + np.square(noise_read_o5))
    noise_total_stacked = np.sqrt(
        np.square(noise_shot_stacked) + np.square(noise_read_o5_stacked))

    snr = counts_total / noise_total
    snr_stacked = counts_total_stacked / noise_total_stacked

    # Group labels for the single-exposure and stacked halves of the table.
    label = f'1 $\\times$ {kgpy.format.quantity(time_integration, digits_after_decimal=0)} exp.'
    label_stacked = f'{stack_num} $\\times$ {kgpy.format.quantity(time_integration, digits_after_decimal=0)} exp.'

    # Export quantities referenced from the manuscript text.
    doc.set_variable(
        name='NumExpInStack',
        value=str(stack_num),
    )

    doc.set_variable_quantity(
        name='StackedCoronalHoleSNR',
        # Column with the lowest O V intensity (the coronal-hole column).
        value=snr_stacked[np.argmin(intensity_o5)],
        digits_after_decimal=1,
    )

    with result.create(pylatex.Center()) as centering:
        with centering.create(pylatex.Tabular('lrrrr')) as tabular:
            tabular.escape = False
            tabular.add_row([r'Source', r'\VR', r'\VR', r'\VR', r'\CDS'])
            tabular.add_row(r'Solar context', r'\QSShort', r'\CHShort',
                            r'\ARShort', r'\ARShort')
            tabular.add_hline()
            tabular.add_hline()
            # Single-exposure block.
            tabular.append(f'\\multicolumn{{5}}{{c}}{{{label}}}\\\\')
            tabular.add_row([
                r'\OV',
            ] + [f'{c:0.0f}' for c in counts_o5.value])
            tabular.add_row([
                r'\MgXdim',
            ] + [f'{c:0.0f}' for c in counts_mg10.value])
            tabular.add_hline()
            tabular.add_row([
                r'Total',
            ] + [f'{c:0.0f}' for c in counts_total.value])
            tabular.add_row([
                r'Shot noise',
            ] + [f'{c:0.1f}' for c in noise_shot.value])
            # Read noise is a single per-channel value repeated per column.
            tabular.add_row([
                r'Read noise',
            ] + 4 * [f'{noise_read_o5.value:0.1f}'])
            tabular.add_row([
                r'\SNRShort',
            ] + [f'{c:0.1f}' for c in snr.value])
            tabular.add_hline()
            tabular.add_hline()
            # Stacked-exposure block.
            tabular.append(f'\\multicolumn{{5}}{{c}}{{{label_stacked}}}\\\\')
            tabular.add_row([
                'Total',
            ] + [f'{c:0.0f}' for c in counts_total_stacked.value])
            tabular.add_row([
                r'\SNRShort',
            ] + [f'{c:0.1f}' for c in snr_stacked.value])
            tabular.add_hline()
            tabular.add_hline()

    result.add_caption(
        pylatex.NoEscape(r"""
Estimated signal statistics per channel (in photon counts) for \ESIS\ lines in \CH, \QS, and \AR.
Note that the \SNR\ estimates are lower bounds since charge diffusion decreases the shot noise."""
                         ))
    result.append(kgpy.latex.Label('table:counts'))
    return result
예제 #11
0
    def tabulateAll(self):
        """
        Create a table that summarises all input variables and additive
        components, including the constant kernel as baseline and the final
        full additive model.

        Rows are sorted by classification error (primary key) with NLML as
        the tie-breaker; the best NLML and error entries are set in bold.
        """
        # 1-D variables
        ks = self.best1d[:]
        # Baseline: constant kernel
        ks.append(self.constker)
        # Additive components, if not 1-D
        for k in self.summands:
            if len(k.getActiveDims()) > 1:
                ks.append(k)
        # Full additive model, if involves more than one additive term
        best = self.history[-1]
        if len(self.summands) > 1:
            ks.append(best)

        # Two stable sorts: NLML first, then error, so error is the primary
        # key and NLML breaks ties.
        ks.sort(key=lambda k: round(k.getNLML(), 2))
        ks.sort(key=lambda k: round(k.error(), 4))
        # (Removed dead locals `data`/`ds` from the previous revision; the
        # data shape is not used in this table, only in tabulateVariables.)

        # Best scores, used to embolden the winning entries below.
        nlml_min = round(min([k.getNLML() for k in ks]), 2)
        error_min = round(min([k.error() for k in ks]), 4)

        doc = self.doc
        with doc.create(pl.Table(position='htbp!')) as tab:
            caption_str = "Classification performance of the full model, its additive components (if any), all input variables, and the baseline."
            tab.add_caption(ut.NoEscape(caption_str))

            t = pl.Tabular('rlrr')
            # Header
            t.add_hline()
            t.add_row((pl.MultiColumn(1, align='c', data='Dimensions'),
                       pl.MultiColumn(1, align='c', data='Kernel expression'),
                       pl.MultiColumn(1, align='c', data='NLML'),
                       pl.MultiColumn(1, align='c', data='Error')))
            t.add_hline()

            # Entries: the baseline row is italicised, others are plain.
            for k in ks:
                if k is self.constker:
                    row = [
                        ut.italic('--', escape=False),
                        ut.italic('$' + k.latex() + '$ (Baseline)',
                                  escape=False),
                        ut.italic('{0:.2f}'.format(k.getNLML()), escape=False),
                        ut.italic(r'{0:.2f}\%'.format(k.error() * 100),
                                  escape=False)
                    ]
                else:
                    dims = sorted(k.getActiveDims())
                    row = [
                        ut.NoEscape(', '.join([str(d + 1) for d in dims])),
                        ut.NoEscape('$' + k.latex() + '$'),
                        ut.NoEscape('{0:.2f}'.format(k.getNLML())),
                        ut.NoEscape(r'{0:.2f}\%'.format(k.error() * 100))
                    ]
                # Bold the best NLML (column 2) and best error (column 3).
                if round(k.getNLML(), 2) == nlml_min:
                    row[2] = ut.bold(row[2])
                if round(k.error(), 4) == error_min:
                    row[3] = ut.bold(row[3])

                t.add_row(tuple(row))

            t.add_hline()

            tab.append(ut.NoEscape(r'\centering'))
            tab.append(t)
예제 #12
0
    def tabulateVariables(self):
        """
        Create a table that summarises all input variables.

        One row per 1-D kernel (plus the constant-kernel baseline), showing
        the variable's min/max/mean and the kernel's NLML and classification
        error; the best NLML and error entries are set in bold.
        """
        # Candidates: best 1-D kernel per variable plus the baseline.
        ks = self.best1d[:]
        ks.append(self.constker)
        # Two stable sorts: NLML first, then error, so error is the primary
        # key and NLML breaks ties.
        ks.sort(key=lambda k: round(k.getNLML(), 2))
        ks.sort(key=lambda k: round(k.error(), 4))
        # Per-dimension statistics (x_min/x_max/x_mu) for the table body.
        data = ks[0].data
        ds = data.getDataShape()

        # Best scores, used to embolden the winning entries below.
        nlml_min = round(min([k.getNLML() for k in ks]), 2)
        error_min = round(min([k.error() for k in ks]), 4)

        doc = self.doc
        with doc.create(pl.Table(position='htbp!')) as tab:
            tab.add_caption(ut.NoEscape("Input variables"))

            t = pl.Tabular('rlrrrcrr')
            # Header: a two-level header with grouped column titles.
            t.add_hline()
            t.add_row(('', '', pl.MultiColumn(3, align='c', data='Statistics'),
                       pl.MultiColumn(3,
                                      align='c',
                                      data='Classifier Performance')))
            t.add_hline(start=3, end=8)
            t.add_row((pl.MultiColumn(1, align='c', data='Dimension'),
                       pl.MultiColumn(1, align='c', data='Variable'),
                       pl.MultiColumn(1, align='c', data='Min'),
                       pl.MultiColumn(1, align='c', data='Max'),
                       pl.MultiColumn(1, align='c', data='Mean'),
                       pl.MultiColumn(1, align='c', data='Kernel'),
                       pl.MultiColumn(1, align='c', data='NLML'),
                       pl.MultiColumn(1, align='c', data='Error')))
            t.add_hline()

            # Entries: the baseline row is italicised with '--' placeholders;
            # other rows show per-dimension statistics.
            for k in ks:
                if k is self.constker:
                    row = [
                        ut.italic('--', escape=False),
                        ut.italic('Baseline', escape=False),
                        ut.italic('--', escape=False),
                        ut.italic('--', escape=False),
                        ut.italic('--', escape=False),
                        ut.italic(k.shortInterp(), escape=False),
                        ut.italic('{0:.2f}'.format(k.getNLML()), escape=False),
                        ut.italic(r'{0:.2f}\%'.format(k.error() * 100),
                                  escape=False)
                    ]
                else:
                    # 1-D kernel: its single active dimension indexes the
                    # data statistics.
                    dim = k.getActiveDims()[0]
                    row = [
                        dim + 1, data.XLabel[dim],
                        '{0:.2f}'.format(ds['x_min'][dim]),
                        '{0:.2f}'.format(ds['x_max'][dim]),
                        '{0:.2f}'.format(ds['x_mu'][dim]),
                        k.shortInterp(),
                        ut.NoEscape('{0:.2f}'.format(k.getNLML())),
                        ut.NoEscape(r'{0:.2f}\%'.format(k.error() * 100))
                    ]
                # Bold the best NLML (column 6) and best error (column 7).
                if round(k.getNLML(), 2) == nlml_min:
                    row[6] = ut.bold(row[6])
                if round(k.error(), 4) == error_min:
                    row[7] = ut.bold(row[7])

                t.add_row(tuple(row))

            t.add_hline()

            tab.append(ut.NoEscape(r'\centering'))
            tab.append(t)
예제 #13
0
파일: main.py 프로젝트: ryanbehdad/latex
    sentence = f'The project has a loss of ${-1 * SDCF:,.0f}.'

# Create document
doc = pl.Document()
doc.packages.append(pl.Package('booktabs'))

# Add preamble
doc.preamble.append(pl.Command('title', report_title))
doc.preamble.append(pl.Command('author', repot_author))
doc.preamble.append(pl.Command('date', pl.NoEscape(r'\today')))
doc.append(pl.NoEscape(r'\maketitle'))

# Create section 1
with doc.create(pl.Section('Data')):
    doc.append('The cash flow data is as follows.')
    with doc.create(pl.Table(position='htbp')) as table:
        table.add_caption('Cash Flow')
        table.append(pl.Command('centering'))
        table.append(
            pl.NoEscape(df_original.to_latex(escape=False, index=False)))

# Create section 2
with doc.create(pl.Section('Conclusion')):
    doc.append(sentence)

# Create section 3
with doc.create(pl.Section('Appendix')):
    doc.append('The discounted cash flow is shown in the table below.')
    with doc.create(pl.Table(position='htbp')) as table:
        table.add_caption('Discounted Cash Flow')
        table.append(pl.Command('centering'))
예제 #14
0
def table(doc: kgpy.latex.Document) -> pylatex.Table:
    """Build the imaging error-budget / tolerance-analysis table.

    Every toleranced parameter of the perturbed optical model is evaluated
    for its RMS spot-size contribution (in pixels and arcsec) and its MTF
    degradation relative to the nominal system.  PSF contributions are
    accumulated in quadrature and MTF factors multiplicatively.

    Side effect: sets the document variable ``spatialResolutionTotal`` from
    the root-sum-square of all PSF contributions.

    :param doc: document that receives the ``spatialResolutionTotal`` variable.
    :return: a full-width (``table*``) pylatex table, captioned and labelled
        ``table:errorBudget``.
    """
    requirements = esis.optics.design.requirements()
    optics_single = optics.as_designed_single_channel()

    # Model with the tolerance-analysis kwargs applied; baseline for all rows.
    opt = esis.optics.design.final(**optics.error_kwargs)

    wavelength = optics_single.bunch.wavelength
    index_o5 = np.nonzero(optics_single.bunch.ion == 'o_5')[0][0]
    wavelength_o5 = wavelength[index_o5]

    # Spatial-frequency requirement implied by the angular-resolution requirement.
    frequency_requirement = 1 / requirements.resolution_angular

    def calc_mtf(optics: esis.optics.Optics):
        """Field-averaged MTF of ``optics`` at the requirement frequency,
        evaluated for the O V spectral line."""
        rays = optics.rays_output
        mtf, frequency = rays.mtf(
            bins=200,
            frequency_min=frequency_requirement,
        )
        mtf = np.take(a=mtf, indices=[index_o5], axis=rays.axis.wavelength)
        # Average over the field points, ignoring empty (zero) bins.
        mtf = np.mean(
            mtf.value, axis=rays.axis.field_xy, keepdims=True,
            where=mtf != 0) << mtf.unit
        frequency = frequency.take(indices=[index_o5],
                                   axis=rays.axis.wavelength)
        frequency = np.mean(frequency, axis=rays.axis.field_xy, keepdims=True)

        mtf = np.take(a=mtf, indices=[0], axis=rays.axis.pupil_x)
        # Index of the first frequency bin at or above the requirement.
        index_frequency_requirement = np.argmax(frequency.y.take(
            indices=[0], axis=rays.axis.pupil_x) >= frequency_requirement,
                                                axis=rays.axis.pupil_y)
        index_frequency_requirement = np.expand_dims(
            index_frequency_requirement, axis=rays.axis.pupil_y)
        mtf = np.take_along_axis(mtf,
                                 indices=index_frequency_requirement,
                                 axis=rays.axis.pupil_y)
        return mtf.squeeze()

    mtf_nominal = calc_mtf(opt)

    units_psf = u.pix
    plate_scale = optics_single.plate_scale
    focal_length_effective = optics_single.magnification.y * optics_single.primary.focal_length

    # Nominal aberration-limited RMS spot size of the full system.
    system_psf = np.nanmean(opt.rays_output.spot_size_rms[..., 0, :])

    # Frequency at which the MTF column is reported.
    frequency_mtf_arcsec = 0.5 * u.cycle / u.arcsec
    frequency_mtf = frequency_mtf_arcsec * plate_scale.y / u.cycle

    def to_mtf(psf_size: u.Quantity):
        """MTF at ``frequency_mtf`` of a Gaussian PSF with RMS size ``psf_size``."""
        psf_size = psf_size / np.sqrt(2)
        alpha = 1 / (2 * psf_size**2)
        return np.exp(-(np.pi * frequency_mtf)**2 / alpha)

    def to_pix(value: u.Quantity):
        """Convert a detector-plane length to pixels."""
        return value / (optics_single.detector.pixel_width / u.pix)

    # RMS slope errors mapped to RMS spot sizes (in pixels) at the detector.
    primary_slope_error = optics_single.primary.slope_error.value
    primary_slope_error_psf = focal_length_effective * np.tan(
        2 * primary_slope_error)
    primary_slope_error_psf /= optics_single.detector.pixel_width / u.pix

    opt_primary_decenter_x_max = optics.error_primary_decenter_x_max()
    opt_primary_decenter_x_min = optics.error_primary_decenter_x_min()
    opt_primary_decenter_y_max = optics.error_primary_decenter_y_max()
    opt_primary_decenter_y_min = optics.error_primary_decenter_y_min()

    distance_grating_to_detector = (
        optics_single.detector.transform.translation_eff -
        optics_single.grating.transform.translation_eff).length
    grating_slope_error = optics_single.grating.slope_error.value
    grating_slope_error_psf = distance_grating_to_detector * np.tan(
        2 * grating_slope_error)
    grating_slope_error_psf /= optics_single.detector.pixel_width / u.pix

    opt_grating_translation_x_min = optics.error_grating_translation_x_min()
    opt_grating_translation_x_max = optics.error_grating_translation_x_max()
    opt_grating_translation_y_min = optics.error_grating_translation_y_min()
    opt_grating_translation_y_max = optics.error_grating_translation_y_max()
    opt_grating_translation_z_min = optics.error_grating_translation_z_min()
    opt_grating_translation_z_max = optics.error_grating_translation_z_max()
    opt_grating_roll_min = optics.error_grating_roll_min()
    opt_grating_roll_max = optics.error_grating_roll_max()
    opt_grating_radius_min = optics.error_grating_radius_min()
    opt_grating_radius_max = optics.error_grating_radius_max()
    opt_grating_ruling_density_min = optics.error_grating_ruling_density_min()
    opt_grating_ruling_density_max = optics.error_grating_ruling_density_max()
    opt_grating_ruling_spacing_linear_min = optics.error_grating_ruling_spacing_linear_min(
    )
    opt_grating_ruling_spacing_linear_max = optics.error_grating_ruling_spacing_linear_max(
    )
    opt_grating_ruling_spacing_quadratic_min = optics.error_grating_ruling_spacing_quadratic_min(
    )
    opt_grating_ruling_spacing_quadratic_max = optics.error_grating_ruling_spacing_quadratic_max(
    )

    opt_detector_translation_x_min = optics.error_detector_translation_x_min()
    opt_detector_translation_x_max = optics.error_detector_translation_x_max()
    opt_detector_translation_y_min = optics.error_detector_translation_y_min()
    opt_detector_translation_y_max = optics.error_detector_translation_y_max()
    opt_detector_translation_z_min = optics.error_detector_translation_z_min()
    opt_detector_translation_z_max = optics.error_detector_translation_z_max()

    # Diffraction limit: fit a Gaussian to the sinc**2 diffraction pattern of
    # the mean unvignetted beam footprint and quote its RMS width.
    rays = opt.system.rays_input.copy()
    rays.position = np.broadcast_to(rays.position,
                                    opt.rays_output.position.shape,
                                    subok=True).copy()
    rays.position[~opt.rays_output.mask] = np.nan
    rays_min = np.nanmin(rays.position,
                         axis=(rays.axis.pupil_x, rays.axis.pupil_y))
    rays_max = np.nanmax(rays.position,
                         axis=(rays.axis.pupil_x, rays.axis.pupil_y))
    rays_range = np.nanmean(rays_max - rays_min)
    detector_x = np.linspace(-1, 1, 100) / 2 * u.pix
    diffraction_intensity = np.sinc(rays_range.x / wavelength_o5 * u.rad *
                                    np.sin(detector_x * opt.plate_scale.x))**2
    model = astropy.modeling.fitting.LevMarLSQFitter()(
        model=astropy.modeling.models.Gaussian1D(),
        x=detector_x,
        y=diffraction_intensity,
    )
    diffraction_limit = np.sqrt(2) * model.stddev.quantity

    # Running totals: PSF sizes add in quadrature, MTF factors multiply.
    accumulator = dict(
        psf_size_squared=0 * u.pix**2,
        mtf=1 * u.dimensionless_unscaled,
        mtf_actual=1 * u.dimensionless_unscaled,
    )

    def add_row_basic(
        tabular: pylatex.Tabular,
        optics: typ.Union[esis.optics.Optics, typ.Tuple[esis.optics.Optics,
                                                        esis.optics.Optics]],
        name_major: str = '',
        name_minor: str = '',
        value_str: str = '',
        psf_size: u.Quantity = 0 * u.um,
        mtf_actual: u.Quantity = 1.0 * u.dimensionless_unscaled,
    ):
        """Append one formatted row to ``tabular`` and fold the PSF/MTF
        contribution into ``accumulator``."""

        mtf = to_mtf(psf_size)

        tabular.add_row([
            name_major,
            name_minor,
            value_str,
            f'{psf_size.to(u.pix).value:0.2f}',
            f'{(psf_size * optics.plate_scale.y).to(u.arcsec).value:0.2f}',
            f'{mtf.value:0.3f}',
            f'{mtf_actual.value:0.3f}',
        ])

        accumulator['psf_size_squared'] += np.square(psf_size)
        accumulator['mtf_actual'] *= mtf_actual
        accumulator['mtf'] *= mtf

    def add_row(
        tabular: pylatex.Tabular,
        optics: typ.Union[esis.optics.Optics, typ.Tuple[esis.optics.Optics,
                                                        esis.optics.Optics]],
        name_major: str = '',
        name_minor: str = '',
        value: typ.Optional[typ.Union[u.Quantity,
                                      typ.Tuple[u.Quantity,
                                                u.Quantity]]] = None,
        digits_after_decimal: int = 3,
        scientific_notation: bool = False,
        remove_nominal_psf: bool = True,
    ):
        """Append a row for one toleranced parameter.

        ``optics`` may be a single perturbed model or a ``(min, max)`` pair,
        in which case the worse (larger RMS spot) of the two is reported.
        When ``remove_nominal_psf`` is True the nominal-system contribution
        is removed (in quadrature for the PSF, by division for the MTF).
        Only symmetric ``value`` pairs (max == -min) are supported.
        """
        format_kwargs = dict(
            digits_after_decimal=digits_after_decimal,
            scientific_notation=scientific_notation,
        )

        if not isinstance(optics, esis.optics.Optics):
            optics_min, optics_max = optics

            psf_size_min = np.nanmean(
                optics_min.rays_output.spot_size_rms[..., 0, :])
            psf_size_max = np.nanmean(
                optics_max.rays_output.spot_size_rms[..., 0, :])

            if psf_size_max > psf_size_min:
                optics = optics_max
            else:
                optics = optics_min

            if value is not None:
                value_min, value_max = value
                if value_max == -value_min:
                    value_str = f'$\\pm${kgpy.format.quantity(value_max, **format_kwargs)}'
                else:
                    raise NotImplementedError
            else:
                value_str = ''

        else:
            if value is not None:
                value_str = f'{kgpy.format.quantity(value, **format_kwargs)}'
            else:
                value_str = ''

        psf_size = np.nanmean(optics.rays_output.spot_size_rms[..., 0, :])
        mtf_actual = calc_mtf(optics)
        if remove_nominal_psf:
            psf_size = np.nan_to_num(
                np.sqrt(np.square(psf_size) - np.square(system_psf)))
            mtf_actual = mtf_actual / mtf_nominal

        add_row_basic(
            tabular=tabular,
            optics=optics,
            name_major=name_major,
            name_minor=name_minor,
            value_str=value_str,
            psf_size=psf_size,
            mtf_actual=mtf_actual,
        )

    def ptp_to_rms(value: u.Quantity) -> u.Quantity:
        """Convert a peak-to-peak sinusoidal excursion to an RMS value."""
        return value / np.sqrt(8)

    result = pylatex.Table()
    result._star_latex_name = True  # render as table* (spans both columns)
    with result.create(pylatex.Center()) as centering:
        with centering.create(pylatex.Tabular('ll|rrrrr')) as tabular:
            tabular.escape = False
            tabular.add_row([
                r'Element',
                r'',
                r'Tolerance',
                f'$\\sigma$ ({units_psf:latex_inline})',
                f'$\\sigma$ ({u.arcsec:latex_inline})',
                r'\MTF\ from $\sigma$',
                r'\MTF\ actual ',
            ])
            tabular.add_hline()
            add_row(
                tabular=tabular,
                optics=opt,
                name_major='System',
                name_minor='Aberration',
                remove_nominal_psf=False,
            )
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_minor='Diffraction',
                psf_size=diffraction_limit,
            )
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_minor='Thermal drift',
                psf_size=ptp_to_rms(opt.sparcs.pointing_drift /
                                    opt.plate_scale.x *
                                    opt.detector.exposure_length),
            )
            tabular.add_hline()
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_major='Primary',
                name_minor='RMS Slope error',
                value_str=
                f'{kgpy.format.quantity(primary_slope_error, digits_after_decimal=1)}',
                psf_size=primary_slope_error_psf,
                mtf_actual=opt.primary.mtf_degradation_factor,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_primary_decenter_x_min,
                    opt_primary_decenter_x_max,
                ),
                name_minor='Translation $x$',
                value=(
                    -opt_primary_decenter_x_min.primary.translation_error.
                    value.xy.length,
                    opt_primary_decenter_x_max.primary.translation_error.value.
                    xy.length,
                ),
                digits_after_decimal=0,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_primary_decenter_y_min,
                    opt_primary_decenter_y_max,
                ),
                name_minor='Translation $y$',
                value=(
                    -opt_primary_decenter_y_min.primary.translation_error.
                    value.xy.length,
                    opt_primary_decenter_y_max.primary.translation_error.value.
                    xy.length,
                ),
                digits_after_decimal=0,
            )
            tabular.add_hline()
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_major='Grating',
                name_minor='RMS Slope error',
                value_str=
                f'{kgpy.format.quantity(grating_slope_error, digits_after_decimal=1)}',
                psf_size=grating_slope_error_psf,
                mtf_actual=opt.grating.mtf_degradation_factor,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_grating_translation_x_min,
                    opt_grating_translation_x_max,
                ),
                name_minor='Translation $x$',
                value=(
                    -opt_grating_translation_x_min.grating.translation_error.
                    value.xy.length,
                    opt_grating_translation_x_max.grating.translation_error.
                    value.xy.length,
                ),
                digits_after_decimal=0,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_grating_translation_y_min,
                    opt_grating_translation_y_max,
                ),
                name_minor='Translation $y$',
                value=(
                    -opt_grating_translation_y_min.grating.translation_error.
                    value.xy.length,
                    opt_grating_translation_y_max.grating.translation_error.
                    value.xy.length,
                ),
                digits_after_decimal=0,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_grating_translation_z_min,
                    opt_grating_translation_z_max,
                ),
                name_minor='Translation $z$',
                value=(
                    opt_grating_translation_z_min.grating.translation_error.z,
                    opt_grating_translation_z_max.grating.translation_error.z,
                ),
                digits_after_decimal=3,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_grating_roll_min,
                    opt_grating_roll_max,
                ),
                name_minor='Roll',
                value=(
                    opt_grating_roll_min.grating.roll_error,
                    opt_grating_roll_max.grating.roll_error,
                ),
                digits_after_decimal=3,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_grating_radius_min,
                    opt_grating_radius_max,
                ),
                name_minor='Radius',
                value=(
                    opt_grating_radius_min.grating.tangential_radius_error,
                    opt_grating_radius_max.grating.tangential_radius_error,
                ),
                digits_after_decimal=1,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_grating_ruling_density_min,
                    opt_grating_ruling_density_max,
                ),
                name_minor='Ruling density',
                value=(
                    opt_grating_ruling_density_min.grating.
                    ruling_density_error,
                    opt_grating_ruling_density_max.grating.
                    ruling_density_error,
                ),
                digits_after_decimal=1,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_grating_ruling_spacing_linear_min,
                    opt_grating_ruling_spacing_linear_max,
                ),
                name_minor='Linear coeff.',
                value=(
                    opt_grating_ruling_spacing_linear_min.grating.
                    ruling_spacing_coeff_linear_error,
                    opt_grating_ruling_spacing_linear_max.grating.
                    ruling_spacing_coeff_linear_error,
                ),
                digits_after_decimal=1,
                scientific_notation=True,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_grating_ruling_spacing_quadratic_min,
                    opt_grating_ruling_spacing_quadratic_max,
                ),
                name_minor='Quadratic coeff.',
                value=(
                    opt_grating_ruling_spacing_quadratic_min.grating.
                    ruling_spacing_coeff_quadratic_error,
                    opt_grating_ruling_spacing_quadratic_max.grating.
                    ruling_spacing_coeff_quadratic_error,
                ),
                digits_after_decimal=1,
                scientific_notation=True,
            )
            tabular.add_hline()
            add_row(
                tabular=tabular,
                optics=(
                    opt_detector_translation_x_min,
                    opt_detector_translation_x_max,
                ),
                name_major='Detector',
                name_minor='Translation $x$',
                value=(
                    -opt_detector_translation_x_min.detector.translation_error.
                    value.xy.length,
                    opt_detector_translation_x_max.detector.translation_error.
                    value.xy.length,
                ),
                digits_after_decimal=0,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_detector_translation_y_min,
                    opt_detector_translation_y_max,
                ),
                name_minor='Translation $y$',
                value=(
                    -opt_detector_translation_y_min.detector.translation_error.
                    value.xy.length,
                    opt_detector_translation_y_max.detector.translation_error.
                    value.xy.length,
                ),
                digits_after_decimal=0,
            )
            add_row(
                tabular=tabular,
                optics=(
                    opt_detector_translation_z_min,
                    opt_detector_translation_z_max,
                ),
                name_minor='Translation $z$',
                value=(
                    opt_detector_translation_z_min.detector.translation_error.
                    z,
                    opt_detector_translation_z_max.detector.translation_error.
                    z,
                ),
                digits_after_decimal=2,
            )
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_minor='Charge diffusion',
                psf_size=to_pix(opt.detector.charge_diffusion),
            )
            tabular.add_hline()
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_major=r'\SPARCSShort',
                name_minor='Pointing jitter',
                value_str=
                f'$\\pm${kgpy.format.quantity(opt.sparcs.pointing_jitter / 2, digits_after_decimal=2)}',
                psf_size=ptp_to_rms(opt.sparcs.pointing_jitter /
                                    opt.plate_scale.x),
            )
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_minor='Pointing drift',
                value_str=f'{kgpy.format.quantity(opt.sparcs.pointing_drift)}',
                psf_size=ptp_to_rms(opt.sparcs.pointing_drift /
                                    opt.plate_scale.x *
                                    opt.detector.exposure_length),
            )
            # Worst-case offset from the roll axis used to convert roll
            # errors into translations at the detector.
            pointing = 10 * u.arcmin
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_minor='Roll jitter',
                value_str=
                f'$\\pm${kgpy.format.quantity(opt.sparcs.rlg_jitter / 2, digits_after_decimal=0)}',
                psf_size=ptp_to_rms(2 * np.sin(opt.sparcs.rlg_jitter / 2) *
                                    pointing / opt.plate_scale.x),
            )
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_minor='Roll drift',
                value_str=f'{kgpy.format.quantity(opt.sparcs.rlg_drift)}',
                psf_size=ptp_to_rms(2 * np.sin(
                    opt.sparcs.rlg_drift * opt.detector.exposure_length / 2) *
                                    pointing / opt.plate_scale.x),
            )
            tabular.add_hline()
            tabular.add_hline()
            # Total RMS spot size: root-sum-square of all contributions,
            # exported to the document (doubled for a resolution element).
            psf_size_total = np.sqrt(accumulator['psf_size_squared'])
            doc.set_variable_quantity(
                name='spatialResolutionTotal',
                value=2 * psf_size_total * opt.plate_scale.x,
                digits_after_decimal=2,
            )
            add_row_basic(
                tabular=tabular,
                optics=opt,
                name_major='Total',
                psf_size=psf_size_total,
                mtf_actual=accumulator['mtf_actual'],
            )
    result.add_caption(
        pylatex.NoEscape(f"""
Imaging error budget and tolerance analysis results. 
\\MTF\\ is given at {kgpy.format.quantity(frequency_mtf_arcsec, digits_after_decimal=1)}."""
                         ))
    result.append(kgpy.latex.Label('table:errorBudget'))
    return result
예제 #15
0
def create_overleaf_files(overleaf):
    """Mirror figshare project files into the local Overleaf working tree.

    ``overleaf`` is expected to be a DataFrame with at least the columns
    ``article_id``, ``name``, ``overleaf`` (target base name) and
    ``caption``.  For every matching figshare file this downloads the
    content, writes it (or a LaTeX rendering, for ``.pkl`` pickled
    DataFrames) under ``figshare/``, builds one bibliography entry per
    file, and generates a ``supplementary.tex`` document that inputs every
    table and image, staging everything with git.

    Relies on names defined outside this view: ``FIGSHARE_PROJECT_ID``,
    ``get_project_articles``, ``get_files_of_article``,
    ``raw_issue_request``, ``head`` (output-directory suffix) and ``repo``
    (git repository handle).
    """
    files = []

    # Collect the file listing of every article in the project, tagging each
    # file with the article it came from.
    articles = get_project_articles(FIGSHARE_PROJECT_ID)
    for article in articles:
        newfiles = get_files_of_article(article['id'])
        for i, f in enumerate(newfiles):
            newfiles[i]['article_id'] = article['id']
            newfiles[i]['article_name'] = article['title']
        files += newfiles

    fdf = pd.DataFrame(files)

    # NOTE(review): sort_values is not in place and its return value is
    # discarded, so this line is a no-op — confirm whether the sorted order
    # was intended to matter downstream.
    fdf.sort_values(by=['article_id', 'article_name', 'name'])
    # NOTE(review): fdfo is built and NaN-normalised here but never used
    # below — looks like leftover debugging/inspection code.
    fdfo = fdf[['article_id', 'article_name', 'name']]
    fdfo = fdfo.merge(overleaf[['article_id', 'name', 'overleaf']],
                      on=['article_id', 'name'],
                      how='outer')

    fdfo = fdfo.where(pd.notnull(fdfo), None)

    # Inner join: only figshare files that have an overleaf mapping.
    for_download = overleaf.merge(fdf[['article_id', 'name', 'download_url']],
                                  on=['article_id', 'name'])

    # create individual files
    for row in for_download.iterrows():
        if len(row[1]['overleaf']) > 0:
            download_url = row[1]['download_url']
            file = raw_issue_request('GET', download_url, binary=True)
            if '.pkl' in row[1]['name']:
                # Pickled DataFrame: stage to a temp dir, then re-render as
                # a LaTeX table in the Overleaf tree.
                with open(
                        '/mnt/labbook/output/untracked/tmp_overleaf-{}/{}'.
                        format(head, row[1]['name']), 'wb') as f:
                    f.write(file)
                df = pd.read_pickle(
                    '/mnt/labbook/output/untracked/tmp_overleaf-{}/{}'.format(
                        head, row[1]['name']))
                df.to_latex(
                    '/mnt/labbook/output/untracked/overleaf-{}/figshare/{}.tex'
                    .format(head, row[1]['overleaf']))
                repo.git.add('figshare/{}.tex'.format(row[1]['overleaf']))
            else:
                # Any other file (images etc.): copy verbatim, keeping the
                # original extension but the overleaf base name.
                extension = row[1]['name'].split('.')[-1]
                with open(
                        '/mnt/labbook/output/untracked/overleaf-{}/figshare/{}.{}'
                        .format(head, row[1]['overleaf'],
                                extension), 'wb') as f:
                    f.write(file)
                    repo.git.add('figshare/{}.{}'.format(
                        row[1]['overleaf'], extension))

    # create bibliography file: one @article entry per exported file, keyed
    # by its overleaf name and pointing at the figshare DOI.
    adf = pd.DataFrame(articles)
    bib_data = BibliographyData()

    for row in for_download.iterrows():

        if len(row[1]['overleaf']) > 0:
            idx = adf[adf['id'] == row[1]['article_id']].index[0]
            bib_data.add_entry(key=row[1]['overleaf'],
                               entry=Entry('article', [
                                   ('title', adf.at[idx, 'title']),
                                   ('journal', "figshare"),
                                   ('doi', adf.at[idx, 'doi']),
                               ]))

    bib_data.to_file(
        '/mnt/labbook/output/untracked/overleaf-{}/figures_tables.bib'.format(
            head))
    repo.git.add('figures_tables.bib')

    # write supplementary tex: a document that \input's every exported
    # table and embeds every exported image, each with a caption (from the
    # overleaf frame when present, else the article title) and a citation.

    geometry_options = {"tmargin": "1cm", "lmargin": "1cm"}
    doc = ltx.Document(geometry_options=geometry_options)
    doc.preamble.append(ltx.Package('biblatex', options=['sorting=none']))
    doc.preamble.append(
        ltx.Command('addbibresource',
                    arguments=[ltx.NoEscape("figures_tables.bib")]))
    doc.preamble.append(ltx.Package('booktabs'))
    doc.preamble.append(ltx.Package('longtable'))

    with doc.create(ltx.Subsection('images and tables supplementary file')):
        for row in for_download.iterrows():
            if len(row[1]['overleaf']) > 0:
                idx = adf[adf['id'] == row[1]['article_id']].index[0]
                if '.pkl' in row[1]['name']:
                    # Table: input the .tex generated above; also write the
                    # caption to a standalone figshare/*_caption.tex file.
                    with doc.create(ltx.Table(position='hbt')) as table_holder:
                        table_holder.append(
                            ltx.Command('input',
                                        arguments=[
                                            ltx.NoEscape(
                                                "figshare/{}.tex".format(
                                                    row[1]['overleaf']))
                                        ]))
                        if row[1]['caption'] is not None:
                            table_holder.add_caption(row[1]['caption'])
                            with open(
                                    "/mnt/labbook/output/untracked/overleaf-{}/figshare/{}_caption.tex"
                                    .format(head, row[1]['overleaf']),
                                    "w") as text_file:
                                text_file.write(row[1]['caption'])
                        else:
                            table_holder.add_caption(adf.at[idx, 'title'])
                            with open(
                                    "/mnt/labbook/output/untracked/overleaf-{}/figshare/{}_caption.tex"
                                    .format(head, row[1]['overleaf']),
                                    "w") as text_file:
                                text_file.write(adf.at[idx, 'title'])
                        repo.git.add('figshare/{}_caption.tex'.format(
                            row[1]['overleaf']))
                        table_holder.append(
                            ltx.Command(
                                'cite',
                                arguments=[ltx.NoEscape(row[1]['overleaf'])]))

                else:
                    # Image: embed it, with the same caption-file side
                    # effect as the table branch (escaped for LaTeX here).
                    with doc.create(
                            ltx.Figure(position='hbt')) as image_holder:
                        image_holder.add_image('figshare/{}'.format(
                            row[1]['overleaf']))
                        if row[1]['caption'] is not None:
                            image_holder.add_caption(row[1]['caption'])
                            with open(
                                    "/mnt/labbook/output/untracked/overleaf-{}/figshare/{}_caption.tex"
                                    .format(head, row[1]['overleaf']),
                                    "w") as text_file:
                                text_file.write(
                                    ltx.utils.escape_latex(row[1]['caption']))
                        else:
                            image_holder.add_caption(
                                ltx.utils.escape_latex(adf.at[idx, 'title']))
                            with open(
                                    "/mnt/labbook/output/untracked/overleaf-{}/figshare/{}_caption.tex"
                                    .format(head, row[1]['overleaf']),
                                    "w") as text_file:
                                text_file.write(
                                    ltx.utils.escape_latex(adf.at[idx,
                                                                  'title']))
                        repo.git.add('figshare/{}_caption.tex'.format(
                            row[1]['overleaf']))
                        image_holder.append(
                            ltx.Command(
                                'cite',
                                arguments=[ltx.NoEscape(row[1]['overleaf'])]))

    doc.append(ltx.Command('printbibliography'))

    doc.generate_tex(
        '/mnt/labbook/output/untracked/overleaf-{}/supplementary'.format(head))
    repo.git.add('supplementary.tex')
예제 #16
0
def _add_data(doc: pl.Document, dataset: str) -> None:
    """Append a 'Results' section for *dataset* to the pylatex document.

    Expects a directory named after the dataset containing the four
    pre-rendered graphs and two stats CSVs produced by the
    '<dataset>_NR2_GBReg' run; embeds the graphs as figures/subfigures
    and the CSVs as LaTeX tables.

    NOTE(review): relies on module-level `os`, `pd` (pandas), `pl`
    (pylatex), and `gbr_params` being in scope — confirm at call site.
    """
    # Naming convention shared by all artifacts of one training run.
    name = f'{dataset}_NR2_GBReg'
    directory = dataset

    aVp_graph = f'{name}.jpg'
    angle_dist_graph = f'{name}_angledistribution.jpg'
    error_dist_graph = f'{name}_errordistribution.jpg'
    sqerror_graph = f'{name}_sqerror_vs_actual.jpg'
    stats_csv_all = f'{name}_stats_all.csv'
    stats_csv_out = f'{name}_stats_out.csv'

    actualVpred_file = os.path.join(directory, aVp_graph)
    ang_dist_file = os.path.join(directory, angle_dist_graph)
    error_dist_file = os.path.join(directory, error_dist_graph)
    sqerror_file = os.path.join(directory, sqerror_graph)

    # Stats for the full dataset and for the outlier subset respectively.
    df_all = pd.read_csv(os.path.join(directory, stats_csv_all))
    df_out = pd.read_csv(os.path.join(directory, stats_csv_out))

    # NOTE(review): f-string has no placeholders — plain 'Results' intended?
    with doc.create(pl.Section(f'Results')):
        with doc.create(pl.Subsection('Summary of method:')):
            doc.append('Trained on PreAF2 dataset.')
            doc.append('\n')
            doc.append(f'Dataset tested: {dataset}')
            doc.append('\n')
            doc.append(f'GBR parameters: {gbr_params}.')
            doc.append('\n')

    # NOTE(review): this Subsection sits outside the Section context above
    # (dedented), so it is appended at document level — confirm intended.
    with doc.create(pl.Subsection('Summary of the data:')):
        with doc.create(pl.Figure(position='!htbp')) as actualVpred:
            actualVpred.add_image(actualVpred_file, width='300px')
            actualVpred.add_caption(
                'Graph showing the predicted packing angle against the actual packing angle.'
            )

        # Render each DataFrame as a raw LaTeX tabular; escape=False keeps
        # any LaTeX markup in the CSV cells intact.
        with doc.create(pl.Table(position='!htbp')) as table:
            table.add_caption('Summary of results for all data')
            table.append(pl.Command('centering'))
            table.append(pl.NoEscape(df_all.to_latex(escape=False)))

        with doc.create(pl.Table(position='!htbp')) as table:
            table.add_caption('Summary of results for outliers.')
            table.append(pl.Command('centering'))
            table.append(pl.NoEscape(df_out.to_latex(escape=False)))

        # Three side-by-side subfigures (~1/3 line width each) inside one
        # figure environment sharing a single caption.
        with doc.create(pl.Figure(position='!htbp')) as graphs:
            with doc.create(
                    pl.SubFigure(position='!htbp',
                                 width=pl.NoEscape(
                                     r'0.30\linewidth'))) as ang_dist_graph:
                ang_dist_graph.add_image(ang_dist_file,
                                         width=pl.NoEscape(r'\linewidth'))
                ang_dist_graph.add_caption(
                    'Frequency distribution of the packing angle.')
            with doc.create(
                    pl.SubFigure(position='!htbp',
                                 width=pl.NoEscape(
                                     r'0.33\linewidth'))) as error_dist_graph:
                error_dist_graph.add_image(error_dist_file,
                                           width=pl.NoEscape(r'\linewidth'))
                error_dist_graph.add_caption(
                    'Distribution of errors calculated as the difference between the predicted and actual interface \
                        angle.')
            with doc.create(
                    pl.SubFigure(position='!htbp',
                                 width=pl.NoEscape(
                                     r'0.33\linewidth'))) as sqerror_graph:
                sqerror_graph.add_image(sqerror_file,
                                        width=pl.NoEscape(r'\linewidth'))
                sqerror_graph.add_caption(
                    'Squared error in predicted packing angle against actual packing angle.'
                )
            graphs.add_caption('Graphs for further metrics.')