Code example #1
File: main_app.py Project: SharonGoliath/dao2caom2
def get_energy_axis_function_delta(parameters):
    uri = parameters.get('uri')
    header = parameters.get('header')
    execution_path = _get_execution_path(parameters)
    if execution_path is ExecutionPath.SPECT_CALIBRATED:
        cdelt = header.get('CDELT1')
    else:
        data_product_type = get_data_product_type(header)
        if data_product_type == DataProductType.SPECTRUM:
            wavelength = _get_wavelength(header)
            cdelt = header.get('DELTA_WL')
            if wavelength is None:
                cdelt = None
            else:
                if cdelt is None:
                    dispersion = header.get('DISPERSI')
                    dispaxis = _get_dispaxis(header)
                    if dispaxis == 1:
                        xbin = mc.to_float(header.get('XBIN'))
                    else:
                        xbin = mc.to_float(header.get('YBIN'))
                    cdelt = dispersion * 15.0 * xbin / 1000.0
        else:
            cdelt = header.get('BANDPASS')
    return cdelt
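
A hedged reading of the fallback arithmetic above: assuming DISPERSI is a dispersion in Angstrom/mm and the detector pixels are 15 micrometres (neither unit is stated in the snippet), dispersion * 15.0 * xbin / 1000.0 gives the wavelength increment in Angstrom per binned pixel. A minimal sketch:

def estimate_cdelt(dispersion_a_per_mm, xbin, pixel_um=15.0):
    # Hypothetical helper: pixel_um * xbin / 1000.0 is the binned pixel size in mm,
    # so the product is (Angstrom/mm) * mm = Angstrom per binned pixel.
    return dispersion_a_per_mm * pixel_um * xbin / 1000.0

# e.g. a 10 A/mm dispersion with 2x binning -> 0.3 A per binned pixel
print(estimate_cdelt(10.0, 2))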
Code example #2
def build_time(override, almaca_name):

    # HK 05-09-19
    # For the time dimension:
    # - resolution: this is far less clear to me, but I think in principle,
    # one could try to make an image using some time-based subset of the full
    # measurement set.  So in principle, I suppose one could make [sampleSize]
    # independent images.  In practice, there probably would be insufficient
    # data to actually make decent images for such a small subset of the
    # data.  Something like the number of times that the source is observed in
    # between calibrators would probably be a more appropriate / practical
    # level of time sampling, but I don't think there would be an easy way to
    # pull that information out of the metadata.  The time resolution value
    # could be listed as 'null' to avoid the confusion.
    resolution = None

    start_date = mc.to_float(override.get('start_date'))
    end_date = mc.to_float(override.get('end_date'))

    # HK 14-08-09
    # If 'exposure' is supposed to be the total (useful) exposure time
    # on-source, then this parameter should be msmd.effexposuretime().
    # The 'itime' parameter calculated in get_msmd.py gives a larger
    # value, as I believe it includes the full time on-source, regardless
    # of what mode the data is being taken in.
    exposure_time = mc.to_float(override.get('effexposuretime'))

    time_bounds = Interval(start_date,
                           end_date,
                           samples=[shape.SubInterval(start_date, end_date)])
    return Time(bounds=time_bounds,
                dimension=1,
                resolution=resolution,
                sample_size=mc.to_float(override.get('time_sample_size')),
                exposure=exposure_time)
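
build_time reads only four keys from override; a hypothetical mapping for reference (values and units are assumptions, with the dates as MJD floats to match the Interval/Time usage elsewhere in this listing):

override = {
    'start_date': '58077.386275',     # MJD (assumed)
    'end_date': '58081.464121',       # MJD (assumed)
    'effexposuretime': '3600.0',      # seconds (assumed), per the HK 14-08-09 note
    'time_sample_size': '30.0',       # units assumed
}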
Code example #3
File: telescopes.py Project: opencadc/dao2caom2
    def get_time_resolution(self, ext):
        exptime = mc.to_float(self._headers[ext].get('EXPTIME'))
        ncombine = mc.to_float(self._headers[ext].get('NCOMBINE'))
        if ncombine is None:
            ncombine = 1
        else:
            exptime = exptime * ncombine
        return exptime / ncombine
Code example #4
File: main_app.py Project: SharonGoliath/dao2caom2
def _get_position_by_scale_size_bin(header, key):
    result = None
    platescale = mc.to_float(header.get('PLTSCALE'))
    pixsize = mc.to_float(header.get('PIXSIZE'))
    xbin = mc.to_float(header.get('XBIN'))
    if (platescale is not None and pixsize is not None and xbin is not None):
        result = platescale * pixsize * xbin / 3600000.0
    return result
Code example #5
File: main_app.py Project: SharonGoliath/dao2caom2
def get_time_exposure(header):
    exptime = mc.to_float(header.get('EXPTIME'))
    ncombine = mc.to_float(header.get('NCOMBINE'))
    if ncombine is not None:
        # DB - approximation of exposure time for products (assume identical
        # EXPTIME)
        exptime *= ncombine
    return exptime
Code example #6
File: main_app.py Project: SharonGoliath/dao2caom2
def get_time_resolution(header):
    exptime = mc.to_float(header.get('EXPTIME'))
    ncombine = mc.to_float(header.get('NCOMBINE'))
    if ncombine is None:
        ncombine = 1
    else:
        exptime = exptime * ncombine
    return exptime / ncombine
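
Read together with code example #5, the intent is that a co-added product reports the summed exposure while keeping the single-frame exposure as its time resolution. A small usage check with hypothetical header values, assuming mc.to_float passes numbers through and dict.get stands in for the FITS header interface:

header = {'EXPTIME': 60.0, 'NCOMBINE': 3.0}
print(get_time_exposure(header))    # 60 * 3 = 180.0 s total exposure
print(get_time_resolution(header))  # (60 * 3) / 3 = 60.0 s per frame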
Code example #7
File: telescopes.py Project: opencadc/dao2caom2
    def _get_position_by_scale_size_bin(self, ext):
        result = None
        platescale = mc.to_float(self._headers[ext].get('PLTSCALE'))
        pixsize = mc.to_float(self._headers[ext].get('PIXSIZE'))
        xbin = mc.to_float(self._headers[ext].get('XBIN'))
        if platescale is not None and pixsize is not None and xbin is not None:
            result = platescale * pixsize * xbin / 3600000.0
        return result
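
The 3600000.0 divisor reads as a unit conversion: assuming PLTSCALE is in arcsec/mm and PIXSIZE in micrometres (assumptions, not stated in the snippet), platescale * pixsize * xbin is milli-arcsec per binned pixel, and dividing by 1000 * 3600 yields degrees per binned pixel. A minimal sketch:

def pixel_scale_deg(platescale_arcsec_per_mm, pixsize_um, xbin):
    # Hypothetical helper: (arcsec/mm) * um = milli-arcsec per pixel; * xbin for
    # binning; / (1000 * 3600) converts milli-arcsec to degrees.
    return platescale_arcsec_per_mm * pixsize_um * xbin / 3600000.0

# e.g. 10 arcsec/mm, 15 um pixels, 2x binning -> 0.3 arcsec ~ 8.33e-05 deg per pixel
print(pixel_scale_deg(10.0, 15.0, 2))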
Code example #8
File: astro_composable.py Project: opencadc/caom2eng
def build_plane_time_sample(start_date, end_date):
    """Create a SubInterval for the plane-level bounding box for time, given
    the start and end dates.
    :param start_date minimum date
    :param end_date maximum date. """
    start_date.format = 'mjd'
    end_date.format = 'mjd'
    return caom_shape.SubInterval(
        mc.to_float(start_date.value),
        mc.to_float(end_date.value))
Code example #9
File: astro_composable.py Project: opencadc/caom2eng
def build_plane_time_interval(start_date, end_date, samples):
    """Create an Interval for the plane-level bounding box for time, given
    the start and end dates, and a list of samples.
    :param samples list of SubInterval instances
    :param start_date minimum SubInterval date
    :param end_date maximum SubInterval date. """
    time_bounds = caom_Interval(mc.to_float(start_date.value),
                                mc.to_float(end_date.value),
                                samples=samples)
    return time_bounds
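
Both helpers assume start_date and end_date behave like astropy.time.Time instances (a writable format attribute and a value attribute); a hedged usage sketch:

from astropy.time import Time

start = Time('2016-10-20T00:00:00', format='isot')
end = Time('2016-10-20T01:00:00', format='isot')
sample = build_plane_time_sample(start, end)              # SubInterval with MJD endpoints
bounds = build_plane_time_interval(start, end, [sample])  # Interval wrapping the sample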
Code example #10
def build_plane_time_sample(start_date, end_date):
    """Create a SubInterval for the plane-level bounding box for time, given
    the start and end dates.
    :param start_date minimum date
    :param end_date maximum date."""
    start_date.format = 'mjd'
    end_date.format = 'mjd'
    return caom_shape.SubInterval(
        mc.to_float(start_date.value),
        mc.to_float(end_date.value),
    )
Code example #11
File: main_app.py Project: opencadc/neossat2caom2
def _get_energy(header):
    # DB 24-09-19
    # if bandpass IS None: set min_wl to 0.4, max_wl to 0.9 (microns)
    min_wl = 0.4
    max_wl = 0.9
    # header units are Angstroms
    bandpass = header.get('BANDPASS')
    if bandpass is not None:
        temp = bandpass.split(',')
        min_wl = mc.to_float(temp[0]) / 1e4
        max_wl = mc.to_float(temp[1]) / 1e4
    return min_wl, max_wl
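
A worked check of the Angstrom-to-micron conversion, with a hypothetical BANDPASS value, dict.get standing in for the FITS header interface, and mc.to_float assumed to parse numeric strings:

header = {'BANDPASS': '4000.0,9000.0'}   # Angstroms
print(_get_energy(header))               # (0.4, 0.9) microns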
Code example #12
def build_plane_time_interval(start_date, end_date, samples):
    """Create an Interval for the plane-level bounding box for time, given
    the start and end dates, and a list of samples.
    :param samples list of SubInterval instances
    :param start_date minimum SubInterval date
    :param end_date maximum SubInterval date."""
    time_bounds = caom_Interval(
        mc.to_float(start_date.value),
        mc.to_float(end_date.value),
        samples=samples,
    )
    return time_bounds
Code example #13
File: main_app.py Project: opencadc/omm2caom2
    def _update_time_bounds(self, observation, storage_name):
        """Add chunk time bounds to the chunk from the first part, by
        referencing information from the second header."""

        lower_values = ''
        upper_values = ''
        with fits.open(storage_name.sources_names[0]) as fits_data:
            xtension = fits_data[1].header['XTENSION']
            extname = fits_data[1].header['EXTNAME']
            if 'BINTABLE' in xtension and 'PROVENANCE' in extname:
                for ii in fits_data[1].data[0]['STARTTIME']:
                    lower_values = f'{ii} {lower_values}'
                for ii in fits_data[1].data[0]['DURATION']:
                    upper_values = f'{ii} {upper_values} '
            else:
                raise mc.CadcException(
                    f'Opened a composite file that does not match the '
                    f'expected profile '
                    f'(XTENSION=BINTABLE/EXTNAME=PROVENANCE). '
                    f'{xtension} {extname}'
                )

        for plane in observation.planes:
            for artifact in observation.planes[plane].artifacts:
                parts = observation.planes[plane].artifacts[artifact].parts
                for p in parts:
                    if p == '0':
                        lower = lower_values.split()
                        upper = upper_values.split()
                        if len(lower) != len(upper):
                            raise mc.CadcException(
                                'Cannot make RefCoords with inconsistent '
                                'values.'
                            )
                        chunk = parts[p].chunks[0]
                        bounds = CoordBounds1D()
                        chunk.time.axis.bounds = bounds
                        for ii in range(len(lower)):
                            mjd_start, mjd_end = ac.convert_time(
                                mc.to_float(lower[ii]), mc.to_float(upper[ii])
                            )
                            lower_refcoord = RefCoord(0.5, mjd_start)
                            upper_refcoord = RefCoord(1.5, mjd_end)
                            r = CoordRange1D(lower_refcoord, upper_refcoord)
                            bounds.samples.append(r)
                        # if execution has gotten to this point, remove range
                        # if it exists, since only one of bounds or range
                        # should be provided, and bounds is more specific. PD,
                        # slack, 2018-07-16
                        if chunk.time.axis.range is not None:
                            chunk.time.axis.range = None
Code example #14
File: main_app.py Project: opencadc/omm2caom2
    def get_position_resolution(self, ext):
        """Calculate the Plane-level position RNDER values from other FITS header
        values. Ignore values used by the telescope as defaults.

        Called to fill a blueprint value, must have a
        parameter named ext for import_module loading and execution."""
        temp = None
        temp_astr = mc.to_float(self._headers[ext].get('RMSASTR'))
        if temp_astr is not None and temp_astr != -1.0:
            temp = temp_astr
        temp_mass = mc.to_float(self._headers[ext].get('RMS2MASS'))
        if temp_mass is not None and temp_mass != -1.0:
            temp = temp_mass
        return temp
Code example #15
File: telescopes.py Project: opencadc/dao2caom2
    def get_energy_axis_function_delta(self, ext):
        wavelength = self._get_wavelength(ext)
        cdelt = self._headers[ext].get('DELTA_WL')
        if wavelength is None:
            cdelt = None
        else:
            if cdelt is None:
                dispersion = self._headers[ext].get('DISPERSI')
                dispaxis = self._get_dispaxis(ext)
                if dispaxis == 1:
                    xbin = mc.to_float(self._headers[ext].get('XBIN'))
                else:
                    xbin = mc.to_float(self._headers[ext].get('YBIN'))
                cdelt = dispersion * 15.0 * xbin / 1000.0
        return cdelt
Code example #16
def get_position_resolution(header):
    """Calculate the Plane-level position RNDER values from other FITS header
    values. Ignore values used by the telescope as defaults.

    Called to fill a blueprint value, must have a
    parameter named header for import_module loading and execution.

    :param header Array of astropy headers"""
    temp = None
    temp_astr = mc.to_float(header.get('RMSASTR'))
    if temp_astr is not None and temp_astr != -1.0:
        temp = temp_astr
    temp_mass = mc.to_float(header.get('RMS2MASS'))
    if temp_mass is not None and temp_mass != -1.0:
        temp = temp_mass
    return temp
Code example #17
File: scrape.py Project: opencadc/neossat2caom2
def _read_cache(in_dir):
    content = {}
    fqn = os.path.join(in_dir, NEOSSAT_CACHE)
    if os.path.exists(fqn):
        with open(fqn, 'r') as f:
            for line in f:
                temp = line.split(',')
                temp_bool = False if temp[1].strip() == 'False' else True
                content[temp[0]] = [temp_bool, mc.to_float(temp[2].strip())]
    return content
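
Each cache line is expected to be comma-separated: a key, a boolean flag, and a float. A hedged illustration of the per-line parsing with a hypothetical line:

line = 'NEOS_SCI_2019123000000.fits,True,58484.5'   # hypothetical cache line
temp = line.split(',')
temp_bool = False if temp[1].strip() == 'False' else True
print(temp[0], [temp_bool, float(temp[2].strip())])  # NEOS_SCI_2019123000000.fits [True, 58484.5]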
Code example #18
def _update_position(chunk, science_fqn):
    """This function assumes that if the code got here, the science file is
    on disk."""
    logging.debug('Begin _update_position')
    assert isinstance(chunk, Chunk), 'Expecting type Chunk'

    if (chunk.position is not None and chunk.position.axis is not None):
        logging.debug('position exists, calculate footprints for {}.'.format(
            science_fqn))
        full_area, footprint_xc, footprint_yc, ra_bary, dec_bary, \
            footprintstring, stc = footprintfinder.main(
                '-r -f  {}'.format(science_fqn))
        logging.debug('footprintfinder result: full area {} '
                      'footprint xc {} footprint yc {} ra bary {} '
                      'dec_bary {} footprintstring {} stc {}'.format(
                          full_area, footprint_xc, footprint_yc, ra_bary,
                          dec_bary, footprintstring, stc))
        bounds = CoordPolygon2D()
        coords = None
        fp_results = stc.split('Polygon FK5')
        if len(fp_results) > 1:
            coords = fp_results[1].split()
        else:
            fp_results = stc.split('Polygon ICRS')
            if len(fp_results) > 1:
                coords = fp_results[1].split()

        if coords is None:
            raise mc.CadcException('Do not recognize footprint {}'.format(stc))

        index = 0
        while index < len(coords):
            vertex = ValueCoord2D(mc.to_float(coords[index]),
                                  mc.to_float(coords[index + 1]))
            bounds.vertices.append(vertex)
            index += 2
            logging.debug('Adding vertex\n{}'.format(vertex))
        chunk.position.axis.bounds = bounds
    else:
        logging.info('No position information for footprint generation.')
    logging.debug('Done _update_position.')
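
The parsing above assumes footprintfinder returns an stc string of the form 'Polygon FK5 ra1 dec1 ra2 dec2 ...' (or 'Polygon ICRS ...'); a hedged illustration of how the flat coordinate list becomes vertex pairs:

stc = 'Polygon ICRS 210.1 54.2 210.3 54.2 210.3 54.4 210.1 54.4'   # hypothetical
coords = stc.split('Polygon ICRS')[1].split()
vertices = [(float(coords[i]), float(coords[i + 1]))
            for i in range(0, len(coords), 2)]
print(vertices)  # [(210.1, 54.2), (210.3, 54.2), (210.3, 54.4), (210.1, 54.4)]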
Code example #19
def read_md_pk(fqn):
    temp = mc.read_from_file(fqn)
    assert temp is not None, 'expected result'
    result = {'spectral_windows': []}
    for line in temp:
        temp2 = line.split(',', 1)
        if temp2[0].strip() == 'spectral_windows':
            x = temp2[1].strip().split(',')
            count = 0
            while count < len(x):
                y = (mc.to_float(x[count].replace('(', '').replace(
                    '[', '').replace(']', '').replace(')', '')),
                     mc.to_float(x[count + 1].replace('(', '').replace(
                         '[', '').replace(']', '').replace(')', '')))
                count += 2
                result[temp2[0]].append(y)
        elif type(temp2[1]) is str:
            result[temp2[0]] = temp2[1].strip()
        else:
            result[temp2[0]] = temp2[1]
    return result
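
The spectral_windows branch expects the value to be a bracketed list of frequency pairs (Hz, per build_energy in code example #27); a hedged illustration of the tokenizing with a hypothetical line:

line = 'spectral_windows,[(90.5e9, 92.5e9), (102.1e9, 104.1e9)]'   # hypothetical
key, rest = line.split(',', 1)
tokens = [t.replace('(', '').replace('[', '').replace(']', '').replace(')', '')
          for t in rest.strip().split(',')]
pairs = [(float(tokens[i]), float(tokens[i + 1]))
         for i in range(0, len(tokens), 2)]
print(pairs)  # [(90500000000.0, 92500000000.0), (102100000000.0, 104100000000.0)]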
Code example #20
def _update_ngvs_time(chunk, provenance, obs_id):
    logging.debug(f'Begin _update_ngvs_time for {obs_id}')
    if (chunk is not None and provenance is not None and
            len(provenance.inputs) > 0):
        # bounds = ctor
        config = mc.Config()
        config.get_executors()
        subject = mc.define_subject(config)
        client = CAOM2RepoClient(
            subject, config.logging_level, 'ivo://cadc.nrc.ca/ams')
        metrics = mc.Metrics(config)
        bounds = CoordBounds1D()
        # seed the accumulators so the min()/max() updates below can move
        min_date = sys.float_info.max
        max_date = 0
        exposure = 0
        for entry in provenance.inputs:
            ip_obs_id, ip_product_id = mc.CaomName.decompose_provenance_input(
                entry.uri)
            logging.info(f'Retrieving provenance metadata for {ip_obs_id}.')
            ip_obs = mc.repo_get(client, 'CFHT', ip_obs_id, metrics)
            if ip_obs is not None:
                ip_plane = ip_obs.planes.get(ip_product_id)
                if (ip_plane is not None and ip_plane.time is not None and
                        ip_plane.time.bounds is not None):
                    bounds.samples.append(CoordRange1D(
                        RefCoord(pix=0.5, val=ip_plane.time.bounds.lower),
                        RefCoord(pix=1.5, val=ip_plane.time.bounds.upper)))
                    min_date = min(ip_plane.time.bounds.lower, min_date)
                    max_date = max(ip_plane.time.bounds.upper, max_date)
                    exposure += ip_plane.time.exposure
        axis = Axis(ctype='TIME', cunit='d')
        time_axis = CoordAxis1D(axis=axis,
                                error=None,
                                range=None,
                                bounds=bounds,
                                function=None)
        temporal_wcs = TemporalWCS(axis=time_axis, timesys=None, trefpos=None,
                                   mjdref=None, exposure=mc.to_float(exposure),
                                   resolution=None)
        chunk.time = temporal_wcs
    logging.debug(f'End _update_ngvs_time.')
Code example #21
def _update_from_comment(observation, phangs_name, headers):
    # From ER: 04-03-21
    # COMMENT Produced with PHANGS-ALMA pipeline version 4.0 Build 935
    # - Provenance.version
    # COMMENT Galaxy properties from PHANGS sample table version 1.6
    # COMMENT Calibration Level 4 (ANALYSIS_PRODUCT)
    # - Calibration level (either 3 or 4)
    # COMMENT PHANGS-ALMA Public Release 1
    # - Provenance.project = PHANGS-ALMA
    # COMMENT Generated by the Physics at High Angular resolution
    # COMMENT in nearby GalaxieS (PHANGS) collaboration
    # - Provenance.organization = PHANGS
    # COMMENT Canonical Reference: Leroy et al. (2021), ApJ, Submitted
    # - Update to reference when accepted
    # COMMENT Release generated at 2021-03-04T07:28:10.245340
    # - Provenance.lastExecuted
    # COMMENT Data from ALMA Proposal ID: 2017.1.00886.L
    # - Proposal.proposalID
    # COMMENT ALMA Proposal PI: Schinnerer, Eva
    # - Proposal.pi_name
    # COMMENT Observed in MJD interval [58077.386275,58081.464121]
    # COMMENT Observed in MJD interval [58290.770032,58365.629222]
    # COMMENT Observed in MJD interval [58037.515807,58047.541173]
    # COMMENT Observed in MJD interval [58353.589805,58381.654757]
    # COMMENT Observed in MJD interval [58064.3677,58072.458597]
    # COMMENT Observed in MJD interval [58114.347649,58139.301879]
    chunk = None
    for plane in observation.planes.values():
        if plane.product_id != phangs_name.product_id:
            continue
        if plane.provenance is None:
            plane.provenance = Provenance(name='PHANGS-ALMA pipeline')

        for artifact in plane.artifacts.values():
            if artifact.uri != phangs_name.file_uri:
                continue
            for part in artifact.parts.values():
                chunk = part.chunks[0]
                break

        for entry in headers[0].get('COMMENT'):
            if 'pipeline version ' in entry:
                plane.provenance.version = entry.split(' version ')[1]
            elif 'Calibration Level' in entry:
                level = entry.split()[2]
                if level == '4':
                    plane.calibration_level = CalibrationLevel.ANALYSIS_PRODUCT
            elif 'PHANGS-ALMA Public Release' in entry:
                plane.provenance.project = 'PHANGS-ALMA'
            elif 'in nearby GalaxieS (PHANGS) collaboration' in entry:
                plane.provenance.organization = 'PHANGS'
            elif 'Release generated at ' in entry:
                plane.provenance.last_executed = mc.make_time_tz(
                    entry.split(' at ')[1])
            elif 'Data from ALMA Proposal ID:' in entry:
                observation.proposal = Proposal(entry.split(':')[1].strip())
            elif 'Canonical Reference: ' in entry:
                plane.provenance.producer = entry.split(': ')[1]
            elif 'ALMA Proposal PI:' in entry:
                observation.proposal.pi_name = entry.split(': ')[1]
            elif 'Observed in MJD interval ' in entry:
                if chunk is not None:
                    bits = entry.split()[4].split(',')
                    start_ref_coord = RefCoord(
                        0.5, mc.to_float(bits[0].replace('[', '')))
                    end_ref_coord = RefCoord(
                        1.5, mc.to_float(bits[1].replace(']', '')))
                    sample = CoordRange1D(start_ref_coord, end_ref_coord)
                    if chunk.time is None:
                        coord_bounds = CoordBounds1D()
                        axis = CoordAxis1D(axis=Axis('TIME', 'd'))
                        chunk.time = TemporalWCS(axis, timesys='UTC')
                        chunk.time.axis.bounds = coord_bounds
                    chunk.time.axis.bounds.samples.append(sample)
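
A worked check of the MJD-interval parsing, using one of the COMMENT lines quoted above (float stands in for mc.to_float here):

entry = 'Observed in MJD interval [58077.386275,58081.464121]'
bits = entry.split()[4].split(',')
print(float(bits[0].replace('[', '')), float(bits[1].replace(']', '')))
# 58077.386275 58081.464121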
Code example #22
File: main_app.py Project: SharonGoliath/dao2caom2
def _get_wavelength(header):
    return mc.to_float(header.get('WAVELENG'))
Code example #23
def build_observation(db_content, observation, md_name):

    override = read_md_pk(md_name)

    fqn = override.get('fqn')
    almaca_name = AlmacaName(fname_on_disk=fqn)
    # logging.error(db_content.colnames)
    # logging.error('fqn is {}'.format(fqn))
    field_index = _get_index(almaca_name, db_content)
    # field_index = 0
    if observation is None:
        observation = _build_obs(override, db_content, fqn, field_index,
                                 almaca_name, md_name)

    provenance = get_provenance(almaca_name)
    provenance.inputs.add(
        PlaneURI('caom:ALMA/A001_X88b_X23/A001_X88b_X23-raw'))

    # HK 07-02-20
    # I'm looking at the very first entry, A002_Xb999fd_X602.SCI.J1851+0035.
    # The time bounds listed under all of the second-level planes correspond
    # to a date of Oct 20, 2016, which agrees with the observing date I pull
    # up on listobs.  But in the top level plane, the metaRelease date is
    # listed as Oct 12, 2016.  As we discussed earlier this week, it doesn't
    # make sense to have the meta data released before the observation was
    # even taken. Using the 'end time' of the observation that's already
    # pulled for a lower plane, and putting that as the metaRelease date in
    # the top level would be a good solution.  NB: since all spws are observed
    # simultaneously, you'll get the same answer for whichever of the
    # [high/low]res_spw[X] entries that you pull the information from.
    input_meta_data = read_md_pk(almaca_name.input_ms_metadata)
    meta_release = mc.to_float(input_meta_data.get('end_date'))
    meta_release = time.Time(meta_release, format='mjd')
    meta_release.format = 'isot'
    meta_release_dt = mc.make_time(meta_release.value)

    release_date = db_content['Release date'][field_index]
    if release_date is None:
        raise mc.CadcException('No release date for {}'.format(fqn))
    else:
        release_date = time.Time(release_date).to_datetime()

    logging.error('Add plane {} to {}'.format(almaca_name.product_id,
                                              almaca_name.obs_id))
    plane = Plane(product_id=almaca_name.product_id,
                  data_release=release_date,
                  meta_release=meta_release_dt,
                  provenance=provenance)

    plane.position = build_position(db_content, field_index, md_name)
    plane.energy = build_energy(override)
    plane.polarization = None
    plane.time = build_time(override, almaca_name)

    # HK 14-08-2019
    # dataProductType should be 'visibility'
    plane.data_product_type = DataProductType.VISIBILITY
    plane.calibration_level = CalibrationLevel.CALIBRATED

    observation.planes.add(plane)
    observation.meta_release = plane.meta_release
    # TODO hard-coded
    observation.members.add(ObservationURI('caom:ALMA/A001_X88b_X23'))

    # HK 29-07-19
    # qa/ contains images, plots, and web page status views generated
    # during the original (non-CANFAR) calibration of the raw data.
    # We may want to consider retaining these files as well, as they give
    # a more advanced user an easier way to check on data quality,
    # potential issues with calibration, etc.  I believe they come
    # packaged with the rest of the 'products' tarball on the archive,
    # so they would be obtainable even if we do not keep a copy.  These
    # files are fairly small.
    # TODO override.get('artifact_uri')
    artifact = Artifact(uri=almaca_name.uri,
                        product_type=almaca_name.intent,
                        release_type=ReleaseType.DATA,
                        content_type='application/x-tar',
                        content_length=None)
    plane.artifacts.add(artifact)
    return observation
Code example #24
File: caom_composable.py Project: opencadc/caom2pipe
def exec_footprintfinder(chunk,
                         science_fqn,
                         log_file_directory,
                         obs_id,
                         params='-f'):
    """Execute the footprintfinder on a file. All preconditions for successful
    execution should be in place i.e. the file exists, and is unzipped (because
    that is faster).

    :param chunk The CAOM Chunk that will have Position Bounds information
        added
    :param science_fqn A string of the fully-qualified file name for
        footprintfinder to run on
    :param log_file_directory A string of the fully-qualified name for the log
        directory, where footprintfinder output files will be moved to, after
        execution
    :param obs_id specifies location where footprintfinder log files end up
    :param params specific footprintfinder parameters by collection - default
        forces full-chip, regardless of illumination
    """
    logging.debug(f'Begin _update_position for {obs_id}')
    mc.check_param(chunk, Chunk)

    # local import because footprintfinder depends on matplotlib being
    # installed, which is not declared as a caom2pipe dependency
    import footprintfinder

    if (chunk.position is not None and chunk.position.axis is not None):
        logging.debug(
            f'position exists, calculate footprints for {science_fqn}.')
        for parameters in [params, f'{params} -m 0.2', '-f']:
            # try in decreasing fidelity to get a Polygon that is supported
            # by CAOM's Polygon/MultiPolygon structures
            #
            # -m 0.2 fewer points
            # -f full chip

            full_area, footprint_xc, footprint_yc, ra_bary, dec_bary, \
                footprintstring, stc = footprintfinder.main(
                    f'-r {parameters} {science_fqn}')
            logging.debug(f'footprintfinder result: full area {full_area} '
                          f'footprint xc {footprint_xc} footprint yc '
                          f'{footprint_yc} ra bary {ra_bary} dec_bary '
                          f'{dec_bary} footprintstring {footprintstring} '
                          f'stc {stc}')
            coords = None
            fp_results = stc.split('Polygon FK5')
            if len(fp_results) > 1:
                coords = fp_results[1].split()
            else:
                fp_results = stc.split('Polygon ICRS')
                if len(fp_results) > 1:
                    coords = fp_results[1].split()

            if coords is not None:
                break

        if coords is None:
            raise mc.CadcException(f'Do not recognize footprint {stc}')

        bounds = CoordPolygon2D()
        index = 0
        while index < len(coords):
            vertex = ValueCoord2D(mc.to_float(coords[index]),
                                  mc.to_float(coords[index + 1]))
            bounds.vertices.append(vertex)
            index += 2
            logging.debug(f'Adding vertex\n{vertex}')
        chunk.position.axis.bounds = bounds

        prefix = os.path.basename(science_fqn).replace('.fits', '')
        return_file = f'{prefix}_footprint.txt'
        return_string_file = f'{prefix}_footprint_returnstring.txt'
        _handle_footprint_logs(log_file_directory, return_file)
        _handle_footprint_logs(log_file_directory, return_string_file)

    else:
        logging.info('No position information for footprint generation.')
    logging.debug('Done _update_position.')
Code example #25
File: main_app.py Project: opencadc/neossat2caom2
def get_time_delta(header):
    exptime = mc.to_float(header.get('EXPOSURE'))  # in s
    return exptime / (24.0 * 3600.0)
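
A quick unit check with a hypothetical EXPOSURE value, assuming mc.to_float passes numbers through:

header = {'EXPOSURE': 86.4}     # seconds
print(get_time_delta(header))   # 86.4 / 86400 = 0.001 days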
Code example #26
File: telescopes.py Project: opencadc/dao2caom2
    def _get_wavelength(self, ext):
        return mc.to_float(self._headers[ext].get('WAVELENG'))
Code example #27
def build_energy(override):

    spectral_windows = override.get('spectral_windows')
    sample_size = mc.to_float(override.get('energy_sample_size'))

    # HK 19-08-19
    # I'm still not quite sure that I follow this one.  I understand your
    # point from earlier about merging together overlapping wavelength
    # ranges, and that should be fine, although it might hide some
    # important information in the energy:resolution parameter, since
    # each of the overlapping ranges may have different spectral
    # resolutions.  By my approximate calculations, the 4 wavelength
    # ranges covered are 0.00259 to 0.00263, 0.00263 to 0.00267, 0.0026025
    # to 0.0026038, and 0.0026018 to 0.0026044.  If I were to merge those,
    # I would get 0.00259 to 0.00267.  I don't understand how the caom2
    # model lists 0.00259 to 0.002602 and then 0.002626 and 0.002672.
    # Specifically, the wavelengths covered by the first of the 4 ranges
    # I list, already span a much larger range than the first range listed
    # in caom2.

    energy = Energy()
    energy.em_band = EnergyBand.MILLIMETER
    energy.dimension = 1

    wvlns = []
    mid_wvln = []

    for spw in spectral_windows:
        wvln = numpy.array((_from_hz_to_m(spw[0]), _from_hz_to_m(spw[1])))
        wvlns.append(wvln)
        mid_wvln.append(wvln[0] + wvln[1])
    order = numpy.argsort(mid_wvln)

    min_bound = None
    max_bound = None
    si = []
    for idx in order:
        lower = min(wvlns[idx])
        upper = max(wvlns[idx])

        si = _add_subinterval(si, (lower, upper))
        if min_bound is not None:
            min_bound = min(min_bound, lower)
        else:
            min_bound = lower
        if max_bound is not None:
            max_bound = max(max_bound, upper)
        else:
            max_bound = upper

    samples = []
    for s in si:
        samples.append(shape.SubInterval(s[0], s[1]))

    energy.bounds = Interval(min_bound, max_bound, samples=samples)

    # HK 15-10-19
    #
    # It looks like the caom2 model puts energy in wavelength units (I'm not
    # clear whether that is metres or centimetres?), whereas the numbers
    # I quoted above are directly from msmd and are given in frequency units
    # of Hertz.  I'm not sure what level of precision you would use for the
    # conversion, but here's roughly what you'd want to do:
    #
    # [resolution in wavelength units] / [central wavelength] =
    # [resolution in frequency units] /[central frequency]
    #
    # Using [central wavelength] = [speed of light] / [central frequency], and
    # a speed of light of 2.9979e8 m/s (or whatever precision you need, and
    # convert to cm/s if needed), you should be able to run the calculation
    # with values you've already extracted.  For a central frequency of
    # 113GHz, I get a value of about 3.7e-7 m, assuming I've done my quick
    # calculation correctly.
    #
    # NB: since both the sampleSize and resolution parameters are looking at a
    # differential wavelength measurement, both conversions would follow the
    # same formula as I've written.
    #
    mean_frequency = (_from_m_to_hz(min_bound) + _from_m_to_hz(max_bound)) / 2
    energy.sample_size = _delta_hz_to_m(sample_size, mean_frequency)
    energy_resolution = mc.to_float(override.get('energy_resolution'))

    # HK 03-12-19
    # resolving power is unit-less
    # ResolvingPower = mean[frequency_Hz] / chanres_Hz
    energy.resolving_power = mean_frequency / energy_resolution

    # HK 3-10-19
    # energy: bandpassName: could this also be Band3?  (I know it is already
    # listed under 'instrument' in the top level plane)
    energy.bandpass_name = _get_band_name(override)
    return energy
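
_from_hz_to_m, _from_m_to_hz and _delta_hz_to_m are not shown in this listing; a minimal sketch consistent with the HK 15-10-19 comment, assuming wavelengths are kept in metres:

SPEED_OF_LIGHT = 2.9979e8   # m/s, the precision quoted in the comment above

def _from_hz_to_m(frequency):
    # wavelength = c / frequency
    return SPEED_OF_LIGHT / frequency

def _from_m_to_hz(wavelength):
    # frequency = c / wavelength
    return SPEED_OF_LIGHT / wavelength

def _delta_hz_to_m(delta_hz, frequency):
    # [delta wavelength] / [central wavelength] = [delta frequency] / [central frequency],
    # so delta wavelength = c * delta_hz / frequency ** 2
    return SPEED_OF_LIGHT * delta_hz / frequency ** 2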