Code example #1
def test_main_app(vo_client, data_client_mock, repo_get_mock, vo_mock,
                  test_name):
    obs_id = os.path.basename(test_name)
    storage_name = storage_names.get_storage_name(
        test_storage_name.LOOKUP[obs_id][0],
        test_storage_name.LOOKUP[obs_id][0])
    working_dir = get_work_dir(test_name)
    output_file = f'{TEST_DATA_DIR}/{working_dir}/{obs_id}.actual.xml'
    input_file = f'{TEST_DATA_DIR}/{working_dir}/{obs_id}.in.xml'
    obs_path = f'{TEST_DATA_DIR}/{working_dir}/{obs_id}.expected.xml'
    data_client_mock.return_value.get_file_info.side_effect = get_file_info
    vo_client.return_value.get_node.side_effect = _get_node_mock
    repo_get_mock.side_effect = _repo_read_mock
    vo_mock.side_effect = _vo_mock

    sys.argv = \
        (f'{main_app.APPLICATION} --no_validate --local '
         f'{_get_local(test_name)} -i {input_file} -o {output_file} --plugin '
         f'{PLUGIN} --module {PLUGIN} --lineage '
         f'{test_storage_name.get_lineage(test_name)}').split()
    print(sys.argv)
    main_app.to_caom2()

    compare_result = mc.compare_observations(output_file, obs_path)
    if compare_result is not None:
        raise AssertionError(compare_result)
Code example #2
def get_lineage(obs_id):
    """Build a space-separated lineage string from every file name mapped to
    an observation ID, removing any '.header' fragments."""
    result = ''
    for ii in LOOKUP[obs_id]:
        storage_name = storage_names.get_storage_name(ii, ii)
        result = f'{result} {storage_name.lineage}'
    result = result.replace('.header', '')
    return result
Code example #3
def test_single():
    test_entry = 'MegaPipe.358.122.G.MP9401.fits'
    test_subject = storage_names.get_storage_name(test_entry, test_entry)
    assert test_subject.obs_id == 'MegaPipe.358.122', 'wrong obs id'
    assert test_subject.product_id == 'MegaPipe.358.122.G.MP9401', \
        'wrong product id'
    assert test_subject.filter_name == 'G', 'wrong filter name'
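The assertions above pin down the MegaPipe naming convention: the observation ID is the first three dot-separated tokens, the product ID is the file name without its '.fits' extension, and the filter is the fourth token. The sketch below only illustrates that decomposition; parse_megapipe_name is a hypothetical helper, not part of storage_names.

def parse_megapipe_name(file_name):
    # 'MegaPipe.358.122.G.MP9401.fits' ->
    #     ('MegaPipe.358.122', 'MegaPipe.358.122.G.MP9401', 'G')
    base = file_name.rsplit('.fits', 1)[0]
    tokens = base.split('.')
    obs_id = '.'.join(tokens[:3])
    filter_name = tokens[3] if len(tokens) > 3 else None
    return obs_id, base, filter_name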
Code example #4
def get_ngvs_bandpass_name(uri):
    """Map the single-letter filter name of a storage name back to the full
    NGVS bandpass identifier, e.g. 'g' -> 'g.MP9402'."""
    reverse_filter_lookup = {
        'i': 'i.MP9703',
        'g': 'g.MP9402',
        'r': 'r.MP9602',
        'u': 'u.MP9302',
        'z': 'z.MP9901'}
    storage_name = sn.get_storage_name(uri, uri)
    result = None
    if storage_name.filter_name is not None:
        result = reverse_filter_lookup.get(storage_name.filter_name)
    return result
Code example #5
def get_data_product_type(uri):
    result = DataProductType.IMAGE
    storage_name = sn.get_storage_name(uri, uri)
    if storage_name.is_catalog:
        # PD 09-12-20
        # I need to modify the ObsCore view to filter the observations with
        # DataProductType.catalog out (not compliant to spec) but there is a
        # different value measurements that means roughly the same thing.
        # There should be a DataProductType constant declared in the py library
        # for this value.
        result = DataProductType.MEASUREMENTS
    return result
Code example #6
def _filter_args(args):
    """Pull the URI (the portion after the first '/') out of each lineage
    entry; URIs whose StorageName has use_metadata False are also collected
    into a second list."""
    uris_for_later = []
    result = []
    if args.lineage:
        for ii in args.lineage:
            uri = ii.split('/', 1)[1]
            result.append(uri)
            storage_name = sn.get_storage_name(uri, uri)
            if not storage_name.use_metadata:
                uris_for_later.append(uri)
    else:
        raise mc.CadcException(
            f'Could not define uri from these args {args}')
    return result, uris_for_later
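The code above assumes each --lineage entry has the form '<product_id>/<uri>', keeping only the part after the first '/'. A minimal usage sketch follows; the URI scheme and path are placeholders, not the real collection values:

from argparse import Namespace

args = Namespace(lineage=[
    'MegaPipe.358.122.G.MP9401/'
    'cadc:PLACEHOLDER/MegaPipe.358.122.G.MP9401.fits'])
uris, uris_for_later = _filter_args(args)
# assuming get_storage_name accepts the placeholder URI:
# uris == ['cadc:PLACEHOLDER/MegaPipe.358.122.G.MP9401.fits']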
Code example #7
def accumulate_bp(bp, uri):
    """Configure the telescope-specific ObsBlueprint at the CAOM model
    Observation level."""
    logging.debug(f'Begin accumulate_bp for {uri}.')
    bp.configure_position_axes((1, 2))

    scheme, archive, file_name = mc.decompose_uri(uri)
    storage_name = sn.get_storage_name(file_name, file_name)
    if sn.is_ngvs(uri):
        _accumulate_ngvs_bp(bp, storage_name)
    else:
        _accumulate_mp_bp(bp, storage_name)

    # they're all DerivedObservations
    bp.set('DerivedObservation.members', {})
    bp.set('Observation.type', 'OBJECT')

    bp.set('Observation.proposal.id', 'get_proposal_id(header)')

    bp.clear('Plane.metaRelease')
    bp.add_fits_attribute('Plane.metaRelease', 'REL_DATE')

    bp.clear('Chunk.position.resolution')
    bp.add_fits_attribute('Chunk.position.resolution', 'FINALIQ')

    bp.set('Observation.instrument.name', INSTRUMENT)
    bp.set('Observation.telescope.name', 'CFHT 3.6m')
    x, y, z = ac.get_geocentric_location('cfht')
    bp.set('Observation.telescope.geoLocationX', x)
    bp.set('Observation.telescope.geoLocationY', y)
    bp.set('Observation.telescope.geoLocationZ', z)

    bp.set('Plane.calibrationLevel', 'get_calibration_level(uri)')
    bp.set('Plane.dataProductType', 'get_data_product_type(uri)')
    bp.set('Plane.provenance.producer', 'CADC')

    bp.set('Artifact.productType', 'get_artifact_product_type(uri)')

    if storage_name.collection == sn.MP_COLLECTION:
        _accumulate_mp_bp(bp, storage_name)
    else:
        _accumulate_ngvs_bp(bp, storage_name)

    logging.debug('Done accumulate_bp.')
Code example #8
def test_is_valid():
    for key, value in LOOKUP.items():
        for entry in value:
            sn = storage_names.get_storage_name(entry, entry)
            assert sn.is_valid()
            assert sn.obs_id == key, f'wrong obs id {sn.obs_id}'
Code example #9
def get_provenance_version(uri):
    storage_name = sn.get_storage_name(uri, uri)
    return storage_name.version
Code example #10
def get_calibration_level(uri):
    result = CalibrationLevel.PRODUCT
    storage_name = sn.get_storage_name(uri, uri)
    if storage_name.is_catalog:
        result = CalibrationLevel.ANALYSIS_PRODUCT
    return result
Code example #11
def update(observation, **kwargs):
    """Called to fill multiple CAOM model elements and/or attributes, must
    have this signature for import_module loading and execution.

    :param observation A CAOM Observation model instance.
    :param **kwargs Everything else."""
    logging.debug('Begin update.')
    mc.check_param(observation, Observation)
    fqn = kwargs.get('fqn')
    headers = kwargs.get('headers')
    uri = kwargs.get('uri')

    if uri is not None:
        storage_name = sn.get_storage_name(uri, uri)
    elif fqn is not None:
        temp = os.path.basename(fqn)
        storage_name = sn.get_storage_name(temp, temp)
    else:
        raise mc.CadcException(f'Cannot define a MEGAPIPEName instance for '
                               f'{observation.observation_id}')
    if headers is None:
        logging.warning(f'No metadata for {storage_name.file_name}')
        return observation

    logging.debug(f'Update for {observation.observation_id} with '
                  f'{storage_name.file_name}.')

    max_meta_release = observation.meta_release
    min_seeing = None
    if (observation.environment is not None and
            observation.environment.seeing is not None):
        min_seeing = observation.environment.seeing
    if not storage_name.is_catalog:
        for plane in observation.planes.values():
            max_meta_release = _update_release_date(
                plane, max_meta_release, headers)
            if plane.product_id != storage_name.product_id:
                continue
            min_seeing = _minimize(min_seeing,
                                   _get_keyword(headers, 'FINALIQ'))
            for artifact in plane.artifacts.values():
                if artifact.uri != storage_name.file_uri:
                    continue
                if (artifact.product_type is ProductType.WEIGHT and
                        storage_name.collection == sn.MP_COLLECTION):
                    artifact.parts = None
                    continue
                for part in artifact.parts.values():
                    for chunk in part.chunks:
                        if _informative_uri(storage_name.file_name):
                            _update_energy(chunk, headers, storage_name,
                                           observation.observation_id)
                        if not storage_name.is_catalog:
                            if storage_name.collection == sn.MP_COLLECTION:
                                if chunk.position is not None:
                                    chunk.position.resolution = None

                        # SGw - 24-11-20 - set observable/axis to None: there
                        # is no such information in the files
                        if chunk.observable_axis is not None:
                            chunk.observable_axis = None
                        if chunk.observable is not None:
                            chunk.observable = None

            if (_informative_uri(storage_name.file_name) and
                    plane.provenance is not None):
                # SGw - 22-01-21
                # When re-processing, I sometimes find that an image wasn't as
                # well calibrated as I thought it was and it gets removed from
                # the next generation data products. So I would like the
                # ability to remove inputs
                cc.update_plane_provenance_single(
                    plane, headers, 'HISTORY', 'CFHT',
                    _repair_history_provenance_value,
                    observation.observation_id)
                if plane.provenance.run_id == 'None':
                    plane.provenance.run_id = None
                if (plane.provenance.keywords is not None and
                        'None' in plane.provenance.keywords):
                    plane.provenance.keywords.remove('None')

            # _update_ngvs_time is dependent on provenance information that is
            # generated right before this
            if (storage_name.collection == sn.NGVS_COLLECTION and
                    not storage_name.is_catalog):
                for artifact in plane.artifacts.values():
                    if artifact.uri != storage_name.file_uri:
                        continue
                    for part in artifact.parts.values():
                        for chunk in part.chunks:
                            _update_ngvs_time(chunk, plane.provenance,
                                              observation.observation_id)
    observation.meta_release = max_meta_release
    if observation.environment is not None:
        observation.environment.seeing = min_seeing
    if (observation.target is not None and
            storage_name.collection == sn.MP_COLLECTION):
        observation.target.standard = False
    cc.update_observation_members(observation)
    logging.debug('Done update.')
    return observation