Example #1
    def setUp(self):
        """ Check that a the AIMS system or this script hasn't been modified.
        This function checks that a downloaded file still has the same md5.
        """
        logging_aims()
        channel_id                   = '8365'
        from_date                    = '2008-09-30T00:27:27Z'
        thru_date                    = '2008-09-30T00:30:00Z'
        level_qc                     = 1
        aims_rss_val                 = 100
        xml_url                      = 'http://data.aims.gov.au/gbroosdata/services/rss/netcdf/level%s/%s' % (str(level_qc), str(aims_rss_val))

        aims_xml_info                = parse_aims_xml(xml_url)
        channel_id_info = aims_xml_info[channel_id]
        self.netcdf_tmp_file_path    = download_channel(channel_id, from_date, thru_date, level_qc)
        modify_soop_trv_netcdf(self.netcdf_tmp_file_path, channel_id_info)

        # force values of attributes which change all the time
        netcdf_file_obj              = Dataset(self.netcdf_tmp_file_path, 'a', format='NETCDF4')
        netcdf_file_obj.date_created = "1970-01-01T00:00:00Z"
        netcdf_file_obj.history      = 'data validation test only'
        netcdf_file_obj.close()

        new_file_path = remove_creation_date_from_filename(self.netcdf_tmp_file_path)
        shutil.move(self.netcdf_tmp_file_path, new_file_path)
        self.netcdf_tmp_file_path = new_file_path
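remove_creation_date_from_filename is used here but not shown. Below is a minimal sketch of what such a helper might look like, assuming an IMOS-style creation-date token (e.g. "_C-20080930T002727Z") embedded in the file name; the regex and behaviour are assumptions, not the project's actual implementation.

import os
import re


def remove_creation_date_from_filename(netcdf_file_path):
    # Hypothetical sketch: strip an assumed "_C-YYYYMMDDTHHMMSSZ" creation-date
    # token from the file name and return the resulting path.
    dir_name, file_name = os.path.split(netcdf_file_path)
    file_name = re.sub(r'_C-\d{8}T\d{6}Z', '', file_name)
    return os.path.join(dir_name, file_name)
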
Example #2
    def setUp(self):
        """ Check that a the AIMS system or this script hasn't been modified.
        This function checks that a downloaded file still has the same md5.
        """
        logging_aims()
        channel_id = '9272'
        from_date = '2016-01-01T00:00:00Z'
        thru_date = '2016-01-02T00:00:00Z'
        level_qc = 1
        faimms_rss_val = 1
        xml_url = 'http://data.aims.gov.au/gbroosdata/services/rss/netcdf/level%s/%s' % (
            str(level_qc), str(faimms_rss_val))

        aims_xml_info = parse_aims_xml(xml_url)
        channel_id_info = aims_xml_info[channel_id]
        self.netcdf_tmp_file_path = download_channel(channel_id, from_date,
                                                     thru_date, level_qc)
        modify_faimms_netcdf(self.netcdf_tmp_file_path, channel_id_info)

        # force values of attributes which change all the time
        netcdf_file_obj = Dataset(self.netcdf_tmp_file_path,
                                  'a',
                                  format='NETCDF4')
        netcdf_file_obj.date_created = "1970-01-01T00:00:00Z"  # epoch
        netcdf_file_obj.history = 'data validation test only'
        netcdf_file_obj.close()
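parse_aims_xml is not defined in this example; the code only relies on it returning a mapping keyed by channel id. A minimal sketch under that assumption (Python 3, hypothetical RSS tag names; the real feed structure may differ):

import urllib.request
import xml.etree.ElementTree as ET


def parse_aims_xml(xml_url):
    # Hypothetical sketch: fetch the AIMS RSS feed and index its <item>
    # entries by channel id. The 'channelId' element name is an assumption.
    tree = ET.parse(urllib.request.urlopen(xml_url))
    aims_xml_info = {}
    for item in tree.iter('item'):
        channel_id = item.findtext('channelId')
        if channel_id is not None:
            aims_xml_info[channel_id] = {child.tag: child.text for child in item}
    return aims_xml_info
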
Example #3
def modify_soop_trv_netcdf(netcdf_file_path, channel_id_info):
    """
    Modify the downloaded NetCDF file so it passes both the CF and IMOS checkers.
    input:
    netcdf_file_path (str)   : path of the NetCDF file to modify
    channel_id_info (dict)   : information from the XML feed for the channel
    """
    logger = logging_aims()

    modify_aims_netcdf(netcdf_file_path, channel_id_info)
    netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
    ship_code       = netcdf_file_obj.platform_code
    vessel_name     = ship_callsign(ship_code)

    if vessel_name is None:
        logger.error('   UNKNOWN SHIP - channel %s' % str(channel_id_info['channel_id']))
        netcdf_file_obj.close()
        return False

    # add global attributes to the NetCDF file
    netcdf_file_obj.cdm_data_type = 'Trajectory'
    netcdf_file_obj.vessel_name   = vessel_name
    netcdf_file_obj.trip_id       = channel_id_info['trip_id']
    coordinates_att               = "TIME LATITUDE LONGITUDE DEPTH"

    # depth
    depth                 = netcdf_file_obj.variables['depth']
    depth.positive        = 'down'
    depth.axis            = 'Z'
    depth.reference_datum = 'sea surface'
    depth.valid_max       = 30.0
    depth.valid_min       = -10.0
    netcdf_file_obj.renameVariable('depth', 'DEPTH')

    # latitude longitude
    latitude                      = netcdf_file_obj.variables['LATITUDE']
    latitude.ancillary_variables  = 'LATITUDE_quality_control'

    longitude                     = netcdf_file_obj.variables['LONGITUDE']
    longitude.ancillary_variables = 'LONGITUDE_quality_control'

    latitude_qc                   = netcdf_file_obj.variables['LATITUDE_quality_control']
    latitude_qc.long_name         = 'LATITUDE quality control'
    latitude_qc.standard_name     = 'latitude status_flag'
    longitude_qc                  = netcdf_file_obj.variables['LONGITUDE_quality_control']
    longitude_qc.long_name        = 'LONGITUDE quality control'
    longitude_qc.standard_name    = 'longitude status_flag'

    netcdf_file_obj.close()

    netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
    main_var        = get_main_soop_trv_var(netcdf_file_path)
    netcdf_file_obj.variables[main_var].coordinates = coordinates_att

    netcdf_file_obj.close()

    if not convert_time_cf_to_imos(netcdf_file_path):
        return False

    remove_dimension_from_netcdf(netcdf_file_path)  # must be the last modification

    return True
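get_main_soop_trv_var is only called here; the surrounding code needs it to return the name of the main data variable so the coordinates attribute can be attached to it. A minimal sketch under that assumption (the exclusion rules are a guess, not the project's logic):

from netCDF4 import Dataset


def get_main_soop_trv_var(netcdf_file_path):
    # Hypothetical sketch: return the first variable that is neither a
    # coordinate variable nor a *_quality_control ancillary variable.
    auxiliary_vars = {'TIME', 'LATITUDE', 'LONGITUDE', 'DEPTH', 'depth'}
    with Dataset(netcdf_file_path, 'r') as netcdf_file_obj:
        for var_name in netcdf_file_obj.variables:
            if var_name in auxiliary_vars or var_name.endswith('_quality_control'):
                continue
            return var_name
    return None
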
Example #4
        new_file_path = remove_creation_date_from_filename(self.netcdf_tmp_file_path)
        shutil.move(self.netcdf_tmp_file_path, new_file_path)
        self.netcdf_tmp_file_path = new_file_path

    def tearDown(self):
        shutil.copy(self.netcdf_tmp_file_path, os.path.join(os.environ['data_wip_path'], 'nc_unittest_%s.nc' % self.md5_netcdf_value))
        shutil.rmtree(os.path.dirname(self.netcdf_tmp_file_path))

    def test_aims_validation(self):
        self.md5_expected_value = '18770178cd71c228e8b59ccba3c7b8b5'
        self.md5_netcdf_value   = md5(self.netcdf_tmp_file_path)

        self.assertEqual(self.md5_netcdf_value, self.md5_expected_value)


if __name__ == '__main__':
    me  = singleton.SingleInstance()
    os.environ['data_wip_path'] = os.path.join(os.environ.get('WIP_DIR'), 'SOOP', 'SOOP_TRV_RSS_Download_temporary')
    set_up()
    res = data_validation_test.main(exit=False)

    logger = logging_aims()

    if res.result.wasSuccessful():
        process_qc_level(1)  # no need to process level 0 for SOOP TRV
    else:
        logger.warning('Data validation unittests failed')

    close_logger(logger)
    exit(0)
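The md5 helper used by test_aims_validation is not shown. A minimal sketch using the standard hashlib module (the project's own function may differ in how it reads the file):

import hashlib


def md5(file_path, chunk_size=8192):
    # Hypothetical sketch: hex MD5 digest of the file contents, read in chunks
    # so large NetCDF files are not loaded into memory at once.
    md5_hash = hashlib.md5()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5_hash.update(chunk)
    return md5_hash.hexdigest()
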
Example #5
if __name__ == '__main__':
    vargs = args()
    me = singleton.SingleInstance()
    os.environ['data_wip_path'] = os.path.join(os.environ.get('WIP_DIR'), 'ANMN', 'NRS_AIMS_Darwin_Yongala_data_rss_download_temporary')
    global TMP_MANIFEST_DIR
    global TESTING

    set_up()

    # data validation test
    runner = data_validation_test.TextTestRunner()
    itersuite = data_validation_test.TestLoader().loadTestsFromTestCase(AimsDataValidationTest)
    res = runner.run(itersuite)

    logger = logging_aims()
    if not DATA_WIP_PATH:
        logger.error('environment variable data_wip_path is not defined.')
        exit(1)

    # script optional argument for testing only. used in process_monthly_channel
    TESTING = vargs.testing

    rm_tmp_dir(DATA_WIP_PATH)

    if len(os.listdir(ANMN_NRS_INCOMING_DIR)) >= 2:
        logger.warning('Operation aborted, too many files in INCOMING_DIR')
        exit(0)
    if len(os.listdir(ANMN_NRS_ERROR_DIR)) >= 2:
        logger.warning('Operation aborted, too many files in ERROR_DIR')
        exit(0)
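args() is only consumed here through vargs.testing. A minimal argparse sketch consistent with that usage (the option name and help text are assumptions; the real parser may define more options):

import argparse


def args():
    # Hypothetical sketch: expose a single optional flag matching the
    # `vargs.testing` access in the __main__ block above.
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--testing', action='store_true',
                        help='run in testing mode (used by process_monthly_channel)')
    return parser.parse_args()
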
Example #6
        if sys.version_info[0] < 3:
            self.md5_expected_value = '18770178cd71c228e8b59ccba3c7b8b5'
        else:
            self.md5_expected_value = '2cc22593a87186d992090cc138f5daa8'

        self.md5_netcdf_value = md5(self.netcdf_tmp_file_path)

        self.assertEqual(self.md5_netcdf_value, self.md5_expected_value)


if __name__ == '__main__':
    me = singleton.SingleInstance()

    os.environ['data_wip_path'] = os.path.join(
        os.environ.get('WIP_DIR'), 'SOOP', 'SOOP_TRV_RSS_Download_temporary')
    set_up()

    # initialise logging
    logging_aims()
    global logger
    logger = logging.getLogger(__name__)

    # data validation to make sure input files don't vary. Manual debug required if different
    res = data_validation_test.main(exit=False)
    if res.result.wasSuccessful():
        process_qc_level(1)  # no need to process level 0 for SOOP TRV
    else:
        logger.error(
            'Data validation unittests failed. Manual check required to see differences in input NetCDF files'
        )
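The calls on data_validation_test mirror the standard unittest API: main(exit=False) returns a TestProgram whose .result can be inspected instead of letting unittest call sys.exit(). A minimal standalone illustration of that pattern (not project code):

import unittest


class DummyTest(unittest.TestCase):
    def test_always_passes(self):
        self.assertTrue(True)


if __name__ == '__main__':
    # exit=False makes unittest.main() return instead of exiting the process,
    # so the outcome can drive further processing, as in the script above.
    res = unittest.main(exit=False)
    print(res.result.wasSuccessful())
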
Example #7
def modify_soop_trv_netcdf(netcdf_file_path, channel_id_info):
    """
    Modify the downloaded NetCDF file so it passes both the CF and IMOS checkers.
    input:
    netcdf_file_path (str)   : path of the NetCDF file to modify
    channel_id_info (dict)   : information from the XML feed for the channel
    """
    logger = logging_aims()

    modify_aims_netcdf(netcdf_file_path, channel_id_info)
    netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
    ship_code = netcdf_file_obj.platform_code
    vessel_name = ship_callsign(ship_code)

    if vessel_name is None:
        logger.error('   UNKNOWN SHIP - channel %s' %
                     str(channel_id_info['channel_id']))
        netcdf_file_obj.close()
        return False

    # add global attributes to the NetCDF file
    netcdf_file_obj.cdm_data_type = 'Trajectory'
    netcdf_file_obj.vessel_name = vessel_name
    netcdf_file_obj.trip_id = channel_id_info['trip_id']
    coordinates_att = "TIME LATITUDE LONGITUDE DEPTH"

    # depth
    depth = netcdf_file_obj.variables['depth']
    depth.positive = 'down'
    depth.axis = 'Z'
    depth.reference_datum = 'sea surface'
    depth.valid_max = 30.0
    depth.valid_min = -10.0
    netcdf_file_obj.renameVariable('depth', 'DEPTH')

    # latitude longitude
    latitude = netcdf_file_obj.variables['LATITUDE']
    latitude.ancillary_variables = 'LATITUDE_quality_control'

    longitude = netcdf_file_obj.variables['LONGITUDE']
    longitude.ancillary_variables = 'LONGITUDE_quality_control'

    latitude_qc = netcdf_file_obj.variables['LATITUDE_quality_control']
    latitude_qc.long_name = 'LATITUDE quality control'
    latitude_qc.standard_name = 'latitude status_flag'
    longitude_qc = netcdf_file_obj.variables['LONGITUDE_quality_control']
    longitude_qc.long_name = 'LONGITUDE quality control'
    longitude_qc.standard_name = 'longitude status_flag'

    netcdf_file_obj.close()

    netcdf_file_obj = Dataset(netcdf_file_path, 'a', format='NETCDF4')
    main_var = get_main_soop_trv_var(netcdf_file_path)
    netcdf_file_obj.variables[main_var].coordinates = coordinates_att

    netcdf_file_obj.close()

    if not convert_time_cf_to_imos(netcdf_file_path):
        return False

    remove_dimension_from_netcdf(netcdf_file_path)  # must be the last modification

    return True
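ship_callsign maps a platform code to a vessel name and returns None for unknown codes, which is all modify_soop_trv_netcdf relies on. A minimal sketch of such a lookup with a made-up table (the real function presumably reads a maintained vocabulary):

# Hypothetical callsign-to-vessel table; the entry below is illustrative only.
SHIP_CALLSIGN_TABLE = {
    'VXYZ': 'Example-Vessel',
}


def ship_callsign(ship_code):
    # Return the vessel name for a platform code, or None if it is unknown,
    # matching the behaviour relied on by modify_soop_trv_netcdf above.
    return SHIP_CALLSIGN_TABLE.get(ship_code)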