        # Tail of setUp (the start of the method is outside this view): rename the
        # downloaded NetCDF so its creation date is stripped from the filename
        # (makes the md5 comparison below reproducible), then track the new path.
        shutil.move(self.netcdf_tmp_file_path,
                    remove_creation_date_from_filename(self.netcdf_tmp_file_path))
        self.netcdf_tmp_file_path = remove_creation_date_from_filename(self.netcdf_tmp_file_path)

    def tearDown(self):
        # Keep a copy of the tested file (named after its md5) in the WIP dir for
        # later inspection, then remove the temporary download dir.
        # NOTE(review): relies on self.md5_netcdf_value being set by
        # test_aims_validation before tearDown runs — verify test always runs first.
        shutil.copy(self.netcdf_tmp_file_path,
                    os.path.join(os.environ['data_wip_path'],
                                 'nc_unittest_%s.nc' % self.md5_netcdf_value))
        shutil.rmtree(os.path.dirname(self.netcdf_tmp_file_path))

    def test_aims_validation(self):
        # Compare the md5 of the downloaded NetCDF against a known-good checksum.
        self.md5_expected_value = '18770178cd71c228e8b59ccba3c7b8b5'
        self.md5_netcdf_value = md5(self.netcdf_tmp_file_path)
        self.assertEqual(self.md5_netcdf_value, self.md5_expected_value)


if __name__ == '__main__':
    # Guard against concurrent runs of this script.
    me = singleton.SingleInstance()
    os.environ['data_wip_path'] = os.path.join(os.environ.get('WIP_DIR'), 'SOOP',
                                               'SOOP_TRV_RSS_Download_temporary')
    set_up()
    # Run the data-validation unittests in-process (exit=False keeps control here).
    res = data_validation_test.main(exit=False)
    logger = logging_aims()
    if res.result.wasSuccessful():
        process_qc_level(1)  # no need to process level 0 for SOOP TRV
    else:
        logger.warning('Data validation unittests failed')
    close_logger(logger)
    exit(0)
        # Body of a guard whose condition is outside this view (presumably a
        # file-count check on INCOMING_DIR — TODO confirm): abort the run.
        logger.warning('Operation aborted, too many files in INCOMING_DIR')
        exit(0)

    # Also abort when previous runs have left files in the error dir.
    if len(os.listdir(ANMN_NRS_ERROR_DIR)) >= 2:
        logger.warning('Operation aborted, too many files in ERROR_DIR')
        exit(0)

    if not res.failures:
        # Validation passed: process both QC levels; for each level that produced
        # files, write a one-line manifest pointing at the temporary dir and hand
        # it to the pipeline incoming dir.
        for level in [0, 1]:
            date_str_now = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
            TMP_MANIFEST_DIR = os.path.join(DATA_WIP_PATH,
                                            'manifest_dir_tmp_{date}'.format(date=date_str_now))
            os.makedirs(TMP_MANIFEST_DIR)
            process_qc_level(level)
            # Only publish a manifest when files were actually produced.
            if len(os.listdir(TMP_MANIFEST_DIR)) > 0:
                incoming_dir_file = os.path.join(DATA_WIP_PATH,
                                                 'anmn_nrs_aims_FV0{level}_{date}.dir_manifest'.format(
                                                     level=str(level), date=date_str_now))
                with open(incoming_dir_file, 'w') as manifest_file:
                    manifest_file.write("%s\n" % TMP_MANIFEST_DIR)
                os.chmod(incoming_dir_file, 0o0664)  # change to 664 for pipeline v2
                shutil.move(incoming_dir_file,
                            os.path.join(ANMN_NRS_INCOMING_DIR,
                                         os.path.basename(incoming_dir_file)))
    else:
        logger.warning('Data validation unittests failed')
    close_logger(logger)
    exit(0)
def process_monthly_channel(channel_id, aims_xml_info, level_qc):
    """ Downloads all the data available for one channel_id and moves the file to a wip_path dir

    Each missing month is downloaded, validated (non-empty TIME, monotonic TIME,
    known site_code, CF/IMOS compliance), renamed, and moved to the temporary
    incoming dir. Processing stops at the first hard error; progress is recorded
    with save_channel_info so the next run resumes from the last handled month.

    channel_id(str)
    aims_xml_info(tuple)
    level_qc(int)
    aims_service : 1   -> FAIMMS data
                   100 -> SOOP TRV data
                   300 -> NRS DATA
    for monthly data download, only 1 and 300 should be use
    """
    logger.info('>> QC%s - Processing channel %s' % (str(level_qc), str(channel_id)))
    channel_id_info = aims_xml_info[channel_id]
    from_date = channel_id_info['from_date']
    thru_date = channel_id_info['thru_date']
    [start_dates, end_dates] = create_list_of_dates_to_download(channel_id, level_qc, from_date, thru_date)

    if len(start_dates) != 0:
        # loop-invariant values hoisted out of the monthly download loop
        contact_aims_msg = "Process of channel aborted - CONTACT AIMS"
        wip_path = os.environ.get('data_wip_path')

        # download monthly file
        for start_date, end_date in zip(start_dates, end_dates):
            start_date = start_date.strftime("%Y-%m-%dT%H:%M:%SZ")
            end_date = end_date.strftime("%Y-%m-%dT%H:%M:%SZ")
            netcdf_tmp_file_path = download_channel(channel_id, start_date, end_date, level_qc)

            if netcdf_tmp_file_path is None:
                logger.error(' Channel %s - not valid zip file - %s' % (str(channel_id), contact_aims_msg))
                break

            # NO_DATA_FOUND file only means there is no data for the selected
            # time period. Could be some data afterwards
            if is_no_data_found(netcdf_tmp_file_path):
                logger.warning(' Channel %s - No data for the time period:%s - %s' % (str(channel_id), start_date, end_date))
                shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
            else:
                if is_time_var_empty(netcdf_tmp_file_path):
                    logger.error(' Channel %s - No values in TIME variable - %s' % (str(channel_id), contact_aims_msg))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                if not modify_anmn_nrs_netcdf(netcdf_tmp_file_path, channel_id_info):
                    logger.error(' Channel %s - Could not modify the NetCDF file - Process of channel aborted' % str(channel_id))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                main_var = get_main_netcdf_var(netcdf_tmp_file_path)
                if has_var_only_fill_value(netcdf_tmp_file_path, main_var):
                    logger.error(' Channel %s - _Fillvalues only in main variable - %s' % (str(channel_id), contact_aims_msg))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                if get_anmn_nrs_site_name(netcdf_tmp_file_path) == []:
                    logger.error(' Channel %s - Unknown site_code gatt value - %s' % (str(channel_id), contact_aims_msg))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                if not is_time_monotonic(netcdf_tmp_file_path):
                    # typo fixed: 'strickly' -> 'strictly'
                    logger.error(' Channel %s - TIME value is not strictly monotonic - %s' % (str(channel_id), contact_aims_msg))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                # check every single file of the list. We don't assume that if one
                # passes, all pass ... past proved this
                checker_retval = pass_netcdf_checker(netcdf_tmp_file_path, tests=['cf:latest', 'imos:1.3'])
                if not checker_retval:
                    logger.error(' Channel %s - File does not pass CF/IMOS compliance checker - Process of channel aborted' % str(channel_id))
                    shutil.copy(netcdf_tmp_file_path, os.path.join(wip_path, 'errors'))
                    logger.error(' File copied to %s for debugging' % (os.path.join(wip_path, 'errors', os.path.basename(netcdf_tmp_file_path))))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                netcdf_tmp_file_path = fix_data_code_from_filename(netcdf_tmp_file_path)
                netcdf_tmp_file_path = fix_provider_code_from_filename(netcdf_tmp_file_path, 'IMOS_ANMN')

                # raw string, and '[A-Z]' instead of the redundant '[A-Z]{1}'
                if re.search(r'IMOS_ANMN_[A-Z]_', netcdf_tmp_file_path) is None:
                    logger.error(' Channel %s - File name Data code does not pass REGEX - Process of channel aborted' % str(channel_id))
                    shutil.copy(netcdf_tmp_file_path, os.path.join(wip_path, 'errors'))
                    logger.error(' File copied to %s for debugging' % (os.path.join(wip_path, 'errors', os.path.basename(netcdf_tmp_file_path))))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                move_to_tmp_incoming(netcdf_tmp_file_path)

                if TESTING:
                    # The 2 next lines download the first month only for every single channel. This is only used for testing
                    save_channel_info(channel_id, aims_xml_info, level_qc, end_date)
                    break

            # record progress so the next run starts after this month
            # NOTE(review): placed at loop level so empty (NO_DATA_FOUND) months
            # also advance the saved pointer — confirm against upstream history.
            save_channel_info(channel_id, aims_xml_info, level_qc, end_date)
    else:
        logger.info('QC%s - Channel %s already up to date' % (str(level_qc), str(channel_id)))

    close_logger(logger)
def process_monthly_channel(channel_id, aims_xml_info, level_qc):
    """ Downloads all the data available for one channel_id and moves the file to a wip_path dir

    Each missing month is downloaded, validated (non-empty TIME, monotonic TIME,
    known site_code/platform type, CF/IMOS compliance), renamed, and moved to the
    temporary incoming dir. Processing stops at the first hard error; progress is
    recorded with save_channel_info so the next run resumes from the last
    handled month.

    channel_id(str)
    aims_xml_info(tuple)
    level_qc(int)
    aims_service : 1   -> FAIMMS data
                   100 -> SOOP TRV data
                   300 -> NRS DATA
    for monthly data download, only 1 and 300 should be use
    """
    logger.info('>> QC%s - Processing channel %s' % (str(level_qc), str(channel_id)))
    channel_id_info = aims_xml_info[channel_id]
    from_date = channel_id_info['from_date']
    thru_date = channel_id_info['thru_date']
    [start_dates, end_dates] = create_list_of_dates_to_download(channel_id, level_qc, from_date, thru_date)

    if len(start_dates) != 0:
        # loop-invariant values hoisted out of the monthly download loop
        contact_aims_msg = "Process of channel aborted - CONTACT AIMS"
        wip_path = DATA_WIP_PATH

        # download monthly file
        for start_date, end_date in zip(start_dates, end_dates):
            start_date = start_date.strftime("%Y-%m-%dT%H:%M:%SZ")
            end_date = end_date.strftime("%Y-%m-%dT%H:%M:%SZ")
            netcdf_tmp_file_path = download_channel(channel_id, start_date, end_date, level_qc)

            if netcdf_tmp_file_path is None:
                logger.error(' Channel %s - not valid zip file - %s' % (str(channel_id), contact_aims_msg))
                break

            # NO_DATA_FOUND file only means there is no data for the selected
            # time period. Could be some data afterwards
            if is_no_data_found(netcdf_tmp_file_path):
                logger.warning(' Channel %s - No data for the time period:%s - %s' % (str(channel_id), start_date, end_date))
                shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
            else:
                if is_time_var_empty(netcdf_tmp_file_path):
                    logger.error(' Channel %s - No values in TIME variable - %s' % (str(channel_id), contact_aims_msg))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                if not modify_faimms_netcdf(netcdf_tmp_file_path, channel_id_info):
                    logger.error(' Channel %s - Could not modify the NetCDF file - Process of channel aborted' % str(channel_id))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                main_var = get_main_faimms_var(netcdf_tmp_file_path)
                if has_var_only_fill_value(netcdf_tmp_file_path, main_var):
                    logger.error(' Channel %s - _Fillvalues only in main variable - %s' % (str(channel_id), contact_aims_msg))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                if get_faimms_site_name(netcdf_tmp_file_path) == [] or get_faimms_platform_type(netcdf_tmp_file_path) == []:
                    logger.error(' Channel %s - Unknown site_code gatt value - %s' % (str(channel_id), contact_aims_msg))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                if not is_time_monotonic(netcdf_tmp_file_path):
                    # typo fixed: 'strickly' -> 'strictly'
                    logger.error(' Channel %s - TIME value is not strictly monotonic - %s' % (str(channel_id), contact_aims_msg))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                # check every single file of the list. We don't assume that if one
                # passes, all pass ... past proved this
                checker_retval = pass_netcdf_checker(netcdf_tmp_file_path, tests=['cf:latest', 'imos:1.3'])
                if not checker_retval:
                    logger.error(' Channel %s - File does not pass CF/IMOS compliance checker - Process of channel aborted' % str(channel_id))
                    shutil.copy(netcdf_tmp_file_path, os.path.join(wip_path, 'errors'))
                    logger.error(' File copied to %s for debugging' % (os.path.join(wip_path, 'errors', os.path.basename(netcdf_tmp_file_path))))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                netcdf_tmp_file_path = fix_data_code_from_filename(netcdf_tmp_file_path)
                netcdf_tmp_file_path = fix_provider_code_from_filename(netcdf_tmp_file_path, 'IMOS_FAIMMS')

                # raw string, and '[A-Z]' instead of the redundant '[A-Z]{1}'
                if re.search(r'IMOS_FAIMMS_[A-Z]_', netcdf_tmp_file_path) is None:
                    logger.error(' Channel %s - File name Data code does not pass REGEX - Process of channel aborted' % str(channel_id))
                    shutil.copy(netcdf_tmp_file_path, os.path.join(wip_path, 'errors'))
                    logger.error(' File copied to %s for debugging' % (os.path.join(wip_path, 'errors', os.path.basename(netcdf_tmp_file_path))))
                    shutil.rmtree(os.path.dirname(netcdf_tmp_file_path))
                    break

                move_to_tmp_incoming(netcdf_tmp_file_path)

                if TESTING:
                    # The 2 next lines download the first month only for every single channel. This is only used for testing
                    save_channel_info(channel_id, aims_xml_info, level_qc, end_date)
                    break

            # record progress so the next run starts after this month
            # NOTE(review): placed at loop level so empty (NO_DATA_FOUND) months
            # also advance the saved pointer — confirm against upstream history.
            save_channel_info(channel_id, aims_xml_info, level_qc, end_date)
    else:
        logger.info('QC%s - Channel %s already up to date' % (str(level_qc), str(channel_id)))

    close_logger(logger)