def convert_json(data):
    """Convert between a JSON string and its Python representation.

    A string input is parsed with ``json.loads`` and always returned as a
    list: a decoded dict is wrapped in a single-element list, a decoded
    list is returned as-is.  A list or dict input is serialized with
    ``json.dumps`` and returned as a string.

    Parameters:
        data - A JSON string, or a list/dict to serialize.

    Returns:
        list when the input was a string; str when the input was a
        list or dict.

    Raises:
        ESPAException: If the input is not a str, list, or dict.
    """
    # isinstance is the idiomatic type check and, unlike ``type(x) is T``,
    # also accepts subclasses of the listed types
    if isinstance(data, str):
        # Parse the JSON text and normalize the result to a list
        parsed = json.loads(data)
        if isinstance(parsed, dict):
            return [parsed]
        return parsed
    elif isinstance(data, (list, dict)):
        # Serialize the structure back to JSON text
        return json.dumps(data)
    else:
        msg = 'Non-compatible data type for input data of type {0}'.format(
            type(data))
        base_logger.critical(msg)
        raise ESPAException(msg)
def get_filename(work_dir, product_id):
    """Locate and return the Landsat MTL metadata filename for a product.

    The located file may contain known issues, so it is passed through the
    fix function before the name is returned.
    """
    logger = EspaLogging.get_logger(settings.PROCESSING_LOGGER)

    # Remember where we started so we can always return there
    starting_directory = os.getcwd()
    os.chdir(work_dir)

    try:
        # Take the first MTL match that is neither a backup ('old' in the
        # name) nor an 'lnd'-prefixed file
        candidates = (name
                      for name in glob.glob('{0}_MTL.*'.format(product_id))
                      if 'old' not in name and not name.startswith('lnd'))
        filename = next(candidates, '')

        if not filename:
            raise ESPAException(
                'Unable to locate the MTL file in [{0}]'.format(work_dir))

        logger.info('Located MTL file: [{0}]'.format(filename))

        # Clean up known issues in the metadata file before using it
        filename = fix_file(filename)

        logger.info('Using MTL file: [{0}]'.format(filename))
    finally:
        # Always restore the original working directory
        os.chdir(starting_directory)

    return filename
def distribute_product_remote(immutability, product_name, source_path,
                              packaging_path, cache_path, parms):
    """Package the product and deliver it to a remote cache host.

    Packaging and delivery are each retried independently inside an outer
    retry loop; the outer loop's sleep grows by 1.5x per attempt.

    Parameters:
        immutability - Passed through to packaging/transfer helpers.
        product_name - Name used for the packaged product files.
        source_path - Where the product files to package reside.
        packaging_path - Where the package is built.
        cache_path - Destination path on the remote cache host.
        parms - Order parameters; 'options' must hold the destination
                credentials ('destination_username', 'destination_pw').

    Returns:
        (product_file, cksum_file) tuple of remote file locations.

    Raises:
        ESPAException: When the local and remote checksums disagree.
        Exception: Whatever packaging/transfer raised, once all retry
                   attempts are exhausted.
    """
    logger = EspaLogging.get_logger(settings.PROCESSING_LOGGER)

    opts = parms['options']

    env = Environment()

    # Determine the remote hostname to use
    destination_host = utilities.get_cache_hostname(env.get_cache_host_list())

    # Deliver the product files
    # Attempt X times sleeping between each attempt
    sleep_seconds = settings.DEFAULT_SLEEP_SECONDS
    max_number_of_attempts = settings.MAX_DISTRIBUTION_ATTEMPTS
    max_package_attempts = settings.MAX_PACKAGING_ATTEMPTS
    max_delivery_attempts = settings.MAX_DELIVERY_ATTEMPTS

    attempt = 0
    # Defaults only overwritten on a fully successful delivery
    product_file = 'ERROR'
    cksum_file = 'ERROR'

    while True:
        try:
            # Package the product files
            # Attempt X times sleeping between each sub_attempt
            sub_attempt = 0
            while True:
                try:
                    (product_full_path, cksum_full_path,
                     local_cksum_value) = package_product(immutability,
                                                          source_path,
                                                          packaging_path,
                                                          product_name)
                except Exception:
                    logger.exception("An exception occurred processing %s"
                                     % product_name)
                    if sub_attempt < max_package_attempts:
                        sleep(sleep_seconds)  # sleep before trying again
                        sub_attempt += 1
                        continue
                    else:
                        # Exhausted packaging retries; let the outer loop
                        # decide whether to retry the whole sequence
                        raise
                break

            # Distribute the product
            # Attempt X times sleeping between each sub_attempt
            sub_attempt = 0
            while True:
                try:
                    (remote_cksum_value, product_file, cksum_file) = \
                        transfer_product(immutability, destination_host,
                                         cache_path,
                                         opts['destination_username'],
                                         opts['destination_pw'],
                                         product_full_path, cksum_full_path)
                except Exception:
                    logger.exception("An exception occurred processing %s"
                                     % product_name)
                    if sub_attempt < max_delivery_attempts:
                        sleep(sleep_seconds)  # sleep before trying again
                        sub_attempt += 1
                        continue
                    else:
                        raise
                break

            # Checksum validation
            # Compare only the checksum digest (first whitespace-separated
            # token), ignoring the filename portion of the tool output
            if local_cksum_value.split()[0] != remote_cksum_value.split()[0]:
                raise ESPAException("Failed checksum validation between"
                                    " %s and %s:%s"
                                    % (product_full_path,
                                       destination_host,
                                       product_file))

            # Always log where we placed the files
            logger.info("Delivered product to %s at location %s"
                        " and cksum location %s" % (destination_host,
                                                    product_file,
                                                    cksum_file))
        except Exception:
            if attempt < max_number_of_attempts:
                sleep(sleep_seconds)  # sleep before trying again
                attempt += 1
                # adjust for next set
                sleep_seconds = int(sleep_seconds * 1.5)
                continue
            else:
                raise
        break

    return (product_file, cksum_file)
def distribute_statistics_remote(immutability, product_id, source_path,
                                 destination_host, destination_path,
                                 destination_username, destination_pw):
    '''
    Description:
        Transfers the statistics to the specified directory on the
        destination host

    Parameters:
        product_id - The unique product ID associated with the files.
        source_path - The full path to where the statistics files to
                      distribute reside.
        destination_host - The hostname/url for where to distribute the
                           files.
        destination_path - The full path on the local system to copy the
                           statistics files into.
        destination_username - The user name to use for FTP
        destination_pw - The password to use for FTP

    Note: - It is assumed ssh has been setup for access between the
            localhost and destination system
          - It is assumed a stats directory exists under the current
            directory
    '''

    logger = EspaLogging.get_logger(settings.PROCESSING_LOGGER)

    d_name = 'stats'

    # Save the current directory location
    current_directory = os.getcwd()

    # Attempt X times sleeping between each attempt
    attempt = 0
    sleep_seconds = settings.DEFAULT_SLEEP_SECONDS

    while True:
        # Change to the source directory
        os.chdir(source_path)
        try:
            stats_wildcard = ''.join([product_id, '*'])
            stats_path = os.path.join(destination_path, d_name)
            stats_files = os.path.join(d_name, stats_wildcard)
            remote_stats_wildcard = os.path.join(stats_path, stats_wildcard)

            # Create the statistics directory on the destination host
            logger.info("Creating directory {0} on {1}".
                        format(stats_path, destination_host))
            cmd = ' '.join(['ssh', '-q', '-o', 'StrictHostKeyChecking=no',
                            destination_host, 'mkdir', '-p', stats_path])

            output = ''
            try:
                logger.debug(' '.join(["mkdir cmd:", cmd]))
                output = utilities.execute_cmd(cmd)
            finally:
                # Log whatever the command produced, even on failure
                if len(output) > 0:
                    logger.info(output)

            # Change the attributes on the files so that we can remove them
            if immutability:
                cmd = ' '.join(['ssh', '-q', '-o',
                                'StrictHostKeyChecking=no',
                                destination_host, 'sudo', 'chattr', '-if',
                                remote_stats_wildcard])
                output = ''
                try:
                    logger.debug(' '.join(["chattr remote stats cmd:",
                                           cmd]))
                    output = utilities.execute_cmd(cmd)
                except Exception:
                    # Best-effort: chattr fails when no prior files exist;
                    # deliberately ignored
                    pass
                finally:
                    if len(output) > 0:
                        logger.info(output)

            # Remove any pre-existing statistics
            cmd = ' '.join(['ssh', '-q', '-o', 'StrictHostKeyChecking=no',
                            destination_host, 'rm', '-f',
                            remote_stats_wildcard])
            output = ''
            try:
                logger.debug(' '.join(["rm remote stats cmd:", cmd]))
                output = utilities.execute_cmd(cmd)
            finally:
                if len(output) > 0:
                    logger.info(output)

            # Transfer the stats statistics
            transfer.transfer_file('localhost', stats_files,
                                   destination_host, stats_path,
                                   destination_username=destination_username,
                                   destination_pw=destination_pw)

            logger.info("Verifying statistics transfers")

            # NOTE - Re-purposing the stats_files variable
            # (now a list of matched local file names, not a wildcard)
            stats_files = glob.glob(stats_files)

            for file_name in stats_files:
                # Distinct placeholders so the error-logging paths below
                # have non-empty values before any real checksum exists
                local_cksum_value = 'a b'
                remote_cksum_value = 'b c'

                # Generate a local checksum value
                cmd = ' '.join([settings.ESPA_CHECKSUM_TOOL, file_name])
                try:
                    logger.debug(' '.join(["checksum cmd:", cmd]))
                    local_cksum_value = utilities.execute_cmd(cmd)
                except Exception:
                    if len(local_cksum_value) > 0:
                        logger.error(local_cksum_value)
                    raise

                # Generate a remote checksum value
                # file_name carries the 'stats/' prefix, so joining with
                # destination_path yields the path under stats_path
                remote_file = os.path.join(destination_path, file_name)
                cmd = ' '.join(['ssh', '-q', '-o',
                                'StrictHostKeyChecking=no',
                                destination_host,
                                settings.ESPA_CHECKSUM_TOOL, remote_file])
                try:
                    remote_cksum_value = utilities.execute_cmd(cmd)
                except Exception:
                    if len(remote_cksum_value) > 0:
                        logger.error(remote_cksum_value)
                    raise

                # Checksum validation
                # Compare only the digest token, ignoring the filename
                # portion of the checksum tool output
                if (local_cksum_value.split()[0] !=
                        remote_cksum_value.split()[0]):
                    raise ESPAException("Failed checksum validation between"
                                        " %s and %s:%s" % (file_name,
                                                           destination_host,
                                                           remote_file))

            # Change the attributes on the files so that we can't remove
            # them
            if immutability:
                cmd = ' '.join(['ssh', '-q', '-o',
                                'StrictHostKeyChecking=no',
                                destination_host, 'sudo', 'chattr', '+i',
                                remote_stats_wildcard])
                output = ''
                try:
                    logger.debug(' '.join(["chattr remote stats cmd:",
                                           cmd]))
                    output = utilities.execute_cmd(cmd)
                finally:
                    if len(output) > 0:
                        logger.info(output)

        except Exception:
            logger.exception("An exception occurred processing %s"
                             % product_id)
            if attempt < settings.MAX_DELIVERY_ATTEMPTS:
                sleep(sleep_seconds)  # sleep before trying again
                attempt += 1
                continue
            else:
                raise
        finally:
            # Change back to the previous directory
            # (runs before each retry as well as on exit)
            os.chdir(current_directory)

        break