def main(iargs=None):
    """Download SLC data via SSARA for the dataset described by the template.

    Parses command-line options, logs the invocation, optionally re-submits
    itself as a batch job, then runs the SSARA download into the project SLC
    directory.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; falls back to ``sys.argv[1:]`` when None.

    Returns
    -------
    None
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    # Log the exact invocation once (explicit iargs or the process argv).
    input_arguments = iargs if iargs is not None else sys.argv[1:]
    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = os.path.join(inps.work_dir, 'ssara_rsmas.log')
    logger = RsmasLogger(file_name=logfile_name)

    # Prefer an explicit SLC directory from the template; default to <work_dir>/SLC.
    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        # The literal string 'None' (not the None object) marks an unset walltime.
        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir, inps.wall_time)
        sys.exit(0)

    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))

    successful = run_ssara(project_slc_dir, inps.custom_template_file, inps.delta_lat, logger)
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
def main(iargs=None):
    """Download SLC data via SSARA for the dataset described by the template.

    JOB_SUBMIT-based variant: when ``--submit`` is requested the script
    re-submits itself (minus the ``--submit`` flag) as a batch job and exits.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; falls back to ``sys.argv[1:]`` when None.

    Returns
    -------
    None
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    input_arguments = iargs if iargs is not None else sys.argv[1:]
    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = os.path.join(inps.work_dir, 'ssara_rsmas.log')
    logger = RsmasLogger(file_name=logfile_name)

    # Prefer an explicit SLC directory from the template; default to <work_dir>/SLC.
    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        # Work on a copy so a caller-supplied iargs list is not mutated,
        # and so the re-submitted command does not loop on --submit.
        submit_arguments = list(input_arguments)
        if '--submit' in submit_arguments:
            submit_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + submit_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)

    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))

    successful = run_ssara(project_slc_dir, inps.custom_template_file, inps.delta_lat, logger)
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
def main(iargs=None):
    """Download SLC data serially from ASF for the dataset in the template.

    Generates the list of files to fetch, runs the serial ASF downloader,
    then fixes file permissions. May first re-submit itself as a batch job.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; falls back to ``sys.argv[1:]`` when None.

    Returns
    -------
    None
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    # Log the exact invocation once (explicit iargs or the process argv).
    input_arguments = iargs if iargs is not None else sys.argv[1:]
    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = os.path.join(inps.work_dir, 'asfserial_rsmas.log')
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir
        # The literal string 'None' (not the None object) marks an unset walltime.
        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']
        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir, inps.wall_time)
        # NOTE(review): unlike the ssara variant there is no sys.exit(0) here,
        # so after submitting the job the download also runs in this process.
        # Presumably submit_script exits or this is intentional — confirm.

    os.chdir(inps.work_dir)

    # Prefer an explicit SLC directory from the template; default to <work_dir>/SLC.
    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')
    os.chdir(inps.slc_dir)

    # Remove any stale ASF cookie jar so authentication starts clean.
    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    generate_files_csv(project_slc_dir, inps.custom_template_file)
    successful = run_download_asf_serial(project_slc_dir, logger)
    change_file_permissions()
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
def main(iargs=None):
    """Download data via SSARA into a platform-dependent download directory.

    COSMO-SkyMed data goes to ``RAW_data``; everything else to ``SLC``.
    May first re-submit itself as a batch job via JOB_SUBMIT.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; falls back to ``sys.argv[1:]`` when None.

    Returns
    -------
    None
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    input_arguments = iargs if iargs is not None else sys.argv[1:]
    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = os.path.join(inps.work_dir, 'ssara_rsmas.log')
    logger = RsmasLogger(file_name=logfile_name)

    if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
        inps.download_dir = inps.template[inps.prefix + 'Stack.slcDir']
    # NOTE(review): the assignment above is unconditionally overwritten by the
    # branch below, so the template slcDir never takes effect — confirm whether
    # the COSMO/SLC branch should instead be the fallback (else) case.
    if 'COSMO' in inps.template['ssaraopt.platform']:
        inps.download_dir = os.path.join(inps.work_dir, 'RAW_data')
    else:
        inps.download_dir = os.path.join(inps.work_dir, 'SLC')

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_ssara_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        # Work on a copy so a caller-supplied iargs list is not mutated,
        # and so the re-submitted command does not loop on --submit.
        submit_arguments = list(input_arguments)
        if '--submit' in submit_arguments:
            submit_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + submit_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    if not os.path.isdir(inps.download_dir):
        os.makedirs(inps.download_dir)
    os.chdir(inps.download_dir)

    successful = run_ssara(inps.download_dir, inps.custom_template_file, inps.delta_lat, logger)

    return None
from minsar.utils.download_ssara import add_polygon_to_ssaraopt pathObj = PathFind() ############## DIRECTORY AND FILE CONSTANTS ############## OPERATIONS_DIRECTORY = os.getenv('OPERATIONS') SCRATCH_DIRECTORY = os.getenv('SCRATCHDIR') TEMPLATE_DIRECTORY = os.path.join(OPERATIONS_DIRECTORY, "TEMPLATES") LOGS_DIRECTORY = os.path.join(OPERATIONS_DIRECTORY, "LOGS") ERRORS_DIRECTORY = os.path.join(OPERATIONS_DIRECTORY, "ERRORS") STORED_DATE_FILE = os.path.join(OPERATIONS_DIRECTORY, "stored_date.date") DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" logger_file = os.path.join(LOGS_DIRECTORY, "run_operations.log") logger_run_operations = RsmasLogger(logger_file) def create_run_operations_parser(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter, description= """Submits processing jobs for each datasest template present in the $OPERATIONS/TEMPLATES/ directory. \nPlace run_operations_LSF.job file into $OPERATIONS directory and submit with bsub < run_operations_LSF.job. \nIt runs run_operations.py once daily at 12:00 PM.""" ) parser.add_argument('-v', '--version', action='version',
def main(iargs=None):
    """Download SLC data from ASF, optionally seasonally windowed or parallel.

    Builds the CSV list of files to download (full range or a seasonal
    subset), then runs either the parallel or the serial ASF downloader.
    May first re-submit itself as a batch job via JOB_SUBMIT.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; falls back to ``sys.argv[1:]`` when None.

    Returns
    -------
    None
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')

    input_arguments = iargs if iargs is not None else sys.argv[1:]
    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = os.path.join(inps.work_dir, 'asfserial_rsmas.log')
    # Module-level logger so helper functions in this file can log too.
    global logger
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        # Work on a copy so a caller-supplied iargs list is not mutated,
        # and so the re-submitted command does not loop on --submit.
        submit_arguments = list(input_arguments)
        if '--submit' in submit_arguments:
            submit_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + submit_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)

    os.chdir(inps.work_dir)

    # Prefer an explicit SLC directory from the template; default to <work_dir>/SLC.
    if inps.template[inps.prefix + 'Stack.slcDir'] is not None:
        inps.slc_dir = inps.template[inps.prefix + 'Stack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    # Module-level so helper functions in this file can reference it.
    global project_slc_dir
    project_slc_dir = os.path.join(inps.work_dir, 'SLC')

    if not os.path.exists(inps.slc_dir):
        os.mkdir(inps.slc_dir)
    os.chdir(inps.slc_dir)

    # Remove any stale ASF cookie jar so authentication starts clean.
    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))

    # Clear leftover file lists from a previous run.
    # NOTE(review): shelling out to rm with a glob; os.remove over glob.glob
    # would avoid the shell, but stderr behavior would differ slightly.
    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv", shell=True).wait()

    # Seasonal window: template options apply first, CLI flags override them.
    seasonal_start_date = None
    seasonal_end_date = None
    try:
        if dataset_template.options['seasonalStartDate'] is not None \
                and dataset_template.options['seasonalEndDate'] is not None:
            seasonal_start_date = dataset_template.options['seasonalStartDate']
            seasonal_end_date = dataset_template.options['seasonalEndDate']
    except KeyError:
        # Seasonal keys are optional in the template.
        pass

    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        seasonal_start_date = inps.seasonalStartDate
        seasonal_end_date = inps.seasonalEndDate

    if seasonal_start_date is not None and seasonal_end_date is not None:
        generate_seasonal_files_csv(dataset_template, seasonal_start_date, seasonal_end_date)
    else:
        generate_files_csv(project_slc_dir, dataset_template)

    # Parallel download is opt-in via the template ('parallel: yes').
    parallel = False
    try:
        if dataset_template.options['parallel'] == 'yes':
            parallel = True
    except KeyError:
        pass

    # Thread count: template override, else one thread per CPU.
    threads = os.cpu_count()
    try:
        if dataset_template.options['threads'] is not None:
            threads = int(dataset_template.options['threads'])
    except (KeyError, ValueError):
        pass

    if parallel:
        run_parallel_download_asf_serial(project_slc_dir, threads)
    else:
        successful = run_download_asf_serial(project_slc_dir, logger)
        logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))

    change_file_permissions()
    logger.log(loglevel.INFO, "------------------------------------")

    subprocess.Popen("rm " + project_slc_dir + "/new_files*.csv", shell=True).wait()

    return None
import os import glob import subprocess import configparser from natsort import natsorted import xml.etree.ElementTree as ET import shutil from mintpy.defaults.auto_path import autoPath from minsar.objects.rsmas_logging import RsmasLogger, loglevel from minsar.objects.dataset_template import Template from mimt.objects.auto_defaults import PathFind ############################################################################### pathObj = PathFind() logfile_name = pathObj.logdir + '/process_rsmas.log' logger = RsmasLogger(file_name=logfile_name) ########################################################################## def send_logger(): return logger ########################################################################## def remove_directories(directories_to_delete): """ Removes given existing directories. """ for directory in directories_to_delete:
#!/usr/bin/env python3 import os import pandas as pd import sys import requests import argparse import time from datetime import datetime from minsar.objects.rsmas_logging import RsmasLogger, loglevel from minsar.objects.auto_defaults import PathFind from minsar.objects import message_rsmas pathObj = PathFind() logfile_name = pathObj.logdir + '/generate_templates.log' logger = RsmasLogger(logfile_name) DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" def cmd_line_parse(argv): parser = argparse.ArgumentParser( description='Generate Processing Template Files', formatter_class=argparse.RawTextHelpFormatter, epilog=None) ##### Input infile = parser.add_argument_group('File to Generate', 'File to Generate') infile.add_argument("--dataset", dest='dataset',
def main(iargs=None):
    """Download SLC data serially from ASF, with optional seasonal windowing.

    When seasonal start/end dates are given, one files-CSV is generated per
    calendar year of the dataset's date range; otherwise a single CSV covers
    the full range. Then runs the serial ASF downloader. May first re-submit
    itself as a batch job.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; falls back to ``sys.argv[1:]`` when None.

    Returns
    -------
    None
    """
    inps = putils.cmd_line_parse(iargs, script='download_rsmas')
    config = putils.get_config_defaults(config_file='job_defaults.cfg')

    # Log the exact invocation once (explicit iargs or the process argv).
    input_arguments = iargs if iargs is not None else sys.argv[1:]
    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    logfile_name = os.path.join(inps.work_dir, 'asfserial_rsmas.log')
    logger = RsmasLogger(file_name=logfile_name)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_asfserial_rsmas'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        work_dir = inps.work_dir
        # The literal string 'None' (not the None object) marks an unset walltime.
        if inps.wall_time == 'None':
            inps.wall_time = config['download_rsmas']['walltime']
        js.submit_script(job_name, job_file_name, sys.argv[:], work_dir, inps.wall_time)
        # NOTE(review): no sys.exit(0) here, unlike the ssara variant —
        # presumably submit_script exits, or the local run is intended; confirm.

    os.chdir(inps.work_dir)

    # Prefer an explicit SLC directory from the template; default to <work_dir>/SLC.
    if inps.template['topsStack.slcDir'] is not None:
        inps.slc_dir = inps.template['topsStack.slcDir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'SLC')

    project_slc_dir = os.path.join(inps.work_dir, 'SLC')
    os.chdir(inps.slc_dir)

    # Remove any stale ASF cookie jar so authentication starts clean.
    try:
        os.remove(os.path.expanduser('~') + '/.bulk_download_cookiejar.txt')
    except OSError:
        pass

    dataset_template = Template(inps.custom_template_file)
    dataset_template.options.update(
        PathFind.correct_for_ssara_date_format(dataset_template.options))

    # Clear the leftover file list from a previous run (cwd is inps.slc_dir).
    subprocess.Popen("rm new_files.csv", shell=True).wait()

    standardTuple = (inps, dataset_template)
    if inps.seasonalStartDate is not None and inps.seasonalEndDate is not None:
        # Seasonal mode: generate one CSV per year of the template date range.
        ogStartYearInt = int(dataset_template.options['ssaraopt.startDate'][:4])
        # y == 1 marks a season that wraps across the year boundary
        # (e.g. start month-day after end month-day).
        if int(inps.seasonalStartDate) > int(inps.seasonalEndDate):
            y = 1
        else:
            y = 0
        YearRange = int(dataset_template.options['ssaraopt.endDate'][:4]) - ogStartYearInt + 1
        # A wrapping season spans two calendar years, so one fewer window fits.
        if YearRange > 1 and y == 1:
            YearRange = YearRange - 1
        # Turn 'MMDD' into '-MM-DD' suffixes for building per-year dates.
        seasonalStartDateAddOn = '-' + inps.seasonalStartDate[:2] + '-' + inps.seasonalStartDate[2:]
        seasonalEndDateAddOn = '-' + inps.seasonalEndDate[:2] + '-' + inps.seasonalEndDate[2:]
        ogEndDate = dataset_template.options['ssaraopt.endDate']
        for x in range(YearRange):
            seasonalTuple = standardTuple + (x, ogStartYearInt, y, YearRange,
                                             seasonalStartDateAddOn,
                                             seasonalEndDateAddOn, ogEndDate)
            generate_files_csv(project_slc_dir, inps.custom_template_file, seasonalTuple)
            y += 1
    else:
        generate_files_csv(project_slc_dir, inps.custom_template_file, standardTuple)

    successful = run_download_asf_serial(project_slc_dir, logger)
    change_file_permissions()
    logger.log(loglevel.INFO, "SUCCESS: %s", str(successful))
    logger.log(loglevel.INFO, "------------------------------------")

    return None
def main(iargs=None):
    """Download COSMO-SkyMed raw data from the Terradue GEP catalog.

    Queries the catalog for products inside the template bounding box, filters
    them to the template date range, then fetches them with parallel curl
    processes authenticated via credentials read from password_config.py.
    May first re-submit itself as a batch job via JOB_SUBMIT.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments; falls back to ``sys.argv[1:]`` when None.

    Returns
    -------
    None
    """
    inps = putils.cmd_line_parse(iargs, script='')
    # Record the exact invocation (explicit iargs or the process argv).
    if not iargs is None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]
    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))
    logfile_name = inps.work_dir + '/gep_download.log'
    logger = RsmasLogger(file_name=logfile_name)
    # Raw-image directory: template override, else <work_dir>/raw.
    if not inps.template['raw_image_dir'] is None:
        inps.slc_dir = inps.template['raw_image_dir']
    else:
        inps.slc_dir = os.path.join(inps.work_dir, 'raw')
    project_slc_dir = os.path.join(inps.work_dir, 'raw')
    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_file_name = 'download_gep_csk'
        job_name = inps.custom_template_file.split(os.sep)[-1].split('.')[0]
        job_obj = JOB_SUBMIT(inps)
        # Strip --submit so the re-submitted command does not loop.
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)
        sys.exit(0)
    if not os.path.isdir(project_slc_dir):
        os.makedirs(project_slc_dir)
    os.chdir(inps.slc_dir)
    logger.log(loglevel.INFO, "DATASET: %s",
               str(inps.custom_template_file.split('/')[-1].split(".")[0]))
    logger.log(loglevel.INFO, "DATE: %s",
               datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"))
    # Date range for client-side filtering of catalog results (YYYYMMDD in template).
    start_date = inps.template['ssaraopt.startDate']
    start_date = datetime.datetime.strptime(start_date, '%Y%m%d')
    end_date = inps.template['ssaraopt.endDate']
    end_date = datetime.datetime.strptime(end_date, '%Y%m%d')
    # Bounding box comes from whichever stack processor section the template uses;
    # reorder 'S N W E' into the catalog's 'W,S,E,N' convention.
    if 'stripmapStack.boundingBox' in inps.template:
        bbox = inps.template['stripmapStack.boundingBox']
    else:
        bbox = inps.template['topsStack.boundingBox']
    bbox = bbox.split(' ')
    bbox = '{},{},{},{}'.format(bbox[2], bbox[0], bbox[3], bbox[1])
    # Read GEP credentials by grepping password_config.py through the shell.
    # NOTE(review): credentials end up on curl command lines below, visible in
    # the process list — confirm this is acceptable for the target systems.
    user = subprocess.check_output(
        "grep gepuser $RSMASINSAR_HOME/3rdparty/SSARA/password_config.py |\
 sed 's/\"//g''' | cut -d '=' -f 2",
        shell=True).decode('UTF-8').split('\n')[0]
    passwd = subprocess.check_output(
        "grep geppass $RSMASINSAR_HOME/3rdparty/SSARA/password_config.py |\
 sed 's/\"//g''' | cut -d '=' -f 2",
        shell=True).decode('UTF-8').split('\n')[0]
    # Query the catalog and pull enclosure links out of the Atom feed.
    # NOTE(review): in this non-raw string '\1' is the control char \x01, not a
    # sed backreference, and the inner double quotes around "enclosure" end the
    # shell's double-quoted sed script early — the sed substitution presumably
    # never applies and the python parsing below does the real extraction; the
    # escaping here deserves verification against a live run.
    command_get_list = 'curl -s "https://catalog.terradue.com/csk/search?format=atom&count=1000&bbox={bbox}" |\
 xmllint --format - | grep enclosure | sed "s/.*<link rel="enclosure".*href="\(.*\)"\/>/\1/g"'.format(
        bbox=bbox)
    print(command_get_list)
    data_list = subprocess.check_output(command_get_list, shell=True).decode(
        'UTF-8')  #os.system(command_get_list)
    # Split the feed into per-link chunks and keep the href attribute value.
    data_list = data_list.split('/>\n')
    data_list = [x.split('"')[-2] for x in data_list[0:-1]]
    cmd_all = []
    for data in data_list:
        # Acquisition date is the last 8 digits of the final '_' field in the
        # .h5 file name.
        date = datetime.datetime.strptime(
            data.split('.h5')[0].split('_')[-1][0:8], '%Y%m%d')
        if date >= start_date and date <= end_date:
            # NOTE(review): '${enclosure}' formats to '$<url>'; the shell then
            # expands the undefined '$https' variable to empty, so basename
            # accidentally still sees '://.../file.h5' — confirm and consider
            # '$(basename {enclosure})' instead.
            cmd = 'curl -u {username}:{password} -o $(basename ${enclosure}) {enclosure}'.format(
                username=user, password=passwd, enclosure=data)
            cmd_all.append(cmd)
    # Fetch up to 6 products concurrently; 'mp' is presumably multiprocessing
    # (imported at module level — confirm).
    pool = mp.Pool(6)
    pool.map(os.system, cmd_all)
    pool.close()
    logger.log(loglevel.INFO, "Download Finish")
    logger.log(loglevel.INFO, "------------------------------------")
    return None