import os
import json
import logging
from datetime import datetime

from hysds_commons.job_utils import resolve_hysds_job, submit_hysds_job

# Assumed module logger; get_payload_hash is referenced by create_job below and
# is assumed to be defined elsewhere in this module.
logger = logging.getLogger(__name__)


def create_job(arg, job_queue, wuid=None, job_num=None):
    """Test function for hello world job JSON creation."""

    if wuid is None or job_num is None:
        raise RuntimeError("Need to specify workunit id and job num.")

    # set job type
    job_type = "job-hello_world:master"

    # resolve HySDS job
    params = {
        "dt": datetime.utcnow().isoformat(),
    }
    job = resolve_hysds_job(job_type, job_queue, priority=0, params=params,
                            job_name="%s-%s" % (job_type, params['dt']),
                            payload_hash=get_payload_hash(params))

    # add workflow info
    job['payload']['_sciflo_wuid'] = wuid
    job['payload']['_sciflo_job_num'] = job_num
    logger.info("job: {}".format(json.dumps(job, indent=2)))

    return job
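# Example usage (a minimal sketch; the queue name and sciflo identifiers are
# illustrative placeholders, not values defined in this module):
#
#   job = create_job(None, "factotum-job_worker-small", wuid="wuid-0001", job_num=0)
#   job_id = submit_hysds_job(job)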
def sling_extract_job(sling_extract_version, slc_id, url_type, download_url,
                      queue, archive_file, prod_date, priority, aoi,
                      wuid=None, job_num=None):
    """Map function for spyddder-man sling-extract job."""

    logger.info("sling_extract_job for: %s" % slc_id)

    # set job type
    job_type = "job-spyddder-sling-extract-{}:{}".format(
        url_type, sling_extract_version)

    # resolve HySDS job
    params = {"slc_id": slc_id}
    job = resolve_hysds_job(job_type, queue, priority=priority, params=params,
                            job_name="%s-%s" % (job_type, slc_id))

    # add workflow info
    job['payload']['_sciflo_wuid'] = wuid
    job['payload']['_sciflo_job_num'] = job_num

    # submit immediately and return the job id
    return submit_hysds_job(job)
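# Example usage (a minimal sketch; every argument value below is an illustrative
# placeholder, since this module defines none of these SLC ids, queues, or AOIs):
#
#   job_id = sling_extract_job(
#       "develop",                             # sling_extract_version
#       "S1A_IW_SLC__1SDV_20200101T000000",    # slc_id
#       "scihub",                              # url_type
#       "https://example.com/S1A_slc.zip",     # download_url
#       "factotum-job_worker-large",           # queue
#       "S1A_slc.zip",                         # archive_file
#       "2020-01-01",                          # prod_date
#       0,                                     # priority
#       "AOI_example",                         # aoi
#       wuid="wuid-0001", job_num=0)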
def extract_job(spyddder_extract_version, queue, localize_url, file, prod_name,
                prod_date, priority, aoi, wuid=None, job_num=None):
    """Map function for spyddder-man extract job."""

    if wuid is None or job_num is None:
        raise RuntimeError("Need to specify workunit id and job num.")

    # set job type
    job_type = "job-spyddder-extract:{}".format(spyddder_extract_version)

    # resolve HySDS job
    params = {
        "localize_url": localize_url,
        "file": file,
        "prod_name": prod_name,
        "prod_date": prod_date,
        "aoi": aoi,
    }
    job = resolve_hysds_job(job_type, queue, priority=priority, params=params,
                            job_name="%s-%s-%s" % (job_type, aoi, prod_name))

    # localize to the archive filename if it doesn't match the URL basename
    if os.path.basename(localize_url) != file:
        job['payload']['localize_urls'][0]['local_path'] = file

    # add workflow info
    job['payload']['_sciflo_wuid'] = wuid
    job['payload']['_sciflo_job_num'] = job_num
    logger.info("job: {}".format(json.dumps(job, indent=2)))

    return job
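# Example usage (a minimal sketch; the URL, filenames, and queue are hypothetical):
#
#   job = extract_job(
#       "develop",                                  # spyddder_extract_version
#       "factotum-job_worker-large",                # queue
#       "https://example.com/products/scene.zip",   # localize_url
#       "scene_archive.zip",                        # file (differs from basename,
#                                                   #  so local_path is overridden)
#       "scene_product", "2020-01-01", 0,           # prod_name, prod_date, priority
#       "AOI_example", wuid="wuid-0001", job_num=0)
#   job_id = submit_hysds_job(job)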
def submit_job(job_name, job_params):
    """Resolve and submit a Mozart job from a job params dict."""
    try:
        job_json = resolve_hysds_job(job_params["type"],
                                     job_params["queue"],
                                     job_params["priority"],
                                     job_params["tags"],
                                     job_params["params"],
                                     job_name=job_name,
                                     payload_hash=job_params["payload_hash"],
                                     enable_dedup=job_params["enable_dedup"])
        ident = submit_hysds_job(job_json)
        print("JOB ID: {}".format(ident))
    except Exception as e:
        raise Exception("Failed to submit HySDS job:\nERROR: {0}".format(e))
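# Example usage (a minimal sketch; the job type, queue, and tag values are
# illustrative placeholders, and all keys shown are required by submit_job):
#
#   submit_job("job-hello_world-test", {
#       "type": "job-hello_world:master",
#       "queue": "factotum-job_worker-small",
#       "priority": 0,
#       "tags": ["hello_world"],
#       "params": {},
#       "payload_hash": None,
#       "enable_dedup": True,
#   })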
def construct_job_payload(self, params=None, dataset_id=None, pge_config=None,
                          job_type=None, job_queue=None, payload_hash=None):
    """
    Use resolve_hysds_job to get the job JSON.

    :param params: job parameters dict
    :param dataset_id: optional dataset id, appended to the job name and tags
    :param pge_config: PGE configuration dict; must contain "pge_name"
    :param job_type: HySDS job type
    :param job_queue: HySDS job queue
    :param payload_hash: optional payload hash used for dedup
    :return: resolved job JSON
    """
    if dataset_id is not None:
        job_name = job_type + "_" + pge_config["pge_name"] + "_" + dataset_id
        tags = [pge_config["pge_name"], dataset_id]
    else:
        job_name = job_type + "_" + pge_config["pge_name"]
        tags = [pge_config["pge_name"]]

    try:
        job = resolve_hysds_job(job_type, job_queue, params=params,
                                job_name=job_name, enable_dedup=True,
                                tags=tags, payload_hash=payload_hash)
    except Exception as e:
        raise RuntimeError(
            "Wasn't able to get job JSON from resolve_hysds_job: {}".format(e))

    print(json.dumps(job, sort_keys=True, indent=4, separators=(',', ': ')))
    return job
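# Example usage (a minimal sketch; this is a method, so `obj` stands for whatever
# instance owns it, and the pge_config, job type, and queue values are placeholders):
#
#   job = obj.construct_job_payload(
#       params={"input_dataset": "dataset-0001"},
#       dataset_id="dataset-0001",
#       pge_config={"pge_name": "example_pge"},
#       job_type="job-example_pge:develop",
#       job_queue="factotum-job_worker-small")
#   job_id = submit_hysds_job(job)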