def createRsc(self):
    '''Creates a ROI-PAC style RSC file for the height file with dimensions.'''
    from collections import OrderedDict
    rdict = OrderedDict()

    if not self.geo:
        ##### For processing in radar coordinates
        rdict['LAT_REF1'] = self.minlat
        rdict['LON_REF1'] = self.minlon
        rdict['LAT_REF2'] = self.minlat
        rdict['LON_REF2'] = self.maxlon
        rdict['LAT_REF3'] = self.maxlat
        rdict['LON_REF3'] = self.maxlon
        rdict['LAT_REF4'] = self.maxlat
        rdict['LON_REF4'] = self.minlon
    else:
        ##### For processing in geo coordinates
        rdict['X_FIRST'] = self.minlon
        rdict['Y_FIRST'] = self.maxlat
        # latitude spacing over the file length, longitude spacing over the width
        rdict['Y_STEP'] = (self.minlat - self.maxlat) / (1.0 * self.length)
        rdict['X_STEP'] = (self.maxlon - self.minlon) / (1.0 * self.width)

    rdict['WIDTH'] = self.width
    rdict['FILE_LENGTH'] = self.length
    rdict['RANGE_PIXEL_SIZE'] = self.drg
    rdict['AZIMUTH_PIXEL_SIZE'] = self.daz

    rscname = '{0}.rsc'.format(self.hgt)
    try:
        ts.write_rsc(rdict, rscname)
    except Exception as e:
        toContext(process, ErrorCodes['Write Error'], str(e))
        print(e)
        sys.exit(ErrorCodes['Write Error'])
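# A minimal sketch of the ROI-PAC .rsc layout that ts.write_rsc is assumed to
# produce above: plain text, one whitespace-separated KEY VALUE pair per line.
# The helper name write_rsc_sketch is hypothetical, not the GIAnT API.
def write_rsc_sketch(rdict, rscname):
    with open(rscname, 'w') as fid:
        for key, value in rdict.items():
            # left-justify the key so the values line up in a readable column
            fid.write('{0:<40} {1}\n'.format(key, value))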
def quickrun(process, command):
    exitV = 0
    if os.system(command):
        message = process + ': failed'
        exitV = 1
        toContext(process, exitV, message)
        return
    else:
        message = process + ': completed'
        toContext(process, exitV, message)
def main(fname):
    process = 'driver_swath_stitcher'
    try:
        inps = json.load(open(fname))
        ss = SS()
        if len(inps['files']) < 2:
            print('Expecting at least two input files')
            raise Exception('Expecting at least two input files')
        ss.load_ts(inps['files'])
        ss.create_output(inps['output'])
        ss._niter = inps['niter']
        ss.merge_datasets()
        try:
            os.mkdir(inps['dataset_id'])
        except Exception:
            pass
        met = {}
        dset = {}
        met['product_orig'] = inps['product_orig']
        met['ts_type'] = inps['ts_type']
        met['track_number'] = inps['track_number']
        minlat, maxlat, dlat, minlon, maxlon, dlon = ss.get_common_bbox()
        minlat = round(minlat, 1)
        maxlat = round(maxlat, 1)
        minlon = round(minlon, 1)
        maxlon = round(maxlon, 1)
        # GeoJSON polygon: a single closed ring of the bounding box corners
        dset['location'] = {'type': 'Polygon',
                            'coordinates': [[[minlon, maxlat], [maxlon, maxlat],
                                             [maxlon, minlat], [minlon, minlat],
                                             [minlon, maxlat]]]}
        dset['starttime'] = dt.fromtimestamp(ss._fpo['time'][ss._dates_indx[0]][0]).isoformat()
        dset['endtime'] = dt.fromtimestamp(ss._fpo['time'][ss._dates_indx[0]][-1]).isoformat()
        dset['version'] = inps['version']
        if 'label' in inps:
            dset['label'] = inps['label']
        fp = open(os.path.join(inps['dataset_id'], inps['dataset_id'] + '.met.json'), 'w')
        json.dump(met, fp, indent=4)
        fp.close()
        fp = open(os.path.join(inps['dataset_id'], inps['dataset_id'] + '.dataset.json'), 'w')
        json.dump(dset, fp, indent=4)
        fp.close()
        shutil.move(inps['output'], inps['dataset_id'])
    except Exception as e:
        message = 'driver_swath_stitcher.py: run failed with exception ' + str(e)
        exit = 1
        toContext(process, exit, message)
        raise
def load_pickle(fname='insar.cpk'):
    '''Loads the pickle file from insarApp runs.'''
    import cPickle
    import isce
    import isceobj

    try:
        insarObj = cPickle.load(open(fname, 'rb'))
    except Exception as e:
        print(e)
        toContext(process, ErrorCodes['Pickle Load Error'], str(e))
        sys.exit(ErrorCodes['Pickle Load Error'])
    return insarObj
def run(self, ops):
    filename = ops.inputFile
    self._productList.append(filename)
    #try:
    process = 'InterferogramTrigger'
    try:
        listMeta = self.createMetadata(filename)
        self._sensor = listMeta[0][0].spacecraftName
        #db start
        #self._sensor = 'CSKS4'
        #db end
        self._prepareInterferogram = createPrepareInterferogram(self._sensor)
        self._inputFile = self.createInputFile(listMeta)
        # hack to make isce believe this is the command line
        self._insar = self._insarClass(cmdline=self._inputFile)
        self._insar._insar.unwrappedIntFilename = self._insar._insar.topophaseFlatFilename.replace('.flat', '.unw')
        # these two statements need to be here before configure in order to be set
        self._insar._insar.geocode_bbox = self.createGeoBBox(listMeta)
        self._insar._insar.geocode_list = self.createGeocodeList(self._insar._insar)
        self._insar._configure()
        self._insar.run()
        # here dump the insar object, delete it and reload from file
        self.createPngList(self._insar._insar)
        self.createBrowseImages()
        self.createProductList()
        self.createProductJson(listMeta)
    except Exception as e:
        print(e)
        message = 'InterferogramTrigger.py: run failed with exception ' + str(e)
        exit = 1
        toContext(process, exit, message)
        raise Exception
    exit = 0
    message = 'InterferogramTrigger: completed'
    toContext(process, exit, message)
    return 0
def main():
    import json
    inputs = json.load(open(sys.argv[1]))
    '''
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-i','--input',dest='input',type=str,help='Input json filename containing metadata')
    parser.add_argument('-o','--output',dest='output',type=str,help='Output prefix name for the results')
    parser.add_argument('-p','--project',dest='project',type=str,help='Project that belongs too')
    args = parser.parse_args()
    '''
    process = 'networkSelector'
    message = ''
    exitv = 0
    try:
        outputFile = inputs['networkSelector']['outputFile']
        with open(inputs['networkSelector']['inputFile']) as fp:
            meta = json.load(fp)
        fm = FrameMetadata()
        fm.load(meta)
        sensor = fm.getSpacecraftName()
        tbp, peg = checkPegRegion(fm, inputs['project'])
        message = 'Found complete PEG region'
        if not tbp:
            exitv = 10
            message = 'Complete PEG region not found'
        else:
            tbpNew, pegNew = checkCoherence(tbp, peg, inputs['project'])
            if not tbpNew:
                exitv = 11
                message = 'Coherence below threshold'
            else:
                for i in range(len(tbpNew)):
                    with open(outputFile + '_' + str(i), 'w') as fp:
                        json.dump(toDict(tbpNew[i]), fp, indent=4)
    except Exception as e:
        exitv = 255
        message = 'Failed with exception %s: %s' % (str(e), traceback.format_exc())
    toContext(process, exitv, message)
    return exitv
def main(self):
    self.help()
    import time
    import math
    import isceobj
    from make_raw import make_raw
    timeStart = time.time()
    exit = 0
    process = 'extractMetadata'
    message = 'Info extraction succeeded'
    try:
        makeRaw = make_raw()
        makeRaw.wireInputPort(name='sensor', object=self.sensor)
        makeRaw.wireInputPort(name='doppler', object=self.doppler)
        makeRaw.make_raw()
    except Exception as e:
        exit = 1
        message = 'make_raw failed with exception ' + str(e)
        toContext(process, exit, message)
        raise Exception
    try:
        self.frame = makeRaw.getFrame()
        self.frame._squintAngle = math.radians(makeRaw.getSquint())
        self.frame.doppler = makeRaw.dopplerValues.getDopplerCoefficients()[0]
        self.frame.datasetType = self.datasetType
        fie = FIE()
        frameInfo = fie.extractInfoFromFrame(self.frame)
    except Exception as e:
        exit = 2
        message = 'extractInfoFromFrame failed with exception ' + str(e)
        toContext(process, exit, message)
        raise Exception
    try:
        if frameInfo:
            frameInfo.dump(self.metadataFile)
        dummyFile = self.frame.getImage().getFilename()
        os.system("rm -rf " + dummyFile + "*")
    except Exception as e:
        exit = 3
        message = 'saving metadata file failed with exception ' + str(e)
        toContext(process, exit, message)
        raise Exception
    # if it gets here return 0
    toContext(process, exit, message)
    return 0
def run(self, inputs):
    filename = inputs['createInterferogram']['inputFile']
    if 'productList' in inputs['createInterferogram']:
        self._productListAux = inputs['createInterferogram']['productList']
    self._productList.append(filename)
    #try:
    process = 'Interferogram'
    try:
        listMeta = self.createMetadata(filename)
        self._sensor = listMeta[0][0].spacecraftName
        #db start
        #self._sensor = 'CSKS4'
        #db end
        self._prepareInterferogram = createPrepareInterferogram(self._sensor)
        self._inputFile = self.createInputFile(listMeta, inputs)
        # hack to make isce believe this is the command line
        self._insar = self._insarClass(cmdline=self._inputFile)
        self._insar.configure()
        # these two statements need to be here before configure in order to be set
        self._insar._insar.geocode_bbox = self.createGeoBBox(listMeta)
        #self._insar._insar.geocode_list = self.createGeocodeList(self._insar._insar)
        self._insar._configure()
        self._insar.run()
        # here dump the insar object, delete it and reload from file
        pk.dump(self._insar._insar, open(self._insarPckName, 'wb'))
        self.createPngList(self._insar._insar)
        self.createBrowseImages()
        self.createProductList()
        self.createProductJson(listMeta)
    except Exception as e:
        message = 'Interferogram.py: run failed with exception ' + str(e)
        exit = 1
        toContext(process, exit, message)
        raise
    exit = 0
    message = 'Interferogram: completed'
    toContext(process, exit, message)
    return 0
def download(self, model=None, dirname=None):
    '''Download weather model data. If files already exist, data is not downloaded.'''
    self.model = model.upper()

    # If the download directory doesn't exist, create it
    if not os.path.isdir(dirname):
        os.mkdir(dirname)

    for utc, which in zip([self.masterdate, self.slavedate], ['Master', 'Slave']):
        datestr, hr = utc2datehour(utc, model=self.model)
        try:
            fname = Downloader[model]([datestr], "{0:02d}".format(hr), dirname)
        except Exception as e:
            print(e)
            toContext(process, ErrorCodes['Download Not Available' + ' ' + which], str(e))
            sys.exit(ErrorCodes['Download Not Available' + ' ' + which])
        self.flist.append(fname[0])
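# A minimal sketch of the Downloader dispatch assumed by download() above: a
# mapping from model name to a callable taking (list of date strings, two-digit
# hour string, output directory) and returning the list of downloaded paths.
# The fetch_ecmwf name, the file naming, and Downloader_sketch are hypothetical,
# not the real downloader module.
def fetch_ecmwf(datelist, hour, dirname):
    paths = []
    for datestr in datelist:
        path = os.path.join(dirname, 'ERA-Int_{0}_{1}.grb'.format(datestr, hour))
        # the real downloader would retrieve the GRIB file here if it is missing
        paths.append(path)
    return paths

Downloader_sketch = {'ECMWF': fetch_ecmwf}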
def correctUnw(self, prefix):
    '''Correct the unwrapped interferogram using the estimated delay map.'''
    try:
        # channel 1 of the BIL file is amplitude, channel 2 is unwrapped phase
        Oamp = ts.load_mmap(self.unw, self.width, self.length, map='BIL',
                            nchannels=2, channel=1, datatype=np.float32, quiet=True)
        Oifg = ts.load_mmap(self.unw, self.width, self.length, map='BIL',
                            nchannels=2, channel=2, datatype=np.float32, quiet=True)
    except Exception as e:
        toContext(process, ErrorCodes['Memmap Error'], str(e))
        print(e)
        sys.exit(ErrorCodes['Memmap Error'])

    try:
        Aifg = ts.load_mmap(self.outfile + '.rdr', self.width, self.length,
                            datatype=np.float32, quiet=True)
    except Exception as e:
        toContext(process, ErrorCodes['Memmap Error'], str(e))
        print(e)
        sys.exit(ErrorCodes['Memmap Error'])

    corrname = '{0}_{1}'.format(prefix, self.unw)
    try:
        fout = open(corrname, 'w')
        for kk in xrange(self.length):
            dat = Oifg[kk, :] * Aifg[kk, :]
            amp = Oamp[kk, :]
            # write the corrected file line by line in the same BIL layout
            amp.tofile(fout)
            dat.tofile(fout)
    except Exception as e:
        toContext(process, ErrorCodes['Write Error'], str(e))
        print(e)
        sys.exit(ErrorCodes['Write Error'])
    fout.close()

    ifImage = IF.createImage()
    accessMode = 'read'
    dataType = 'CFLOAT'
    ifImage.initImage(corrname, accessMode, self.width, dataType)
    descr = 'Tropospheric corrected unwrapped flattened interferogram'
    ifImage.setImageType('cpx')
    ifImage.addDescription(descr)
    ifImage.renderHdr()

    self.insar._insar.correctedTopophaseFilename = corrname
    fp = open(self.pickle, 'w')
    cp.dump(self.insar, fp)
    fp.close()
    toContext(process, 0, 'pyAPSCorrections applied')
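# A minimal sketch, assuming the .unw file handled by correctUnw is float32 BIL
# with two channels per line (amplitude, then unwrapped phase), which is the
# layout ts.load_mmap is asked for above. read_unw_bil_sketch is hypothetical.
def read_unw_bil_sketch(fname, width, length):
    import numpy as np
    data = np.memmap(fname, dtype=np.float32, mode='r', shape=(length, 2, width))
    amp = data[:, 0, :]   # channel 1: amplitude
    phs = data[:, 1, :]   # channel 2: unwrapped phase
    return amp, phs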
def extractFeatures(inputs, label):
    process = 'extractFeatures'
    cwd = None
    try:
        url = inputs['url']
        # otherwise prdbase gets messed up
        if url.endswith('/'):
            url = url[:-1]
        urlsplit = url.split('/')
        prdbase = (urlsplit[-2] + '_' + urlsplit[-1]).replace('__', '_')
        product = 'images_' + prdbase
        try:
            os.mkdir(product)
        except Exception:
            pass
        fe = FE(url, product, .4)
        res = fe.extractFeatures()
        cwd = os.getcwd()
        os.chdir(product)
        json.dump({'url': url, 'label': label}, open(product + '.met.json', 'w'))
        for k, v in res['outputs'].items():
            for k1, v1 in v.items():
                v1.tofile(k1 + '_choTh_' + str(k) + '.img')
        os.chdir(cwd)
    except Exception as e:
        if cwd:
            os.chdir(cwd)
        exitv = 10
        message = 'Failed with exception %s: %s' % (str(e), traceback.format_exc())
        toContext(process, exitv, message)
def main():
    ariahome = os.environ['ARIAMH_HOME']
    inputs = json.load(open(sys.argv[1]))
    meta = inputs['metaFile']
    inputFile = os.path.join(ariahome, 'conf', 'velocityMapParams.json')

    processes = ["runStack:getMetadata", "runStack:stageInterferograms",
                 "runStack:runQA", "runStack:getAuxData",
                 "runStack:prepGIAnT_cali"]
    commands = ['getMetadata.py ' + sys.argv[1],
                'stageInterferograms.py ' + meta,
                'runQA.py ' + inputFile,
                'getAuxData.py ' + inputFile,
                'prepGIAnT_cali.py ' + inputFile]
    for process, command in zip(processes, commands):
        quickrun(process, command)

    try:
        os.system('cp context.json ./GIAnT')
        os.chdir('./GIAnT')
    except Exception:
        toContext("runStack:chdir", 1, "Failed chdir to GIAnT")

    processes = ["runStack:prepxml", "runStack:PrepIgramStack",
                 "runStack:ProcessStack", "runStack:TimefnInvert"]
    commands = ['python prepxml.py', 'PrepIgramStack.py',
                'ProcessStack.py', 'TimefnInvert.py']
    for process, command in zip(processes, commands):
        quickrun(process, command)

    finalize(inputs['productName'], meta)
def finalize(prdName, meta):
    import shutil
    fp = open('../valid.list')
    pair = fp.readlines()[0].split()[0]
    fp.close()

    # extract velocity from the results
    los = 'LOS_velocity.geo'
    command = 'extractVelocity.py -i Stack/TS-PARAMS.h5 -o ' + los + ' -x ../insar/' + pair + '/insarProc.xml'
    process = "extractVelocity"
    quickrun(process, command)

    exitV = 0
    dirName = prdName
    # create the product dir
    try:
        os.mkdir(dirName)
    except Exception:
        exitV = 1
        toContext("runStack:finalize", exitV, "Failed to create product directory")

    # create .met.json
    fp = open(os.path.join(dirName, prdName + '.met.json'), 'w')
    json.dump({'losVelocity': los, 'interferograms': 'ifg.list'}, fp, indent=4)
    fp.close()

    # create a png from the velocity map and move all the products into the product dir
    try:
        createImage('mdx.py -P ' + los, los)
        productList = ['ifg.list', '../' + meta]
        listFiles = os.listdir('./')
        for fl in listFiles:
            if fl.count('.geo'):
                productList.append(fl)
        # just in case the default self._inputFile has been modified
        for fileName in productList:
            shutil.move(fileName, dirName)
    except Exception:
        exitV = 1
        toContext("runStack:finalize", exitV,
                  "Failed to create image or move products to product directory")

    # move the product dir up
    try:
        shutil.move(dirName, '../')
    except Exception:
        toContext("runStack:finalize", exitV, "Failed to move product directory")

    # move up
    os.system('mv context.json ../')
    os.chdir('../')
def extractFeatures(infile):
    process = 'extractFeatures'
    try:
        inputs = json.load(open(infile))
        url = inputs['url']
        # otherwise prdbase gets messed up
        if url.endswith('/'):
            url = url[:-1]
        urlsplit = url.split('/')
        # need to be consistent with the naming convention but we are not
        if url.count('CSK'):
            prdbase = (urlsplit[-2] + '_' + urlsplit[-1]).replace('__', '_')
        elif url.count('S1'):
            prdbase = urlsplit[-1]
        product = 'features_' + prdbase
        fe = FE(url, product)
        res = fe.extractFeatures()
    except Exception as e:
        exitv = 10
        message = 'Failed with exception %s: %s' % (str(e), traceback.format_exc())
        toContext(process, exitv, message)
        sys.exit(1)

    # Get the default version
    try:
        version = re.search(r"/(v[^/]+)/", url).group(1)
    except Exception as e2:
        print("Failed to get version from URL. Using extractor product version. {0}.{1}".format(type(e2), e2))
        version = "v1.0"

    try:
        os.mkdir(product)
    except Exception:
        pass
    cwd = os.getcwd()
    os.chdir(product)
    uu = UrlUtils()
    try:
        command = 'curl -k -f -u' + uu.dav_u + ':' + uu.dav_p + ' -O ' + os.path.join(url, prdbase + '.met.json')
        print(command)
        os.system(command)
    except Exception:
        os.chdir(cwd)
        exitv = 11
        message = 'Failed to download metadata for ' + prdbase
        toContext(process, exitv, message)
        sys.exit(1)

    if not os.path.exists(prdbase + '.met.json'):
        try:
            newdl = prdbase.replace("-" + version, "")
            print("Met JSON not found, attempting to grab:", newdl)
            command = 'curl -k -f -u' + uu.dav_u + ':' + uu.dav_p + ' -O ' + os.path.join(url, newdl + '.met.json')
            print(command)
            os.system(command)
        except Exception:
            pass

    try:
        command = 'curl -k -f -u' + uu.dav_u + ':' + uu.dav_p + ' -O ' + os.path.join(url, prdbase + '.dataset.json')
        print(command)
        os.system(command)
    except Exception as e:
        print("Failed to download datasets.json. Ignoring. {0}.{1}".format(type(e), e))

    try:
        toAdd = json.load(open(prdbase + '.met.json'))
        for key in ['orbit', 'tags', 'inputFile', 'input_has_id',
                    'product_type', 'dataset_type', 'orbitNumber']:
            try:
                # these keys are not needed in the feature product
                del toAdd[key]
            except Exception:
                pass
        res.update(toAdd)
        os.remove(prdbase + '.met.json')
        try:
            dset = json.load(open(prdbase + '.dataset.json'))
            version = dset["version"]
            os.remove(prdbase + '.dataset.json')
        except Exception as e:
            print("Failed to get version from dataset. Using URL. {0}.{1}".format(type(e), e))
        res["interferogram_version"] = version
        res["interferogram_id"] = dset.get("label", prdbase)
        with open(product + '.met.json', 'w') as fp:
            json.dump(res, fp, indent=True)
        dset["creation_timestamp"] = datetime.datetime.now().isoformat()
        dset["label"] = product
        with open(os.path.join(os.path.dirname(__file__), "..", "conf", "dataset_versions.json"), "r") as fp:
            dset["version"] = json.load(fp).get("features", "v1.0")
        with open(product + '.dataset.json', 'w') as fp:
            json.dump(dset, fp, indent=True)
    except Exception as e:
        print("[ERROR] Exception occurred. {0}:{1}\n{2}".format(e, type(e), traceback.format_exc()))
        os.chdir(cwd)
        exitv = 12
        message = 'Failed to create metadata file for ' + product
        toContext(process, exitv, message)
        sys.exit(1)

    exitv = 0
    os.chdir(cwd)
    message = 'Extract features finished with no errors.'
    toContext(process, exitv, message)
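# The authenticated fetches above all build the same curl command; a minimal
# sketch of that pattern as a helper, assuming UrlUtils exposes dav_u/dav_p as
# used above (the fetch_with_dav name is hypothetical, not part of the code).
def fetch_with_dav(uu, url, fname):
    command = 'curl -k -f -u' + uu.dav_u + ':' + uu.dav_p + ' -O ' + os.path.join(url, fname)
    print(command)
    return os.system(command) == 0   # True if curl reported success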
def estimateDelay(self):
    '''Estimate the delay using PyAPS.'''
    self.outfile = 'corrections_pyAPS_%s' % (self.model)

    print('Processing master delay')
    delay = np.zeros((self.length, self.width), dtype=np.float32)
    if not self.geo:
        ###### Processing in radar coordinates
        try:
            atmobj = pyaps.PyAPS_rdr(self.flist[0], self.hgt, grib=self.model, demfmt='HGT')
            atmobj.getgeodelay(delay, lat=self.lat, lon=self.lon, inc=self.inc, wvl=self.wvl)
        except Exception as e:
            toContext(process, ErrorCodes['Master Delay Estimation Failed'], str(e))
            print(e)
            sys.exit(ErrorCodes['Master Delay Estimation Failed'])
    else:
        ###### Processing in geo coordinates
        try:
            atmobj = pyaps.PyAPS_geo(self.flist[0], self.hgt, grib=self.model, demfmt='HGT')
            atmobj.getdelay(delay, inc=self.inc, wvl=self.wvl)
        except Exception as e:
            toContext(process, ErrorCodes['Master Delay Estimation Failed'], str(e))
            print(e)
            sys.exit(ErrorCodes['Master Delay Estimation Failed'])
    del atmobj

    if np.isnan(np.sum(delay)):
        print("pyAPSCorrect.py estimateDelay: found NaN. Aborting")
        toContext(process, ErrorCodes['NaN Present'],
                  "pyAPSCorrect.py estimateDelay: found NaN. Aborting")
        sys.exit(ErrorCodes['NaN Present'])

    print('Processing slave delay')
    delay_slav = np.zeros((self.length, self.width), dtype=np.float32)
    if not self.geo:
        try:
            atmobj = pyaps.PyAPS_rdr(self.flist[1], self.hgt, grib=self.model, demfmt='HGT')
            atmobj.getgeodelay(delay_slav, lat=self.lat, lon=self.lon, inc=self.inc, wvl=self.wvl)
        except Exception as e:
            print(e)
            toContext(process, ErrorCodes['Slave Delay Estimation Failed'], str(e))
            sys.exit(ErrorCodes['Slave Delay Estimation Failed'])
    else:
        try:
            atmobj = pyaps.PyAPS_geo(self.flist[1], self.hgt, grib=self.model, demfmt='HGT')
            atmobj.getdelay(delay_slav, inc=self.inc, wvl=self.wvl)
        except Exception as e:
            toContext(process, ErrorCodes['Slave Delay Estimation Failed'], str(e))
            print(e)
            sys.exit(ErrorCodes['Slave Delay Estimation Failed'])
    del atmobj

    if np.isnan(np.sum(delay_slav)):
        print("pyAPSCorrect.py estimateDelay: found NaN. Aborting")
        toContext(process, ErrorCodes['NaN Present'],
                  "pyAPSCorrect.py estimateDelay: found NaN. Aborting")
        sys.exit(ErrorCodes['NaN Present'])

    # differential (master minus slave) delay
    delay -= delay_slav
    del delay_slav

    try:
        #import pdb
        #pdb.set_trace()
        self.insar._insar.correctionsFilename = self.outfile + '.rdr'
        delay.astype(np.float32).tofile(self.insar._insar.correctionsFilename)
        ifImage = IF.createImage()
        accessMode = 'read'
        dataType = 'FLOAT'
        ifImage.initImage(self.insar._insar.correctionsFilename, accessMode, self.width, dataType)
        descr = 'Tropospheric corrections'
        ifImage.setImageType('sbi')
        ifImage.addDescription(descr)
        ifImage.renderHdr()
    except Exception as e:
        toContext(process, ErrorCodes['Write Error'], str(e))
        print(e)
        sys.exit(ErrorCodes['Write Error'])

    # express the differential delay as a complex phasor
    cJ = np.complex64(1.0j)
    delay = np.exp(cJ * delay)
    try:
        delay.tofile(self.outfile + '.mph')
    except Exception as e:
        toContext(process, ErrorCodes['Write Error'], str(e))
        print(e)
        sys.exit(ErrorCodes['Write Error'])

    # since sometimes this is the only stage executed, dump the pickle;
    # if there are other stages it will be overwritten
    fp = open(self.pickle, 'w')
    cp.dump(self.insar, fp)
    fp.close()
    toContext(process, 0, 'pyAPSCorrections delay created')
def trainPredictor(infile):
    process = 'trainPredictor'
    # fix the random seed to ensure reproducibility
    np.random.seed(seed=train_state)
    inputs = loadjson(infile)
    outputs = {}
    outbase = 'predictor%s' % mdy
    cwd = os.getcwd()
    try:
        clfinputs = {}
        clfinputs['clf_file'] = inputs['clf_name'] + '.pkl'
        clfinputs['clf_type'] = inputs['clf_type']
        clfinputs['classmap'] = loadClassmap(inputs["classmap_file"])
        clfinputs['features'] = loadjson(inputs["feat_file"])
        inputurls = inputs.pop('urls', [])
        crossvalidate = inputs.pop('crossvalidate', 0)
        saveclf = inputs.pop('saveclf', 0)
        cacheoutput = inputs.pop('cacheoutput', 0)
        if not pathexists(outbase):
            os.mkdir(outbase)
        if cacheoutput and not pathexists(pathjoin(outbase, cache_dir)):
            os.mkdir(pathjoin(outbase, cache_dir))
        os.chdir(outbase)
    except Exception as e:
        exitv = 10
        message = 'IO Preprocessing failed with exception %s: %s' % (str(e), traceback.format_exc())
        toContext(process, exitv, message)
        sys.exit(1)

    try:
        trdat = collectTrainingData(inputurls, clfinputs, cache=cacheoutput)
        X, y = trdat['X'], trdat['y']
        traintags, trainurls = trdat['traintags'], trdat['trainurls']
        errors, skiplist = trdat['errors'], trdat['skiplist']
        print('loaded %d training samples (%d skipped)' % (len(y), len(skiplist)))
    except Exception as e:
        exitv = 11
        message = 'Training data collection failed with exception %s: %s' % (str(e), traceback.format_exc())
        toContext(process, exitv, message)
        sys.exit(1)

    try:
        if crossvalidate:
            cvoutpkl = "cvout.pkl"
            cvlogfile = 'cvout.log'
            print('evaluating model via %s cross-validation (logfile=%s)...' % (cv_type, cvlogfile))
            starttime = time.time()
            cvout = crossValidatePredictor(X, y, clfinputs, logfile=cvlogfile)
            outputs['cv_time'] = time.time() - starttime
            outputs['cv_out'] = cvoutpkl
            outputs['cv_log'] = cvlogfile
            with open(cvoutpkl, 'wb') as fid:
                pickle.dump(cvout, fid)
            print('done, output saved to %s.' % cvoutpkl)
    except Exception as e:
        exitv = 12
        message = 'Cross-validation failed with exception %s: %s' % (str(e), traceback.format_exc())
        toContext(process, exitv, message)
        sys.exit(1)

    try:
        if saveclf:
            starttime = time.time()
            clf, clfcv = train(X, y, clfinputs)
            clffile = clfinputs['clf_file']
            if clffile[0] != '/':
                clffile = pathjoin(cwd, clffile)  # path relative to cwd
            clfjson = clffile.replace('.pkl', '.json')
            outputs['clf_time'] = time.time() - starttime
            outputs['clf_file'] = clffile
            print("training classifier using all available data for deployment...")
            with open(clffile, 'wb') as fid:
                pickle.dump(clf, fid)
            with open(clfjson, 'w') as fid:
                json.dump(clfinputs, fid)
            print('done, output saved to %s.' % clffile)
    except Exception as e:
        exitv = 13
        message = 'Classifier training failed with exception %s: %s' % (str(e), traceback.format_exc())
        toContext(process, exitv, message)
        sys.exit(1)

    try:
        json.dump(outputs, open(outbase + '.met.json', 'w'), indent=True)
    except Exception:
        os.chdir(cwd)
        exitv = 14
        message = 'Failed to create metadata file for ' + outbase
        toContext(process, exitv, message)
        sys.exit(1)

    exitv = 0
    os.chdir(cwd)
    message = 'trainPredictor finished with no errors.'
    toContext(process, exitv, message)
def main():
    process = 'validation'
    cwd = os.getcwd()
    try:
        inputs = json.load(open(sys.argv[1]))
        clf_json = inputs['clf_json']
        url = inputs['feat_url']
        # otherwise prdbase gets messed up
        if url.endswith('/'):
            url = url[:-1]
        uu = UrlUtils()
        urlsplit = url.split('/')
        if urlsplit[-2].startswith("feature"):
            feat_json = (urlsplit[-2] + '_' + urlsplit[-1]).replace('__', '_') + '.met.json'
        else:
            feat_json = urlsplit[-1] + ".met.json"
        print(url, urlsplit, feat_json)
        try:
            command = 'curl -k -f -u' + uu.dav_u + ':' + uu.dav_p + ' -O ' + os.path.join(url, feat_json)
            os.system(command)
        except Exception:
            exitv = 11
            message = 'Failed to download metadata for ' + feat_json
            toContext(process, exitv, message)
            sys.exit(1)
        product = feat_json.replace('features_', 'validation_').replace('.met.json', '')
        p = Predictor(clf_json)
        pred, lab = p.predict(feat_json)[0]
        try:
            os.mkdir(product)
        except Exception:
            pass
        os.chdir(product)
        res = {'prob': pred,
               'pred_lab': lab,
               'orig_prod': feat_json.replace('features_', '').replace('.met.json', '')}
        # for provenance add the inputs used for the run
        toAdd = json.load(open(os.path.join(cwd, clf_json)))
        res.update(toAdd)
        # move the feat_file to the product dir so it gets picked up
        os.system('mv ' + os.path.join('..', toAdd['feat_file']) + ' ./')
        json.dump(res, open(product + '.met.json', 'w'), indent=True)
    except Exception as e:
        os.chdir(cwd)
        exitv = 10
        message = 'Failed with exception %s: %s' % (str(e), traceback.format_exc())
        toContext(process, exitv, message)
        sys.exit(1)
    os.chdir(cwd)
    exitv = 0
    message = 'Validation finished with no errors.'
    toContext(process, exitv, message)