def findFrame():
    """Scan CSK acquisitions (both look directions, all four spacecraft) and
    return the URL of the first frame that passes both the PEG-region check
    and the coherence check.

    Returns:
        The `url` attribute of the first qualifying FrameMetadata, or None
        (implicitly) when no frame qualifies.
    """
    # NOTE(review): the original code assigned `exit = 10` (shadowing the
    # builtin) on every failed check; those assignments were dead code and
    # have been replaced by plain `continue`. Unused `totFound`/`metList`
    # locals were also dropped. Control flow is otherwise unchanged.
    for direction in ['asc', 'dsc']:
        for sc in ['CSKS1', 'CSKS2', 'CSKS3', 'CSKS4']:
            params = {
                'spacecraftName': sc,
                'direction': direction,
                'responseGroups': 'Medium'
            }
            resp, cnt = requester.request("%s?%s" % (rest_url, urlencode(params)))
            if resp['status'] != PegRegionChecker.STATUS_QUERY_OK:
                continue
            results = json.loads(cnt)
            if not results['result']:
                continue
            for result in results['result']:
                fm = FrameMetadata()
                fm.load(result)
                tbp, peg = checkPegRegion(sc, fm)
                if not tbp:
                    # PEG region incomplete for this frame; try the next one
                    continue
                tbpNew, pegNew = checkCoherence(tbp, peg)
                if tbpNew:
                    return fm.url
def createMetaObjects(metaList):
    """Convert each raw metadata dict in *metaList* into a FrameMetadata.

    Returns a list of loaded FrameMetadata objects, in input order.
    """
    objects = []
    for entry in metaList:
        frame_meta = FrameMetadata()
        frame_meta.load(entry)
        objects.append(frame_meta)
    return objects
def main():
    # Debug driver for PegRegionChecker: drops straight into the debugger.
    # NOTE(review): pdb.set_trace() is a development leftover — this main()
    # cannot run unattended.
    import pdb
    pdb.set_trace()
    PR = PegRegionChecker()
    # Load the test peg file and force the sensor type for this run.
    PR.initPegList('pegfile_alos.txt_test')
    PR._sensorType = "ALOS"
    #list1 = PR.mockRestCall()
    #list2 = PR.mockRestCall()
    FM = FrameMetadata()
    # argv[1]: path/metadata accepted by FrameMetadata.load — TODO confirm format.
    FM.load(sys.argv[1])
    tbp, peg = PR.runNominalMode(FM)
    print('dummy')
    # NOTE(review): the triple-quote below appears to open/close a
    # commented-out region whose counterpart is outside this view — verify
    # against the full file before removing.
    '''
def createMetadata(self, filename):
    """Load a JSON file containing a list of frame-metadata lists.

    Args:
        filename: path to a JSON file whose top level is a list of lists of
            frame metadata dicts.

    Returns:
        A list of lists of FrameMetadata objects mirroring the input nesting.
    """
    # Fix: use a context manager so the file is closed even if json.load raises
    # (the original used open()/close() with no try/finally).
    with open(filename) as fp:
        listMeta = json.load(fp)
    ret = []
    for listFrame in listMeta:
        frames = []
        for frame in listFrame:
            fm = FrameMetadata()
            fm.load(frame)
            frames.append(fm)
        ret.append(frames)
    return ret
def main():
    """networkSelector entry point: read input metadata, run the PEG-region
    and coherence checks, and dump the selected stacks to output files.

    Returns an exit value: 0 on success, 10 if no complete PEG region was
    found, 11 if coherence is below threshold, 255 on any exception.
    Status is always reported exactly once via toContext().
    """
    import json
    # Fix: close the input file deterministically (was a bare open() with no close).
    with open(sys.argv[1]) as inp:
        inputs = json.load(inp)
    '''
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-i','--input',dest='input',type=str,help='Input json filename containing metadata')
    parser.add_argument('-o','--output',dest='output',type=str,help='Output prefix name for the results')
    parser.add_argument('-p','--project',dest='project',type=str,help='Project that belongs too')
    args = parser.parse_args()
    '''
    process = 'networkSelector'
    message = ''
    exitv = 0
    try:
        outputFile = inputs['networkSelector']['outputFile']
        with open(inputs['networkSelector']['inputFile']) as fp:
            meta = json.load(fp)
        fm = FrameMetadata()
        fm.load(meta)
        sensor = fm.getSpacecraftName()
        tbp, peg = checkPegRegion(fm, inputs['project'])
        message = 'Found complete PEG region'
        if not tbp:
            exitv = 10
            message = 'Not found complete PEG region'
        else:
            tbpNew, pegNew = checkCoherence(tbp, peg, inputs['project'])
            if not tbpNew:
                exitv = 11
                message = 'Coherence below threshold'
            else:
                # One output file per selected stack: <outputFile>_<i>
                for i in range(len(tbpNew)):
                    with open(outputFile + '_' + str(i), 'w') as fp:
                        json.dump(toDict(tbpNew[i]), fp, indent=4)
    except Exception as e:
        exitv = 255
        message = 'Failed with exception %s: %s' % (str(e), traceback.format_exc())
    # Fix: the original called toContext() both inside the except handler and
    # here, reporting the failure twice. Report exactly once, for every path.
    toContext(process, exitv, message)
    return exitv
def extractInfo(self):
    """Build a FrameMetadata object from self._frame.

    Copies timing, platform, doppler, orbit and numbering fields from the
    frame, derives track/orbit-repeat/platform info, computes the baseline
    (twice if a master appeared concurrently), latitude indices and frame ID.

    Returns:
        The populated FrameMetadata.

    Raises:
        Re-raises whatever exception occurred during extraction (the original
        code raised a bare ``Exception``, losing the cause and traceback).
    """
    try:
        fm = FrameMetadata()
        fm._dataset_type = self._frame.datasetType
        fm._sensingStart = self._frame.getSensingStart()
        fm._sensingStop = self._frame.getSensingStop()
        fm._spacecraftName = self._frame.getInstrument().getPlatform(
        ).getSpacecraftName()
        # Spacecraft name may arrive as bytes; best-effort decode.
        try:
            fm._spacecraftName = fm._spacecraftName.decode('utf-8')
        except Exception:
            pass
        fm._lookDirection = self._lookDirectionMap[
            self._frame.getInstrument().getPlatform().pointingDirection]
        fm._doppler = self._frame.doppler
        fm._prf = self._frame.PRF
        fm._startingRange = self._frame.startingRange
        uorb = self._frame.orbit._unpackOrbit()
        fm._orbit = uorb
        # These fields are sometimes an empty string; fall back to None when
        # the int conversion (or decode) fails.
        try:
            fm._frameNumber = int(self._frame.getFrameNumber())
        except Exception:
            fm._frameNumber = None
        try:
            fm._orbitNumber = int(self._frame.getOrbitNumber())
        except Exception:
            fm._orbitNumber = None
        try:
            fm._beamID = self._frame.getInstrument().getBeamNumber()
            fm._beamID = fm._beamID.decode('utf-8')
        except Exception:
            fm._beamID = None
        try:
            fm._trackNumber = int(self._frame.getTrackNumber())
        except Exception:
            fm._trackNumber = None
        self.extractTrack(fm)
        self.extractOrbitRepeat(fm)
        self.extractPlatform(fm)
        if self.computeBaseline(fm):
            # double check if during the baseline computation somebody else
            # became a master
            if (self.masterExists(fm) and fm.reference):
                # if so recompute the baseline
                self.computeBaseline(fm)
        self.computeLatitudeIndeces(fm)
        self.computeFrameID(fm)
    except Exception as e:
        print(e)
        # Fix: re-raise the original exception instead of `raise Exception`,
        # preserving the type, message and traceback for the caller.
        raise
    return fm
def computeBaseline(self, fm):
    """Compute the interferometric baseline of *fm* against the existing
    reference frame (if any) found via a GRQ query.

    Side effects on fm: sets _bbox, _refbbox, _direction, reference,
    horizontalBaseline, verticalBaseline, totalBaseline.

    Returns True when the query succeeded (baseline set, possibly [0,0,0]
    when fm becomes the new reference), False when the query failed.
    """
    ret = True
    oi = OrbitInfo(fm)
    # NOTE(review): `requester` and `rest_url` are never used below, and this
    # initial `fmRef` is overwritten before use — candidates for removal.
    requester = Http()
    uu = UrlUtils()
    rest_url = uu.rest_url
    fmRef = FrameMetadata()
    # just need an estimate
    bbox, dummy = self.calculateCorners()
    fm._bbox = []
    fm._refbbox = []
    # Round corners to 2 decimals; refbbox starts as a copy of bbox.
    for bb in bbox:
        fm._bbox.append(
            [round(bb.getLatitude(), 2), round(bb.getLongitude(), 2)])
        fm._refbbox.append(
            [round(bb.getLatitude(), 2), round(bb.getLongitude(), 2)])
    if (fm._bbox[0][0] < fm._bbox[2][0]):
        # if latEarly < latLate then asc otherwise dsc
        fm._direction = 'asc'
    else:
        fm._direction = 'dsc'
    baseline = [0, 0, 0]
    # NOTE(review): `uu` is re-created here although it already exists above.
    uu = UrlUtils()
    extremes = fm.getExtremes(fm.bbox)
    latMin = extremes[0]
    latMax = extremes[1]
    # Pad the latitude-index search window by a third of the frame's extent.
    latDelta = (latMax - latMin) / 3.
    latitudeResolution = .1
    params = {
        'sensor': fm.platform,
        'trackNumber': fm.trackNumber,
        'dataset_type': fm.dataset_type,
        'latitudeIndexMin': int(math.floor((latMin - latDelta) / latitudeResolution)),
        'latitudeIndexMax': int(math.ceil((latMax + latDelta) / latitudeResolution)),
        'direction': fm.direction,
        'system_version': uu.version,
        'lookDirection': fm.lookDirection,
        'reference': True
    }
    if fm.beamID:
        params['beamID'] = fm.beamID
    #print("params", params)
    query = buildQuery(params, ['within'])
    #print("query: %s" % json.dumps(query, indent=2))
    metList, status = postQuery(query)
    # if empty no results available
    if status:
        metObj = createMetaObjects(metList)
        if metObj:
            # A reference already exists: compute the baseline against it.
            # there should be only one result
            if (len(metObj) > 1):
                print(
                    "WARNING FrameInfoExtractor: Expecting only one frame to be reference"
                )
            fmRef = metObj[0]
            oiRef = OrbitInfo(fmRef)
            oi.computeBaseline(oiRef)
            bl = oi.getBaseline()
            baseline = [bl['horz'], bl['vert'], bl['total']]
            fm.refbbox = fmRef.refbbox
            fm.reference = False
            # Recompute fm's bbox/direction from the calculated corners.
            fm._bbox = []
            for bb in bbox:
                fm._bbox.append([
                    round(bb.getLatitude(), 2),
                    round(bb.getLongitude(), 2)
                ])
            if (fm._bbox[0][0] < fm._bbox[2][0]):
                # if latEarly < latLate then asc otherwise dsc
                fm._direction = 'asc'
            else:
                fm._direction = 'dsc'
        else:
            # No reference found: fm becomes the reference. Expand its bbox
            # outward by self._buffer along both edge directions to form the
            # reference bbox.
            import numpy as np
            fm.reference = True
            pos = np.array(fm._bbox)
            d10 = pos[1] - pos[0]
            d30 = pos[3] - pos[0]
            d23 = pos[2] - pos[3]
            d21 = pos[2] - pos[1]
            pos[0] += self._buffer * (-d10 - d30)
            pos[1] += self._buffer * (d10 - d21)
            pos[2] += self._buffer * (d23 + d21)
            pos[3] += self._buffer * (-d23 + d30)
            fm._refbbox = pos.tolist()
        # baseline is [0, 0, 0] when fm is the new reference.
        fm.horizontalBaseline = baseline[0]
        fm.verticalBaseline = baseline[1]
        fm.totalBaseline = baseline[2]
    else:
        ret = False
    return ret
def check_reference(dataset, md):
    """Check reference of this metadata against what's in GRQ.

    Args:
        dataset: dataset type string used in the GRQ query.
        md: raw metadata dict loadable by FrameMetadata (deep-copied; not mutated).

    Returns:
        A dict with 'ok_to_save', 'suspicious_flag', 'suspicious_code' and,
        when existing reference frames were found, 'frames' (and 'reprocess').
    """
    # get config
    uu = UrlUtils()

    # is this scene a reference?
    fm_md = copy.deepcopy(md)
    fm = FrameMetadata()
    fm.load(fm_md)
    #sys.stderr.write("fm.reference: %s\n" % fm.reference)
    #sys.stderr.write("fm.trackNumber: %s\n" % fm.trackNumber)
    #sys.stderr.write("fm.beamID: %s\n" % fm.beamID)
    #sys.stderr.write("fm.latitudeIndexMin: %s\n" % fm.latitudeIndexMin)
    #sys.stderr.write("fm.latitudeIndexMax: %s\n" % fm.latitudeIndexMax)

    # if not a reference, save
    # (identity check against the False singleton: a None/unset reference is
    # deliberately NOT treated as "not a reference", matching `== False`)
    if fm.reference is False:
        return {
            'ok_to_save': True,
            'suspicious_flag': False,
            'suspicious_code': ''
        }

    # check if reference exists already; pad the latitude-index window by a
    # third of the frame's latitude extent
    extremes = fm.getExtremes(fm.bbox)
    latMin = extremes[0]
    latMax = extremes[1]
    latDelta = (latMax - latMin) / 3.
    latitudeResolution = .1
    params = {
        'sensor': fm.platform,
        'dataset_type': dataset,
        'trackNumber': fm.trackNumber,
        'latitudeIndexMin': int(math.floor((latMin - latDelta) / latitudeResolution)),
        'latitudeIndexMax': int(math.ceil((latMax + latDelta) / latitudeResolution)),
        'system_version': uu.version,
        'direction': fm.direction,
        'lookDirection': fm.lookDirection,
        'reference': True,
    }
    if fm.beamID:
        params['beamID'] = fm.beamID
    metList, status = postQuery(buildQuery(params, ['within']))
    metObj = createMetaObjects(metList)

    # if none found, save
    if len(metObj) == 0:
        return {
            'ok_to_save': True,
            'suspicious_flag': False,
            'suspicious_code': ''
        }

    # loop over frames and check if in this frame's bbox
    inbbox_count = 0
    frames = []
    for met_idx, tmp_fm in enumerate(metObj):
        inbbox = fm.isInBbox(tmp_fm.refbbox)
        if inbbox:
            inbbox_count += 1
        frames.append({
            'id': os.path.splitext(metList[met_idx]['dfdn']['ProductName'])[0],
            'archive_filename': metList[met_idx]['archive_filename'],
            'inbbox': inbbox,
        })

    # if all not in bbox, okay to save but flag suspicious
    if inbbox_count == 0:
        return {
            'ok_to_save': True,
            'frames': frames,
            'suspicious_flag': True,
            'suspicious_code': 'no_frames_in_bbox'
        }
    # if one is in bbox, not okay to update
    elif inbbox_count == 1:
        #return { 'ok_to_save': False, 'reprocess': True, 'suspicious_flag': True, 'suspicious_code': 'one_frame_in_bbox' }
        # fail for now; it can be eventually reprocessed after the initial re-ingest
        return {
            'ok_to_save': False,
            'frames': frames,
            'reprocess': False,
            'suspicious_flag': True,
            'suspicious_code': 'one_frame_in_bbox'
        }
    # if more than one is in bbox, not okay to update and flag
    else:
        return {
            'ok_to_save': False,
            'frames': frames,
            'reprocess': False,
            'suspicious_flag': True,
            'suspicious_code': 'more_than_one_frame_in_bbox'
        }
def createMeta(self, metaDict):
    """Wrap a raw metadata dict in a freshly loaded FrameMetadata instance."""
    frame_meta = FrameMetadata()
    frame_meta.load(metaDict)
    return frame_meta