# Example 1
    def metaQuery(self, queryKey, peg, date, delta):
        """Query the repo for frames matching *queryKey* over the peg's
        latitude span, then refine the hits by time and platform.

        Parameters:
            queryKey: facet name; its value is read off self._frame via
                      the _mappingLoad indirection table.
            peg:      object with latStart/latEnd bounds.
            date:     reference time passed to refineFromTime.
            delta:    time tolerance passed to refineFromTime.

        Returns the (possibly refined) [results, status] pair from postQuery.
        """
        uu = UrlUtils()
        beamID = self._frame.beamID
        direction = self._frame.direction
        lat_min = min(peg.latStart, peg.latEnd)
        lat_max = max(peg.latStart, peg.latEnd)
        params = {
            queryKey:
            getattr(self._frame, self._frame._mappingLoad[queryKey]),
            'beamID':
            beamID,
            'system_version':
            uu.version,
            'latitudeIndexMin':
            int(math.floor(
                (lat_min) / FrameInfoExtractor._latitudeResolution)),
            'latitudeIndexMax':
            int(math.floor(
                (lat_max) / FrameInfoExtractor._latitudeResolution)),
            'direction':
            direction
        }

        metList = postQuery(buildQuery(params, ['cross-boundaries']))

        # BUG FIX: postQuery returns a (results, status) tuple, which is
        # always truthy, so the old `if metList:` guard never failed; test
        # the results list itself (cf. searchMasterFrames).
        if metList[0]:
            metList = [
                self.refineFromTime(metList[0], date, delta), metList[1]
            ]
            metList = [self.refineByPlatform(metList[0]), metList[1]]
        return metList
# Example 2
def sort_data(inps):
    """Collect, de-duplicate and group interferogram URLs for every
    frameID/platform/swath combination described in *inps*.

    Returns the URL sets produced by get_urls_sets.
    """
    all_urls = []
    url_by_datepair = {}
    corners_by_url = {}
    frame_lo, frame_hi = inps['frameIDs'][0], inps['frameIDs'][1]
    for frame_id in range(frame_lo, frame_hi + 1):
        for platform in inps['platforms']:
            for swath in inps['swaths']:
                facets = {
                    'frameID': str(frame_id),
                    'trackNumber': str(inps['track']),
                    'direction.raw': inps['direction'],
                    'dataset_type': 'interferogram',
                    'platform.raw': platform,
                    "swath": swath
                }
                if 'tags' in inps:
                    facets['tags'] = inps['tags']
                # sv in form 'v1.1.1'
                results, status = postQuery(buildQuery(facets, []),
                                            sv=inps['sys_ver'],
                                            conf=inps['conf'])
                for record in results:
                    url = record['url']
                    all_urls.append(url)
                    master, slave = get_dates(url, True)
                    # key on master+slave dates plus swath to spot duplicates
                    url_by_datepair[master + slave + str(swath)] = url
                    corners_by_url[url] = record['imageCorners']
    unique_urls = np.unique(np.array(all_urls))
    filtered = rm_incomplete_swaths(unique_urls, len(inps['swaths']))
    filtered = rm_dups(filtered, url_by_datepair, inps['swaths'])
    return get_urls_sets(filtered, corners_by_url, inps['nscenes'])
# Example 3
    def searchMasterFrames(self, peg):
        """Search the repo for candidate reference ("master") frames that
        overlap the peg's latitude span, then narrow them to those close in
        time to this frame's sensing start.
        """
        uu = UrlUtils()
        lat_lo = min(peg.latStart, peg.latEnd)
        lat_hi = max(peg.latStart, peg.latEnd)
        resolution = FrameInfoExtractor._latitudeResolution
        params = {
            'platform': self._frame.spacecraftName,
            'trackNumber': self._frame.trackNumber,
            'dataset_type': self._frame.dataset_type,
            'beamID': self._frame.beamID,
            'system_version': uu.version,
            'latitudeIndexMin': int(math.floor(lat_lo / resolution)),
            'latitudeIndexMax': int(math.floor(lat_hi / resolution)),
            'direction': self._frame.direction,
        }

        # get the list of meta close to the reference frame
        metList = postQuery(buildQuery(params, ['cross-boundaries']))

        if metList[0]:
            metList = self.refineFromTime(metList[0], self._frame.sensingStart,
                                          self._maxTimeStitch)
        return metList
# Example 4
def getMetadata(track=None,
                frame=None,
                beam=None,
                passdir=None,
                platform=None):
    """
    Query the product repo for interferogram metadata matching the given
    track, latitude-index frame range, beam and pass direction.

    Parameters:
        track:    track number (stringified into the query).
        frame:    two-element sequence (latitudeIndexMin, latitudeIndexMax).
        beam:     beam ID facet.
        passdir:  pass direction ('asc'/'dsc' -- presumably; confirm).
        platform: accepted for interface compatibility but NOT used here.

    Returns the list of metadata records from postQuery.
    """
    uu = UrlUtils()

    params = {
        "dataset": "interferogram",
        "trackNumber": str(track),
        "direction": passdir,
        "latitudeIndexMin": frame[0],
        "latitudeIndexMax": frame[1],
        "beamID": beam,
        'system_version': uu.version
    }

    # BUG FIX: every other call site passes the query options as a list
    # (e.g. ['within']); a bare string would be iterated character by
    # character if buildQuery loops over its options argument.
    query = buildQuery(params, ['within'])
    metList, status = postQuery(query)

    return metList
def prep_inputs(ml_dir, ctx_file, in_file):
    """Prepare the working directory and input JSON for a classifier run.

    Reads the job context from *ctx_file*, derives classmap/feature/clf
    versions from the configured filenames, queries the repo for input URLs,
    creates a product directory named after the classifier, chdirs into it,
    writes *in_file* there, and copies the classmap/feature files from
    *ml_dir*.

    NOTE(review): this function changes the process working directory and
    does not restore it; callers must account for that.
    """
    # get context
    with open(ctx_file) as f:
        j = json.load(f)

    # get kwargs
    kwargs = j  #mstarch - with containerization, "kwargs" are in context at top level #json.loads(j['rule']['kwargs'])

    # get classmap file and version (e.g. 'classmap_datavN.json' -> 'datavN')
    cm_file = os.path.basename(kwargs['classmap_file'].strip())
    match = re.search(r'classmap_(datav.*?)\.json', cm_file)
    if not match:
        raise RuntimeError("Failed to extract classmap version: %s" % cm_file)
    cm_version = match.group(1)

    # get features file and version (e.g. 'featvN.json' -> 'featvN')
    ft_file = os.path.basename(kwargs['feat_file'].strip())
    match = re.search(r'(featv.*?)\.json', ft_file)
    if not match:
        raise RuntimeError("Failed to extract feature version: %s" % ft_file)
    ft_version = match.group(1)

    # set classifier ID
    clf_version = kwargs['clf_version']
    clf_type = kwargs['clf_type']
    username = j['username']  #mstarch - username is a parameter
    rule_name = j['name']  #mstarch - rule_name is a parameter
    clf_name = "predictor_model-phunw_clfv%s_%s_%s-%s-%s" % (
        clf_version, cm_version, ft_version, username, rule_name)

    # get urls for the training inputs
    ret, status = postQuery({'query': j['query']
                             })  #mstarch - passthrough is now a parameter
    urls = [i['url'] for i in ret]

    # create input json (shadows the builtin 'input'; local only)
    input = {
        "clf_name": clf_name,
        "clf_type": clf_type,
        "classmap_file": cm_file,
        "feat_file": ft_file,
        "crossvalidate": 0,
        "saveclf": 1,
        "cacheoutput": 0,
        "urls": urls,
    }

    # create product directory and chdir
    os.makedirs(clf_name)
    os.chdir(clf_name)

    # write input file
    with open(in_file, 'w') as f:
        json.dump(input, f, indent=2)

    # copy classmap and feature files
    shutil.copy(os.path.join(ml_dir, 'classmaps', cm_file), cm_file)
    shutil.copy(os.path.join(ml_dir, 'features', ft_file), ft_file)
# Example 6
def getUrls(sensor, token):
    """Return [url, label] pairs for interferograms from *sensor* tagged
    with *token*, where the label is derived from the user tags.

    Records without 'user_tags' or with no derivable label are skipped.
    """
    meta = {'dataset': 'interferogram', 'sensor': sensor, 'tags': [token]}
    ret, status = postQuery(buildQuery(meta, []))
    inps = []
    for record in ret:
        # BUG FIX: the membership test must precede the lookup --
        # the original indexed record['user_tags'] first, raising
        # KeyError for records without that field.
        if 'user_tags' not in record:
            continue
        label = getIntLabel(hasToken(record['user_tags'], token))
        if label is not None:
            inps.append([record['url'], label])
    return inps
# Example 7
def getData(args):
    """Download products matching args.tags into args.dir.

    For each matching product URL: fetches the listed products via curl
    (DAV basic auth), then scrapes the product directory listing for extra
    files matching args.re/args.nre and fetches those too.
    """
    uu = UrlUtils()
    meta = {'tags': args.tags, 'tag_operator': args.operator}
    ret, status = postQuery(buildQuery(meta))
    try:
        os.mkdir(args.dir)
    except Exception:
        print("directory", args.dir, "already present")
    os.chdir(args.dir)
    for i in ret:
        url = i['url']
        odir = os.getcwd()
        ndir = url.split('/')[-1]
        try:
            os.mkdir(ndir)
        except Exception:
            pass  # product subdirectory already exists
        os.chdir(ndir)
        for pr in args.products:
            if pr.endswith('.xml'):
                # also fetch the data file paired with the .xml annotation
                command = 'curl -k -f -u' + uu.dav_u + ':' + uu.dav_p + ' -O ' + os.path.join(
                    url, pr.replace('.xml', ''))
                os.system(command)
            command = 'curl -k -f -u' + uu.dav_u + ':' + uu.dav_p + ' -O ' + os.path.join(
                url, pr)
            os.system(command)

        # BUG FIX: 'response' was only bound inside the try block; on
        # failure the 'if response:' check raised NameError on the first
        # iteration or reused the previous iteration's stale listing.
        response = None
        try:
            # create a password manager for DAV basic auth
            password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()

            # Add the username and password.
            password_mgr.add_password(None,
                                      urllib.parse.urlparse(url).netloc,
                                      uu.dav_u, uu.dav_p)

            handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
            # create "opener" (OpenerDirector instance)
            opener = urllib.request.build_opener(handler)
            # use the opener to fetch the directory listing
            response = opener.open(url).read().decode('utf-8')
        except Exception as e:
            print(e)
        if response:
            parser = MyHTMLParser(args.re, args.nre)
            parser.feed(response)
            print(parser.results)
            # BUG FIX: the inner loop variable shadowed the outer 'i'.
            for fname in parser.results:
                command = 'curl -k -f -u' + uu.dav_u + ':' + uu.dav_p + ' -O ' + os.path.join(
                    url, fname)
                os.system(command)

        os.chdir(odir)
# Example 8
def getData(sensor, token):
    """Extract features for every interferogram from *sensor* tagged with
    *token*, printing the count of processed products at the end.
    """
    meta = {'dataset': 'interferogram', 'sensor': sensor, 'tags': [token]}
    ret, status = postQuery(buildQuery(meta, []))
    # BUG FIX: 'cnt' was defined inside an 'if len(ret):' branch, so the
    # final print raised NameError when the query returned no results.
    cnt = 0
    for i in ret:
        # BUG FIX: test for 'user_tags' before indexing it (the original
        # indexed first and would raise KeyError).
        if 'user_tags' not in i:
            continue
        label = getIntLabel(hasToken(i['user_tags'], token))
        if label is not None:
            print('processing', i['url'], cnt, len(ret))
            extractFeatures(i, label)
            cnt += 1
    print(cnt)
# Example 9
 def masterExists(self, fm):
     """Return True if a reference ("master") frame already exists in the
     repo for the area covered by frame metadata *fm*.

     The search window pads the frame's latitude extremes by one third of
     its latitude span and filters on reference=True.
     """
     uu = UrlUtils()
     extremes = fm.getExtremes(fm.bbox)
     latMin = extremes[0]
     latMax = extremes[1]
     # pad the latitude window by a third of the frame's span
     latDelta = (latMax - latMin) / 3.
     latitudeResolution = .1
     params = {
         'sensor':
         fm.platform,
         'trackNumber':
         fm.trackNumber,
         'latitudeIndexMin':
         int(math.floor((latMin - latDelta) / latitudeResolution)),
         'latitudeIndexMax':
         int(math.ceil((latMax + latDelta) / latitudeResolution)),
         'dataset_type':
         fm.dataset_type,
         'system_version':
         uu.version,
         'direction':
         fm.direction,
         'lookDirection':
         fm.lookDirection,
         'reference':
         True,
     }
     # beamID is only a valid facet when the platform defines one
     if fm.beamID:
         params['beamID'] = fm.beamID
     exists = False
     metList, status = postQuery(buildQuery(params, ['within']))
     if (status):
         metObj = createMetaObjects(metList)
         # more than one reference for the same footprint is unexpected
         if (len(metObj) > 1):
             print(
                 "WARNING FrameInfoExtractor: Expecting only one frame to be reference"
             )
         if metObj:
             exists = True
     return exists
# Example 10
def sort_data_dev(inps):
    """Collect interferogram URLs per platform/swath over a latitude-index
    window, de-duplicate them, and group them into date-complete sets.

    Returns (urls, dates_incomplete) where urls come from the complete date
    groups and dates_incomplete lists the groups that could not be filled.
    """
    urls = []
    durls = {}   # master+slave date pair + swath -> url (duplicate detection)
    coords = {}  # url -> imageCorners
    swaths = {}  # url -> swath number
    for plat in inps['platforms']:
        for sw in inps['swaths']:
            meta = {
                'trackNumber': str(inps['track']),
                'direction.raw': inps['direction'],
                'dataset_type': 'interferogram',
                'latitudeIndexMin': str(inps['latitudeIndexMin']),
                'latitudeIndexMax': str(inps['latitudeIndexMax']),
                'platform.raw': plat,
                'swath': sw
            }
            options = ['cross-boundaries']
            if 'tags' in inps:
                meta['tags'] = inps['tags']
            query = buildQuery(meta, options)
            #sv in form 'v1.1.1'
            ret, status = postQuery(query,
                                    sv=inps['sys_ver'],
                                    conf=inps['conf'])
            for r in ret:
                urls.append(r['url'])
                ms, sl = get_dates(urls[-1], True)
                durls[ms + sl + str(sw)] = r['url']
                coords[r['url']] = r['imageCorners']
                swaths[r['url']] = sw

    ourls = np.unique(np.array(urls))
    urls = rm_incomplete_swaths(ourls, len(inps['swaths']))
    urls = rm_dups(urls, durls, inps['swaths'])
    # latitude indices are stored at 0.1 deg resolution; /10. -> degrees
    dates_complete = get_urls_sets_dev(
        urls, coords, swaths,
        [inps['latitudeIndexMin'] / 10., inps['latitudeIndexMax'] / 10.])
    urls, dates_incomplete = url_from_dates(dates_complete)
    return urls, dates_incomplete
# Example 11
def loadQuery(querymeta, queryoptions=None, queryoutfile=None, cache=False):
    '''
    Builds/posts the faceted search query specified in querymeta and dumps the
    result to queryoutfile. If caching is enabled and queryoutfile already
    exists, the query result is loaded from disk rather than executed.

    Parameters:
        querymeta:    facet dict passed to buildQuery.
        queryoptions: optional list of query options (default: none).
        queryoutfile: pickle path used for caching (required when cache=True).
        cache:        when True, reuse/persist results via queryoutfile.

    Returns the list of query results.
    '''
    # BUG FIX: the original default was a mutable list ([]) shared across
    # calls; use None and substitute a fresh list per call.
    if queryoptions is None:
        queryoptions = []

    if not cache or not pathexists(queryoutfile):
        print('executing faceted search query...')
        from utils.queryBuilder import postQuery, buildQuery
        from utils.contextUtils import toContext
        ret, status = postQuery(buildQuery(querymeta, queryoptions))
        if cache and status:
            # only dump the query if caching enabled and postQuery succeeds
            with open(queryoutfile, 'wb') as fid:
                pickle.dump(ret, fid)
    elif cache:
        print('loading cached query from %s...' % queryoutfile)
        with open(queryoutfile, 'rb') as fid:
            ret = pickle.load(fid)
    print('query returned %d products' % len(ret))
    return ret
# Example 12
def get_mets(inps):
    """Return all metadata records matching the track/direction/latitude
    facets in *inps*, queried once per platform/swath combination.
    """
    collected = []
    for platform in inps['platforms']:
        for swath in inps['swaths']:
            facets = {
                'trackNumber': str(inps['track']),
                'direction.raw': inps['direction'],
                'dataset_type': 'interferogram',
                'latitudeIndexMin': str(inps['latitudeIndexMin']),
                'latitudeIndexMax': str(inps['latitudeIndexMax']),
                'platform.raw': platform,
                'swath': swath
            }
            if 'tags' in inps:
                facets['tags'] = inps['tags']
            query = buildQuery(facets, ['cross-boundaries'])
            # sv in form 'v1.1.1'
            results, status = postQuery(query,
                                        sv=inps['sys_ver'],
                                        conf=inps['conf'])
            collected.extend(results)
    return collected
# Example 13
    def searchSlaveFrames(self, peg):
        """Search the repo for candidate secondary ("slave") frames that
        overlap the peg's latitude span, refine them by platform, and
        group the refined frames by acquisition time.

        Returns a list of groupByTime results (one per refined entry).
        """
        uu = UrlUtils()
        beamID = self._frame.beamID
        direction = self._frame.direction
        lat_min = min(peg.latStart, peg.latEnd)
        lat_max = max(peg.latStart, peg.latEnd)
        params = {
            'sensor':
            self._frame.platform,
            'trackNumber':
            self._frame.trackNumber,
            'dataset_type':
            self._frame.dataset_type,
            'beamID':
            beamID,
            'system_version':
            uu.version,
            'latitudeIndexMin':
            int(math.floor(
                (lat_min) / FrameInfoExtractor._latitudeResolution)),
            'latitudeIndexMax':
            int(math.floor(
                (lat_max) / FrameInfoExtractor._latitudeResolution)),
            'direction':
            direction
        }

        #get the list of meta close to the reference frame
        metList = postQuery(buildQuery(params, ['cross-boundaries']))

        # BUG FIX: postQuery returns a (results, status) tuple, which is
        # always truthy, so the old `if metList:` guard never failed; on an
        # empty result the loop below would have iterated the raw tuple.
        if metList[0]:
            metList = self.refineByPlatform(metList[0])
        else:
            metList = []
        newMet = []
        for met in metList:
            newMet.append(self.groupByTime(met))
        return newMet
# Example 14
    def computeBaseline(self, fm):
        """Compute the orbital baseline of frame metadata *fm* against the
        existing reference frame for its footprint, if any.

        Side effects on fm: sets _bbox, _refbbox, _direction, reference,
        refbbox and the horizontal/vertical/total baseline attributes.

        Returns True on success, False when the repo query failed.
        """
        ret = True
        oi = OrbitInfo(fm)
        requester = Http()
        uu = UrlUtils()
        rest_url = uu.rest_url

        fmRef = FrameMetadata()
        # just need an estimate
        bbox, dummy = self.calculateCorners()
        fm._bbox = []
        fm._refbbox = []
        for bb in bbox:
            fm._bbox.append(
                [round(bb.getLatitude(), 2),
                 round(bb.getLongitude(), 2)])
            fm._refbbox.append(
                [round(bb.getLatitude(), 2),
                 round(bb.getLongitude(), 2)])
        if (fm._bbox[0][0] < fm._bbox[2][0]):
            # if latEarly < latLate then asc otherwise dsc
            fm._direction = 'asc'
        else:
            fm._direction = 'dsc'

        # default baseline when no reference frame exists yet
        baseline = [0, 0, 0]
        uu = UrlUtils()
        extremes = fm.getExtremes(fm.bbox)
        latMin = extremes[0]
        latMax = extremes[1]
        # pad the latitude search window by a third of the frame's span
        latDelta = (latMax - latMin) / 3.
        latitudeResolution = .1
        params = {
            'sensor':
            fm.platform,
            'trackNumber':
            fm.trackNumber,
            'dataset_type':
            fm.dataset_type,
            'latitudeIndexMin':
            int(math.floor((latMin - latDelta) / latitudeResolution)),
            'latitudeIndexMax':
            int(math.ceil((latMax + latDelta) / latitudeResolution)),
            'direction':
            fm.direction,
            'system_version':
            uu.version,
            'lookDirection':
            fm.lookDirection,
            'reference':
            True
        }
        if fm.beamID:
            params['beamID'] = fm.beamID
        #print("params", params)
        query = buildQuery(params, ['within'])
        #print("query: %s" % json.dumps(query, indent=2))
        metList, status = postQuery(query)

        # if empty no results available
        if status:
            metObj = createMetaObjects(metList)
            if metObj:
                # there should be only one result
                if (len(metObj) > 1):
                    print(
                        "WARNING FrameInfoExtractor: Expecting only one frame to be reference"
                    )

                # compute the baseline of this frame against the reference
                fmRef = metObj[0]
                oiRef = OrbitInfo(fmRef)
                oi.computeBaseline(oiRef)
                bl = oi.getBaseline()
                baseline = [bl['horz'], bl['vert'], bl['total']]
                fm.refbbox = fmRef.refbbox
                fm.reference = False
                fm._bbox = []
                for bb in bbox:
                    fm._bbox.append([
                        round(bb.getLatitude(), 2),
                        round(bb.getLongitude(), 2)
                    ])
                if (fm._bbox[0][0] < fm._bbox[2][0]):
                    # if latEarly < latLate then asc otherwise dsc
                    fm._direction = 'asc'
                else:
                    fm._direction = 'dsc'
            else:
                # no reference found: this frame becomes the reference, with
                # its refbbox expanded outward by self._buffer along the
                # bbox edge vectors
                import numpy as np
                fm.reference = True
                pos = np.array(fm._bbox)
                d10 = pos[1] - pos[0]
                d30 = pos[3] - pos[0]
                d23 = pos[2] - pos[3]
                d21 = pos[2] - pos[1]
                pos[0] += self._buffer * (-d10 - d30)
                pos[1] += self._buffer * (d10 - d21)
                pos[2] += self._buffer * (d23 + d21)
                pos[3] += self._buffer * (-d23 + d30)
                fm._refbbox = pos.tolist()

            fm.horizontalBaseline = baseline[0]
            fm.verticalBaseline = baseline[1]
            fm.totalBaseline = baseline[2]

        else:
            # query failed: report failure to the caller
            ret = False

        return ret
# Example 15
def check_reference(dataset, md):
    """Check reference of this metadata against what's in GRQ.

    Returns a dict with at least:
        ok_to_save:      whether the product may be saved.
        suspicious_flag: whether the situation warrants review.
        suspicious_code: short reason string ('' when not suspicious).
    and, when reference frames were found, a 'frames' list describing them.
    """

    # get config
    uu = UrlUtils()
    rest_url = uu.rest_url

    # is this scene a reference?
    fm_md = copy.deepcopy(md)
    fm = FrameMetadata()
    fm.load(fm_md)

    #sys.stderr.write("fm.reference: %s\n" % fm.reference)
    #sys.stderr.write("fm.trackNumber: %s\n" % fm.trackNumber)
    #sys.stderr.write("fm.beamID: %s\n" % fm.beamID)
    #sys.stderr.write("fm.latitudeIndexMin: %s\n" % fm.latitudeIndexMin)
    #sys.stderr.write("fm.latitudeIndexMax: %s\n" % fm.latitudeIndexMax)

    # if not a reference, save
    if fm.reference == False:
        return {
            'ok_to_save': True,
            'suspicious_flag': False,
            'suspicious_code': ''
        }

    # check if reference exists already; pad the latitude window by a
    # third of the frame's latitude span
    extremes = fm.getExtremes(fm.bbox)
    latMin = extremes[0]
    latMax = extremes[1]
    lonMin = extremes[2]
    lonMax = extremes[3]
    latDelta = (latMax - latMin) / 3.
    latitudeResolution = .1
    params = {
        'sensor':
        fm.platform,
        'dataset_type':
        dataset,
        'trackNumber':
        fm.trackNumber,
        'latitudeIndexMin':
        int(math.floor((latMin - latDelta) / latitudeResolution)),
        'latitudeIndexMax':
        int(math.ceil((latMax + latDelta) / latitudeResolution)),
        'system_version':
        uu.version,
        'direction':
        fm.direction,
        'lookDirection':
        fm.lookDirection,
        'reference':
        True,
    }
    if fm.beamID:
        params['beamID'] = fm.beamID
    metList, status = postQuery(buildQuery(params, ['within']))
    metObj = createMetaObjects(metList)

    # if none found, save
    if len(metObj) == 0:
        return {
            'ok_to_save': True,
            'suspicious_flag': False,
            'suspicious_code': ''
        }

    # loop over frames and check if in this frame's bbox
    inbbox_count = 0
    frames = []
    for met_idx, tmp_fm in enumerate(metObj):
        inbbox = fm.isInBbox(tmp_fm.refbbox)
        if inbbox: inbbox_count += 1
        frames.append({
            'id':
            os.path.splitext(metList[met_idx]['dfdn']['ProductName'])[0],
            'archive_filename':
            metList[met_idx]['archive_filename'],
            'inbbox':
            inbbox,
        })

    #print "true_count:", true_count

    # if all not in bbox, okay to save but flag suspicious
    if inbbox_count == 0:
        return {
            'ok_to_save': True,
            'frames': frames,
            'suspicious_flag': True,
            'suspicious_code': 'no_frames_in_bbox'
        }

    # if one is in bbox, not okay to update
    elif inbbox_count == 1:
        #return { 'ok_to_save': False, 'reprocess': True, 'suspicious_flag': True, 'suspicious_code': 'one_frame_in_bbox' }
        # fail for now; it can be eventually reprocessed after the initial re-ingest
        return {
            'ok_to_save': False,
            'frames': frames,
            'reprocess': False,
            'suspicious_flag': True,
            'suspicious_code': 'one_frame_in_bbox'
        }

    # if more than one is in bbox, not okay to update and flag
    else:
        return {
            'ok_to_save': False,
            'frames': frames,
            'reprocess': False,
            'suspicious_flag': True,
            'suspicious_code': 'more_than_one_frame_in_bbox'
        }
# Example 16
def get_list(version, sensor):
    """Query the repo for all interferograms from *sensor* at the given
    system version; returns whatever qb.postQuery returns.
    """
    facets = {
        'system_version': version,
        'dataset': 'interferogram',
        'sensor': sensor,
    }
    return qb.postQuery(qb.buildQuery(facets, []), version)