def _constructSolrQuery(self, startIndex, entriesPerPage, variables):
    #set default sort order
    sort = 'Granule-StartTimeLong+desc'
    filterQuery = None
    queries = []
    for key, value in variables.iteritems():
        #query = ''
        if key == 'startTime':
            startTime = DateUtility.convertISOToUTCTimestamp(value)
            if startTime is not None:
                query = 'Granule-StopTimeLong:'
                query += '[' + str(startTime) + '%20TO%20*]'
                queries.append(query)
        elif key == 'endTime':
            stopTime = DateUtility.convertISOToUTCTimestamp(value)
            if stopTime is not None:
                query = 'Granule-StartTimeLong:'
                query += '[*%20TO%20' + str(stopTime) + ']'
                queries.append(query)
        elif key == 'keyword':
            newValue = urllib.quote(value)
            query = 'SearchableText-LowerCased:(' + newValue + ')'
            queries.append(query)
        elif key == 'datasetId':
            query = 'Dataset-PersistentId:' + self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'shortName':
            query = 'Dataset-ShortName-Full:' + self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'granuleName':
            query = 'Granule-Name-Full:' + self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'granuleIds':
            granuleIds = []
            for granuleId in value:
                granuleIds.append(str(granuleId))
            query = 'Granule-Id:(' + '+OR+'.join(granuleIds) + ')'
            queries.append(query)
            startIndex = 0
        elif key == 'sortBy':
            sortByMapping = {'timeAsc': 'Granule-StartTimeLong+asc'}
            if value in sortByMapping.keys():
                sort = sortByMapping[value]
        elif key == 'bbox':
            filterQuery = self._constructBoundingBoxQuery(value)
        #if query != '':
        #    queries.append('%2B'+query)

    if len(queries) == 0:
        queries.append('*')

    query = 'q=' + '+AND+'.join(queries) + \
        '&fq=Granule-AccessType:(OPEN+OR+PREVIEW+OR+SIMULATED+OR+REMOTE)+AND+Granule-Status:ONLINE' + \
        '&version=2.2&start=' + str(startIndex) + '&rows=' + str(entriesPerPage) + \
        '&indent=on&wt=json&sort=' + sort

    if filterQuery is not None:
        query += '&' + filterQuery

    logging.debug('solr query: ' + query)

    return query
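`_urlEncodeSolrQueryValue` is used above but not defined in this section. A minimal sketch consistent with that usage (the result is concatenated directly after a Solr field name, so the value is phrase-quoted and percent-encoded), assuming the same Python 2 `urllib` idiom as the surrounding code; this is not the confirmed implementation:

def _urlEncodeSolrQueryValue(self, value):
    # Hypothetical sketch: wrap the value in double quotes so Solr matches it
    # as an exact phrase, then percent-encode it for safe embedding in the
    # query string assembled by the callers above.
    return urllib.quote('"' + value + '"')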
def _populateItem(self, solrResponse, doc, item):
    doc['Granule-StartTimeLong'][0] = DateUtility.convertTimeLongToRFC822(doc['Granule-StartTimeLong'][0])
    doc['Granule-StopTimeLong'][0] = DateUtility.convertTimeLongToRFC822(doc['Granule-StopTimeLong'][0])
    doc['Granule-ArchiveTimeLong'][0] = DateUtility.convertTimeLongToRFC822(doc['Granule-ArchiveTimeLong'][0])
    doc['GranuleLink'] = self._getLinkToGranule(doc)
    doc['GranuleFileSize'] = dict(zip(doc['GranuleArchive-Type'], doc['GranuleArchive-FileSize']))
    if 'GranuleReference-Type' in doc:
        doc['GranuleReference'] = dict([(doc['GranuleReference-Type'][i], doc['GranuleReference-Path'][i])
                                        for i, x in enumerate(doc['GranuleReference-Status']) if x == "ONLINE"])
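The `dict(zip(...))` above pairs parallel multi-valued Solr fields positionally. A standalone illustration with toy values (not from the source):

# Toy values for illustration only; real docs come from the Solr response.
example_doc = {
    'GranuleArchive-Type': ['DATA', 'CHECKSUM'],
    'GranuleArchive-FileSize': [1048576, 32],
}
# Archive types become keys, their file sizes the values.
print(dict(zip(example_doc['GranuleArchive-Type'],
               example_doc['GranuleArchive-FileSize'])))
# {'DATA': 1048576, 'CHECKSUM': 32}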
def _populate(self, solrDatasetResponse, solrGranuleResponse=None):
    if solrDatasetResponse is not None:
        solrJson = json.loads(solrDatasetResponse)

        logging.debug('dataset count: ' + str(len(solrJson['response']['docs'])))

        if len(solrJson['response']['docs']) == 1:
            # ok now populate variables!
            doc = solrJson['response']['docs'][0]
            self.variables['doc'] = doc

            # Format dates
            try:
                self.variables['DatasetCitation_ReleaseYear'] = DateUtility.convertTimeLong(
                    doc['DatasetCitation-ReleaseDateLong'][0], '%Y')
            except:
                pass

            # Link to dataset portal page
            self.variables['DatasetPortalPage'] = self.portalUrl + '/' + doc['Dataset-ShortName'][0]

            # Set default pub date to x hours ago because we cast all granules archived within the last x hours
            self.variables['PubDate'] = DateUtility.pastDateRFC822(self.archivedWithin)
        else:
            raise Exception('No dataset found')

    if solrGranuleResponse is not None:
        solrGranuleJson = json.loads(solrGranuleResponse)

        logging.debug('granule count: ' + str(len(solrGranuleJson['response']['docs'])))

        pubDate = 0
        for doc in solrGranuleJson['response']['docs']:
            if (doc['Granule-ArchiveTimeLong'][0] > pubDate):
                pubDate = doc['Granule-ArchiveTimeLong'][0]
            self._populateItem(solrGranuleResponse, doc, None)

        if pubDate != 0:
            # Set pub date to latest granule archive date
            self.variables['PubDate'] = DateUtility.convertTimeLongToRFC822(pubDate)

        self.variables['granules'] = solrGranuleJson['response']['docs']
    else:
        raise Exception('No granules found')
def _generateUrl(self, requestHandler):
    url = self._configuration.get('solr', 'url')
    parameters = {}
    parameters['wt'] = 'json'
    parameters['group'] = 'true'
    parameters['group.limit'] = -1
    #parameters['facet.limit'] = 10
    parameters['fl'] = 'time,productTypePrefix,productType'
    parameters['group.field'] = 'crid'
    parameters['omitHeader'] = 'true'
    parameters['q'] = '*:*'
    parameters['fq'] = []
    parameters['sort'] = 'crid desc'

    try:
        parameters['fq'].append('collection:"' + requestHandler.get_argument('collection') + '"')
    except:
        pass
    try:
        parameters['fq'].append('productType:"' + requestHandler.get_argument('productType') + '"')
    except:
        pass
    try:
        start = requestHandler.get_argument('start')
        if len(start) == 10:
            start += 'T00:00:00'
    except:
        raise Exception('Missing start parameter.')
    try:
        end = requestHandler.get_argument('end')
        if len(end) == 10:
            end += 'T23:59:59'
    except:
        end = start[0:10] + 'T23:59:59'
    logging.debug('start: ' + start)
    logging.debug('end: ' + end)
    start = DateUtility.convertISOToUTCTimestamp(start)
    end = DateUtility.convertISOToUTCTimestamp(end) + 999
    parameters['fq'].append('time:[' + str(start) + ' TO ' + str(end) + ']')

    url += '/select?' + urllib.urlencode(parameters, True)
    logging.debug("proxy to url : " + url)
    return url
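The truthy second argument to `urllib.urlencode` enables `doseq`, so the list-valued `fq` is expanded into one `fq=` parameter per filter clause. A standalone illustration with toy values:

import urllib

params = {'q': '*:*', 'fq': ['collection:"foo"', 'time:[0 TO 86399999]']}
# doseq=True: each element of the 'fq' list becomes its own query parameter.
print(urllib.urlencode(params, True))
# e.g. q=%2A%3A%2A&fq=collection%3A%22foo%22&fq=time%3A%5B0+TO+86399999%5D
# (parameter order may vary with dict ordering)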
def _populate(self, solrDatasetResponse, solrGranuleResponse = None):
    if solrDatasetResponse is not None:
        solrJson = json.loads(solrDatasetResponse)

        logging.debug('dataset count: '+str(len(solrJson['response']['docs'])))

        if len(solrJson['response']['docs']) == 1:
            # ok now populate variables!
            doc = solrJson['response']['docs'][0]
            self.variables['doc'] = doc

            # Format dates
            try:
                self.variables['DatasetCitation_ReleaseYear'] = DateUtility.convertTimeLong(doc['DatasetCitation-ReleaseDateLong'][0], '%Y')
            except:
                pass

            # Link to dataset portal page
            self.variables['DatasetPortalPage'] = self.portalUrl+'/'+doc['Dataset-ShortName'][0]

            # Set default pub date to x hours ago because we cast all granules archived within the last x hours
            self.variables['PubDate'] = DateUtility.pastDateRFC822(self.archivedWithin)
        else:
            raise Exception('No dataset found')

    if solrGranuleResponse is not None:
        solrGranuleJson = json.loads(solrGranuleResponse)

        logging.debug('granule count: '+str(len(solrGranuleJson['response']['docs'])))

        pubDate = 0
        for doc in solrGranuleJson['response']['docs']:
            if (doc['Granule-ArchiveTimeLong'][0] > pubDate):
                pubDate = doc['Granule-ArchiveTimeLong'][0]
            self._populateItem(solrGranuleResponse, doc, None)

        if pubDate != 0:
            # Set pub date to latest granule archive date
            self.variables['PubDate'] = DateUtility.convertTimeLongToRFC822(pubDate)

        self.variables['granules'] = solrGranuleJson['response']['docs']
    else:
        raise Exception('No granules found')
def _generateUrl(self, requestHandler):
    url = self._configuration.get('solr', 'url')
    parameters = {}
    parameters['wt'] = 'json'
    parameters['group'] = 'true'
    parameters['group.limit'] = -1
    #parameters['facet.limit'] = 10
    parameters['fl'] = 'time,productTypePrefix,productType'
    parameters['group.field'] = 'crid'
    parameters['omitHeader'] = 'true'
    parameters['q'] = '*:*'
    parameters['fq'] = []
    parameters['sort'] = 'crid desc'

    try:
        parameters['fq'].append('collection:"' + requestHandler.get_argument('collection') + '"')
    except:
        pass
    try:
        parameters['fq'].append('productType:"' + requestHandler.get_argument('productType') + '"')
    except:
        pass
    try:
        start = requestHandler.get_argument('start')
        if len(start) == 10:
            start += 'T00:00:00'
    except:
        raise Exception('Missing start parameter.')
    try:
        end = requestHandler.get_argument('end')
        if len(end) == 10:
            end += 'T23:59:59'
    except:
        end = start[0:10] + 'T23:59:59'
    logging.debug('start: ' + start)
    logging.debug('end: ' + end)
    start = DateUtility.convertISOToUTCTimestamp(start)
    end = DateUtility.convertISOToUTCTimestamp(end) + 999
    parameters['fq'].append('time:[' + str(start) + ' TO ' + str(end) + ']')

    url += '/select?' + urllib.urlencode(parameters, True)
    logging.debug("proxy to url : " + url)
    return url
def _populateItem(self, solrResponse, doc, item):
    persistentId = doc['_source']['identifier']
    idTuple = ('identifier', persistentId)
    """
    if persistentId == '':
        idTuple = ('shortName', doc['Dataset-ShortName'][0])
    """
    item.append({'name': 'title', 'value': doc['_source']['title']})
    item.append({'name': 'content', 'value': doc['_source']['description']})
    item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('full', 'true')])), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata'}})
    """
    item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'iso')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata'}})
    item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'gcmd')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'GCMD Metadata'}})
    """

    #Only generate granule search link if dataset has granules
    if (doc['_source']['identifier'].lower() in self.datasets):
        supportedGranuleParams = dict([(key, value) for key, value in self.parameters.items() if key in ['bbox', 'startTime', 'endTime']])
        supportedGranuleParams['identifier'] = persistentId
        item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.parse.urlencode(supportedGranuleParams), 'rel': 'search', 'type': 'application/atom+xml', 'title': 'Product Search'}})

    """
    if 'Dataset-ImageUrl' in doc and doc['Dataset-ImageUrl'][0] != '':
        item.append({'name': 'link', 'attribute': {'href': doc['Dataset-ImageUrl'][0], 'rel': 'enclosure', 'type': 'image/jpg', 'title': 'Thumbnail'}})

    if 'DatasetLocationPolicy-Type' in doc and 'DatasetLocationPolicy-BasePath' in doc:
        url = dict(zip(doc['DatasetLocationPolicy-Type'], doc['DatasetLocationPolicy-BasePath']))
        if 'LOCAL-OPENDAP' in url:
            item.append({'name': 'link', 'attribute': {'href': url['LOCAL-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL'}})
        elif 'REMOTE-OPENDAP' in url:
            item.append({'name': 'link', 'attribute': {'href': url['REMOTE-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL'}})
        if 'LOCAL-FTP' in url:
            item.append({'name': 'link', 'attribute': {'href': url['LOCAL-FTP'], 'rel': 'enclosure', 'type': 'text/plain', 'title': 'FTP URL'}})
        elif 'REMOTE-FTP' in url:
            item.append({'name': 'link', 'attribute': {'href': url['REMOTE-FTP'], 'rel': 'enclosure', 'type': 'text/plain', 'title': 'FTP URL'}})

    if doc['DatasetPolicy-ViewOnline'][0] == 'Y' and doc['DatasetPolicy-AccessType-Full'][0] in ['OPEN', 'PREVIEW', 'SIMULATED', 'REMOTE']:
        portalUrl = self.portalUrl+'/'+doc['Dataset-ShortName'][0]
        item.append({'name': 'link', 'attribute': {'href': portalUrl, 'rel': 'enclosure', 'type': 'text/html', 'title': 'Dataset Information'}})

    updated = None
    if 'DatasetMetaHistory-LastRevisionDateLong' in doc and doc['DatasetMetaHistory-LastRevisionDateLong'][0] != '':
        updated = DateUtility.convertTimeLongToIso(doc['DatasetMetaHistory-LastRevisionDateLong'][0])
    else:
        updated = datetime.datetime.utcnow().isoformat()+'Z'
    item.append({'name': 'updated', 'value': updated})
    """
    item.append({'name': 'id', 'value': doc['_source']['identifier']})
    """
    item.append({'namespace': 'podaac', 'name': 'datasetId', 'value': doc['Dataset-PersistentId'][0]})
    item.append({'namespace': 'podaac', 'name': 'shortName', 'value': doc['Dataset-ShortName'][0]})
    """

    if doc['_source']['west_longitude'] is not None and doc['_source']['south_latitude'] is not None and doc['_source']['east_longitude'] is not None and doc['_source']['north_latitude'] is not None:
        item.append({'namespace': 'georss', 'name': 'where',
                     'value': {'namespace': 'gml', 'name': 'Envelope',
                               'value': [{'namespace': 'gml', 'name': 'lowerCorner',
                                          'value': ' '.join([str(doc['_source']['west_longitude']), str(doc['_source']['south_latitude'])])},
                                         {'namespace': 'gml', 'name': 'upperCorner',
                                          'value': ' '.join([str(doc['_source']['east_longitude']), str(doc['_source']['north_latitude'])])}]}})

    if 'start_time' in doc['_source'] and doc['_source']['start_time'] is not None:
        item.append({'namespace': 'time', 'name': 'start', 'value': DateUtility.convertTimeLongToIso(doc['_source']['start_time'])})
    if 'stop_time' in doc['_source'] and doc['_source']['stop_time'] is not None:
        item.append({'namespace': 'time', 'name': 'end', 'value': DateUtility.convertTimeLongToIso(doc['_source']['stop_time'])})

    if 'full' in self.parameters and self.parameters['full']:
        self._populateItemWithAllMetadata(doc['_source'], item)
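`_populateItemWithAllMetadata` is called above but not shown in this section. A purely hypothetical sketch of the shape such a method could take, given that it receives the raw `_source` dict and the item list; the real implementation may filter, rename, or namespace fields differently:

def _populateItemWithAllMetadata(self, source, item):
    # Hypothetical sketch only, not the confirmed implementation: expose each
    # non-empty '_source' field as a namespaced element on the feed item.
    for key, value in source.items():
        if value is not None:
            item.append({'namespace': 'gibs', 'name': key, 'value': value})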
def _populateItem(self, solrResponse, doc, item):
    persistentId = doc['Dataset-PersistentId'][0]
    idTuple = ('datasetId', persistentId)
    if persistentId == '':
        idTuple = ('shortName', doc['Dataset-ShortName'][0])

    portalUrl = ""
    if doc['DatasetPolicy-ViewOnline'][0] == 'Y' and doc['DatasetPolicy-AccessType-Full'][0] in ['OPEN', 'PREVIEW', 'SIMULATED', 'REMOTE']:
        portalUrl = self.portalUrl + '/' + doc['Dataset-ShortName'][0]
        item.append({'name': 'enclosure', 'attribute': {'url': portalUrl, 'type': 'text/html', 'length': '0'}})

    item.append({'name': 'title', 'value': doc['Dataset-LongName'][0]})
    item.append({'name': 'description', 'value': doc['Dataset-Description'][0]})
    item.append({'name': 'link', 'value': portalUrl})
    item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.searchBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('full', 'true'), ('format', 'rss')])), 'type': 'application/rss+xml', 'length': '0'}})
    item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.metadataBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('format', 'iso')])), 'type': 'text/xml', 'length': '0'}})
    item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.metadataBasePath + 'dataset?' + urllib.parse.urlencode(dict([idTuple, ('format', 'gcmd')])), 'type': 'text/xml', 'length': '0'}})

    #Only generate granule search link if dataset has granules
    if (doc['Dataset-ShortName'][0] in self.datasets):
        supportedGranuleParams = dict([(key, value) for key, value in self.parameters.items() if key in ['bbox', 'startTime', 'endTime', 'format']])
        if persistentId == '':
            supportedGranuleParams['shortName'] = doc['Dataset-ShortName'][0]
        else:
            supportedGranuleParams['datasetId'] = persistentId
        item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.searchBasePath + 'granule?' + urllib.parse.urlencode(supportedGranuleParams), 'type': 'application/rss+xml', 'length': '0'}})

    if 'Dataset-ImageUrl' in doc and doc['Dataset-ImageUrl'][0] != '':
        item.append({'name': 'enclosure', 'attribute': {'url': doc['Dataset-ImageUrl'][0], 'type': 'image/jpg', 'length': '0'}})

    if 'DatasetLocationPolicy-Type' in doc and 'DatasetLocationPolicy-BasePath' in doc:
        url = dict(list(zip(doc['DatasetLocationPolicy-Type'], doc['DatasetLocationPolicy-BasePath'])))
        if 'LOCAL-OPENDAP' in url:
            item.append({'name': 'enclosure', 'attribute': {'url': url['LOCAL-OPENDAP'], 'type': 'text/html', 'length': '0'}})
        elif 'REMOTE-OPENDAP' in url:
            item.append({'name': 'enclosure', 'attribute': {'url': url['REMOTE-OPENDAP'], 'type': 'text/html', 'length': '0'}})
        if 'LOCAL-FTP' in url:
            item.append({'name': 'enclosure', 'attribute': {'url': url['LOCAL-FTP'], 'type': 'text/plain', 'length': '0'}})
        elif 'REMOTE-FTP' in url:
            item.append({'name': 'enclosure', 'attribute': {'url': url['REMOTE-FTP'], 'type': 'text/plain', 'length': '0'}})

    updated = None
    if 'DatasetMetaHistory-LastRevisionDateLong' in doc and doc['DatasetMetaHistory-LastRevisionDateLong'][0] != '':
        updated = DateUtility.convertTimeLongToIso(doc['DatasetMetaHistory-LastRevisionDateLong'][0])
    else:
        updated = datetime.datetime.utcnow().isoformat() + 'Z'
    item.append({'name': 'pubDate', 'value': updated})
    item.append({'name': 'guid', 'value': persistentId})

    item.append({'namespace': 'podaac', 'name': 'datasetId', 'value': doc['Dataset-PersistentId'][0]})
    item.append({'namespace': 'podaac', 'name': 'shortName', 'value': doc['Dataset-ShortName'][0]})

    if doc['DatasetCoverage-WestLon'][0] != '' and doc['DatasetCoverage-SouthLat'][0] != '' and doc['DatasetCoverage-EastLon'][0] != '' and doc['DatasetCoverage-NorthLat'][0] != '':
        item.append({'namespace': 'georss', 'name': 'where',
                     'value': {'namespace': 'gml', 'name': 'Envelope',
                               'value': [{'namespace': 'gml', 'name': 'lowerCorner',
                                          'value': ' '.join([doc['DatasetCoverage-WestLon'][0], doc['DatasetCoverage-SouthLat'][0]])},
                                         {'namespace': 'gml', 'name': 'upperCorner',
                                          'value': ' '.join([doc['DatasetCoverage-EastLon'][0], doc['DatasetCoverage-NorthLat'][0]])}]}})

    if 'DatasetCoverage-StartTimeLong' in doc and doc['DatasetCoverage-StartTimeLong'][0] != '':
        item.append({'namespace': 'time', 'name': 'start', 'value': DateUtility.convertTimeLongToIso(doc['DatasetCoverage-StartTimeLong'][0])})
    if 'DatasetCoverage-StopTimeLong' in doc and doc['DatasetCoverage-StopTimeLong'][0] != '':
        item.append({'namespace': 'time', 'name': 'end', 'value': DateUtility.convertTimeLongToIso(doc['DatasetCoverage-StopTimeLong'][0])})

    if 'full' in self.parameters and self.parameters['full']:
        if 'DatasetLocationPolicy-Type' in doc and 'DatasetLocationPolicy-BasePath' in doc:
            for i, x in enumerate(doc['DatasetLocationPolicy-Type']):
                item.append({'namespace': 'podaac', 'name': self._camelCaseStripHyphen(x.title()), 'value': doc['DatasetLocationPolicy-BasePath'][i]})
            del doc['DatasetLocationPolicy-Type']
            del doc['DatasetLocationPolicy-BasePath']

        multiValuedElementsKeys = ('DatasetRegion-', 'DatasetCharacter-', 'DatasetCitation-', 'DatasetContact-Contact-',
                                   'DatasetDatetime-', 'DatasetInteger-', 'DatasetParameter-', 'DatasetProject-',
                                   'DatasetReal-', 'DatasetResource-', 'DatasetSoftware-', 'DatasetSource-',
                                   'DatasetVersion-', 'Collection-')
        self._populateItemWithPodaacMetadata(doc, item, multiValuedElementsKeys)
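`_camelCaseStripHyphen` is not defined in this section. Since `.title()` is already applied at the call site ('LOCAL-FTP' becomes 'Local-Ftp'), a plausible sketch only needs to drop the hyphens to yield an element name like 'LocalFtp'; this is an assumption, not the confirmed code:

def _camelCaseStripHyphen(self, value):
    # Hypothetical sketch: the caller title-cases the value, so removing
    # hyphens yields e.g. 'Local-Ftp' -> 'LocalFtp'.
    return value.replace('-', '')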
def _populateItem(self, solrResponse, doc, item):
    item.append({'name': 'title', 'value': doc['_source']['name']})
    #item.append({'name': 'content', 'value': doc['Granule-Name'][0]})

    updated = None
    startTime = None
    if 'start_time' in doc['_source'] and doc['_source']['start_time'] is not None:
        updated = DateUtility.convertTimeLongToIso(doc['_source']['start_time'])
        startTime = updated
    else:
        updated = datetime.datetime.utcnow().isoformat() + 'Z'
    item.append({'name': 'updated', 'value': updated})

    item.append({'name': 'id', 'value': doc['_source']['identifier'] + ':' + doc['_source']['name']})

    parameters = {'identifier': doc['_source']['identifier'], 'name': doc['_source']['name']}
    parameters['full'] = 'true'
    item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.parse.urlencode(parameters), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata'}})
    del parameters['full']
    '''
    parameters['format'] = 'iso'
    item.append({'name': 'link', 'attribute': {'href': self.url+self.metadataBasePath + 'granule?' + urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata'}})
    parameters['format'] = 'fgdc'
    item.append({'name': 'link', 'attribute': {'href': self.url+self.metadataBasePath + 'granule?' + urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'FGDC Metadata'}})

    #item.append({'name': 'description', 'value': doc['Dataset-Description'][0]})
    #item.append({'name': 'link', 'value': self.portalUrl+'/'+doc['Dataset-ShortName'][0]})

    #link = self._getLinkToGranule(doc)
    #if link['href'] is not None:
    #    item.append({'name': 'link', 'attribute': link})

    if 'GranuleReference-Type' in doc:
        if 'Granule-DataFormat' in doc:
            type = 'application/x-' + doc['Granule-DataFormat'][0].lower()
        else:
            type = 'text/plain'
        #Look for ONLINE reference only
        granuleRefDict = dict([(doc['GranuleReference-Type'][i], doc['GranuleReference-Path'][i]) for i, x in enumerate(doc['GranuleReference-Status']) if x == "ONLINE"])
        if 'LOCAL-OPENDAP' in granuleRefDict:
            item.append({'name': 'link', 'attribute': {'href': granuleRefDict['LOCAL-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL'}})
        elif 'REMOTE-OPENDAP' in granuleRefDict:
            item.append({'name': 'link', 'attribute': {'href': granuleRefDict['REMOTE-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL'}})
        if 'LOCAL-FTP' in granuleRefDict:
            item.append({'name': 'link', 'attribute': {'href': granuleRefDict['LOCAL-FTP'], 'rel': 'enclosure', 'type': type, 'title': 'FTP URL'}})
        elif 'REMOTE-FTP' in granuleRefDict:
            item.append({'name': 'link', 'attribute': {'href': granuleRefDict['REMOTE-FTP'], 'rel': 'enclosure', 'type': type, 'title': 'FTP URL'}})
    '''
    item.append({'namespace': 'gibs', 'name': 'identifier', 'value': doc['_source']['identifier']})
    '''
    item.append({'namespace': 'podaac', 'name': 'shortName', 'value': doc['Dataset-ShortName'][0]})

    if 'GranuleSpatial-NorthLat' in doc and 'GranuleSpatial-EastLon' in doc and 'GranuleSpatial-SouthLat' in doc and 'GranuleSpatial-WestLon' in doc:
        item.append({'namespace': 'georss', 'name': 'where', 'value': {'namespace': 'gml', 'name': 'Envelope', 'value': [{'namespace': 'gml', 'name': 'lowerCorner', 'value': ' '.join([doc['GranuleSpatial-WestLon'][0], doc['GranuleSpatial-SouthLat'][0]])}, {'namespace': 'gml', 'name': 'upperCorner', 'value': ' '.join([doc['GranuleSpatial-EastLon'][0], doc['GranuleSpatial-NorthLat'][0]])}]}})
    '''
    if startTime is not None:
        item.append({'namespace': 'time', 'name': 'start', 'value': startTime})
    if 'stop_time' in doc['_source'] and doc['_source']['stop_time'] is not None:
        item.append({'namespace': 'time', 'name': 'end', 'value': DateUtility.convertTimeLongToIso(doc['_source']['stop_time'])})

    if 'full' in self.parameters and self.parameters['full']:
        self._populateItemWithAllMetadata(doc['_source'], item)
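The `DateUtility` conversions are referenced throughout but not defined here. The `+ 999` added to the end timestamp in `_generateUrl` suggests the "long" times are epoch milliseconds; under that assumption, `convertTimeLongToIso` might look like the following sketch (not the confirmed implementation):

import datetime

def convertTimeLongToIso(timeLong):
    # Assumption: timeLong is epoch milliseconds, UTC. Hypothetical sketch.
    return datetime.datetime.utcfromtimestamp(timeLong / 1000.0).isoformat() + 'Z'

print(convertTimeLongToIso(1374192000000))  # 2013-07-19T00:00:00Z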
def _populateItem(self, solrResponse, doc, item):
    persistentId = doc['Dataset-PersistentId'][0]
    idTuple = ('datasetId', persistentId)
    if persistentId == '':
        idTuple = ('shortName', doc['Dataset-ShortName'][0])

    item.append({'name': 'title', 'value': doc['Dataset-LongName'][0]})
    item.append({'name': 'content', 'value': doc['Dataset-Description'][0]})
    item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('full', 'true')])), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'PO.DAAC Metadata'}})
    item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'iso')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata'}})
    item.append({'name': 'link', 'attribute': {'href': self.url + self.metadataBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('format', 'gcmd')])), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'GCMD Metadata'}})

    #Only generate granule search link if dataset has granules
    if (doc['Dataset-ShortName'][0] in self.datasets):
        supportedGranuleParams = dict([(key, value) for key, value in self.parameters.iteritems() if key in ['bbox', 'startTime', 'endTime']])
        if persistentId == '':
            supportedGranuleParams['shortName'] = doc['Dataset-ShortName'][0]
        else:
            supportedGranuleParams['datasetId'] = persistentId
        item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.urlencode(supportedGranuleParams), 'rel': 'search', 'type': 'application/atom+xml', 'title': 'Granule Search'}})

    if 'Dataset-ImageUrl' in doc and doc['Dataset-ImageUrl'][0] != '':
        item.append({'name': 'link', 'attribute': {'href': doc['Dataset-ImageUrl'][0], 'rel': 'enclosure', 'type': 'image/jpg', 'title': 'Thumbnail'}})

    if 'DatasetLocationPolicy-Type' in doc and 'DatasetLocationPolicy-BasePath' in doc:
        url = dict(zip(doc['DatasetLocationPolicy-Type'], doc['DatasetLocationPolicy-BasePath']))
        if 'LOCAL-OPENDAP' in url:
            item.append({'name': 'link', 'attribute': {'href': url['LOCAL-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL'}})
        elif 'REMOTE-OPENDAP' in url:
            item.append({'name': 'link', 'attribute': {'href': url['REMOTE-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL'}})
        if 'LOCAL-FTP' in url:
            item.append({'name': 'link', 'attribute': {'href': url['LOCAL-FTP'], 'rel': 'enclosure', 'type': 'text/plain', 'title': 'FTP URL'}})
        elif 'REMOTE-FTP' in url:
            item.append({'name': 'link', 'attribute': {'href': url['REMOTE-FTP'], 'rel': 'enclosure', 'type': 'text/plain', 'title': 'FTP URL'}})

    if doc['DatasetPolicy-ViewOnline'][0] == 'Y' and doc['DatasetPolicy-AccessType-Full'][0] in ['OPEN', 'PREVIEW', 'SIMULATED', 'REMOTE']:
        portalUrl = self.portalUrl+'/'+doc['Dataset-ShortName'][0]
        item.append({'name': 'link', 'attribute': {'href': portalUrl, 'rel': 'enclosure', 'type': 'text/html', 'title': 'Dataset Information'}})

    updated = None
    if 'DatasetMetaHistory-LastRevisionDateLong' in doc and doc['DatasetMetaHistory-LastRevisionDateLong'][0] != '':
        updated = DateUtility.convertTimeLongToIso(doc['DatasetMetaHistory-LastRevisionDateLong'][0])
    else:
        updated = datetime.datetime.utcnow().isoformat()+'Z'
    item.append({'name': 'updated', 'value': updated})
    item.append({'name': 'id', 'value': persistentId})

    item.append({'namespace': 'podaac', 'name': 'datasetId', 'value': doc['Dataset-PersistentId'][0]})
    item.append({'namespace': 'podaac', 'name': 'shortName', 'value': doc['Dataset-ShortName'][0]})

    if doc['DatasetCoverage-WestLon'][0] != '' and doc['DatasetCoverage-SouthLat'][0] != '' and doc['DatasetCoverage-EastLon'][0] != '' and doc['DatasetCoverage-NorthLat'][0] != '':
        item.append({'namespace': 'georss', 'name': 'where', 'value': {'namespace': 'gml', 'name': 'Envelope', 'value': [{'namespace': 'gml', 'name': 'lowerCorner', 'value': ' '.join([doc['DatasetCoverage-WestLon'][0], doc['DatasetCoverage-SouthLat'][0]])}, {'namespace': 'gml', 'name': 'upperCorner', 'value': ' '.join([doc['DatasetCoverage-EastLon'][0], doc['DatasetCoverage-NorthLat'][0]])}]}})

    if 'DatasetCoverage-StartTimeLong' in doc and doc['DatasetCoverage-StartTimeLong'][0] != '':
        item.append({'namespace': 'time', 'name': 'start', 'value': DateUtility.convertTimeLongToIso(doc['DatasetCoverage-StartTimeLong'][0])})
    if 'DatasetCoverage-StopTimeLong' in doc and doc['DatasetCoverage-StopTimeLong'][0] != '':
        item.append({'namespace': 'time', 'name': 'end', 'value': DateUtility.convertTimeLongToIso(doc['DatasetCoverage-StopTimeLong'][0])})

    if 'full' in self.parameters and self.parameters['full']:
        if 'DatasetLocationPolicy-Type' in doc and 'DatasetLocationPolicy-BasePath' in doc:
            for i, x in enumerate(doc['DatasetLocationPolicy-Type']):
                item.append({'namespace': 'podaac', 'name': self._camelCaseStripHyphen(x.title()), 'value': doc['DatasetLocationPolicy-BasePath'][i]})
            del doc['DatasetLocationPolicy-Type']
            del doc['DatasetLocationPolicy-BasePath']

        multiValuedElementsKeys = ('DatasetRegion-', 'DatasetCharacter-', 'DatasetCitation-', 'DatasetContact-Contact-', 'DatasetDatetime-', 'DatasetInteger-', 'DatasetParameter-', 'DatasetProject-', 'DatasetReal-', 'DatasetResource-', 'DatasetSoftware-', 'DatasetSource-', 'DatasetVersion-', 'Collection-')
        self._populateItemWithPodaacMetadata(doc, item, multiValuedElementsKeys)
def _constructSolrQuery(self, startIndex, entriesPerPage, variables):
    queries = []
    sort = None
    filterQuery = None
    for key, value in variables.iteritems():
        #query = ''
        if key == 'startTime':
            startTime = DateUtility.convertISOToUTCTimestamp(value)
            if startTime is not None:
                query = 'stop_time:'
                query += '['+str(startTime)+'%20TO%20*]'
                queries.append(query)
        elif key == 'endTime':
            stopTime = DateUtility.convertISOToUTCTimestamp(value)
            if stopTime is not None:
                query = 'start_time:'
                query += '[*%20TO%20'+str(stopTime)+']'
                queries.append(query)
        elif key == 'keyword':
            newValue = urllib.quote(value)
            query = newValue
            queries.append(query)
        elif key == 'identifier':
            query = 'identifier:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'shortName':
            query = 'Dataset-ShortName-Full:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'platform':
            query = 'platform:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'instrument':
            query = 'instrument:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'fileFormat':
            query = 'DatasetPolicy-DataFormat-LowerCased:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'status':
            query = 'DatasetPolicy-AccessType-LowerCased:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'processLevel':
            query = 'Dataset-ProcessingLevel-LowerCased:'+value
            queries.append(query)
        elif key == 'sortBy':
            sortByMapping = {'timeDesc': 'start_time:desc', 'timeAsc': 'start_time:asc'}
            if value in sortByMapping.keys():
                sort = sortByMapping[value]
        elif key == 'bbox':
            filterQuery = self._constructBoundingBoxQuery(value)
        #if query != '':
        #    queries.append('%2B'+query)

    if len(queries) == 0:
        queries.append('*')

    query = 'q='+'+AND+'.join(queries)+'&from='+str(startIndex)+'&size='+str(entriesPerPage)

    if sort is not None:
        query += '&sort=' + sort
    if filterQuery is not None:
        query += '&' + filterQuery

    logging.debug('solr query: '+query)

    return query
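For reference, a keyword-only search through this builder produces a query string derived directly from the code above (`urllib.quote('sea surface')` yields 'sea%20surface'):

# e.g. _constructSolrQuery(0, 10, {'keyword': 'sea surface'}) returns:
#   q=sea%20surface&from=0&size=10
# No &sort= or bounding-box filter is appended unless sortBy/bbox were supplied.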
def _constructSolrQuery(self, startIndex, entriesPerPage, variables):
    queries = []
    sort = None
    filterQuery = None
    for key, value in variables.iteritems():
        #query = ''
        if key == 'startTime':
            startTime = DateUtility.convertISOToUTCTimestamp(value)
            if startTime is not None:
                query = 'DatasetCoverage-StopTimeLong-Long:'
                query += '['+str(startTime)+'%20TO%20*]'
                queries.append(query)
        elif key == 'endTime':
            stopTime = DateUtility.convertISOToUTCTimestamp(value)
            if stopTime is not None:
                query = 'DatasetCoverage-StartTimeLong-Long:'
                query += '[*%20TO%20'+str(stopTime)+']'
                queries.append(query)
        elif key == 'keyword':
            newValue = urllib.quote(value)
            query = 'SearchableText-LowerCased:('+newValue+')'
            queries.append(query)
        elif key == 'datasetId':
            query = 'Dataset-PersistentId:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'shortName':
            query = 'Dataset-ShortName-Full:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'satellite':
            query = 'DatasetSource-Source-ShortName-Full:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'instrument':
            query = 'DatasetSource-Sensor-ShortName-Full:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'fileFormat':
            query = 'DatasetPolicy-DataFormat-LowerCased:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'status':
            query = 'DatasetPolicy-AccessType-LowerCased:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'processLevel':
            query = 'Dataset-ProcessingLevel-LowerCased:'+value
            queries.append(query)
        elif key == 'sortBy':
            sortByMapping = {'timeDesc': 'DatasetCoverage-StartTimeLong-Long+desc',
                             'timeAsc': 'DatasetCoverage-StartTimeLong-Long+asc',
                             'popularityDesc': 'Dataset-AllTimePopularity+desc',
                             'popularityAsc': 'Dataset-AllTimePopularity+asc'}
            if value in sortByMapping.keys():
                sort = sortByMapping[value]
        elif key == 'bbox':
            filterQuery = self._constructBoundingBoxQuery(value)
        #if query != '':
        #    queries.append('%2B'+query)

    if len(queries) == 0:
        queries.append('*')

    query = 'q='+'+AND+'.join(queries) + \
        '&fq=DatasetPolicy-AccessType-Full:(OPEN+OR+PREVIEW+OR+SIMULATED+OR+REMOTE)+AND+DatasetPolicy-ViewOnline:Y' + \
        '&version=2.2&start='+str(startIndex)+'&rows='+str(entriesPerPage)+'&indent=on&wt=json'

    if sort is not None:
        query += '&sort=' + sort
    if filterQuery is not None:
        query += '&' + filterQuery

    logging.debug('solr query: '+query)

    return query
def _populateItem(self, solrResponse, doc, item):
    item.append({'name': 'title', 'value': doc['_source']['name']})
    #item.append({'name': 'content', 'value': doc['Granule-Name'][0]})

    updated = None
    startTime = None
    if 'start_time' in doc['_source'] and doc['_source']['start_time'] is not None:
        updated = DateUtility.convertTimeLongToIso(doc['_source']['start_time'])
        startTime = updated
    else:
        updated = datetime.datetime.utcnow().isoformat()+'Z'
    item.append({'name': 'updated', 'value': updated})

    item.append({'name': 'id', 'value': doc['_source']['identifier'] + ':' + doc['_source']['name']})

    parameters = {'identifier': doc['_source']['identifier'], 'name': doc['_source']['name']}
    parameters['full'] = 'true'
    item.append({'name': 'link', 'attribute': {'href': self.url+self.searchBasePath + 'granule?' + urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata'}})
    del parameters['full']
    '''
    parameters['format'] = 'iso'
    item.append({'name': 'link', 'attribute': {'href': self.url+self.metadataBasePath + 'granule?' + urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'ISO-19115 Metadata'}})
    parameters['format'] = 'fgdc'
    item.append({'name': 'link', 'attribute': {'href': self.url+self.metadataBasePath + 'granule?' + urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'text/xml', 'title': 'FGDC Metadata'}})

    #item.append({'name': 'description', 'value': doc['Dataset-Description'][0]})
    #item.append({'name': 'link', 'value': self.portalUrl+'/'+doc['Dataset-ShortName'][0]})

    #link = self._getLinkToGranule(doc)
    #if link['href'] is not None:
    #    item.append({'name': 'link', 'attribute': link})

    if 'GranuleReference-Type' in doc:
        if 'Granule-DataFormat' in doc:
            type = 'application/x-' + doc['Granule-DataFormat'][0].lower()
        else:
            type = 'text/plain'
        #Look for ONLINE reference only
        granuleRefDict = dict([(doc['GranuleReference-Type'][i], doc['GranuleReference-Path'][i]) for i,x in enumerate(doc['GranuleReference-Status']) if x=="ONLINE"])
        if 'LOCAL-OPENDAP' in granuleRefDict:
            item.append({'name': 'link', 'attribute': {'href': granuleRefDict['LOCAL-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL'}})
        elif 'REMOTE-OPENDAP' in granuleRefDict:
            item.append({'name': 'link', 'attribute': {'href': granuleRefDict['REMOTE-OPENDAP'], 'rel': 'enclosure', 'type': 'text/html', 'title': 'OPeNDAP URL'}})
        if 'LOCAL-FTP' in granuleRefDict:
            item.append({'name': 'link', 'attribute': {'href': granuleRefDict['LOCAL-FTP'], 'rel': 'enclosure', 'type': type, 'title': 'FTP URL'}})
        elif 'REMOTE-FTP' in granuleRefDict:
            item.append({'name': 'link', 'attribute': {'href': granuleRefDict['REMOTE-FTP'], 'rel': 'enclosure', 'type': type, 'title': 'FTP URL'}})
    '''
    item.append({'namespace': 'gibs', 'name': 'identifier', 'value': doc['_source']['identifier']})
    '''
    item.append({'namespace': 'podaac', 'name': 'shortName', 'value': doc['Dataset-ShortName'][0]})

    if 'GranuleSpatial-NorthLat' in doc and 'GranuleSpatial-EastLon' in doc and 'GranuleSpatial-SouthLat' in doc and 'GranuleSpatial-WestLon' in doc:
        item.append({'namespace': 'georss', 'name': 'where', 'value': {'namespace': 'gml', 'name': 'Envelope', 'value': [{'namespace': 'gml', 'name': 'lowerCorner', 'value': ' '.join([doc['GranuleSpatial-WestLon'][0], doc['GranuleSpatial-SouthLat'][0]])}, {'namespace': 'gml', 'name': 'upperCorner', 'value': ' '.join([doc['GranuleSpatial-EastLon'][0], doc['GranuleSpatial-NorthLat'][0]])}]}})
    '''
    if startTime is not None:
        item.append({'namespace': 'time', 'name': 'start', 'value': startTime})
    if 'stop_time' in doc['_source'] and doc['_source']['stop_time'] is not None:
        item.append({'namespace': 'time', 'name': 'end', 'value': DateUtility.convertTimeLongToIso(doc['_source']['stop_time'])})

    if 'full' in self.parameters and self.parameters['full']:
        self._populateItemWithAllMetadata(doc['_source'], item)
def _populateItem(self, solrResponse, doc, item):
    item.append({'name': 'title', 'value': doc['Granule-Name'][0]})
    item.append({'name': 'description', 'value': doc['Granule-Name'][0]})
    #item.append({'name': 'description', 'value': doc['Dataset-Description'][0]})
    #item.append({'name': 'link', 'value': self.portalUrl+'/'+doc['Dataset-ShortName'][0]})

    updated = None
    startTime = None
    if 'Granule-StartTimeLong' in doc and doc['Granule-StartTimeLong'][0] != '':
        updated = DateUtility.convertTimeLongToIso(doc['Granule-StartTimeLong'][0])
        startTime = updated
    else:
        updated = datetime.datetime.utcnow().isoformat() + 'Z'
    item.append({'name': 'pubDate', 'value': updated})

    item.append({'name': 'guid', 'value': doc['Dataset-PersistentId'][0] + ':' + doc['Granule-Name'][0]})

    link = self._getLinkToGranule(doc)
    if link is not None:
        item.append({'name': 'link', 'value': link})

    parameters = {'datasetId': doc['Dataset-PersistentId'][0], 'granuleName': doc['Granule-Name'][0]}
    parameters['full'] = 'true'
    parameters['format'] = 'rss'
    item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.searchBasePath + 'granule?' + urllib.urlencode(parameters), 'type': 'application/rss+xml', 'length': '0'}})
    del parameters['full']
    parameters['format'] = 'iso'
    item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.metadataBasePath + 'granule?' + urllib.urlencode(parameters), 'type': 'text/xml', 'length': '0'}})
    parameters['format'] = 'fgdc'
    item.append({'name': 'enclosure', 'attribute': {'url': self.url + self.metadataBasePath + 'granule?' + urllib.urlencode(parameters), 'type': 'text/xml', 'length': '0'}})

    if 'GranuleReference-Type' in doc:
        if 'Granule-DataFormat' in doc:
            type = 'application/x-' + doc['Granule-DataFormat'][0].lower()
        else:
            type = 'text/plain'
        #Look for ONLINE reference only
        granuleRefDict = dict([(doc['GranuleReference-Type'][i], doc['GranuleReference-Path'][i]) for i, x in enumerate(doc['GranuleReference-Status']) if x == "ONLINE"])
        if 'LOCAL-OPENDAP' in granuleRefDict:
            item.append({'name': 'enclosure', 'attribute': {'url': granuleRefDict['LOCAL-OPENDAP'], 'type': 'text/html', 'length': '0'}})
        elif 'REMOTE-OPENDAP' in granuleRefDict:
            item.append({'name': 'enclosure', 'attribute': {'url': granuleRefDict['REMOTE-OPENDAP'], 'type': 'text/html', 'length': '0'}})
        if 'LOCAL-FTP' in granuleRefDict:
            item.append({'name': 'enclosure', 'attribute': {'url': granuleRefDict['LOCAL-FTP'], 'type': type, 'length': '0'}})
        elif 'REMOTE-FTP' in granuleRefDict:
            item.append({'name': 'enclosure', 'attribute': {'url': granuleRefDict['REMOTE-FTP'], 'type': type, 'length': '0'}})

    item.append({'namespace': 'podaac', 'name': 'datasetId', 'value': doc['Dataset-PersistentId'][0]})
    item.append({'namespace': 'podaac', 'name': 'shortName', 'value': doc['Dataset-ShortName'][0]})

    if 'GranuleSpatial-NorthLat' in doc and 'GranuleSpatial-EastLon' in doc and 'GranuleSpatial-SouthLat' in doc and 'GranuleSpatial-WestLon' in doc:
        item.append({'namespace': 'georss', 'name': 'where', 'value': {'namespace': 'gml', 'name': 'Envelope', 'value': [{'namespace': 'gml', 'name': 'lowerCorner', 'value': ' '.join([doc['GranuleSpatial-WestLon'][0], doc['GranuleSpatial-SouthLat'][0]])}, {'namespace': 'gml', 'name': 'upperCorner', 'value': ' '.join([doc['GranuleSpatial-EastLon'][0], doc['GranuleSpatial-NorthLat'][0]])}]}})

    if 'Granule-StartTimeLong' in doc and doc['Granule-StartTimeLong'][0] != '':
        item.append({'namespace': 'time', 'name': 'start', 'value': DateUtility.convertTimeLongToIso(doc['Granule-StartTimeLong'][0])})
    if 'Granule-StopTimeLong' in doc and doc['Granule-StopTimeLong'][0] != '':
        item.append({'namespace': 'time', 'name': 'end', 'value': DateUtility.convertTimeLongToIso(doc['Granule-StopTimeLong'][0])})

    if 'full' in self.parameters and self.parameters['full']:
        multiValuedElementsKeys = ('GranuleArchive-', 'GranuleReference-')
        self._populateItemWithPodaacMetadata(doc, item, multiValuedElementsKeys)
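`_getLinkToGranule` is called above but not shown in this section. A purely hypothetical sketch that mirrors the ONLINE-only reference filtering used elsewhere in these writers; the real implementation and its preference order are assumptions here:

def _getLinkToGranule(self, doc):
    # Hypothetical sketch: return one ONLINE reference path, or None.
    if 'GranuleReference-Type' in doc:
        refs = dict([(doc['GranuleReference-Type'][i], doc['GranuleReference-Path'][i])
                     for i, x in enumerate(doc['GranuleReference-Status']) if x == 'ONLINE'])
        # Assumed preference order, for illustration only.
        for refType in ('LOCAL-FTP', 'REMOTE-FTP', 'LOCAL-OPENDAP', 'REMOTE-OPENDAP'):
            if refType in refs:
                return refs[refType]
    return None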
def _constructSolrQuery(self, startIndex, entriesPerPage, variables):
    #set default sort order
    sort = 'desc'
    filterQuery = None
    queries = []
    for key, value in variables.items():
        #query = ''
        if key == 'startTime':
            startTime = DateUtility.convertISOToUTCTimestamp(value)
            if startTime is not None:
                query = 'stop_time:'
                query += '[' + str(startTime) + ' TO *]'
                queries.append(query)
        elif key == 'endTime':
            stopTime = DateUtility.convertISOToUTCTimestamp(value)
            if stopTime is not None:
                query = 'start_time:'
                query += '[* TO ' + str(stopTime) + ']'
                queries.append(query)
        elif key == 'keyword':
            newValue = urllib.parse.quote(value)
            query = 'SearchableText-LowerCased:(' + newValue + ')'
            queries.append(query)
        elif key == 'identifier':
            query = 'identifier:"' + value + '"'
            queries.append(query)
        elif key == 'shortName':
            query = 'Dataset-ShortName-Full:' + self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'name':
            query = 'name:"' + value + '"'
            queries.append(query)
        elif key == 'granuleIds':
            granuleIds = []
            for granuleId in value:
                granuleIds.append(str(granuleId))
            query = 'Granule-Id:(' + '+OR+'.join(granuleIds) + ')'
            queries.append(query)
            startIndex = 0
        elif key == 'sortBy':
            sortByMapping = {'timeAsc': 'asc'}
            if value in list(sortByMapping.keys()):
                sort = sortByMapping[value]
        elif key == 'bbox':
            filterQuery = self._constructBoundingBoxQuery(value)
        #if query != '':
        #    queries.append('%2B'+query)

    if len(queries) == 0:
        queries.append('*')

    # Note: this URL-style string is assembled only for the debug log below.
    # The method returns the Elasticsearch JSON body built from 'queries',
    # so filterQuery is not applied to the returned request.
    query = 'q=' + '+AND+'.join(queries) + '&from=' + str(startIndex) + '&size=' + str(entriesPerPage)

    if filterQuery is not None:
        query += '&' + filterQuery

    logging.debug('solr query: ' + query)

    return json.dumps({
        'query': {
            'filtered': {
                'query': {
                    'query_string': {
                        'query': ' AND '.join(queries)
                    }
                },
                'filter': {
                    'term': {
                        'status': 'online'
                    }
                }
            }
        },
        'from': startIndex,
        'size': entriesPerPage,
        'sort': [{
            'start_time': {
                'order': sort
            }
        }]
    })
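For a toy input, the request body returned above has the following shape (the 'filtered' query is legacy Elasticsearch syntax; shown here only to make the structure concrete, with an invented granule name):

import json

# _constructSolrQuery(0, 10, {'name': 'ascat_20130719.nc'})  (toy values)
# returns json.dumps of:
body = {
    'query': {
        'filtered': {
            'query': {'query_string': {'query': 'name:"ascat_20130719.nc"'}},
            'filter': {'term': {'status': 'online'}},
        }
    },
    'from': 0,
    'size': 10,
    'sort': [{'start_time': {'order': 'desc'}}],
}
print(json.dumps(body, indent=2))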
def _constructSolrQuery(self, startIndex, entriesPerPage, variables):
    queries = []
    sort = None
    filterQuery = None
    for key, value in variables.items():
        #query = ''
        if key == 'startTime':
            startTime = DateUtility.convertISOToUTCTimestamp(value)
            if startTime is not None:
                query = 'DatasetCoverage-StopTimeLong-Long:'
                query += '['+str(startTime)+'%20TO%20*]'
                queries.append(query)
        elif key == 'endTime':
            stopTime = DateUtility.convertISOToUTCTimestamp(value)
            if stopTime is not None:
                query = 'DatasetCoverage-StartTimeLong-Long:'
                query += '[*%20TO%20'+str(stopTime)+']'
                queries.append(query)
        elif key == 'keyword':
            newValue = urllib.parse.quote(value)
            query = 'SearchableText-LowerCased:('+newValue+')'
            queries.append(query)
        elif key == 'datasetId':
            query = 'Dataset-PersistentId:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'shortName':
            query = 'Dataset-ShortName-Full:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'satellite':
            query = 'DatasetSource-Source-ShortName-Full:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'instrument':
            query = 'DatasetSource-Sensor-ShortName-Full:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'fileFormat':
            query = 'DatasetPolicy-DataFormat-LowerCased:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'status':
            query = 'DatasetPolicy-AccessType-LowerCased:'+self._urlEncodeSolrQueryValue(value)
            queries.append(query)
        elif key == 'processLevel':
            query = 'Dataset-ProcessingLevel-LowerCased:'+value
            queries.append(query)
        elif key == 'sortBy':
            sortByMapping = {'timeDesc': 'DatasetCoverage-StartTimeLong-Long+desc',
                             'timeAsc': 'DatasetCoverage-StartTimeLong-Long+asc',
                             'popularityDesc': 'Dataset-AllTimePopularity+desc',
                             'popularityAsc': 'Dataset-AllTimePopularity+asc'}
            if value in list(sortByMapping.keys()):
                sort = sortByMapping[value]
        elif key == 'bbox':
            filterQuery = self._constructBoundingBoxQuery(value)
        #if query != '':
        #    queries.append('%2B'+query)

    if len(queries) == 0:
        queries.append('*')

    query = 'q='+'+AND+'.join(queries) + \
        '&fq=DatasetPolicy-AccessType-Full:(OPEN+OR+PREVIEW+OR+SIMULATED+OR+REMOTE)+AND+DatasetPolicy-ViewOnline:Y' + \
        '&version=2.2&start='+str(startIndex)+'&rows='+str(entriesPerPage)+'&indent=on&wt=json'

    if sort is not None:
        query += '&sort=' + sort
    if filterQuery is not None:
        query += '&' + filterQuery

    logging.debug('solr query: '+query)

    return query
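For reference, a keyword-only dataset search through this builder produces a query string derived directly from the code above:

# e.g. _constructSolrQuery(0, 10, {'keyword': 'sst'}) returns (wrapped here
# for readability; the actual value is a single line):
#   q=SearchableText-LowerCased:(sst)
#   &fq=DatasetPolicy-AccessType-Full:(OPEN+OR+PREVIEW+OR+SIMULATED+OR+REMOTE)+AND+DatasetPolicy-ViewOnline:Y
#   &version=2.2&start=0&rows=10&indent=on&wt=json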