def get(self, requestHandler):
    super(Writer, self).get(requestHandler)
    try:
        httpUtility = HttpUtility()
        result = httpUtility.getResponse(self.url, self.onResponse)
    except BaseException as exception:
        raise exception
def _getResponse(self, startIndex, entriesPerPage, parameters, facets):
    query = self._constructQuery(startIndex, entriesPerPage, parameters, facets)
    url = self._configuration.get('solr', 'datasetUrl')
    httpUtility = HttpUtility()
    httpUtility.getResponse(url + '/_search/?' + query, self._onResponse)
def _getResponse(self, startIndex, entriesPerPage, variables):
    query = self._constructSolrQuery(startIndex, entriesPerPage, variables)
    url = self._configuration.get('solr', 'datasetUrl')
    httpUtility = HttpUtility()
    httpUtility.getResponse(url + '/_search/?' + query, self._onSolrResponse)
def _getSolrResponse(self, startIndex, entriesPerPage, variables):
    query = self._constructSolrQuery(startIndex, entriesPerPage, variables)
    url = self._configuration.get('solr', 'granuleUrl')
    httpUtility = HttpUtility()
    httpUtility.getResponse(url + '/select/?' + query, self._onSolrGranuleResponse)
def _getHasGranuleResponse(self, callback):
    # Match all granules but return no rows; only the 'identifier' terms facet is needed.
    url = self._configuration.get('solr', 'granuleUrl')
    httpUtility = HttpUtility()
    return httpUtility.getResponse(
        url + '/_search',
        callback,
        '{"query" : {"match_all" : {}}, "size" : 0, "facets" : { "identifier" : { "terms" : {"field" : "identifier"}}}}'
    )
def _getSolrHasGranuleResponse(self, callback):
    # Facet over Dataset-ShortName-Full with rows=0 to determine which datasets have granules.
    url = self._configuration.get('solr', 'granuleUrl')
    httpUtility = HttpUtility()
    return httpUtility.getResponse(
        url + '/select?q=*:*&facet=true&facet.field=Dataset-ShortName-Full&facet.limit=-1&rows=0&indent=on&wt=json&version=2.2',
        callback
    )
def post(self, requestHandler):
    super(Writer, self).post(requestHandler)

    # Assign a unique id and submission timestamp before indexing the document in Solr.
    data = json.loads(requestHandler.request.body)
    data["id"] = str(uuid.uuid4())
    data["submit_date"] = datetime.utcnow().isoformat() + "Z"

    httpUtility = HttpUtility()
    solrUrl = self._configuration.get('solr', 'url') + "/update/json/docs?commit=true"
    result = httpUtility.getResponse(
        solrUrl,
        self.onResponse,
        body=json.dumps(data),
        headers={'Content-Type': 'application/json'}
    )
def get(self, requestHandler):
    super(PassThroughWriter, self).get(requestHandler)

    url = None
    try:
        url = requestHandler.get_argument('url')
    except:
        raise Exception('Missing url.')

    if self._isAllowed(url) == False:
        raise Exception('Not allowed to connect to that url: ' + url)

    #io = None
    try:
        """
        logging.debug('url: '+url)
        io = urllib2.urlopen(url)
        message = io.info()
        for headerEntry in message.headers:
            pair = headerEntry.split(':')
            logging.debug('key: '+pair[0]+', value: '+pair[1].replace('\r\n', ''))
            requestHandler.set_header(pair[0], pair[1].replace('\r\n', ''))
        while True:
            data = io.read()
            if data == '':
                break
            else:
                requestHandler.write(data)
        """
        httpUtility = HttpUtility()
        result = httpUtility.getResponse(url, self.onResponse)
        """
        for header in result['header']:
            logging.debug('header: '+header[0]+':'+header[1])
            requestHandler.set_header(header[0], header[1])
        requestHandler.write(result['data'])
        """
    except BaseException as exception:
        raise exception
def _onSolrDetermineProcessLevelResponse(self, response):
    try:
        #Determine dataset processing level
        processingLevel = None
        solrJson = json.loads(response.body)
        if len(solrJson['response']['docs']) >= 1:
            if 'bbox' in self.variables:
                processingLevel = solrJson['response']['docs'][0]['Dataset-ProcessingLevel-Full'][0]
                if processingLevel is not None and processingLevel.find('2') != -1:
                    if self._configuration.get('service', 'bbox') == 'l2':
                        #Call Matt's L2 Search Service
                        #raise Exception(self._configuration.get('service', 'l2')+'?'+requestHandler.request.query)
                        httpUtility = HttpUtility()
                        url = self._configuration.get('service', 'l2') + '?'
                        if 'format' not in self.requestHandler.request.arguments:
                            url += 'format=atom&'
                        url += self.requestHandler.request.query
                        logging.debug("Calling L2 Service: " + url)
                        result = httpUtility.getResponse(url, self._onL2Response)
                    else:
                        points = self.variables['bbox'].split(',')
                        if len(points) == 4:
                            spatialSearch = SpatialSearch(
                                self._configuration.get('service', 'database')
                            )
                            spatialResult = spatialSearch.searchGranules(
                                int(self.startIndex),
                                int(self.entriesPerPage),
                                float(points[0]),
                                float(points[1]),
                                float(points[2]),
                                float(points[3])
                            )
                            logging.debug("Granule spatial search returned")
                            #if len(spatialResult[0]) > 0:
                            self.variables['granuleIds'] = spatialResult[0]
                            self.variables['granuleIdsFound'] = spatialResult[1]
                            del self.variables['bbox']

                            solrJson = {'responseHeader': {'params': {}}, 'response': {}}
                            solrJson['response']['numFound'] = int(self.variables['granuleIdsFound'])
                            solrJson['response']['start'] = int(self.startIndex)
                            solrJson['responseHeader']['params']['rows'] = int(self.entriesPerPage)
                            solrJson['response']['docs'] = []
                            for name in self.variables['granuleIds']:
                                solrJson['response']['docs'].append({'Granule-Name': [name]})
                            solrResponse = json.dumps(solrJson)

                            searchText = ''
                            if 'keyword' in self.variables:
                                searchText = self.variables['keyword']
                            openSearchResponse = self._generateOpenSearchResponse(
                                solrResponse,
                                searchText,
                                self._configuration.get('service', 'url') + self.requestHandler.request.path,
                                self.searchParameters,
                                self.pretty
                            )
                            self.requestHandler.set_header("Content-Type", "application/xml")
                            #requestHandler.set_header("Content-Type", "application/rss+xml")
                            #requestHandler.write(solrResponse)
                            self.requestHandler.write(openSearchResponse)
                            self.requestHandler.finish()
                else:
                    #Dataset is not an L2 dataset so handle search via Solr
                    try:
                        self._getSolrResponse(self.startIndex, self.entriesPerPage, self.variables)
                    except:
                        logging.exception('Failed to get solr response.')
            else:
                #Not a bounding box search so handle search via Solr
                try:
                    self._getSolrResponse(self.startIndex, self.entriesPerPage, self.variables)
                except:
                    logging.exception('Failed to get solr response.')
        else:
            #Dataset metadata cannot be retrieved so return an empty search result
            solrJson = {'responseHeader': {'params': {}}, 'response': {}}
            solrJson['response']['numFound'] = 0
            solrJson['response']['start'] = int(self.startIndex)
            solrJson['responseHeader']['params']['rows'] = int(self.entriesPerPage)
            solrJson['response']['docs'] = []
            solrResponse = json.dumps(solrJson)
            self._writeResponse(solrResponse)
    except BaseException as exception:
        logging.exception('Failed to determine dataset processing level for bbox search ' + str(exception))
        self._handleException(str(exception))
def _getSingleSolrDatasetResponse(self, variables, callback):
    query = self._constructSingleSolrDatasetQuery(variables)
    url = self._configuration.get('solr', 'datasetUrl')
    httpUtility = HttpUtility()
    return httpUtility.getResponse(url + '/select/?' + query, callback)