def doSearch(self, query, search):
    """Run *query* against the Faroo web-search API.

    :param query: raw (unescaped) query string.
    :param search: search identifier stored on each result's ``searchID``.
    :return: list of ``SearchResult`` (rank starts at 1), or ``None`` on
        any failure or when the response carries no ``results`` key.
    """
    try:
        key = ''
        query = urllib.quote(query)
        user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
        url = 'http://www.faroo.com/api?q=' + query + '&start=1&length=10&l=en&src=web&f=json&key=' + key
        request = urllib2.Request(url)
        request.add_header('User-Agent', user_agent)
        request_opener = urllib2.build_opener()
        response = request_opener.open(request)
        try:
            response_data = response.read()
        finally:
            # FIX: the response was never closed (socket leak).
            response.close()
        json_result = json.loads(response_data)
        result_list = json_result.get('results')
        if result_list is None:
            return None
        farooResults = list()
        # enumerate replaces the hand-rolled counter; ranks are 1-based.
        for rank, result in enumerate(result_list, 1):
            lnk = SearchResult()
            lnk.searchID = search
            lnk.title = result['title']
            lnk.url = result['url']
            lnk.snippet = result.get('kwic')
            lnk.rank = rank
            farooResults.append(lnk)
        return farooResults
    except Exception:
        # FIX: bare `except:` also trapped SystemExit/KeyboardInterrupt.
        # Best-effort contract: any network/JSON/key failure yields None.
        return None
def doSearch(self, query, search):
    """Query the Faroo web-search API and collect the first page of hits.

    :param query: raw (unescaped) query string.
    :param search: identifier copied onto every result's ``searchID``.
    :return: list of ``SearchResult`` objects ranked from 1, or ``None``
        when the call fails or the JSON has no ``results`` entry.
    """
    try:
        key = ''
        query = urllib.quote(query)
        user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
        url = 'http://www.faroo.com/api?q=' + query + '&start=1&length=10&l=en&src=web&f=json&key=' + key
        request = urllib2.Request(url)
        request.add_header('User-Agent', user_agent)
        request_opener = urllib2.build_opener()
        response = request_opener.open(request)
        try:
            response_data = response.read()
        finally:
            # FIX: close the HTTP response (previously leaked).
            response.close()
        json_result = json.loads(response_data)
        result_list = json_result.get('results')
        if result_list is None:
            return None
        farooResults = list()
        # 1-based ranking via enumerate instead of a manual counter.
        for rank, result in enumerate(result_list, 1):
            lnk = SearchResult()
            lnk.searchID = search
            lnk.title = result['title']
            lnk.url = result['url']
            lnk.snippet = result.get('kwic')
            lnk.rank = rank
            farooResults.append(lnk)
        return farooResults
    except Exception:
        # FIX: narrowed from bare `except:` so SystemExit and
        # KeyboardInterrupt propagate; failures still return None.
        return None
def doSearch(self, query, search):
    """Query the Bing Azure Datamarket web-search API (search_type: Web).

    Other search types exist (Image, News, Video) but this method is
    hard-wired to the Web endpoint.

    :param query: raw (unescaped) query string.
    :param search: identifier stored on each result's ``searchID``.
    :return: list of ``SearchResult`` ranked from 1, or ``None`` on failure.
    """
    try:
        ## INSERT BING KEY HERE
        key = ''
        query = urllib.quote(query)
        user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
        # Create credential for authentication: Basic auth with an empty
        # user name and the API key as password (Python 2 base64 codec;
        # [:-1] strips the trailing newline the codec appends).
        credentials = (':%s' % key).encode('base64')[:-1]
        auth = 'Basic %s' % credentials
        url = 'https://api.datamarket.azure.com/Data.ashx/Bing/Search/Web?Query=%27' + query + '%27&$top=10&$format=json'
        request = urllib2.Request(url)
        request.add_header('Authorization', auth)
        request.add_header('User-Agent', user_agent)
        request_opener = urllib2.build_opener()
        response = request_opener.open(request)
        try:
            response_data = response.read()
        finally:
            # FIX: the response was never closed (socket leak).
            response.close()
        json_result = json.loads(response_data)
        result_d = json_result.get('d')
        if result_d is None:
            return None
        result_list = result_d.get('results')
        # FIX: result_list was iterated without a None check; a missing
        # 'results' key raised TypeError that the bare except masked.
        if result_list is None:
            return None
        # Build the result list.
        bingResults = list()
        for rank, result in enumerate(result_list, 1):
            lnk = SearchResult()
            lnk.searchID = search
            lnk.title = result['Title']
            lnk.url = result['Url']
            lnk.snippet = result['Description']
            lnk.rank = rank
            bingResults.append(lnk)
        return bingResults
    except Exception:
        # FIX: narrowed from bare `except:`; best-effort -> None.
        return None
def doSearch(self, query, search):
    """Search the Bing Azure Datamarket Web endpoint for *query*.

    search_type: Web, Image, News, Video (only Web is used here).

    :param query: raw (unescaped) query string.
    :param search: identifier copied onto every result's ``searchID``.
    :return: list of ``SearchResult`` (1-based rank), or ``None`` on any
        failure or malformed response.
    """
    try:
        ## INSERT BING KEY HERE
        key = ''
        query = urllib.quote(query)
        user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
        # Create credential for authentication (Basic, key as password).
        # Python 2 'base64' codec adds a trailing '\n'; [:-1] removes it.
        credentials = (':%s' % key).encode('base64')[:-1]
        auth = 'Basic %s' % credentials
        url = 'https://api.datamarket.azure.com/Data.ashx/Bing/Search/Web?Query=%27' + query + '%27&$top=10&$format=json'
        request = urllib2.Request(url)
        request.add_header('Authorization', auth)
        request.add_header('User-Agent', user_agent)
        request_opener = urllib2.build_opener()
        response = request_opener.open(request)
        try:
            response_data = response.read()
        finally:
            # FIX: close the HTTP response (previously leaked).
            response.close()
        json_result = json.loads(response_data)
        result_d = json_result.get('d')
        if result_d is None:
            return None
        result_list = result_d.get('results')
        # FIX: guard against a missing 'results' key — the original
        # iterated None and relied on the bare except to hide the error.
        if result_list is None:
            return None
        # Build the result list.
        bingResults = list()
        for rank, result in enumerate(result_list, 1):
            lnk = SearchResult()
            lnk.searchID = search
            lnk.title = result['Title']
            lnk.url = result['Url']
            lnk.snippet = result['Description']
            lnk.rank = rank
            bingResults.append(lnk)
        return bingResults
    except Exception:
        # FIX: bare `except:` replaced; interrupts now propagate.
        return None
def doSearch(self, query, search):
    """Run *query* against the Google Custom Search (CSE) JSON API.

    :param query: raw (unescaped) query string.
    :param search: identifier stored on each result's ``searchID``.
    :return: list of ``SearchResult`` ranked from 1; ``None`` when the
        request fails (including HTTP errors) or no ``items`` are returned.
    """
    try:
        key = ''
        cx = ''
        query = urllib.quote(query)
        try:
            user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
            url = 'https://www.googleapis.com/customsearch/v1?key=' + key + '&cx=' + cx + '&q=' + query
            request = urllib2.Request(url)
            request.add_header('User-Agent', user_agent)
            request_opener = urllib2.build_opener()
            response = request_opener.open(request)
            try:
                response_data = response.read()
            finally:
                # FIX: the response was never closed (socket leak).
                response.close()
            json_result = json.loads(response_data)
            result_list = json_result.get('items')
            if result_list is None:
                return None
        except urllib2.HTTPError:
            # CSE signals quota/key problems via HTTP errors; treat as
            # "no results" rather than crashing the caller.
            return None
        googleResults = list()
        # 1-based ranking via enumerate instead of a manual counter.
        for rank, result in enumerate(result_list, 1):
            lnk = SearchResult()
            lnk.searchID = search
            lnk.title = result['title']
            lnk.url = result['link']
            lnk.snippet = result['snippet']
            lnk.rank = rank
            googleResults.append(lnk)
        return googleResults
    except Exception:
        # FIX: narrowed from bare `except:`; best-effort -> None.
        return None
def doSearch(self, query, search):
    """Query the Google Custom Search JSON API and collect its items.

    :param query: raw (unescaped) query string.
    :param search: identifier copied onto every result's ``searchID``.
    :return: list of ``SearchResult`` (rank starts at 1), or ``None`` on
        HTTP error, parse failure, or an empty ``items`` field.
    """
    try:
        key = ''
        cx = ''
        query = urllib.quote(query)
        try:
            user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
            url = 'https://www.googleapis.com/customsearch/v1?key=' + key + '&cx=' + cx + '&q=' + query
            request = urllib2.Request(url)
            request.add_header('User-Agent', user_agent)
            request_opener = urllib2.build_opener()
            response = request_opener.open(request)
            try:
                response_data = response.read()
            finally:
                # FIX: close the HTTP response (previously leaked).
                response.close()
            json_result = json.loads(response_data)
            result_list = json_result.get('items')
            if result_list is None:
                return None
        except urllib2.HTTPError:
            # Quota exhaustion / bad key arrive as HTTP errors; map to None.
            return None
        googleResults = list()
        # enumerate replaces the hand-rolled rank counter.
        for rank, result in enumerate(result_list, 1):
            lnk = SearchResult()
            lnk.searchID = search
            lnk.title = result['title']
            lnk.url = result['link']
            lnk.snippet = result['snippet']
            lnk.rank = rank
            googleResults.append(lnk)
        return googleResults
    except Exception:
        # FIX: bare `except:` replaced so interrupts propagate.
        return None
def doSearch(self, query, search):
    """POST *query* to a local Carrot2 DCS instance (etools source).

    :param query: raw query string (form-encoded by urlencode below).
    :param search: identifier stored on each result's ``searchID``.
    :return: list of ``SearchResult`` ranked from 1, or ``None`` on any
        failure or when the response has no ``documents`` key.
    """
    try:
        mydata = [('dcs.source', 'etools'), ('dcs.output.format', 'JSON'), ('results', '10'), ('query', query)]
        url = 'http://localhost:9000/dcs/rest'
        user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
        mydata = urllib.urlencode(mydata)
        request = urllib2.Request(url, mydata)
        request.add_header('User-Agent', user_agent)
        request.add_header('Content-type', 'application/x-www-form-urlencoded')
        response = urllib2.urlopen(request)
        try:
            response_data = response.read()
        finally:
            # FIX: the urlopen response was never closed (socket leak).
            response.close()
        json_result = json.loads(response_data)
        result_list = json_result.get('documents')
        if result_list is None:
            return None
        carrotResults = list()
        # 1-based ranking via enumerate instead of a manual counter.
        for rank, result in enumerate(result_list, 1):
            lnk = SearchResult()
            lnk.searchID = search
            lnk.title = result['title']
            lnk.url = result['url']
            lnk.snippet = result.get('snippet')
            lnk.rank = rank
            carrotResults.append(lnk)
        return carrotResults
    except Exception:
        # FIX: narrowed from bare `except:`; any DCS/network/JSON failure
        # still yields None (best-effort contract preserved).
        return None