def testBooleanExpression(queriesAndResultsPath, binary_path): #Start the engine server args = [ binary_path, '--config-file=./boolean-expression-test/config.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) #construct the query #format : phrase,proximity||rid1 rid2 rid3 ...ridn failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: value=line.split('||') phrase=value[0] expectedRecordIds=(value[1]).split() query='http://localhost:' + port + '/search?q='+ urllib.quote(phrase) print query try: response = urllib2.urlopen(query).read() response_json = json.loads(response) except urllib2.HTTPError as err: failCount += 1 #print response_json['results'] #check the result failCount += checkResult(query, response_json['results'], expectedRecordIds) test_lib.killServer(serverHandle) print '==============================' return failCount
def testPhraseSearch(queriesAndResultsPath, binary_path): #Start the engine server args = [ binary_path, '--config-file=./positionalRanking_phraseSearch/conf-positionalRanking.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) #construct the query #format : phrase,proximity||rid1 rid2 rid3 ...ridn failTotal = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: value=line.split('||') phrase=value[0] expectedRecordIds=(value[1]).split() scores = (value[2]).split() query='http://localhost:' + port + '/search?q='+ urllib.quote(phrase) print query response = urllib2.urlopen(query).read() response_json = json.loads(response) #print response_json['results'] #check the result failTotal += checkResult(query, response_json['results'], expectedRecordIds, scores) test_lib.killServer(serverHandle) print '==============================' return failTotal
def testFuzzyA1(queriesAndResultsPath, binary_path): args = [binary_path, "--config-file=./fuzzy_a1/conf.xml"] if test_lib.confirmPortAvailable(port) == False: print "Port " + str(port) + " already in use - aborting" return -1 print "starting engine: " + args[0] + " " + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) # construct the query failCount = 0 f_in = open(queriesAndResultsPath, "r") for line in f_in: # get the query keyword and results value = line.split("||") queryValue = value[0].split() resultValue = (value[1]).split() # construct the query query = "http://localhost:" + port + "/search?" query = query + prepareQuery(queryValue) # print query # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) # check the result failCount += checkResult(query, response_json["results"], resultValue) test_lib.killServer(serverHandle) print "==============================" return failCount
def testPhraseSearch(queriesAndResultsPath, binary_path): #Start the engine server args = [ binary_path, '--config-file=./phraseSearch/ps2.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) #construct the query #format : phrase,proximity||rid1 rid2 rid3 ...ridn f_in = open(queriesAndResultsPath, 'r') for line in f_in: value=line.split('||') phrase=value[0] expectedRecordIds=(value[1]).split() query='http://localhost:' + port + '/search?q='+ urllib.quote(phrase) + '&sort=ranking&orderby=desc' print query response = urllib2.urlopen(query).read() response_json = json.loads(response) #print response_json['results'] #check the result checkResult(query, response_json['results'], expectedRecordIds) test_lib.killServer(serverHandle) print '=============================='
def testExactA1(queriesAndResultsPath, binary_path): #Start the engine server args = [binary_path, '--config-file=./analyzer_exact_a1/conf.xml'] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) #construct the query failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value = line.split('||') queryValue = value[0].split() resultValue = (value[1]).split() #construct the query query = 'http://localhost:' + port + '/search?' query = query + prepareQuery(queryValue) #print query #do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue) test_lib.killServer(serverHandle) print '==============================' return failCount
def testGeo(queriesAndResultsPath, binary_path): # Start the engine server args = [binary_path, "--config-file=./geo/conf.xml"] if test_lib.confirmPortAvailable(port) == False: print "Port " + str(port) + " already in use - aborting" return -1 serverHandle = test_lib.startServer(args) # make sure that start the engine up test_lib.pingServer(port, "q=goods&clat=61.18&clong=-149.1&radius=0.5") # construct the query failCount = 0 radius = 0.25 f_in = open(queriesAndResultsPath, "r") for line in f_in: # get the query keyword and results value = line.split("||") queryGeo = value[0].split("+") resultValue = (value[1]).split() # construct the query query = "http://localhost:" + port + "/search?" query = query + prepareQuery(queryGeo[1], queryGeo[0], str(radius)) # print query # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) # check the result failCount += checkResult(query, response_json["results"], resultValue) test_lib.killServer(serverHandle) return failCount
def testEmptyRecordBoostField(queriesAndResultsPath, binary_path): #Start the engine server args = [ binary_path, '--config-file=./empty_recordBoostField/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) #construct the query failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value=line.split('||') queryValue=value[0].split() resultValue=(value[1]).split() #construct the query query='http://localhost:' + port + '/search?' query = query + prepareQuery(queryValue) #print query #do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue ) test_lib.killServer(serverHandle) print '==============================' return failCount
def testReassignId(binary_path,jsonRecordsPath): #Start the engine server args = [ binary_path, '--config-file=./reassignid-during-delete/srch2-config.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) #load record f_test = open(jsonRecordsPath,'r') jsonRecords = json.loads(f_test.read()) for record in jsonRecords: recordId = record['id'] record = json.dumps(record) #Insert one record and delete it immediately addRecord(record) deleteRecord(recordId) test_lib.killServer(serverHandle) print 'test pass' print '==============================' return 0
def testGeo(queriesAndResultsPath, binary_path): # Start the engine server args = [binary_path, '--config-file=./geo/conf.xml'] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port, 'q=goods&clat=61.18&clong=-149.1&radius=0.5') #construct the query failCount = 0 radius = 0.25 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value = line.split('||') queryGeo = value[0].split('+') resultValue = (value[1]).split() #construct the query query = 'http://localhost:' + port + '/search?' query = query + prepareQuery(queryGeo[1], queryGeo[0], str(radius)) #print query # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue) test_lib.killServer(serverHandle) return failCount
def testMultipleCores(queriesAndResultsPath, binary_path):
    """Run access-control tests against a multi-core engine config.

    Each input line (fields separated by '||') is either a search line
    starting with 'S' — value[1] holds "coreName keywords attrs" and
    value[2] holds '@'-separated expected-id lists — or a command line
    whose command and payload are sent to the engine via HTTP PUT.
    Returns the number of failed checks, or -1 if the port is unavailable.
    """
    #Start the engine server
    args = [ binary_path, '--config-file=./access_control/conf-acl.xml' ]
    if test_lib.confirmPortAvailable(port) == False:
        print 'Port ' + str(port) + ' already in use - aborting'
        return -1
    print 'starting engine: ' + args[0] + ' ' + args[1]
    serverHandle = test_lib.startServer(args)
    test_lib.pingServer(port)
    failCount = 0
    print "Test core1 - access control"
    f_in = open(queriesAndResultsPath, 'r')
    for line in f_in:
        #get the query keyword and results
        value=line.split('||')
        if(value[0] == 'S'):
            queryValue=value[1].split(' ')
            allResults=value[2].split('@')
            for coreResult in allResults:
                resultValue=coreResult.split()
                #construct the query against the named core
                query='http://localhost:' + port + '/' + queryValue[0] + '/search?'
                query = query + prepareQuery(queryValue[1], queryValue[2], False)
                #do the query
                response = urllib2.urlopen(query).read()
                response_json = json.loads(response)
                #check the result
                failCount += checkResult(query, response_json['results'], resultValue)
        else:
            # the line is command query (insert/delete/update/acl etc)
            coreName = value[1]
            command = value[2]
            payload = value[3]
            if coreName == "":
                query='http://localhost:' + port + '/' + command
            else:
                query='http://localhost:' + port + '/' + coreName + '/' + command
            print query
            # commands are sent as HTTP PUT with the raw payload as body
            request = urllib2.Request(query, data=payload)
            request.get_method = lambda: 'PUT'
            opener = urllib2.build_opener(urllib2.HTTPHandler)
            url = opener.open(request)
            # give the engine a moment to apply the command
            time.sleep(1)
    # let any pending merge finish before shutting down
    time.sleep(5)
    test_lib.killServer(serverHandle)
    print '=============================='
    return failCount
def testFacetedSearch(f_in, f_facet, binary_path): # Start the engine server args = [binary_path, '--config-file=./faceted_search/conf.xml'] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port) #parse used to extract facet fields from input facet_parser = argparse.ArgumentParser() facet_parser.add_argument('-f', metavar='facet', nargs='+', action='append') #construct the query failCount = 0 for line in f_in: #get the query keyword and results value = line.split('||') tmpQueryValue = value[0].split(',') queryValue = tmpQueryValue[0].split() #line input is the format query, facet args||numResults #extracting and parsing the facet args facet_args = facet_parser.parse_args(tmpQueryValue[1].split()) facetedFields = facet_args.f facet_args.f = [] resultValue = (value[1]).split() #construct the query query = 'http://localhost:' + port + '/search?' query = query + (prepareQuery(queryValue, facetedFields)) #print query # get facet correct result from file facetResultValue = [] for i in xrange(0, len(facetedFields)): facetResultValue.append(f_facet.next().strip()) # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json, resultValue, facetResultValue) test_lib.killServer(serverHandle) print '==============================' return failCount
def startSrch2Engine(): global serverHandle #Start the engine server args = [binary_path , '--config-file=adapter_sqlite/conf.xml'] if test_lib.confirmPortAvailable(port) == False: print 'Port' + str(port) + ' already in use -aborting ' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port)
def testFacetedSearch(f_in , f_facet, binary_path):
    """Run faceted-search queries from f_in and verify hits and facet output.

    Each f_in line: "query,facet-args||expected record ids"; one line of
    f_facet is consumed per facet field requested on that line. Returns the
    number of failed checks, or -1 if the port is unavailable.
    """
    # Start the engine server
    args = [ binary_path, '--config-file=./faceted_search/conf.xml' ]
    if test_lib.confirmPortAvailable(port) == False:
        print 'Port ' + str(port) + ' already in use - aborting'
        return -1
    print 'starting engine: ' + args[0] + ' ' + args[1]
    serverHandle = test_lib.startServer(args)
    #make sure that start the engine up
    test_lib.pingServer(port)
    #parser used to extract facet fields from input
    facet_parser= argparse.ArgumentParser()
    facet_parser.add_argument('-f', metavar='facet', nargs='+', action='append')
    #construct the query
    failCount = 0
    for line in f_in:
        #get the query keyword and results
        value=line.split('||')
        tmpQueryValue= value[0].split(',')
        queryValue= tmpQueryValue[0].split()
        #line input is the format query, facet args||numResults
        #extracting and parsing the facet args
        facet_args=facet_parser.parse_args(tmpQueryValue[1].split())
        facetedFields= facet_args.f
        facet_args.f=[]
        resultValue=(value[1]).split()
        #construct the query
        query='http://localhost:' + port + '/search?'
        query = query + (prepareQuery(queryValue, facetedFields))
        # get facet correct result from file (one line per facet field)
        facetResultValue=[]
        for i in xrange(0, len(facetedFields)):
            facetResultValue.append(f_facet.next().strip())
        # do the query
        response = urllib2.urlopen(query).read()
        response_json = json.loads(response)
        #check the result
        failCount += checkResult(query, response_json, resultValue , facetResultValue )
    test_lib.killServer(serverHandle)
    print '=============================='
    return failCount
def testNewFeatures(queriesAndResultsPath, facetResultsPath, binary_path): # Start the engine server args = [ binary_path, '--config-file=./test_solr_compatible_query_syntax/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port) # get facet correct result from file f_facet = open(facetResultsPath, 'r') facetResultValue = [] for facet_line in f_facet: facetResultValue.append(facet_line.strip()) #construct the query failCount = 0 j = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value = line.split('||') queryValue = value[0] resultValue = (value[1]).split() #construct the query query = 'http://localhost:' + port + '/search?' query = query + prepareQuery(queryValue) #print query # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json, resultValue, facetResultValue[j]) j = j + 1 #print j #print '------------------------------------------------------------------' print '==============================' test_lib.killServer(serverHandle) return failCount
def runTest(queriesAndResultsPath, binary_path, configFile): #Start the engine server args = [binary_path, '--config-file=' + configFile] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) try: #construct the query #format : phrase,proximity||rid1 rid2 rid3 ...ridn failTotal = 0 f_in = open(queriesAndResultsPath, 'r') queryId = 1 for line in f_in: value = line.split('||') phrase = value[0] phrase = phrase.replace(' ', '%20') phrase = phrase.replace('"', '%22') expectedRecordIds = (value[1]).split() query = 'http://localhost:' + port + '/search?q=' + phrase #query='http://localhost:' + port + '/search?q='+ urllib.quote(phrase) #query = query.replace("%26", "&"); #query = query.replace("%3D", "="); print "query : " + query response = urllib2.urlopen(query).read() response_json = json.loads(response) #print response_json['results'] #check the result #print queryId , ":" #print "[" #for obj in response_json['results']: # print obj['record_id'] # print obj['snippet'] # print "," #print "]," failTotal += checkResult(query, response_json['results'], expectedRecordIds, queryId) queryId += 1 print '==============================' return failTotal finally: test_lib.killServer(serverHandle)
def runTest(queriesAndResultsPath, binary_path, configFile):
    """Run phrase queries from a file against the engine started with configFile.

    Input line format: phrase||rid1 rid2 ... ridn. The server is always
    killed (finally block), even if a query raises. Returns total failed
    checks, or -1 if the port is unavailable.
    """
    # Start the engine server
    args = [binary_path, "--config-file=" + configFile]
    if test_lib.confirmPortAvailable(port) == False:
        print "Port " + str(port) + " already in use - aborting"
        return -1
    print "starting engine: " + args[0] + " " + args[1]
    serverHandle = test_lib.startServer(args)
    test_lib.pingServer(port)
    try:
        # construct the query
        # format : phrase,proximity||rid1 rid2 rid3 ...ridn
        failTotal = 0
        f_in = open(queriesAndResultsPath, "r")
        queryId = 1
        for line in f_in:
            value = line.split("||")
            phrase = value[0]
            # minimal manual URL-encoding of the phrase
            phrase = phrase.replace(" ", "%20")
            phrase = phrase.replace('"', "%22")
            expectedRecordIds = (value[1]).split()
            query = "http://localhost:" + port + "/search?q=" + phrase
            # query='http://localhost:' + port + '/search?q='+ urllib.quote(phrase)
            # query = query.replace("%26", "&");
            # query = query.replace("%3D", "=");
            print "query : " + query
            response = urllib2.urlopen(query).read()
            response_json = json.loads(response)
            # check the result (queryId lets checkResult report which line failed)
            failTotal += checkResult(query, response_json["results"], expectedRecordIds, queryId)
            queryId += 1
        print "=============================="
        return failTotal
    finally:
        test_lib.killServer(serverHandle)
def testNewFeatures(queriesAndResultsPath,facetResultsPath, binary_path):
    """Run solr-compatible-syntax queries; one facet expectation per query.

    The facet-results file supplies exactly one expected facet line per
    query line, in the same order. Returns the number of failed checks,
    or -1 if the port is unavailable.
    """
    # Start the engine server
    args = [ binary_path, '--config-file=./test_solr_compatible_query_syntax/conf.xml' ]
    if test_lib.confirmPortAvailable(port) == False:
        print 'Port ' + str(port) + ' already in use - aborting'
        return -1
    print 'starting engine: ' + args[0] + ' ' + args[1]
    serverHandle = test_lib.startServer(args)
    #make sure that start the engine up
    test_lib.pingServer(port)
    # get facet correct result from file
    f_facet = open(facetResultsPath , 'r')
    facetResultValue = []
    for facet_line in f_facet:
        facetResultValue.append(facet_line.strip())
    #construct the query
    failCount = 0
    j=0
    f_in = open(queriesAndResultsPath, 'r')
    for line in f_in:
        #get the query keyword and results
        value=line.split('||')
        queryValue=value[0]
        resultValue=(value[1]).split()
        #construct the query
        query='http://localhost:' + port + '/search?'
        query = query + prepareQuery(queryValue)
        # do the query
        response = urllib2.urlopen(query).read()
        response_json = json.loads(response)
        #check the result against the j-th expected facet line
        failCount += checkResult(query, response_json, resultValue, facetResultValue[j])
        j=j+1
    print '=============================='
    test_lib.killServer(serverHandle)
    return failCount
def testFuzzyM1(queriesAndResultsPath, binary_path): # Start the engine server args = [ binary_path, '--config-file=./fuzzy_m1/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting server: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port, 'q=goods&clat=61.18&clong=-149.1&radius=0.5') #construct the query print "queriesAndResultsPath: ",queriesAndResultsPath failCount = 0 radius = 0.5 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #print"line is", line #get the query keyword and results value = line.split('||') queryValue = value[0].split('^') queryKeyword = queryValue[0].split() queryGeo = queryValue[1].split('+') resultValue = (value[1]).split() #construct the query query = 'http://localhost:' + port + '/search?' query = query + prepareQuery(queryKeyword, queryGeo[1], queryGeo[0], str(radius)) #print query # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue) test_lib.killServer(serverHandle) return failCount
def testEmptyIndex(binary_path): #Start the engine server args = [binary_path, '--config-file=./empty_index/conf.xml'] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) #add an record addQuery = 'http://localhost:' + str(port) + '/docs' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request( addQuery, '{"id":"1234", "name":"Toy Story", "category":"shop"}') request.get_method = lambda: 'PUT' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['insert'] != "success": print "/docs operation failed: " + response return -1 time.sleep(11) #do query exitCode = 0 query = 'http://localhost:' + str(port) + '/search?q=toy' response = urllib2.urlopen(query).read() response_json = json.loads(response) if len(response_json['results'] ) != 1 or response_json['results'][0]['record']['id'] != "1234": print 'test failed' exitCode = 1 test_lib.killServer(serverHandle) print 'test pass' print '==============================' return exitCode
def testFuzzyAttributeBasedSearch(queriesAndResultsPath, binary_path): # Start the engine server args = [ binary_path, '--config-file=./fuzzy_attribute_based_search/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port) #construct the query failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value = line.split('||') queryValue = value[0].split() resultValue = (value[1]).split() #construct the query query = 'http://localhost:' + port + '/search?' query = query + prepareQuery(queryValue) #print query # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue) #get pid of srch2-search-server and kill the process print '==============================' test_lib.killServer(serverHandle) return failCount
def testExactAttributeBasedSearch(queriesAndResultsPath, binary_path, configFile): # Start the engine server args = [ binary_path, '--config-file=' + configFile ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port) #construct the query failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value=line.split('||') queryValue=value[0].split() resultValue=(value[1]).split() #construct the query query='http://localhost:' + str(port) + '/search?' query = query + prepareQuery(queryValue) #print query # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue ) f_in.close() print '==============================' test_lib.killServer(serverHandle) return failCount
def testEmptyIndex(binary_path): #Start the engine server args = [ binary_path, '--config-file=./empty_index/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) #add an record addQuery='http://localhost:' + str(port) + '/docs' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(addQuery, '{"id":"1234", "name":"Toy Story", "category":"shop"}') request.get_method = lambda: 'PUT' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['insert'] != "success": print "/docs operation failed: " + response return -1 time.sleep(11) #do query exitCode = 0 query='http://localhost:' + str(port) + '/search?q=toy' response = urllib2.urlopen(query).read() response_json = json.loads(response) if len(response_json['results']) != 1 or response_json['results'][0]['record']['id'] != "1234": print 'test failed' exitCode = 1 test_lib.killServer(serverHandle) print 'test pass' print '==============================' return exitCode
def testDateAndTime(queriesAndResultsPath , binary_path): # Start the engine server args = [ binary_path, '--config-file=./date_time_new_features_test/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port) #construct the query failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value=line.split('||') queryValue=value[0] resultValue=(value[1]).split() #construct the query query='http://localhost:' + str(port) + '/search?' query = query + queryValue #print query # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json, resultValue ) test_lib.killServer(serverHandle) print '==============================' return failCount
def testMultipleCores(queriesAndResultsPath, binary_path):
    """Run access-control tests against a multi-core engine config.

    Each input line (fields separated by '||') is either a search line
    starting with 'S' — value[1] holds "coreName keywords attrs" and
    value[2] holds '@'-separated expected-id lists — or a command line
    whose command and payload are sent to the engine via HTTP PUT.
    Returns the number of failed checks, or -1 if the port is unavailable.
    """
    #Start the engine server
    args = [binary_path, '--config-file=./access_control/conf-acl.xml']
    if test_lib.confirmPortAvailable(port) == False:
        print 'Port ' + str(port) + ' already in use - aborting'
        return -1
    print 'starting engine: ' + args[0] + ' ' + args[1]
    serverHandle = test_lib.startServer(args)
    test_lib.pingServer(port)
    failCount = 0
    print "Test core1 - access control"
    f_in = open(queriesAndResultsPath, 'r')
    for line in f_in:
        #get the query keyword and results
        value = line.split('||')
        if (value[0] == 'S'):
            queryValue = value[1].split(' ')
            allResults = value[2].split('@')
            for coreResult in allResults:
                resultValue = coreResult.split()
                #construct the query against the named core
                query = 'http://localhost:' + port + '/' + queryValue[0] + '/search?'
                query = query + prepareQuery(queryValue[1], queryValue[2], False)
                #do the query
                response = urllib2.urlopen(query).read()
                response_json = json.loads(response)
                #check the result
                failCount += checkResult(query, response_json['results'], resultValue)
        else:
            # the line is command query (insert/delete/update/acl etc)
            coreName = value[1]
            command = value[2]
            payload = value[3]
            if coreName == "":
                query = 'http://localhost:' + port + '/' + command
            else:
                query = 'http://localhost:' + port + '/' + coreName + '/' + command
            print query
            # commands are sent as HTTP PUT with the raw payload as body
            request = urllib2.Request(query, data=payload)
            request.get_method = lambda: 'PUT'
            opener = urllib2.build_opener(urllib2.HTTPHandler)
            url = opener.open(request)
            # give the engine a moment to apply the command
            time.sleep(1)
    # let any pending merge finish before shutting down
    time.sleep(5)
    test_lib.killServer(serverHandle)
    print '=============================='
    return failCount
def testMultipleCores(queriesAndResultsPath, queriesAndResultsPath2, binary_path):
    """Run two multi-core test suites against conf-multicore.xml.

    Suite 1 (queriesAndResultsPath): each line is "keywords||ids@ids@...",
    one '@'-separated id list per core, queried against the default core
    (iteration 0) and then /coreN. Suite 2 (queriesAndResultsPath2): the
    same query is run against core1 and core4, which share data but differ
    in configuration; a final /_all/search checks the combined response.
    Returns the number of failed checks, or -1 if the port is unavailable.
    """
    #Start the engine server
    args = [ binary_path, '--config-file=./multicore/conf-multicore.xml' ]
    if test_lib.confirmPortAvailable(port) == False:
        print 'Port ' + str(port) + ' already in use - aborting'
        return -1
    print 'starting engine: ' + args[0] + ' ' + args[1]
    serverHandle = test_lib.startServer(args)
    # sometime it fails, it might be that the multicore need more time to load ?
    time.sleep(2)
    if test_lib.pingServer(port) != 0:
        print 'pingServer failed, here is the args:'
        print args
    failCount = 0
    #######################################
    # Basic multi-core functional testing #
    #######################################
    print "Test suite #1 - basic multi-core functionality"
    f_in = open(queriesAndResultsPath, 'r')
    for line in f_in:
        #get the query keyword and results
        value=line.split('||')
        queryValue=value[0].split()
        allResults=value[1].split('@')
        coreNum=0
        for coreResult in allResults:
            resultValue=coreResult.split()
            #construct the query
            if coreNum == 0:
                # test default core (unnamed core) on 0th iteration
                query='http://localhost:' + port + '/search?'
            else:
                query='http://localhost:' + port + '/core' + str(coreNum) + '/search?'
            query = query + prepareQuery(queryValue, False)
            #do the query
            response = urllib2.urlopen(query).read()
            # TODO - Replace srch2 bad JSON (spurious comma). Ticket SRCN-335 already filed.
            #response = re.sub('[,][}]', '}', response)
            #print query + ' Got ==> ' + response
            response_json = json.loads(response)
            #check the result
            failCount += checkResult(query, response_json['results'], resultValue)
            coreNum += 1
    #######################################################################################
    # Core 1 and Core 4 have different configurations, but on the same data.              #
    # We now test for the differences in those settings.                                  #
    # In queriesAndResults2.txt, here is an explanation of each test:                     #
    # 1) Aviatro||@156001 693000                                                          #
    #    Fuzzy match is off in core1 and on in core4, so only core4 should                #
    #    return any result records                                                        #
    # 2) Aviat||@156001 693000                                                            #
    #    Core1 has prefix matching off and core4 allows prefixes to match,                #
    #    so only core4 should return any results                                          #
    # 3) monkeys||135001@                                                                 #
    #    Core4 uses stop-words2.txt which has "monkeys", so core4 should not              #
    #    return results. Core1 has the usual stop words file and should                   #
    #    find "monkeys".                                                                  #
    # 4) Rings||908 927 492002 492003 634004 634005 634006@908 927 492002 492003 634004  #
    #    Test different <rows> setting. Core1 can return up to 10 records, but Core4 is   #
    #    limited to just 5.                                                               #
    # 5) martn~||156001 525017 693000@                                                    #
    #    Core1 will fuzzy match martn against "Martin" in 3 records, because              #
    #    it's similarity threshold is 0.75. Core4 has a higher threshold                  #
    #    of 0.85, and should not return any matching records.                             #
    #######################################################################################
    print "\nTest suite #2: Comparing different engine configurations on the same data source"
    f_in = open(queriesAndResultsPath2, 'r')
    for line in f_in:
        #get the query keyword and results
        value=line.split('||')
        queryValue=value[0].split()
        allResults=value[1].split('@')
        coreNum= [1,4] # coreNum are the literal core numbers to use in path this time
        index = 0 # and index iterates coreNum
        for coreResult in allResults:
            resultValue=coreResult.split()
            #construct the query
            query='http://localhost:' + port + '/core' + str(coreNum[index]) + '/search?'
            query = query + prepareQuery(queryValue, False)
            #do the query
            response = urllib2.urlopen(query).read()
            # TODO - Replace srch2 bad JSON (spurious comma). Ticket SRCN-335 already filed.
            #response = re.sub('[,][}]', '}', response)
            # print query + ' Got ==> ' + response
            response_json = json.loads(response)
            #check the result
            failCount += checkResult(query, response_json['results'], resultValue)
            index += 1
        # Test search_all functionality
        query='http://localhost:' + port + '/_all/search?' + prepareQuery(queryValue, False)
        response = urllib2.urlopen(query).read()
        response_json = json.loads(response)
        # Check the search_all result: one per-core result section per expected list
        index = 0
        for coreResult in allResults:
            resultValue = coreResult.split()
            coreName = 'core' + str(coreNum[index])
            failCount += checkResult(query, response_json[coreName]['results'], resultValue)
            index +=1
    # let any pending merge finish before shutting down
    time.sleep(5)
    test_lib.killServer(serverHandle)
    print '=============================='
    return failCount
def pingServer(self):
    """Ping the engine on the module-global port via test_lib.

    NOTE(review): thin delegate; 'self' is unused here.
    """
    test_lib.pingServer(port)
def test(queriesAndResultsPath, binary_path, configFilePath): #Start the engine server args = [ binary_path, '--config-file=' + configFilePath] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port, 'q=garbage', 30) #construct the query failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #if line is empty ignore if len(line) == 0: continue #ignore comments if line[0] == '#': continue #get the query keyword and results value=line.split('||') if len(value) < 2: continue # ignore bad line if value[0] == 'W': # sleep between test cases for merge process to finish. sleepTime = value[1] time.sleep(float(sleepTime)) elif value[0] == 'C': # the line is command query (feedback) command = value[1] payload = value[2] coreName = '' if len(value) > 3: coreName = value[3].strip('\n').strip() if coreName == "": query='http://localhost:' + port + '/' + command else: query='http://localhost:' + port + '/' + coreName + '/' + command print query + " -X PUT -d '" + payload.strip('\n') + "'" request = urllib2.Request(query, data=payload) request.get_method = lambda: 'PUT' opener = urllib2.build_opener(urllib2.HTTPHandler) url = opener.open(request) time.sleep(1) else: # the line is a search query queryValue=value[1].split() resultValue=(value[2]).split() coreName = '' if len(value) > 3: coreName = value[3].strip('\n').strip() #construct the query if coreName == '': query='http://localhost:' + port + '/search?' else: query='http://localhost:' + port + '/' + coreName + '/' + 'search?' 
query = query + prepareQuery(queryValue) print query #do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue ) test_lib.killServer(serverHandle) print '==============================' return failCount
def testMultipleCores(queriesAndResultsPath, queriesAndResultsPath2, binary_path): #Start the engine server args = [binary_path, '--config-file=./multicore/conf-multicore.xml'] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) # sometime it fails, it might be that the multicore need more time to load ? time.sleep(2) if test_lib.pingServer(port) != 0: print 'pingServer failed, here is the args:' print args failCount = 0 ####################################### # Basic multi-core functional testing # ####################################### print "Test suite #1 - basic multi-core functionality" f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value = line.split('||') queryValue = value[0].split() allResults = value[1].split('@') coreNum = 0 for coreResult in allResults: resultValue = coreResult.split() #construct the query if coreNum == 0: # test default core (unnamed core) on 0th iteration query = 'http://localhost:' + port + '/search?' else: query = 'http://localhost:' + port + '/core' + str( coreNum) + '/search?' query = query + prepareQuery(queryValue, False) #do the query response = urllib2.urlopen(query).read() # TODO - Replace srch2 bad JSON (spurious comma). Ticket SRCN-335 already filed. #response = re.sub('[,][}]', '}', response) #print query + ' Got ==> ' + response response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue) coreNum += 1 ####################################################################################### # Core 1 and Core 4 have different configurations, but on the same data. # # We now test for the differences in those settings. 
# # In queriesAndResults2.txt, here is an explanation of each test: # # 1) Aviatro||@156001 693000 # # Fuzzy match is off in core1 and on in core4, so only core4 should # # return any result records # # 2) Aviat||@156001 693000 # # Core1 has prefix matching off and core4 allows prefixes to match, # # so only core4 should return any results # # 3) monkeys||135001@ # # Core4 uses stop-words2.txt which has "monkeys", so core4 should not # # return results. Core1 has the usual stop words file and should # # find "monkeys". # # 4) Rings||908 927 492002 492003 634004 634005 634006@908 927 492002 492003 634004 # # Test different <rows> setting. Core1 can return up to 10 records, but Core4 is # # limited to just 5. # # 5) martn~||156001 525017 693000@ # # Core1 will fuzzy match martn against "Martin" in 3 records, because # # it's similarity threshold is 0.75. Core4 has a higher threshold # # of 0.85, and should not return any matching records. # ####################################################################################### print "\nTest suite #2: Comparing different engine configurations on the same data source" f_in = open(queriesAndResultsPath2, 'r') for line in f_in: #get the query keyword and results value = line.split('||') queryValue = value[0].split() allResults = value[1].split('@') coreNum = [ 1, 4 ] # coreNum are the literal core numbers to use in path this time index = 0 # and index iterates coreNum for coreResult in allResults: resultValue = coreResult.split() #construct the query query = 'http://localhost:' + port + '/core' + str( coreNum[index]) + '/search?' query = query + prepareQuery(queryValue, False) #do the query response = urllib2.urlopen(query).read() # TODO - Replace srch2 bad JSON (spurious comma). Ticket SRCN-335 already filed. 
#response = re.sub('[,][}]', '}', response) # print query + ' Got ==> ' + response response_json = json.loads(response) #check the result failCount += checkResult(query, response_json['results'], resultValue) index += 1 # Test search_all functionality query = 'http://localhost:' + port + '/_all/search?' + prepareQuery( queryValue, False) response = urllib2.urlopen(query).read() response_json = json.loads(response) # Check the search_all result index = 0 for coreResult in allResults: resultValue = coreResult.split() coreName = 'core' + str(coreNum[index]) failCount += checkResult(query, response_json[coreName]['results'], resultValue) index += 1 time.sleep(5) test_lib.killServer(serverHandle) print '==============================' return failCount
# NOTE(review): this multiport variant of testMultipleCores is corrupted in the
# checked-in text -- the core URLs were redacted to 'http://*****:*****@' and the
# statements between the redactions were collapsed away, so the per-port test
# logic cannot be reconstructed safely from what is visible here.  Presumably it
# iterated the cores over their per-core control ports (core2ControlPort is
# still visible below) -- TODO confirm against version control and restore.
# Left byte-identical below; do not edit until the original is recovered.
def testMultipleCores(queriesAndResultsPath, binary_path): if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 #Start the engine server args = [ binary_path, '--config-file=./multiport/conf-multiport.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) failCount = 0 ####################################### # Basic multi-core functional testing # ####################################### print "Test suite #1 - basic multi-core functionality" f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value=line.split('||') queryValue=value[0].split() allResults=value[1].split('@') coreNum=0 for coreResult in allResults: resultValue=coreResult.split() #construct the query if coreNum == 0: # test default core (unnamed core) on 0th iteration query='http://*****:*****@') coreNum=0 for coreResult in allResults: resultValue=coreResult.split() #construct the query if coreNum == 0: # test default core (unnamed core) on 0th iteration query='http://*****:*****@') coreNum=0 for coreResult in allResults: resultValue=coreResult.split() #construct the query if coreNum == 0: # test default core (unnamed core) on 0th iteration query='http://localhost:' + core2ControlPort + '/search?' else: query='http://localhost:' + core2ControlPort + '/core' + str(coreNum) + '/search?' 
query = query + prepareQuery(queryValue, False) try: #do the query response = urllib2.urlopen(query).read() #print query + ' Got ==> ' + response response_json = json.loads(response) except urllib2.HTTPError as err: if err.code == 404: print query + ' test pass' else: # did not get expected file not found error failCount += 1 raise coreNum += 1 f_in.close() test_lib.killServer(serverHandle) print '==============================' return failCount
def testSaveShutdownRestart(binary_path): #Start the engine server binary= [ binary_path, '--config-file=./save_shutdown_restart_export_test/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + binary[0] + ' ' + binary[1] proc = subprocess.Popen(binary) test_lib.pingServer(port) #save the index saveQuery='http://localhost:' + port + '/save' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(saveQuery, '') request.get_method = lambda: 'PUT' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['save'] != "success": print "Save operation failed: " + response exit(-1) #shutdown use system kill subprocess.call(["kill", "-2", "%d" % proc.pid]) proc.wait() #search a query for checking if the server is shutdown try: query='http://localhost:' + port + '/search?q=good' response = urllib2.urlopen(query).read() print response except: print 'server has been shutdown' else: print 'server is not shutdown' exit(-1) #restart proc = subprocess.Popen(binary) test_lib.pingServer(port) #search a query for checking if the server is shutdown query='http://localhost:' + port + '/search?q=good' response = urllib2.urlopen(query).read() if response == 0: print 'server does not start' exit(-1) else: print 'server start' #export data to json exportQuery='http://localhost:' + port + '/export?exported_data_file=exportData.json' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(exportQuery, '') request.get_method = lambda: 'PUT' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['export'] != "success": print "Export operation failed: " + response exit(-1) #get pid of srch2-search-server and kill the process #shutdown use restful API killQuery = 'http://localhost:' + port + '/_all/shutdown' opener = urllib2.build_opener(urllib2.HTTPHandler) request 
= urllib2.Request(killQuery, '') request.get_method = lambda: 'PUT' response = opener.open(request) print response.read() import time time.sleep(2) #search a query for checking if the server is shutdown try: query='http://localhost:' + port + '/search?q=good' response = urllib2.urlopen(query).read() print response except: print 'server has been shutdown' else: print 'server is not shutdown' exit(-1) #proc.send_signal(signal.SIGUSR1) print 'test pass' print '==============================' return 0
def testNewFeatures( binary_path): # Start the engine server args = [ binary_path, '--config-file=./test_search_by_id/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port) ## first look for id=2 print "#1: search for id=2 and should find it" query = 'http://localhost:' + port + '/search?docid=2' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount = checkResult(query, response_json,['2'] ) # second search for 200 which is not there print "# 2. Search for id=200 and should not find it" query = 'http://localhost:' + port + '/search?docid=200' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result # NOTE: If you inexplicably don't understand why this test fails and record 200 is found, try deleting # the index files. They're probably leftover from a prior execution. failCount += checkResult(query, response_json,[] ) # now insert 200 print "# 3. inserts id=200" insertQuery = 'http://localhost:' + port + '/docs' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(insertQuery, '{"model": "BMW","price":1.5,"likes":1,"expiration":"01/01/1911", "category": "second verycommonword vitamin Food & Beverages Retail Goods Specialty", "name": "Moondog Visions", "relevance": 8.0312880237855993, "lat": 61.207107999999998, "lng": -149.86541, "id": "200"}') request.get_method = lambda: 'PUT' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['insert'] != "success": print "Insertion of record 200 failed: " + response failCount += 1 time.sleep(10) # third search for 200 which is there print "# 4. 
search for id=200 and should find it this time" query = 'http://localhost:' + port + '/search?docid=200' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json,['200'] ) # now delete record 2 print "# 5. delete id=2" deleteQuery = 'http://localhost:' + str(port) + '/docs?id=2' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(deleteQuery, '') request.get_method = lambda: 'DELETE' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['delete'] != "success": print "Deletion of record 2 failed: " + response failCount += 1 time.sleep(10) # search for record 2 which should not be there print "# 6. search for id=2 and should not find it" query = 'http://localhost:' + port + '/search?docid=2' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json,[] ) # now insert 2 print "# 7. insert id=2" insertQuery = 'http://localhost:' + port + '/docs' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(insertQuery, '{"model": "BMW","price":1.5,"likes":1,"expiration":"01/01/1911", "category": "record 2 second verycommonword vitamin Food & Beverages Retail Goods Specialty", "name": "Moondog Visions", "relevance": 8.0312880237855993, "lat": 61.207107999999998, "lng": -149.86541, "id": "2"}') request.get_method = lambda: 'PUT' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['insert'] != "success": print "Insertion of record 2 failed: " + response failCount += 1 time.sleep(10) # third search for 200 which is there print "# 8. 
searches for id=2 and should find it again" query = 'http://localhost:' + port + '/search?docid=2' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json,['2'] ) test_lib.killServer(serverHandle) print '==============================' return failCount
def test(queriesAndResultsPath, binary_path, configFilePath): #Start the engine server args = [binary_path, '--config-file=' + configFilePath] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port, 'q=garbage', 30) #construct the query failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #if line is empty ignore if len(line) == 0: continue #ignore comments if line[0] == '#': continue #get the query keyword and results value = line.split('||') if len(value) < 2: continue # ignore bad line if value[0] == 'C': # the line is command query (insert/delete/update/acl etc) command = value[1] payload = value[2] coreName = '' if len(value) > 3: coreName = value[3].strip('\n').strip() if coreName == "": query = 'http://localhost:' + port + '/' + command else: query = 'http://localhost:' + port + '/' + coreName + '/' + command print query request = urllib2.Request(query, data=payload) request.get_method = lambda: 'PUT' opener = urllib2.build_opener(urllib2.HTTPHandler) url = opener.open(request) time.sleep(1) else: # the line is a search query queryValue = value[1].split() resultValue = (value[2]).split() coreName = '' if len(value) > 3: coreName = value[3].strip('\n').strip() #construct the query if coreName == '': query = 'http://localhost:' + port + '/search?' else: query = 'http://localhost:' + port + '/' + coreName + '/' + 'search?' 
query = query + prepareQuery(queryValue) print query #do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result if value[0] == 'F': # test case for facet query failCount += checkFacetResults(query, response_json, resultValue) elif value[0] == 'H': # test case for checking highlighting only failCount += checkFieldsInResults(query, response_json['results'], resultValue, 'snippet') elif value[ 0] == 'R': # test case for only checking fields in response failCount += checkFieldsInResults(query, response_json['results'], resultValue, 'record') else: failCount += checkResult(query, response_json['results'], resultValue) test_lib.killServer(serverHandle) print '==============================' return failCount
# NOTE(review): duplicate, reformatted copy of the multiport testMultipleCores,
# corrupted the same way -- core URLs redacted to 'http://*****:*****@' with the
# statements between redactions collapsed, so the per-port logic is not
# recoverable from this text.  Presumably each loop targeted a different core
# control port (core2ControlPort survives below) -- TODO confirm against
# version control and restore.  Left byte-identical; do not edit until then.
def testMultipleCores(queriesAndResultsPath, binary_path): if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 #Start the engine server args = [binary_path, '--config-file=./multiport/conf-multiport.xml'] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port) failCount = 0 ####################################### # Basic multi-core functional testing # ####################################### print "Test suite #1 - basic multi-core functionality" f_in = open(queriesAndResultsPath, 'r') for line in f_in: #get the query keyword and results value = line.split('||') queryValue = value[0].split() allResults = value[1].split('@') coreNum = 0 for coreResult in allResults: resultValue = coreResult.split() #construct the query if coreNum == 0: # test default core (unnamed core) on 0th iteration query = 'http://*****:*****@') coreNum = 0 for coreResult in allResults: resultValue = coreResult.split() #construct the query if coreNum == 0: # test default core (unnamed core) on 0th iteration query = 'http://*****:*****@') coreNum = 0 for coreResult in allResults: resultValue = coreResult.split() #construct the query if coreNum == 0: # test default core (unnamed core) on 0th iteration query = 'http://localhost:' + core2ControlPort + '/search?' else: query = 'http://localhost:' + core2ControlPort + '/core' + str( coreNum) + '/search?' 
query = query + prepareQuery(queryValue, False) try: #do the query response = urllib2.urlopen(query).read() #print query + ' Got ==> ' + response response_json = json.loads(response) except urllib2.HTTPError as err: if err.code == 404: print query + ' test pass' else: # did not get expected file not found error failCount += 1 raise coreNum += 1 f_in.close() test_lib.killServer(serverHandle) print '==============================' return failCount
continue else: return False counter = counter + len(value) return True if __name__ == "__main__": #Start the engine server args = [ sys.argv[1], '--config-file=./top_k/conf.xml' ] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' os._exit(-1) serverHandle = test_lib.startServer(args) test_lib.pingServer(port) base = 'http://localhost:' + port #base = "http://shrek.calit2.uci.edu:8081" query = "obam" topk_A = str(10) topk_B = str(20) if len(sys.argv) == 5: query = sys.argv[2] if (sys.argv[3] < sys.argv[4]): topk_A = str(sys.argv[3]) topk_B = str(sys.argv[4]) exitCode = 0 else: topk_B = str(sys.argv[3])
def test(queriesAndResultsPath, binary_path, configFilePath): #Start the engine server args = [ binary_path, '--config-file=' + configFilePath] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) test_lib.pingServer(port, 'q=garbage', 30) #construct the query failCount = 0 f_in = open(queriesAndResultsPath, 'r') for line in f_in: #if line is empty ignore if len(line) == 0: continue #ignore comments if line[0] == '#': continue #get the query keyword and results value=line.split('||') if len(value) < 2: continue # ignore bad line if value[0] == 'C': # the line is command query (insert/delete/update/acl etc) command = value[1] payload = value[2] coreName = '' if len(value) > 3: coreName = value[3].strip('\n').strip() if coreName == "": query='http://localhost:' + port + '/' + command else: query='http://localhost:' + port + '/' + coreName + '/' + command print query request = urllib2.Request(query, data=payload) request.get_method = lambda: 'PUT' opener = urllib2.build_opener(urllib2.HTTPHandler) url = opener.open(request) time.sleep(1) else: # the line is a search query queryValue=value[1].split() resultValue=(value[2]).split() coreName = '' if len(value) > 3: coreName = value[3].strip('\n').strip() #construct the query if coreName == '': query='http://localhost:' + port + '/search?' else: query='http://localhost:' + port + '/' + coreName + '/' + 'search?' 
query = query + prepareQuery(queryValue) print query #do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result if value[0] == 'F': # test case for facet query failCount += checkFacetResults(query, response_json, resultValue ) elif value[0] == 'H': # test case for checking highlighting only failCount += checkFieldsInResults(query, response_json['results'], resultValue, 'snippet') elif value[0] == 'R': # test case for only checking fields in response failCount += checkFieldsInResults(query, response_json['results'], resultValue, 'record' ) else: failCount += checkResult(query, response_json['results'], resultValue ) test_lib.killServer(serverHandle) print '==============================' return failCount
def testNewFeatures(binary_path): # Start the engine server args = [binary_path, '--config-file=./test_search_by_id/conf.xml'] if test_lib.confirmPortAvailable(port) == False: print 'Port ' + str(port) + ' already in use - aborting' return -1 print 'starting engine: ' + args[0] + ' ' + args[1] serverHandle = test_lib.startServer(args) #make sure that start the engine up test_lib.pingServer(port) ## first look for id=2 print "#1: search for id=2 and should find it" query = 'http://localhost:' + port + '/search?docid=2' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount = checkResult(query, response_json, ['2']) # second search for 200 which is not there print "# 2. Search for id=200 and should not find it" query = 'http://localhost:' + port + '/search?docid=200' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result # NOTE: If you inexplicably don't understand why this test fails and record 200 is found, try deleting # the index files. They're probably leftover from a prior execution. failCount += checkResult(query, response_json, []) # now insert 200 print "# 3. inserts id=200" insertQuery = 'http://localhost:' + port + '/docs' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request( insertQuery, '{"model": "BMW","price":1.5,"likes":1,"expiration":"01/01/1911", "category": "second verycommonword vitamin Food & Beverages Retail Goods Specialty", "name": "Moondog Visions", "relevance": 8.0312880237855993, "lat": 61.207107999999998, "lng": -149.86541, "id": "200"}' ) request.get_method = lambda: 'PUT' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['insert'] != "success": print "Insertion of record 200 failed: " + response failCount += 1 time.sleep(10) # third search for 200 which is there print "# 4. 
search for id=200 and should find it this time" query = 'http://localhost:' + port + '/search?docid=200' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json, ['200']) # now delete record 2 print "# 5. delete id=2" deleteQuery = 'http://localhost:' + str(port) + '/docs?id=2' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request(deleteQuery, '') request.get_method = lambda: 'DELETE' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['delete'] != "success": print "Deletion of record 2 failed: " + response failCount += 1 time.sleep(10) # search for record 2 which should not be there print "# 6. search for id=2 and should not find it" query = 'http://localhost:' + port + '/search?docid=2' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json, []) # now insert 2 print "# 7. insert id=2" insertQuery = 'http://localhost:' + port + '/docs' opener = urllib2.build_opener(urllib2.HTTPHandler) request = urllib2.Request( insertQuery, '{"model": "BMW","price":1.5,"likes":1,"expiration":"01/01/1911", "category": "record 2 second verycommonword vitamin Food & Beverages Retail Goods Specialty", "name": "Moondog Visions", "relevance": 8.0312880237855993, "lat": 61.207107999999998, "lng": -149.86541, "id": "2"}' ) request.get_method = lambda: 'PUT' response = opener.open(request).read() jsonResponse = json.loads(response) if jsonResponse['log'][0]['insert'] != "success": print "Insertion of record 2 failed: " + response failCount += 1 time.sleep(10) # third search for 200 which is there print "# 8. 
searches for id=2 and should find it again" query = 'http://localhost:' + port + '/search?docid=2' # do the query response = urllib2.urlopen(query).read() response_json = json.loads(response) #check the result failCount += checkResult(query, response_json, ['2']) test_lib.killServer(serverHandle) print '==============================' return failCount