def getSimplybooksData(isbn):
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='/tmp/myapp.log',
                        filemode='w')
    logging.warn(logPrefix + ' start fetching data')
    isbn = isbn.decode("ASCII")
    simplybooks_url = "http://www.simplybooks.in/search.php?search_keyword=" + isbn
    # Get a file-like object for the search results page.
    try:
        f = urllib.urlopen(simplybooks_url)
    except Exception:
        logging.warn('simplybooks Exception: ' + simplybooks_url)
        return []
    # Read from the object, storing the page's contents in 's'.
    s = f.read()
    f.close()
    # Try and process the page. The parser class must already be defined.
    try:
        myparser = SimplybooksParser()
        # The parser raises once it has extracted what it needs, so the
        # exception here only marks the end of parsing.
        try:
            myparser.parse(s)
        except Exception:
            logging.warn("simplybooks parsing complete")
        price = myparser.get_price()
        if price is None:
            logging.warn(logPrefix + " price is None")
            raise Exception(logPrefix + " price is None")
        price = int(utilities.cleanInteger(price))
        # fall back to the price when simplybooks does not report an MRP
        mrp = myparser.get_mrp()
        if mrp is None:
            logging.warn(logPrefix + " mrp is None, setting this equal to price")
            mrp = price
        mrp = int(utilities.cleanInteger(mrp))
        simplybooks_url = ("http://bestonlinedealsindia.appspot.com/redirect?isbn=" + isbn +
                           "&vendor=simplybooks&url=" + simplybooks_url.replace('&', "$"))
        buynow_url = simplybooks_url
        simplybooks_data = ["SimplyBooks", "Rs. " + str(price), "Rs. " + str(mrp), buynow_url]
        logging.warn(simplybooks_data)
        for item in simplybooks_data:
            # Check for None before len() so a missing field cannot raise TypeError.
            if item is None or len(item) == 0:
                logging.warn(logPrefix + str(item) + " is empty")
                raise Exception(logPrefix + str(item) + " is empty")
        return simplybooks_data
    except Exception:
        logging.warn("Error in parsing simplybooks data")
        return []
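
# The vendor fetchers in this module all normalise scraped price strings with
# utilities.cleanInteger before calling int(). That helper is not defined in
# this section; the sketch below is only an assumption of what it likely does
# (keep the digits, e.g. "1,295" -> "1295"), not the project's actual
# implementation, and is named with a _sketch suffix to avoid any collision.
def cleanInteger_sketch(value):
    """Hypothetical stand-in for utilities.cleanInteger."""
    return ''.join(ch for ch in str(value) if ch.isdigit())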
def getLandmarkData(isbn):
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='/tmp/myapp.log',
                        filemode='w')
    logging.warn(logPrefix + ' start fetching data')
    isbn = isbn.decode("ASCII")
    landmark_url = "http://www.landmarkonthenet.com/books/search/?q=" + isbn
    # Get a file-like object for the search results page.
    try:
        f = urllib.urlopen(landmark_url)
    except Exception:
        logging.warn('landmark Exception in urlopen for isbn:' + isbn)
        return []
    # Read from the object, storing the page's contents in 's'.
    s = f.read()
    f.close()
    try:
        myparser = LandMarkParser()
        # The parser raises once it has extracted what it needs, so the
        # exception here only marks the end of parsing.
        try:
            myparser.parse(s)
        except Exception:
            logging.warn("landmark parsing complete")
        # get the price
        price = myparser.get_price()
        if price is None:
            logging.warn(logPrefix + " price is None for isbn:" + isbn)
            raise Exception(logPrefix + " price is None")
        price = int(utilities.cleanInteger(price))
        # get the mrp
        mrp = myparser.get_mrp()
        if mrp is None:
            logging.warn(logPrefix + " mrp is None for isbn:" + isbn)
            raise Exception(logPrefix + " mrp is None")
        mrp = int(utilities.cleanInteger(mrp))
        landmark_url = ("http://bestonlinedealsindia.appspot.com/redirect?isbn=" + isbn +
                        "&vendor=landmark&url=" + landmark_url.replace('&', "$"))
        buynow_url = landmark_url
        landmark_data = ["Landmark", "Rs. " + str(price), "Rs. " + str(mrp), buynow_url]
        logging.warn(landmark_data)
        for item in landmark_data:
            # Check for None before len() so a missing field cannot raise TypeError.
            if item is None or len(item) == 0:
                logging.warn(logPrefix + str(item) + " is empty for isbn:" + isbn)
                raise Exception(logPrefix + str(item) + " is empty")
        return landmark_data
    except Exception:
        logging.warn("Error in parsing landmark data for isbn:" + isbn)
        return []
def getBookaddaData(isbn):
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='/tmp/myapp.log',
                        filemode='w')
    logging.warn(logPrefix + ' start fetching data')
    isbn = isbn.decode("ASCII")
    bookadda_url = "http://www.bookadda.com/general-search?searchkey=" + isbn
    # Get a file-like object for the search results page.
    try:
        f = urllib.urlopen(bookadda_url)
    except Exception:
        logging.warn('bookadda Exception in urlopen for isbn:' + isbn)
        return []
    # Read from the object, storing the page's contents in 's'.
    s = f.read()
    f.close()
    try:
        myparser = BookaddaParser()
        # The parser raises once it has extracted what it needs, so the
        # exception here only marks the end of parsing.
        try:
            myparser.parse(s)
        except Exception:
            logging.warn("bookadda parsing complete")
        # get the price
        price = myparser.get_price()
        if price is None:
            logging.warn(logPrefix + " price is None for isbn:" + isbn)
            raise Exception(logPrefix + " price is None")
        price = int(utilities.cleanInteger(price))
        # get the mrp
        mrp = myparser.get_mrp()
        if mrp is None:
            logging.warn(logPrefix + " mrp is None for isbn:" + isbn)
            raise Exception(logPrefix + " mrp is None")
        mrp = int(utilities.cleanInteger(mrp))
        bookadda_url = ("http://bestonlinedealsindia.appspot.com/redirect?isbn=" + isbn +
                        "&vendor=bookadda&url=" + bookadda_url.replace('&', "$"))
        buynow_url = bookadda_url
        bookadda_data = ["BookAdda", "Rs. " + str(price), "Rs. " + str(mrp), buynow_url]
        logging.warn(bookadda_data)
        for item in bookadda_data:
            # Check for None before len() so a missing field cannot raise TypeError.
            if item is None or len(item) == 0:
                logging.warn(logPrefix + str(item) + " is empty for isbn:" + isbn)
                raise Exception(logPrefix + str(item) + " is empty")
        return bookadda_data
    except Exception:
        logging.warn(logPrefix + "Error in parsing bookadda data for isbn:" + isbn)
        return []
def getInfibeamData(isbn):
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='/tmp/myapp.log',
                        filemode='w')
    logging.warn(logPrefix + ' start fetching infibeam data')
    isbn = isbn.decode("ASCII")
    infibeam_url = "http://www.infibeam.com/search?q=" + isbn
    # Get a file-like object for the search results page.
    try:
        f = urllib.urlopen(infibeam_url)
    except Exception:
        logging.warn('infibeam Exception in urlopen for isbn:' + isbn)
        return []
    # Read from the object, storing the page's contents in 's'.
    s = f.read()
    f.close()
    try:
        myparser = InfibeamParser()
        # The parser raises once it has extracted what it needs, so the
        # exception here only marks the end of parsing.
        try:
            myparser.parse(s)
        except Exception:
            logging.warn("infibeam parsing complete")
        # get the price
        price = myparser.get_price()
        if price is None:
            logging.warn(logPrefix + " price is None for isbn:" + isbn)
            raise Exception(logPrefix + " price is None")
        price = int(utilities.cleanInteger(price))
        # get the mrp; fall back to the price when infibeam does not report one
        mrp = myparser.get_mrp()
        if mrp is None:
            logging.warn(logPrefix + " mrp is None for isbn:" + isbn)
            mrp = price
        mrp = int(utilities.cleanInteger(mrp))
        infibeam_url = ("http://bestonlinedealsindia.appspot.com/redirect?isbn=" + isbn +
                        "&vendor=infibeam&url=" + infibeam_url.replace('&', "$"))
        buynow_url = infibeam_url
        infibeam_data = ["Infibeam", "Rs. " + str(price), "Rs. " + str(mrp), buynow_url]
        logging.warn(infibeam_data)
        for item in infibeam_data:
            # Check for None before len() so a missing field cannot raise TypeError.
            if item is None or len(item) == 0:
                logging.warn(logPrefix + str(item) + " is empty for isbn:" + isbn)
                raise Exception(logPrefix + str(item) + " is empty")
        return infibeam_data
    except Exception:
        logging.warn(logPrefix + "exception in parsing infibeam data for isbn:" + isbn)
        return []
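
# The request handler further down runs each vendor fetch on its own thread
# (e.g. infibeam.InfibeamThread(isbn) followed by start()/join()/get_result()).
# Those wrapper classes are not shown in this section; the class below is a
# minimal sketch, assuming each wrapper is a threading.Thread subclass that
# calls the module's get*Data() function and stores the result. The name is
# illustrative, not the project's actual code.
import threading

class InfibeamThreadSketch(threading.Thread):
    """Hypothetical stand-in for infibeam.InfibeamThread."""

    def __init__(self, isbn):
        threading.Thread.__init__(self)
        self.isbn = isbn
        self.result = []

    def run(self):
        # getInfibeamData already returns [] on any failure, so the handler
        # can always call len() on the result.
        self.result = getInfibeamData(self.isbn)

    def get_result(self):
        return self.result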
def getRediffData(isbn):
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='/tmp/myapp.log',
                        filemode='w')
    logging.warn(logPrefix + ' start fetching data')
    isbn = isbn.decode("ASCII")
    rediff_url = "http://books.rediff.com/book/" + isbn
    # Get a file-like object for the book page.
    try:
        f = urllib.urlopen(rediff_url)
    except Exception:
        logging.warn('Rediff Exception while urlopen for isbn:' + isbn)
        return []
    # Read from the object, storing the page's contents in 's'.
    s = f.read()
    f.close()
    try:
        myparser = RediffParser()
        # The parser raises once it has extracted what it needs, so the
        # exception here only marks the end of parsing.
        try:
            myparser.parse(s)
        except Exception:
            logging.warn("rediff parsing complete")
        # get the price; rediff reports it as "Rs.<amount>", so keep the part
        # after the dot
        price = myparser.get_price()
        if price is None:
            logging.warn(logPrefix + " price is None for isbn:" + isbn)
            raise Exception(logPrefix + " price is None")
        price = price.split(".")[1]
        price = int(utilities.cleanInteger(price))
        rediff_url = ("http://bestonlinedealsindia.appspot.com/redirect?isbn=" + isbn +
                      "&vendor=rediffbooks&url=" + rediff_url.replace('&', "$"))
        buynow_url = rediff_url
        # rediff does not expose an MRP, so only vendor, price and URL are returned
        rediff_data = ["Rediff Books", "Rs. " + str(price), buynow_url]
        logging.warn(rediff_data)
        for item in rediff_data:
            # Check for None before len() so a missing field cannot raise TypeError.
            if item is None or len(item) == 0:
                logging.warn(logPrefix + str(item) + " is empty for isbn:" + isbn)
                raise Exception(logPrefix + str(item) + " is empty")
        return rediff_data
    except Exception:
        logging.warn("Error in parsing rediff data for isbn:" + isbn)
        return []
def getIndiaplazaData(isbn):
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='/tmp/myapp.log',
                        filemode='w')
    logging.warn(logPrefix + ' start fetching data')
    isbn = isbn.decode("ASCII")
    indiaplaza_url = "http://www.indiaplaza.com/searchproducts.aspx?sn=books&affid=133884&q=" + isbn
    # Get a file-like object for the search results page.
    try:
        f = urllib.urlopen(indiaplaza_url)
    except Exception:
        logging.warn('indiaplaza Exception in urlopen for isbn:' + isbn)
        return []
    # Read from the object, storing the page's contents in 's'.
    s = f.read()
    f.close()
    try:
        myparser = IndiaplazaParser()
        # Any exception raised while parsing is swallowed here; the extracted
        # fields are validated below.
        try:
            myparser.parse(s)
        except Exception:
            logging.error("Error in parsing indiaplaza page")
        # get the price; indiaplaza reports it as "Rs.<amount>", so keep the
        # part after the dot
        price = myparser.get_price()
        if price is None:
            logging.warn(logPrefix + " price is None for isbn:" + isbn)
            raise Exception(logPrefix + " price is None")
        price = price.split(".")[1]
        price = int(utilities.cleanInteger(price))
        # get the mrp
        mrp = myparser.get_mrp()
        if mrp is None:
            logging.warn(logPrefix + " mrp is None for isbn:" + isbn)
            raise Exception(logPrefix + " mrp is None")
        mrp = mrp.split(".")[1]
        mrp = int(utilities.cleanInteger(mrp))
        indiaplaza_url = ("http://bestonlinedealsindia.appspot.com/redirect?isbn=" + isbn +
                          "&vendor=indiaplaza&url=" + indiaplaza_url.replace('&', "$"))
        buynow_url = indiaplaza_url
        indiaplaza_data = ["Indiaplaza", "Rs. " + str(price), "Rs. " + str(mrp), buynow_url]
        logging.warn(indiaplaza_data)
        for item in indiaplaza_data:
            # Check for None before len() so a missing field cannot raise TypeError.
            if item is None or len(item) == 0:
                logging.warn(logPrefix + str(item) + " is empty for isbn:" + isbn)
                raise Exception(logPrefix + str(item) + " is empty")
        return indiaplaza_data
    except Exception:
        logging.warn("Error in parsing indiaplaza data for isbn:" + isbn)
        return []
def getFlipkartData(isbn):
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='/tmp/myapp.log',
                        filemode='w')
    logging.warn(logPrefix + ' starting flipkart parsing')
    isbn = isbn.decode("ASCII")
    flipkart_url = "http://www.flipkart.com/m/search-all?query=" + isbn
    # Get a file-like object for the search results page.
    try:
        f = urllib.urlopen(flipkart_url)
        logging.warn('flipkart after urlopen')
    except Exception:
        logging.warn('flipkart Exception in urlopen')
        return []
    # Read from the object, storing the page's contents in 's'.
    s = f.read()
    f.close()
    logging.warn("read flipkart data")
    # Try and process the page. The parser class must already be defined.
    try:
        myparser = FlipkartParser()
        logging.warn("starting the parser")
        myparser.parse(s)
        logging.warn("parsing complete")
        price = myparser.get_price()
        if price is None:
            logging.warn(logPrefix + " price is None")
            raise Exception(logPrefix + " price is None")
        mrp = myparser.get_mrp()
        if mrp is None:
            logging.warn(logPrefix + " mrp is None")
            raise Exception(logPrefix + " mrp is None")
        authornames = myparser.get_authorname()
        if len(authornames) == 0:
            logging.warn(logPrefix + " author is None")
            raise Exception(logPrefix + " No author names")
        display_author = None
        for author in authornames:
            if display_author is None:
                display_author = author
            else:
                display_author = display_author + " " + author
        bookname = myparser.get_bookname()
        if bookname is None:
            logging.warn(logPrefix + " bookname is None")
            raise Exception(logPrefix + " bookname is None")
        thumbnail = myparser.get_thumbnail()
        if thumbnail is None:
            logging.warn(logPrefix + " thumbnail is None")
            raise Exception(logPrefix + " thumbnail is None")
        flipkart_url = "http://www.flipkart.com/books/" + isbn + "?affid=vdhawalgma"
        flipkart_url = ("http://bestonlinedealsindia.appspot.com/redirect?isbn=" + isbn +
                        "&vendor=flipkart&url=" + flipkart_url.replace('&', "$"))
        buynow_url = flipkart_url
        # price/mrp look like "Rs. 295"; clean the numeric part before int()
        price_val = int(utilities.cleanInteger(price.split(" ")[1]))
        mrp_val = int(utilities.cleanInteger(mrp.split(" ")[1]))
        discount = (mrp_val - price_val) * 100 / mrp_val
        flipkart_data = [isbn, bookname, display_author, "", thumbnail, "Flipkart",
                         str(price), str(mrp), str(discount) + "%", buynow_url]
        return flipkart_data
    except Exception:
        logging.warn("Error in parsing flipkart data")
        return []
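
# getFlipkartData returns a positional list that the request handler below
# reads by index. A quick usage sketch (the ISBN is just an example value,
# not taken from the project):
#
#   data = getFlipkartData("9780132350884")
#   if data:
#       isbn, name, author, _, thumbnail, vendor, price, mrp, discount, url = data
#
# Index layout: 0 isbn, 1 book name, 2 author(s), 3 unused, 4 thumbnail URL,
# 5 vendor label, 6 price, 7 MRP, 8 discount string, 9 buy-now redirect URL.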
def get(self): inputisbn = self.request.get("isbn") logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', filename='/tmp/myapp.log', filemode='w') #log the metric for this request metricquery = IsbnMetricsDB.gql("WHERE isbn = :1",inputisbn) isbnmetricsdata = metricquery.get() if(not isbnmetricsdata): isbnmetricsdata = IsbnMetricsDB() isbnmetricsdata.hits = 0 isbnmetricsdata.isbn = inputisbn isbnmetricsdata.redirecttoflipkart = 0 isbnmetricsdata.redirecttoindiaplaza = 0 isbnmetricsdata.redirecttoinfibeam = 0 isbnmetricsdata.redirecttorediff = 0 isbnmetricsdata.redirecttobookadda = 0 isbnmetricsdata.redirecttosimplybooks = 0 isbnmetricsdata.redirecttolandmark = 0 isbnmetricsdata.hits += 1 isbnmetricsdata.put() #check in isbn DB first query = IsbnDB.gql("WHERE isbn = :1", inputisbn) isbndata = query.get() current_time = datetime.now() fetchDataAgain = False if(isbndata): date_created = isbndata.created if (current_time-date_created).days > 3: #delete the older entry along with all the associated entries in DetailDB table db.delete(isbndata.all_vendors) isbndata.all_vendors = [] fetchDataAgain = True newBook = None if(fetchDataAgain == True or not isbndata): logging.warn("calling flipkart") flipkart_data = flipkart.getFlipkartData(inputisbn) logging.warn("called flipkart") logging.warn(flipkart_data) if(len(flipkart_data)>0): logging.warn(flipkart_data) #if this is the first time for this book, persist all info if (not isbndata): newBook = IsbnDB() newBook.isbn = flipkart_data[0] newBook.name = flipkart_data[1].strip('\n') newBook.author = flipkart_data[2].strip('\n') newBook.thumbnail_url = flipkart_data[4] #else reuse the old data else: newBook = isbndata newBook.created = current_time flipkartmetricquery = VendorMetricDB.gql("WHERE vendorname = :1", "flipkart") flipkartmetricdata = flipkartmetricquery.get() if(not flipkartmetricdata): flipkartmetricdata = VendorMetricDB() flipkartmetricdata.vendorname = "flipkart" flipkartmetricdata.resultcount = 0 flipkartmetricdata.topresult = 0 flipkartmetricdata.resultcount += 1 flipkartmetricdata.put() flipkart_details = DetailDB() flipkart_details.vendor_name = flipkart_data[5].strip('\n') flipkart_details.price = flipkart_data[6] flipkart_details.mrp = flipkart_data[7] flipkart_details.discount = flipkart_data[8] flipkart_details.buy_now_url = flipkart_data[9] flipkart_details.put() newBook.all_vendors.append(flipkart_details.key()) #spawn all the parsing threads landmarkThread = landmarks.LandmarkThread(inputisbn) rediffThread = rediff.RediffThread(inputisbn) infibeamThread = infibeam.InfibeamThread(inputisbn) indiaplazaThread = indiaplaza.IndiaplazaThread(inputisbn) bookaddaThread = bookadda.BookaddaThread(inputisbn) simplybooksThread = simplybooks.SimplybooksThread(inputisbn) landmarkThread.start() simplybooksThread.start() bookaddaThread.start() indiaplazaThread.start() infibeamThread.start() rediffThread.start() #wait for all threads simplybooksThread.join() bookaddaThread.join() indiaplazaThread.join() infibeamThread.join() rediffThread.join() landmarkThread.join() #gather data from landmark #landmark_data = landmarks.getLandMarkData(inputisbn) landmark_data = landmarkThread.get_result() if(len(landmark_data)>0): landmark_details = DetailDB() landmark_details.vendor_name = landmark_data[0] landmark_details.price = landmark_data[1] landmark_details.mrp = landmark_data[2] landmark_details.buy_now_url = landmark_data[3] landmark_price = (landmark_details.price.split(" ")[1]) price_val = 
int(utilities.cleanInteger(landmark_price)) mrp_val = int(landmark_details.mrp.split(" ")[1]) landmark_details.discount = str((mrp_val - price_val)*100/mrp_val)+"%" landmark_details.put() newBook.all_vendors.append(landmark_details.key()) landmarkmetricquery = VendorMetricDB.gql("WHERE vendorname = :1", "landmark") landmarkmetricdata = landmarkmetricquery.get() if(not landmarkmetricdata): landmarkmetricdata = VendorMetricDB() landmarkmetricdata.vendorname = "landmark" landmarkmetricdata.resultcount = 0 landmarkmetricdata.topresult = 0 landmarkmetricdata.resultcount += 1 landmarkmetricdata.put() #gather data from rediff #rediff_data = rediff.getRediffData(inputisbn) rediff_data = rediffThread.get_result() if(len(rediff_data)>0): rediff_details = DetailDB() rediff_details.vendor_name = rediff_data[0] rediff_details.price = rediff_data[1] rediff_details.buy_now_url = rediff_data[2] rediff_details.mrp = flipkart_details.mrp rediff_price = (rediff_details.price.split(" ")[1]) price_val = int(utilities.cleanInteger(rediff_price)) mrp_val = int(rediff_details.mrp.split(" ")[1]) rediff_details.discount = str((mrp_val - price_val)*100/mrp_val)+"%" rediff_details.put() newBook.all_vendors.append(rediff_details.key()) rediffmetricquery = VendorMetricDB.gql("WHERE vendorname = :1", "rediff") rediffmetricdata = rediffmetricquery.get() if(not rediffmetricdata): rediffmetricdata = VendorMetricDB() rediffmetricdata.vendorname = "rediff" rediffmetricdata.resultcount = 0 rediffmetricdata.topresult = 0 rediffmetricdata.resultcount += 1 rediffmetricdata.put() #gather infibeam data #infibeam_data = infibeam.getInfibeamData(inputisbn) infibeam_data = infibeamThread.get_result() if(len(infibeam_data)>0): infibeam_details = DetailDB() infibeam_details.vendor_name = infibeam_data[0] infibeam_details.price = infibeam_data[1] infibeam_details.mrp = infibeam_data[2] infibeam_details.buy_now_url = infibeam_data[3] infibeam_price = (infibeam_details.price.split(" ")[1]) price_val = int(utilities.cleanInteger(infibeam_price)) mrp_val = int(infibeam_details.mrp.split(" ")[1]) infibeam_details.discount = str((mrp_val - price_val)*100/mrp_val)+"%" infibeam_details.put() newBook.all_vendors.append(infibeam_details.key()) infibeammetricquery = VendorMetricDB.gql("WHERE vendorname = :1", "infibeam") infibeammetricdata = infibeammetricquery.get() if(not infibeammetricdata): infibeammetricdata = VendorMetricDB() infibeammetricdata.vendorname = "infibeam" infibeammetricdata.resultcount = 0 infibeammetricdata.topresult = 0 infibeammetricdata.resultcount += 1 infibeammetricdata.put() #gather indiaplaza data #indiaplaza_data = indiaplaza.getIndiaplazaData(inputisbn) indiaplaza_data = indiaplazaThread.get_result() if(len(indiaplaza_data)>0): indiaplaza_details = DetailDB() indiaplaza_details.vendor_name = indiaplaza_data[0] indiaplaza_details.price = indiaplaza_data[1] indiaplaza_details.mrp = indiaplaza_data[2] indiaplaza_details.buy_now_url = indiaplaza_data[3] indiaplaza_price = (indiaplaza_details.price.split(" ")[1]) price_val = int(utilities.cleanInteger(indiaplaza_price)) mrp_val = int(indiaplaza_details.mrp.split(" ")[1]) indiaplaza_details.discount = str((mrp_val - price_val)*100/mrp_val)+"%" indiaplaza_details.put() newBook.all_vendors.append(indiaplaza_details.key()) indiaplazametricquery = VendorMetricDB.gql("WHERE vendorname = :1", "indiaplaza") indiaplazametricdata = indiaplazametricquery.get() if(not indiaplazametricdata): indiaplazametricdata = VendorMetricDB() indiaplazametricdata.vendorname = "indiaplaza" 
indiaplazametricdata.resultcount = 0 indiaplazametricdata.topresult = 0 indiaplazametricdata.resultcount += 1 indiaplazametricdata.put() simplybooks_data = simplybooksThread.get_result() if(len(simplybooks_data)>0): simplybooks_details = DetailDB() simplybooks_details.vendor_name = simplybooks_data[0] simplybooks_details.price = simplybooks_data[1] simplybooks_details.mrp = simplybooks_data[2] simplybooks_details.buy_now_url = simplybooks_data[3] simplybooks_price = (simplybooks_details.price.split(" ")[1]) price_val = int(utilities.cleanInteger(simplybooks_price)) mrp_val = int(simplybooks_details.mrp.split(" ")[1]) simplybooks_details.discount = str((mrp_val - price_val)*100/mrp_val)+"%" simplybooks_details.put() newBook.all_vendors.append(simplybooks_details.key()) simplybooksmetricquery = VendorMetricDB.gql("WHERE vendorname = :1", "simplybooks") simplybooksmetricdata = simplybooksmetricquery.get() if(not simplybooksmetricdata): simplybooksmetricdata = VendorMetricDB() simplybooksmetricdata.vendorname = "simplybooks" simplybooksmetricdata.resultcount = 0 simplybooksmetricdata.topresult = 0 simplybooksmetricdata.resultcount += 1 simplybooksmetricdata.put() bookadda_data = bookaddaThread.get_result() if(len(bookadda_data)>0): bookadda_details = DetailDB() bookadda_details.vendor_name = bookadda_data[0] bookadda_details.price = bookadda_data[1] bookadda_details.mrp = bookadda_data[2] bookadda_details.buy_now_url = bookadda_data[3] bookadda_price = (bookadda_details.price.split(" ")[1]) price_val = int(utilities.cleanInteger(bookadda_price)) mrp_val = int(bookadda_details.mrp.split(" ")[1]) bookadda_details.discount = str((mrp_val - price_val)*100/mrp_val)+"%" bookadda_details.put() newBook.all_vendors.append(bookadda_details.key()) bookaddametricquery = VendorMetricDB.gql("WHERE vendorname = :1", "bookadda") bookaddametricdata = bookaddametricquery.get() if(not bookaddametricdata): bookaddametricdata = VendorMetricDB() bookaddametricdata.vendorname = "bookadda" bookaddametricdata.resultcount = 0 bookaddametricdata.topresult = 0 bookaddametricdata.resultcount += 1 bookaddametricdata.put() newBook.put() #by this time, the data should be present in the DB updated_data = None if not newBook: query1 = IsbnDB.gql("WHERE isbn = :1", inputisbn) updated_data = query1.get() else: updated_data = newBook #read from DB & return the result self.response.out.write("<html><head></head><body>") if(updated_data): #sort the results all_vendors = [] for vendor in updated_data.all_vendors: all_vendors.append(db.get(vendor)) all_vendors_sorted = sort(all_vendors) vendormetricquery = VendorMetricDB.gql("WHERE vendorname = :1", all_vendors_sorted[0].vendor_name.replace(' ','').lower()) vendormetricdata = vendormetricquery.get() if(not vendormetricdata): vendormetricdata = VendorMetricDB() vendormetricdata.vendorname = all_vendors_sorted[0].vendor_name.replace(' ','').lower() vendormetricdata.resultcount = 0 vendormetricdata.topresult = 0 vendormetricdata.topresult += 1 vendormetricdata.put() #write html to output stream html = "<div style='float: left;margin-right:10px;'><img height='100' id='bookThumbnail' src='" + updated_data.thumbnail_url + "'></div>" html += "<div style='float: left; word-wrap: break-word; width: 60%;'><div id='bookName' style='color:white;'>"+ updated_data.name + "</div>" html += "<div style='color:white' id = 'authorName'>" + updated_data.author +"</div>" html += "</div><div style='clear:both'></div>" self.response.out.write(html) self.response.out.write("<br><ul id = 'priceGrid' 
data-count-theme='b' data-role='listview' data-theme='a'>") for vendor in all_vendors_sorted: if(vendor): self.response.out.write(getLi(vendor)) self.response.out.write("</ul>") else: self.response.out.write("We are sorry, couldn't find any match") self.response.out.write("</body></html>")
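
# The handler above relies on two helpers that are not defined in this
# section: sort(all_vendors), which orders the DetailDB rows before display,
# and getLi(vendor), which renders one row as an <li>. The sketches below are
# assumptions about their behaviour (sort ascending by numeric price, render a
# simple list item), not the project's actual implementations.
def sort_by_price_sketch(vendors):
    """Hypothetical stand-in for sort(): cheapest offer first."""
    def price_value(detail):
        # detail.price looks like "Rs. 295"
        return int(utilities.cleanInteger(detail.price.split(" ")[1]))
    return sorted(vendors, key=price_value)

def getLi_sketch(vendor):
    """Hypothetical stand-in for getLi(): one list item per vendor offer."""
    return ("<li><a href='" + vendor.buy_now_url + "'>" + vendor.vendor_name +
            " - " + vendor.price + " (" + vendor.discount + " off)</a></li>")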