def __init__(self, model=None, query=None, using=None, cache=DEFAULT_CACHE,
             timeout=30):
    # NOTE(review): this def appears to be a stray module-level duplicate of
    # AmazonQuerySet.__init__ defined below — confirm whether it can be removed.
    super(AmazonQuerySet, self).__init__(model, query, using, cache, timeout)
    # Import deferred to call time: bserial.serial imports the Book model,
    # so a module-level import would be circular.
    from bserial.serial import AmazonBookInterface
    self.amazon = AmazonBookInterface()
class AmazonQuerySet(CacheQuerySet):
    """
    QuerySet that can retrieve items from the Amazon API.

    Every book fetched from Amazon is stored in the local cache via
    ``cache_add`` so subsequent reads can be served without another
    API round-trip.
    """

    # Maximum number of ItemIds sent to the Amazon API in one lookup call;
    # batch_lookup slices its id list into chunks of this size.
    _max_results = 10

    def __init__(self, model=None, query=None, using=None, cache=DEFAULT_CACHE,
                 timeout=30):
        super(AmazonQuerySet, self).__init__(model, query, using, cache,
                                             timeout)
        # Import deferred to call time: bserial.serial imports the Book
        # model, so a module-level import would be circular.
        from bserial.serial import AmazonBookInterface
        self.amazon = AmazonBookInterface()

    def lookup(self, *args, **kwargs):
        """
        Look up books via the Amazon API and cache the results.

        When called with no positional args and no ``ItemId`` keyword, the
        ASINs of up to ``_max_results`` books already in this queryset are
        used as the ids to look up.

        Returns a list of books; empty list when there are no results.
        """
        # "in" test instead of the deprecated dict.has_key() (removed in Py3).
        if not args and "ItemId" not in kwargs:
            kwargs["ItemId"] = [
                book.asin for book in self.all()[:self._max_results]
            ]
        # Perform lookup. books is a list; no results => empty list.
        books = self.amazon.lookup(*args, **kwargs)
        # Cache results for later queryset reads.
        for book in books:
            self.cache_add(book)
        return books

    def batch_lookup(self, *args, **kwargs):
        """
        DANGEROUS! Same as ``lookup``, but performs multiple queries for
        large counts.

        It will keep sending Amazon API requests, regardless of how many
        books are out there.
        """
        # If no ids are specified, use ALL books in this queryset (unbounded).
        if not args and "ItemId" not in kwargs:
            kwargs["ItemId"] = [book.asin for book in self.all()]
        books = []
        item_ids = kwargs["ItemId"]
        # Walk the id list in _max_results-sized slices, one request each.
        # range() replaces Py2-only xrange(); iteration behavior is identical.
        for seg_start in range(0, len(item_ids), self._max_results):
            seg_end = seg_start + self._max_results
            # Deep-copy so the caller's kwargs (and its ItemId list) are
            # never mutated by the per-segment substitution.
            seg_kwargs = deepcopy(kwargs)
            seg_kwargs["ItemId"] = item_ids[seg_start:seg_end]
            books += self.amazon.lookup(*args, **seg_kwargs)
        # Cache all results after the requests complete.
        for book in books:
            self.cache_add(book)
        return books

    def search(self, *args, **kwargs):
        """Search the Amazon API, caching each returned book."""
        books = self.amazon.search(*args, **kwargs)
        for book in books:
            self.cache_add(book)
        return books