def upload(self, key, i):
    """Upload a cover image to the coverstore service and return its parsed JSON
    response as a web.storage object.

    `key` is a document key such as "/books/OL1M"; only the trailing OLID is sent.
    `i` is the web input, optionally carrying a `file` upload and/or a `url` field.
    """
    olid = key.split("/")[-1]

    # Extract the raw uploaded bytes, if a file field was actually posted.
    data = i.file.value if (i.file is not None and hasattr(i.file, 'value')) else None

    # A bare "http://" means the URL field was left at its placeholder value.
    if i.url and i.url.strip() == "http://":
        i.url = ""

    user = accounts.get_current_user()
    params = {
        'author': user and user.key,
        'data': data,
        'source_url': i.url,
        'olid': olid,
        'ip': web.ctx.ip,
    }

    upload_url = '%s/%s/upload2' % (get_coverstore_url(), self.cover_category)
    if upload_url.startswith("//"):
        upload_url = "http:" + upload_url

    try:
        out = urllib2.urlopen(upload_url, urllib.urlencode(params)).read()
    except urllib2.HTTPError as e:
        # Coverstore reports failures as an HTTP error whose body is still JSON.
        out = e.read()
    return web.storage(simplejson.loads(out))
def get_covers(self, offset=0, limit=20):
    """Return a page of this document's editions as plain dicts, each annotated
    with a 'cover_id' when the coverstore knows a cover for it."""
    editions = self.get_editions(offset, limit)
    olids = [e['key'].split('/')[-1] for e in editions]

    try:
        url = '%s/b/query?cmd=ids&olid=%s' % (get_coverstore_url(), ",".join(olids))
        cover_ids = simplejson.loads(urllib2.urlopen(url).read())
    except IOError as e:
        # Best effort: if the coverstore is unreachable, annotate nothing.
        print('ERROR in getting cover_ids', str(e), file=web.debug)
        cover_ids = {}

    def annotate(edition):
        # Work on a copy, stripped of bulky fields the caller does not need.
        d = dict(edition)
        for field in ('type', 'subjects', 'languages'):
            d.pop(field, None)
        olid = d['key'].split('/')[-1]
        if olid in cover_ids:
            d['cover_id'] = cover_ids[olid]
        return d

    return [annotate(e) for e in editions]
def query_coverstore(category, **kw):
    """Run a query against the coverstore for the given category.

    Keyword arguments become the query string. Returns the parsed JSON result,
    or [] when the coverstore cannot be reached.
    """
    try:
        url = "%s/%s/query?%s" % (get_coverstore_url(), category, urllib.urlencode(kw))
        return simplejson.loads(urllib2.urlopen(url).read())
    except IOError:
        return []
def upload(self, key, i):
    """Uploads a cover to coverstore and returns the response.

    `key` is a document key like "/books/OL1M"; only the trailing OLID is sent.
    `i` is the web input, optionally carrying a `file` upload and/or a `url` field.
    Returns a web.storage built from the coverstore's JSON response.
    """
    olid = key.split("/")[-1]

    if i.file is not None and hasattr(i.file, 'value'):
        data = i.file.value
    else:
        data = None

    # A bare "http://" means the URL field was left at its placeholder value.
    if i.url and i.url.strip() == "http://":
        i.url = ""

    user = accounts.get_current_user()
    params = {
        "author": user and user.key,
        "data": data,
        "source_url": i.url,
        "olid": olid,
        "ip": web.ctx.ip
    }
    upload_url = '%s/%s/upload2' % (get_coverstore_url(), self.cover_category)
    if upload_url.startswith("//"):
        upload_url = "http:" + upload_url
    try:
        # BUG FIX: in Python 3, urlopen's POST data must be bytes;
        # urlencode returns str, so encode it explicitly.
        payload = urllib.parse.urlencode(params).encode('utf-8')
        response = urllib.request.urlopen(upload_url, payload)
        out = response.read()
    except urllib.error.HTTPError as e:
        # BUG FIX: previously this wrapped the error body in a dict
        # ({'error': e.read()}), which simplejson.loads cannot parse and
        # would raise TypeError. Keep the raw JSON error body instead,
        # matching the sibling implementation.
        out = e.read()
    return web.storage(simplejson.loads(out))
def get_covers(self, offset=0, limit=20):
    """Fetch cover ids for a page of this document's editions.

    NOTE(review): this variant stops after computing `cover_ids` and implicitly
    returns None — the sibling implementation goes on to annotate and return the
    editions; this body may be truncated. Confirm against callers.
    """
    editions = self.get_editions(offset, limit)
    olids = [e['key'].split('/')[-1] for e in editions]
    try:
        url = '%s/b/query?cmd=ids&olid=%s' % (get_coverstore_url(), ",".join(olids))
        data = urllib2.urlopen(url).read()
        cover_ids = simplejson.loads(data)
    except IOError as e:
        # FIX: was `except IOError, e:` and `print >> web.debug, ...`, both
        # Python-2-only syntax (SyntaxError under Python 3). Rewritten with the
        # `as` form and print() to match the sibling implementation in this file.
        print('ERROR in getting cover_ids', str(e), file=web.debug)
        cover_ids = {}
def info(self):
    """Fetch this cover's metadata record from the coverstore.

    Returns a web.storage with a parsed `created` datetime and the author
    resolved to a site object (or None). Returns None when the coverstore
    is unreachable.
    """
    url = '%s/%s/id/%s.json' % (get_coverstore_url(), self.category, self.id)
    try:
        record = simplejson.loads(urllib2.urlopen(url).read())
        record['created'] = parse_datetime(record['created'])
        # Coverstore serialises a missing author as the literal string 'None'.
        author = record['author']
        if author == 'None':
            author = None
        record['author'] = author and web.ctx.site.get(author)
        return web.storage(record)
    except IOError:
        # coverstore is down
        return None
def url(self, size="M"):
    """Return the coverstore image URL for this cover.

    The size letter is upper-cased before being placed in the URL.
    """
    base = get_coverstore_url()
    return "{0}/{1}/id/{2}-{3}.jpg".format(base, self.category, self.id, size.upper())