def update_category_list(entry):
    """Merge any previously unseen atom:category elements of *entry* into
    the shared ``static/catlist.atom`` document.

    Categories are keyed on term+scheme via the module-level
    ``_seen_categories`` list.  When at least one new category is added,
    the catlist's <updated> element is refreshed and the file rewritten.
    """
    catlist_path = os.path.join('static', 'catlist.atom')
    # Context managers close the handles even on error; the original
    # leaked both the read and the write file objects.
    with open(catlist_path, 'r') as source:
        catlist = E.load(source).xml_root
    modified = False
    for category in entry.xml_root.get_children('category', ATOM10_NS):
        token = (category.get_attribute_value('term', '') +
                 category.get_attribute_value('scheme', ''))
        if token in _seen_categories:
            continue
        modified = True
        _seen_categories.append(token)
        # Copy over only the attributes actually present on the source
        # category element.
        attrs = {}
        for attr_name in (u'term', u'scheme', u'label'):
            value = category.get_attribute_value(attr_name, None)
            if value:
                attrs[attr_name] = value
        E(u'category', attributes=attrs, namespace=ATOM10_NS,
          prefix=ATOM10_PREFIX, parent=catlist)
    if modified:
        catlist.get_child('updated', ATOM10_NS).xml_text = get_isodate()
        with open(catlist_path, 'w') as target:
            target.write(catlist.xml())
def create_entry(self, text):
    """Build a fresh Atom entry from the ``./entry.atom`` template.

    Assigns a newly generated UUID URI as the entry id, stamps the
    published/updated elements with the current ISO date, and stores
    *text* as the entry content.  Returns ``(uuid, entry)``.
    """
    entry = E.load('./entry.atom').xml_root
    ns = entry.xml_ns
    uuid = generate_uuid_uri()
    entry.get_child('id', ns=ns).xml_text = uuid
    stamp = get_isodate()
    # published and updated start out identical on a brand-new entry.
    for field in ('published', 'updated'):
        entry.get_child(field, ns=ns).xml_text = stamp
    entry.get_child('content', ns=ns).xml_text = unicode(text)
    return uuid, entry
def make_entry(self, msg, node):
    """Build an Atom entry for *msg* from the ``./entry.atom`` template.

    Sets a fresh UUID id, the profile's username as author name, current
    timestamps, and *msg* as content.  When *node* is not the top-level
    pubsub node, its URL trail is attached as a category term.
    Returns ``(uuid, entry)``.
    """
    uuid = generate_uuid_uri()
    entry = E.load('./entry.atom').xml_root
    ns = entry.xml_ns
    entry.get_child('id', ns=ns).xml_text = uuid
    stamp = get_isodate()
    author = entry.get_child('author', ns=ns)
    author.get_child('name', ns=ns).xml_text = unicode(self.profile.username)
    entry.get_child('published', ns=ns).xml_text = stamp
    entry.get_child('updated', ns=ns).xml_text = stamp
    entry.get_child('content', ns=ns).xml_text = unicode(msg)
    if node != self.pubsub_top_level_node:
        # Non-root nodes are tagged with the trailing segment of their URL.
        tag = extract_url_trail(node)
        E(u'category', namespace=ns, prefix=entry.xml_prefix,
          attributes={u'term': unicode(tag)}, parent=entry)
    return uuid, entry
def send_sqs_blip_notification(blip_type, member, start, end, aws_key, aws_private_key):
    """Publish an LLUP 'blip' notification for *member* to its collection's
    message queue.

    Parameters:
        blip_type -- 'create' or 'update'; selects whether the entry's
                     published or updated timestamp is carried over.
        member    -- the APP member whose Atom entry is being announced.
        start/end -- notification validity window; *end* also drives both
                     the LLUP expires value and the S3 URL expiry.
        aws_key / aws_private_key -- AWS credentials used to sign a
                     time-limited S3 query-string URL for the member.
    """
    entry = member.atom
    href = entry.filtrate(lookup_links, rel=u'edit')[0].get_attribute('href')

    # NOTE(review): the action is u'create' even for blip_type == 'update';
    # looks intentional (only the timestamp differs) — confirm.
    n = Notification(u'create', unicode(member.collection.get_base_edit_uri()))
    n.links.append(Link(unicode(href), u'self'))
    if blip_type == 'create':
        n.published = entry.published.xml_text
    elif blip_type == 'update':
        n.updated = entry.updated.xml_text
    n.expires = get_isodate(end)

    # Build a time-limited S3 signed URL (query-string authentication)
    # pointing at the stored representation of the member.
    tokens = urlparse(str(href))
    host = tokens[1]
    s3_href = unicode(href).replace(host, 's3.amazonaws.com')
    path = '/%s_%s/%s' % (member.collection.store.storage.unique_prefix,
                          member.collection.name_or_id,
                          os.path.split(tokens[2])[-1])
    expires = int(time.mktime(end.timetuple()))
    s3_qs = hmac.new(aws_private_key,
                     s3_canonical_string('GET', path, {}, expires=str(expires)),
                     sha)
    s3_qs = urllib.quote_plus(base64.encodestring(s3_qs.digest()).strip())
    s3_href = unicode(s3_href).replace(
        tokens[2],
        '%s?AWSAccessKeyId=%s&Expires=%d&Signature=%s' % (path, aws_key,
                                                          expires, s3_qs))
    n.links.append(Link(s3_href, u's3'))

    n.from_entry(entry)
    blip = n.to_entry()

    # Push with a bounded number of retries.  Bug fix: the original
    # counter started at 5 and tested 'retry > 5', which allowed only a
    # single retry before giving up.
    attempts = 0
    while True:
        try:
            member.collection.m.push(blip)
            break
        except HTTPException:
            attempts += 1
            if attempts >= 5:
                break
            time.sleep(0.0001)
def on_create(self, member, content):
    """Populate the new member's Atom entry from the submitted form fields.

    Reads 'name', 'location', 'startdate', 'enddate', 'genre', 'tags' and
    'description' from the *content* dict and materialises them as Atom,
    GeoRSS/W3C-geo and LLUP elements on ``member.atom``.
    Returns ``(member, None)``.
    """
    def parse_mdy(value):
        # 'MM/DD/YYYY' -> ISO date string; fall back to the raw text when
        # the value does not parse (mirrors the original best-effort).
        try:
            month, day, year = value.split('/')
            return get_isodate(datetime(int(year), int(month), int(day)))
        except Exception:
            return unicode(value)

    entry = member.atom
    ns = entry.xml_ns
    prefix = entry.xml_prefix

    # Title: create the element if missing, then set its text.
    # Bug fix: the freshly created element was previously discarded,
    # leaving title None and crashing on the assignment below.
    title = entry.get_child('title', ns)
    if not title:
        title = E(u'title', namespace=ns, prefix=prefix, parent=entry)
    title.xml_text = content.get('name')

    author = entry.get_child('author', ns)
    author_name = self.member_type.params['name']
    if author:
        name = author.get_child('name', ns)
        if not name:
            name = E(u'name', namespace=ns, prefix=prefix, parent=author)
        name.xml_text = author_name
    else:
        author = E(u'author', namespace=ns, prefix=prefix, parent=entry)
        # Bug fix: the created element was not bound, so the assignment
        # below raised NameError.
        name = E(u'name', namespace=ns, prefix=prefix, parent=author)
        name.xml_text = author_name

    # Location: emit both a georss:point and a geo:Point representation.
    location = content.get('location', None)
    if location:
        loc_text = location.decode('utf-8')
        lat, lg = loc_text.split(' ')
        E(u'point', content=loc_text,
          namespace=u'http://www.georss.org/georss', prefix=u'georss',
          parent=entry)
        geo = E('Point', namespace=u'http://www.w3.org/2003/01/geo/wgs84_pos#',
                prefix=u'geo', parent=entry)
        E(u'lat', content=lat,
          namespace=u'http://www.w3.org/2003/01/geo/wgs84_pos#',
          prefix=u'geo', parent=geo)
        E(u'long', content=lg,
          namespace=u'http://www.w3.org/2003/01/geo/wgs84_pos#',
          prefix=u'geo', parent=geo)

    startdate = content.get('startdate', None)
    if startdate:
        E(u'start-time', content=parse_mdy(startdate),
          namespace=LLUP_NS, prefix=LLUP_PREFIX, parent=entry)

    enddate = content.get('enddate', None)
    if enddate:
        dt = parse_mdy(enddate)
        # The end date doubles as both the LLUP expiry and the end-time.
        E(u'expires', content=dt, namespace=LLUP_NS, prefix=LLUP_PREFIX,
          parent=entry)
        E(u'end-time', content=dt, namespace=LLUP_NS, prefix=LLUP_PREFIX,
          parent=entry)

    genre = content.get('genre', None)
    if genre:
        genre = genre.decode('utf-8')
        attrs = {u'term': genre, u'label': genre.capitalize(),
                 u'scheme': u'http://personplacething.info/music/genre'}
        E(u'category', attributes=attrs, namespace=ns, prefix=prefix,
          parent=entry)

    cats = content.get('tags', None)
    if cats:
        for cat in cats.split(','):
            cat = cat.decode('utf-8')
            E(u'category', attributes={u'term': cat, u'label': cat},
              namespace=ns, prefix=prefix, parent=entry)

    # Renamed from 'content' to avoid shadowing the *content* parameter.
    desc = content.get('description', '')
    content_el = entry.get_child('content', ns)
    if not content_el:
        content_el = E(u'content', namespace=ns, prefix=prefix, parent=entry)
    content_el.xml_text = desc

    return member, None