def newPost(self, blogId, title, content, date=None, other=None):
    """Create a new LiveJournal post and return it as a Post object.

    blogId and date are accepted for interface compatibility but are not
    used; LJ takes the entry timestamp from the local-time fields below.
    If 'other' is given, it is merged into the request parameters and a
    pickled copy is stored on the returned Post's Data attribute.
    """
    t = time.localtime()
    created = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
    par = {
        "event": content,
        "subject": title,
        "year": t[0],
        "mon": t[1],
        "day": t[2],
        "hour": t[3],
        "min": t[4],
    }
    if other:
        par.update(other)
    params = self.prepare_call(par)
    # BUG FIX: the prepared parameters were built but the raw 'par' dict
    # was sent instead, leaving 'params' unused (compare getEvent, which
    # sends the prepared params). Send the prepared call.
    res = self.server.LJ.XMLRPC.postevent(params)
    p = Post(ID=res['itemid'], Title=title, Content=content, Created=created)
    if other:
        # Keyword argument for consistency with the other PostData(Pickle=...)
        # call sites in this file.
        p.Data = PostData(Pickle=pickle.dumps(other))
    return p
def newPost(self, blogId, title, content, date=None, other=None):
    """Append a new entry to the diary backend and return it as a Post.

    The diary backend stores no titles, so the creation timestamp doubles
    as the Title. blogId, title, date and other are accepted only for
    interface compatibility and are ignored.
    """
    # Lazily authenticate: the cookie attribute only exists after login().
    if not hasattr(self, "cookie"):
        self.login()
    # Index the new entry will occupy (current entry count, 0-based).
    entry_index = self.server.diary.len(self.username)
    # -1 appears to mean "append a new entry" on the diary server -- TODO confirm.
    self.server.diary.set(self.cookie, -1, content)
    timestamp = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
    return Post(ID=entry_index, Content=content, Created=timestamp, Title=timestamp)
def getPosts(self, blogId):
    """Return a list of the most recent posts in the system.

    Tries the metaWeblog API first and falls back to the older Blogger
    API on any failure. Titles are extracted from the post body via
    self.postre; posts that don't match keep their full content and get
    an empty title.
    """
    # todo: check for 'authorName'
    try:
        res = self.server.metaWeblog.getRecentPosts(
            str(blogId), self.username, self.password, 15)
    except Exception:
        # Fallback to the legacy Blogger API (narrowed from a bare except:).
        res = self.server.blogger.getRecentPosts(
            APIKEY, str(blogId), self.username, self.password, 15)
    ret = []
    for post in res:
        content = post['content']
        m = self.postre.match(content)
        if m:
            # group(1)=title, group(2)=category (unused), group(3)=body.
            title = m.group(1)
            content = m.group(3)
        else:
            title = ""
        updated = time.strftime(
            '%Y-%m-%dT%H:%M:%SZ',
            time.strptime(str(post['dateCreated']), '%Y%m%dT%H:%M:%S'))
        # BUG FIX: 'ret += Post(...)' tried to iterate the Post object
        # itself; append the Post (compare getEvent, which does ret += [Post]).
        ret.append(Post(ID=post['postid'], Content=content,
                        Title=title, Created=updated))
    return ret
def getPost(self, blogid, postid):
    """Fetch a single diary entry and wrap it in a Post object.

    The diary backend has no titles, so the formatted last-updated
    timestamp is reused as the Title; Data carries a pickled None
    placeholder. blogid is unused by this backend.
    """
    postid = int(postid)
    body = self.server.diary.get(self.username, postid)
    created, updated = self.server.diary.getDates(self.username, postid)
    updated = time.strftime(
        '%Y-%m-%dT%H:%M:%SZ',
        time.strptime(str(updated), '%Y%m%dT%H:%M:%S'))
    return Post(
        ID=str(postid),
        Content=unicode(body),
        Created=updated,
        Data=PostData(Pickle=pickle.dumps(None)),
        Title=updated,
    )
def newPost(self, blogId, title, content, date=None, other=None):
    """Post a new entry to LiveJournal and return it as a Post object.

    blogId and date exist for interface compatibility and are ignored;
    the entry timestamp comes from the local-time fields in the request.
    Extra fields in 'other' are merged into the request and a pickled
    copy is attached to the returned Post as Data.
    """
    t = time.localtime()
    created = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
    par = {
        "event": content,
        "subject": title,
        "year": t[0],
        "mon": t[1],
        "day": t[2],
        "hour": t[3],
        "min": t[4],
    }
    if other:
        par.update(other)
    params = self.prepare_call(par)
    # BUG FIX: previously the un-prepared 'par' dict was sent and the
    # prepared 'params' went unused; send the prepared call instead.
    res = self.server.LJ.XMLRPC.postevent(params)
    p = Post(ID=res['itemid'], Title=title, Content=content, Created=created)
    if other:
        # Keyword form matches the PostData(Pickle=...) usage elsewhere.
        p.Data = PostData(Pickle=pickle.dumps(other))
    return p
def getEvent(self, eventprops):
    """Fetch LiveJournal events matching eventprops and return them as Posts.

    Each event's props dict is pickled into the Post's Data attribute.
    """
    params = self.prepare_call(eventprops)
    res = self.server.LJ.XMLRPC.getevents(params)
    posts = []
    for ev in res['events']:
        # Reformat the server's "YYYY-MM-DD HH:MM:SS" time as ISO-8601.
        stamp = time.strptime(ev['eventtime'], '%Y-%m-%d %H:%M:%S')
        created = time.strftime('%Y-%m-%dT%H:%M:%SZ', stamp)
        posts.append(Post(
            ID=unicode(ev['itemid']),
            Content=unicode(ev["event"]),
            Created=created,
            Data=PostData(Pickle=pickle.dumps(ev["props"])),
            Title=unicode(ev['subject']),
        ))
    return posts
def newPost(self, blogId, title, content, date=None, other=None):
    """Make a new post to Blogger, returning its ID.

    POSTs an Atom entry to the blog's feed URL and parses the response
    to recover the new entry's id. Returns a Post on success, or None
    if no id could be extracted. 'other' is accepted for interface
    compatibility and ignored.
    """
    (created, headers) = self._makeCommonHeaders(date)
    headers["Content-type"] = "application/atom+xml"
    path = self.feedpath % (blogId)
    body = self._makeBody(title, content, created)
    conn = httplib.HTTPConnection(self.host)
    try:
        conn.request("POST", path, body, headers)
        response = conn.getresponse()
        resp = response.read()
    finally:
        # BUG FIX: the connection previously leaked if request/getresponse/
        # read raised; always close it.
        conn.close()
    amatch = self.id_re.search(feedparser.parse(resp)['entries'][0]['id'])
    if amatch:
        return Post(ID=amatch.group(1), Title=title, Content=content, Created=created)
    return None
def getPost(self, blogId, postId):
    """Return the post with id postId from blog blogId, or None.

    Fetches the entry's Atom feed, parses it with feedparser, and wraps
    the first entry in a Post object. Escaped content is unescaped
    before being stored.
    """
    (created, headers) = self._makeCommonHeaders()
    conn = httplib.HTTPConnection(self.host)
    try:
        path = self.postpath % (blogId, postId)
        conn.request("GET", path, "", headers)
        response = conn.getresponse()
        xml = response.read()
    finally:
        # Always release the connection, even if the request fails.
        conn.close()
    res = []
    for post in feedparser.parse(xml)['entries']:
        content = post['content'][0]['value']
        if post['content'][0]['mode'] == 'escaped':
            # BUG FIX: unescape() returns the unescaped string; the result
            # was previously discarded, so escaped content stayed escaped.
            content = unescape(content)
        thepost = Post()
        thepost.Title = post['title']
        thepost.Created = post['modified']
        thepost.ID = self.id_re.search(post['id']).group(1)
        thepost.Content = content
        res += [thepost]
    if res:
        return res[0]
    return None
conn = httplib.HTTPConnection(self.host) path = self.feedpath % (blogId) try: conn.request("GET", path, "", headers) response = conn.getresponse() except Exception,e: raise Exception("Network operation failed: "+str(e)) xml = response.read() conn.close() res = [] try: for post in feedparser.parse(xml)['entries']: content = post['content'][0]['value'] if post['content'][0]['mode'] == 'escaped': unescape(content) thepost = Post() thepost.Title = post['title'] thepost.Created = post['modified'] thepost.ID = self.id_re.search( post['id'] ).group(1) thepost.Content = content res += [ thepost ] except: raise Exception("Couldn't parse the response from the server! Bad xml?") return res def getPost(self, blogId, postId): """ Returns a post """ (created, headers) = self._makeCommonHeaders() conn = httplib.HTTPConnection(self.host) path = self.postpath % (blogId, postId) conn.request("GET", path, "", headers)