import json

def store(site, entry):
    # Store a feed entry as a per-link annotation, keyed by link id.
    # Relies on module globals defined elsewhere: DBM (database
    # connection), KEY (annotation key prefix), and the
    # read_annotation/store_annotation helpers.
    id = entry['id']

    # Only annotate links that exist in the links table.
    cursor = DBM.cursor()
    cursor.execute("select link_id from links where link_id = %s", (id,))
    result = cursor.fetchone()
    if not result:
        return False

    # Load the existing annotation blob: one JSON dict per link,
    # keyed by site.
    annotation = read_annotation(KEY + str(id))
    if not annotation:
        data = {}
    else:
        data = json.loads(annotation)

    # Keep only the newest entry per site.  (The original tested
    # `if data[site]:`, which raises KeyError for a site not yet in
    # the dict.)
    if site in data:
        if data[site]['ts'] >= entry['ts']:
            return False
        del data[site]

    data[site] = entry
    data = json.dumps(data)
    if data:
        store_annotation(KEY + str(id), data)
        print data
    cursor.close()
    return entry['ts']
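# A minimal driver sketch for store() above (an assumption, not part of
# the original module): DBM, KEY, read_annotation and store_annotation
# are stubbed here purely to exercise the function; the real module
# presumably binds them to the links database and a shared annotation
# store.
import time

_annotations = {}
read_annotation = _annotations.get
store_annotation = _annotations.__setitem__
KEY = "links:"

class _StubCursor(object):
    # pretends every link_id exists in the links table
    def execute(self, sql, params): pass
    def fetchone(self): return (1,)
    def close(self): pass

class _StubDB(object):
    def cursor(self): return _StubCursor()

DBM = _StubDB()

entry = {'id': 42, 'ts': int(time.time())}
print store('example.com', entry)   # stores and returns entry['ts']
print store('example.com', entry)   # same ts is not newer, so False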
def store(self, annotation=False):
    # (method of the ec2_watch class; time, pickle and json are
    # module-level imports)
    # Append a [timestamp, instance count, total load] sample and trim
    # the history to its configured size.
    if self.history_size > 0:
        if not self.history:
            self.history = []
        self.history.append([int(time.time()),
                             len(self.group.instances),
                             int(round(self.total_load))])
        self.history = self.history[-self.history_size:]

    # Persist the whole watcher object between runs.
    pickle.dump(self, open(self.datafile, "wb"))

    if annotation:
        import utils
        # skipkeys=True makes json.dumps drop dict keys it cannot
        # serialize instead of raising TypeError.
        text = json.dumps(self.__getstate__(), skipkeys=True)
        utils.store_annotation("ec2_watch", text)
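# Counterpart sketch for reading the pickled state back on startup (an
# assumption; load_watcher is a hypothetical name, not in the original):
import pickle

def load_watcher(datafile):
    # returns the previously pickled watcher, or None on a first run
    try:
        return pickle.load(open(datafile, "rb"))
    except (IOError, EOFError):
        return None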
    # (fragment: continuation of a function that fetched the feed `rss`
    # into `doc`, evidently via feedparser, inside the try block that
    # this except clause belongs to; urllib2, time and json are
    # module-level imports)
    except (urllib2.URLError, urllib2.HTTPError, UnicodeEncodeError), e:
        print "connection failed (%s) %s" % (e, rss)
        return False

    # A 304 status means the feed is unchanged since the last
    # conditional GET.
    if not doc.entries or doc.status == 304:
        return False

    # Remember the newest entry timestamp per site.
    for e in doc.entries:
        ts = analyze_entry(site, e)
        if ts and ts > last_checked:
            modified = True
            last_checked = data[site]['ts'] = ts

    if modified:
        data = json.dumps(data)
        store_annotation(KEY + "checked", data)


def analyze_entry(site, e):
    # Prefer the published date, fall back to the updated date, and
    # give up if the entry carries neither.
    if hasattr(e, 'published_parsed') and e.published_parsed:
        timestamp = time.mktime(e.published_parsed)
    elif hasattr(e, 'updated_parsed') and e.updated_parsed:
        timestamp = time.mktime(e.updated_parsed)
    else:
        return False

    # Clamp timestamps that claim to come from the future.
    if timestamp > time.time():
        timestamp = time.time()

    if hasattr(e, "content"):
        content = e.summary
    else:
        content = e.description
    # (fragment ends here; the function evidently continues and returns
    # the timestamp, since callers use its result as `ts`)
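# Illustration of the timestamp handling in analyze_entry (assuming doc
# comes from feedparser, whose *_parsed attributes are time.struct_time
# values; the date below is made up):
import time

parsed = time.strptime("2012-03-01 12:00:00", "%Y-%m-%d %H:%M:%S")
ts = time.mktime(parsed)      # struct_time -> epoch seconds
ts = min(ts, time.time())     # same effect as analyze_entry's clamp
print ts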