def _handle_bookmark(self, data):
    """
    Accept a data dictionary drawn from the Delicious API and sync it to
    the database.

    Reads the 'url', 'title' and 'date' keys of `data`; creates a new
    Link row only when no row with the same URL already exists.
    """
    try:
        # Test the URL first in case it has already been logged by another
        # bookmarking service like Delicious.
        link = Link.objects.get(url=data['url'])
        # It already exists, so quit out silently. Lazy %-args defer
        # formatting until DEBUG logging is actually enabled, and dropping
        # the old .encode("utf-8") call avoids a b'...' bytes repr
        # leaking into the message on Python 3.
        logger.debug("Link already exists for %s", data["title"])
    except Link.DoesNotExist:
        # It doesn't exist yet, so add it fresh.
        logger.debug("Adding link to %s", data["title"])
        link = Link(
            title=data['title'],
            pub_date=data['date'],
            url=data['url'],
        )
        link.save()
def _handle_digg(self, d):
    """
    Accept a data dictionary harvested from Digg's API and log any new
    entries to the database.
    """
    url = d['url']
    title = d['title']
    try:
        # Look the URL up first, in case this item was already logged.
        existing = Link.objects.get(url=url)
        # Already recorded — bail out quietly.
        logger.debug("Digg already exists for %s" % title)
    except Link.DoesNotExist:
        # Brand new item — persist a fresh Link row.
        logger.debug("Adding link to %s" % title)
        fresh = Link(
            url=url,
            title=title,
            description=d['description'],
            pub_date=d['date'],
            tags=d['topic'],
        )
        fresh.save()
def _handle_bookmark(info):
    """
    Accept a data dictionary drawn from the Delicious API and sync it to
    the database.
    """
    href = info['href']
    label = info["description"]
    try:
        # Check by URL first — another bookmarking service like Delicious
        # may already have logged this entry.
        Link.objects.get(url=href)
        # Found one: exit quietly without creating a duplicate.
        log.debug("Link already exists for %s" % label)
    except Link.DoesNotExist:
        # Nothing stored yet — create the record from the API payload.
        log.debug("Adding link to %s" % label)
        entry = Link(
            url=href,
            title=label,
            description=info.get('extended', ''),
            pub_date=utils.parsedate(str(info['time'])),
            tags=info['tag'],
        )
        entry.save()