def post(self):
    # Handle POST: run the Twitter actor once; on failure, log the
    # traceback and flag the HTTP response as an error.
    result = None
    try:
        result = TwitterActor().act()
    except Exception, e:
        # format_last() renders the most recent traceback for the log.
        msg = Exceptions.format_last()
        logging.error(msg)
        # NOTE(review): result is assigned but not visibly consumed in
        # this view of the file -- presumably used by code outside this
        # chunk, or dead; confirm.
        result = (e, msg)
        # NOTE(review): placement inside the except block is a
        # reconstruction of the mangled original; the name suggests it
        # marks the response as failed only on error -- confirm.
        set_error_status(RequestHelper(self))
def ingest(cls, handler, source_name):
    """Ingest new entries for the feed named by *source_name*.

    Looks up the feed (404 when missing), ingests entries on behalf of
    the current user -- or the API service user for cron requests, which
    have no logged-in user -- and, in a ``finally`` block, prunes stored
    content down to the ``keep`` newest items.

    Query parameters:
        keep -- how many artifacts survive pruning (default 50);
                a non-integer value now yields a 400 instead of a 500.
    """
    helper = RequestHelper(handler)
    source_name = urllib.unquote(source_name)
    keep_param = handler.request.get("keep")
    if keep_param:
        try:
            keep = int(keep_param)
        except ValueError:
            # Fix: previously int() raised here and surfaced as a 500.
            helper.error(400, "keep must be an integer")
            return
    else:
        keep = 50  # TODO: get from cache
    f = Feed.get_by_source_name(source_name, return_none=True)
    if not f:
        helper.error(404)
        return
    results = {}
    entries = []
    results['created'] = entries
    # TODO: use etag from previous ingest
    error_call = lambda entry, ex: logging.error(Exceptions.format_last())
    user = users.get_current_user()
    if not user:
        # there is no logged in user for cron requests
        user = User(Services.API_USER)
    try:
        for artifact_guid, entry, created in model.ingest_feed_entries(
                f, user, error_call=error_call):
            entries.append({
                "artifact-guid": artifact_guid,
                "url": entry.link,
                "title": entry.title,
                "created": created
            })
    finally:
        # delete oldest feed entries
        # TODO: shouldn't I be deleting ArtifactContent instances also?
        def delete_info(c):
            # Best-effort delete of the associated info record; a
            # missing or undeletable record must not abort pruning.
            try:
                i = c.info
                if i:
                    i.delete()
            except Exception:
                pass
        deleted_key_names = ArtifactContent.delete_oldest_by_source(
            f.artifact_source, keep, pre_call=delete_info)
        results['deleted'] = deleted_key_names
        Counters.source_counter(f.artifact_source.name).decrement(
            len(deleted_key_names))
    # NOTE(review): "results" is built but never written to the response
    # in this view of the file -- presumably a write_json follows
    # outside this chunk; confirm.
class MixtureHandler(webapp.RequestHandler):
    """Serves a JSON "mix": generated content attributed to a speaker.

    GET query parameters:
        s -- optional speaker name; a random speaker is used when absent.
        q -- optional direct message for the mixer to respond to.
    The URL path may carry a single source name or a ";"-separated list.
    Responds 404 on NotFoundException and 503 on DataException.
    """

    def get(self, source_name, **kw):
        helper = RequestHelper(self)
        try:
            speaker_name = self.request.get("s", None)
            if speaker_name:
                # new_speaker returns a pair; the speaker object is its
                # second element.
                speaker = new_speaker(speaker_name)[1]
                mixer = Mixer.new(speaker)
            else:
                speaker_name, speaker = new_random_speaker()
                logging.debug("speaker: %s" % str(speaker))
                mixer = Mixer(speaker)
            # direct message
            message = self.request.get("q", None)
            if message:
                # A direct message takes precedence over path sources.
                message = urllib.unquote(message)
                sources, content = mixer.mix_response(message)
            else:
                if not source_name:
                    # No source in the path: mix from up to two randomly
                    # chosen sources.
                    sources, content = mixer.mix_random_limit_sources(
                        2, degrade=True)
                else:
                    source_name = urllib.unquote(source_name)
                    logging.debug("get source_name: %s" % source_name)
                    if ";" in source_name:
                        # multiple sources (de-duplicated via set)
                        sources_split = set(source_name.split(";"))
                        sources, content = mixer.mix_sources(*sources_split)
                    else:
                        # single source
                        sources, content = mixer.mix_sources(source_name)
            logging.debug("sources: %s" % str(sources))
            source_hash_list = [s.name for s in sources]
            mix_hash = {
                "sources": source_hash_list,
                "speaker": {
                    "name": speaker_name,
                    "id": hash(speaker),
                    "details": str(speaker)
                },
                "body": content
            }
            helper.write_json(mix_hash)
        except NotFoundException, ex:
            helper.error(404, Exceptions.format(ex))
            logging.error(ex)
        except DataException, ex:
            helper.error(503, Exceptions.format(ex))
            logging.error(ex)
def ingest(cls, handler, source_name):
    """Ingest new entries for the feed named by *source_name*.

    Looks up the feed (404 when missing), ingests entries on behalf of
    the current user -- or the API service user for cron requests, which
    have no logged-in user -- and, in a ``finally`` block, prunes stored
    content down to the ``keep`` newest items.
    """
    helper = RequestHelper(handler)
    source_name = urllib.unquote(source_name)
    keep = handler.request.get("keep")
    if keep:
        # NOTE(review): int() raises ValueError on a malformed "keep",
        # which surfaces as a 500 rather than a 400 -- confirm intended.
        keep = int(keep)
    else:
        keep = 50 # TODO: get from cache
    f = Feed.get_by_source_name(source_name, return_none=True)
    if not f:
        helper.error(404)
        return
    results = {}
    entries = []
    results['created'] = entries
    # TODO: use etag from previous ingest
    error_call = lambda entry, ex: logging.error(Exceptions.format_last())
    user = users.get_current_user()
    if not user:
        # there is no logged in user for cron requests
        user = User(Services.API_USER)
    try:
        for artifact_guid, entry, created in model.ingest_feed_entries(f, user, error_call=error_call):
            entries.append({
                "artifact-guid": artifact_guid,
                "url": entry.link,
                "title": entry.title,
                "created": created
            })
    finally:
        # delete oldest feed entries
        # TODO: shouldn't I be deleting ArtifactContent instances also?
        def delete_info(c):
            # Best-effort delete of the associated info record; failures
            # are deliberately swallowed so pruning continues.
            try:
                i = c.info
                if i:
                    i.delete()
            except Exception, e:
                pass
        deleted_key_names = ArtifactContent.delete_oldest_by_source(f.artifact_source, keep, pre_call=delete_info)
        results['deleted'] = deleted_key_names
        Counters.source_counter(f.artifact_source.name).decrement(len(deleted_key_names))
    # NOTE(review): "results" is built but never written to the response
    # in this view of the file -- presumably a write_json follows
    # outside this chunk; confirm.
def get(self, **kw): helper = RequestHelper(self) message = self.request.get("q", None) if not message: helper.error(400, "q must be provided") return message = urllib.unquote(message) try: sources, response = Mixer(new_speaker()).mix_response(message) logging.debug("sources:%s, response:%s" % (sources, response)) result = dict(response=response) helper.write_json(result) except DataException, ex: helper.error(503, Exceptions.format(ex)) logging.error(ex)
def get(self, source_name, **kw): helper = RequestHelper(self) try: speaker_name = self.request.get("s", None) if speaker_name: speaker = new_speaker(speaker_name)[1] mixer = Mixer.new(speaker) else: speaker_name, speaker = new_random_speaker() logging.debug("speaker: %s" % str(speaker)) mixer = Mixer(speaker) # direct message message = self.request.get("q", None) if message: message = urllib.unquote(message) sources, content = mixer.mix_response(message) else: if not source_name: sources, content = mixer.mix_random_limit_sources(2, degrade=True) else: source_name = urllib.unquote(source_name) logging.debug("get source_name: %s" % source_name) if ";" in source_name: # multiple sources sources_split = set(source_name.split(";")) sources, content = mixer.mix_sources(*sources_split) else: # single source sources, content = mixer.mix_sources(source_name) logging.debug("sources: %s" % str(sources)) source_hash_list = [s.name for s in sources] mix_hash = {"sources": source_hash_list, "speaker": {"name": speaker_name, "id": hash(speaker), "details": str(speaker)}, "body": content} helper.write_json(mix_hash) except NotFoundException, ex: helper.error(404, Exceptions.format(ex)) logging.error(ex)