def post(self, **kw):
    """Recounts artifacts for every source and refreshes each counter.

    For each ArtifactSource: counts its artifacts, records the previous
    counter value, sets the new value, and deletes sources that have no
    artifacts and are not referenced by a feed.  Writes a per-source JSON
    summary: {'old': ..., 'new': ..., 'feed': ..., 'deleted': ...}.
    """
    helper = RequestHelper(self)
    results = {}
    for source in ArtifactSource.all():
        # count lazily instead of materializing every entity in a list
        count = sum(1 for _ in ArtifactInfo.find_by_source(source))
        counter = Counters.source_counter(source.name)
        old_count = counter.count()
        counter.set(count)
        source_result = {'old': old_count}
        # if source is linked to a feed, I can't delete it
        feed = Feed.get_by_source(source, return_none=True)
        if feed:
            source_result['feed'] = feed.url
        if not count and not feed:
            source.delete()
            source_result['deleted'] = True
        if count:
            source_result['new'] = count
        results[source.name] = source_result
    helper.write_json(results)
def delete(self, **kw):
    """Deletes every ArtifactContent matching the full-text query ``q``.

    Responds 400 when ``q`` is missing.  Deleted guids accumulate in
    ``results['deleted']``; per-entity failures are logged and counted.
    NOTE(review): nothing in this visible block writes ``results`` to the
    response — presumably a later statement does; confirm.
    """
    helper = RequestHelper(self)
    q = self.request.get("q", None)
    if not q:
        helper.error(400, "q not provided.")
        return
    results = {}
    deleted_guids = []
    results["deleted"] = deleted_guids
    errors = 0
    q_results = ArtifactContent.all().search(q)
    infos = []  # NOTE(review): never used in this visible block
    batch_size = 50
    q_count = q_results.count()
    # Python 2 integer division: always schedules one extra (possibly empty) batch
    batches = (q_count / batch_size) + 1
    count = 0
    for i in range(0, batches):
        # NOTE(review): fetching with an advancing offset while deleting
        # shifts the result window — some matches may be skipped; confirm.
        for c in q_results.fetch(batch_size, i * batch_size):
            try:
                logging.debug("deleting guid:%s" % c.guid)
                c.delete()
                count += 1
                deleted_guids.append(c.guid)
                # also remove the linked ArtifactInfo, if any
                if c.info is not None:
                    c.info.delete()
            except Exception, ex:
                logging.error(ex)
                errors += 1
def post(self, **kw):
    """Recounts artifacts per source, updating counters and pruning.

    Records old/new counter values per source; deletes any source with
    zero artifacts that no feed references.  Writes a JSON summary keyed
    by source name.
    """
    helper = RequestHelper(self)
    results = {}
    source_q = ArtifactSource.all()
    for s in source_q:
        # avoid building a throwaway list just to measure its length
        count = sum(1 for _ in ArtifactInfo.find_by_source(s))
        counter = Counters.source_counter(s.name)
        old_count = counter.count()
        counter.set(count)
        source_result = {'old': old_count}
        # if source is linked to a feed, I can't delete it
        feed = Feed.get_by_source(s, return_none=True)
        if feed:
            source_result['feed'] = feed.url
        if not count and not feed:
            s.delete()
            source_result['deleted'] = True
        if count:
            source_result['new'] = count
        results[s.name] = source_result
    helper.write_json(results)
def get(self, name, **kw):
    """Writes the named ArtifactSource as JSON; responds 404 when absent."""
    helper = RequestHelper(self)
    found = ArtifactSourceAccessor.get_by_name(name, return_none=True)
    if not found:
        helper.error(404)
        return
    helper.write_json(source_hash(found))
def get(self, **kw):
    """Writes the queued activity actions as a JSON list."""
    helper = RequestHelper(self)
    activity_queue = get_activity_queue()
    helper.write_json(activity_queue.items())
def get(self, page):
    """Writes one page of UrlResources (BATCH_SIZE per page) as JSON."""
    index = int(page) if page else 0
    helper = RequestHelper(self)
    batch = UrlResource.all().fetch(self.BATCH_SIZE, index * self.BATCH_SIZE)
    helper.write_json([resource_hash(u) for u in batch])
def get(self):
    """Writes datastore kind statistics (name, count, bytes) as JSON."""
    helper = RequestHelper(self)
    stats = [{'kind': stat.kind_name, 'count': stat.count, 'bytes': stat.bytes}
             for stat in db.stats.KindStat.all()]
    helper.write_json(stats)
def put(self, name, **kw):
    """Creates a new ArtifactSource; responds 409 when the name exists,
    204 on success."""
    helper = RequestHelper(self)
    source = ArtifactSourceAccessor.get_by_name(name, return_none=True)
    if source:
        # bug fix: corrected typo in the conflict message ("ArtifactSouce")
        helper.set_status(409, "duplicate ArtifactSource")
        return
    ArtifactSourceAccessor.create(name)
    helper.set_status(204)
def args_call(*args, **kw):
    """Invokes f only when the request path/method authenticates; else 403."""
    handler = args[0]
    helper = RequestHelper(handler)
    request = handler.request
    if Authenticator.is_authenticated(request.path, request.method, helper):
        f(*args, **kw)
    else:
        helper.header(error_header, error_message)
        helper.error(403)
def args_call(*args, **kw):
    """Invokes f only for App Engine admin users; otherwise responds 403."""
    handler = args[0]
    helper = RequestHelper(handler)
    if users.is_current_user_admin():
        f(*args, **kw)
    else:
        helper.header(error_header, error_msg)
        helper.error(403)
def delete(self, name, **kw): helper = RequestHelper(self) try: ArtifactSourceAccessor.delete_by_name(name) helper.set_status(204) except NotFoundException, ex: helper.error(404) return
def get(self, path): helper = RequestHelper(self) t_api = TwitterConnector.new_api() try: result_hash = t_api.FetchResource("%s" % path) logging.debug("result_hash: %s" % result_hash) helper.write_json(result_hash) except twitter.TwitterError, e: helper.write(e)
def delete(self):
    """Deletes all entities matching the search; writes their key names
    as JSON, or 204 when nothing matched."""
    helper = RequestHelper(self)
    matches = self.__search(helper)
    if not matches:
        helper.set_status(204)
        return
    keys = [entity.key() for entity in matches]
    db.delete(keys)
    helper.write_json([key.name() for key in keys])
def get(self):
    """Writes per-kind datastore statistics as a JSON list."""
    helper = RequestHelper(self)
    results = []
    for stat in db.stats.KindStat.all():
        entry = {}
        entry['kind'] = stat.kind_name
        entry['count'] = stat.count
        entry['bytes'] = stat.bytes
        results.append(entry)
    helper.write_json(results)
def get(self):
    """Schedules the twitter actor task a random number of minutes ahead
    and reports the chosen delay as JSON."""
    helper = RequestHelper(self)
    # pick a random delay (in minutes) from the configured choices
    delay_minutes = random.choice(self._minutes)
    taskqueue.add(url=SafeTwitterActorHandler.PATH,
                  countdown=delay_minutes * 60)
    msg = 'scheduled %d minutes in the future' % delay_minutes
    logging.debug(msg)
    helper.write_json({'msg': msg})
def get(cls, rhandler, guid, **kw):
    """Writes the artifact identified by ``guid`` as JSON; 404 unless both
    its info and content records exist."""
    helper = RequestHelper(rhandler)
    info = ArtifactInfo.get_by_guid(guid)
    content = ArtifactContent.get_by_guid(guid)
    if not (info and content):
        helper.error(404)
        return
    helper.write_json(ArtifactsHelper.artifact_to_hash(info, content))
def respond(cls, handler):
    """Runs TwitterActor.respond and writes the direct/public counts."""
    helper = RequestHelper(handler)
    directs, publics = TwitterActor().respond()
    # normalize falsy results (None/0/empty) to 0 for the JSON payload
    helper.write_json(dict(directs=directs or 0, publics=publics or 0))
def post(self):
    """Posts the URL-decoded request body as a Twitter status update;
    400 when the body is empty."""
    helper = RequestHelper(self)
    body = self.request.body
    if not body:
        helper.error(400, "body required")
        return
    status = TwitterConnector.new_api().PostUpdate(urllib.unquote(body))
    helper.write_json(status.asDict())
def ingest(cls, handler, source_name):
    """Ingests entries for the feed registered under ``source_name``.

    Creates artifacts for new feed entries, then (even on failure) trims
    the oldest stored content down to ``keep`` items (request param,
    default 50) and decrements the source counter accordingly.  Responds
    404 when no feed is registered for the source.
    NOTE(review): ``results`` is built but never written in this visible
    block — presumably a later statement sends it; confirm.
    """
    helper = RequestHelper(handler)
    source_name = urllib.unquote(source_name)
    keep = handler.request.get("keep")
    if keep:
        keep = int(keep)
    else:
        keep = 50  # TODO: get from cache
    f = Feed.get_by_source_name(source_name, return_none=True)
    if not f:
        helper.error(404)
        return
    results = {}
    entries = []
    results['created'] = entries
    # TODO: use etag from previous ingest
    # per-entry failures are logged and skipped rather than aborting the run
    error_call = lambda entry, ex: logging.error(Exceptions.format_last())
    user = users.get_current_user()
    if not user:
        # there is no logged in user for cron requests
        user = User(Services.API_USER)
    try:
        for artifact_guid, entry, created in model.ingest_feed_entries(
                f, user, error_call=error_call):
            entries.append({
                "artifact-guid": artifact_guid,
                "url": entry.link,
                "title": entry.title,
                "created": created
            })
    finally:
        # delete oldest feed entries
        # TODO: shouldn't I be deleting ArtifactContent instances also?
        def delete_info(c):
            # best-effort removal of the linked ArtifactInfo; errors ignored
            try:
                i = c.info
                if i:
                    i.delete()
            except Exception, e:
                pass
        deleted_key_names = ArtifactContent.delete_oldest_by_source(
            f.artifact_source, keep, pre_call=delete_info)
        results['deleted'] = deleted_key_names
        Counters.source_counter(f.artifact_source.name).decrement(
            len(deleted_key_names))
def get(self, feed_url):
    """Fetches the feed at http://<feed_url> and writes its entries as JSON."""
    full_url = "http://%s" % feed_url
    helper = RequestHelper(self)
    entries = []
    for entry in generate_feed_entries(full_url):
        entries.append(dict(title=entry.title,
                            link=entry.link,
                            content=entry.stripped_content,
                            modified=str(entry.modified)))
    helper.write_json(entries)
def __default(self):
    """Test hook: sleeps briefly on DELETE/PUT, returns a canned JSON
    message on GET, and ignores every other method."""
    helper = RequestHelper(self)
    method = self.request.method
    if method == "GET":
        helper.write_json({'msg': 'hack success'})
    elif method in ("DELETE", "PUT"):
        # random delay of 0-5 whole seconds (Python 2 integer division)
        sleep = random.randint(0, 10) / 2
        logging.debug("sleeping for %ds" % sleep)
        time.sleep(sleep)
def get(self, username):
    """Writes {'friend': bool} — whether ``username`` is a Twitter friend."""
    helper = RequestHelper(self)
    api = TwitterConnector.new_api()
    match = None
    for friend in api.GetFriends():
        if friend.screen_name == username:
            match = friend
            break
    helper.write_json({'friend': True if match else False})
def get(self, **kw):
    """Writes the most recently modified artifacts as JSON.

    Query params: ``start`` (offset, default 0) and ``count`` (page size,
    default 10).
    """
    helper = RequestHelper(self)
    start = int(self.request.get("start", 0))
    count = int(self.request.get("count", 10))
    q = ArtifactInfo.all().order("-modified")
    json_results = []
    # fetch() on an empty query yields nothing, so the previous guarding
    # q.count() call (an extra datastore round-trip) is unnecessary
    for a_info in q.fetch(count, start):
        a_content = ArtifactAccessor.get_content_by_guid(a_info.guid)
        json_results.append(ArtifactsHelper.artifact_to_hash(a_info, a_content))
    helper.write_json(json_results)
def args_call(*args, **kw):
    """Invokes f only for a logged-in, whitelisted user; otherwise 403."""
    handler = args[0]
    helper = RequestHelper(handler)
    user = users.get_current_user()
    if not user:
        helper.header(error_header, error_msg)
        helper.error(403)
        return
    if not emails or user.email() not in emails:
        helper.header(error_header, "unauthorized-user")
        helper.error(403)
        return
    f(*args, **kw)
def get(self, page):
    """Writes Twitter mentions for the given page as simplified JSON."""
    helper = RequestHelper(self)
    api = TwitterConnector.new_api()
    mentions = api.getReplies(page=int(page))
    helper.write_json([dict(id=m.id,
                            created=m.created_at,
                            user=m.user.screen_name,
                            text=m.text) for m in mentions])
def put(self, key): helper = RequestHelper(self) if not self.request.body: helper.error(400, "body required") return value = self.request.body logging.info("%s=%s" % (key, value)) try: ConfigurationAccessor.update(**{key: value}) except IllegalArgumentException, e: helper.error(400, "invalid key") return
def args_call(*args, **kw):
    """Wraps f with HTTP digest authentication.

    On an auth-failure status, replays the challenge headers and the
    status code to the client; on success injects the authenticated
    username into kw and delegates to f.
    """
    handler = args[0]
    helper = RequestHelper(handler)
    auth_header = handler.request.headers.get(Headers.AUTHORIZATION, "")
    code, headers, user = DigestAuth(realm, users).authenticate(
        handler.request.method, handler.request.path, auth_header)
    # NOTE(review): this range excludes 499 (400 <= code < 499);
    # presumably all 4xx should fail — confirm whether `< 500` was intended
    if code >= 400 and code < 499:
        for k, v in headers:
            helper.header(k, v)
        helper.error(code)
    else:
        kw['username'] = user
        return f(*args, **kw)
def get(self):
    """Writes Twitter followers as JSON, optionally filtered by a minimum
    follower-to-friend ratio (query param ``ratio``) to skip spam accounts."""
    # friend to follower ratio, used to find non-spam followers
    min_ratio = self.request.get("ratio")
    helper = RequestHelper(self)
    api = TwitterConnector.new_api()
    candidates = [user_hash(u) for u in api.GetFollowers()]
    if min_ratio:
        threshold = float(min_ratio)
        candidates = [u for u in candidates
                      if u['follower-to-friend-ratio'] > threshold]
    helper.write_json(sorted_user_list(candidates))
def ingest(cls, handler, source_name):
    """Ingests entries for the feed registered under ``source_name``.

    Creates artifacts for new feed entries, then (even on failure) trims
    the oldest stored content down to ``keep`` items (request param,
    default 50) and decrements the source counter accordingly.  Responds
    404 when no feed is registered for the source.
    NOTE(review): ``results`` is built but never written in this visible
    block — presumably a later statement sends it; confirm.
    """
    helper = RequestHelper(handler)
    source_name = urllib.unquote(source_name)
    keep = handler.request.get("keep")
    if keep:
        keep = int(keep)
    else:
        keep = 50  # TODO: get from cache
    f = Feed.get_by_source_name(source_name, return_none=True)
    if not f:
        helper.error(404)
        return
    results = {}
    entries = []
    results['created'] = entries
    # TODO: use etag from previous ingest
    # per-entry failures are logged and skipped rather than aborting the run
    error_call = lambda entry, ex: logging.error(Exceptions.format_last())
    user = users.get_current_user()
    if not user:
        # there is no logged in user for cron requests
        user = User(Services.API_USER)
    try:
        for artifact_guid, entry, created in model.ingest_feed_entries(
                f, user, error_call=error_call):
            entries.append({
                "artifact-guid": artifact_guid,
                "url": entry.link,
                "title": entry.title,
                "created": created
            })
    finally:
        # delete oldest feed entries
        # TODO: shouldn't I be deleting ArtifactContent instances also?
        def delete_info(c):
            # best-effort removal of the linked ArtifactInfo; errors ignored
            try:
                i = c.info
                if i:
                    i.delete()
            except Exception, e:
                pass
        deleted_key_names = ArtifactContent.delete_oldest_by_source(
            f.artifact_source, keep, pre_call=delete_info)
        results['deleted'] = deleted_key_names
        Counters.source_counter(f.artifact_source.name).decrement(
            len(deleted_key_names))
def put(cls, rhandler, guid, **kw):
    """Clears all properties of an existing artifact and saves it.

    Responds 404 when the guid is unknown.
    NOTE(review): every ArtifactInfo property is deleted and the entity is
    saved without applying any values from the request body, and no
    success status is written here — confirm this is the intended
    behavior.
    """
    helper = RequestHelper(rhandler)
    artifact = ArtifactInfo.get_by_guid(guid)
    if not artifact:
        helper.error(404)
        return
    # removes existing properties
    props = ArtifactInfo.properties().keys()
    for prop in props:
        delattr(artifact, prop)
    # save artifact
    ArtifactInfo.save(artifact)
def get(self):
    """Writes the latest TwitterResponse records as JSON.

    Query params: ``count`` (default 10) and ``start`` (offset, default 1;
    values <= 1 mean no offset).
    """
    helper = RequestHelper(self)
    count = int(self.request.get("count", 10))
    start_idx = int(self.request.get("start", 1))
    q = TwitterResponse.find_latest()
    if start_idx > 1:
        rows = q.fetch(count, start_idx)
    else:
        rows = q.fetch(count)
    payload = [{"type": r.tweet_type,
                "timestamp": r.timestamp.isoformat(),
                "message-id": r.message_id,
                "response-id": r.response_id,
                "user": r.user} for r in rows]
    helper.write_json(payload)
def post(self):
    """Runs the twitter actor once; marks the response as errored on failure.

    NOTE(review): ``result`` is assigned but never written to the response
    in this visible block — presumably later code uses it; confirm.
    """
    result = None
    try:
        result = TwitterActor().act()
    except Exception, e:
        msg = Exceptions.format_last()
        logging.error(msg)
        # keep the exception and its formatted traceback for the caller
        result = (e, msg)
        set_error_status(RequestHelper(self))
def act(cls, handler):
    """Runs the twitter actor (optionally mocked via ``mock``, forced via
    ``force``) and writes the chosen action as JSON."""
    helper = RequestHelper(handler)
    if handler.request.get("mock", False):
        actor = TwitterActor(twitter_api=MockTwitterApi())
    else:
        actor = TwitterActor()
    act_response = actor.act(
        force_act=handler.request.get("force", False),
        action=handler.request.get("action"),
        skip_responses=handler.request.get("skip_responses", False))
    logging.debug("act_response: %s" % str(act_response))
    result = dict(action=act_response[0])
    # include the full tuple when the actor reported extra detail
    if len(act_response) > 1:
        result['detailed_action'] = act_response
    helper.write_json(result)
def get(self, **kw):
    """Builds a JSON status dashboard: artifact counts, new-since-midnight
    artifacts, memcache stats, and recent incoming/outgoing Twitter
    messages.

    Twitter failures are collected into ``warnings`` instead of failing
    the whole request.
    NOTE(review): nothing in this visible block writes ``result`` to the
    response — presumably a later statement does; confirm.
    """
    helper = RequestHelper(self)
    result = {}
    # midnight today, used as the "new since" threshold
    today = datetime.combine(datetime.now(), time(0, 0, 0))
    result['timestamp'] = str(datetime.now())
    result['today'] = str(today)
    warnings = []
    result['warnings'] = warnings
    # source/artifact counts
    result['source_artifact_counts'] = ArtifactSourceAccessor.find_artifact_counts()
    # new artifacts
    result['new_artifacts'] = ArtifactSourceAccessor.find_artifact_counts_newer(today)
    # newer_arts = ArtifactAccessor.find_newer(today, refresh=True)
    # new_art_stats = []
    # result['new_artifacts'] = new_art_stats
    # for art in newer_arts:
    #   new_art_stats.append(dict(guid=art.guid, source_name=art.source_name))
    # memcache stats
    result['memcache'] = memcache.get_stats()
    try:
        twactor = TwitterActor()
        # outgoing messages
        result['statuses_out'] = [describe_status_with_timestamp(s)
                                  for s in twactor.latest_statuses(5)]
        # incoming messages
        direct_stats = []
        mention_stats = []
        result['directs'] = direct_stats
        result['mentions'] = mention_stats
        directs, mentions = twactor.messages(today)
        # newest-first for display
        directs.reverse()
        mentions.reverse()
        direct_stats.extend([describe_status_with_timestamp(s) for s in directs])
        mention_stats.extend([describe_status_with_timestamp(s) for s in mentions])
    except TwitterException, ex:
        warnings.append(str(ex))
def delete(cls, rhandler, guid, **kw): helper = RequestHelper(rhandler) try: ArtifactAccessor.delete(guid) helper.set_status(204) except NotFoundException, ex: helper.error(404)
def get(self, **kw):
    """Full-text searches artifact content.

    Query params: ``q`` (required), ``o`` output mode ('short' = count
    only, 'id' = guids, anything else = full artifacts), ``max`` result
    cap (default -1 = uncapped).
    """
    helper = RequestHelper(self)
    query = self.request.get("q", None)
    output = self.request.get("o", None)
    max_results = int(self.request.get("max", -1))
    if not query:
        helper.error(400, "q not provided.")
        return
    matches = ArtifactContent.all().search(query)
    if output == "short":
        json_results = {"count": matches.count()}
    elif output == "id":
        count = matches.count()
        # cap the reported count when a positive max is below the total
        if 0 < max_results < matches.count():
            count = max_results
        ids = []
        json_results = {"count": count, "ids": ids}
        if max_results == -1:
            fetched = matches.fetch(1000)
        else:
            fetched = matches.fetch(max_results)
        for item in fetched:
            ids.append(item.guid)
    else:
        json_results = []
        if matches.count():
            for content in matches.fetch(10):
                info = ArtifactInfo.get_by_guid(content.guid)
                json_results.append(ArtifactsHelper.artifact_to_hash(info, content))
    helper.write_json(json_results)
def put(self, source_name):
    """Creates or updates a Feed for ``source_name``.

    Expects JSON fields ``url`` and ``active``.  A Feed must be the sole
    owner of its ArtifactSource: when the source exists and another feed
    already references it, responds 409 and stops.  Creates the
    ArtifactSource and UrlResource on demand.  Responds 204 on success.
    """
    helper = RequestHelper(self)
    source_name = urllib.unquote(source_name)
    success, values = read_json_fields(helper, "url", "active", logger=logging)
    if not success:
        return
    url, active = values
    # a Feed must be sole owner of an ArtifactSource;
    # fails if source already exists and is already linked to a feed
    source = ArtifactSourceAccessor.get_by_name(source_name, return_none=True)
    if source:
        source_feed_key = Feed.get_by_source(source, keys_only=True, return_none=True)
        if source_feed_key:
            msg = "source '%s' is referenced by feed %s" % (source_name, source_feed_key.name())
            helper.error(409, msg)
            # bug fix: abort here — previously execution fell through and
            # created the resource/feed despite the 409 conflict response
            return
    else:
        source = ArtifactSourceAccessor.create(source_name)
    # creates UrlResource if necessary
    resource = UrlResourceAccessor.get_by_url(url, return_none=True)
    if not resource:
        resource = UrlResourceAccessor.create(url)
    # create or update Feed
    feed = Feed.get_by_source_name(source_name, return_none=True)
    if feed:
        feed.artifact_source = source
        feed.url_resource = resource
        feed.put()
    else:
        Feed.create(source_name, artifact_source=source, url=url,
                    url_resource=resource, active=bool(active))
    helper.set_status(204)
def get(self, **kw): helper = RequestHelper(self) message = self.request.get("q", None) if not message: helper.error(400, "q must be provided") return message = urllib.unquote(message) try: sources, response = Mixer(new_speaker()).mix_response(message) logging.debug("sources:%s, response:%s" % (sources, response)) result = dict(response=response) helper.write_json(result) except DataException, ex: helper.error(503, Exceptions.format(ex)) logging.error(ex)
def get(self, source_name):
    """Writes the Feed registered for ``source_name`` as JSON; 404 when
    absent."""
    helper = RequestHelper(self)
    decoded = urllib.unquote(source_name)
    feed = Feed.get_by_source_name(decoded, return_none=True)
    if not feed:
        helper.error(404)
        return
    helper.write_json(build_feed_hash(feed))
def delete(self, source_name):
    """Deletes the Feed for ``source_name``; 204 on success, 404 if absent."""
    helper = RequestHelper(self)
    target = Feed.get_by_source_name(urllib.unquote(source_name),
                                     return_none=True)
    if not target:
        helper.error(404)
        return
    target.delete()
    helper.set_status(204)