Example #1
File: ib.py Project: a37912/gaeaib
  def post(self, mode):
    client = self.request.form.get('from')
    rb, person, board, thread = client.split('/')
    thread = int(thread)

    logging.info("client %s %s %d - %r" % (rb, board, thread, mode))

    online = memcache.get('online-%s-%d' % (board, thread)) or []
    while rb in online:
        online.remove(rb)

    if mode == 'connected':
        online.append(rb)

    memcache.set('online-%s-%d' % (board, thread), online)

    # send notify
    from .matcher import Post
    from google.appengine.api import prospective_search as matcher

    match_msg = Post(board = board, thread = thread)
    match_msg.data = dict(
      board = board,
      thread = thread,
      rb = online,
      evt = 'online',
    )

    matcher.match(match_msg, topic='post',
      result_task_queue='postnotify')


    return Response("yep")
Example #2
File: views.py Project: ahbuntu/ece1779
def create_nearby_question(question_id):
    """Workaround for Prospective Searchs shortcomings; we need to create 
    NearbyQuestion objects for each User/Question pair."""
    prospective_users = ProspectiveUser.all()
    question = Question.get_by_id(question_id)
    for user_to_test in prospective_users:

        if user_to_test.login == question.added_by:
            continue # No need to create a search for your own questions

        # create a new document and subscribe to it
        distance_to_origin = get_location_distance_in_km(user_to_test.origin_location.lat,
                                                         user_to_test.origin_location.lon,
                                                         question.location.lat,
                                                         question.location.lon)
        nearby_prospective_question = NearbyQuestion(
            for_prospective_user_id=user_to_test.key.id(),
            for_question_id=question_id,
            origin_latitude=user_to_test.origin_location.lat,
            origin_longitude=user_to_test.origin_location.lon,
            origin_radius=user_to_test.notification_radius_in_km,
            origin_distance_in_km=distance_to_origin
        )

        # TODO: possibly only needed for debugging; prospective_search.match might not require a saved entity.
        # nearby_prospective_question.put()

        # "Documents are assigned to a particular topic when calling match()"
        prospective_search.match(
            nearby_prospective_question
        )
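
This match() only delivers to users whose stored query accepts the NearbyQuestion document. A plausible counterpart registration, assuming each ProspectiveUser subscribes once with their radius baked in as a literal; the query and subscription id are assumptions, not code from this project:

# Hypothetical subscription created when a ProspectiveUser registers.
from google.appengine.api import prospective_search

def subscribe_prospective_user(user):
    # The document precomputes origin_distance_in_km, so each user's query
    # can be a plain numeric comparison against their notification radius.
    query = 'origin_distance_in_km <= %s' % user.notification_radius_in_km
    prospective_search.subscribe(
        NearbyQuestion, query, 'nearby-user-%s' % user.key.id())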
Example #3
File: stats.py Project: 2cloud/AppEngine
def record(key, value, stamp=False):
    record = StatsRecord(event=key, value=value)
    if not stamp:
        stamp = timestamp.now()
    record.timestamp = stamp
    logging.info("Firing stats off. Event: %s" % key)
    prospective_search.match(record, result_task_queue="stats")
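
Matches arrive as POST tasks on the named 'stats' queue, at result_relative_url (here left at its default, /_ah/prospective_search). A minimal sketch of such a handler, assuming webapp2; the class name and downstream call are illustrative:

# Sketch of the task handler receiving match results on the 'stats' queue.
import webapp2
from google.appengine.api import prospective_search

class StatsMatchHandler(webapp2.RequestHandler):
    def post(self):
        # Rebuild the StatsRecord document that was passed to match().
        record = prospective_search.get_document(self.request)
        # Ids of the matched subscriptions arrive as repeated 'id' params.
        for sub_id in self.request.get_all('id'):
            handle_stat_match(sub_id, record)  # hypothetical processing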
Example #5
    def __call__(self, environ, start_response):
        # Don't record if the request is to clio itself, or the config says no.
        if (
            environ["PATH_INFO"] == config.QUEUE_URL
            or environ["PATH_INFO"].startswith(config.BASE_URL)
            or not config.should_record(environ)
        ):
            return self.application(environ, start_response)

        request = webob.Request(environ)
        start_time = time.time()
        response = request.get_response(self.application)
        elapsed = int((time.time() - start_time) * 1000)
        status_code, status_text = response.status.split(" ", 1)

        record = model.RequestRecord(
            method=request.method,
            path=request.path_qs,
            request_headers=_stringifyHeaders(request.headers),
            status_code=int(status_code),
            status_text=status_text,
            response_headers=_stringifyHeaders(response.headers),
            wall_time=elapsed,
            cpu_time=quota.get_request_cpu_usage(),
            random=random.random(),
        )
        prospective_search.match(record, result_relative_url=config.QUEUE_URL, result_task_queue=config.QUEUE_NAME)
        return response(environ, start_response)
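
Since every RequestRecord field is matchable, ad-hoc monitoring queries can be registered against it; for instance, alerting on server errors. A sketch, with the import path, query, and subscription id assumed rather than taken from clio:

# Hypothetical subscription that matches any request logged with a 5xx status.
from google.appengine.api import prospective_search
from clio import model  # assumed import path for the model module used above

prospective_search.subscribe(
    model.RequestRecord,
    'status_code >= 500',   # numeric comparison in the matcher query language
    'alert-server-errors')  # illustrative subscription id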
Example #6
  def get(self):

    releases = {}
    to_save = []
    for title_ep in uplinks.get_titles():
      #logging.info("t: %r" % title_ep)

      ep = models.Ep(**title_ep)

      subbers = releases.get(ep.title, [])
      if ep.subber not in subbers:
        subbers.append(ep.subber)
      releases[ep.title] = subbers

      to_save.append(ep)

    db.put(to_save)

    for ep in to_save:
      matcher.match(
          ep,
          result_key=str(ep.key()),
          result_return_document=False
      )
  
    deferred.defer(releasers_update, releases.items())


    return Response("loaded")
Example #7
    def process_inbound_feed(self, parsed_feed, overflow=False):
        entries = filter_entries(parsed_feed.entries)
        entries = yield self.filter_entries(entries)

        if not entries:
            logger.info(
                'prospective: Feed has seen all entries, nothing new %s %s',
                self.feed_url, self.key.urlsafe())
            raise ndb.Return(([], []))

        last_guid = guid_for_item(entries[0])
        logger.info('prospective: entries before rss_items %s', len(entries))
        rss_items = map(lambda x: RssItem.from_rss_item(x, self), entries)
        logger.info(
            'prospective: Processing inbound prospective search %s %s %s' %
            (self.feed_url, len(rss_items), self.key.urlsafe()))

        for item in rss_items:
            for topic in self.topics:
                logger.info('prospective: matching %s %s' % (item, topic))
                blah = prospective_search.match(
                    item,
                    topic,
                    result_relative_url='/api/backend/queries/matched')
                logger.info('What do we get back %s', blah)

        self.last_guid = last_guid
        yield self.put_async()
        raise ndb.Return(([], []))
Example #9
def match(document,
          topic=None,
          result_key=None,
          result_relative_url='/_ah/prospective_search',
          result_task_queue='default',
          result_batch_size=DEFAULT_RESULT_BATCH_SIZE,
          result_return_document=True):
    """Match document with all subscribed queries on specified topic."""
    # Convert document to datastore.Entity.
    topic = _get_document_topic(document.__class__, topic)
    pb = document._to_pb()
    entity = datastore.Entity('temp-kind').FromPb(pb)
    return prospective_search.match(
        entity,
        topic=topic,
        result_key=result_key,
        result_relative_url=result_relative_url,
        result_task_queue=result_task_queue,
        result_batch_size=result_batch_size,
        result_return_document=result_return_document)
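
The stock API accepts only db.Model documents or raw datastore entities, while RssItem in example #7 is an ndb model; the _to_pb()/FromPb() round-trip above bridges that gap, which suggests the prospective_search name in example #7 refers to this wrapper module rather than the SDK. Calling the wrapper then looks just like the stock call:

# Illustrative call of the ndb-aware wrapper with an RssItem from example #7.
def notify_queries(item):
    return match(item,
                 topic='rss_item',  # assumed; _get_document_topic has a default
                 result_relative_url='/api/backend/queries/matched')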
Example #11
File: util.py Project: a37912/gaeaib
def save_post(request, data, board, thread, ip):

  def board_increment():
    board_db = BoardCounter.get_by_key_name(board)

    if not board_db:
      board_db = BoardCounter(key_name = board, thread = [])

    board_db.counter += 1
    board_db.put()

    return board_db.counter

  postid = db.run_in_transaction(board_increment)

  # create new thread
  new = False
  if thread == 'new':
    new = True
    if data.get("sage"):
      raise NotFound() # FIXME: move to form

    thread = postid
    posts = []
    thread_db = Thread.create(thread, board)
    thread_db.posts = []
    thread_db.subject = data.get("subject")[:SUBJECT_MAX]
  else:
    thread = int(thread)

    thread_db = Thread.load(thread, board)

    if not thread_db:
      raise NotFound()

  rb = rainbow.make_rainbow(ip, board, thread)
  data['rainbow'] = rb
  data['overlay'] = board in OVER
  
  data['text_html'] = markup(
        board=board, postid=postid,
        data=escape(data.get('text')),
  )

  # save thread and post number
  data['post'] = postid
  data['thread'] = thread
  now = datetime.now()
  data['time'] = now.strftime("%Y-%m-%d, %H:%M")
  data['timestamp'] = int(now.strftime("%s"))

  img_key = data.get("key")

  if img_key:
    blob_key = blobstore.BlobKey(img_key)
    blob_info = blobstore.BlobInfo.get(blob_key)

    data['image'] = {
        "size" : blob_info.size,
        "content_type" : blob_info.content_type,
        "full" : images.get_serving_url(img_key),
        "thumb" : images.get_serving_url(img_key, 200),
    }

  for fname in OPTIONS.get(board, []):
    func = globals().get('option_'+fname)

    if func:
      func(request, data)

  thread_db.posts.append(data)
  thread_db.put()

  r = Render(thread=thread_db)
  r.post_html = ''
  r.add(data, new) # WARNING: side effect on data
  r.save()

  deferred.defer(save_post_defer,
      thread_db.boards, thread,
      r.post_html, data.get('text_html'),
      postid,
      len(thread_db.posts),
      data.get("sage"),
  )

  # send notify
  thread_flag = 'new' if new else 'sage' if data.get("sage") else 'bump'
  match_msg = Post(board = board, thread = thread, thread_flag = thread_flag)
  match_msg.data = dict(
    board = board,
    thread = thread,
    html = r.post_html,
    text = data.get('text'),
    last = postid,
    count = len(thread_db.posts),
    evt = 'newpost'
  )

  matcher.match(match_msg, topic='post',
      result_task_queue='postnotify')

  return postid, thread
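
Both this function and example #1 publish to topic 'post' with result_task_queue='postnotify'. A minimal sketch of the receiving task handler, assuming webapp2 and the Channel API for delivery; everything beyond the prospective_search and channel calls is illustrative:

# Sketch of the 'postnotify' task handler shared by examples #1 and #11.
import json
import webapp2
from google.appengine.api import channel, prospective_search

class PostNotifyHandler(webapp2.RequestHandler):
    def post(self):
        # Recover the matched Post document from the task payload.
        post_doc = prospective_search.get_document(self.request)
        payload = getattr(post_doc, 'data', None)  # assumes data round-trips
        # Fan the event out to every client whose subscription matched.
        for sub_id in self.request.get_all('id'):
            channel.send_message(sub_id, json.dumps(payload))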