Example #1
def home():
    latitude = safe_float(request.args.get('city_lat'))
    longitude = safe_float(request.args.get('city_long'))
    radius = safe_int(request.args.get('radius'))
    closest = safe_int(request.args.get('closest'))
    python = bool(request.args.get('python'))
    r = bool(request.args.get('r'))

    people = db.get_session().query(Person).join(Person.facts).\
             join(Facts.airport).\
             filter(Facts.active == True)

    if python:
        people = people.filter(Facts.python == True)
    if r:
        people = people.filter(Facts.r == True)

    if latitude is not None and longitude is not None and radius:
        people = (
            person for person in people
            if person.facts.airport.is_within_radius_of(
                radius,
                latitude,
                longitude,
                units='km'
            )
        )
    elif latitude is not None and longitude is not None and closest:
        # not searching radius but rather closest people
        people = sorted(people,
            key=lambda x: x.facts.airport.distance_from(latitude, longitude))[:closest]

    return render_template('index.html', people=people)
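
These snippets all assume safe_int and safe_float helpers that coerce a value to a number and fall back to a default when parsing fails (note the calls with an explicit default in Example #11). A minimal sketch of such helpers; the names match the examples, but the exact signatures are an assumption:

def safe_int(value, default=None):
    """Coerce value to int, returning default on None or bad input."""
    try:
        return int(value)
    except (TypeError, ValueError):
        return default

def safe_float(value, default=None):
    """Coerce value to float, returning default on None or bad input."""
    try:
        return float(value)
    except (TypeError, ValueError):
        return default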
Example #3
def home():
    latitude = safe_float(request.args.get('city_lat'))
    longitude = safe_float(request.args.get('city_long'))
    radius = safe_int(request.args.get('radius'))
    python = bool(request.args.get('python'))
    r = bool(request.args.get('r'))

    people = db.get_session().query(Person).join(Person.facts).\
             join(Facts.airport).\
             filter(Facts.active == True)

    if python:
        people = people.filter(Facts.python == True)
    if r:
        people = people.filter(Facts.r == True)

    if latitude is not None and longitude is not None and radius:
        people = (
            person for person in people
            if person.facts.airport.is_within_radius_of(
                radius,
                latitude,
                longitude,
                units='km'
            )
        )

    return render_template('index.html', people=people)
Example #4
async def roll_dice(self, user, dice_size=None, num_dice='1'):
    if num_dice.lower().startswith('d'):
        # the caller passed the arguments in the other order
        # (e.g. "3 d20" instead of "d20 3"), so swap them back
        old_dice_size = dice_size
        dice_size = self._parse_dice_size(user, num_dice)
        num_dice = safe_int(old_dice_size)
    else:
        dice_size = self._parse_dice_size(user, dice_size)
        num_dice = safe_int(num_dice)

    out = dict()
    out["Dice size"] = (str(num_dice) + " x " if num_dice > 1 else "") + "d" + str(dice_size)
    result = 0
    for _ in range(num_dice):
        result += randint(1, dice_size)
    #message = self.db[(user.id, "dice", str(dice_size), str(result))]
    out["Result"] = "You rolled a " + ("total of " if num_dice > 1 else "") + str(result) #+ (" " if message else "") + message
    return out
Example #5
def output_scores(scored_images, scores_dir):
    if not os.path.exists(scores_dir):
        os.makedirs(scores_dir)
    scored_images_df = pd.DataFrame.from_dict(scored_images)
    # sort columns by their alphabetic part, then by their numeric part
    columns = scored_images_df.columns.tolist()
    columns.sort(key=lambda x: (''.join(i for i in x if not i.isdigit()),
                                safe_int(''.join(i for i in x if i.isdigit()))))
    # reindex returns a new frame (reindex_axis was removed from pandas),
    # so assign the result instead of discarding it
    scored_images_df = scored_images_df.reindex(columns=columns)
    scored_images_df.to_csv(os.path.join(scores_dir, 'image_scores.csv'))
Example #6
def _parse_dice_size(self, user, dice_size):
    if dice_size is None:
        # fall back to the user's stored default die size
        dice_size = self.db[(user.id, "dice", "size")]
    if dice_size.lower().startswith('d'):
        dice_size = dice_size[1:]

    dice_size = safe_int(dice_size)
    # clamp so randint(1, dice_size) stays valid
    if dice_size < 2:
        dice_size = 2

    return dice_size

async def roll_dice(self, user, dice_size=None):
    if dice_size is None:
        dice_size = safe_int(self.db[(user.id, "dice", "size")])
    if dice_size < 2:
        dice_size = 2
    out = dict()
    out["Dice size"] = str(dice_size)
    result = randint(1, dice_size)
    message = self.db[(user.id, "dice", str(dice_size), str(result))]
    out["Result"] = f"You rolled a {result}!" + (" " if message else "") + message
    return out
Example #8
    def get_args(self, command, tokens, message, user, server, channel):
        args = []
        mention_index = 0
        for index, arg in enumerate(self.commands[command]['args']):

            if arg == "user":
                args.append(user)

            elif arg == "message":
                args.append(message)

            elif arg == "server":
                args.append(server)

            elif arg == "channel":
                args.append(channel)

            elif arg == "mention":
                if mention_index < len(message.mentions):
                    args.append(message.mentions[mention_index].id)
                    mention_index += 1
                else:
                    args.append(None)

            elif arg == '*mentions':
                args.append([p.id for p in message.mentions])

            elif len(tokens) != 0:

                if arg == 'force?':
                    args.append(self.autocomplete(tokens.pop().lower(), self.force_words))
                elif arg == 'no?':
                    args.append(self.autocomplete(tokens.pop().lower(), self.no_words))
                elif arg == 'yes?':
                    args.append(self.autocomplete(tokens.pop().lower(), self.yes_words))
                elif arg == 'str':
                    args.append(tokens.pop())
                elif arg == '*str':
                    args.append([tokens.pop().lower() for x in
                                    range(len(tokens) - (len(self.commands[command]['args']) - index - 1))])

                elif arg == 'int':
                    args.append(safe_int(tokens.pop()))
                elif arg == 'float':
                    args.append(safe_float(tokens.pop()))

        return args
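
The '*str' branch above greedily consumes every remaining token except one for each argument slot that still follows it. A standalone check of that arithmetic, with a hypothetical spec and token list:

spec = ['user', '*str', 'int']            # '*str' sits at index 1
tokens = ['5', 'world', 'hello']          # tokens are popped from the end
index = 1
take = len(tokens) - (len(spec) - index - 1)   # 3 - (3 - 1 - 1) = 2
words = [tokens.pop().lower() for _ in range(take)]
print(words)   # ['hello', 'world'] -- '5' is left for the trailing 'int' slot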
Example #9
    async def create_socket_response(self):
        session = quart_session
        msg = await self.socket.receive()
        if msg == "ping" or msg == "pong":
            return await self.send_string(
                ("ping" if msg == "ping" else "pong"))
        self.current_message = msg
        self._data = self.parse_json_or_none(msg)
        if not self._data:
            return
        peer: str = self._data.get("peer")
        peer_socket: websocket = sockets_get(peer)
        tpe: str = self._data.get("type")
        data: dict = self._data.get("data")
        if not tpe:
            return await self.send_message({"error": "Invalid Values"})
        if tpe == "use_fallback":
            if peer:
                return await self.send_message({
                    "type": tpe,
                    "data": data
                }, peer_socket)
        if tpe == "rtc_data":
            return await self.send_message({
                "type": tpe,
                "data": data
            }, peer_socket)
        if tpe == "message-relay":
            if not peer_socket:
                return await notify_user(session["user"], peer, data)
            # alter_chat_data({**data, "sender": session["user"], "receiver": peer}, True)
            return await self.send_message({
                "type": tpe,
                "data": data
            }, peer_socket)
        if tpe == "binary-file":
            if peer_socket:
                return await self.send_message({
                    "type": tpe,
                    "data": {}
                }, peer_socket)
        if tpe == "direct-relay":
            return await self.send_message({
                "type": tpe,
                "data": data
            }, peer_socket)
        if tpe == "start_chat":
            if peer_socket:
                return await self.send_message(
                    {
                        "type": "online_status",
                        "data": {
                            "user": session["user"],
                            "isonline": True,
                            "common_chat_id": check_or_create_chat(peer, session["user"]).id_,
                        },
                    },
                    peer_socket,
                )
        if tpe == "get_role":
            return await self.send_message({
                "type": tpe,
                "data": data
            }, peer_socket)
        if tpe == "fetch-update":
            resp = {"type": "ping-update", "data": []}
            msg_ids: list = data.get("msgids", [])
            chat_id: str = data.get("chat_id")
            chat_data = check_chat_data(id_=chat_id).chats
            for idx in msg_ids:
                message: dict = chat_data.get(str(idx)) or chat_data.get(
                    safe_int(idx))
                if not message:
                    continue
                if message.get("sender") == session["user"] and message.get(
                        "read"):
                    resp["data"].append({
                        "chat_id": chat_id,
                        "update_type": "read",
                        "msg": idx,
                        "rstamp": message.get("stamp"),
                    })
            return await self.send_message(resp)
        if tpe == "update":
            update_type: str = data.get("update_type")
            details: dict = data.get("details")
            _chat_id: str = details.get("chat_id")
            if update_type == "read-update":
                msgid = details.get("read")
                stamp = details.get("rstamp")
                chat_data = check_chat_data(id_=_chat_id)
                if chat_data:
                    chts = chat_data.chats
                    msg = chts.get(str(msgid)) or chts.get(safe_int(msgid))
                    if not msg:
                        # unknown message id: nothing to mark as read
                        return
                    msg["read"] = True
                    msg["rstamp"] = stamp
                    chts[safe_int(msgid)] = msg
                    chat_data.chats = chts
                    flag_modified(chat_data, "chats")
                    # pylint: disable=E1101
                    db.session.merge(chat_data)
                    db.session.commit()
                    print("updated")
                    # pylint: enable=E1101
                    return await self.send_message(
                        {
                            "type": "chat-update",
                            "data": {
                                "chat_id": _chat_id,
                                "update_type": "read",
                                "msg": msgid,
                                "rstamp": stamp,
                            },
                        },
                        peer_socket,
                    )

        if tpe == "send_role":
            print(peer_socket)
            offerer = data.get("is_offerer")
            await self.send_message({
                "type": "set_role",
                "data": {
                    "is_offerer": not offerer
                }
            })
            return await self.send_message(
                {
                    "type": "set_role",
                    "data": {
                        "is_offerer": offerer
                    }
                }, peer_socket)
Example #10

    for image in images:
        print("getting feature scores for {}".format(image))
        image_path = os.path.join(image_dir, image)
        image_data = tf.gfile.FastGFile(image_path, 'rb').read()

        predictions = sess.run(result_tensor,
                               {'DecodeJpeg/contents:0': image_data})

        # Sort to show labels of first prediction in order of confidence
        top_k = predictions[0].argsort()[-len(predictions[0]):][::-1]

        # str.strip('.jpg') strips characters, not a suffix;
        # use os.path.splitext to drop the extension safely
        basename = os.path.splitext(image)[0] + '.txt'
        if not os.path.exists(scores_dir):
            os.makedirs(scores_dir)
        filename = os.path.join(scores_dir, basename)
        image_scores = {}
        with open(filename, 'w') as f:
            for node_id in top_k:
                human_string = label_lines[node_id]
                score = predictions[0][node_id]
                f.write("{l}\t{s}\n".format(l=human_string, s=score))
                image_scores[human_string] = score
        scored_images[os.path.splitext(image)[0]] = image_scores

scored_images_df = pd.DataFrame.from_dict(scored_images)
# list.sort() sorts in place and returns None, so sort on its own line
columns = scored_images_df.columns.tolist()
columns.sort(key=lambda x: (''.join(i for i in x if not i.isdigit()),
                            safe_int(''.join(i for i in x if i.isdigit()))))
# reindex returns a new frame; reindex_axis is gone from modern pandas
scored_images_df = scored_images_df.reindex(columns=columns)
scored_images_df.to_csv(os.path.join(scores_dir, 'image_scores.csv'))
Example #11
def search(args, dumping=False):
  """Run a search against the backend specified by the 'backend' arg.
  Returns a result set that's been (a) de-dup'd ("merged") and (b) truncated
  to the appropriate number of results ("clipped").  Impression tracking
  happens here as well."""
  logging.info("search.search enter")

  # TODO(paul): Create a QueryParams object to handle validation.
  #     Validation should be lazy, so that (for example) here
  #     only 'num' and 'start' are validated, since we don't
  #     yet need the rest.  QueryParams can have a function to
  #     create a normalized string, for the memcache key.
  # pylint: disable-msg=C0321
  
  normalize_query_values(args, dumping)

  # TODO: query param (& add to spec) for defeating the cache (incl FastNet)
  # I (mblain) suggest using "zx", which is used at Google for most services.

  # TODO: Should construct our own normalized query string instead of
  # using the browser's querystring.

  args_array = [str(key)+'='+str(value) for (key, value) in args.items()]
  args_array.sort()
  normalized_query_string = str('&'.join(args_array))
  logging.info('normalized_query_string: ' + normalized_query_string)

  use_cache = True
  if api.PARAM_CACHE in args and args[api.PARAM_CACHE] == '0':
    use_cache = False
    logging.debug('Not using search cache')

  start = safe_int(args[api.PARAM_START], api.CONST_MIN_START)
  num = safe_int(args[api.PARAM_NUM], api.CONST_DFLT_NUM)

  result_set = None
  # note: key cannot exceed 250 bytes
  #memcache_key = get_cache_key(normalized_query_string)

  if use_cache:
    result_set_str = ''
    chunk = 0
    while True:
      logging.info(get_cache_key(normalized_query_string, chunk))
      buff = memcache.get(get_cache_key(normalized_query_string, chunk))
      if not buff:
        break
      result_set_str += buff
      chunk += 1

    if result_set_str:
      try:
        result_set = pickle.loads(result_set_str)
      except Exception:
        logging.warning('result_set not completely in cache')

    if result_set:
      logging.debug('in cache: "' + normalized_query_string + '"')
      if len(result_set.merged_results) < start + num:
        logging.debug('but too small-- rerunning query...')
        result_set = None
    else:
      logging.debug('not in cache: "' + normalized_query_string + '"')

  if not result_set:
    result_set = fetch_result_set(args, dumping)
    if result_set:
      result_set_str = pickle.dumps(result_set)
      sz = len(result_set_str)
      chunk = idx = 0
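      # memcache caps each value at about 1 MB, so the pickled result
      # set is stored in MAX_CACHE_SZ chunks under numbered keys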
      while sz > 0:
        buff = result_set_str[idx:idx + MAX_CACHE_SZ]
        memcache.set(get_cache_key(normalized_query_string, chunk), buff, time=CACHE_TIME)
        sz -= MAX_CACHE_SZ
        idx += MAX_CACHE_SZ
        chunk += 1

  logging.info('result_set size after dedup: ' + str(result_set.num_merged_results))

  result_set.clip_merged_results(start, num)
  logging.info("search.search clip_merged_results completed")

  return result_set
Example #12
def normalize_query_values(args, dumping=False):
  """Pre-processes several values related to the search API that might be
  present in the query string."""

  # api.PARAM_OUTPUT is only used by callers (the view)
  #   (though I can imagine some output formats dictating which fields are
  #    retrieved from the backend...)
  #
  #if args[api.PARAM_OUTPUT] not in ['html', 'tsv', 'csv', 'json', 'rss', 
  #  'rssdesc', 'xml', 'snippets_list']
  #
  # TODO: csv list of fields
  #if args[api.PARAM_FIELDS] not in ['all', 'rss']:

  # TODO: process dbg -- currently, anything goes...

  # RESERVED: v
  # RESERVED: sort
  # RESERVED: type

  def dbgargs(arg):
    logging.debug("args[%s]=%s" % (arg, args[arg]))

  if api.PARAM_NUM not in args:
    args[api.PARAM_NUM] = api.CONST_DFLT_NUM

  num = safe_int(args[api.PARAM_NUM], api.CONST_DFLT_NUM)
  args[api.PARAM_NUM] = min_max(num, api.CONST_MIN_NUM, api.CONST_MAX_NUM)

  dbgargs(api.PARAM_NUM)

  if not dumping:
    if api.PARAM_START not in args:
      args[api.PARAM_START] = api.CONST_MIN_START
    else:
      args[api.PARAM_START] = min_max(
                safe_int(args[api.PARAM_START], api.CONST_MIN_START), 
                api.CONST_MIN_START, api.CONST_MAX_START - num)

  dbgargs(api.PARAM_START)
  
  if dumping:
    overfetch_ratio = 1.0
  else:
    if api.PARAM_OVERFETCH_RATIO in args:
      overfetch_ratio = float(args[api.PARAM_OVERFETCH_RATIO])
    elif args[api.PARAM_START] > 1:
      # increase the overfetch ratio after the first page--
      # overfetch is expensive and we don't want to do this
      # on page one, which is very performance sensitive.
      overfetch_ratio = api.CONST_MAX_OVERFETCH_RATIO
    else:
      overfetch_ratio = 2.0

  args[api.PARAM_OVERFETCH_RATIO] = min_max(
    overfetch_ratio, api.CONST_MIN_OVERFETCH_RATIO,
    api.CONST_MAX_OVERFETCH_RATIO)
  dbgargs(api.PARAM_OVERFETCH_RATIO)

  use_cache = True
  if api.PARAM_CACHE in args and args[api.PARAM_CACHE] == '0':
    use_cache = False
    logging.debug('Not using search cache')

  # PARAM_TIMEPERIOD overrides VOL_STARTDATE/VOL_ENDDATE
  if api.PARAM_TIMEPERIOD in args:    
    period = args[api.PARAM_TIMEPERIOD]
    # No need to pass thru, just convert period to discrete date args.
    del args[api.PARAM_TIMEPERIOD]
    date_range = None
    today = datetime.date.today()
    if period == 'today':
      date_range = (today, today)
    elif period == 'this_weekend':
      days_to_sat = 5 - today.weekday()
      delta = datetime.timedelta(days=days_to_sat)
      this_saturday = today + delta
      this_sunday = this_saturday + datetime.timedelta(days=1)
      date_range = (this_saturday, this_sunday)
    elif period == 'this_week':
      days_to_mon = 0 - today.weekday()
      delta = datetime.timedelta(days=days_to_mon)
      this_monday = today + delta
      this_sunday = this_monday + datetime.timedelta(days=6)
      date_range = (this_monday, this_sunday)
    elif period == 'this_month':
      days_to_first = 1 - today.day
      delta = datetime.timedelta(days=days_to_first)
      first_of_month = today + delta
      days_to_month_end = calendar.monthrange(today.year, today.month)[1] - 1
      delta = datetime.timedelta(days=days_to_month_end)
      last_of_month = first_of_month + delta
      date_range = (first_of_month, last_of_month)

    if date_range:      
      start_date = date_range[0].strftime("%m/%d/%Y")
      end_date = date_range[1].strftime("%m/%d/%Y")
      args[api.PARAM_VOL_STARTDATE] = start_date
      args[api.PARAM_VOL_ENDDATE] = end_date

  if (api.PARAM_TIMEPERIOD_START in args
      and args[api.PARAM_TIMEPERIOD_START] == 'start date'):
    del args[api.PARAM_TIMEPERIOD_START]

  if (api.PARAM_TIMEPERIOD_END in args
      and args[api.PARAM_TIMEPERIOD_END] == 'end date'):
    del args[api.PARAM_TIMEPERIOD_END]

  if (api.PARAM_TIMEPERIOD_START in args and api.PARAM_TIMEPERIOD_END in args
      and api.PARAM_TIMEPERIOD not in args):
    start_date = args[api.PARAM_TIMEPERIOD_START]
    end_date = args[api.PARAM_TIMEPERIOD_END]
    args[api.PARAM_VOL_STARTDATE] = start_date
    args[api.PARAM_VOL_ENDDATE] = end_date

  if api.PARAM_Q not in args:
    args[api.PARAM_Q] = ""
  else:
    args[api.PARAM_Q] = args[api.PARAM_Q].strip()
  dbgargs(api.PARAM_Q)

  if (api.PARAM_VOL_LOC not in args 
      or args[api.PARAM_VOL_LOC] == ""
      or args[api.PARAM_VOL_LOC].lower().find("location") >=0
     ):
    # bugfix for http://code.google.com/p/footprint2009dev/issues/detail?id=461
    # q=Massachusetts should imply vol_loc=Massachusetts, USA
    # note that this implementation also makes q=nature match
    # a town near santa ana, CA
    # http://www.allforgood.org/search#q=nature&vol_loc=nature%2C%20USA
    # args[api.PARAM_VOL_LOC] = args[api.PARAM_Q] + " USA"
    # MT: 8/26/2010 - in practice that causes a lot of 602 results in geocode, eg "Laywers, USA"
    args[api.PARAM_VOL_LOC] = "USA"

  args[api.PARAM_LAT] = args[api.PARAM_LNG] = ""
  if api.PARAM_VIRTUAL in args:
    args["lat"] = args["long"] = "0.0"
    args[api.PARAM_VOL_DIST] = 25
    
  elif api.PARAM_VOL_LOC in args:
    if geocode.is_latlong(args[api.PARAM_VOL_LOC]):
      args[api.PARAM_LAT], args[api.PARAM_LNG] = \
                             args[api.PARAM_VOL_LOC].split(",")
    elif geocode.is_latlongzoom(args[api.PARAM_VOL_LOC]):
      args[api.PARAM_LAT], args[api.PARAM_LNG], zoom = \
                             args[api.PARAM_VOL_LOC].split(",")
    elif args[api.PARAM_VOL_LOC] == "virtual":
      args[api.PARAM_LAT] = args[api.PARAM_LNG] = "0.0"
    elif args[api.PARAM_VOL_LOC] == "anywhere":
      args[api.PARAM_LAT] = args[api.PARAM_LNG] = ""
    else:
      res = geocode.geocode(args[api.PARAM_VOL_LOC], use_cache)
      if res != "":
        args[api.PARAM_LAT], args[api.PARAM_LNG], zoom = res.split(",")
    
    args[api.PARAM_LAT] = args[api.PARAM_LAT].strip()
    args[api.PARAM_LNG] = args[api.PARAM_LNG].strip()    
    if api.PARAM_DISTANCE in args:
      args[api.PARAM_VOL_DIST] = safe_int(args[api.PARAM_DISTANCE])
    elif api.PARAM_VOL_DIST in args:
      args[api.PARAM_VOL_DIST] = safe_int(args[api.PARAM_VOL_DIST])
    else:
      args[api.PARAM_VOL_DIST] = 25

  else:
    args[api.PARAM_VOL_LOC] = args[api.PARAM_VOL_DIST] = ""
  dbgargs(api.PARAM_VOL_LOC)
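
Examples #11 and #12 also lean on a min_max clamp helper. A plausible implementation, with the signature inferred from the call sites (an assumption, not the project's actual code):

def min_max(value, lo, hi):
    # clamp value into the inclusive range [lo, hi]
    return max(lo, min(value, hi))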
Example #13
def in_at_least_one_hour(dinner_time):
    # convert "HH:MM" into minutes since midnight
    hour, minute = dinner_time.split(":")
    dinner = utils.safe_int(hour) * 60 + utils.safe_int(minute)

    # True when dinner is at least 60 minutes from now
    now = datetime.datetime.now()
    return dinner - (now.hour * 60 + now.minute) >= 60
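
Run at 18:00, for instance, a quick sanity check of the expected behaviour (illustrative only, since the result depends on the current clock):

print(in_at_least_one_hour("19:30"))   # True  (90 minutes away)
print(in_at_least_one_hour("18:30"))   # False (only 30 minutes away)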