# Request-handling helpers for the allergy_assassin Tornado API.
import tornado.ioloop
import tornado.web
import json
import argparse
import memcache
from allergy_assassin import settings, search, dish, allergies, logger
from allergy_assassin.logger import log_request

# Rebinds the imported `logger` module name to a module-specific logger.
logger = logger.init("allergy_assassin.api.handler")


def get_argument(request, argument_name):
    """Fetch argument from request.arguments"""
    # request.arguments maps each name to a LIST of values; take the last
    # occurrence, or None when the argument is absent.
    arg_list = request.arguments.get(argument_name, [None])
    return arg_list[-1]


def cache(responder):
    """ Decorator for responders making use of cache """
    # One memcached client per decorated responder.  This local deliberately
    # shadows the decorator's own name inside this scope.
    cache = memcache.Client(settings.MEMCACHED_SERVERS)

    def wrapped(handler):
        # The full request URI (path + query string) serves as the cache key.
        cache_key = handler.request.uri
        cache_item = cache.get(cache_key)
        if cache_item is None:
            # Cache miss: compute the response and store it for CACHE_TIME.
            cache_item = responder(handler)
            cache.set(cache_key, cache_item, time=settings.CACHE_TIME)
        else:
            logger.info("Using cache for '%s' query" % cache_key)
        return cache_item
    # NOTE(review): no `return wrapped` is visible in this chunk — either the
    # decorator body is truncated here, or the decorator returns None and
    # would break every decorated responder.  Confirm against the full file.
    # NOTE(review): `wrapped` should probably be wrapped in
    # functools.wraps(responder) to preserve metadata — TODO confirm.
""" This file maps abstract allergies (which don't by name refer to a concrete ingredient, to a set of concrete ingredients. The fixtures in this file are also used to generate the allergies autocompletion list, which explains the 1->1 mappings also listed here. """ from allergy_assassin import db, logger import re logger = logger.init("allergy_assassin.allergies") fixtures = [ (('peanut',), ('peanut')), (('nut',), ('peanut', 'hazelnut', 'cashew nut', 'brazil nut', 'almond', 'walnut', 'pecan', 'pistachio')), (('shellfish',), ('prawn', 'clam', 'oyster', 'crab', 'lobster', 'crayfish', 'mussel', 'winkle', 'scallop', 'shrimp')), (('seafood',), ('prawn', 'clam', 'oyster', 'crab', 'lobster', 'crayfish', 'mussel', 'winkle', 'scallop', 'shrimp', 'squid', 'calamari', 'octopus', 'fish', 'cod', 'scampi', 'haddock', 'mackerel' 'tuna', 'sardine', 'anchovy')), (('dairy',), ('milk', 'yoghurt', 'cheese', 'cream', 'butter')), (('egg',), ('egg')), (('lime',), ('lime')), (('kidney bean',), ('kidney bean')), (('gluten','wheat'), ('bread', 'flour', 'pasta')), (('barley',), ('barley')),
"""Map/reduce job counting distinct API request dates (users per day)."""
from bson import Code, SON
from allergy_assassin import settings, logger
from allergy_assassin.db import requests

logger = logger.init("allergy_assassin.metrics.users_per_day")

# Map step: bucket each stored request under the ISO date (YYYY-MM-DD) of its
# request_time, emitting a count of 1.  Requests whose request_time is not a
# Date object are skipped.
m = Code(
    """function () { var rd = this.request.request_time; if (typeof rd == 'object') { var date = rd.toISOString().split('T')[0]; emit(date, 1); } };"""
)

# Reduce step: total the per-request counts emitted for each date.
r = Code(
    """function (key, values) { return Array.sum(values); };"""
)


def generate():
    """Run the users-per-day map/reduce over the requests collection.

    The output wholesale-replaces the configured collection in the
    metrics database.
    """
    logger.debug("Beginning dish users per day mapreduce job")
    # SON preserves key order, which the mapReduce `out` document requires:
    # the action ("replace") must precede the "db" option.
    out_spec = SON([
        ("replace", settings.USERS_PER_DAY_COLLECTION),
        ("db", settings.METRICS_DB),
    ])
    requests.map_reduce(m, r, out=out_spec)
    logger.debug("Mapreduce query completed")


if __name__ == "__main__":
    generate()
from bson.code import Code
from allergy_assassin import db, settings, logger, allergies

logger = logger.init("allergy_assassin.search")


class AllergenPresenceSearch:
    """Search recipes matching some terms for the presence of allergens."""

    def __init__(self, allergens, terms):
        # `allergens` holds abstract names (e.g. 'nut'); expand them to the
        # concrete ingredient names via the allergies fixtures.
        self.abstract_allergens = allergens
        self.terms = terms
        self.allergens = allergies.get_real_allergens(allergens)
        self._validate_search()

    def _validate_search(self):
        # Normalise surrounding whitespace on the terms and on each allergen.
        self.terms = self.terms.strip()
        self.allergens = [allergen.strip() for allergen in self.allergens]

    def search(self):
        # Accumulates one result per allergen on the instance.
        self.results = []
        for allergen in self.allergens:
            # query
            # NOTE(review): spec does not depend on `allergen` here — it looks
            # loop-invariant and hoistable; confirm _get_query_spec (defined
            # outside this chunk) before moving it.
            spec = self._get_query_spec()
            matching_recipes = self._query_recipes(spec)
            full_count = matching_recipes.count()
            allergen_presence = self._query_allergen_presence(matching_recipes, allergen)
            allergen_presence_count = allergen_presence.count()
            logger.info("%d results of %d containing allergen '%s'" % (allergen_presence_count, full_count, allergen))
            # NOTE(review): the chunk is truncated mid-statement below;
            # AllergenPresenceResult and the remaining arguments are outside
            # the visible source.
            result = AllergenPresenceResult(allergen,
"""Map/reduce job building the dish-name autocompletion collection."""
from bson import Code
from allergy_assassin import settings, logger
from allergy_assassin.db import recipes, dish

logger = logger.init("allergy_assassin.dish.autocomplete")

# Map step: emit one record per recipe, keyed on recipe name, carrying the
# recipe's source and an initial source count of 1.
m = Code("""function () { emit(this.recipe_name, {sources: [this.source], source_count: 1}); };""")

# Reduce step: union the source lists for a dish name, counting each distinct
# source once.  The newline after the // comment is required: without it the
# line comment swallows the rest of the statement and the JS never pushes
# sources (the whitespace-stripped original had exactly that defect).
r = Code("""function (key, values) { var result = { 'sources': [], source_count: 0}; values.forEach(function(value) { value.sources.forEach(function(source) { if ( result.sources.indexOf(source) == -1 ) { //if value not in result
result.sources.push(source); result.source_count++; } }); }); return result; };""")


def generate():
    """Rebuild the dish autocompletion collection from the recipes.

    Runs the map/reduce above, replacing the dish collection, then prunes
    dishes that appear in fewer than DISH_AUTOCOMPLETION_MIN_SOURCES sources.
    """
    logger.debug("Beginning dish autocompletion list mapreduce job")
    # Output replaces the dish collection wholesale; the cursor returned by
    # map_reduce is not needed (the original bound it to an unused local).
    recipes.map_reduce(m, r, out={'replace': dish.name})
    logger.debug("Mapreduce query completed, removing items below threshold...")
    # Drop dishes seen in too few sources to be trustworthy suggestions.
    # NOTE(review): `safe=True` is the legacy write-concern flag, removed in
    # modern PyMongo — acceptable for the pinned driver, flag for upgrade.
    dish.remove({'value.source_count': {'$lt': settings.DISH_AUTOCOMPLETION_MIN_SOURCES}}, safe=True)