Example 1
 def get(self, keyword, sort):
     # Build the search query from the request parameters
     query_options = {
         "wt": keyword,  # Query keywords
         "sort": sort    # 're' (relevance), 'ra' (rating) or 'p' (popular)
     }
     query_result = AllRecipes.search(query_options)
     # Fetch full details for the first (most relevant) result
     main_recipe_url = query_result[0]['url']
     detailed_recipe = AllRecipes.get(main_recipe_url)
     # Send the result out as JSON
     result = {
         'recipe_name': detailed_recipe['name'],
         'ingredients': detailed_recipe['ingredients'],
         'steps': detailed_recipe['steps']
     }
     return jsonify(result)
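This handler reads like a method on a Flask class-based view (it takes self and returns jsonify). A minimal self-contained sketch of the same flow as a plain Flask route; the route path and function name are assumptions, not part of the original:

from flask import Flask, jsonify
from allrecipes import AllRecipes

app = Flask(__name__)

@app.route('/recipes/<keyword>/<sort>')  # hypothetical route
def get_recipe(keyword, sort):
    # Same flow as the handler above: search, take the top hit, fetch its details
    query_options = {"wt": keyword, "sort": sort}
    query_result = AllRecipes.search(query_options)
    detailed_recipe = AllRecipes.get(query_result[0]['url'])
    return jsonify({
        'recipe_name': detailed_recipe['name'],
        'ingredients': detailed_recipe['ingredients'],
        'steps': detailed_recipe['steps'],
    })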
Example 2
 def __init__(self, recipe_dict):
     # Basic fields that come straight from the search results
     self.name = recipe_dict["name"]
     self.url = recipe_dict["url"]
     self.image = recipe_dict["image"]
     self.description = recipe_dict["description"]
     # Fetch the recipe page and copy every detail field onto the instance
     detailed_info = allrecipes_scraper.get(self.url)
     for attr, value in detailed_info.items():
         setattr(self, attr, value)
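Because the loop copies every key of the detail dict onto the instance, a freshly built object also exposes attributes such as rating (see Example 6). A short usage sketch, assuming the class above is named Recipe and that allrecipes_scraper exposes the same search/get API used in the other examples; the "pasta" query is illustrative:

query_results = allrecipes_scraper.search({"wt": "pasta", "sort": "re"})
recipes = [Recipe(r) for r in query_results]  # each construction triggers one detail fetch
print(recipes[0].name, recipes[0].rating)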
Example 3
 def search_item(self, *args):
     query_options = {
         "wt": self.chars.text,  # Query keywords taken from the text input
         "sort": "p"  # 're' for relevance, 'ra' for rating, 'p' for popular (optional)
     }
     query_results = AllRecipes.search(query_options)
     self.scoll_view_update_callback(query_results)
     self.popup.dismiss()
Example 4
def search(ingredients: List[str]) -> List[int]:
    query_options = {'ingIncl': ','.join(ingredients), 'sort': 'p'}
    try:
        query_results = ar.search(query_options)
    except StopIteration:
        print("No results found on allrecipes")
        return []
    return db_interface.add(
        [recipe['url'] for recipe in query_results],
        'allrecipes',
        extras={
            'image': [recipe.get('image') for recipe in query_results],
            'desc': [recipe.get('description') for recipe in query_results]
        })
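A brief usage sketch; the ingredient names are illustrative, and the returned integers are whatever ids db_interface.add assigns (per the List[int] annotation):

recipe_ids = search(['chicken', 'broccoli'])
print(recipe_ids or "no matches on allrecipes")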
Example 5
def scrapeRecipes(includeIngredients=None, excludeIngredients=None, sort=None):
    query_params = {}
    if includeIngredients:
        query_params["ingIncl"] = includeIngredients

    if excludeIngredients:
        query_params["ingExcl"] = excludeIngredients

    if sort:
        if sort not in ["re", "ra", "p"]:
            raise ValueError(
                "Not valid sorting type, only re (relevance), ra (rating), p (popular) accepted."
            )
        query_params["sort"] = sort

    return allrecipes_scraper.search(query_params)
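A usage sketch for the wrapper above; the comma-separated ingredient strings are illustrative and follow the ingIncl/ingExcl format shown in Example 7:

results = scrapeRecipes(includeIngredients="chicken,rice",
                        excludeIngredients="peanuts", sort="ra")
for recipe in results:
    print(recipe['name'], recipe['url'])

# scrapeRecipes(sort="newest")  # would raise ValueError: only 're', 'ra', 'p' are accepted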
Example 6
def lookup(url: str, extras: dict) -> Recipe:
    source = 'allrecipes'
    recipe_details = ar.get(url)
    name = recipe_details.get('name')
    ingredients = recipe_details.get('ingredients')
    time = recipe_details.get('total_time')
    rating = recipe_details.get('rating')
    image = extras.get('image')
    desc = extras.get('desc')
    return Recipe({
        'name': name,
        'ingredients': ingredients,
        'time': time,
        'rating': rating,
        'url': url,
        'desc': desc,
        'image': image,
        'source': source
    })
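A brief usage sketch; the URL and the extras values are placeholder illustrations, and Recipe is whatever class the surrounding project defines:

recipe = lookup(
    'https://www.allrecipes.com/recipe/12345/example-recipe/',
    extras={'image': 'https://example.com/photo.jpg', 'desc': 'Short description.'}
)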
Example 7
from allrecipes import AllRecipes

query_options = {
    "wt": "Salad",  # Query keywords
    "ingIncl": "",  # 'Must be included' ingredients (optional)
    "ingExcl": "",  # 'Must not be included' ingredients (optional)
    "sort": "re"  # Sorting options: 're' for relevance, 'ra' for rating, 'p' for popular (optional)
}
query_result = AllRecipes.search(query_options)

# Get :
main_recipe_url = query_result[0]['url']
detailed_recipe = AllRecipes.get(
    main_recipe_url
)  # Get the details of the first returned recipe (most relevant in our case)

# Display result :
print("## %s :" % detailed_recipe['name'])  # Name of the recipe
Example 8
from allrecipes import AllRecipes
import json
import time

scraper = AllRecipes()
link = 'http://allrecipes.com/recipe/'
recipes = []
# Crawl a range of recipe ids and dump each scraped recipe to its own JSON file
for i in range(121513, 122000):
    print(i)
    url = link + str(i)
    result = scraper.scrape(url)
    filename = 'recipes2/' + result['Recipe'] + '.json'
    with open(filename, 'w') as outFile:
        json.dump(result, outFile)
    time.sleep(1.5)  # throttle requests to be polite to the server
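The dumped files can be read back with the same json module; the filename below is illustrative:

import json

with open('recipes2/Chicken Parmesan.json') as inFile:
    recipe = json.load(inFile)
print(recipe['Recipe'])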