def import_recipes(self):
    """Import every recipe JSON file from the backup's ``recipes`` folder.

    Each file is migrated to the current schema, saved to the database, and
    recorded as a ``RecipeImport`` status entry. Images are copied for the
    recipes that imported successfully.

    Returns:
        list[RecipeImport]: one status object per recipe file processed.
    """
    recipe_dir: Path = self.import_dir.joinpath("recipes")
    imports = []
    successful_imports = []

    for recipe_file in recipe_dir.glob("*.json"):
        with open(recipe_file, "r") as f:
            recipe_dict = json.loads(f.read())

        # Upgrade older backup formats to the current recipe schema.
        recipe_dict = ImportDatabase._recipe_migration(recipe_dict)

        try:
            recipe_obj = Recipe(**recipe_dict)
            recipe_obj.save_to_db(self.session)
        except Exception as inst:
            logger.error(inst)
            logger.info(f"Failed Import: {recipe_file.stem}")
            imports.append(
                RecipeImport(
                    name=recipe_file.stem,
                    slug=recipe_file.stem,
                    status=False,
                    exception=str(inst),
                )
            )
        else:
            imports.append(
                RecipeImport(name=recipe_obj.name, slug=recipe_obj.slug, status=True)
            )
            successful_imports.append(recipe_file.stem)
            logger.info(f"Imported: {recipe_file.stem}")

    self._import_images(successful_imports)
    return imports
def read_chowdown_file(recipe_file: Path) -> Recipe:
    """Parse a Chowdown-format YAML file into a Recipe object.

    The file is expected to contain one or two YAML documents: the first
    holds the recipe data, the optional second holds the description.

    Args:
        recipe_file (Path): path to the .yml/.yaml file.

    Returns:
        Recipe: the parsed recipe, or None if the YAML could not be parsed.
    """
    # Bug fix: was `recipe_description: str = str`, which bound the *type*
    # `str` (not a string value) and would have leaked into the Recipe.
    recipe_description: str = ""
    recipe_data: dict = {}
    with open(recipe_file, "r") as stream:
        try:
            for x, item in enumerate(yaml.load_all(stream, Loader=Loader)):
                if x == 0:
                    recipe_data = item
                elif x == 1:
                    recipe_description = str(item)
        except yaml.YAMLError as exc:
            print(exc)
            # Explicit None rather than a bare `return` — parse failure
            # yields no recipe.
            return None

    reformat_data = {
        "name": recipe_data.get("title"),
        "description": recipe_description,
        "image": recipe_data.get("image", ""),
        "recipeIngredient": recipe_data.get("ingredients"),
        "recipeInstructions": recipe_data.get("directions"),
        # Bug fix: .get("tags") returns None when the key is missing and
        # None.split(",") raises AttributeError; default to "".
        "tags": recipe_data.get("tags", "").split(","),
    }

    new_recipe = Recipe(**reformat_data)
    # Chowdown stores instructions as plain strings; the Recipe model
    # expects {"text": ...} dicts.
    new_recipe.recipeInstructions = [
        {"text": instruction} for instruction in new_recipe.recipeInstructions
    ]
    return new_recipe
def update_recipe_image(
    recipe_slug: str, image: bytes = File(...), extension: str = Form(...)
):
    """Removes an existing image and replaces it with the incoming file."""
    write_response = write_image(recipe_slug, image, extension)
    # Record the new extension on the recipe itself.
    Recipe.update_image(recipe_slug, extension)
    return write_response
def create_from_url(url: str) -> dict:
    """Scrape a recipe from *url*, dump the raw data to TEMP_FILE for
    inspection, and persist the recipe to the database."""
    recipe_data = process_recipe_url(url)

    # Keep a pretty-printed copy of the scraped payload on disk.
    with open(TEMP_FILE, "w") as temp:
        json.dump(recipe_data, temp, indent=4, default=str)

    return Recipe(**recipe_data).save_to_db()
def delete_recipe(recipe_slug: str, db: Session = Depends(generate_session)):
    """ Deletes a recipe by slug """
    try:
        Recipe.delete(db, recipe_slug)
    except Exception:
        # Bug fix: was a bare `except:`, which also swallows
        # SystemExit/KeyboardInterrupt.
        raise HTTPException(
            status_code=404, detail=SnackResponse.error("Unable to Delete Recipe")
        )

    # Bug fix: the success path previously returned SnackResponse.error,
    # inconsistent with the other delete route which uses .success.
    return SnackResponse.success(f"Recipe {recipe_slug} Deleted")
def delete_recipe(recipe_slug: str):
    """ Deletes a recipe by slug """
    try:
        Recipe.delete(recipe_slug)
    except Exception:
        # Bug fix: was a bare `except:`, which also swallows
        # SystemExit/KeyboardInterrupt.
        raise HTTPException(
            status_code=404, detail=SnackResponse.error("Unable to Delete Recipe")
        )

    return SnackResponse.success("Recipe Deleted")
def import_recipes(recipe_dir: Path) -> Recipe:
    """Import a single recipe (plus optional ``full.*`` image) from a directory.

    Args:
        recipe_dir (Path): directory containing one recipe ``.json`` file and
            optionally an image named ``full.*``.

    Returns:
        Recipe: the recipe built from the JSON data.

    Raises:
        FileNotFoundError: if the directory contains no ``.json`` file.
    """
    # Bug fix: `image = False` was used as a Path sentinel; use None and
    # take the first glob match instead of silently keeping the last.
    image = next(recipe_dir.glob("full.*"), None)

    recipe_file = next(recipe_dir.glob("*.json"), None)
    if recipe_file is None:
        # Bug fix: previously `recipe_file` stayed unbound and the open()
        # below raised an opaque NameError.
        raise FileNotFoundError(f"No recipe .json file found in {recipe_dir}")

    with open(recipe_file, "r") as f:
        recipe_dict = json.loads(f.read())

    recipe_data = Cleaner.clean(recipe_dict)

    image_name = None
    if image:
        image_name = recipe_data["slug"] + image.suffix
        recipe_data["image"] = image_name
    else:
        recipe_data["image"] = "none"

    recipe = Recipe(**recipe_data)

    if image:
        shutil.copy(image, IMG_DIR.joinpath(image_name))

    return recipe
def read_chowdown_file(recipe_file: Path) -> Recipe:
    """Parse through the yaml file to try and pull out the relevant information.
    Some issues occur when ":" are used in the text. I have not put a lot of
    effort into this so there may be better ways of going about it. Currently,
    I get about 80-90% of recipes from repos I've tried.

    Args:
        recipe_file (Path): Path to the .yml file

    Returns:
        Recipe: Recipe class object, or None if the YAML could not be parsed.
    """
    # Bug fix: was `recipe_description: str = str`, which bound the *type*
    # `str` (not a string value) and would have leaked into the Recipe.
    recipe_description: str = ""
    recipe_data: dict = {}
    with open(recipe_file, "r") as stream:
        try:
            # Document 0 is the recipe data; document 1 (optional) is the
            # description.
            for x, item in enumerate(yaml.load_all(stream, Loader=Loader)):
                if x == 0:
                    recipe_data = item
                elif x == 1:
                    recipe_description = str(item)
        except yaml.YAMLError as exc:
            print(exc)
            # Explicit None rather than a bare `return`.
            return None

    reformat_data = {
        "name": recipe_data.get("title"),
        "description": recipe_description,
        "image": recipe_data.get("image", ""),
        "recipeIngredient": recipe_data.get("ingredients"),
        "recipeInstructions": recipe_data.get("directions"),
        # Bug fix: .get("tags") returns None when the key is missing and
        # None.split(",") raises AttributeError; default to "".
        "tags": recipe_data.get("tags", "").split(","),
    }

    new_recipe = Recipe(**reformat_data)
    # Chowdown stores instructions as plain strings; the Recipe model
    # expects {"text": ...} dicts.
    new_recipe.recipeInstructions = [
        {"text": instruction} for instruction in new_recipe.recipeInstructions
    ]
    return new_recipe
def update_recipe(
    recipe_slug: str, data: Recipe, db: Session = Depends(generate_session)
):
    """ Updates a recipe by existing slug and data. """
    # Recipe.update returns the (possibly changed) slug.
    return data.update(db, recipe_slug)
def post_webhooks():
    """POST today's meal-plan recipe to every configured webhook URL.

    Does nothing when webhooks are disabled in the site settings.
    """
    settings = SiteSettings.get_site_settings()
    if not settings.webhooks.enabled:
        return

    todays_meal = Recipe.get_by_slug(MealPlan.today()).dict()
    # Serialize once; the same payload goes to every URL.
    payload = json.dumps(todays_meal, default=str)
    for url in settings.webhooks.webhookURLs:
        requests.post(url, payload)
def post_webhooks():
    """POST today's meal-plan recipe to every configured webhook URL.

    Opens its own database session to read the site settings; does nothing
    when webhooks are disabled.
    """
    session = create_session()
    try:
        all_settings = SiteSettings(**db.get(session, "main"))
        if all_settings.webhooks.enabled:
            todays_meal = Recipe.get_by_slug(MealPlan.today()).dict()
            # Serialize once; the same payload goes to every URL.
            payload = json.dumps(todays_meal, default=str)
            for url in all_settings.webhooks.webhookURLs:
                requests.post(url, payload)
    finally:
        # Bug fix: the session leaked whenever anything above raised
        # (settings lookup, recipe lookup, or a failing POST).
        session.close()
def import_recipes(self):
    """Import every recipe JSON file from the backup's ``recipes`` folder.

    Each file is migrated to the current schema and saved to the database.
    Images are copied for the recipes that imported successfully.

    Returns:
        dict: {"successful": [stems...], "failed": [stems...]}
    """
    recipe_dir: Path = self.import_dir.joinpath("recipes")
    successful_imports = []
    failed_imports = []

    for recipe in recipe_dir.glob("*.json"):
        with open(recipe, "r") as f:
            recipe_dict = json.loads(f.read())

        # Upgrade older backup formats to the current recipe schema.
        recipe_dict = ImportDatabase._recipe_migration(recipe_dict)

        try:
            Recipe(**recipe_dict).save_to_db()
        except Exception as e:
            # Bug fix: a bare `except:` discarded the failure reason;
            # log it so failed imports can be diagnosed.
            logger.error(e)
            logger.info(f"Failed Import: {recipe.stem}")
            failed_imports.append(recipe.stem)
        else:
            successful_imports.append(recipe.stem)
            logger.info(f"Imported: {recipe.stem}")

    self._import_images(successful_imports)
    return {"successful": successful_imports, "failed": failed_imports}
def export_recipes(self):
    """Write every recipe to its own JSON file under ``self.recipe_dir``,
    plus a template export per recipe when templates are enabled."""
    for recipe in Recipe.get_all(self.session):
        logger.info(f"Backing Up Recipes: {recipe}")
        out_path = self.recipe_dir.joinpath(recipe.get("slug") + ".json")
        ExportDatabase._write_json_file(recipe, out_path)
        if self.templates:
            self._export_template(recipe)
def process_meals(self):
    """Replace ``self.meals`` with full Meal objects.

    Each planned slug is resolved to its Recipe, and dates are assigned
    sequentially starting from ``self.startDate``.
    """
    expanded = []
    for offset, meal in enumerate(self.meals):
        recipe = Recipe.get_by_slug(meal.slug)
        expanded.append(
            Meal(
                slug=recipe.slug,
                name=recipe.name,
                date=self.startDate + timedelta(days=offset),
                dateText=meal.dateText,
                image=recipe.image,
                description=recipe.description,
            )
        )
    self.meals = expanded
def create_from_url(url: str) -> Recipe:
    """Main entry point for generating a recipe from a URL. Pass in a URL and
    a Recipe object will be returned if successful.

    Args:
        url (str): a valid string representing a URL

    Returns:
        Recipe: Recipe Object
    """
    # Robustness fix: without a timeout, a dead or slow host hangs this
    # request (and the caller) indefinitely.
    r = requests.get(url, timeout=30)
    new_recipe = extract_recipe_from_html(r.text, url)
    new_recipe = Cleaner.clean(new_recipe)
    new_recipe = download_image_for_recipe(new_recipe)
    return Recipe(**new_recipe)
def get_recipe(recipe_slug: str, db: Session = Depends(generate_session)):
    """ Takes in a recipe slug, returns all data for a recipe """
    return Recipe.get_by_slug(db, recipe_slug)
def create_from_json(
    data: Recipe, db: Session = Depends(generate_session)
) -> str:
    """ Takes in a JSON string and loads data into the database as a new entry"""
    # save_to_db returns the slug of the newly created recipe.
    return data.save_to_db(db)
async def update(recipe_slug: str, data: dict):
    """Updates a recipe by existing slug; *data* should contain the fields
    to change."""
    Recipe.update(recipe_slug, data)
    return {"message": "PLACEHOLDER"}
def create_from_url(url: str) -> dict:
    """Scrape a recipe from *url* and persist it to the database."""
    scraped = process_recipe_url(url)
    return Recipe(**scraped).save_to_db()
def create_from_json(data: Recipe) -> str:
    """ Takes in a JSON string and loads data into the database as a new entry"""
    return data.save_to_db()
def get_recipe(recipe_slug: str):
    """ Takes in a recipe slug, returns all data for a recipe """
    return Recipe.get_by_slug(recipe_slug)
def create_from_url(url: str) -> Recipe:
    """Scrape recipe data from *url* and return it as a Recipe object."""
    return Recipe(**process_recipe_url(url))
def update_recipe(recipe_slug: str, data: Recipe):
    """ Updates a recipe by existing slug and data. """
    # update() returns the (possibly changed) slug.
    return data.update(recipe_slug)