def cais(request, response, content):
    """REST-style dispatcher for Food records.

    GET  with ?food_id=...  -> put one food's info into content["info"]
    GET  without params     -> list today's non-deleted foods
    POST                    -> create a new Food
    PUT                     -> rename an existing Food
    DELETE                  -> soft-delete a Food (is_delete=1)

    `content` is mutated in place; the caller serializes it afterwards.
    NOTE(review): `response` is unused here — kept for interface
    compatibility with existing callers.
    """
    if request.method == "GET":  # query
        if request.GET:
            food_id = request.GET["food_id"]
            f = Food.objects.filter(food_id=food_id).first()
            # BUG FIX: .first() returns None when no row matches; the
            # original dereferenced it unconditionally (AttributeError).
            if f is None:
                content["info"] = {}
            else:
                content["info"] = {"name": f.name, "price": str(f.price)}
        else:
            # Foods created today and not soft-deleted.
            foods = Food.objects.filter(
                is_delete=0,
                create_time__day=datetime.today().day)
            # BUG FIX: py2 `print` statement -> py3 print() call.
            print(">>>foods: ", foods)
            info = [{
                "name": f.name,
                "price": str(f.price),
                "time": f.create_time.strftime("%Y-%m-%d %H:%M:%S")
            } for f in foods]
            content["info"] = info
    elif request.method == "POST":  # create
        group_id = request.POST["group_id"]
        store_id = request.POST["store_id"]
        name = request.POST["name"]
        desc = request.POST["desc"]
        food_img = request.POST["food_img"]
        price = request.POST["price"]
        sequence = int(request.POST.get("sequence", 1))
        is_delete = int(request.POST.get("is_delete", 0))
        food = Food()
        food.food_id = rand_str(8)  # random public identifier
        food.group_id = group_id
        food.store_id = store_id
        food.name = name
        food.desc = desc
        food.food_img = food_img
        food.price = change_money(price)  # normalize price representation
        food.sequence = sequence
        food.is_delete = is_delete
        food.save()
        content["info"] = {"msg": "成功"}
    elif request.method == "PUT":  # rename
        # Django does not populate request.PUT; parse the raw body.
        put = QueryDict(request.body)
        food_id = put["food_id"]
        name = put["name"]
        food = Food.objects.filter(food_id=food_id, is_delete=0).first()
        # BUG FIX: guard against a missing/deleted record.
        if food is not None:
            food.name = name
            food.save()
            content["info"] = {"msg": "更新名字成功", "name": name}
        else:
            content["info"] = {}
    elif request.method == "DELETE":  # soft delete
        delete = QueryDict(request.body)
        food_id = delete["food_id"]
        food = Food.objects.filter(food_id=food_id, is_delete=0).first()
        # BUG FIX: guard against a missing/already-deleted record.
        if food is not None:
            food.is_delete = 1  # soft delete, row is kept
            food.save()
            content["info"] = {"msg": "菜删除成功"}
        else:
            content["info"] = {}
def handle(self, *args, **kwargs):
    """Import ingredients from table_ingredients.csv into the database.

    Creates a FoodGroup per distinct category name (replacing ' et ' with
    ' & ') and a Food row per CSV line with its nutritional values.
    With --verbose, each inserted name is echoed to stdout.
    """
    # BUG FIX: 'diretory' typo corrected; use os.path.join instead of
    # string concatenation for the file path.
    directory = os.path.join(os.path.dirname(__file__),
                             '../../../feedeasyform_project/assets/')
    csvfile_path = os.path.join(directory, 'table_ingredients.csv')
    try:
        # Explicit encoding: the header row contains accented French
        # column names, so relying on the locale default is fragile.
        with open(csvfile_path, newline='', encoding='utf-8') as csvfile:
            foods = csv.DictReader(csvfile)
            for food in foods:
                name_grp_parsed = food['alim_grp_nom_fr'].replace(
                    ' et ', ' & ')
                # Get-or-create the food group (EAFP style).
                try:
                    group = FoodGroup.objects.get(name=name_grp_parsed)
                except ObjectDoesNotExist:
                    group = FoodGroup()
                    group.name = name_grp_parsed
                    group.save()
                name_food_parsed = food['alim_nom_fr'].replace(
                    ' et ', ' & ')
                new_food = Food()
                new_food.name = name_food_parsed
                new_food.id_group = group
                new_food.food_code = food["alim_code"]
                new_food.proteine = food["Protéines (g/100g)"]
                new_food.glucide = food["Glucides (g/100g)"]
                new_food.lipide = food["Lipides (g/100g)"]
                new_food.sucre = food["Sucres (g/100g)"]
                new_food.fibre = food["Fibres alimentaires (g/100g)"]
                new_food.acide_gras_sature = food["AG saturés (g/100g)"]
                new_food.cholesterol = food["Cholestérol (mg/100g)"]
                new_food.sel_chlorure_de_sodium = food[
                    "Sel chlorure de sodium (g/100g)"]
                new_food.calcium = food["Calcium (mg/100g)"]
                new_food.fer = food["Fer (mg/100g)"]
                new_food.magnesium = food["Magnésium (mg/100g)"]
                new_food.potassium = food["Potassium (mg/100g)"]
                new_food.zinc = food["Zinc (mg/100g)"]
                new_food.vitamine_c = food["Vitamine C (mg/100g)"]
                new_food.vitamine_d = food["Vitamine D (µg/100g)"]
                new_food.vitamine_e = food["Vitamine E (mg/100g)"]
                new_food.energie = food["Energie (kcal/100g)"]
                new_food.save()
                if kwargs['verbose']:
                    self.stdout.write(
                        f"{self.style.SUCCESS(name_food_parsed)} ajouté à la base de données"
                    )
    # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt
    # and hid the real error; narrow to Exception and report its detail.
    except Exception as e:
        self.stderr.write(self.style.ERROR(str(e)))
        self.stderr.write(
            self.style.ERROR('Une erreur est survenu. \nIl est possible que les ingrédients existe déjà dans la base de données.'))
def handle(self, *args, **kwargs):
    """Upsert foods from new_foods.json into the database.

    For each JSON entry: update the Food if a row with the same (parsed)
    name exists, otherwise create it; get-or-create its FoodGroup; every
    remaining JSON key is copied onto the Food via setattr. With
    --verbose, each processed name and its state is echoed to stdout.
    """
    # BUG FIX: 'diretory' typo corrected; use os.path.join instead of
    # string concatenation for the file path.
    directory = os.path.join(os.path.dirname(__file__),
                             '../../../feedeasyform_project/assets/')
    jsonfile_path = os.path.join(directory, 'new_foods.json')
    try:
        with open(jsonfile_path, 'r', encoding='utf-8') as jsonfile:
            foods = json.load(jsonfile)
            for food in foods:
                name_food_parsed = food.pop('name').replace(' et ', ' & ')
                # Upsert: reuse an existing row when the name matches.
                try:
                    new_food = Food.objects.get(name=name_food_parsed)
                    state = "modifié dans"
                except ObjectDoesNotExist:
                    new_food = Food()
                    state = "ajouté à"
                name_grp_parsed = food.pop('categorie_name').replace(
                    ' et ', ' & ')
                # BUG FIX: tolerate entries without the bookkeeping key
                # instead of raising KeyError and aborting the import.
                food.pop('source AJOUT', None)
                try:
                    group = FoodGroup.objects.get(name=name_grp_parsed)
                except ObjectDoesNotExist:
                    group = FoodGroup()
                    group.name = name_grp_parsed
                    group.save()
                new_food.name = name_food_parsed
                new_food.id_group = group
                # Remaining keys map 1:1 onto Food model attributes.
                for k, v in food.items():
                    setattr(new_food, k, v)
                new_food.save()
                if kwargs['verbose']:
                    self.stdout.write(
                        f"{self.style.SUCCESS(name_food_parsed)} {state} la base de données"
                    )
    except Exception as e:
        # BUG FIX: report the exception on stderr instead of bare print().
        self.stderr.write(self.style.ERROR(str(e)))
        self.stderr.write(
            self.style.ERROR('Une erreur est survenu. \nIl est possible que les ingrédients existe déjà dans la base de données.'))
def post(self, request):
    """Create a Food from form-encoded POST data.

    Expects food_img, food_price, food_name, food_desc and group_id in
    request.POST; generates a random food_id and stamps create_time.
    Returns JSON {"status": 200, "data": <POST>} on success, or
    {"error": <message>, "status": 400} on any failure.
    """
    try:
        # BUG FIX: py2 `print` statement -> py3 print() call.
        print(">>>request.POST", request.POST)
        food_img = request.POST["food_img"]      # food picture
        food_price = request.POST["food_price"]  # food price
        food_name = request.POST["food_name"]    # food name
        food_desc = request.POST["food_desc"]    # description / banner
        group_id = request.POST["group_id"]      # group id
        food_id = rand_str(8)  # random public identifier
        food = Food()
        food.food_id = food_id
        food.name = food_name
        food.group_id = group_id
        food.food_img = food_img
        food.desc = food_desc
        food.price = food_price
        food.create_time = datetime.now()
        food.save()
        return JsonResponse({"status": 200, "data": request.POST})
    # BUG FIX: `except BaseException, e` is py2-only syntax and also
    # caught SystemExit/KeyboardInterrupt; narrow to Exception.
    except Exception as e:
        return JsonResponse({"error": str(e), "status": 400})
def fill_table(maxFoodCat, maxFoodPage):
    """Download foods from OpenFoodFacts and insert them into the local DB.

    Parameters:
        maxFoodCat  -- maximum number of food categories to import
        maxFoodPage -- maximum number of food pages fetched per category

    Not all foods carry the relevant information, so `requiredIndex`
    lists the keys a product must expose (checked via `present`) before
    it is extracted.
    """
    requiredIndex = ("url",
                     "nutrition_grade_fr",
                     "purchase_places",
                     "manufacturing_places",
                     "countries",
                     "ingredients_text",
                     "product_name",
                     "generic_name_fr",
                     "image_url",
                     "image_small_url",
                     )
    # Fetch the category list from OpenFoodFacts.
    categoriesUrl = "https://fr.openfoodfacts.org/categories.json"
    r = requests.get(categoriesUrl)
    categories = r.json()
    for i in range(maxFoodCat):
        # Insert the category in the DB.
        cat = Category()
        cat.name = categories['tags'][i]['name']
        cat.url = categories['tags'][i]['url']
        cat.save()
        # A category may span several pages of foods (20 per page);
        # build the per-page JSON URLs, then harvest every product that
        # satisfies the required-keys constraint.
        # BUG FIX(review): this harvesting belongs inside the category
        # loop (per the doc above); the collapsed original appeared to
        # run it only for the last category.
        foodPageUrlTab = [cat.url + '/' + str(ind) + '.json'
                          for ind in range(1, maxFoodPage + 1)]
        # BUG FIX: track already-saved names across ALL pages of the
        # category, not per page, so duplicates on later pages are skipped.
        foodsName = []
        for foodsUrl in foodPageUrlTab:
            r2 = requests.get(foodsUrl)
            foods = r2.json()
            # BUG FIX: iterate the product list itself; the original
            # `range(len(foods))` counted the top-level dict's keys.
            for fObject in foods.get('products', []):
                if not present(requiredIndex, fObject):
                    continue
                food = Food()
                # Fill in all the text fields.
                food.category = cat
                food.name = fObject['product_name']
                # BUG FIX: these two keys are NOT in requiredIndex, so a
                # missing one must not abort the whole import -> .get().
                food.nameFr = fObject.get('product_name_fr', '')
                food.genericNameFr = fObject['generic_name_fr']
                food.url = fObject['url']
                food.nutritionGrade = fObject['nutrition_grade_fr']
                food.manufacturingPlaces = fObject['manufacturing_places']
                food.purchasePlaces = fObject['purchase_places']
                food.countries = fObject['countries'].replace("en:", "")
                food.ingredientsText = fObject['ingredients_text']
                food.image_link = fObject.get('image_front_url', '')
                # NOTE(review): the original carried a large commented-out
                # section that downloaded image/imageSmall files with
                # urllib and attached them as Django Files; removed as
                # dead code — restore from VCS history if image upload is
                # reinstated.
                if food.name not in foodsName:
                    foodsName.append(food.name)
                    food.save()