def add_pep_image(pep_number, path):
    """Attach the image at *path* (relative to ``settings.PEP_REPO_PATH``)
    to the Page backing PEP *pep_number*.

    Reuses an existing Image row when its stored path matches *path*,
    re-uploading the file if it has gone missing on disk; otherwise a new
    Image row is created.  Matching ``<img src>`` attributes in the page
    content are rewritten to the media location.

    Returns the Image instance, or None when the source file or the
    backing PEP page does not exist.
    """
    image_path = os.path.join(settings.PEP_REPO_PATH, path)
    if not os.path.exists(image_path):
        print("Image Path '{}' does not exist, skipping".format(image_path))
        return

    try:
        page = Page.objects.get(path=pep_url(pep_number))
    except Page.DoesNotExist:
        print("Could not find backing PEP {}".format(pep_number))
        return

    # Find existing images; we have to loop here as we can't use the ORM
    # to query against image__path.  `missing` holds an existing Image
    # whose backing file has disappeared from disk and must be recreated.
    found = False
    missing = None
    # Loop-invariant: where the stored copy of this image should live.
    image_root_path = os.path.join(settings.MEDIA_ROOT, page.path, path)
    for image in Image.objects.filter(page=page):
        if image.image.path.endswith(path):
            found = True
            # File is missing on disk — flag the row for re-upload.
            if not os.path.exists(image_root_path):
                missing = image
            break

    if not found or missing is not None:
        # Re-upload into the stale row when one exists, else make a new row.
        image = missing if missing is not None else Image(page=page)
        with open(image_path, 'rb') as image_obj:
            image.image.save(path, File(image_obj))
        image.save()

    # Old images used to live alongside html, but now they're in different
    # places, so update the page accordingly.  Bug fix: pass an explicit
    # parser — the original relied on BeautifulSoup's installation-dependent
    # default, which warns and can change the emitted markup.
    soup = BeautifulSoup(page.content.raw, 'lxml')
    for img_tag in soup.findAll('img'):
        if img_tag['src'] == path:
            img_tag['src'] = os.path.join(settings.MEDIA_URL, page.path, path)
    page.content.raw = str(soup)
    page.save()
    return image
def add_pep_image(pep_number, path):
    """Create or repair the Image record for *path* on the page backing
    PEP *pep_number*, then point the page's matching ``<img>`` tags at the
    media copy.

    Returns the Image instance, or None if the source file or the backing
    page cannot be found.
    """
    source_path = os.path.join(settings.PEP_REPO_PATH, path)
    if not os.path.exists(source_path):
        print("Image Path '{}' does not exist, skipping".format(source_path))
        return

    try:
        page = Page.objects.get(path=pep_url(pep_number))
    except Page.DoesNotExist:
        print("Could not find backing PEP {}".format(pep_number))
        return

    # The ORM can't filter on image__path, so scan the rows in Python.
    # `stale` remembers a matching row whose file is gone from disk.
    matched = False
    stale = None
    for image in Image.objects.filter(page=page):
        expected_on_disk = os.path.join(settings.MEDIA_ROOT, page.path, path)
        if image.image.path.endswith(path):
            matched = True
            if not os.path.exists(expected_on_disk):
                stale = image
            break

    if stale is not None or not matched:
        image = stale if stale is not None else Image(page=page)
        with open(source_path, 'rb') as fh:
            image.image.save(path, File(fh))
        image.save()

    # Images used to live alongside the HTML; rewrite the page so matching
    # <img> tags reference the media location instead.
    soup = BeautifulSoup(page.content.raw, 'lxml')
    media_src = os.path.join(settings.MEDIA_URL, page.path, path)
    for tag in soup.findAll('img'):
        if tag['src'] == path:
            tag['src'] = media_src
    page.content.raw = str(soup)
    page.save()
    return image
def post(self, request):
    """Handle the add/edit-listing form submission.

    If the posted item_id matches the one stashed in the session this is an
    edit of an existing listing; otherwise a new Params/Image pair is
    created.  On success the user is redirected to /listings/; on a form
    validation failure the listings template is re-rendered with the bound
    form and an error flag.
    """
    form = AddListingForm(request.POST, request.FILES)
    if form.is_valid():
        item_id = form.cleaned_data['item_id']
        # Edit path: only trusted when the submitted id matches the id we
        # previously put in this user's session.
        if item_id and 'item_id' in request.session and item_id == request.session['item_id']:
            parameters = Params.objects.get(item_owner_id=request.user.id, id=item_id)
            image = Image.objects.get(param_image_id=item_id)
            message_text = "Successfully Edited"
            # Consume the session marker so a resubmit creates a new listing.
            del request.session['item_id']
        else:
            message_text = "Successfully Added"
            parameters = Params()
            image = Image()
        # Copy all listing fields from the validated form (applies to both
        # the edit and the add path).
        parameters.price = form.cleaned_data['price']
        parameters.name = form.cleaned_data['name']
        parameters.item_owner_id = request.user.id
        parameters.subcategory = form.cleaned_data['subcategory']
        parameters.description = form.cleaned_data['description']
        parameters.address = form.cleaned_data['street_address']
        parameters.street = form.cleaned_data['street']
        parameters.city = form.cleaned_data['city']
        parameters.postal_code = form.cleaned_data['postal_code']
        parameters.state = form.cleaned_data['state']
        parameters.latitude = form.cleaned_data['latitude']
        parameters.longitude = form.cleaned_data['longitude']
        parameters.save()
        # The image row is only saved when the submitted filename matches
        # the one recorded in the session (presumably set by a prior upload
        # step — TODO confirm against the upload view).  Note: if the names
        # differ, 'image_filename' is left in the session.
        image_filename = ''
        if 'image_filename' in request.session:
            image_filename = request.session['image_filename']
        image_name = form.cleaned_data['image_file']
        if image_name == image_filename:
            image.image_name = image_name
            image.param_image_id = parameters.id
            image.save()
            del request.session['image_filename']
        messages.success(request, message_text)
        return HttpResponseRedirect('/listings/')
    else:
        # Invalid form: re-render with errors flagged for the template.
        context = {'form': form, 'val_error': 'true'}
        return render(request, 'accounts/listings.html', context)
def add_pep_image(artifact_path, pep_number, path):
    """Attach the image file at *path* (relative to *artifact_path*) to the
    Page backing PEP *pep_number*.

    Reuses an existing Image row whose stored name ends with *path*;
    otherwise creates one and uploads the file into media storage.  Any
    ``<img src="path">`` in the page content is rewritten to the stored
    image's URL.

    Returns the Image instance, or None when the source file or the
    backing PEP page does not exist.
    """
    image_path = os.path.join(artifact_path, path)
    if not os.path.exists(image_path):
        print("Image Path '{}' does not exist, skipping".format(image_path))
        return

    try:
        page = Page.objects.get(path=pep_url(pep_number))
    except Page.DoesNotExist:
        print("Could not find backing PEP {}".format(pep_number))
        return

    # Find existing images; we have to loop here as we can't use the ORM
    # to query against image__path.
    found = False
    for image in Image.objects.filter(page=page):
        if image.image.name.endswith(path):
            found = True
            break

    if not found:
        image = Image(page=page)
        with open(image_path, 'rb') as image_obj:
            image.image.save(path, File(image_obj))
        image.save()

    # Old images used to live alongside html, but now they're in different
    # places, so update the page accordingly.  `find_all` replaces the
    # deprecated camelCase `findAll` alias from the BeautifulSoup 3 API.
    soup = BeautifulSoup(page.content.raw, 'lxml')
    for img_tag in soup.find_all('img'):
        if img_tag['src'] == path:
            img_tag['src'] = image.image.url
    page.content.raw = str(soup)
    page.save()
    return image
def add_pep_image(artifact_path, pep_number, path):
    """Copy the PEP image at *path* under *artifact_path* into media
    storage for the page backing PEP *pep_number*, then rewrite the page's
    matching ``<img>`` tags to the stored image's URL.

    Returns the Image instance, or None when the file or page is absent.
    """
    image_path = os.path.join(artifact_path, path)
    if not os.path.exists(image_path):
        print(f"Image Path '{image_path}' does not exist, skipping")
        return

    try:
        page = Page.objects.get(path=pep_url(pep_number))
    except Page.DoesNotExist:
        print(f"Could not find backing PEP {pep_number}")
        return

    # The ORM cannot filter on image__path, so match in Python: take the
    # first existing Image whose stored name ends with `path`, if any.
    image = next(
        (candidate for candidate in Image.objects.filter(page=page)
         if candidate.image.name.endswith(path)),
        None,
    )
    if image is None:
        image = Image(page=page)
        with open(image_path, 'rb') as fh:
            image.image.save(path, File(fh))
        image.save()

    # Old images used to live alongside the HTML; point the page's <img>
    # tags at the stored image's media URL instead.
    soup = BeautifulSoup(page.content.raw, 'lxml')
    for img_tag in soup.findAll('img'):
        if img_tag['src'] == path:
            img_tag['src'] = image.image.url
    page.content.raw = str(soup)
    page.save()
    return image