def query_tile_items(self, context, tile_items, aggregate=True):
    """Collect tile objects below ``context`` into ``tile_items``.

    ``tile_items`` is mutated in place; nothing is returned.  Products
    are appended directly; for folders, either the folder itself (when
    it provides ``ILeadImage``) or its first contained product is used.

    :param context: container whose children are queried.
    :param tile_items: accumulator list, modified in place.
    :param aggregate: True for the top-level call.  Recursive calls pass
        False, which shuffles the children and stops after the first
        product found.
    """
    # NOTE(review): source was whitespace-collapsed; indentation below is
    # a reconstruction — confirm against original file.
    brains = query_children(context)
    if not aggregate:
        # materialize the (possibly lazy) catalog result so it can be
        # shuffled in place
        brains = [brain for brain in brains]
        shuffle(brains)
    for brain in brains:
        if brain.portal_type in PRODUCT_TYPES:
            tile_items.append(brain.getObject())
            if not aggregate:
                # recursive call: first product found wins
                return
            continue
        if brain.portal_type in FOLDER_TYPES:
            obj = brain.getObject()
            if ILeadImage.providedBy(obj):
                # folder has its own lead image — use the folder itself
                tile_items.append(obj)
                continue
            # look for first item in Folder
            count = len(tile_items)
            self.query_tile_items(brain.getObject(), tile_items,
                                  aggregate=False)
            # case multi level folder structure
            if len(tile_items) > count + 1:
                # keep only the first item the recursion found
                del tile_items[count + 1:]
            else:
                # NOTE(review): this also appends the folder when the
                # recursion found exactly one item — looks suspicious,
                # confirm intended behavior.
                tile_items.append(obj)
def __call__(self):
    """Store an uploaded file on the media container.

    Images become ``Image`` content (the first one may also be set as
    the context's lead image); everything else becomes ``File`` content
    linked via ``related_attachments``.  Returns a JSON status string.
    """
    upload = self.request.get('file')
    content_type = upload.headers.get('content-type', '')
    payload = upload.read()
    name = safe_unicode(upload.filename)
    container = self.get_media_container()
    behavior = IRelatedMedia(self.context)
    if content_type.startswith("image/"):
        blob = NamedBlobImage(data=payload, filename=name)
        img = createContentInContainer(
            container, "Image", image=blob)
        # safe image as leadImage if none exists
        lead_capable = ILeadImage.providedBy(self.context)
        if lead_capable and ILeadImage(self.context).image is None:
            ILeadImage(self.context).image = blob
        else:
            relations = list(behavior.related_images or [])
            relations.append(RelationValue(self.intids.getId(img)))
            behavior.related_images = relations
    else:
        blob = NamedBlobFile(data=payload, filename=name)
        attachment = createContentInContainer(container, "File", file=blob)
        relations = list(behavior.related_attachments or [])
        relations.append(RelationValue(self.intids.getId(attachment)))
        behavior.related_attachments = relations
    return json.dumps(dict(
        status=u"done",
    ))
def handle_storage(self):
    """Persist document-conversion output.

    If the context supports ``ILeadImage``, set its lead image from the
    first ``dump_1.*`` preview in the ``large`` dump folder.  Then, for
    Blob storage, move every generated file (image sizes, optional text
    index files, converted PDF) into ``settings.blob_files`` and remove
    the temp directory; otherwise drop stale blob data left from a
    previous Blob configuration.

    Fixes vs. original:
    * dump file is opened in binary mode via a context manager — text
      mode can corrupt image bytes and the handle leaked on error;
    * a missing ``dump_1.*`` file no longer crashes with
      ``os.path.join(path, None)`` — the lead-image step is skipped.
    """
    gsettings = self.gsettings
    storage_dir = self.storage_dir
    settings = self.settings
    context = self.context
    # save lead image if available
    if ILeadImage.providedBy(self.context):
        path = os.path.join(storage_dir, 'large')
        filename = None
        for dump_filename in os.listdir(path):
            if dump_filename.startswith('dump_1.'):
                filename = dump_filename
                break
        if filename is not None:  # no preview dump -> skip lead image
            filepath = os.path.join(path, filename)
            tmppath = '%s.tmp' % (filepath)
            # NamedBlobImage eventually calls blob.consume,
            # destroying the image, so we need to make a temporary copy.
            shutil.copyfile(filepath, tmppath)
            with open(tmppath, 'rb') as fi:
                self.context.image = NamedBlobImage(
                    fi, filename=filename.decode('utf8'))
    if self.gsettings.storage_type == 'Blob':
        logger.info('setting blob data for %s' % repr(context))
        # go through temp folder and move items into blob storage
        files = OOBTree()
        for size in ('large', 'normal', 'small'):
            path = os.path.join(storage_dir, size)
            for fname in os.listdir(path):
                filepath = os.path.join(path, fname)
                files['%s/%s' % (size, fname)] = saveFileToBlob(filepath)
        if self.settings.enable_indexation:
            textfilespath = os.path.join(storage_dir, TEXT_REL_PATHNAME)
            for fname in os.listdir(textfilespath):
                filepath = os.path.join(textfilespath, fname)
                key = '%s/%s' % (TEXT_REL_PATHNAME, fname)
                files[key] = saveFileToBlob(filepath)
        # Store converted PDF
        dump_pdf_path = os.path.join(storage_dir, DUMP_FILENAME)
        files['pdf/%s' % DUMP_FILENAME] = saveFileToBlob(dump_pdf_path)
        settings.blob_files = files
        shutil.rmtree(storage_dir)
        # check for old storage to remove... Just in case.
        old_storage_dir = os.path.join(gsettings.storage_location,
                                       context.UID())
        if os.path.exists(old_storage_dir):
            shutil.rmtree(old_storage_dir)
    else:
        # if settings used to be blob, delete file data
        if settings.storage_type == 'Blob' and settings.blob_files:
            del settings._metadata['blob_files']
def handle_storage(self):
    """Persist document-conversion output.

    If the context supports ``ILeadImage``, set its lead image from the
    first ``dump_1.*`` preview in the ``large`` dump folder.  Then, for
    Blob storage, move every generated file (image sizes, optional text
    index files, converted PDF) into ``settings.blob_files`` and remove
    the temp directory; otherwise drop stale blob data left from a
    previous Blob configuration.

    Fixes vs. original:
    * dump file is opened in binary mode via a context manager — text
      mode can corrupt image bytes and the handle leaked on error;
    * a missing ``dump_1.*`` file no longer crashes with
      ``os.path.join(path, None)`` — the lead-image step is skipped.
    """
    gsettings = self.gsettings
    storage_dir = self.storage_dir
    settings = self.settings
    context = self.context
    # save lead image if available
    if ILeadImage.providedBy(self.context):
        path = os.path.join(storage_dir, 'large')
        filename = None
        for dump_filename in os.listdir(path):
            if dump_filename.startswith('dump_1.'):
                filename = dump_filename
                break
        if filename is not None:  # no preview dump -> skip lead image
            filepath = os.path.join(path, filename)
            tmppath = '%s.tmp' % (filepath)
            # NamedBlobImage eventually calls blob.consume,
            # destroying the image, so we need to make a temporary copy.
            shutil.copyfile(filepath, tmppath)
            with open(tmppath, 'rb') as fi:
                self.context.image = NamedBlobImage(
                    fi, filename=filename.decode('utf8'))
    if self.gsettings.storage_type == 'Blob':
        logger.info('setting blob data for %s' % repr(context))
        # go through temp folder and move items into blob storage
        files = OOBTree()
        for size in ('large', 'normal', 'small'):
            path = os.path.join(storage_dir, size)
            for fname in os.listdir(path):
                filepath = os.path.join(path, fname)
                files['%s/%s' % (size, fname)] = saveFileToBlob(filepath)
        if self.settings.enable_indexation:
            textfilespath = os.path.join(storage_dir, TEXT_REL_PATHNAME)
            for fname in os.listdir(textfilespath):
                filepath = os.path.join(textfilespath, fname)
                key = '%s/%s' % (TEXT_REL_PATHNAME, fname)
                files[key] = saveFileToBlob(filepath)
        # Store converted PDF
        dump_pdf_path = os.path.join(storage_dir, DUMP_FILENAME)
        files['pdf/%s' % DUMP_FILENAME] = saveFileToBlob(dump_pdf_path)
        settings.blob_files = files
        shutil.rmtree(storage_dir)
        # check for old storage to remove... Just in case.
        old_storage_dir = os.path.join(gsettings.storage_location,
                                       context.UID())
        if os.path.exists(old_storage_dir):
            shutil.rmtree(old_storage_dir)
    else:
        # if settings used to be blob, delete file data
        if settings.storage_type == 'Blob' and settings.blob_files:
            del settings._metadata['blob_files']
def addILeadImageInterface(context, event):
    """Event handler that keeps the ``ILeadImage`` marker interface in
    sync with the presence of a ``related_image`` attribute.

    Adds the marker when an image is present, removes it when not; does
    nothing when the object has no acquisition wrapper.
    """
    obj = getattr(context, 'aq_explicit', None)
    if not obj:
        return
    related = getattr(obj, 'related_image', False)
    if not related:
        noLongerProvides(obj, ILeadImage)
    elif not ILeadImage.providedBy(obj):
        alsoProvides(obj, ILeadImage)
def _album_results(self):
    """Split the catalog results into album images and subalbum folders.

    Returns a dict with two lists of brains: ``images`` (objects that
    provide IImage or ILeadImage) and ``folders`` (objects providing
    IFolder).
    """
    images, folders = [], []
    for brain in self.results():
        # TODO: potentially expensive!
        obj = brain.getObject()
        if IFolder.providedBy(obj):
            folders.append(brain)
        elif IImage.providedBy(obj) or ILeadImage.providedBy(obj):
            images.append(brain)
    return {'images': images, 'folders': folders}
def query_tile_items(self, context, tile_items, aggregate=True):
    """Recursively collect tile objects below ``context``.

    ``tile_items`` is mutated in place.  Products and product groups are
    appended directly; a Folder contributes itself when it has a lead
    image, otherwise its first contained product (found by a shuffled,
    non-aggregating recursion).
    """
    brains = list(query_children(context))
    if not aggregate:
        shuffle(brains)
    product_types = (
        'bda.plone.productshop.productgroup',
        'bda.plone.productshop.product',
    )
    for brain in brains:
        if brain.portal_type in product_types:
            tile_items.append(brain.getObject())
            if not aggregate:
                # recursive call wants a single item only
                return
        elif brain.portal_type == 'Folder':
            folder = brain.getObject()
            if ILeadImage.providedBy(folder) and ILeadImage(folder).image:
                tile_items.append(folder)
            else:
                before = len(tile_items)
                self.query_tile_items(folder, tile_items, aggregate=False)
                # case multi level folder structure
                if len(tile_items) > before + 1:
                    del tile_items[before + 1:]
def body_tag(self):
    """Return an ``<img>`` tag for the banner image, or None.

    Walks the (currently single-element) context chain, skipping
    blacklisted types and non-ILeadImage objects, and returns the first
    non-empty scaled image tag.
    """
    inherit_images = False  # toggle: walk the acquisition chain instead
    banner_width = 1420
    banner_height = 300
    context = aq_inner(self.context)
    # maybe a smaller banner on mosaic frontpage
    if context.getLayout() == 'layout_view':
        # banner_height = 100
        pass
    # do we want inheritance?
    chain = context.aq_chain if inherit_images else [context]
    for candidate in chain:
        if IPloneSiteRoot.providedBy(candidate):
            break
        if getattr(candidate, 'portal_type', '') in BLACKLIST:
            continue
        if not ILeadImage.providedBy(candidate):
            continue
        image = getattr(candidate, 'image', None)
        if image and image.getSize():
            scales = getMultiAdapter((candidate, self.request),
                                     name='images')
            scale = scales.scale(
                'image',
                width=banner_width,
                height=banner_height,
                direction='down'
            )
            tag = scale.tag() if scale else ''
            if tag:
                return tag
    # an alternative would be a fallback image
    return None
def update(self):
    """Set ``self.available``: True only when the parent has a non-empty
    ``image`` attribute and provides ``ILeadImage``."""
    parent = aq_parent(self.context)
    has_image = getattr(parent, 'image', False)
    self.available = ILeadImage.providedBy(parent) if has_image else False
def images(self):
    """Build the image gallery data for the related-media viewlet.

    Returns a list of dicts (``url``, ``tag``, ``caption``,
    ``show_caption``, ``title``): the first related image — or the lead
    image as a fallback when no related images exist — rendered at
    ``first_image_scale``, then every further related image at
    ``preview_scale``.  Returns None when the behavior is unavailable.
    """
    rm_behavior = self.behavior
    if not rm_behavior:
        return
    context = aq_inner(self.context)
    imgs = get_related_media(context, portal_type="Image")
    show_caption = rm_behavior.show_titles_as_caption
    first_img_scales = None
    first_img_caption = ""
    further_images = []
    gallery = []
    if len(imgs):
        first_img = imgs[0]
        if first_img:
            first_img_scales = first_img.restrictedTraverse("@@images")
            first_img_caption = first_img.Title()
            further_images = imgs[1:]
    elif rm_behavior.include_leadimage and ILeadImage.providedBy(context):
        # include leadimage if no related images are defined
        first_img_scales = context.restrictedTraverse("@@images")
        first_img_caption = ILeadImage(context).image_caption
    if first_img_scales:
        # direction flag truthy -> crop ("down"), else fit ("thumbnail")
        scale = first_img_scales.scale(
            "image",
            scale=rm_behavior.first_image_scale,
            direction=rm_behavior.first_image_scale_direction
            and "down"
            or "thumbnail",
        )
        if scale:
            # the large scale is used as the lightbox/gallery target URL
            large_scale_url = first_img_scales.scale("image", scale="large").url
            gallery.append(
                dict(
                    url=large_scale_url,
                    tag=scale.tag(
                        title=first_img_caption,
                        alt=first_img_caption,
                        css_class="img-fluid",
                    ),
                    caption=first_img_caption,
                    show_caption=show_caption,
                    title=first_img_caption,
                )
            )
    for img in further_images:
        if img:
            scales = img.restrictedTraverse("@@images")
            scale = scales.scale(
                "image",
                scale=rm_behavior.preview_scale,
                direction=rm_behavior.preview_scale_direction
                and "down"
                or "thumbnail",
            )
            if scale:
                large_scale_url = scales.scale("image", scale="large").url
                gallery.append(
                    dict(
                        url=large_scale_url,
                        tag=scale.tag(css_class="img-fluid"),
                        caption=img.Title(),
                        show_caption=show_caption,
                        title=img.Title(),
                    )
                )
    return gallery
def content_has_leadimage(self):
    """Whether the resolved content context provides ``ILeadImage``.

    Short-circuits on a falsy ``content_context`` (returning it as-is),
    matching ``a and b`` semantics.
    """
    ctx = self.content_context
    if not ctx:
        return ctx
    return ILeadImage.providedBy(ctx)
def images(self):
    """Build the related-media image gallery for the context.

    This variant prefers the lead image (when ``include_leadimage`` is
    enabled) as the first, large-scaled entry; otherwise the first
    related image takes that slot.  All remaining related images are
    rendered at ``preview_scale``.  Returns a list of dicts with keys
    ``url``, ``tag``, ``caption`` and ``title``.
    """
    context = aq_inner(self.context)
    if IOccurrence.providedBy(context):
        # support for related images on event occurrences
        context = context.aq_parent
    rm_behavior = IRelatedMedia(context)
    imgs = rm_behavior.related_images or []
    tcap = rm_behavior.show_titles_as_caption
    first_img_scales = None
    further_images = []
    gallery = []
    if rm_behavior.include_leadimage and ILeadImage.providedBy(context):
        # lead image wins the first slot; all related images follow
        first_img_scales = context.restrictedTraverse('@@images')
        first_img_caption = ILeadImage(context).image_caption
        further_images = imgs
    elif len(imgs):
        first_img = imgs[0]
        # related_images holds RelationValues; resolve to the object
        first_img_obj = first_img.to_object
        if first_img_obj:
            first_img_scales = first_img_obj.restrictedTraverse(
                '@@images')
            first_img_caption = tcap and first_img_obj.Title() or u''
            further_images = imgs[1:]
    if first_img_scales:
        # direction flag truthy -> crop ("down"), else fit ("thumbnail")
        scale = first_img_scales.scale(
            'image',
            scale=rm_behavior.first_image_scale,
            direction=rm_behavior.first_image_scale_direction
            and 'down' or 'thumbnail')
        if scale:
            # large scale serves as the lightbox/gallery target URL
            large_scale_url = first_img_scales.scale(
                'image', scale='large').url
            gallery.append(dict(
                url=large_scale_url,
                tag=scale.tag(
                    title=first_img_caption,
                    alt=first_img_caption,
                ),
                caption=tcap and first_img_caption or u'',
                title=first_img_caption,
            ))
    for img in further_images:
        img_obj = img.to_object
        if img_obj:
            scales = img_obj.restrictedTraverse('@@images')
            scale = scales.scale(
                'image',
                scale=rm_behavior.preview_scale,
                direction=rm_behavior.preview_scale_direction
                and 'down' or 'thumbnail')
            if scale:
                large_scale_url = scales.scale('image', scale='large').url
                gallery.append(dict(
                    url=large_scale_url,
                    tag=scale.tag(),
                    caption=tcap and img_obj.Title() or u'',
                    title=img_obj.Title(),
                ))
    return gallery
def handle_storage(self):
    """Persist document-conversion output (py2/py3-compatible variant).

    Sets the lead image from the first file (sorted) in the ``large``
    dump folder, then for Blob storage moves all generated files into
    ``settings.blob_files`` and removes the temp directory; otherwise
    drops stale blob data from a previous Blob configuration.
    """
    gsettings = self.gsettings
    storage_dir = self.storage_dir
    settings = self.settings
    context = self.context
    # save lead image if available
    if ILeadImage.providedBy(self.context):
        path = os.path.join(storage_dir, u'large')
        # NOTE(review): raises IndexError if the dump folder is empty —
        # presumably the converter guarantees at least one file; confirm.
        filename = os.listdir(path)
        filename.sort()
        filename = filename[0]
        filepath = os.path.join(path, filename)
        tmppath = '%s.tmp' % (filepath)
        # NamedBlobImage eventually calls blob.consume,
        # destroying the image, so we need to make a temporary copy.
        shutil.copyfile(filepath, tmppath)
        NamedBlobImagefailed = False
        with open(tmppath, 'rb') as fi:
            try:
                self.context.image = NamedBlobImage(fi, filename=filename)
            except Exception:
                # broad catch is deliberate: py2 needs the filename
                # decoded, handled by the retry below
                NamedBlobImagefailed = True
        # If we are using python2 we need to recreate the file and try again
        if NamedBlobImagefailed:
            # the first attempt consumed the tmp copy, so recreate it
            shutil.copyfile(filepath, tmppath)
            with open(tmppath, 'rb') as fi:
                self.context.image = NamedBlobImage(
                    fi, filename=filename.decode("utf8"))
    if self.gsettings.storage_type == 'Blob':
        logger.info('setting blob data for %s' % repr(context))
        # go through temp folder and move items into blob storage
        files = OOBTree()
        for size in (u'large', u'normal', u'small'):
            path = os.path.join(storage_dir, size)
            for file in os.listdir(path):
                # keys are relative paths like "large/dump_1.png"
                filename = '%s/%s' % (size, file)
                filepath = os.path.join(path, file)
                files[filename] = saveFileToBlob(filepath)
        if self.settings.enable_indexation:
            textfilespath = os.path.join(storage_dir, TEXT_REL_PATHNAME)
            for filename in os.listdir(textfilespath):
                filepath = os.path.join(textfilespath, filename)
                filename = '%s/%s' % (TEXT_REL_PATHNAME, filename)
                files[filename] = saveFileToBlob(filepath)
        # Store converted PDF
        dump_pdf_path = os.path.join(storage_dir, DUMP_FILENAME)
        filename = 'pdf/%s' % DUMP_FILENAME
        files[filename] = saveFileToBlob(dump_pdf_path)
        settings.blob_files = files
        shutil.rmtree(storage_dir)
        # check for old storage to remove... Just in case.
        old_storage_dir = os.path.join(gsettings.storage_location,
                                       context.UID())
        if os.path.exists(old_storage_dir):
            shutil.rmtree(old_storage_dir)
    else:
        # if settings used to be blob, delete file data
        if settings.storage_type == 'Blob' and settings.blob_files:
            del settings._metadata['blob_files']