def handle_path(self, path):
    logger.debug("Trying path {}".format(path))
    thumbnail_info = recognize_thumbnail(path)
    # If it is not a thumbnail, or the path already exists, do nothing
    if not thumbnail_info:
        logger.debug(
            "Path {} does not match thumbnail pattern".format(path))
        return
    if os.path.exists(path):
        logger.debug("Path {} already exists".format(path))
        return
    logger.debug("Handling thumbnail {}".format(path))
    # If we got this far then we have a thumbnail to generate, so
    # create the final directory
    thumbnail_dir = os.path.dirname(path)
    logger.debug("Thumbnail dir: {}".format(thumbnail_dir))
    if not os.path.exists(thumbnail_dir):
        logger.debug("Creating thumbnail dir: {}".format(thumbnail_dir))
        mkdir_p(thumbnail_dir)
    original_path = thumbnail_to_original_path(path)
    try:
        image = Image.open(original_path)
        thumbnail = self._resize(image, thumbnail_info.group("spec"))
        thumbnail.save(path)
        logger.info("Generated thumbnail {}".format(
            os.path.basename(path)))
    except IOError as e:
        logger.error("Generating thumbnail for {} skipped: {}".format(
            os.path.basename(path), str(e)))
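# `recognize_thumbnail` and `thumbnail_to_original_path` are used above but
# not defined here. A hypothetical sketch of the contract handle_path()
# appears to assume: the match exposes a "spec" group, and the thumbnail
# path maps back to the original image. The pattern and names below are
# illustrative assumptions, not the plugin's actual code.
import re

THUMBNAIL_RE = re.compile(r"/thumbnails/(?P<spec>[^/]+)/")  # assumed layout

def recognize_thumbnail(path):
    # Return a match object exposing group("spec"), or None if `path`
    # is not a thumbnail path
    return THUMBNAIL_RE.search(path)

def thumbnail_to_original_path(path):
    # Drop the /thumbnails/<spec>/ segment to recover the source image path
    return THUMBNAIL_RE.sub("/", path)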
def generate_rss_feed(self):
    feed = Rss201rev2Feed(
        title='Latest Linux Kernel Versions',
        link='http://www.kernel.org',
        feed_url='http://www.kernel.org/feeds/kdist.xml',
        description='Latest Linux Kernel Versions',
        creator='FTP Admin <*****@*****.**>'
    )

    for entry in self.current_releases:
        (label, release, iseol, timestamp, isodate, source, sign,
         patch, incr, changelog, gitweb, diffview) = entry

        if iseol:
            eol = ' (EOL)'
        else:
            eol = ''

        contents = '''
        <table>
          <tr><th align="right">Version:</th><td><strong>%s%s</strong> (%s)</td></tr>
          <tr><th align="right">Released:</th><td>%s</td></tr>
        ''' % (release, eol, label, isodate)

        if source:
            contents += '''
          <tr><th align="right">Source:</th><td><a href="%s">%s</a></td></tr>''' % (
                source, os.path.basename(source))
        if sign:
            contents += '''
          <tr><th align="right">PGP Signature:</th><td><a href="%s">%s</a></td></tr>''' % (
                sign, os.path.basename(sign))
        if patch:
            contents += '''
          <tr><th align="right">Patch:</th><td><a href="%s">full</a>''' % patch
            if incr:
                contents += ''' (<a href="%s">incremental</a>)''' % incr
            contents += '''</td></tr>'''
        if changelog:
            contents += '''
          <tr><th align="right">ChangeLog:</th><td><a href="%s">%s</a></td></tr>''' % (
                changelog, os.path.basename(changelog))

        contents += '''
        </table>'''

        feed.add_item(
            title='%s: %s' % (release, label),
            link='http://www.kernel.org/',
            unique_id='kernel.org,%s,%s,%s' % (label, release, isodate),
            description=contents,
            pubdate=datetime.datetime.fromtimestamp(timestamp)
        )

    # We really should be generating after the site is done,
    # but I'm too lazy to figure out the plugin hooks for that
    utils.mkdir_p(os.path.dirname(self.rss_path))
    with open(self.rss_path, 'w') as fp:
        feed.write(fp, 'utf-8')
def generate_output(self, writer):
    # copy all Static files
    for sc in self.autostatic_files:
        source_path = os.path.join(self.path, sc.source_path)
        save_as = os.path.join(self.output_path, sc.save_as)
        mkdir_p(os.path.dirname(save_as))
        shutil.copy2(source_path, save_as)
        logger.info('Copying %s to %s', sc.source_path, sc.save_as)
def generate_output(self, writer):
    self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
                     'theme', self.output_path, '.')
    # copy all StaticContent files
    for sc in self.staticfiles:
        mkdir_p(os.path.dirname(sc.save_as))
        shutil.copy(sc.filepath, sc.save_as)
        logger.info('copying %s to %s', sc.filepath, sc.save_as)
def generate_output(self, writer):
    # copy all Static files
    for sc in self.context['staticfiles']:
        source_path = os.path.join(self.path, sc.source_path)
        save_as = os.path.join(self.output_path, sc.save_as)
        mkdir_p(os.path.dirname(save_as))
        shutil.copy(source_path, save_as)
        logger.info('copying {} to {}'.format(sc.source_path, sc.save_as))
def write_post(output_post_dir, ctx, post):
    post_dir = os.path.join(output_post_dir, post.slug)
    mkdir_p(post_dir)
    output_path = os.path.join(post_dir, 'index.html')
    output = render_post(ctx, post)
    logger.info("Writing post to path %s", output_path)
    with open(output_path, 'w') as fd:
        fd.write(output)
    return post
def generate_output(self, writer):
    self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
                     'theme', self.output_path, os.curdir)
    # copy all Static files
    for sc in self.staticfiles:
        source_path = os.path.join(self.path, sc.source_path)
        save_as = os.path.join(self.output_path, sc.save_as)
        mkdir_p(os.path.dirname(save_as))
        shutil.copy(source_path, save_as)
        logger.info('copying {} to {}'.format(sc.source_path, sc.save_as))
def save_cache(self):
    """Save the updated cache"""
    if self._cache_data_policy:
        try:
            mkdir_p(self.settings['CACHE_PATH'])
            with self._cache_open(self._cache_path, 'wb') as fhandle:
                pickle.dump(self._cache, fhandle)
        except (IOError, OSError, pickle.PicklingError) as err:
            logger.warning('Could not save cache %s\n ... %s',
                           self._cache_path, err)
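# The `_cache_open` callable above is typically plain `open` or `gzip.open`,
# selected by a setting. A hedged sketch of the matching load-side method,
# assuming the same pickle format (it mirrors, but is not necessarily
# identical to, Pelican's FileDataCacher):
import pickle

def load_cache(self):
    """Load the pickled cache from disk, falling back to an empty dict."""
    try:
        with self._cache_open(self._cache_path, 'rb') as fhandle:
            self._cache = pickle.load(fhandle)
    except (IOError, OSError, pickle.UnpicklingError):
        self._cache = {}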
def generate_output(self, writer):
    self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
                     self.settings['THEME_STATIC_DIR'], self.output_path,
                     os.curdir)
    # copy all Static files
    for sc in self.context['staticfiles']:
        source_path = os.path.join(self.path, sc.source_path)
        save_as = os.path.join(self.output_path, sc.save_as)
        mkdir_p(os.path.dirname(save_as))
        shutil.copy2(source_path, save_as)
        logger.info('Copying %s to %s', sc.source_path, sc.save_as)
def write_site(ctx):
    output_post_dir = os.path.join(SITE_DIR, 'posts')
    clean_output_dir(SITE_DIR, [])
    copy(SITE_ROOT_DIR, SITE_DIR)
    mkdir_p(output_post_dir)
    write_post_to_dir = functools.partial(write_post, output_post_dir, ctx)
    # Python 2 lazy iterators; on Python 3 use the built-in map() and filter()
    posts = itertools.imap(write_post_to_dir, get_posts())
    posts = itertools.ifilter(lambda x: x.get('published'), posts)
    posts = sorted(list(posts), key=lambda x: x.get('date'), reverse=True)
    write_index(SITE_DIR, ctx, posts)
    write_feed(SITE_DIR, ctx, posts)
def update_author(article_generator, author):
    """Updates the Author metadata objects with extra information."""
    settings = article_generator.settings
    author_metadata = settings.get("AUTHOR_METADATA", {})
    author_dict = author_metadata.get(author.name, {})
    author.tagline = author_dict.get("tagline")
    author.discourse = author_dict.get("discourse")
    author.discourse_url = ("https://mixxx.discourse.group/u/{}/".format(
        author.discourse) if author.discourse else "")
    author.facebook = author_dict.get("facebook")
    author.facebook_url = ("https://www.facebook.com/{}/".format(
        author.facebook) if author.facebook else "")
    author.github = author_dict.get("github")
    author.github_url = ("https://github.com/{}".format(author.github)
                         if author.github else "")
    author.twitter = author_dict.get("twitter")
    author.twitter_url = ("https://twitter.com/{}/".format(author.twitter)
                          if author.twitter else "")
    author.email = author_dict.get("email")
    author.avatar_url = None

    if not author.github or "images" not in settings["STATIC_PATHS"]:
        return

    path = os.path.join(article_generator.path, "images", "avatars")
    avatar_path = os.path.join(path, f"{author.github}.png")
    if not os.path.exists(avatar_path):
        logger.warning(
            "Missing avatar for author '%s', expected %s",
            author.name,
            avatar_path,
        )
        logger.debug(
            "Downloading missing avatar for GitHub user %s...",
            author.github,
        )
        try:
            data = download_avatar(author.github)
        except urllib.error.URLError:
            logger.warning("Download of avatar failed, skipping...")
            return
        mkdir_p(path)
        with open(avatar_path, "w+b") as fp:
            fp.write(data)
        logger.warning("Downloaded missing avatar to: %s", avatar_path)

    author.avatar_url = path_to_url(
        os.path.relpath(avatar_path, article_generator.path))
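# `download_avatar` is referenced above but not defined. A minimal sketch,
# assuming it fetches the user's avatar over HTTP via the github.com/<user>.png
# redirect (the endpoint the real helper uses may differ):
import urllib.request

def download_avatar(github_user):
    # github.com/<user>.png redirects to the user's current avatar image
    url = "https://github.com/{}.png".format(github_user)
    with urllib.request.urlopen(url) as response:
        return response.read()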
def generate_output(self, writer=None):
    '''Generate redirect files'''
    logger.info('Generating permalink files in %r',
                self.permalink_output_path)
    clean_output_dir(self.permalink_output_path, [])
    mkdir_p(self.permalink_output_path)
    path = os.path.dirname(os.path.realpath(__file__))
    env = Environment(loader=FileSystemLoader(path))
    template = env.get_template('permalink.html')

    settings = self.settings.copy()
    if settings.get('RELATIVE_URLS', False):
        settings['SITEURL'] = path_to_url(
            get_relative_path(
                os.path.join(settings['PERMALINK_PATH'], 'dummy.html')))

    with open(os.path.join(self.permalink_output_path, '.htaccess'),
              'w') as redirect_file:
        for content in itertools.chain(self.context['articles'],
                                       self.context['pages']):
            for permalink_id in content.get_permalink_ids_iter():
                relative_permalink_path = os.path.join(
                    self.settings['PERMALINK_PATH'], permalink_id) + '.html'
                permalink_path = os.path.join(self.output_path,
                                              relative_permalink_path)
                localcontext = settings.copy()
                localcontext['content'] = content
                localcontext['page'] = content
                # template.render() returns str; encode it before writing
                # to the binary-mode file
                with open(permalink_path, 'wb') as f:
                    f.write(template.render(**localcontext).encode('utf-8'))
                signals.content_written.send(permalink_path,
                                             context=localcontext)
                redirect_file.write(
                    'Redirect permanent "/{relative_permalink_path}" '
                    '"{url}"\n'.format(
                        url=article_url(content),
                        relative_permalink_path=relative_permalink_path,
                    ))
def convert_pdfs(pelican):
    """
    Create the PNGs from the original PDF, PS, and EPS files, placing
    them in the appropriate location in the output directory.
    """
    for path in pdf_imgs:
        outpath = os.path.join(pelican.output_path, pdf_imgs[path])
        mkdir_p(os.path.dirname(outpath))
        try:
            # The '[0]' suffix selects the first page of the document
            with Image(filename=os.path.join(pelican.path, path) + '[0]',
                       resolution=200) as img:
                img.format = 'png'
                img.save(filename=outpath)
            logger.info('Creating PNG preview of %s as %s',
                        path, pdf_imgs[path])
        except BlobError:
            logger.warning('Could not create PNG preview of `{}`'.format(path))
def generate_context(self):
    for f in self.get_files(self.settings.get('CONVERT_PATHS', [])):
        static_file = self.readers.read_file(
            base_path=self.path, path=f, content_class=Static,
            fmt='static', context=self.context)
        match_ext = [src_ext for (src_ext, rule) in self.rules
                     if str(f).endswith(src_ext)]
        if len(match_ext) != 1 or match_ext[0] != '':
            source_path = os.path.join(self.path, static_file.source_path)
            temp_path = mkdtemp()
            dst_path = os.path.join(self.output_path,
                                    os.path.dirname(static_file.save_as))
            mkdir_p(dst_path)
            content = self.cache.get_cached_data(source_path)
            if content:
                # Cache hit: unpack the cached zip into the temp dir
                with open(temp_path + "/tmp.zip", "wb") as zfile:
                    zfile.write(content)
                with ZipFile(temp_path + "/tmp.zip", "r") as zfile:
                    zfile.extractall(temp_path)
                os.remove(temp_path + "/tmp.zip")
            else:
                # Cache miss: run the conversion rule, then zip the result
                # so it can be cached as a single blob
                src_ext, rule = [(src_ext, rule)
                                 for (src_ext, rule) in self.rules
                                 if str(source_path).endswith(src_ext)][0]
                call(rule.format(
                    src=source_path,
                    src_path=os.path.dirname(source_path),
                    basename=os.path.basename(source_path).replace(src_ext, ''),
                    dst_path=temp_path + '/'), shell=True)
                zf = ZipFile(temp_path + "/tmp.zip", "w")
                for dirname, subdirs, files in os.walk(temp_path):
                    for filename in files:
                        if filename == "tmp.zip":
                            continue
                        zf.write(os.path.join(dirname, filename), filename)
                zf.close()
                with open(temp_path + "/tmp.zip", "rb") as zfile:
                    content = zfile.read()
                os.remove(temp_path + "/tmp.zip")
                self.cache.cache_data(source_path, content)
            copy(temp_path, dst_path)
            rmtree(temp_path)
        else:
            # No conversion rule matched; copy the file through unchanged
            source_path = os.path.join(self.path, static_file.source_path)
            save_as = os.path.join(self.output_path, static_file.save_as)
            mkdir_p(os.path.dirname(save_as))
            shutil.copy2(source_path, save_as)
        self.add_source_path(static_file)
    self.cache.save_cache()
def generate_output(self, writer=None):
    '''Generate redirect files'''
    logger.info('Generating permalink files in %r',
                self.permalink_output_path)
    clean_output_dir(self.permalink_output_path, [])
    mkdir_p(self.permalink_output_path)
    for content in itertools.chain(self.context['articles'],
                                   self.context['pages']):
        for permalink_id in content.get_permalink_ids_iter():
            permalink_path = os.path.join(self.permalink_output_path,
                                          permalink_id) + '.html'
            redirect_string = REDIRECT_STRING.format(
                url=article_url(content), title=content.title)
            with open(permalink_path, 'w') as f:
                f.write(redirect_string)
def generate_output(self, writer=None):
    """Generate redirect files"""
    logger.info("Generating permalink files in %r",
                self.permalink_output_path)
    clean_output_dir(self.permalink_output_path, [])
    mkdir_p(self.permalink_output_path)
    for content in itertools.chain(self.context["articles"],
                                   self.context["pages"]):
        for permalink_id in content.get_permalink_ids_iter():
            permalink_path = (
                os.path.join(self.permalink_output_path, permalink_id)
                + ".html"
            )
            redirect_string = REDIRECT_STRING.format(
                url=article_url(content), title=content.title
            )
            with open(permalink_path, "w") as f:
                f.write(redirect_string)
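# REDIRECT_STRING is not defined in the two snippets above. A plausible
# sketch, assuming a plain HTML meta-refresh redirect page (the actual
# template may differ):
REDIRECT_STRING = """<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <meta http-equiv="refresh" content="0; url={url}">
    <link rel="canonical" href="{url}">
    <title>{title}</title>
  </head>
  <body>
    <p>This page has moved to <a href="{url}">{title}</a>.</p>
  </body>
</html>
"""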
def convert_pdfs(pelican):
    """
    Create the PNGs from the original PDF, PS, and EPS files, placing
    them in the appropriate location in the output directory.
    """
    with Color("white") as white:
        for path in pdf_imgs:
            outpath = os.path.join(pelican.output_path, pdf_imgs[path])
            mkdir_p(os.path.dirname(outpath))
            try:
                # The "[0]" suffix selects the first page of the document
                with Image(
                    filename=os.path.join(pelican.path, path) + "[0]",
                    resolution=100,
                    background=white,
                ) as img:
                    img.format = "png"
                    img.save(filename=outpath)
                logger.info("Creating PNG preview of %s as %s",
                            path, pdf_imgs[path])
            except BlobError:
                logger.warning("Could not create PNG preview of `{}`".format(path))
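# `pdf_imgs` above is a module-level mapping from source document paths to
# output PNG paths; its definition is not shown. A hypothetical example of
# its shape (paths are illustrative only):
pdf_imgs = {
    'figures/diagram.pdf': 'images/diagram.png',
    'figures/plot.eps': 'images/plot.png',
}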
def _mkdir(self, path):
    # If something other than a directory already occupies the path
    # (e.g. a stale file or symlink), remove it before creating the
    # directory
    if os.path.lexists(path) and not os.path.isdir(path):
        os.unlink(path)
    mkdir_p(path)
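# Every snippet above leans on a `mkdir_p` helper. A minimal sketch of what
# such a helper usually looks like; an assumption, not the exact code these
# projects ship (Pelican provides its own in pelican.utils):
import errno
import os

def mkdir_p(path):
    """Create `path` and any missing parents; succeed if it already exists."""
    try:
        os.makedirs(path)  # on Python 3.2+: os.makedirs(path, exist_ok=True)
    except OSError as err:
        if err.errno != errno.EEXIST or not os.path.isdir(path):
            raise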