def _execute(self, command, args):
    self.logger = get_logger(CommandGitHubDeploy.name, self.site.loghandlers)

    # Check if ghp-import is installed
    check_ghp_import_installed()

    # Build before deploying
    build = main(['build'])
    if build != 0:
        self.logger.error('Build failed, not deploying to GitHub')
        sys.exit(build)

    # Clean non-target files
    l = self._doitargs['cmds'].get_plugin('list')(config=self.config, **self._doitargs)
    only_on_output, _ = real_scan_files(l, self.site)
    for f in only_on_output:
        os.unlink(f)

    # Commit and push
    self._commit_and_push()

    return
def _execute(self, options, args):
    """Run the deployment."""
    self.logger = get_logger(CommandGitHubDeploy.name, STDERR_HANDLER)

    # Check if ghp-import is installed
    check_ghp_import_installed()

    # Build before deploying
    build = main(['build'])
    if build != 0:
        self.logger.error('Build failed, not deploying to GitHub')
        return build

    # Clean non-target files
    only_on_output, _ = real_scan_files(self.site)
    for f in only_on_output:
        os.unlink(f)

    # Remove drafts and future posts if requested (Issue #2406)
    undeployed_posts = clean_before_deployment(self.site)
    if undeployed_posts:
        self.logger.notice("Deleted {0} posts due to DEPLOY_* settings".format(len(undeployed_posts)))

    # Commit and push
    self._commit_and_push(options['commit_message'])

    return
def _execute(self, options, args):
    """Start test server."""
    self.logger = get_logger("serve", self.site.loghandlers)
    out_dir = self.site.config["OUTPUT_FOLDER"]
    if not os.path.isdir(out_dir):
        self.logger.error("Missing '{0}' folder?".format(out_dir))
    else:
        os.chdir(out_dir)
        if "[" in options["address"]:
            options["address"] = options["address"].strip("[").strip("]")
            ipv6 = True
            OurHTTP = IPv6Server
        elif options["ipv6"]:
            ipv6 = True
            OurHTTP = IPv6Server
        else:
            ipv6 = False
            OurHTTP = HTTPServer

        httpd = OurHTTP((options["address"], options["port"]), OurHTTPRequestHandler)
        sa = httpd.socket.getsockname()
        self.logger.info("Serving HTTP on {0} port {1}...".format(*sa))
        if options["browser"]:
            if ipv6:
                server_url = "http://[{0}]:{1}/".format(*sa)
            else:
                server_url = "http://{0}:{1}/".format(*sa)
            self.logger.info("Opening {0} in the default web browser...".format(server_url))
            webbrowser.open(server_url)
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            self.logger.info("Server is shutting down.")
            return 130
def set_site(self, site):
    self.logger = utils.get_logger('bundles', utils.STDERR_HANDLER)
    if webassets is None and site.config['USE_BUNDLES']:
        utils.req_missing(['webassets'], 'USE_BUNDLES', optional=True)
        self.logger.warn('Setting USE_BUNDLES to False.')
        site.config['USE_BUNDLES'] = False
    super(BuildBundles, self).set_site(site)
def _execute(self, command, args):
    self.logger = get_logger('ping', self.site.loghandlers)
    blog_title = self.site.config['BLOG_TITLE']
    site_url = self.site.config['SITE_URL']

    if sys.version_info[0] == 2:
        import xmlrpclib as ping_xmlclient
        import urllib2 as ping_browser
    elif sys.version_info[0] >= 3:
        import xmlrpc.client as ping_xmlclient
        import urllib.request as ping_browser

    for xmlrpc_service in self.site.config['PING_XMLRPC_SERVICES']:
        self.logger.notice("==> XML-RPC service: {0}".format(xmlrpc_service))
        try:
            ping_xmlclient.ServerProxy(xmlrpc_service).weblogUpdates.ping(blog_title, site_url)
        except ping_xmlclient.ProtocolError as e:
            self.logger.warn("Unsuccessfully pinged service {0}: [{1}] {2}".format(xmlrpc_service, e.errcode, e.errmsg))
        except Exception as e:
            self.logger.warn("Unknown problem while pinging service {0}: {1}".format(xmlrpc_service, e))

    for web_service in self.site.config['PING_GET_SERVICES']:
        self.logger.notice("==> HTTP GET service: {0}".format(web_service))
        try:
            ping_browser.urlopen(web_service).read()
        except Exception as e:
            self.logger.warn("Unknown problem while pinging service {0}: {1}".format(web_service, e))

    self.logger.notice("Pinged all services")
def _execute(self, options, args):
    """Start test server."""
    self.logger = get_logger('serve')
    out_dir = self.site.config['OUTPUT_FOLDER']
    if not os.path.isdir(out_dir):
        self.logger.error("Missing '{0}' folder?".format(out_dir))
    else:
        self.serve_pidfile = os.path.abspath('nikolaserve.pid')
        os.chdir(out_dir)
        if '[' in options['address']:
            options['address'] = options['address'].strip('[').strip(']')
            ipv6 = True
            OurHTTP = IPv6Server
        elif options['ipv6']:
            ipv6 = True
            OurHTTP = IPv6Server
        else:
            ipv6 = False
            OurHTTP = HTTPServer

        httpd = OurHTTP((options['address'], options['port']), OurHTTPRequestHandler)
        sa = httpd.socket.getsockname()
        self.logger.info("Serving HTTP on {0} port {1}...".format(*sa))
        if options['browser']:
            if ipv6:
                server_url = "http://[{0}]:{1}/".format(*sa)
            elif sa[0] == '0.0.0.0':
                server_url = "http://127.0.0.1:{1}/".format(*sa)
            else:
                server_url = "http://{0}:{1}/".format(*sa)
            self.logger.info("Opening {0} in the default web browser...".format(server_url))
            webbrowser.open(server_url)

        if options['detach']:
            self.detached = True
            OurHTTPRequestHandler.quiet = True
            try:
                pid = os.fork()
                if pid == 0:
                    signal.signal(signal.SIGTERM, self.shutdown)
                    httpd.serve_forever()
                else:
                    with open(self.serve_pidfile, 'w') as fh:
                        fh.write('{0}\n'.format(pid))
                    self.logger.info("Detached with PID {0}. Run `kill {0}` or `kill $(cat nikolaserve.pid)` to stop the server.".format(pid))
            except AttributeError:
                if os.name == 'nt':
                    self.logger.warning("Detaching is not available on Windows, server is running in the foreground.")
                else:
                    raise
        else:
            self.detached = False
            try:
                self.dns_sd = dns_sd(options['port'], (options['ipv6'] or '::' in options['address']))
                signal.signal(signal.SIGTERM, self.shutdown)
                httpd.serve_forever()
            except KeyboardInterrupt:
                self.shutdown()
                return 130
def _execute(self, options, args):
    logger = get_logger('vcs', self.site.loghandlers)
    self.site.scan_posts()
    repo_path = local('.')
    wd = workdir.open(repo_path)

    # See if anything got deleted
    del_paths = []
    flag = False
    for s in wd.status():
        if s.state == 'removed':
            if not flag:
                logger.info('Found deleted files')
                flag = True
            logger.info('DEL => {}', s.relpath)
            del_paths.append(s.relpath)
    if flag:
        logger.info('Marking as deleted')
        wd.remove(paths=del_paths)
        wd.commit(message='Deleted Files', paths=del_paths)

    # Collect all paths that should be kept under control
    # Post and page sources
    paths = []
    for lang in self.site.config['TRANSLATIONS']:
        for p in self.site.timeline:
            paths.extend(p.fragment_deps(lang))

    # Files in general
    for k, v in self.site.config['FILES_FOLDERS'].items():
        paths.extend(get_path_list(k))
    for k, v in self.site.config['LISTINGS_FOLDERS'].items():
        paths.extend(get_path_list(k))
    for k, v in self.site.config['GALLERY_FOLDERS'].items():
        paths.extend(get_path_list(k))

    # Themes and plugins
    for p in ['plugins', 'themes']:
        paths.extend(get_path_list(p))

    # The configuration
    paths.append('conf.py')

    # Add them to the VCS
    paths = list(set(paths))
    wd.add(paths=paths)
    flag = False
    for s in wd.status():
        if s.state == 'added':
            if not flag:
                logger.info('Found new files')
                flag = True
            logger.info('NEW => {}', s.relpath)

    logger.info('Committing changes')
    wd.commit(message='Updated files')
def gen_tasks(self):
    config = self.site.config
    self.logger = utils.get_logger('postcast')
    self.site.scan_posts()
    yield self.group_task()
    for slug in config.get('POSTCASTS', []):
        category = _get_with_default_key(config.get('POSTCAST_CATEGORY', {}), slug, '')
        tags = _get_with_default_key(config.get('POSTCAST_TAGS', {}), slug, '')
        itunes_explicit = _get_with_default_key(config.get('POSTCAST_ITUNES_EXPLICIT', {}), slug, '')
        itunes_image = _get_with_default_key(config.get('POSTCAST_ITUNES_IMAGE', {}), slug, '')
        itunes_categories = _get_with_default_key(config.get('POSTCAST_ITUNES_CATEGORIES', {}), slug, '')
        for lang in config['TRANSLATIONS']:
            if category:
                title = config['CATEGORY_TITLES'][lang].get(category)
                description = config['CATEGORY_DESCRIPTIONS'][lang].get(category)
            else:
                title = None
                description = None
            posts = [
                post for post in self.site.posts
                if post.is_translation_available(lang)
                and (post.meta('category', lang) == category if category else True)
                and (set(post.tags_for_language(lang)) >= set(tags) if tags else True)
            ]
            feed_deps = [self.site.configuration_filename]
            for post in posts:
                feed_deps.append(post.source_path)
                feed_deps.append(self.audio_path(lang=lang, post=post))
            output_path = self.feed_path(slug, lang)
            yield {
                'basename': self.name,
                'name': str(output_path),
                'targets': [output_path],
                'file_dep': feed_deps,
                'clean': True,
                'actions': [(self.render_feed, [slug, posts, output_path], {
                    'description': description,
                    'itunes_categories': itunes_categories,
                    'itunes_explicit': itunes_explicit,
                    'itunes_image': itunes_image,
                    'lang': lang,
                    'title': title,
                })]
            }
def set_site(self, site):
    """Set Nikola site."""
    self.logger = utils.get_logger('bundles')
    if webassets is None and site.config['USE_BUNDLES']:
        utils.req_missing(['webassets'], 'USE_BUNDLES', optional=True)
        self.logger.warn('Setting USE_BUNDLES to False.')
        site.config['USE_BUNDLES'] = False
        site._GLOBAL_CONTEXT['use_bundles'] = False
    super(BuildBundles, self).set_site(site)
def _execute(self, command, args):
    self.logger = get_logger('ping', self.site.loghandlers)
    timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastping')
    new_ping = datetime.utcnow()
    try:
        with codecs.open(timestamp_path, 'rb', 'utf8') as inf:
            last_ping = datetime.strptime(inf.read().strip(), "%Y-%m-%dT%H:%M:%S.%f")
    except Exception as e:
        self.logger.debug("Problem when reading `{0}`: {1}".format(timestamp_path, e))
        last_ping = datetime(1970, 1, 1)

    # Limit pings to every 15 minutes to avoid looking like spam
    if timedelta(minutes=15) > new_ping - last_ping:
        self.logger.warn("Pings suppressed. Pinging too often! Only ping when there are new posts! (Max once per 15 minutes.)")
        return 0
    with codecs.open(timestamp_path, 'wb+', 'utf8') as outf:
        outf.write(new_ping.isoformat())

    try:
        blog_title = self.site.config['BLOG_TITLE']()
    except TypeError:
        blog_title = self.site.config['BLOG_TITLE']
    site_url = self.site.config['SITE_URL']

    if sys.version_info[0] == 2:
        import xmlrpclib as ping_xmlclient
        import urllib2 as ping_browser
    elif sys.version_info[0] >= 3:
        import xmlrpc.client as ping_xmlclient
        import urllib.request as ping_browser
    else:
        self.logger.error("Unsupported Python version. Well done!")
        return

    for xmlrpc_service in self.site.config['PING_XMLRPC_SERVICES']:
        self.logger.notice("==> XML-RPC service: {0}".format(xmlrpc_service))
        try:
            ping_xmlclient.ServerProxy(xmlrpc_service).weblogUpdates.ping(blog_title, site_url)
        except ping_xmlclient.ProtocolError as e:
            self.logger.warn("Unsuccessfully pinged service {0}: [{1}] {2}".format(xmlrpc_service, e.errcode, e.errmsg))
        except Exception as e:
            self.logger.warn("Unknown problem while pinging service {0}: {1}".format(xmlrpc_service, e))

    for web_service in self.site.config['PING_GET_SERVICES']:
        self.logger.notice("==> HTTP GET service: {0}".format(web_service))
        try:
            ping_browser.urlopen(web_service).read()
        except Exception as e:
            self.logger.warn("Unknown problem while pinging service {0}: {1}".format(web_service, e))

    self.logger.notice("Pinged all services")
def _execute(self, command, args):
    self.logger = get_logger('iarchiver', STDERR_HANDLER)
    # /robots.txt must be in root, so this use of urljoin() is intentional
    iatestbot = robotparser.RobotFileParser(urljoin(self.site.config['SITE_URL'], "/robots.txt"))
    iatestbot.read()
    timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastiarchive')
    new_iarchivedate = datetime.now(dateutil.tz.tzutc())
    try:
        with codecs.open(timestamp_path, 'rb', 'utf8') as inf:
            last_iarchivedate = datetime.strptime(inf.read().strip(), "%Y-%m-%dT%H:%M:%S.%f%z")
            firstrun = False
    except Exception as e:
        self.logger.debug("Problem when reading `{0}`: {1}".format(timestamp_path, e))
        last_iarchivedate = datetime(1970, 1, 1).replace(tzinfo=dateutil.tz.tzutc())
        firstrun = True

    self.site.scan_posts()
    sent_requests = False
    self.logger.info("Beginning submission of archive requests. This can take some time....")
    for post in self.site.timeline:
        if post.is_draft or post.publish_later:
            continue
        postdate = datetime.strptime(post.formatted_date("%Y-%m-%dT%H:%M:%S"), "%Y-%m-%dT%H:%M:%S")
        postdate_sitetime = self.site.tzinfo.localize(postdate)
        postdate_utc = postdate_sitetime.astimezone(dateutil.tz.tzutc())
        if firstrun or last_iarchivedate <= postdate_utc:
            post_permalink = post.permalink(absolute=True)
            archival_request = "http://web.archive.org/save/{0}".format(post_permalink)
            if iatestbot.can_fetch("ia_archiver", post_permalink):
                try:
                    # Intentionally not urlencoded
                    web_browser.urlopen(archival_request).read()
                    self.logger.info("==> sent archive request for {0}".format(post_permalink))
                except Exception as e:
                    self.logger.warn("==> unknown problem when archiving {0}: ({1})".format(post_permalink, e))
                # Throttle requests
                time.sleep(4)
                sent_requests = True
            else:
                self.logger.warn("==> /robots.txt directives blocked archiving of ({0})".format(post_permalink))

    # Record archival time
    with codecs.open(timestamp_path, 'wb+', 'utf8') as outf:
        outf.write(new_iarchivedate.strftime("%Y-%m-%dT%H:%M:%S.%f%z"))

    if sent_requests:
        self.logger.notice("Archival requests sent to the Internet Archive.")
    else:
        self.logger.notice("Nothing new to archive")
def _execute(self, command, args):
    self.logger = get_logger('deploy', self.site.loghandlers)
    # Get last successful deploy date
    timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastdeploy')
    if self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
        self.logger.warn("\nWARNING WARNING WARNING WARNING\n"
                         "You are deploying using the nikolademo Disqus account.\n"
                         "That means you will not be able to moderate the comments in your own site.\n"
                         "And is probably not what you want to do.\n"
                         "Think about it for 5 seconds, I'll wait :-)\n\n")
        time.sleep(5)

    deploy_drafts = self.site.config.get('DEPLOY_DRAFTS', True)
    deploy_future = self.site.config.get('DEPLOY_FUTURE', False)
    undeployed_posts = []
    if not (deploy_drafts and deploy_future):
        # Remove drafts and future posts
        out_dir = self.site.config['OUTPUT_FOLDER']
        self.site.scan_posts()
        for post in self.site.timeline:
            if (not deploy_drafts and post.is_draft) or \
                    (not deploy_future and post.publish_later):
                remove_file(os.path.join(out_dir, post.destination_path()))
                remove_file(os.path.join(out_dir, post.source_path))
                undeployed_posts.append(post)

    for command in self.site.config['DEPLOY_COMMANDS']:
        self.logger.notice("==> {0}".format(command))
        try:
            subprocess.check_call(command, shell=True)
        except subprocess.CalledProcessError as e:
            self.logger.error('Failed deployment — command {0} '
                              'returned {1}'.format(e.cmd, e.returncode))
            sys.exit(e.returncode)

    self.logger.notice("Successful deployment")
    if self.site.config['TIMEZONE'] is not None:
        tzinfo = pytz.timezone(self.site.config['TIMEZONE'])
    else:
        tzinfo = pytz.UTC
    try:
        with open(timestamp_path, 'rb') as inf:
            last_deploy = literal_eval(inf.read().strip())
            # this might ignore DST
            last_deploy = last_deploy.replace(tzinfo=tzinfo)
            clean = False
    except Exception:
        last_deploy = datetime(1970, 1, 1).replace(tzinfo=tzinfo)
        clean = True

    new_deploy = datetime.now()
    self._emit_deploy_event(last_deploy, new_deploy, clean, undeployed_posts)

    # Store timestamp of successful deployment
    with codecs.open(timestamp_path, 'wb+', 'utf8') as outf:
        outf.write(repr(new_deploy))
def set_site(self, site):
    self.config_dependencies = []
    for plugin_info in site.plugin_manager.getPluginsOfCategory("RestExtension"):
        self.config_dependencies.append(plugin_info.name)
    self.logger = get_logger('compile_rest', site.loghandlers)
    if not site.debug:
        self.logger.level = 4
    return super(CompileRestHTML5, self).set_site(site)
def set_site(self, site):
    super(CompileRest, self).set_site(site)
    self.config_dependencies = []
    for plugin_info in self.get_compiler_extensions():
        self.config_dependencies.append(plugin_info.name)
        plugin_info.plugin_object.short_help = plugin_info.description
    self.logger = get_logger('compile_rest', STDERR_HANDLER)
    if not site.debug:
        self.logger.level = 4
def _execute(self, options, args):
    """Start test server."""
    self.logger = get_logger('serve', STDERR_HANDLER)
    out_dir = self.site.config['OUTPUT_FOLDER']
    if not os.path.isdir(out_dir):
        self.logger.error("Missing '{0}' folder?".format(out_dir))
    else:
        os.chdir(out_dir)
        if '[' in options['address']:
            options['address'] = options['address'].strip('[').strip(']')
            ipv6 = True
            OurHTTP = IPv6Server
        elif options['ipv6']:
            ipv6 = True
            OurHTTP = IPv6Server
        else:
            ipv6 = False
            OurHTTP = HTTPServer

        httpd = OurHTTP((options['address'], options['port']), OurHTTPRequestHandler)
        sa = httpd.socket.getsockname()
        self.logger.info("Serving HTTP on {0} port {1}...".format(*sa))
        if options['browser']:
            if ipv6:
                server_url = "http://[{0}]:{1}/".format(*sa)
            else:
                server_url = "http://{0}:{1}/".format(*sa)
            self.logger.info("Opening {0} in the default web browser...".format(server_url))
            webbrowser.open(server_url)

        if options['detach']:
            OurHTTPRequestHandler.quiet = True
            try:
                pid = os.fork()
                if pid == 0:
                    httpd.serve_forever()
                else:
                    self.logger.info("Detached with PID {0}. Run `kill {0}` to stop the server.".format(pid))
            except AttributeError as e:
                if os.name == 'nt':
                    self.logger.warning("Detaching is not available on Windows, server is running in the foreground.")
                else:
                    raise e
        else:
            try:
                self.dns_sd = dns_sd(options['port'], (options['ipv6'] or '::' in options['address']))
                httpd.serve_forever()
            except KeyboardInterrupt:
                self.logger.info("Server is shutting down.")
                if self.dns_sd:
                    self.dns_sd.Reset()
                return 130
def _execute(self, options, args):
    """Start test server."""
    self.logger = get_logger('serve', self.site.loghandlers)
    out_dir = self.site.config['OUTPUT_FOLDER']
    if not os.path.isdir(out_dir):
        self.logger.error("Missing '{0}' folder?".format(out_dir))
    else:
        os.chdir(out_dir)
        httpd = HTTPServer((options['address'], options['port']), OurHTTPRequestHandler)
        sa = httpd.socket.getsockname()
        self.logger.notice("Serving HTTP on {0} port {1} ...".format(*sa))
        httpd.serve_forever()
def _execute(self, command, args):
    self.logger = get_logger('deploy', self.site.loghandlers)
    # Get last successful deploy date
    timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastdeploy')
    if self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
        self.logger.warn("\nWARNING WARNING WARNING WARNING\n"
                         "You are deploying using the nikolademo Disqus account.\n"
                         "That means you will not be able to moderate the comments in your own site.\n"
                         "And is probably not what you want to do.\n"
                         "Think about it for 5 seconds, I'll wait :-)\n\n")
        time.sleep(5)

    deploy_drafts = self.site.config.get('DEPLOY_DRAFTS', True)
    deploy_future = self.site.config.get('DEPLOY_FUTURE', False)
    undeployed_posts = []
    if not (deploy_drafts and deploy_future):
        # Remove drafts and future posts
        out_dir = self.site.config['OUTPUT_FOLDER']
        self.site.scan_posts()
        for post in self.site.timeline:
            if (not deploy_drafts and post.is_draft) or \
                    (not deploy_future and post.publish_later):
                remove_file(os.path.join(out_dir, post.destination_path()))
                remove_file(os.path.join(out_dir, post.source_path))
                undeployed_posts.append(post)

    for command in self.site.config['DEPLOY_COMMANDS']:
        self.logger.info("==> {0}".format(command))
        try:
            subprocess.check_call(command, shell=True)
        except subprocess.CalledProcessError as e:
            self.logger.error('Failed deployment — command {0} '
                              'returned {1}'.format(e.cmd, e.returncode))
            sys.exit(e.returncode)

    self.logger.info("Successful deployment")
    try:
        with io.open(timestamp_path, 'r', encoding='utf8') as inf:
            last_deploy = datetime.strptime(inf.read().strip(), "%Y-%m-%dT%H:%M:%S.%f")
            clean = False
    except Exception as e:
        self.logger.debug("Problem when reading `{0}`: {1}".format(timestamp_path, e))
        last_deploy = datetime(1970, 1, 1)
        clean = True

    new_deploy = datetime.utcnow()
    self._emit_deploy_event(last_deploy, new_deploy, clean, undeployed_posts)

    # Store timestamp of successful deployment
    with io.open(timestamp_path, 'w+', encoding='utf8') as outf:
        outf.write(unicode_str(new_deploy.isoformat()))
def set_site(self, site):
    for plugin_info in site.plugin_manager.getPluginsOfCategory("RestExtension"):
        if (plugin_info.name in site.config['DISABLED_PLUGINS']
                or (plugin_info.name in site.EXTRA_PLUGINS and
                    plugin_info.name not in site.config['ENABLED_EXTRAS'])):
            site.plugin_manager.removePluginFromCategory(plugin_info, "RestExtension")
            continue

        site.plugin_manager.activatePluginByName(plugin_info.name)
        plugin_info.plugin_object.set_site(site)
        plugin_info.plugin_object.short_help = plugin_info.description

    self.logger = get_logger('compile_rest', site.loghandlers)
    return super(CompileRest, self).set_site(site)
def _execute(self, options, args):
    """Check the generated site."""
    self.logger = get_logger('check', self.site.loghandlers)

    if not options['links'] and not options['files'] and not options['clean']:
        print(self.help())
        return False
    failure = False
    if options['links']:
        failure |= self.scan_links(options['find_sources'])
    if options['files']:
        failure |= self.scan_files()
    if options['clean']:
        failure |= self.clean_files()
    if failure:
        sys.exit(1)
def _execute(self, options, args):
    """Start test server."""
    self.logger = get_logger("serve", self.site.loghandlers)
    out_dir = self.site.config["OUTPUT_FOLDER"]
    if not os.path.isdir(out_dir):
        self.logger.error("Missing '{0}' folder?".format(out_dir))
    else:
        os.chdir(out_dir)
        httpd = HTTPServer((options["address"], options["port"]), OurHTTPRequestHandler)
        sa = httpd.socket.getsockname()
        self.logger.notice("Serving HTTP on {0} port {1} ...".format(*sa))
        if options["browser"]:
            server_url = "http://{0}:{1}/".format(options["address"], options["port"])
            self.logger.notice("Opening {0} in the default web browser ...".format(server_url))
            webbrowser.open(server_url)
        httpd.serve_forever()
def set_site(self, site):
    self.config_dependencies = []
    for plugin_info in site.plugin_manager.getPluginsOfCategory("RestExtension"):
        if plugin_info.name in site.config['DISABLED_PLUGINS']:
            site.plugin_manager.removePluginFromCategory(plugin_info, "RestExtension")
            continue

        site.plugin_manager.activatePluginByName(plugin_info.name)
        self.config_dependencies.append(plugin_info.name)
        plugin_info.plugin_object.set_site(site)
        plugin_info.plugin_object.short_help = plugin_info.description

    self.logger = get_logger('compile_rest', site.loghandlers)
    if not site.debug:
        self.logger.level = 4
    return super(CompileRest, self).set_site(site)
def set_directories(self, directories, cache_folder):
    """Create a template lookup."""
    cache_dir = os.path.join(cache_folder, '.mako.tmp')
    # Workaround for a Mako bug, Issue #825
    if sys.version_info[0] == 2:
        try:
            os.path.abspath(cache_dir).decode('ascii')
        except UnicodeEncodeError:
            cache_dir = tempfile.mkdtemp()
            self.logger = get_logger('mako', self.site.loghandlers)
            self.logger.warning('Because of a Mako bug, setting cache_dir to {0}'.format(cache_dir))
    if os.path.exists(cache_dir):
        shutil.rmtree(cache_dir)
    self.lookup = TemplateLookup(
        directories=directories,
        module_directory=cache_dir,
        output_encoding='utf-8')
def set_site(self, site):
    site.register_path_handler("gallery", self.gallery_path)
    site.register_path_handler("gallery_rss", self.gallery_rss_path)
    self.logger = utils.get_logger("render_galleries", site.loghandlers)
    self.kw = {
        "thumbnail_size": site.config["THUMBNAIL_SIZE"],
        "max_image_size": site.config["MAX_IMAGE_SIZE"],
        "output_folder": site.config["OUTPUT_FOLDER"],
        "cache_folder": site.config["CACHE_FOLDER"],
        "default_lang": site.config["DEFAULT_LANG"],
        "use_filename_as_title": site.config["USE_FILENAME_AS_TITLE"],
        "gallery_folders": site.config["GALLERY_FOLDERS"],
        "sort_by_date": site.config["GALLERY_SORT_BY_DATE"],
        "filters": site.config["FILTERS"],
        "translations": site.config["TRANSLATIONS"],
        "global_context": site.GLOBAL_CONTEXT,
        "feed_length": site.config["FEED_LENGTH"],
        "tzinfo": site.tzinfo,
        "comments_in_galleries": site.config["COMMENTS_IN_GALLERIES"],
        "generate_rss": site.config["GENERATE_RSS"],
    }

    # Verify that no folder in GALLERY_FOLDERS appears twice
    appearing_paths = set()
    for source, dest in self.kw["gallery_folders"].items():
        if source in appearing_paths or dest in appearing_paths:
            problem = source if source in appearing_paths else dest
            utils.LOGGER.error("The gallery input or output folder '{0}' appears in more than one entry in GALLERY_FOLDERS, exiting.".format(problem))
            sys.exit(1)
        appearing_paths.add(source)
        appearing_paths.add(dest)

    # Find all galleries we need to process
    self.find_galleries()
    # Create self.gallery_links
    self.create_galleries_paths()
    return super(Galleries, self).set_site(site)
def _execute(self, options, args):
    L = utils.get_logger('upgrade_metadata', utils.STDERR_HANDLER)
    nikola.post._UPGRADE_METADATA_ADVERTISED = True

    # scan posts
    self.site.scan_posts()
    flagged = []
    for post in self.site.timeline:
        if not post.newstylemeta:
            flagged.append(post)
    if flagged:
        if len(flagged) == 1:
            L.info('1 post (and/or its translations) contains old-style metadata:')
        else:
            L.info('{0} posts (and/or their translations) contain old-style metadata:'.format(len(flagged)))
        for post in flagged:
            L.info('    ' + post.metadata_path)
        if not options['yes']:
            yesno = utils.ask_yesno("Proceed with metadata upgrade?")
        if options['yes'] or yesno:
            for post in flagged:
                for lang in self.site.config['TRANSLATIONS'].keys():
                    if lang == post.default_lang:
                        fname = post.metadata_path
                    else:
                        meta_path = os.path.splitext(post.source_path)[0] + '.meta'
                        fname = utils.get_translation_candidate(post.config, meta_path, lang)

                    if os.path.exists(fname):
                        with io.open(fname, 'r', encoding='utf-8') as fh:
                            meta = fh.readlines()

                        if not meta[min(1, len(meta) - 1)].startswith('.. '):  # check if we’re dealing with old style metadata
                            with io.open(fname, 'w', encoding='utf-8') as fh:
                                for k, v in zip(self.fields, meta):
                                    fh.write('.. {0}: {1}'.format(k, v))
                            L.debug(fname)
            L.info('{0} posts upgraded.'.format(len(flagged)))
        else:
            L.info('Metadata not upgraded.')
    else:
        L.info('No old-style metadata posts found. No action is required.')
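# Illustrative sketch, not from the original plugin: the upgrade loop above
# rewrites an old-style .meta file (bare values, one per line, in the order
# given by self.fields) into the explicit ".. field: value" form, e.g.
#
#   before:                    after:
#   My Post                    .. title: My Post
#   my-post                    .. slug: my-post
#   2015-01-01 12:00:00        .. date: 2015-01-01 12:00:00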
def set_site(self, site):
    """Set Nikola site."""
    site.register_path_handler('gallery', self.gallery_path)
    site.register_path_handler('gallery_global', self.gallery_global_path)
    site.register_path_handler('gallery_rss', self.gallery_rss_path)
    self.logger = utils.get_logger('render_galleries', utils.STDERR_HANDLER)
    self.kw = {
        'thumbnail_size': site.config['THUMBNAIL_SIZE'],
        'max_image_size': site.config['MAX_IMAGE_SIZE'],
        'output_folder': site.config['OUTPUT_FOLDER'],
        'cache_folder': site.config['CACHE_FOLDER'],
        'default_lang': site.config['DEFAULT_LANG'],
        'use_filename_as_title': site.config['USE_FILENAME_AS_TITLE'],
        'gallery_folders': site.config['GALLERY_FOLDERS'],
        'sort_by_date': site.config['GALLERY_SORT_BY_DATE'],
        'filters': site.config['FILTERS'],
        'translations': site.config['TRANSLATIONS'],
        'global_context': site.GLOBAL_CONTEXT,
        'feed_length': site.config['FEED_LENGTH'],
        'tzinfo': site.tzinfo,
        'comments_in_galleries': site.config['COMMENTS_IN_GALLERIES'],
        'generate_rss': site.config['GENERATE_RSS'],
        'preserve_exif_data': site.config['PRESERVE_EXIF_DATA'],
        'exif_whitelist': site.config['EXIF_WHITELIST'],
    }

    # Verify that no folder in GALLERY_FOLDERS appears twice
    appearing_paths = set()
    for source, dest in self.kw['gallery_folders'].items():
        if source in appearing_paths or dest in appearing_paths:
            problem = source if source in appearing_paths else dest
            utils.LOGGER.error("The gallery input or output folder '{0}' appears in more than one entry in GALLERY_FOLDERS, ignoring.".format(problem))
            continue
        appearing_paths.add(source)
        appearing_paths.add(dest)

    # Find all galleries we need to process
    self.find_galleries()
    # Create self.gallery_links
    self.create_galleries_paths()
    return super(Galleries, self).set_site(site)
def _execute(self, options, args):
    """Check the generated site."""
    self.logger = get_logger('check', STDERR_HANDLER)

    if not options['links'] and not options['files'] and not options['clean']:
        print(self.help())
        return False
    if options['verbose']:
        self.logger.level = 1
    else:
        self.logger.level = 4
    failure = False
    if options['links']:
        failure |= self.scan_links(options['find_sources'], options['remote'])
    if options['files']:
        failure |= self.scan_files()
    if options['clean']:
        failure |= self.clean_files()
    if failure:
        return 1
def _execute(self, options, args):
    """Check the generated site."""
    self.logger = get_logger("check", STDERR_HANDLER)

    if not options["links"] and not options["files"] and not options["clean"]:
        print(self.help())
        return False
    if options["verbose"]:
        self.logger.level = 1
    else:
        self.logger.level = 4
    failure = False
    if options["links"]:
        failure |= self.scan_links(options["find_sources"], options["remote"])
    if options["files"]:
        failure |= self.scan_files()
    if options["clean"]:
        failure |= self.clean_files()
    if failure:
        return 1
def _execute(self, options, args):
    """Check the generated site."""
    self.logger = get_logger('check', self.site.loghandlers)
    self.l = self._doitargs['cmds'].get_plugin('list')(config=self.config, **self._doitargs)

    if not options['links'] and not options['files'] and not options['clean']:
        print(self.help())
        return False
    if options['verbose']:
        self.logger.level = 1
    else:
        self.logger.level = 4
    failure = False
    if options['links']:
        failure |= self.scan_links(options['find_sources'], options['remote'])
    if options['files']:
        failure |= self.scan_files()
    if options['clean']:
        failure |= self.clean_files()
    if failure:
        sys.exit(1)
def _execute(self, options, args):
    """Start test server."""
    self.logger = get_logger('serve', self.site.loghandlers)
    out_dir = self.site.config['OUTPUT_FOLDER']
    if not os.path.isdir(out_dir):
        self.logger.error("Missing '{0}' folder?".format(out_dir))
    else:
        os.chdir(out_dir)
        httpd = HTTPServer((options['address'], options['port']), OurHTTPRequestHandler)
        sa = httpd.socket.getsockname()
        self.logger.info("Serving HTTP on {0} port {1} ...".format(*sa))
        if options['browser']:
            server_url = "http://{0}:{1}/".format(options['address'], options['port'])
            self.logger.info("Opening {0} in the default web browser ...".format(server_url))
            webbrowser.open(server_url)
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            exit(0)
def _execute(self, command, args):
    self.logger = get_logger(CommandGitHubDeploy.name, self.site.loghandlers)
    self._source_branch = self.site.config.get('GITHUB_SOURCE_BRANCH', 'master')
    self._deploy_branch = self.site.config.get('GITHUB_DEPLOY_BRANCH', 'gh-pages')
    self._remote_name = self.site.config.get('GITHUB_REMOTE_NAME', 'origin')
    self._pull_before_commit = self.site.config.get('GITHUB_PULL_BEFORE_COMMIT', False)

    self._ensure_git_repo()
    self._exit_if_output_committed()

    if not self._prompt_continue():
        return

    build = main(['build'])
    if build != 0:
        self.logger.error('Build failed, not deploying to GitHub')
        sys.exit(build)

    only_on_output, _ = real_scan_files(self.site)
    for f in only_on_output:
        os.unlink(f)

    self._checkout_deploy_branch()
    self._copy_output()
    self._commit_and_push()

    return
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

import datetime
import re

from docutils import languages, nodes, utils
from docutils.parsers.rst import Directive, directives, roles
from docutils.parsers.rst.directives.admonitions import BaseAdmonition
from docutils.parsers.rst.directives.body import MathBlock
from docutils.transforms import Transform

from nikola.plugin_categories import RestExtension
from nikola.plugins.compile.rest import add_node
from nikola.utils import get_logger

logger = get_logger("sphinx_roles")


class Plugin(RestExtension):

    name = "rest_sphinx_roles"

    def set_site(self, site):
        self.site = site
        roles.register_local_role("pep", pep_role)
        roles.register_local_role("rfc", rfc_role)
        roles.register_local_role("term", term_role)
        roles.register_local_role("option", option_role)
        roles.register_local_role("ref", ref_role)
        roles.register_local_role("eq", eq_role)
import json.decoder
import os
import shutil
import sys
import time

import requests

import pygments
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

from pkg_resources import resource_filename

from nikola.plugin_categories import Command
from nikola import utils

LOGGER = utils.get_logger('theme')


class CommandTheme(Command):
    """Manage themes."""

    json = None
    name = "theme"
    doc_usage = "[-u url] [-i theme_name] [-r theme_name] [-l] [--list-installed] [-g] [-n theme_name] [-c template_name]"
    doc_purpose = "manage themes"
    output_dir = 'themes'
    cmd_options = [
        {
            'name': 'install',
            'short': 'i',
            'long': 'install',
def _execute(self, command, args):
    self.logger = get_logger('deploy', self.site.loghandlers)
    # Get last successful deploy date
    timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastdeploy')
    if self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
        self.logger.warn(
            "\nWARNING WARNING WARNING WARNING\n"
            "You are deploying using the nikolademo Disqus account.\n"
            "That means you will not be able to moderate the comments in your own site.\n"
            "And is probably not what you want to do.\n"
            "Think about it for 5 seconds, I'll wait :-)\n\n")
        time.sleep(5)

    deploy_drafts = self.site.config.get('DEPLOY_DRAFTS', True)
    deploy_future = self.site.config.get('DEPLOY_FUTURE', False)
    undeployed_posts = []
    if not (deploy_drafts and deploy_future):
        # Remove drafts and future posts
        out_dir = self.site.config['OUTPUT_FOLDER']
        self.site.scan_posts()
        for post in self.site.timeline:
            if (not deploy_drafts and post.is_draft) or \
                    (not deploy_future and post.publish_later):
                remove_file(os.path.join(out_dir, post.destination_path()))
                remove_file(os.path.join(out_dir, post.source_path))
                undeployed_posts.append(post)

    if args:
        presets = args
    else:
        presets = ['default']

    # test for preset existence
    for preset in presets:
        try:
            self.site.config['DEPLOY_COMMANDS'][preset]
        except KeyError:
            self.logger.error('No such preset: {0}'.format(preset))
            sys.exit(255)

    for preset in presets:
        self.logger.info("=> preset '{0}'".format(preset))
        for command in self.site.config['DEPLOY_COMMANDS'][preset]:
            self.logger.info("==> {0}".format(command))
            try:
                subprocess.check_call(command, shell=True)
            except subprocess.CalledProcessError as e:
                self.logger.error('Failed deployment — command {0} '
                                  'returned {1}'.format(e.cmd, e.returncode))
                sys.exit(e.returncode)

    self.logger.info("Successful deployment")
    try:
        with io.open(timestamp_path, 'r', encoding='utf8') as inf:
            last_deploy = datetime.strptime(inf.read().strip(), "%Y-%m-%dT%H:%M:%S.%f")
            clean = False
    except Exception as e:
        self.logger.debug("Problem when reading `{0}`: {1}".format(timestamp_path, e))
        last_deploy = datetime(1970, 1, 1)
        clean = True

    new_deploy = datetime.utcnow()
    self._emit_deploy_event(last_deploy, new_deploy, clean, undeployed_posts)

    # Store timestamp of successful deployment
    with io.open(timestamp_path, 'w+', encoding='utf8') as outf:
        outf.write(unicode_str(new_deploy.isoformat()))
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import print_function, unicode_literals

import os

from nikola import __version__
from nikola.plugin_categories import Command
from nikola.utils import get_logger, STDERR_HANDLER

LOGGER = get_logger('console', STDERR_HANDLER)


class Console(Command):
    """Start debugging console."""

    name = "console"
    shells = ['ipython', 'bpython', 'plain']
    doc_purpose = "start an interactive Python console with access to your site"
    doc_description = """\
Order of resolution: IPython → bpython [deprecated] → plain Python interpreter
The site engine is accessible as `SITE`, and the config as `conf`."""
    header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration, SITE = site engine)"
    cmd_options = [{
        'name': 'plain',
        'short': 'p',
        'long': 'plain',
# -*- coding: utf-8 -*-

# This file is public domain according to its author, Roberto Alsina

"""Emoji directive for reStructuredText."""

import glob
import json
import os

from nikola.plugin_categories import ShortcodePlugin
from nikola import utils

TABLE = {}
LOGGER = utils.get_logger('scan_posts')


def _populate():
    for fname in glob.glob(os.path.join(os.path.dirname(__file__), 'data', '*.json')):
        with open(fname) as inf:
            data = json.load(inf)
            data = data[list(data.keys())[0]]
            data = data[list(data.keys())[0]]
            for item in data:
                if item['key'] in TABLE:
                    LOGGER.warning('Repeated emoji {}'.format(item['key']))
                else:
                    TABLE[item['key']] = item['value']


class Plugin(ShortcodePlugin):
try:
    from markdown.extensions import Extension
    from markdown.inlinepatterns import Pattern
    from markdown.util import AtomicString
    from markdown.util import etree
except ImportError:
    # No need to catch this, if you try to use this without Markdown,
    # the markdown compiler will fail first
    Extension = Pattern = object

from nikola.plugin_categories import MarkdownExtension
from nikola.utils import get_logger, STDERR_HANDLER

import requests

LOGGER = get_logger('compile_markdown.mdx_gist', STDERR_HANDLER)

GIST_JS_URL = "https://gist.github.com/{0}.js"
GIST_FILE_JS_URL = "https://gist.github.com/{0}.js?file={1}"
GIST_RAW_URL = "https://gist.githubusercontent.com/raw/{0}"
GIST_FILE_RAW_URL = "https://gist.githubusercontent.com/raw/{0}/{1}"

GIST_MD_RE = r'\[:gist:\s*(?P<gist_id>\S+)(?:\s*(?P<filename>.+?))?\s*\]'
GIST_RST_RE = r'(?m)^\.\.\s*gist::\s*(?P<gist_id>[^\]\s]+)(?:\s*(?P<filename>.+?))?\s*$'


class GistFetchException(Exception):
    '''Raised when attempt to fetch content of a Gist from github.com fails.'''

    def __init__(self, url, status_code):
        Exception.__init__(self)
        self.message = 'Received a {0} response from Gist URL: {1}'.format(
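# Illustrative note, not part of the original module: GIST_MD_RE captures a
# required gist id plus an optional filename, so (with a made-up id) both
# of these Markdown forms match:
#
#   [:gist: 4747847]          -> gist_id='4747847', filename=None
#   [:gist: 4747847 zen.py]   -> gist_id='4747847', filename='zen.py'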
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""Import posts from an ikiwiki blog."""

import os
import re
import sys
import datetime

import dateutil

from nikola.plugin_categories import Command
from nikola import utils
from nikola.plugins.basic_import import ImportMixin

LOGGER = utils.get_logger('import_ikiwiki', utils.STDERR_HANDLER)


def get_date(dt, tz=None, iso8601=False):
    """Return a Nikola date/time stamp.

    dt - datetime:
        the date/time stamp to use
    tz - tzinfo:
        the timezone used
    iso8601 - bool:
        whether to force ISO 8601 dates (instead of locale-specific ones)
    """
import json
import textwrap
import datetime
import unidecode
import dateutil.tz
import dateutil.zoneinfo
from mako.template import Template
from pkg_resources import resource_filename

import nikola
from nikola.nikola import DEFAULT_INDEX_READ_MORE_LINK, DEFAULT_FEED_READ_MORE_LINK, LEGAL_VALUES, urlsplit, urlunsplit
from nikola.plugin_categories import Command
from nikola.utils import ask, ask_yesno, get_logger, makedirs, load_messages
from nikola.packages.tzlocal import get_localzone

LOGGER = get_logger('init')

SAMPLE_CONF = {
    'BLOG_AUTHOR': "Your Name",
    'BLOG_TITLE': "Demo Site",
    'SITE_URL': "https://example.com/",
    'BLOG_EMAIL': "*****@*****.**",
    'BLOG_DESCRIPTION': "This is a demo site for Nikola.",
    'PRETTY_URLS': True,
    'STRIP_INDEXES': True,
    'DEFAULT_LANG': "en",
    'TRANSLATIONS': """{
    DEFAULT_LANG: "",
    # Example for another language:
    # "es": "./es",
}""",
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""

import os

import requests

from nikola.plugin_categories import Command
from nikola import utils

LOGGER = utils.get_logger('bootswatch_theme')


def _check_for_theme(theme, themes):
    for t in themes:
        if t.endswith(os.sep + theme):
            return True
    return False


class CommandBootswatchTheme(Command):
    """Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""

    name = "bootswatch_theme"
    doc_usage = "[options]"
    doc_purpose = "given a swatch name from bootswatch.com and a parent theme, creates a custom"\
""" PY2_BARBS = [ "Python 2 has been deprecated for years. Stop clinging to your long gone youth and switch to Python3.", "Python 2 is the safety blanket of languages. Be a big kid and switch to Python 3", "Python 2 is old and busted. Python 3 is the new hotness.", "Nice unicode you have there, would be a shame something happened to it.. switch to python 3!.", "Don’t get in the way of progress! Upgrade to Python 3 and save a developer’s mind today!", "Winners don't use Python 2 -- Signed: The FBI", "Python 2? What year is it?", "I just wanna tell you how I'm feeling\n" "Gotta make you understand\n" "Never gonna give you up [But Python 2 has to go]", "The year 2009 called, and they want their Python 2.7 back.", ] LOGGER = get_logger('Nikola', STDERR_HANDLER) def has_python_3(): """Check if python 3 is available.""" if 'win' in sys.platform: py_bin = 'py.exe' else: py_bin = 'python3' for path in os.environ["PATH"].split(os.pathsep): if os.access(os.path.join(path, py_bin), os.X_OK): return True return False class Py3Switch(LateTask):
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""Select one random propaganda image from a list."""

from nikola.plugin_categories import Task
from nikola import utils

import os
import os.path

from plugins.propaganda.variables import html

_LOGGER = utils.get_logger('render_propaganda', utils.STDERR_HANDLER)


class Propaganda(Task):
    """Render a propaganda box with the content of some defined subdir."""

    def _gen_html(self, destination, images_folder, conf):
        """Generate html."""
        list_item = """images[{item}] = ["{image}", "{url}", "{title}"];"""
        n = 0
        items = []
        for item in os.listdir(images_folder):
            item_path = os.path.join(images_folder, item)
            if os.path.isdir(item_path):
                with open(os.path.join(item_path, 'url.txt'),
from urllib.parse import urlparse  # NOQA
from zipfile import ZipFile

import dateutil

try:
    import micawber
except ImportError:
    micawber = None  # NOQA

from nikola.plugin_categories import Command
from nikola import utils
from nikola.utils import req_missing
from nikola.plugins.basic_import import ImportMixin
from nikola.plugins.command.init import SAMPLE_CONF, prepare_config

LOGGER = utils.get_logger('import_gplus', utils.STDERR_HANDLER)


class CommandImportGplus(Command, ImportMixin):
    """Import a Google+ dump."""

    name = "import_gplus"
    needs_config = False
    doc_usage = "[options] dump_file.zip"
    doc_purpose = "import a Google+ dump"
    cmd_options = ImportMixin.cmd_options

    def _execute(self, options, args):
        '''
        Import Google+ dump
        '''
try:
    from urlparse import urlparse
except ImportError:
    from urllib.parse import urlparse  # NOQA

try:
    import feedparser
except ImportError:
    feedparser = None  # NOQA

from nikola.plugin_categories import Command
from nikola import utils
from nikola.utils import req_missing
from nikola.plugins.basic_import import ImportMixin
from nikola.plugins.command.init import SAMPLE_CONF, prepare_config

LOGGER = utils.get_logger('import_blogger', utils.STDERR_HANDLER)


class CommandImportBlogger(Command, ImportMixin):
    """Import a blogger dump."""

    name = "import_blogger"
    needs_config = False
    doc_usage = "[options] blogger_export_file"
    doc_purpose = "import a blogger dump"
    cmd_options = ImportMixin.cmd_options + [
        {
            'name': 'exclude_drafts',
            'long': 'no-drafts',
            'short': 'd',
            'default': False,
from __future__ import unicode_literals, print_function

import os
import re
import datetime
import codecs

import yaml
import dateutil

from nikola.plugin_categories import Command, PageCompiler
from nikola import utils
from nikola.plugins.basic_import import ImportMixin
from nikola.plugins.command.init import SAMPLE_CONF, prepare_config

LOGGER = utils.get_logger('import_jekyll', utils.STDERR_HANDLER)


class JekyllImportError(Exception):
    def __init__(self, arg, *args, **kwargs):
        self._arg = arg
        super(JekyllImportError, self).__init__(*args, **kwargs)


class JekyllConfigurationNotFound(JekyllImportError):
    def __str__(self):
        return 'Jekyll configuration file was not found at %s' % self._arg


class CommandImportJekyll(Command, ImportMixin):
    """Import a Jekyll or Octopress blog."""
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""Create a new site."""

from __future__ import print_function, unicode_literals

import os

import dateutil
import feedparser

from nikola.plugin_categories import Command
from nikola.utils import get_logger, STDERR_HANDLER, slugify

LOGGER = get_logger('init', STDERR_HANDLER)


class CommandContinuousImport(Command):
    """Import and merge feeds into your blog."""

    name = "continuous_import"
    doc_usage = ""
    needs_config = True
    doc_purpose = "Import and merge feeds into your blog."
    cmd_options = []

    def _execute(self, options={}, args=None):
        """Import and merge feeds into your blog."""
        for name, feed in self.site.config['FEEDS'].items():
def set_site(self, site):
    """Set Nikola site."""
    self.logger = get_logger('compile_ipynb', STDERR_HANDLER)
    super(CompileIPynb, self).set_site(site)
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import print_function, unicode_literals

import json
import os

from medium import Client

from nikola import utils
from nikola.plugin_categories import Command

LOGGER = utils.get_logger('Medium', utils.STDERR_HANDLER)


class CommandMedium(Command):
    """Publish to Medium."""

    name = "medium"
    needs_config = True
    doc_usage = ""
    doc_purpose = "publish to Medium"

    def _execute(self, options, args):
        """Publish to Medium."""
        if not os.path.exists('medium.json'):
            LOGGER.error(
                'Please put your credentials in medium.json as described in the README.'
try:
    from urlparse import urlparse, unquote
except ImportError:
    from urllib.parse import urlparse, unquote  # NOQA

try:
    import phpserialize
except ImportError:
    phpserialize = None  # NOQA

from nikola.plugin_categories import Command
from nikola import utils
from nikola.utils import req_missing
from nikola.plugins.basic_import import ImportMixin, links
from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN
from nikola.plugins.command.init import SAMPLE_CONF, prepare_config, format_default_translations_config

LOGGER = utils.get_logger('import_wordpress', utils.STDERR_HANDLER)


def install_plugin(site, plugin_name, output_dir=None, show_install_notes=False):
    """Install a Nikola plugin."""
    LOGGER.notice("Installing plugin '{0}'".format(plugin_name))
    # Get hold of the 'plugin' plugin
    plugin_installer_info = site.plugin_manager.getPluginByName('plugin', 'Command')
    if plugin_installer_info is None:
        LOGGER.error('Internal error: cannot find the "plugin" plugin which is supposed to come with Nikola!')
        return False
    if not plugin_installer_info.is_activated:
        # Someone might have disabled the plugin in the `conf.py` used
        site.plugin_manager.activatePluginByName(plugin_installer_info.name)
        plugin_installer_info.plugin_object.set_site(site)
    plugin_installer = plugin_installer_info.plugin_object
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import unicode_literals, print_function

import codecs
import datetime
import os
import sys

from blinker import signal

from nikola.plugin_categories import Command
from nikola import utils

POSTLOGGER = utils.get_logger('new_post', utils.STDERR_HANDLER)
PAGELOGGER = utils.get_logger('new_page', utils.STDERR_HANDLER)
LOGGER = POSTLOGGER


def filter_post_pages(compiler, is_post, compilers, post_pages):
    """Given a compiler ("markdown", "rest"), and whether it's meant
    for a post or a page, and compilers, return the correct entry from
    post_pages."""
    # First throw away all the post_pages with the wrong is_post
    filtered = [entry for entry in post_pages if entry[3] == is_post]
    # These are the extensions supported by the required format
    extensions = compilers[compiler]
import json
import shutil

import pygments
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

try:
    import requests
except ImportError:
    requests = None  # NOQA

from nikola.plugin_categories import Command
from nikola import utils

LOGGER = utils.get_logger('install_theme', utils.STDERR_HANDLER)


# Stolen from textwrap in Python 3.3.2.
def indent(text, prefix, predicate=None):  # NOQA
    """Adds 'prefix' to the beginning of selected lines in 'text'.

    If 'predicate' is provided, 'prefix' will only be added to the lines
    where 'predicate(line)' is True. If 'predicate' is not provided,
    it will default to adding 'prefix' to all non-empty lines that do not
    consist solely of whitespace characters.
    """
    if predicate is None:
        def predicate(line):
            return line.strip()
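# Illustrative usage, not part of the original module: by default indent()
# prefixes only lines that contain non-whitespace characters.
#
#     >>> indent('first\n\nsecond\n', '  ')
#     '  first\n\n  second\n'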
def gen_tasks(self):
    """Generate CSS out of Sass sources."""
    self.logger = utils.get_logger('build_sass', self.site.loghandlers)
    kw = {
        'cache_folder': self.site.config['CACHE_FOLDER'],
        'themes': self.site.THEMES,
    }

    # Find where in the theme chain we define the Sass targets
    # There can be many *.sass/*.scss in the folder, but we only
    # will build the ones listed in sass/targets
    targets_path = utils.get_asset_path(
        os.path.join(self.sources_folder, "targets"), self.site.THEMES)
    try:
        with codecs.open(targets_path, "rb", "utf-8") as inf:
            targets = [x.strip() for x in inf.readlines()]
    except Exception:
        targets = []

    for theme_name in kw['themes']:
        src = os.path.join(utils.get_theme_path(theme_name), self.sources_folder)
        for task in utils.copy_tree(src, os.path.join(kw['cache_folder'], self.sources_folder)):
            task['basename'] = 'prepare_sass_sources'
            yield task

    # Build targets and write CSS files
    base_path = utils.get_theme_path(self.site.THEMES[0])
    dst_dir = os.path.join(self.site.config['OUTPUT_FOLDER'], 'assets', 'css')
    # Make everything depend on all sources, rough but enough
    deps = glob.glob(
        os.path.join(base_path, self.sources_folder, *("*{0}".format(ext) for ext in self.sources_ext)))

    def compile_target(target, dst):
        utils.makedirs(dst_dir)
        src = os.path.join(kw['cache_folder'], self.sources_folder, target)
        try:
            compiled = subprocess.check_output([self.compiler_name, src])
        except OSError:
            utils.req_missing([self.compiler_name], 'build Sass files (and use this theme)', False, False)
        with open(dst, "wb+") as outf:
            outf.write(compiled)

    yield self.group_task()

    # We can have file conflicts. This is a way to prevent them.
    # I originally wanted to use sets and their cannot-have-duplicates
    # magic, but I decided not to do this so we can show the user
    # what files were problematic.
    # If we didn’t do this, there would be a cryptic message from doit
    # instead.
    seennames = {}
    for target in targets:
        base = os.path.splitext(target)[0]
        dst = os.path.join(dst_dir, base + ".css")
        if base in seennames:
            self.logger.error(
                'Duplicate filenames for SASS compiled files: {0} and '
                '{1} (both compile to {2})'.format(seennames[base], target, base + ".css"))
        else:
            seennames.update({base: target})
            yield {
                'basename': self.name,
                'name': dst,
                'targets': [dst],
                'file_dep': deps,
                'task_dep': ['prepare_sass_sources'],
                'actions': ((compile_target, [target, dst]), ),
                'uptodate': [utils.config_changed(kw)],
                'clean': True
            }
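# For context (illustrative, not from the original plugin): the sass/targets
# file read above is a plain text list of entry-point stylesheets, one per
# line; only the files named there are compiled into output CSS, e.g.
#
#   main.scss
#   print.scss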
import io
import datetime
import operator
import os
import shutil
import subprocess
import sys

import dateutil.tz
from blinker import signal

from nikola.plugin_categories import Command
from nikola import utils

COMPILERS_DOC_LINK = 'https://getnikola.com/handbook.html#configuring-other-input-formats'
POSTLOGGER = utils.get_logger('new_post')
PAGELOGGER = utils.get_logger('new_page')
LOGGER = POSTLOGGER


def get_default_compiler(is_post, compilers, post_pages):
    """Given compilers and post_pages, return a reasonable default compiler for this kind of post/page."""
    # First throw away all the post_pages with the wrong is_post
    filtered = [entry for entry in post_pages if entry[3] == is_post]
    # Get extensions in filtered post_pages until one matches a compiler
    for entry in filtered:
        extension = os.path.splitext(entry[0])[-1]
        for compiler, extensions in compilers.items():
            if extension in extensions:
                return compiler
def set_site(self, site):
    """Set Nikola site."""
    self.logger = utils.get_logger('scale_images')
    return super(ScaleImage, self).set_site(site)
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import print_function, unicode_literals

import json
import os

import contentful
import yaml

from nikola import utils
from nikola.plugin_categories import Command

LOGGER = utils.get_logger('contentful')


class CommandContenful(Command):
    """Import the contenful dump."""

    name = "contenful"
    needs_config = True
    doc_usage = ""
    doc_purpose = "import the contenful dump"

    def _execute(self, options, args):
        """Import posts and pages from contenful."""
        if not os.path.exists('contentful.json'):
            LOGGER.error(
                'Please put your credentials in contentful.json as described in the README.'
import io
import os
import sys
import shutil
import subprocess
import time

import requests

import pygments
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

from nikola.plugin_categories import Command
from nikola import utils

LOGGER = utils.get_logger('plugin', utils.STDERR_HANDLER)


class CommandPlugin(Command):
    """Manage plugins."""

    json = None
    name = "plugin"
    doc_usage = "[-u url] [--user] [-i name] [-r name] [--upgrade] [-l] [--list-installed]"
    doc_purpose = "manage plugins"
    output_dir = None
    needs_config = False
    cmd_options = [
        {
            'name': 'install',
            'short': 'i',
from __future__ import print_function

import os
import io
import shutil
import time

import requests

import pygments
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

from nikola.plugin_categories import Command
from nikola import utils

LOGGER = utils.get_logger('theme', utils.STDERR_HANDLER)


class CommandTheme(Command):
    """Manage themes."""

    json = None
    name = "theme"
    doc_usage = "[-i theme_name] [-r theme_name] [-l] [-u url] [-g] [-n theme_name] [-c template_name]"
    doc_purpose = "manage themes"
    output_dir = 'themes'
    cmd_options = [
        {
            'name': 'install',
            'short': 'i',
            'long': 'install',
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""Start debugging console."""

import os

from nikola import __version__
from nikola.plugin_categories import Command
from nikola.utils import get_logger, req_missing, Commands

LOGGER = get_logger('console')


class CommandConsole(Command):
    """Start debugging console."""

    name = "console"
    shells = ['ipython', 'bpython', 'plain']
    doc_purpose = "start an interactive Python console with access to your site"
    doc_description = """\
The site engine is accessible as `site` and `nikola_site`, the config file as `conf`, and commands are available as `commands`.
If there is no console to use specified (as -b, -i, -p) it tries IPython, then falls back to bpython, and finally falls back to the plain Python console."""
    header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration file, site, nikola_site = site engine, commands = nikola commands)"
    cmd_options = [
        {
            'name': 'bpython',
def gen_tasks(self):
    self.logger = utils.get_logger('speechsynthesizednetcast', self.site.loghandlers)
    # Deps and config
    kw = {
        "translations": self.site.config['TRANSLATIONS'],
        "blog_title": self.site.config['BLOG_TITLE'],
        "blog_description": self.site.config['BLOG_DESCRIPTION'],
        "site_url": self.site.config['SITE_URL'],
        "output_folder": self.site.config['OUTPUT_FOLDER'],
        "cache_folder": self.site.config['CACHE_FOLDER'],
        "feed_length": self.site.config['FEED_LENGTH'],
        "default_lang": self.site.config['DEFAULT_LANG'],
        "audio_formats": self.default_audio_formats,
        "intro_text": self.default_text_intro,
        "outro_text": self.default_text_outro,
    }

    # Default configuration values
    if 'NETCAST_AUDIO_FORMATS' in self.site.config:
        kw['audio_formats'] = self.site.config['NETCAST_AUDIO_FORMATS']
    if 'NETCAST_INTRO' in self.site.config:
        kw['intro_text'] = self.site.config['NETCAST_INTRO']
    if 'NETCAST_OUTRO' in self.site.config:
        kw['outro_text'] = self.site.config['NETCAST_OUTRO']

    self.test_required_programs([kw['audio_formats']])
    self.site.scan_posts()
    yield self.group_task()

    for lang in kw['translations']:
        feed_deps = []
        posts = [x for x in self.site.posts if x.is_translation_available(lang)][:10]
        for post in posts:
            post_recording_path = self.netcast_audio_path(lang=lang, post=post, format='flac', is_cache=True)
            yield {
                'name': str(post_recording_path),
                'basename': str(self.name),
                'targets': [post_recording_path],
                'file_dep': post.fragment_deps(lang),
                'uptodate': [utils.config_changed(kw)],
                'clean': True,
                'actions': [(self.record_post, [post_recording_path, post, lang])]
            }
            for format in kw['audio_formats']:
                output_name = self.netcast_audio_path(lang=lang, post=post, format=format)
                yield {
                    'name': str(output_name),
                    'basename': str(self.name),
                    'targets': [output_name],
                    'file_dep': [post_recording_path],
                    'clean': True,
                    'actions': [(self.encode_post, [output_name, post_recording_path, post, lang, format])]
                }
                feed_deps.append(output_name)
        for format in kw['audio_formats']:
            output_name = self.netcast_feed_path(lang=lang, format=format)
            yield {
                'name': str(output_name),
                'basename': str(self.name),
                'targets': [output_name],
                'file_dep': feed_deps,  # depends on all formats
                'clean': True,
                'actions': [(self.netcast_feed_renderer, [lang, posts, output_name, format])]
            }
import json.decoder
import os
import sys
import shutil
import subprocess
import time

import requests

import pygments
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

from nikola.plugin_categories import Command
from nikola import utils

LOGGER = utils.get_logger('plugin')


class CommandPlugin(Command):
    """Manage plugins."""

    json = None
    name = "plugin"
    doc_usage = "[-u url] [--user] [-i name] [-r name] [--upgrade] [-l] [--list-installed]"
    doc_purpose = "manage plugins"
    output_dir = None
    needs_config = False
    cmd_options = [
        {
            'name': 'install',
            'short': 'i',
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""Show the default configuration."""

import sys

import nikola.plugins.command.init
from nikola.plugin_categories import Command
from nikola.utils import get_logger

LOGGER = get_logger('default_config')


class CommandShowConfig(Command):
    """Show the default configuration."""

    name = "default_config"
    doc_usage = ""
    needs_config = False
    doc_purpose = "Print the default Nikola configuration."
    cmd_options = []

    def _execute(self, options=None, args=None):
        """Show the default configuration."""
        try: