def create_site(self, siteId, title, supportEmail, admin_email,
                admin_password, canonicalHost, canonicalPort, smtp_host,
                smtp_port, smtp_user, smtp_password):
    '''Create a GroupServer site.

    Delegates the actual creation to
    ``creation.manage_addGroupserverSite``, then verifies the expected
    folder structure exists and registers a virtual-host mapping.

    :raises UserError: if any expected folder, the site object, or the
        virtual host monster (VHM) is missing after creation.
    '''
    creation.manage_addGroupserverSite(
        self.app, siteId, title, supportEmail, admin_email, admin_password,
        canonicalHost, canonicalPort, smtp_host, smtp_port, smtp_user,
        smtp_password)
    if not hasattr(self.app, siteId):
        m = '"{0}" folder not found'.format(siteId)
        raise UserError(m)
    if not hasattr(getattr(self.app, siteId), 'Content'):
        m = '"Content" folder not found in {0}'.format(siteId)
        raise UserError(m)
    if not hasattr(getattr(getattr(self.app, siteId), 'Content'), SITE_ID):
        # BUG FIX: the folder that is searched is "<siteId>/Content", but
        # the old message formatted SITE_ID into both placeholders.
        m = '"{0}" not found in the "{1}/Content" folder'.format(
            SITE_ID, siteId)
        raise UserError(m)
    vhm = getattr(self.app, 'virtual_hosting', None)
    if not vhm:
        m = 'Could not find the VHM in {0}'.format(self.app)
        raise UserError(m)
    # '++skin++skin_gs_ogn' does not work during install
    mappingD = {
        'host': canonicalHost,
        'id': siteId,
        'site': SITE_ID,
        'skin': ''
    }
    mapping = '%(host)s/%(id)s/Content/%(site)s/%(skin)s\n' % mappingD
    vhm.set_map(mapping)
def get_configs(self, xml2_location=None, xslt_location=None):
    """Get the executables for libxml2 and libxslt configuration.

    If not configured, then try to get them from a built location.
    If the location is not given, then search os.environ["PATH"] and
    warn the user about that.
    """
    self.xslt_config = self._locate_config("xslt-config", xslt_location)
    self.xml2_config = self._locate_config("xml2-config", xml2_location)
    self.logger.debug("xslt-config: %s" % self.xslt_config)
    self.logger.debug("xml2-config: %s" % self.xml2_config)

def _locate_config(self, binary, location):
    """Return the path to *binary*: the configured option, or a PATH hit.

    :raises UserError: if the binary is neither configured nor on PATH.
    """
    path = self.options.get(binary)
    if not path:
        path = which(binary, location)
        if not path:
            raise UserError(
                "No ``%s`` binary configured and none found in path."
                % binary
            )
        # logger.warn is a deprecated alias of logger.warning.
        self.logger.warning("Using %s found in %s." % (binary, path))
    return path
def install(self):
    """Installer: write the GroupServer configuration file.

    Reads database and SMTP settings from the part options, writes the
    configuration to ``options['dest']`` and locks the part so it is
    not regenerated on the next run.

    :raises UserError: on I/O failure or a missing required option.
    :returns: an empty tuple (no files for buildout to track).
    """
    if self.should_run():
        configCreator = ConfigCreator()
        try:
            configCreator.set_database(
                self.options.get('database_username', ''),
                self.options.get('database_password', ''),
                self.options['database_host'],
                self.options['database_port'],
                self.options['database_name'])
            # BUG FIX: 'smtp_user' '' was missing a comma; implicit string
            # concatenation silently dropped the '' default argument.
            configCreator.set_smtp(
                self.options['smtp_host'],
                self.options['smtp_port'],
                self.options.get('smtp_user', ''),
                self.options.get('smtp_password', ''))
            configCreator.create_token()
            configCreator.write(self.options['dest'])
        except OSError as e:
            m = '{0} Issue creating the configuration\n{1}\n\n'
            msg = m.format(self.name, e)
            raise UserError(msg)
        except KeyError as e:
            m = '{0} Issue creating the configuration\nThe required '\
                'paramater "{1}" was not supplied.'
            msg = m.format(self.name, e.args[0])
            raise UserError(msg)
        else:
            self.mark_locked()
            m = 'Configuration written to\n{dest}\n\n'
            sys.stdout.write(m.format(**self.options))
    return tuple()
def check_ternary_value(key, options, defaults_map=DEFAULTS_MAP,
                        values_map=VALUES_MAP, **kwargs):
    """
    Check the given key for an allowed value and normalize it.

    keyword arguments:

    allowed -- a list of keys of the values_map
    implemented -- a subset of <allowed>
                   (a value might be allowed but not yet implemented)

    Returns the canonical group name for the value; normalizes
    options[key] to that group when an alias was given.
    Raises UserError for missing, unimplemented or disallowed values.
    """
    val = options.get(key, None)
    if not val:
        if key in defaults_map:
            val = defaults_map[key]
        else:
            # BUG FIX: the message previously read "'%(key)s: ..." with an
            # unbalanced quote around the key name.
            raise UserError("'%(key)s': a value is required!" % locals())
    allowed = kwargs.pop('allowed', values_map.keys())
    implemented = kwargs.pop('implemented', allowed)
    for group in allowed:
        # A value matches a group either by its canonical name or by any
        # of the group's aliases in values_map.
        if val == group or val in values_map[group]:
            if group not in implemented:
                raise UserError("'%(key)s': Sorry, %(val)r "
                                'is not implemented yet.' % locals())
            elif val != group:
                # Normalize the alias to the canonical group name.
                options[key] = group
            return group
    raise UserError("'%(key)s': value %(val)r is not allowed." % locals())
def _test_distination_file(self):
    """Validate that ``self.destination_file`` is set and writable.

    :raises UserError: if the option is missing or cannot be written.
    """
    from zc.buildout import UserError
    from os.path import dirname, exists
    from os import access, W_OK
    if not self.destination_file:
        raise UserError("missing destination_file")
    # BUG FIX: the writability check previously probed self.source even
    # though the error message reported self.destination_file.  Check the
    # destination itself; if it does not exist yet, check its directory.
    target = self.destination_file
    probe = target if exists(target) else (dirname(target) or '.')
    if not access(probe, W_OK):
        raise UserError("cannot write %s" % self.destination_file)
def __init__(self, buildout, name, options):
    """Initialise the recipe: reject deprecated options, set defaults.

    The external contract (option names, defaults, raised errors) is the
    same as always; see the djangorecipe changelogs referenced in the
    deprecation messages.
    """
    self.log = logging.getLogger(name)

    # Options that were removed in earlier releases: fail loudly with a
    # pointer to the relevant changelog.  Order matters: the first
    # deprecated option present wins.
    deprecations = (
        ('version',
         'The version option is deprecated. Read about the change on '
         'http://pypi.python.org/pypi/djangorecipe/0.99'),
        ('wsgilog',
         'The wsgilog option is deprecated. Read about the change on '
         'http://pypi.python.org/pypi/djangorecipe/2.0'),
        ('projectegg',
         'The projectegg option is deprecated. See the changelog for 2.0 '
         'at http://pypi.python.org/pypi/djangorecipe/2.0'),
        # Renamed between 1.9 and 1.10
        ('deploy_script_extra',
         "'deploy_script_extra' option found (with underscores). "
         "This has been renamed to 'deploy-script-extra'."),
        # Renamed between 2.0 and 2.1
        ('script-entrypoints',
         "The 'script-entrypoints' option is deprecated by "
         "'scripts-with-settings'. See the changelog for 2.1 at "
         "http://pypi.python.org/pypi/djangorecipe/2.1"),
    )
    for option, message in deprecations:
        if option in options:
            raise UserError(message)

    # Generic initialization.
    self.egg = zc.recipe.egg.Egg(buildout, options['recipe'], options)
    self.buildout, self.name, self.options = buildout, name, options
    options['location'] = os.path.join(
        buildout['buildout']['parts-directory'], name)
    options['bin-directory'] = buildout['buildout']['bin-directory']

    # Option defaults (includes the mod_wsgi support script options).
    for key, default in (
            ('project', 'project'),
            ('settings', 'development'),
            ('extra-paths', ''),
            ('initialization', ''),
            ('deploy-script-extra', ''),
            ('scripts-with-settings', ''),
            ('coverage', ''),
            ('wsgi', 'false'),
            ('logfile', '')):
        options.setdefault(key, default)

    # respect relative-paths (from zc.recipe.egg)
    relative_paths = options.get(
        'relative-paths',
        buildout['buildout'].get('relative-paths', 'false'))
    if relative_paths == 'true':
        options['buildout-directory'] = buildout['buildout']['directory']
        self._relative_paths = options['buildout-directory']
    else:
        self._relative_paths = ''
        assert relative_paths == 'false'
def install(self):
    """Let's build vscode settings file:

    This is the method will be called by buildout it-self and this recipe
    will generate or/update vscode setting file (.vscode/settings.json)
    based on provided options.
    """
    eggs_locations = set()
    develop_eggs_locations = set()
    # Develop-egg link files live in the develop-eggs directory; the
    # [:-9] strips the ".egg-link" suffix to recover the project name.
    develop_eggs = os.listdir(
        self.buildout["buildout"]["develop-eggs-directory"])
    develop_eggs = [dev_egg[:-9] for dev_egg in develop_eggs]
    try:
        # Resolve the working set for the configured eggs and collect the
        # filesystem location of every distribution that is not ignored.
        requirements, ws = self.egg.working_set()
        for dist in ws.by_key.values():
            project_name = dist.project_name
            if project_name not in self.ignored_eggs:
                eggs_locations.add(dist.location)
                # NOTE(review): in the collapsed original it is ambiguous
                # whether this check was nested under the ignore test;
                # nesting assumed — confirm against upstream.
                if project_name in develop_eggs:
                    develop_eggs_locations.add(dist.location)
        # Extra package paths configured directly on the recipe.
        for package in self.packages:
            eggs_locations.add(package)
    except Exception as exc:
        raise UserError(str(exc))
    try:
        # Merge with any settings.json the user already has; a missing
        # file is fine (IOError), malformed JSON is a hard error.
        with io.open(os.path.join(self.settings_dir, "settings.json"),
                     "r", encoding="utf-8") as fp:
            json_text = fp.read()
            existing_settings = json.loads(json_text)
    except ValueError as e:
        raise UserError(str(e))
    except IOError:
        existing_settings = dict()
    vscode_settings = self._prepare_settings(
        list(eggs_locations),
        list(develop_eggs_locations),
        existing_settings)
    self._write_project_file(vscode_settings, existing_settings)
    # Write json file values only those are generated by this recipe.
    # Also dodges (by giving fake like file) buildout to
    # remove original settings.json file.
    vs_generated_file = os.path.join(
        self.settings_dir, "vs-recipe-generated-settings.json")
    with io.open(vs_generated_file, "w", encoding="utf-8") as fp:
        json_text = json.dumps(vscode_settings, indent=2, sort_keys=True)
        fp.write(ensure_unicode(json_text))
    return vs_generated_file
def _test_source_directory(self):
    """Validate ``self.source``: it must be set, readable and a directory.

    Raises ``zc.buildout.UserError`` with a specific message for the
    first failing check.
    """
    from zc.buildout import UserError
    from os.path import isdir, exists
    from os import access, R_OK
    # Ordered checks: (passes, error message).  The first failure wins.
    checks = (
        (lambda: bool(self.source), "missing source_directory"),
        (lambda: access(self.source, R_OK),
         "cannot read %s" % self.source),
        (lambda: isdir(self.source),
         "%s is not a directory" % self.source),
    )
    for passes, message in checks:
        if not passes():
            raise UserError(message)
def _set_url(self):
    """Compute ``self.url`` for the Python tarball of this platform.

    Reads the ``version`` and ``download-base`` options (the latter
    falling back to DOWNLOAD_BASE) and raises a UserError when either
    is missing/empty.
    """
    from zc.buildout import UserError
    from infi.os_info import get_platform_string
    self.version = self.options.get('version', None)
    if not self.version:
        raise UserError("version option is missing")
    self.download_base = self.options.get('download-base', DOWNLOAD_BASE)
    if not self.download_base:
        raise UserError("download-base option is missing")
    tarball = "python-{0}-{1}.tar.gz".format(
        self.version, get_platform_string())
    self.url = "{0}/{1}".format(self.download_base, tarball)
def __init__(self, buildout, name, options):
    """Recipe that runs a user-supplied ``body`` script to detect a
    tool's version, then publishes the result as part options."""
    super().__init__(buildout, name, options)
    # Option defaults: version-file name plus numeric version fields,
    # all kept as strings (buildout options are strings).
    self.options.setdefault('version-file', name + '.ini')
    self.options.setdefault('required-major', '0')
    self.options.setdefault('required-minor', '0')
    self.options.setdefault('version-major', '0')
    self.options.setdefault('version-minor', '0')
    self.options.setdefault('version-debug', '0')
    # convert body into function, adapted from mr.scripty
    if not 'body' in self.options:
        raise UserError('Missing mandatory "body" option.')
    # Build the source of a checkVersion(self) function from the option
    # text.  Lines starting with "..." (doctest-style continuations)
    # have that prefix stripped; the ``indent`` flag toggles on lines
    # starting with '"""' so docstring bodies are not re-indented.
    newbody = 'def checkVersion(self):\n'
    indent = True
    for line in self.options['body'].split('\n'):
        if line.startswith("..."):
            line = line[4:]
        if indent:
            # NOTE(review): the collapsed original obscures how many
            # spaces this literal contains; four assumed — confirm.
            newbody += "    "
        newbody += line + '\n'
        if line.startswith('"""'):
            indent = not indent
    # Execute the generated source and bind the resulting function as a
    # method on this instance.
    exec(newbody, globals(), locals())
    f = types.MethodType(eval('checkVersion'), self)
    setattr(self, 'checkVersion', f)
    # version file
    self.version_file = os.path.join(
        buildout['buildout']['parts-directory'],
        self.options['version-file'])
    # check version using script method; it must return a 5-tuple of
    # (success, major, minor, debug, path).
    success, self.version_major, self.version_minor, self.version_debug, self.path = self.checkVersion(
    )
    if not success:
        raise UserError('Failed to check version.')
    # Publish the detected version back into the options so other parts
    # can reference it.
    self.options['version-major'] = str(self.version_major)
    self.options['version-minor'] = str(self.version_minor)
    self.options['version-debug'] = str(self.version_debug)
    self.options['path'] = self.path
    self.log.debug(
        'path %s version %s.%s.%s' % (
            self.options['path'],
            self.options['version-major'],
            self.options['version-minor'],
            self.options['version-debug']))
def offline_merge(self, revision):
    """Merge revision into current branch (no network access).

    *revision* may be a commit hash or a branch name; either must
    already be present in the local repository, otherwise a UserError
    is raised because we cannot fetch in offline mode.
    """
    if ishex(revision):
        kind, present = "Commit", self.has_commit(revision)
    else:
        kind, present = "Branch", self._is_a_branch(revision)
    if not present:
        raise UserError("%s %s not found in git repository "
                        "%s (offline mode)" % (kind, revision, self))
    cmd = self._no_edit(['git', 'merge', revision])
    with working_directory_keeper:
        os.chdir(self.target_dir)
        self.log_call(cmd)
def __init__(self, buildout, name, options):
    """Parse build/location options for libxml2 and libxslt.

    :raises UserError: when a library is neither built nor located, or
        when a static build is requested without building both libraries.
    """
    self.buildout, self.name, self.options = buildout, name, options
    self.logger = logging.getLogger(name)
    # force build option
    force = options.get("force")
    self.force = force in ("true", "True")
    # BUG FIX (here and below): normalise the option from the parsed
    # boolean.  The old code wrote back ``raw and "true" or "false"``,
    # so any non-empty raw value - including "false" - was stored as
    # "true", contradicting self.force/self.build_xslt/self.build_xml2.
    options["force"] = self.force and "true" or "false"
    # XLST build or location option
    build_xslt = options.get("build-libxslt", "true")
    self.build_xslt = build_xslt in ("true", "True")
    options["build-libxslt"] = self.build_xslt and "true" or "false"
    if not self.build_xslt:
        self.xslt_location = options.get("xslt-location")
        if not self.xslt_location:
            raise UserError(
                "You must either configure ``xslt-location`` or set"
                " ``build-libxslt`` to ``true``")
    # XML2 build or location option
    build_xml2 = options.get("build-libxml2", "true")
    self.build_xml2 = build_xml2 in ("true", "True")
    options["build-libxml2"] = self.build_xml2 and "true" or "false"
    if not self.build_xml2:
        self.xml2_location = options.get("xml2-location")
        if not self.xml2_location:
            raise UserError(
                "You must either configure ``xml2-location`` or set"
                " ``build-libxml2`` to ``true``")
    # static build option (defaults to true on darwin)
    static_build = options.get("static-build",
                               "darwin" in sys.platform and "true" or None)
    self.static_build = static_build in ("true", "True")
    if self.static_build and not (self.build_xml2 and self.build_xslt):
        raise UserError(
            "Static build is only possible if both "
            "``build-libxml2`` and ``build-libxslt`` are ``true``.")
    if self.static_build:
        self.logger.info("Static build requested.")
    options["static-build"] = self.static_build and "true" or "false"
    # our location
    location = options.get('location',
                           buildout['buildout']['parts-directory'])
    options['location'] = os.path.join(location, name)
def __init__(self, buildout, name, options):
    """Initialise the grape module-install recipe.

    :raises UserError: when the mandatory ``version`` option is missing.
    """
    self.options = options
    self.buildout = buildout
    self.name = 'grape.install_module'
    options.setdefault('strip-top-level-dir', 'false')
    options.setdefault('ignore-existing', 'false')
    options.setdefault('download-only', 'false')
    options.setdefault('hash-name', 'false')
    options.setdefault('on-update', 'true')
    options['filename'] = options.get('filename', '').strip()
    log = logging.getLogger(self.name)
    if not options.get('name'):
        log.warning('Module name was not specified - using part name')
        # BUG FIX: fall back to the buildout part name (the ``name``
        # argument), as the log message states.  self.name was just
        # hard-coded to 'grape.install_module' above, so using it here
        # gave every unnamed module the same recipe id as its name.
        options['name'] = name
    if not options.get('version'):
        log.error('Unable to get the %s version from the configuration '
                  'file', self.name)
        raise UserError('Module version is mandatory')
    if options.get('mode'):
        options['mode'] = options['mode'].strip()
    # buildout -vv (or more) will trigger verbose mode
    self.verbose = int(buildout['buildout'].get('verbosity', 0)) >= 20
    self.excludes = [x.strip()
                     for x in options.get('excludes', '').strip().splitlines()
                     if x.strip()]
def runPostgresCommand(self, cmd):
    """\
    Executes a command in single-user mode, with no daemon running.

    Multiple commands can be executed by providing newlines, preceeded
    by backslash, between them.
    See http://www.postgresql.org/docs/9.1/static/app-postgres.html

    :raises UserError: when postgres cannot be started or exits non-zero.
    """
    pgdata = self.options['pgdata-directory']
    postgres_binary = os.path.join(self.options['bin'], 'postgres')
    try:
        p = subprocess.Popen([
            postgres_binary,
            '--single',
            '-D', pgdata,
            'postgres',
        ], stdin=subprocess.PIPE)
        p.communicate((cmd + '\n').encode())
    except OSError:
        # Popen raises OSError when the binary cannot be executed.
        # BUG FIX: the old code caught CalledProcessError, which
        # Popen/communicate never raise, so failures went unnoticed.
        raise UserError('Could not create database %s' % pgdata)
    if p.returncode != 0:
        # Check the exit status explicitly (see BUG FIX note above).
        raise UserError('Could not create database %s' % pgdata)
def createCluster(self):
    """\
    A Postgres cluster is "a collection of databases that is managed
    by a single instance of a running database server".

    Here we create an empty cluster via ``initdb`` (UTF8 encoding,
    ident authentication, configured superuser).
    """
    initdb_binary = os.path.join(self.options['bin'], 'initdb')
    self.check_exists(initdb_binary)
    pgdata = self.options['pgdata-directory']
    command = [
        initdb_binary,
        '-D', pgdata,
        '-A', 'ident',
        '-E', 'UTF8',
        '-U', self.options['superuser'],
    ]
    try:
        subprocess.check_call(command)
    except subprocess.CalledProcessError:
        raise UserError('Could not create cluster directory in %s'
                        % pgdata)
def __init__(self, buildout, name, options):
    """Initialise the Solr recipe: validate options, set defaults and
    record the files this part owns."""
    self.buildout, self.name, self.options = buildout, name, options
    self.egg = zc.recipe.egg.Scripts(
        buildout, self.options["recipe"], options)
    # Check required options.
    # NOTE(review): the message formats the recipe name, not the
    # section/part name - possibly intended to be ``name``; confirm.
    for required_option in ("src",):
        if required_option not in self.options:
            raise UserError(
                'Please provide a "%s" in your Solr section "%s"'
                % (required_option, self.options["recipe"])
            )
    # Set default options
    self.options.setdefault("port", "8983")
    self.options.setdefault("solr-core-name", "plone")
    # Figure out default output file
    parts_directory = os.path.join(
        self.buildout["buildout"]["parts-directory"], __name__
    )
    if not os.path.exists(parts_directory):
        os.makedirs(parts_directory)
    # What files are tracked by this recipe
    bin_script = os.path.join(
        self.buildout["buildout"]["bin-directory"], self.name)
    self.files = [parts_directory, bin_script]
def __init__(self, buildout, name, options):
    """Initialise the jenkins recipe: validate required options, apply
    defaults and record the files this part owns."""
    self.buildout, self.name, self.options = buildout, name, options
    self.egg = zc.recipe.egg.Scripts(
        buildout, self.options['recipe'], options)
    # Check required options
    for required_option in ('hostname', 'jobname', 'username',
                            'password', 'jobconfig'):
        if required_option not in self.options:
            raise UserError(
                'Please provide a "%s" in your jenkins section "%s"'
                % (required_option, self.options['recipe']))
    # Set default options
    self.options.setdefault('port', '80')
    self.options.setdefault('jobconfig', 'jenkins_config.xml')
    self.options['config'] = os.path.join(
        self.buildout['buildout']['directory'],
        self.options['jobconfig'])
    # Figure out default output file
    plone_jenkins = os.path.join(
        self.buildout['buildout']['parts-directory'], __name__)
    if not os.path.exists(plone_jenkins):
        os.makedirs(plone_jenkins)
    # What files are tracked by this recipe
    bin_script = os.path.join(
        self.buildout['buildout']['bin-directory'], self.name)
    self.files = [plone_jenkins, bin_script]
def create_scripts_with_settings(self, extra_paths, ws):
    """Create duplicates of existing scripts - *with* a settings env.

    What we're installing here are existing setuptools entry points. We
    look up the script names in the list of available entry points and
    install a duplicate. We postfix the duplicate with '-with-settings',
    so that 'gunicorn' for instance becomes 'gunicorn-with-settings'.

    Returns the list of created script names; raises UserError when a
    requested script name matches no known console_scripts entry point.
    """
    zc.buildout.easy_install.script_template = (
        WSGI_HEADER +
        WSGI_TEMPLATE +
        self.options['deploy-script-extra'])
    script_names = [
        entrypoint.strip()
        for entrypoint in self.options.get(
            'scripts-with-settings').splitlines()
        if entrypoint.strip()
    ]
    if not script_names:
        return []
    settings = self.get_settings()
    postfix = '-with-settings'
    initialization = self.options['initialization']
    initialization += (
        "\n" +
        "os.environ['DJANGO_SETTINGS_MODULE'] = '%s'" % settings)
    created_scripts = []
    known_entrypoints = list(ws.iter_entry_points('console_scripts'))
    to_create = [
        entrypoint for entrypoint in known_entrypoints
        if entrypoint.name in script_names
    ]
    # BUG FIX: removed four leftover debug print() calls; the structured
    # self.log.debug call below carries the same information.
    for entrypoint in to_create:
        script_name = entrypoint.name + postfix
        dotted_path = entrypoint.module_name
        function_name = entrypoint.attrs[0]
        self.log.debug("Creating entrypoint %s:%s as %s", dotted_path,
                       function_name, script_name)
        zc.buildout.easy_install.scripts(
            [(script_name, dotted_path, function_name)],
            ws,
            sys.executable,
            self.options['bin-directory'],
            extra_paths=extra_paths,
            relative_paths=self._relative_paths,
            initialization=initialization)
        created_scripts.append(script_name)
    # Feedback logging: every requested name must have matched an
    # existing entry point.
    known_names = [entrypoint.name for entrypoint in known_entrypoints]
    unknown_script_names = [
        name for name in script_names if name not in known_names
    ]
    if unknown_script_names:
        raise UserError("Some script names couldn't be found: %s" %
                        (', '.join(unknown_script_names)))
    return created_scripts
def install(self):
    """Build the NEO service wrapper script.

    Fails early while the ``masters`` option is still empty (it is
    generated empty until all requested master nodes have their
    partitions allocated), then creates a python wrapper invoking the
    NEO binary with the assembled command line.
    """
    options = self.options
    # All parameters are always provided.
    # This parameter needs special care, because it is initially
    # generated empty, until all requested master nodes get their
    # partitions allocated.
    # Only then can this recipe start succeeding and actually doing
    # anything useful, as per NEO deploying constraints.
    if not options['masters']:
        raise UserError('"masters" parameter is mandatory')
    option_list = [options['binary']]
    option_list += ['-l', options['logfile']]
    option_list += ['-m', options['masters']]
    option_list += ['-b', self._getBindingAddress()]
    # TODO: reuse partition reference for better log readability.
    #'-n', options['name'],
    option_list += ['-c', options['cluster']]
    option_list.extend(self._getOptionList())
    wrapper = self.createPythonScript(
        options['wrapper'],
        'slapos.recipe.librecipe.execute.execute',
        option_list)
    return [wrapper]
def install(self):
    """Build the NEO service wrapper (SSL-aware variant).

    Same masters-must-be-filled precondition as the sibling recipe;
    additionally wires the CA/cert/key files when ``ssl`` is set and
    appends any ``extra-options`` (shell-split).
    """
    options = self.options
    # All parameters are always provided.
    # This parameter needs special care, because it is initially
    # generated empty, until all requested master nodes get their
    # partitions allocated.
    # Only then can this recipe start succeeding and actually doing
    # anything useful, as per NEO deploying constraints.
    if not options['masters']:
        raise UserError('"masters" parameter is mandatory')
    args = [
        options['binary'],
        # Keep the -l option first, as expected by logrotate snippets.
        '-l', options['logfile'],
        '-m', options['masters'],
        '-b', self._getBindingAddress(),
        # TODO: reuse partition reference for better log readability.
        #'-n', options['name'],
        '-c', options['cluster'],
    ]
    if options['ssl']:
        etc = os.path.join(
            self.buildout['buildout']['directory'], 'etc', '')
        for flag, filename in (('--ca', 'ca.crt'),
                               ('--cert', 'neo.crt'),
                               ('--key', 'neo.key')):
            args.append(flag)
            args.append(etc + filename)
    args += self._getOptionList()
    args += shlex.split(options.get('extra-options', ''))
    return self.createWrapper(options['wrapper'], args)
def install(self):
    """Create the wrapper script for the configured command line.

    Refuses to run when ``hash-files`` lists pre-existing (non-buildout
    generated) files; parses the optional environment block, wait-files,
    pidfile, private tmpfs and CPU-reservation options into the wrapper
    keyword arguments.
    """
    if self._existing:
        raise UserError(
            "hash-files must only list files that are generated by buildout:"
            "\n " + "\n ".join(self._existing))
    options = self.options
    args = shlex.split(options['command-line'])
    # Environment block: one KEY=VALUE per non-blank line; whitespace
    # around the key and value is trimmed.
    environment = {}
    for raw_line in (options.get('environment') or '').splitlines():
        entry = raw_line.strip()
        if not entry:
            continue
        key, value = entry.split('=', 1)
        environment[key.rstrip()] = value.lstrip()
    # Optional wrapper behaviours.
    kw = {}
    wait_files = options.get('wait-for-files')
    if wait_files:
        kw['wait_list'] = wait_files.split()
    pidfile = options.get('pidfile')
    if pidfile:
        kw['pidfile'] = pidfile
    private_tmpfs = self.parsePrivateTmpfs()
    if private_tmpfs:
        kw['private_tmpfs'] = private_tmpfs
    if self.isTrueValue(options.get('reserve-cpu')):
        kw['reserve_cpu'] = True
    return self.createWrapper(self.getWrapperPath(), args, environment,
                              **kw)
def _write_project_file(self, settings, existing_settings):
    """Project File Writer:

    Merge the generated settings over the user's existing ones, apply
    the python-file defaults for keys the user has not set, and write
    the result to ``settings.json`` sorted by key.
    """
    # Add some python file specific default setting
    for key, default in python_file_defaults.items():
        if key not in existing_settings:
            settings[key] = default
    target = os.path.join(self.settings_dir, "settings.json")
    with io.open(target, "w", encoding="utf-8") as fp:
        try:
            # Generated settings win over existing ones; emit the merged
            # mapping with stable, sorted key order.
            final_settings = existing_settings.copy()
            final_settings.update(settings)
            final_settings = OrderedDict(
                sorted(final_settings.items(), key=lambda t: t[0]))
            json_text = json.dumps(final_settings, indent=4,
                                   sort_keys=True)
            fp.write(ensure_unicode(json_text))
        except ValueError as exc:
            # catching any json error
            raise UserError(str(exc))
def get_update(self, revision):
    """Ensure that target_dir is a clone of url at specified revision.

    If target_dir already exists, does a simple pull.
    Offline-mode: no clone nor pull, but update.
    """
    target_dir = self.target_dir
    url = self.url
    offline = self.offline
    if os.path.exists(target_dir):
        self.update_hgrc_paths()
        # TODO what if remote repo is actually local fs ?
        if self.is_local_fixed_revision(revision):
            # Revision already pinned locally: just update to it.
            self._update(revision)
            return
        if not offline:
            self._pull()
        self._update(revision)
        return
    # Fresh clone required.
    # TODO case of local url ?
    if offline:
        raise UserError("hg repository %r does not exist; "
                        "cannot clone it from %r (offline mode)"
                        % (target_dir, url))
    logger.info("Cloning %s ...", url)
    clone_cmd = ['hg', 'clone']
    if revision:
        clone_cmd.extend(['-r', revision])
    clone_cmd.extend([url, target_dir])
    subprocess.check_call(clone_cmd, env=SUBPROCESS_ENV)
def check_symlink(linkname, target, **kwargs):
    """
    Check the existence of the symbolic link <linkname>; unlink it if
    it is a symlink pointing to another target.

    Return True, if the symlink needs to be (re)created;
    return False, if the symlink already exists with the given target;
    raise an error if <linkname> is a non-symlink filesystem object.

    NOTE(review): ``exists`` follows symlinks, so a dangling symlink is
    reported as "needs creating" without being unlinked first — confirm
    that callers handle that case.
    """
    if not exists(linkname):
        return True
    if islink(linkname):
        if readlink(linkname) == target:
            # Already pointing at the requested target: nothing to do.
            return False
        unlink(linkname)
        return True
    # A real (non-symlink) object is in the way: build a contextual
    # error message and bail out.
    parts = []
    pkg = kwargs.pop('pkg', None)
    if pkg:
        parts.append("Error processing package '%(pkg)s':" % locals())
    parts.append("Error creating symbolic link "
                 "to '%(target)s': '%(linkname)s' exists "
                 "but is not a symbolic link!" % locals())
    if kwargs:
        parts.append('UNUSED ARGUMENTS: %(kwargs)r' % locals())
    raise UserError(' '.join(parts))
def install(self):
    """Installer: write an example Postfix configuration.

    Generates the config files under POSTFIX_CONF_DIR, locks the part
    and reports the written files on stdout.  Always returns an empty
    tuple (buildout tracks no files for this part).
    """
    if self.should_run():
        d = self.buildout['buildout']['directory']
        configFolder = os.path.join(d, self.POSTFIX_CONF_DIR)
        try:
            configCreator = ConfigurationCreator()
            # The 'False' is deliberate
            ssl_option = self.options.get('use_ssl', 'False').lower()
            useSSL = ssl_option not in ['false', 'off', 'no']
            writtenFiles = configCreator.create(
                self.options['smtp2gs_path'],
                self.options['site'],
                self.options.get('port', ''),
                useSSL,
                configFolder)
        except OSError as e:
            m = '{0}: Failed to create example Postfix configuration :'\
                'in "{1}":\n{2}'
            raise UserError(m.format(self.name, d, e))
        else:
            self.mark_locked()
            fns = '\n '.join(writtenFiles)
            m = '\nExample Postfix configuration written to\n {0}\n'
            sys.stdout.write(m.format(fns))
    return tuple()
def __init__(self, buildout, name, options):
    """Recipe that locates a Python installation and exposes its
    properties (executable, version, ...) as part options."""
    from zc.buildout import UserError
    self.__logger = logger = logging.getLogger(name)
    # Log every incoming option for diagnostics.
    for k, v in options.items():
        logger.info('%s: %r', k, v)
    self.__recipe = options['recipe']
    # Marker value published for options that cannot be resolved.
    not_found = options.get('not-found', 'not-found')
    version = options.get('version', '').strip()
    if 'location' in options:
        # If location is explicitly specified, it must contain a Python
        # executable in its bin directory.  (The original comment said
        # "java" — apparently a copy/paste from a java recipe.)
        for found in contains_python_in_bin(options['location']):
            if not version or found['version'].startswith(version):
                # Python found, no further discovery required.
                options['executable'] = found['executable']
                return
        raise UserError('Python not found at %s' % options['location'])
    # Boolean-ish options controlling where discovery searches.
    in_wellknown = options.get('search-in-wellknown-places',
                               'true').lower().strip()
    in_wellknown = in_wellknown in ('true', 'yes', '1')
    in_path = options.get('search-in-path', 'true').lower().strip()
    in_path = in_path in ('true', 'yes', '1')
    founds = discover_python(in_wellknown=in_wellknown, in_path=in_path)
    # log_discovered logs each candidate and yields it through.
    founds = log_discovered('candidates', founds, EXPOSE_NAMES,
                            log=logger.debug)
    if version:
        # filter with version (prefix match on the version string)
        founds = (found for found in founds
                  if found['version'].startswith(version))
        founds = log_discovered('matching', founds, EXPOSE_NAMES,
                                log=logger.info)
    founds = list(founds)
    # location is not specified: try to discover a Python installation
    if founds:
        found = founds[0]
        logger.info('the first-matching one will be used:')
        expose_options(options, EXPOSE_NAMES, found,
                       not_found=not_found, logger=logger)
        return
    # ensure executable publishes not-found marker
    expose_options(options, ['executable'], dict(),
                   not_found=not_found, logger=logger)
    logger.warning('Python not found')
    return
def __init__(self, buildout, name, options):
    """Initialise the (older) djangorecipe: reject the removed
    ``version`` option, then fill in option defaults."""
    # The use of version is deprecated.
    if 'version' in options:
        raise UserError('The version option is deprecated. '
                        'Read about the change on '
                        'http://pypi.python.org/pypi/djangorecipe/0.99')
    self.log = logging.getLogger(name)
    self.egg = zc.recipe.egg.Egg(buildout, options['recipe'], options)
    self.buildout, self.name, self.options = buildout, name, options
    options['location'] = os.path.join(
        buildout['buildout']['parts-directory'], name)
    options['bin-directory'] = buildout['buildout']['bin-directory']
    for key, default in (
            ('project', 'project'),
            ('settings', 'development')):
        options.setdefault(key, default)
    options.setdefault('urlconf', options['project'] + '.urls')
    options.setdefault(
        'media_root',
        "os.path.join(os.path.dirname(__file__), 'media')")
    # Set this so the rest of the recipe can expect the values to be
    # there. We need to make sure that both pythonpath and extra-paths
    # are set for BBB.
    if 'extra-paths' in options:
        options['pythonpath'] = options['extra-paths']
    else:
        options.setdefault('extra-paths', options.get('pythonpath', ''))
    # Remaining defaults, including the mod_wsgi support script options.
    for key, default in (
            ('initialization', ''),
            ('wsgi', 'false'),
            ('wsgilog', ''),
            ('logfile', '')):
        options.setdefault(key, default)
def check_directory(dirname, **kwargs):
    """
    Check the existence of <dirname>, and assert it is not a
    non-directory.

    Return True, if the directory needs to be created, and False, if it
    does exist already; if the given path exists and is not a directory,
    raise a UserError.
    """
    if not exists(dirname):
        return True
    if isdir(dirname):
        return False
    # Something exists at that path but it is not a directory: compose
    # a contextual error message from the optional keyword arguments.
    parts = []
    key = kwargs.pop('key', None)
    if key is not None:
        parts.append('%(key)s:' % locals())
    else:
        # 'pkg' and 'package' are accepted synonyms; only consulted
        # when no 'key' was given, so a stray one still shows up below.
        pkg = kwargs.pop('pkg', None) or kwargs.pop('package', None)
        if pkg is not None:
            parts.append('Error processing package %(pkg)s:' % locals())
    parts.append("'%(dirname)s' exists but is not a directory!"
                 % locals())
    if kwargs:
        parts.append('UNUSED ARGUMENTS: %(kwargs)r' % locals())
    raise UserError(' '.join(parts))
def __init__(self, buildout, name, options):
    """Initialise the inklecate recipe: normalise option synonyms and
    resolve the input glob patterns to absolute file paths."""
    super().__init__(buildout, name, options, executable='inklecate.exe')
    self.options.setdefault('output-dir', '')
    # synonyms
    if 'output-directory' in self.options:
        self.options['output-dir'] = self.options['output-directory']
    if 'input' in self.options:
        self.options['inputs'] = self.options['input']
    # output directory
    self.output_directory = self.options['output-dir']
    # resolve input files
    if 'input' not in self.options:
        raise UserError('Missing mandatory "input" parameter.')
    # One glob pattern per line; keep only matches that are files.
    self.inputs_resolved = [
        os.path.abspath(match)
        for pattern in self.options['inputs'].splitlines()
        for match in glob.glob(pattern)
        if os.path.isfile(match)
    ]
    self.options['inputs_resolved'] = ' '.join(
        str(entry) for entry in self.inputs_resolved)
def runCommand(self, args, prefixArgs=None, parseLine=lambda line: True,
               quiet=False, expected=0):
    """Run the configured executable, streaming its output to the log.

    :param args: arguments appended after the executable.
    :param prefixArgs: arguments placed before the executable
        (e.g. an interpreter).  Defaults to none.
    :param parseLine: callback per output line; returning a falsy value
        marks the run as failed.
    :param quiet: suppress info-level logging of the output.
    :param expected: expected process return code.
    :raises CalledProcessError: on unexpected return code or a
        parseLine failure.
    :raises UserError: when the executable cannot be found.
    :returns: True on success.
    """
    # BUG FIX: avoid the shared mutable default ``prefixArgs=[]``.
    cmd = list(prefixArgs or []) + [self.options['executable']] + args
    self.log.debug(str(cmd))
    success = True
    try:
        # BUG FIX: the old call passed shell=True with a list, which on
        # POSIX runs only the first element through the shell and never
        # raises the FileNotFoundError handled below.  Run directly.
        with subprocess.Popen(cmd, stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT) as proc:
            for line in iter(proc.stdout.readline, b''):
                stripped = line.rstrip().decode('UTF-8')
                if not quiet:
                    self.log.info(stripped)
                if not parseLine(stripped):
                    success = False
            proc.communicate()
            self.log.debug('returned %d' % (proc.returncode))
            if proc.returncode != expected or not success:
                raise CalledProcessError(proc.returncode, cmd)
    except FileNotFoundError:
        raise UserError('Failed to execute "%s".' % (str(cmd)))
    return success