def customize_parser(self, parser, subparsers, with_status=True, epilog=None, **kwargs):
    """
    Add the SPEC related options to the parser.

    If *with_status* is true, options ``--stable``, ``--testing``,
    ``--unstable`` are also handled.
    """
    # Prepend the SPEC syntax explanation to any epilog given by subclasses.
    epilog = _("""
SPEC can either specify just a name or contain required versions
indications, for instance 'pkgname=1.0', or 'pkgname>=2.1'.
""") + (epilog or "")

    subp = super(WithSpec, self).customize_parser(
        parser, subparsers, epilog=epilog, **kwargs)

    subp.add_argument('spec', metavar='SPEC',
        help = _("name and optional version of the package"))

    if with_status:
        # All three flags store into opts.status; mutually exclusive.
        g = subp.add_mutually_exclusive_group(required=False)
        g.add_argument('--stable', dest='status',
            action='store_const', const=Spec.STABLE, default=Spec.STABLE,
            help=_("only accept stable distributions [default]"))
        g.add_argument('--testing', dest='status',
            action='store_const', const=Spec.TESTING,
            help=_("accept testing distributions too"))
        g.add_argument('--unstable', dest='status',
            action='store_const', const=Spec.UNSTABLE,
            help=_("accept unstable distributions too"))

    return subp
def get_option_parser():
    """
    Return an option parser populated with the available commands.

    The parser is populated with all the options defined by the implemented
    commands.  Only commands defining a ``name`` attribute are added.

    The function relies on the `Command` subclasses being already
    created: call `load_commands()` before calling this function.
    """
    parser = argparse.ArgumentParser(
        # usage = _("%(prog)s [global options] COMMAND [command options]"),
        description =
            _("Interact with the PostgreSQL Extension Network (PGXN)."),
    )
    parser.add_argument("--version", action='version',
        version="%%(prog)s %s" % __version__,
        help = _("print the version number and exit"))

    subparsers = parser.add_subparsers(
        title = _("available commands"),
        metavar = 'COMMAND',
        help = _("the command to execute."
            " The complete list is available using `pgxn help --all`."
            " Builtin commands are:"))

    # Only Command subclasses with a non-empty 'name' are real commands;
    # sort them so the help output is deterministic.
    clss = [ cls for cls in CommandType.subclasses if cls.name ]
    clss.sort(key=lambda c: c.name)
    for cls in clss:
        cls.customize_parser(parser, subparsers)

    return parser
def _inun(self, pdir):
    """Run 'make installcheck' on the extension unpacked in *pdir*.

    On failure, copy the regression output files into the current
    directory before re-raising, so the user can inspect them.
    """
    logger.info(_("checking extension"))
    upenv = self.get_psql_env()
    logger.debug("additional env: %s", upenv)
    env = os.environ.copy()
    env.update(upenv)

    cmd = ['installcheck']
    if 'PGDATABASE' in upenv:
        cmd.append("CONTRIB_TESTDB=" + env['PGDATABASE'])

    try:
        self.run_make(cmd, dir=pdir, env=env)
    except PgxnClientException:
        # if the test failed, copy locally the regression result
        for ext in ('out', 'diffs'):
            fn = os.path.join(pdir, 'regression.' + ext)
            if os.path.exists(fn):
                dest = './regression.' + ext
                # samefile guard: don't copy a file onto itself
                if not os.path.exists(dest) or not os.path.samefile(
                    fn, dest
                ):
                    logger.info(_('copying regression.%s'), ext)
                    shutil.copy(fn, dest)
        raise
def get_make(self, _cache=[]): """ Return the path of the make binary. """ # the cache is not for performance but to return a consistent value # even if the cwd is changed if _cache: return _cache[0] make = self.opts.make if os.path.split(make)[0]: # At least a relative dir specified. if not os.path.exists(make): raise PgxnClientException(_("make executable not found: %s") % make) # Convert to abs path to be robust in case the dir is changed. make = os.path.abspath(make) else: # we don't find make here and convert to abs path because it's a # security hole: make may be run under sudo and in this case we # don't want root to execute a make hacked in an user local dir if not find_executable(make): raise PgxnClientException(_("make executable not found: %s") % make) _cache.append(make) return make
def download(f, fn, rename=True):
    """Download a file locally.

    :param f: open file to read
    :param fn: name of the file to write. If a dir, save into it.
    :param rename: if true and a file *fn* exist, rename the downloaded file
        adding a prefix ``-1``, ``-2``... before the extension.

    Return the name of the file saved.
    """
    if os.path.isdir(fn):
        fn = get_local_file_name(fn, f.url)

    if rename:
        if os.path.exists(fn):
            base, ext = os.path.splitext(fn)
            # Try base-1.ext, base-2.ext... until a free name is found.
            for i in count(1):
                logger.debug(_("file %s exists"), fn)
                fn = "%s-%d%s" % (base, i, ext)
                if not os.path.exists(fn):
                    break

    logger.info(_("saving %s"), fn)
    try:
        fout = open(fn, "wb")
    except Exception as e:  # 'as e' works on Python 2.6+ and 3
        raise PgxnClientException(
            _("cannot open target file: %s: %s")
            % (e.__class__.__name__, e))

    # Bug fix: the original block stopped after opening the target,
    # leaving the file empty and returning None.  Actually copy the
    # content (in fixed-size chunks) and return the chosen file name.
    try:
        while 1:
            data = f.read(8192)
            if not data:
                break
            fout.write(data)
    finally:
        fout.close()

    return fn
def customize_parser(self, parser, subparsers, **kwargs):
    """Add the help-specific options: --all, --libexec and an optional CMD."""
    subp = super(Help, self).customize_parser(parser, subparsers, **kwargs)

    group = subp.add_mutually_exclusive_group()
    for flag, descr in [
        ('--all', _("list all the available commands")),
        ('--libexec', _("print the location of the scripts directory")),
    ]:
        group.add_argument(flag, action="store_true", help=descr)

    group.add_argument(
        'command',
        metavar='CMD',
        nargs='?',
        help=_("the command to get help about"),
    )

    # Keep a reference to the main parser to print the basic help.
    self._parser = parser

    return subp
def parse(self, spec):
    """Parse a spec string into a populated Spec instance.

    Raise BadSpecError if couldn't parse.
    """
    if os.sep in spec:
        # This is a local thing, let's see what
        if os.path.isdir(spec):
            return Spec(dirname=spec)
        elif os.path.exists(spec):
            return Spec(filename=spec)
        else:
            raise ResourceNotFound(_("cannot find '%s'") % spec)

    # split operator/version and name
    m = re.match(r'(.+?)(?:(==|=|>=|>|<=|<)(.*))?$', spec)
    if m is None:
        # Bug fix: the spec was passed as a second exception argument
        # instead of being interpolated into the message.
        raise BadSpecError(
            _("bad format for version specification: '%s'") % spec)

    name = Term(m.group(1))
    op = m.group(2)
    # '=' is accepted as an alias of '=='
    if op == '=':
        op = '=='

    if op is not None:
        ver = SemVer.clean(m.group(3))
    else:
        ver = None

    return Spec(name, op, ver)
def customize_parser(self, parser, subparsers, **kwargs):
    """Add the mutually exclusive --details/--meta/--readme/--versions flags."""
    subp = super(Info, self).customize_parser(parser, subparsers, **kwargs)

    group = subp.add_mutually_exclusive_group()
    options = [
        ('--details', 'details',
            _("show details about the distribution [default]")),
        ('--meta', 'meta', _("show the distribution META.json")),
        ('--readme', 'readme', _("show the distribution README")),
        ('--versions', 'versions',
            _("show the list of available versions")),
    ]
    for flag, const, descr in options:
        kw = {'dest': 'what', 'action': 'store_const',
              'const': const, 'help': descr}
        if const == 'details':
            # --details is also the default choice
            kw['default'] = 'details'
        group.add_argument(flag, **kw)

    return subp
def run_make(self, cmd, dir, env=None, sudo=None): """Invoke make with the selected command. :param cmd: the make target or list of options to pass make :param dir: the direcrory to run the command into :param env: variables to add to the make environment :param sudo: if set, use the provided command/arg to elevate privileges """ # check if the directory contains a makefile for fn in ('GNUmakefile', 'makefile', 'Makefile'): if os.path.exists(os.path.join(dir, fn)): break else: raise PgxnClientException( _("no Makefile found in the extension root")) cmdline = [] if sudo: cmdline.extend(shlex.split(sudo)) cmdline.extend([self.get_make(), 'PG_CONFIG=%s' % self.get_pg_config()]) if isinstance(cmd, basestring): cmdline.append(cmd) else: # a list cmdline.extend(cmd) logger.debug(_("running: %s"), cmdline) p = self.popen(cmdline, cwd=dir, shell=False, env=env, close_fds=True) p.communicate() if p.returncode: raise ProcessError(_("command returned %s: %s") % (p.returncode, ' '.join(cmdline)))
def customize_parser(self, parser, subparsers, **kwargs):
    """Add the --docs/--dist/--ext switches and the TERM query arguments."""
    subp = super(Search, self).customize_parser(
        parser, subparsers, **kwargs
    )

    group = subp.add_mutually_exclusive_group()
    where_options = [
        ('--docs', 'docs', _("search in documentation [default]")),
        ('--dist', "dists", _("search in distributions")),
        ('--ext', 'extensions', _("search in extensions")),
    ]
    for flag, const, descr in where_options:
        kw = dict(dest='where', action='store_const', const=const, help=descr)
        if flag == '--docs':
            # documentation search is the default
            kw['default'] = 'docs'
        group.add_argument(flag, **kw)

    subp.add_argument(
        'query', metavar='TERM', nargs='+', help=_("a string to search")
    )

    return subp
def load_ext(self, name, sqlfile):
    """Load extension *name* into the database.

    On PG >= 9.1, use CREATE EXTENSION when a control file exists;
    otherwise fall back to executing *sqlfile*, asking the user for
    confirmation when the file name had to be guessed.
    """
    logger.debug(_("loading extension '%s' with file: %s"), name, sqlfile)

    # A non-.sql file (e.g. a shared library) means this is not an
    # extension to load via SQL: skip silently after informing the user.
    if sqlfile and not sqlfile.endswith('.sql'):
        logger.info(
            _(
                "the specified file '%s' doesn't seem SQL:"
                " assuming '%s' is not a PostgreSQL extension"
            ),
            sqlfile,
            name,
        )
        return

    pgver = self.get_pg_version()

    if pgver >= (9, 1, 0):
        if self.is_extension(name):
            self.create_extension(name)
            return
        else:
            self.confirm(
                _(
                    """\
The extension '%s' doesn't contain a control file:
it will be installed as a loose set of objects.
Do you want to continue?"""
                )
                % name
            )

    confirm = False
    if not sqlfile:
        # No file specified: guess '<name>.sql' and confirm before loading.
        sqlfile = name + '.sql'
        confirm = True

    fn = self.find_sql_file(name, sqlfile)
    if confirm:
        self.confirm(
            _(
                """\
The extension '%s' doesn't specify a SQL file.
'%s' is probably the right one.
Do you want to load it?"""
            )
            % (name, fn)
        )

    # TODO: is confirmation asked only once? Also, check for repetition
    # in unload.
    if self._is_loaded(fn):
        logger.info(_("file %s already loaded"), fn)
    else:
        data = self.patch_for_schema(fn)
        self.load_sql(data=data)
        self._register_loaded(fn)
def customize_parser(self, parser, subparsers, **kwargs):
    """Add the --schema option and the optional EXT positional arguments."""
    subp = super(LoadUnload, self).customize_parser(
        parser, subparsers, **kwargs)

    subp.add_argument(
        "--schema",
        metavar="SCHEMA",
        type=Identifier.parse_arg,
        help=_("use SCHEMA instead of the default schema"),
    )

    subp.add_argument(
        "extensions",
        metavar="EXT",
        nargs="*",
        help=_("only specified extensions [default: all]"),
    )

    return subp
def customize_parser(self, parser, subparsers, **kwargs):
    """Add the optional URI argument and the --detailed flag."""
    subp = super(Mirror, self).customize_parser(
        parser, subparsers, **kwargs)

    uri_help = _("return detailed info about this mirror."
        " If not specified return a list of mirror URIs")
    subp.add_argument('uri', nargs='?', metavar="URI", help=uri_help)

    detailed_help = _("return full details for each mirror")
    subp.add_argument('--detailed', action="store_true", help=detailed_help)

    return subp
def maybe_run_configure(self, dir):
    """Run the './configure' script found in *dir*, if present.

    A missing script is not an error; a failing one raises
    PgxnClientException.
    """
    script = os.path.join(dir, "configure")
    logger.debug("checking '%s'", script)
    if not os.path.exists(script):
        return

    logger.info(_("running configure"))
    proc = self.popen(script, cwd=dir)
    proc.communicate()
    if proc.returncode:
        raise PgxnClientException(
            _("configure failed with return code %s") % proc.returncode)
def customize_parser(self, parser, subparsers, **kwargs):
    """Add the --sudo/--nosudo options to the parser.

    Both write to opts.sudo: --sudo PROG sets the program to use
    ('sudo' when the option is omitted entirely), --nosudo stores False.
    """
    subp = super(WithSudo, self).customize_parser(
        parser, subparsers, **kwargs)
    g = subp.add_mutually_exclusive_group()
    g.add_argument('--sudo', metavar="PROG", default='sudo',
        help = _("run PROG to elevate privileges when required"
            " [default: %(default)s]"))
    g.add_argument('--nosudo', dest='sudo', action='store_false',
        help = _("never elevate privileges"))
    return subp
def unload_ext(self, name, sqlfile):
    """Unload extension *name* from the database.

    On PG >= 9.1, use DROP EXTENSION when a control file exists;
    otherwise look for an 'uninstall_*.sql' script and, after
    confirmation, execute it.
    """
    logger.debug(
        _("unloading extension '%s' with file: %s"), name, sqlfile
    )

    # A non-.sql file means this is not an extension to unload via SQL.
    if sqlfile and not sqlfile.endswith('.sql'):
        logger.info(
            _(
                "the specified file '%s' doesn't seem SQL:"
                " assuming '%s' is not a PostgreSQL extension"
            ),
            sqlfile,
            name,
        )
        return

    pgver = self.get_pg_version()

    if pgver >= (9, 1, 0):
        if self.is_extension(name):
            self.drop_extension(name)
            return
        else:
            self.confirm(
                _(
                    """\
The extension '%s' doesn't contain a control file:
will look for an SQL script to unload the objects.
Do you want to continue?"""
                )
                % name
            )

    if not sqlfile:
        sqlfile = name + '.sql'

    # By convention the unload script is the load one prefixed with
    # 'uninstall_' in the same directory.
    tmp = os.path.split(sqlfile)
    sqlfile = os.path.join(tmp[0], 'uninstall_' + tmp[1])

    fn = self.find_sql_file(name, sqlfile)
    self.confirm(
        _(
            """\
In order to unload the extension '%s' looks like you will have
to load the file '%s'.
Do you want to execute it?"""
        )
        % (name, fn)
    )

    data = self.patch_for_schema(fn)
    self.load_sql(data=data)
def customize_parser(self, parser, subparsers, **kwargs):
    """Add the --sudo/--nosudo options to the parser.

    --sudo accepts an optional PROG (const 'sudo' when given bare);
    --nosudo is kept for backward compatibility and stores False.
    """
    subp = super(WithSudo, self).customize_parser(
        parser, subparsers, **kwargs)
    g = subp.add_mutually_exclusive_group()
    g.add_argument('--sudo', metavar="PROG", const='sudo', nargs="?",
        help = _("run PROG to elevate privileges when required"
            " [default: %(const)s]"))
    g.add_argument('--nosudo', dest='sudo', action='store_false',
        help = _("never elevate privileges "
            "(no more needed: for backward compatibility)"))
    return subp
def unpack(zipname, destdir):
    """Extract the zip file *zipname* into *destdir*.

    Refuse archive members that would escape *destdir*; restore the
    executable bit for files starting with a hashbang.  Return the
    directory to work into: the first unpacked subdir containing a
    makefile, or *destdir* itself.
    """
    logger.info(_("unpacking: %s"), zipname)
    destdir = os.path.abspath(destdir)
    zf = ZipFile(zipname, 'r')
    try:
        for fn in zf.namelist():
            fname = os.path.abspath(os.path.join(destdir, fn))
            # Security fix: compare against destdir + separator, otherwise
            # a sibling such as '/dest-evil' passes a plain prefix test.
            if fname != destdir and not fname.startswith(destdir + os.sep):
                raise PgxnClientException(
                    _("archive file '%s' trying to escape!") % fname)

            # Looks like checking for a trailing / is the only way to
            # tell if the file is a directory.
            if fn.endswith('/'):
                os.makedirs(fname)
                continue

            # The directory is not always explicitly present in the archive
            if not os.path.exists(os.path.dirname(fname)):
                os.makedirs(os.path.dirname(fname))

            # Copy the file content
            logger.debug(_("saving: %s"), fname)
            fout = open(fname, "wb")
            try:
                data = zf.read(fn)
                # In order to restore the executable bit, I haven't find
                # anything that looks like an executable flag in the zipinfo,
                # so look at the hashbangs...
                isexec = data[:2] == b('#!')
                fout.write(data)
            finally:
                fout.close()

            if isexec:
                os.chmod(fname, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
    finally:
        zf.close()

    # Choose the directory where to work. Because we are mostly a wrapper for
    # pgxs, let's look for a makefile. The zip should contain a single base
    # directory, so return the first dir we found containing a Makefile,
    # alternatively just return the unpacked dir
    for dir in os.listdir(destdir):
        for fn in ('Makefile', 'makefile', 'GNUmakefile', 'configure'):
            if os.path.exists(os.path.join(destdir, dir, fn)):
                return os.path.join(destdir, dir)

    return destdir
def _get_dist_data(self, name):
    """Return the API data for the distribution *name*.

    If no such distribution exists but an extension with that name does,
    log the distributions containing it as a hint, then re-raise the
    original NotFound.
    """
    try:
        return self.api.dist(name)
    except NotFound as e:
        # maybe the user was looking for an extension instead?
        try:
            ext = self.api.ext(name)
        except NotFound:
            pass
        else:
            vs = ext.get('versions', {})
            for extver, ds in vs.items():
                for d in ds:
                    if 'dist' not in d:
                        continue
                    dist = d['dist']
                    distver = d.get('version', 'unknown')
                    logger.info(
                        _("extension %s %s found in distribution %s %s"),
                        name, extver, dist, distver,
                    )

        # Re-raise the original "distribution not found" error.
        raise e
def print_details(self, spec):
    """Print the main metadata fields of the best distribution for *spec*.

    NOTE: Python 2 only code ('print' statements, dict.iteritems).
    """
    data = self._get_dist_data(spec.name)
    ver = self.get_best_version(data, spec, quiet=True)
    data = self.api.meta(spec.name, ver)

    # Scalar/simple fields first, in a fixed presentation order.
    for k in [u'name', u'abstract', u'description', u'maintainer',
            u'license', u'release_status', u'version', u'date', u'sha1']:
        try:
            v = data[k]
        except KeyError:
            logger.warn(_("data key '%s' not found"), k)
            continue

        if isinstance(v, list):
            for vv in v:
                print "%s: %s" % (k, vv)
        elif isinstance(v, dict):
            for kk, vv in v.iteritems():
                print "%s: %s: %s" % (k, kk, vv)
        else:
            print "%s: %s" % (k, v)

    # 'provides' maps extension name -> details dict with a 'version'.
    k = 'provides'
    for ext, dext in data[k].iteritems():
        print "%s: %s: %s" % (k, ext, dext['version'])

    # 'prereqs' is nested: phase -> relationship -> package -> version.
    k = 'prereqs'
    if k in data:
        for phase, rels in data[k].iteritems():
            for rel, pkgs in rels.iteritems():
                for pkg, ver in pkgs.iteritems():
                    print "%s: %s: %s %s" % (phase, rel, pkg, ver)
def customize_parser(self, parser, subparsers, **kwargs):
    """Add the --target option to choose where to save the download."""
    subp = super(Download, self).customize_parser(
        parser, subparsers, **kwargs)

    target_help = _('Target directory and/or filename to save')
    subp.add_argument(
        '--target', metavar='PATH', default='.', help=target_help)

    return subp
def print_details(self, spec):
    """Emit the main metadata fields of the best distribution for *spec*."""
    data = self._get_dist_data(spec.name)
    ver = self.get_best_version(data, spec, quiet=True)
    data = self.api.meta(spec.name, ver)

    # Scalar/simple fields first, in a fixed presentation order.
    simple_keys = [
        u'name', u'abstract', u'description', u'maintainer', u'license',
        u'release_status', u'version', u'date', u'sha1',
    ]
    for k in simple_keys:
        if k not in data:
            logger.warning(_("data key '%s' not found"), k)
            continue
        v = data[k]
        if isinstance(v, list):
            for item in v:
                emit("%s: %s" % (k, item))
        elif isinstance(v, dict):
            for subkey, item in v.items():
                emit("%s: %s: %s" % (k, subkey, item))
        else:
            emit("%s: %s" % (k, v))

    # 'provides' maps extension name -> details dict with a 'version'.
    k = 'provides'
    for ext, dext in data[k].items():
        emit("%s: %s: %s" % (k, ext, dext['version']))

    # 'prereqs' is nested: phase -> relationship -> package -> version.
    k = 'prereqs'
    if k in data:
        for phase, rels in data[k].items():
            for rel, pkgs in rels.items():
                for pkg, pkgver in pkgs.items():
                    emit("%s: %s: %s %s" % (phase, rel, pkg, pkgver))
def open(self):
    """Open the tar archive for reading.

    Raise PgxnClientException if the archive cannot be opened.
    """
    assert not self._file, "archive already open"
    try:
        self._file = tarfile.open(self.filename, 'r')
    except Exception as e:  # fixed: 'except Exception, e' is py2-only syntax
        raise PgxnClientException(
            _("cannot open archive '%s': %s") % (self.filename, e))
def get_meta(self, spec):
    """
    Return the content of the ``META.json`` file for *spec*.

    Return the object obtained parsing the JSON.
    """
    if not spec.is_local():
        # Get the metadata from the API
        try:
            data = self.api.dist(spec.name)
        except NotFound:
            # Distro not found: maybe it's an extension?
            ext = self.api.ext(spec.name)
            name, ver = self.get_best_version_from_ext(ext, spec)
            return self.api.meta(name, ver)
        else:
            ver = self.get_best_version(data, spec)
            return self.api.meta(spec.name, ver)

    elif spec.is_dir():
        # Get the metadata from a directory
        fn = os.path.join(spec.dirname, 'META.json')
        logger.debug("reading %s", fn)
        if not os.path.exists(fn):
            # Bug fix: the message interpolated the builtin 'dir' instead
            # of the directory actually searched (cf. the sibling version
            # using spec.dirname).
            raise PgxnClientException(
                _("file 'META.json' not found in '%s'") % spec.dirname)

        return load_json(open(fn))

    elif spec.is_file():
        # Get the metadata from a zip file
        return get_meta_from_zip(spec.filename)
def get_file(url):
    """Open *url* and return a closing context manager around the response.

    HTTP failures are translated into pgxnclient exceptions.
    """
    opener = urllib2.build_opener()
    opener.addheaders = [("User-agent", "pgxnclient/%s" % __version__)]
    logger.debug("opening url: %s", url)
    try:
        return closing(opener.open(url))
    except urllib2.HTTPError as e:  # fixed: 'except X, e' is py2-only syntax
        if e.code == 404:
            raise ResourceNotFound(_("resource not found: '%s'") % e.url)
        elif e.code == 400:
            raise BadRequestError(_("bad request on '%s'") % e.url)
        elif e.code == 500:
            raise NetworkError(_("server error"))
        elif e.code == 503:
            raise NetworkError(_("service unavailable"))
        else:
            raise NetworkError(_("unexpected response %d for '%s'")
                % (e.code, e.url))
def unpack(self, destdir):
    """Extract the tar archive into *destdir*.

    Refuse archive members that would escape *destdir*.  Return the
    directory to work into, as found by _find_work_directory().
    """
    tarname = self.filename
    logger.info(_("unpacking: %s"), tarname)
    destdir = os.path.abspath(destdir)
    self.open()
    try:
        for fn in self.list_files():
            fname = os.path.abspath(os.path.join(destdir, fn))
            # Security fix: compare against destdir + separator, otherwise
            # a sibling such as '/dest-evil' passes a plain prefix test.
            if fname != destdir and not fname.startswith(destdir + os.sep):
                raise PgxnClientException(
                    _("archive file '%s' trying to escape!") % fname)

        self._file.extractall(path=destdir)
    finally:
        self.close()

    return self._find_work_directory(destdir)
def print_all_commands(self):
    """Print the list of known commands, one per line, under a title.

    NOTE: Python 2 only ('print' statements).
    """
    cmds = self.find_all_commands()
    title = _("Available PGXN Client commands")
    print title
    print "-" * len(title)
    for cmd in cmds:
        print " " + cmd
def print_all_commands(self):
    """Emit the list of known commands, one per line, under a title."""
    commands = self.find_all_commands()
    header = _("Available PGXN Client commands")
    emit(header)
    emit("-" * len(header))
    for name in commands:
        emit(" " + name)
def parse(self, spec):
    """Parse a spec string into a populated Spec instance.

    Raise BadSpecError if couldn't parse.
    """
    # check if it's a network resource
    if spec.startswith('http://') or spec.startswith('https://'):
        return Spec(url=spec)

    # check if it's a local resource
    if spec.startswith('file://'):
        try_file = unquote_plus(spec[len('file://') :])
    elif os.sep in spec:
        try_file = spec
    else:
        try_file = None

    if try_file:
        # This is a local thing, let's see what
        if os.path.isdir(try_file):
            return Spec(dirname=try_file)
        elif os.path.exists(try_file):
            return Spec(filename=try_file)
        else:
            raise ResourceNotFound(_("cannot find '%s'") % try_file)

    # so we think it's a PGXN spec

    # split operator/version and name
    m = re.match(r'(.+?)(?:(==|=|>=|>|<=|<)(.*))?$', spec)
    if m is None:
        # Bug fix: the spec was passed as a second exception argument
        # instead of being interpolated into the message.
        raise BadSpecError(
            _("bad format for version specification: '%s'") % spec
        )

    name = Term(m.group(1))
    op = m.group(2)
    # '=' is accepted as an alias of '=='
    if op == '=':
        op = '=='

    if op is not None:
        ver = SemVer.clean(m.group(3))
    else:
        ver = None

    return Spec(name, op, ver)
def unpack(self, destdir):
    """Extract the zip archive into *destdir*.

    Refuse archive members that would escape *destdir*; restore the
    executable bit for files starting with a hashbang.  Return the
    directory to work into, as found by _find_work_directory().
    """
    zipname = self.filename
    logger.info(_("unpacking: %s"), zipname)
    destdir = os.path.abspath(destdir)
    self.open()
    try:
        for fn in self.list_files():
            fname = os.path.abspath(os.path.join(destdir, fn))
            # Security fix: compare against destdir + separator, otherwise
            # a sibling such as '/dest-evil' passes a plain prefix test.
            if fname != destdir and not fname.startswith(destdir + os.sep):
                raise PgxnClientException(
                    _("archive file '%s' trying to escape!") % fname
                )

            # Looks like checking for a trailing / is the only way to
            # tell if the file is a directory.
            if fn.endswith('/'):
                os.makedirs(fname)
                continue

            # The directory is not always explicitly present in the archive
            if not os.path.exists(os.path.dirname(fname)):
                os.makedirs(os.path.dirname(fname))

            # Copy the file content
            logger.debug(_("saving: %s"), fname)
            fout = open(fname, "wb")
            try:
                data = self.read(fn)
                # In order to restore the executable bit, I haven't find
                # anything that looks like an executable flag in the zipinfo,
                # so look at the hashbangs...
                isexec = data[:2] == b'#!'
                fout.write(data)
            finally:
                fout.close()

            if isexec:
                os.chmod(
                    fname, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC
                )
    finally:
        self.close()

    return self._find_work_directory(destdir)
def from_file(filename):
    """Return an `Archive` instance to handle the file *filename*"""
    from pgxnclient.zip import ZipArchive
    from pgxnclient.tar import TarArchive

    # Probe each known archive type until one recognizes the file.
    for archive_class in (ZipArchive, TarArchive):
        candidate = archive_class(filename)
        if candidate.can_open():
            return candidate

    raise PgxnClientException(
        _("can't open archive '%s': file type not recognized") % filename
    )
def download(f, fn, rename=True):
    """Download a file locally.

    :param f: open file to read
    :param fn: name of the file to write. If a dir, save into it.
    :param rename: if true and a file *fn* exist, rename the downloaded file
        adding a prefix ``-1``, ``-2``... before the extension.

    Return the name of the file saved.
    """
    if os.path.isdir(fn):
        fn = get_local_file_name(fn, f.url)

    if rename:
        if os.path.exists(fn):
            base, ext = os.path.splitext(fn)
            # Try base-1.ext, base-2.ext... until a free name is found.
            for i in count(1):
                logger.debug(_("file %s exists"), fn)
                fn = "%s-%d%s" % (base, i, ext)
                if not os.path.exists(fn):
                    break

    logger.info(_("saving %s"), fn)
    try:
        fout = open(fn, "wb")
    except Exception as e:
        raise PgxnClientException(
            _("cannot open target file: %s: %s")
            % (e.__class__.__name__, e))

    try:
        # Copy in fixed-size chunks to bound memory usage.
        while 1:
            data = f.read(8192)
            if not data:
                break
            fout.write(data)
    finally:
        fout.close()

    return fn
def __make_subparser(
    self, parser, subparsers, description=None, epilog=None
):
    """Create a new subparser with help populated."""
    subp = subparsers.add_parser(
        self.name,
        help=gettext(self.description),
        description=description or gettext(self.description),
        # the standard -h/--help is re-added manually below as --help only
        add_help=False,
        epilog=epilog,
    )
    # Record which command object handles this subparser.
    subp.set_defaults(cmd=self)

    # Drop the conflicting -h argument
    subp.add_argument(
        "--help",
        action='help',
        default=argparse.SUPPRESS,
        help=_('show this help message and exit'),
    )

    glb = subp.add_argument_group(_("global options"))
    glb.add_argument(
        "--mirror",
        metavar="URL",
        default='https://api.pgxn.org/',
        help=_("the mirror to interact with [default: %(default)s]"),
    )
    glb.add_argument(
        "--verbose", action='store_true', help=_("print more information")
    )
    glb.add_argument(
        "--yes",
        action='store_true',
        help=_("assume affirmative answer to all questions"),
    )
    return subp
def run_make(self, cmd, dir, env=None, sudo=None): """Invoke make with the selected command. :param cmd: the make target or list of options to pass make :param dir: the direcrory to run the command into :param env: variables to add to the make environment :param sudo: if set, use the provided command/arg to elevate privileges """ # check if the directory contains a makefile for fn in ('GNUmakefile', 'makefile', 'Makefile'): if os.path.exists(os.path.join(dir, fn)): break else: raise PgxnClientException( _("no Makefile found in the extension root")) cmdline = [] if sudo: cmdline.extend(shlex.split(sudo)) cmdline.extend( [self.get_make(), 'PG_CONFIG=%s' % self.get_pg_config()]) if isinstance(cmd, six.string_types): cmdline.append(cmd) else: # a list cmdline.extend(cmd) logger.debug(_("running: %s"), cmdline) p = self.popen(cmdline, cwd=dir, shell=False, env=env, close_fds=True) p.communicate() if p.returncode: raise ProcessError( _("command returned %s: %s") % (p.returncode, ' '.join(cmdline)))
def get_meta(self):
    """Return the parsed content of the META.json file in the archive.

    Raise PgxnClientException if no META.json is found.
    """
    filename = self.filename

    self.open()
    try:
        # Return the first file with the expected name
        for fn in self.list_files():
            if fn.endswith('META.json'):
                return load_jsons(self.read(fn).decode('utf8'))
        else:
            # for/else: only reached when the loop finds no META.json
            raise PgxnClientException(
                _("file 'META.json' not found in archive '%s'") % filename)
    finally:
        self.close()
def parse(self, s):
    """
    Split a valid version number in components
    (major, minor, patch, trail).
    """
    match = re_semver.match(s)
    if match is None:
        raise ValueError(_("bad version number: '%s'") % s)

    # patch and trail are optional in the regex; normalize to 0 / ''.
    major, minor, patch, trail = match.groups()
    patch = int(patch) if patch else 0
    trail = trail or ''

    return (int(major), int(minor), patch, trail)
def clean(self, s):
    """
    Convert an invalid but still recognizable version number into a SemVer.
    """
    match = re_clean.match(s.strip())
    if match is None:
        raise ValueError(_("bad version number: '%s' - can't clean") % s)

    # Missing numeric components default to 0; the trail (if any) is
    # normalized to a '-'-prefixed, stripped suffix.
    major, minor, patch, trail = match.groups()
    major = int(major) if major else 0
    minor = int(minor) if minor else 0
    patch = int(patch) if patch else 0
    trail = ('-' + trail.strip()) if trail else ''

    return "%d.%d.%d%s" % (major, minor, patch, trail)
def _get_best_version(self, vers, spec, quiet):
    """Pick the highest version at an acceptable release status.

    *vers* is indexed by release level (it is read below with
    Spec.TESTING/Spec.UNSTABLE as indexes), each entry the best version
    at that level or None.  Raise ResourceNotFound when nothing fits,
    with a hint about less stable versions if any exist.
    """
    # Is there any result at the desired release status?
    want = [
        v
        for lvl, v in enumerate(vers)
        if lvl >= self.opts.status and v is not None
    ]
    if want:
        ver = max(want)
        if not quiet:
            logger.info(_("best version: %s %s"), spec.name, ver)
        return ver

    # Not found: is there any hint we can give?
    if self.opts.status > Spec.TESTING and vers[Spec.TESTING]:
        hint = (vers[Spec.TESTING], _('testing'))
    elif self.opts.status > Spec.UNSTABLE and vers[Spec.UNSTABLE]:
        hint = (vers[Spec.UNSTABLE], _('unstable'))
    else:
        hint = None

    msg = _("no suitable version found for %s") % spec
    if hint:
        msg += _(" but there is version %s at level %s") % hint

    raise ResourceNotFound(msg)
def customize_parser(self, parser, subparsers, epilog=None, **kwargs):
    """Prepend a note to the epilog: SPEC may also be an URL."""
    note = _(
        """
SPEC may also be an url specifying a protocol such as 'http://' or
'https://'.
"""
    )
    subp = super(WithSpecUrl, self).customize_parser(
        parser, subparsers, epilog=note + (epilog or ""), **kwargs
    )
    return subp
def popen(self, cmd, *args, **kwargs):
    """
    Execute subprocess.Popen.

    Commands should use this method instead of importing subprocess.Popen:
    this allows replacement with a mock in the test suite.
    """
    logger.debug("running command: %s", cmd)
    try:
        return Popen(cmd, *args, **kwargs)
    except OSError as e:
        # Render the command line as a single string for the message.
        if isinstance(cmd, six.string_types):
            pretty = cmd
        else:
            pretty = ' '.join(cmd)
        raise ProcessError(_("%s running command: %s") % (e, pretty))
def get_spec(self, _can_be_local=False, _can_be_url=False):
    """
    Return the package specification requested.

    Return a `Spec` instance.
    """
    raw = self.opts.spec

    try:
        spec = Spec.parse(raw)
    except (ValueError, BadSpecError) as e:
        # parser.error() exits the program with a usage message.
        self.parser.error(_("cannot parse package '%s': %s") % (raw, e))

    if spec.is_local() and not _can_be_local:
        raise PgxnClientException(
            _("you cannot use a local resource with this command")
        )

    if spec.is_url() and not _can_be_url:
        raise PgxnClientException(
            _("you cannot use an url with this command")
        )

    return spec
def _inun(self, pdir):
    """Run 'make installcheck' on the extension unpacked in *pdir*.

    On failure, copy the regression output files into the current
    directory before re-raising, so the user can inspect them.
    """
    logger.info(_("checking extension"))
    upenv = self.get_psql_env()
    logger.debug("additional env: %s", upenv)
    env = os.environ.copy()
    env.update(upenv)

    cmd = ['installcheck']
    if 'PGDATABASE' in upenv:
        cmd.append("CONTRIB_TESTDB=" + env['PGDATABASE'])

    try:
        self.run_make(cmd, dir=pdir, env=env)
    except PgxnClientException:
        # if the test failed, copy locally the regression result
        for ext in ('out', 'diffs'):
            fn = os.path.join(pdir, 'regression.' + ext)
            if os.path.exists(fn):
                dest = './regression.' + ext
                # samefile guard: don't copy a file onto itself
                if not os.path.exists(dest) or not os.path.samefile(
                        fn, dest):
                    logger.info(_('copying regression.%s'), ext)
                    shutil.copy(fn, dest)
        raise
def customize_parser(self, parser, subparsers, **kwargs):
    """
    Add the ``--pg_config`` option to the options parser.
    """
    subp = super(WithPgConfig, self).customize_parser(
        parser, subparsers, **kwargs)

    pg_config_help = _(
        "the pg_config executable to find the database"
        " [default: %(default)s]")
    subp.add_argument(
        '--pg_config', metavar="PROG", default='pg_config',
        help=pg_config_help)

    return subp
def customize_parser(self, parser, subparsers, **kwargs):
    """
    Add the ``--make`` option to the options parser.
    """
    subp = super(WithMake, self).customize_parser(
        parser, subparsers, **kwargs)

    make_help = _(
        "the 'make' executable to use to build the extension "
        "[default: %(default)s]")
    subp.add_argument(
        '--make', metavar="PROG", default=self._find_default_make(),
        help=make_help)

    return subp
def patch_for_schema(self, fn):
    """
    Patch a sql file to set the schema where the commands are executed.

    If no schema has been requested, return the data unchanged.
    Else, ask for confirmation and return the data for a patched file.

    The schema is only useful for PG < 9.1: for proper PG extensions
    there is no need to patch the sql.
    """
    schema = self.opts.schema

    # 'with' guarantees the file is closed (was a try/finally pair).
    with open(fn) as f:
        data = f.read()

    if not schema:
        return data

    self._check_schema_exists(schema)

    # Replace an existing "SET search_path" statement, or prepend one.
    re_path = re.compile(r'SET\s+search_path\s*(?:=|to)\s*([^;]+);', re.I)
    m = re_path.search(data)
    if m is None:
        newdata = ("SET search_path = %s;\n\n" % schema) + data
    else:
        newdata = re_path.sub("SET search_path = %s;" % schema, data)

    # Show the user a diff of what is about to change and ask to proceed.
    diff = ''.join(
        difflib.unified_diff(
            [r + '\n' for r in data.splitlines()],
            [r + '\n' for r in newdata.splitlines()],
            fn,
            fn + ".schema",
        )
    )
    msg = _(
        """
In order to operate in the schema %s, the following changes will
be performed:\n\n%s\n\nDo you want to continue?"""
    )
    self.confirm(msg % (schema, diff))

    return newdata
def customize_parser(self, parser, subparsers, epilog=None, **kwargs):
    """Prepend a note to the epilog: SPEC may also be a local path.

    The note explains the path must contain a separator (os.sep) to be
    recognized as local instead of a package name.
    """
    epilog = (
        _(
            """
SPEC may also be a local zip file or unpacked directory, but in this case
it should contain at least a '%s', for instance '.%spkgname.zip'.
"""
        )
        % (os.sep, os.sep)
        + (epilog or "")
    )

    subp = super(WithSpecLocal, self).customize_parser(
        parser, subparsers, epilog=epilog, **kwargs
    )

    return subp
def call_pg_config(self, what, _cache={}):
    """
    Call :program:`pg_config` and return its output.

    NOTE: the mutable default dict is deliberate: it caches the output
    across calls, keyed by the requested option name.
    """
    if what in _cache:
        return _cache[what]

    logger.debug("running pg_config --%s", what)
    cmdline = [self.get_pg_config(), "--%s" % what]
    p = self.popen(cmdline, stdout=PIPE)
    out, err = p.communicate()
    if p.returncode:
        raise ProcessError(
            _("command returned %s: %s") % (p.returncode, cmdline))

    out = out.rstrip().decode('utf-8')
    rv = _cache[what] = out
    return rv
def get_meta(self, spec):
    """
    Return the content of the ``META.json`` file for *spec*.

    Return the object obtained parsing the JSON.
    """
    if spec.is_name():
        # Get the metadata from the API
        try:
            data = self.api.dist(spec.name)
        except NotFound:
            # Distro not found: maybe it's an extension?
            ext = self.api.ext(spec.name)
            name, ver = self.get_best_version_from_ext(ext, spec)
            return self.api.meta(name, ver)
        else:
            ver = self.get_best_version(data, spec)
            return self.api.meta(spec.name, ver)

    elif spec.is_dir():
        # Get the metadata from a directory
        fn = os.path.join(spec.dirname, 'META.json')
        logger.debug("reading %s", fn)
        if not os.path.exists(fn):
            raise PgxnClientException(
                _("file 'META.json' not found in '%s'") % spec.dirname
            )

        with open(fn) as f:
            return load_json(f)

    elif spec.is_file():
        # Get the metadata from an archive file
        arc = archive.from_spec(spec)
        return arc.get_meta()

    elif spec.is_url():
        # Download the archive to a temp dir and read the metadata there.
        with network.get_file(spec.url) as fin:
            with temp_dir() as dir:
                fn = network.download(fin, dir)
                arc = archive.from_file(fn)
                return arc.get_meta()

    else:
        # all the spec kinds above should have been handled
        assert False
def run(self):
    """List the mirrors, or show details of one or all of them."""
    data = self.api.mirrors()
    if self.opts.uri:
        # A specific mirror requested: filter and force detailed output.
        detailed = True
        data = [d for d in data if d['uri'] == self.opts.uri]
        if not data:
            raise ResourceNotFound(
                _('mirror not found: %s') % self.opts.uri)
    else:
        detailed = self.opts.detailed

    for i, d in enumerate(data):
        if not detailed:
            emit(d['uri'])
        else:
            # Emit each known field (blank when missing), then a separator.
            for k in u"""
                uri frequency location bandwidth organization email
                timezone src rsync notes
                """.split():
                emit("%s: %s" % (k, d.get(k, '')))

            emit()
def load_commands():
    """
    Load all the commands known by the program.

    Currently commands are read from modules into the `pgxnclient.commands`
    package.

    Importing the package causes the `Command` classes to be created: they
    register themselves thanks to the `CommandType` metaclass.
    """
    pkgdir = os.path.dirname(__file__)
    for fn in os.listdir(pkgdir):
        # private modules (and __init__.py) are not commands
        if fn.startswith('_'):
            continue
        modname = __name__ + '.' + os.path.splitext(fn)[0]

        # skip already imported modules
        if modname in sys.modules:
            continue

        try:
            __import__(modname)
        except Exception as e:  # fixed: 'except X, e' is py2-only syntax
            logger.warn(_("error importing commands module %s: %s - %s"),
                modname, e.__class__.__name__, e)
def customize_parser(self, parser, subparsers, epilog=None, **kwargs):
    """
    Add the options related to database connections.
    """
    # Document the libpq environment fallbacks in the epilog.
    epilog = (
        _(
            """
The default database connection options depend on the value of environment
variables PGDATABASE, PGHOST, PGPORT, PGUSER.
"""
        )
        + (epilog or "")
    )

    subp = super(WithDatabase, self).customize_parser(
        parser, subparsers, epilog=epilog, **kwargs
    )

    g = subp.add_argument_group(_("database connections options"))

    g.add_argument(
        '-d',
        '--dbname',
        metavar="DBNAME",
        help=_("database name to install into"),
    )
    # '-h' is available because the subparser is created with
    # add_help=False (only '--help' is registered).
    g.add_argument(
        '-h',
        '--host',
        metavar="HOST",
        help=_("database server host or socket directory"),
    )
    g.add_argument(
        '-p',
        '--port',
        metavar="PORT",
        type=int,
        help=_("database server port"),
    )
    g.add_argument(
        '-U', '--username', metavar="NAME", help=_("database user name")
    )
    return subp
def get_file(url):
    """
    Open *url* for reading and return it wrapped in a `closing` context.

    Translate the HTTP error responses we know about into the program's
    exceptions; report any other network failure as a `NetworkError`.
    """
    opener = build_opener()
    opener.addheaders = [('User-agent', 'pgxnclient/%s' % __version__)]
    logger.debug('opening url: %s', url)
    try:
        return closing(opener.open(url))
    except HTTPError as e:
        # Map the status codes we care about onto specific exceptions.
        code = e.code
        if code == 404:
            raise ResourceNotFound(_("resource not found: '%s'") % e.url)
        if code == 400:
            raise BadRequestError(_("bad request on '%s'") % e.url)
        if code == 500:
            raise NetworkError(_("server error"))
        if code == 503:
            raise NetworkError(_("service unavailable"))
        raise NetworkError(
            _("unexpected response %d for '%s'") % (code, e.url))
    except URLError as e:
        raise NetworkError(_("network error: %s") % e.reason)
def _inun(self, pdir):
    """Build and install the extension from the package dir *pdir*."""
    logger.info(_("building extension"))
    self.run_make('all', dir=pdir)

    # Installing usually writes into system directories, hence sudo.
    logger.info(_("installing extension"))
    sudo = self.get_sudo_prog()
    self.run_make('install', dir=pdir, sudo=sudo)
def _inun(self, pdir):
    """Uninstall the extension from the package dir *pdir*."""
    # Removing from system directories requires elevated privileges.
    logger.info(_("removing extension"))
    sudo = self.get_sudo_prog()
    self.run_make('uninstall', dir=pdir, sudo=sudo)
class WithSpec(Command):
    """Mixin to implement commands taking a package specification.

    This class adds a positional argument SPEC to the parser and related
    options.
    """
    @classmethod
    def customize_parser(self, parser, subparsers,
            with_status=True, epilog=None, **kwargs):
        """
        Add the SPEC related options to the parser.

        If *with_status* is true, options ``--stable``, ``--testing``,
        ``--unstable`` are also handled.
        """
        epilog = _("""
SPEC can either specify just a name or contain required versions
indications, for instance 'pkgname=1.0', or 'pkgname>=2.1'.
""") + (epilog or "")

        subp = super(WithSpec, self).customize_parser(
            parser, subparsers, epilog=epilog, **kwargs)

        subp.add_argument('spec', metavar='SPEC',
            help=_("name and optional version of the package"))

        if with_status:
            # The three options are mutually exclusive and write into the
            # same 'status' attribute; --stable is the default.
            g = subp.add_mutually_exclusive_group(required=False)
            g.add_argument('--stable', dest='status',
                action='store_const', const=Spec.STABLE, default=Spec.STABLE,
                help=_("only accept stable distributions [default]"))
            g.add_argument('--testing', dest='status',
                action='store_const', const=Spec.TESTING,
                help=_("accept testing distributions too"))
            g.add_argument('--unstable', dest='status',
                action='store_const', const=Spec.UNSTABLE,
                help=_("accept unstable distributions too"))

        return subp

    def get_spec(self, _can_be_local=False, _can_be_url=False):
        """
        Return the package specification requested.

        Return a `Spec` instance.

        Raise `PgxnClientException` if the spec refers to a local resource
        or to an url and the command doesn't support it.
        """
        spec = self.opts.spec

        try:
            spec = Spec.parse(spec)
        # Fixed: "except ..., e" is Python 2-only syntax (SyntaxError on
        # Python 3); "as e" is equivalent and works on both.
        except (ValueError, BadSpecError) as e:
            # Bail out through the parser for a consistent error message.
            self.parser.error(_("cannot parse package '%s': %s") % (spec, e))

        if not _can_be_local and spec.is_local():
            raise PgxnClientException(
                _("you cannot use a local resource with this command"))

        if not _can_be_url and spec.is_url():
            raise PgxnClientException(
                _("you cannot use an url with this command"))

        return spec
def __new__(cls, value):
    """Create the instance, validating *value* against the label syntax."""
    if Label._re_chk.match(value) is None:
        raise ValueError(_("bad label: '%s'") % value)

    return CIStr.__new__(cls, value)
def __new__(cls, value):
    """
    Create the instance, validating *value*.

    A valid term must match the class regexp and contain no character
    with code point below 32 (control characters).
    """
    if not Term._re_chk.match(value) or min(map(ord, value)) < 32:
        # Fixed: the message used to read "not a valid term term".
        raise ValueError(_("not a valid term: '%s'") % value)

    return CIStr.__new__(cls, value)
    # Translate the HTTP/url errors we know about into the program's
    # exceptions.  NOTE(review): the "except ExcType, e" form below is
    # Python 2-only syntax (a SyntaxError on Python 3) -- confirm the
    # supported interpreter versions for this module.
    try:
        return closing(opener.open(url))
    except urllib2.HTTPError, e:
        if e.code == 404:
            raise ResourceNotFound(_("resource not found: '%s'") % e.url)
        elif e.code == 400:
            raise BadRequestError(_("bad request on '%s'") % e.url)
        elif e.code == 500:
            raise NetworkError(_("server error"))
        elif e.code == 503:
            raise NetworkError(_("service unavailable"))
        else:
            raise NetworkError(
                _("unexpected response %d for '%s'") % (e.code, e.url))
    except urllib2.URLError, e:
        raise NetworkError(_("network error: %s") % e.reason)


def get_local_file_name(target, url):
    """Return a good name for a local file.

    If *target* is a dir, make a name out of the url. Otherwise
    return target itself. Always return an absolute path.
    """
    if os.path.isdir(target):
        # Use the last segment of the url path as the file name.
        basename = urlsplit(url)[2].rsplit('/', 1)[-1]
        fn = os.path.join(target, basename)
    else:
        fn = target

    return os.path.abspath(fn)
except UserAbort, e: # The user replied "no" to some question logger.info("%s", e) sys.exit(1) except PgxnException, e: # An regular error from the program logger.error("%s", e) sys.exit(1) except SystemExit, e: # Usually the arg parser bailing out. pass except Exception, e: logger.error(_("unexpected error: %s - %s"), e.__class__.__name__, e, exc_info=True) sys.exit(1) except BaseException, e: # ctrl-c sys.exit(1) def command_dispatch(argv=None): """ Entry point for a script to dispatch commands to external scripts. Upon invocation of a command ``pgxn cmd --arg``, locate pgxn-cmd and