def get_make(self, _cache=[]):
    """
    Return the path of the make binary.
    """
    # The mutable default is a deliberate per-process memo, not a perf
    # cache: it keeps the returned value consistent even if the cwd is
    # changed later.
    if _cache:
        return _cache[0]

    make = self.opts.make
    if not os.path.split(make)[0]:
        # Bare program name: check it is on the PATH, but do NOT resolve
        # it to an absolute path.  Make may be run under sudo and in that
        # case we don't want root to execute a make planted in a
        # user-writable local dir.
        if not find_executable(make):
            raise PgxnClientException(
                _("make executable not found: %s") % make)
    else:
        # At least a relative dir was specified: verify it exists.
        if not os.path.exists(make):
            raise PgxnClientException(
                _("make executable not found: %s") % make)
        # Absolute path so a later chdir doesn't break it.
        make = os.path.abspath(make)

    _cache.append(make)
    return make
def parse_pg_version(self, data):
    """Parse a server_version_num-style string into a version tuple.

    Return ``(major, minor, patch)`` before PostgreSQL 10, and
    ``(major, patch)`` from PostgreSQL 10 onwards.
    """
    data = data.rstrip()
    try:
        major = int(data[:-4])
        minor = int(data[-4:-2])
        patch = int(data[-2:])
    except Exception:
        raise PgxnClientException(
            "cannot parse version number from '%s'" % data)

    if major >= 10:
        # From PG 10 there is no "minor" group: it must be zero.
        if minor:
            raise PgxnClientException("weird version number: '%s'" % data)
        return (major, patch)

    return (major, minor, patch)
def find_sql_file(self, name, sqlfile):
    """Return the path of *sqlfile* under the Postgres sharedir.

    :raises PgxnClientException: if the file can't be found.
    """
    # In the extension the sql can be specified with a directory, but it
    # gets flattened into the target dir by the Makefile.
    sqlfile = os.path.basename(sqlfile)

    sharedir = self.call_pg_config('sharedir')
    # TODO: we only check in contrib and in <name>: actually it may be
    # somewhere else - only the makefile knows!
    candidates = [
        name + '/' + sqlfile,
        sqlfile.rsplit('.', 1)[0] + '/' + sqlfile,
        'contrib/' + sqlfile,
    ]
    seen = set()
    for rel in candidates:
        if rel in seen:
            continue
        seen.add(rel)
        path = sharedir + '/' + rel
        logger.debug("checking sql file in %s" % path)
        if os.path.exists(path):
            return path

    raise PgxnClientException(
        "cannot find sql file for extension '%s': '%s'" % (name, sqlfile))
def run_make(self, cmd, dir, env=None, sudo=None):
    """Invoke make with the selected command.

    :param cmd: the make target or list of options to pass make
    :param dir: the directory to run the command into
    :param env: variables to add to the make environment
    :param sudo: if set, use the provided command/arg to elevate privileges
    """
    # Refuse to run if the directory contains no makefile at all.
    if not any(
            os.path.exists(os.path.join(dir, fn))
            for fn in ('GNUmakefile', 'makefile', 'Makefile')):
        raise PgxnClientException(
            _("no Makefile found in the extension root"))

    cmdline = shlex.split(sudo) if sudo else []
    cmdline.append(self.get_make())
    cmdline.append('PG_CONFIG=%s' % self.get_pg_config())

    if isinstance(cmd, basestring):
        cmdline.append(cmd)
    else:
        # a list of targets/options
        cmdline.extend(cmd)

    logger.debug(_("running: %s"), cmdline)
    p = self.popen(cmdline, cwd=dir, shell=False, env=env, close_fds=True)
    p.communicate()
    if p.returncode:
        raise ProcessError(
            _("command returned %s: %s")
            % (p.returncode, ' '.join(cmdline)))
def parse_pg_version(self, data):
    """Extract a (major, minor, patch) tuple from a version banner.

    *data* is expected in the form ``"<word> X.Y[.Z]"``; a missing
    patch level defaults to 0.
    """
    m = re.match(r'\S+\s+(\d+)\.(\d+)(?:\.(\d+))?', data)
    if not m:
        raise PgxnClientException(
            "cannot parse version number from '%s'" % data)

    major, minor, patch = m.groups()
    return (int(major), int(minor), int(patch or 0))
def download(f, fn, rename=True):
    """Download a file locally.

    :param f: open file to read
    :param fn: name of the file to write. If a dir, save into it.
    :param rename: if true and a file *fn* exist, rename the downloaded
        file adding a prefix ``-1``, ``-2``... before the extension.

    Return the name of the file saved.
    """
    if os.path.isdir(fn):
        fn = get_local_file_name(fn, f.url)

    if rename and os.path.exists(fn):
        # Find the first free "name-N.ext" variant.
        base, ext = os.path.splitext(fn)
        for i in count(1):
            logger.debug(_("file %s exists"), fn)
            fn = "%s-%d%s" % (base, i, ext)
            if not os.path.exists(fn):
                break

    logger.info(_("saving %s"), fn)
    try:
        fout = open(fn, "wb")
    # Fixed: "except Exception, e" is Python 2-only syntax; "as" works
    # on both 2.6+ and 3.x and matches the style used elsewhere.
    except Exception as e:
        raise PgxnClientException(
            _("cannot open target file: %s: %s")
            % (e.__class__.__name__, e))
    # NOTE(review): the copy of *f* into *fout* and the final
    # ``return fn`` promised by the docstring are not visible in this
    # chunk -- the function body looks truncated; confirm against the
    # full source before relying on it.
def open(self):
    """Open the tar archive for reading.

    :raises PgxnClientException: if the file cannot be opened as a tar.
    """
    assert not self._file, "archive already open"
    try:
        self._file = tarfile.open(self.filename, 'r')
    # Fixed: "except Exception, e" is Python 2-only syntax; "as" works
    # on both 2.6+ and 3.x and matches the style used elsewhere.
    except Exception as e:
        raise PgxnClientException(
            _("cannot open archive '%s': %s") % (self.filename, e))
def load_sql(self, filename=None, data=None):
    """Feed sql to psql, either from *filename* or from the *data* string."""
    cmdline = [self.find_psql()]
    cmdline.extend(self.get_psql_options())

    # Load via pipe to enable psql commands in the file.
    if data:
        # Truncate long commands in the debug log.
        tdata = data if len(data) <= 105 else data[:100] + "..."
        logger.debug('running sql command: "%s"', tdata)
        p = self.popen(cmdline, stdin=PIPE)
        # for Python 3: just assume default encoding will do
        if isinstance(data, six.text_type):
            data = data.encode()
        p.communicate(data)
    else:
        logger.debug("loading sql from %s", filename)
        with open(filename, 'r') as fin:
            p = self.popen(cmdline, stdin=fin)
            p.communicate()

    if p.returncode:
        raise PgxnClientException(
            "psql returned %s loading extension" % (p.returncode))
def _check_schema_exists(self, schema):
    """Raise PgxnClientException unless *schema* exists in the database."""
    # Setting the search_path to a missing schema makes psql fail,
    # which is what we probe for here.
    cmdline = (
        [self.find_psql()]
        + list(self.get_psql_options())
        + ['-c', 'SET search_path=%s' % schema])
    p = self.popen(cmdline, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    p.communicate()
    if p.returncode:
        raise PgxnClientException("schema %s does not exist" % schema)
def _get_extensions(self):
    """
    Return a list of pairs (name, sql file) to be loaded/unloaded.

    Items are in loading order.
    """
    spec = self.get_spec()
    dist = self.get_meta(spec)

    if 'provides' not in dist:
        # No 'provides' specified: assume a single extension named
        # after the distribution. This is automatically done by PGXN,
        # but we should do ourselves to deal with local META files
        # not mangled by the PGXN upload script yet.
        name = dist['name']
        for ext in self.opts.extensions:
            if ext != name:
                # Bug fix: report the extension actually requested
                # (*ext*), not the distribution name (*name*).
                raise PgxnClientException(
                    "can't find extension '%s' in the distribution '%s'"
                    % (ext, spec))
        return [(name, None)]

    rv = []
    if not self.opts.extensions:
        # All the extensions, in the order specified
        # (assume we got an orddict from json)
        for name, data in dist['provides'].items():
            rv.append((name, data.get('file')))
    else:
        # Only the specified extensions
        for name in self.opts.extensions:
            try:
                data = dist['provides'][name]
            except KeyError:
                raise PgxnClientException(
                    "can't find extension '%s' in the distribution '%s'"
                    % (name, spec))
            rv.append((name, data.get('file')))

    return rv
def maybe_run_configure(self, dir):
    """Run the extension's ./configure script, if one is present in *dir*."""
    script = os.path.join(dir, 'configure')
    logger.debug("checking '%s'", script)
    # Nothing to do if the extension ships no configure script.
    if not os.path.exists(script):
        return

    logger.info(_("running configure"))
    p = self.popen(script, cwd=dir)
    p.communicate()
    if p.returncode:
        raise PgxnClientException(
            _("configure failed with return code %s") % p.returncode)
def from_file(filename):
    """Return an `Archive` instance to handle the file *filename*"""
    # Imported here to avoid circular imports at module load time.
    from pgxnclient.zip import ZipArchive
    from pgxnclient.tar import TarArchive

    for archive_class in (ZipArchive, TarArchive):
        candidate = archive_class(filename)
        if candidate.can_open():
            return candidate

    raise PgxnClientException(
        _("can't open archive '%s': file type not recognized") % filename)
def get_pg_config(self):
    """
    Return the absolute path of the pg_config binary.
    """
    pg_config = self.opts.pg_config
    if not os.path.split(pg_config)[0]:
        # Bare program name: search the PATH.
        pg_config = find_executable(pg_config)
    else:
        # A (possibly relative) directory was given: make it absolute.
        pg_config = os.path.abspath(pg_config)

    if not pg_config:
        raise PgxnClientException(_("pg_config executable not found"))

    return pg_config
def get_spec(self, _can_be_local=False, _can_be_url=False):
    """
    Return the package specification requested.

    Return a `Spec` instance.
    """
    spec = self.opts.spec
    try:
        spec = Spec.parse(spec)
    except (ValueError, BadSpecError) as e:
        self.parser.error(
            _("cannot parse package '%s': %s") % (spec, e))

    if spec.is_local() and not _can_be_local:
        raise PgxnClientException(
            _("you cannot use a local resource with this command"))

    if spec.is_url() and not _can_be_url:
        raise PgxnClientException(
            _("you cannot use an url with this command"))

    return spec
def get_meta(self):
    """Return the parsed ``META.json`` contained in the archive.

    :raises PgxnClientException: if no META.json is in the archive.
    """
    filename = self.filename
    self.open()
    try:
        for fn in self.list_files():
            # Return the first file with the expected name.
            if fn.endswith('META.json'):
                return load_jsons(self.read(fn).decode('utf8'))

        raise PgxnClientException(
            _("file 'META.json' not found in archive '%s'") % filename)
    finally:
        self.close()
def call_psql(self, command):
    """Run psql, optionally with a ``-c`` *command*; return its stdout."""
    cmdline = [self.find_psql()]
    cmdline.extend(self.get_psql_options())
    if command is not None:
        cmdline.append('-c')
        cmdline.append(command)

    logger.debug("calling %s", cmdline)
    p = self.popen(cmdline, stdout=PIPE)
    out, err = p.communicate()
    if p.returncode:
        raise PgxnClientException(
            "psql returned %s running command" % (p.returncode))

    return out.decode('utf-8')
def unpack(self, destdir):
    """Extract the tar archive into *destdir*.

    Member names are validated before anything is extracted so that
    entries using e.g. ``../`` cannot escape *destdir*.
    """
    tarname = self.filename
    logger.info(_("unpacking: %s"), tarname)
    destdir = os.path.abspath(destdir)
    self.open()
    try:
        for fn in self.list_files():
            fname = os.path.abspath(os.path.join(destdir, fn))
            # Hardened: compare against destdir + separator. A plain
            # prefix test would accept a sibling such as
            # "/tmp/foo-evil" when destdir is "/tmp/foo".
            if not (fname == destdir
                    or fname.startswith(os.path.join(destdir, ''))):
                raise PgxnClientException(
                    _("archive file '%s' trying to escape!") % fname)

        self._file.extractall(path=destdir)
    finally:
        self.close()

    return self._find_work_directory(destdir)
def get_meta(self, spec):
    """
    Return the content of the ``META.json`` file for *spec*.

    Return the object obtained parsing the JSON.
    """
    if spec.is_name():
        # Get the metadata from the API
        try:
            data = self.api.dist(spec.name)
        except NotFound:
            # Distro not found: maybe it's an extension?
            ext = self.api.ext(spec.name)
            name, ver = self.get_best_version_from_ext(ext, spec)
            return self.api.meta(name, ver)
        else:
            ver = self.get_best_version(data, spec)
            return self.api.meta(spec.name, ver)

    elif spec.is_dir():
        # Get the metadata from a directory
        fn = os.path.join(spec.dirname, 'META.json')
        logger.debug("reading %s", fn)
        if not os.path.exists(fn):
            raise PgxnClientException(
                _("file 'META.json' not found in '%s'") % spec.dirname)

        with open(fn) as f:
            return load_json(f)

    elif spec.is_file():
        # Get the metadata from a local archive file.
        arc = archive.from_spec(spec)
        return arc.get_meta()

    elif spec.is_url():
        # Get the metadata from a remote archive: download it into a
        # temporary dir and read META.json out of the saved file.
        with network.get_file(spec.url) as fin:
            with temp_dir() as dir:
                fn = network.download(fin, dir)
                arc = archive.from_file(fn)
                return arc.get_meta()

    else:
        # Spec kinds are exhaustive: reaching here is a programming error.
        assert False
def unpack(self, destdir):
    """Extract the zip archive into *destdir*.

    Recreates directories, restores file contents and sets the
    executable bit on files starting with a hashbang.  Entries that
    would land outside *destdir* are rejected.
    """
    zipname = self.filename
    logger.info(_("unpacking: %s"), zipname)
    destdir = os.path.abspath(destdir)
    self.open()
    try:
        for fn in self.list_files():
            fname = os.path.abspath(os.path.join(destdir, fn))
            # Hardened: compare against destdir + separator. A plain
            # prefix test would accept a sibling such as
            # "/tmp/foo-evil" when destdir is "/tmp/foo".
            if not (fname == destdir
                    or fname.startswith(os.path.join(destdir, ''))):
                raise PgxnClientException(
                    _("archive file '%s' trying to escape!") % fname)

            # Looks like checking for a trailing / is the only way to
            # tell if the file is a directory.
            if fn.endswith('/'):
                os.makedirs(fname)
                continue

            # The directory is not always explicitly present in the archive
            if not os.path.exists(os.path.dirname(fname)):
                os.makedirs(os.path.dirname(fname))

            # Copy the file content
            logger.debug(_("saving: %s"), fname)
            fout = open(fname, "wb")
            try:
                data = self.read(fn)
                # In order to restore the executable bit, I haven't find
                # anything that looks like an executable flag in the
                # zipinfo, so look at the hashbangs...
                isexec = data[:2] == b'#!'
                fout.write(data)
            finally:
                fout.close()

            if isexec:
                os.chmod(
                    fname, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
    finally:
        self.close()

    return self._find_work_directory(destdir)
def run(self):
    """Download the distribution archive and verify its checksum.

    Return the name of the file saved.
    """
    spec = self.get_spec()
    assert not spec.is_local()

    if spec.is_url():
        return self._run_url(spec)

    data = self.get_meta(spec)
    if 'sha1' not in data:
        raise PgxnClientException(
            "sha1 missing from the distribution meta")
    chk = data['sha1']

    with self.api.download(data['name'], SemVer(data['version'])) as fin:
        fn = network.download(fin, self.opts.target)

    self.verify_checksum(fn, chk)
    return fn
def test_from_file_unknown(self):
    """An unrecognized file type must raise PgxnClientException."""
    fn = get_test_filename('META-manyext.json')
    # Bug fix: assertRaises takes the exception class and the callable
    # as separate arguments.  The original code instantiated the
    # exception instead -- PgxnClientException(archive.from_file, fn) --
    # so from_file was never called and the test asserted nothing.
    self.assertRaises(PgxnClientException, archive.from_file, fn)
class WithSpec(Command):
    """Mixin to implement commands taking a package specification.

    This class adds a positional argument SPEC to the parser and
    related options.
    """
    @classmethod
    def customize_parser(self, parser, subparsers,
                         with_status=True, epilog=None, **kwargs):
        """
        Add the SPEC related options to the parser.

        If *with_status* is true, options ``--stable``, ``--testing``,
        ``--unstable`` are also handled.
        """
        epilog = _("""
SPEC can either specify just a name or contain required versions
indications, for instance 'pkgname=1.0', or 'pkgname>=2.1'.
""") + (epilog or "")

        subp = super(WithSpec, self).customize_parser(
            parser, subparsers, epilog=epilog, **kwargs)

        subp.add_argument('spec', metavar='SPEC',
            help=_("name and optional version of the package"))

        if with_status:
            g = subp.add_mutually_exclusive_group(required=False)
            g.add_argument('--stable', dest='status',
                action='store_const', const=Spec.STABLE, default=Spec.STABLE,
                help=_("only accept stable distributions [default]"))
            g.add_argument('--testing', dest='status',
                action='store_const', const=Spec.TESTING,
                help=_("accept testing distributions too"))
            g.add_argument('--unstable', dest='status',
                action='store_const', const=Spec.UNSTABLE,
                help=_("accept unstable distributions too"))

        return subp

    def get_spec(self, _can_be_local=False, _can_be_url=False):
        """
        Return the package specification requested.

        Return a `Spec` instance.
        """
        spec = self.opts.spec
        try:
            spec = Spec.parse(spec)
        # Fixed: "except (..), e" is Python 2-only syntax; "as" works on
        # both 2.6+ and 3.x, matching the standalone get_spec elsewhere
        # in the codebase.
        except (ValueError, BadSpecError) as e:
            self.parser.error(
                _("cannot parse package '%s': %s") % (spec, e))

        if not _can_be_local and spec.is_local():
            raise PgxnClientException(
                _("you cannot use a local resource with this command"))

        if not _can_be_url and spec.is_url():
            raise PgxnClientException(
                _("you cannot use an url with this command"))

        return spec