def make_release_tree(self, basedir, files):
    """Regenerate the parser tables, build the sdist tree, and restore
    the version file if building the tables dirtied it."""
    clean_tables()
    build_tables()
    was_dirty = dirty_version()
    sdist.make_release_tree(self, basedir, files)
    if was_dirty:
        restore_version()
def make_release_tree(self, base_dir, files):
    """Build the sdist tree; when build_info.py is shipped (and this is
    not a dry run), regenerate it inside the tree instead of copying the
    working-tree version."""
    build_info = 'flent/build_info.py'
    if build_info not in files or self.dry_run:
        _sdist.make_release_tree(self, base_dir, files)
        return
    # Drop the source copy, then write a freshly generated one.
    _sdist.make_release_tree(self, base_dir,
                             [f for f in files if f != build_info])
    rewrite_build_info(os.path.join(base_dir, build_info))
def make_release_tree(self, base_dir, files):
    """Create the sdist tree, then write the distribution version into a
    ``__version__.py`` file inside it.

    Fix: the original used the Python 2-only ``file()`` builtin, which
    raises NameError on Python 3; it is replaced with ``open()``.
    """
    sdist_orig.make_release_tree(self, base_dir, files)
    version_path = os.path.join(base_dir, "__version__.py")
    with open(version_path, "w") as f:
        f.write("VERSION = \"%s\"" % getattr(self.distribution.metadata, "version"))
def make_release_tree(self, basedir, files):
    """Regenerate the parser tables, build the sdist tree, and discard
    any version-file changes caused by the table build."""
    clean_tables()
    build_tables()
    was_dirty = dirty_version()
    sdist.make_release_tree(self, basedir, files)
    if was_dirty:
        discard_changes()
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then replace its version.ini with the
    repository-root copy."""
    sdist.make_release_tree(self, base_dir, files)
    dest = os.path.join(base_dir, "version.ini")
    # Remove first: the sdist copy may be a hard link into the source tree.
    if os.path.exists(dest):
        os.unlink(dest)
    shutil.copy(
        os.path.join(THIS_DIRECTORY, "../../../version.ini"), dest)
def make_release_tree(self, base_dir, files):
    """
    Create the files in subdir base_dir ready for packaging

    After the normal make_release_tree ran, we insert shared_setup
    and modify the to-be-packaged setup.py
    """
    # NOTE(review): these messages are informational but logged at error
    # level -- presumably to guarantee visibility; confirm intent.
    log.error("sdist make_release_tree original base_dir %s files %s" % (base_dir, files))
    log.error("sdist from shared_setup %s current dir %s" % (__file__, os.getcwd()))
    if os.path.exists(base_dir):
        # no autocleanup?
        # can be a leftover of earlier crash/raised exception
        raise Exception("base_dir %s present. Please remove it" % base_dir)

    sdist.make_release_tree(self, base_dir, files)

    if __name__ == '__main__':
        log.error('running shared_setup as main, not adding it to sdist')
    else:
        # use a new name, to avoid confusion with original
        new_shared_setup = 'shared_setup_dist_only'
        external_dir = 'external_dist_only'
        # Rewrite the packaged setup.py to import the renamed module,
        # then place that module into the release tree.
        self._mod_setup_py(base_dir, external_dir, new_shared_setup)
        self._add_shared_setup(base_dir, external_dir, new_shared_setup)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then delete every file listed in
    ``self.to_remove`` from it."""
    print("in sdist_testimages.make_release_tree")
    sdist.make_release_tree(self, base_dir, files)
    for name in self.to_remove:
        target = os.path.join(base_dir, name)
        if os.path.exists(target):
            os.unlink(target)
def make_release_tree(self, base_dir, files):
    """
    Create the files in subdir base_dir ready for packaging

    After the normal make_release_tree ran, we insert shared_setup
    and modify the to-be-packaged setup.py
    """
    log.info("sdist make_release_tree original base_dir %s files %s" % (base_dir, files))
    log.info("sdist from shared_setup %s current dir %s" % (__file__, os.getcwd()))
    if os.path.exists(base_dir):
        # no autocleanup?
        # can be a leftover of earlier crash/raised exception
        raise Exception("base_dir %s present. Please remove it" % base_dir)

    sdist.make_release_tree(self, base_dir, files)

    # have to make sure setup.py is not a symlink
    dest, code = self._copy_setup_py(base_dir)

    if __name__ == '__main__':
        log.info('running shared_setup as main, not adding it to sdist')
    else:
        # use a new name, to avoid confusion with original
        self._mod_setup_py(dest, code)
        self._add_shared_setup(base_dir)

    # Add mandatory files
    for fn in [LICENSE, README]:
        self.copy_file(os.path.join(REPO_BASE_DIR, fn), os.path.join(base_dir, fn))
def make_release_tree(self, *args, **kwargs):
    """Build the sdist tree, then force a static-assets build into it."""
    release_dir = self.distribution.get_fullname()
    sdist.make_release_tree(self, *args, **kwargs)
    # Point build_static at the freshly created tree and rerun it.
    self.reinitialize_command('build_static', work_path=release_dir,
                              force=True)
    self.run_command('build_static')
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then replace its _version.py with the
    expanded versioneer values."""
    _sdist.make_release_tree(self, base_dir, files)
    # The copy in base_dir may be a hard link back into the source tree,
    # so unlink before rewriting.
    target = os.path.join(base_dir, versionfile_source)
    print("UPDATING %s" % target)
    os.unlink(target)
    with open(target, "w") as fh:
        fh.write(SHORT_VERSION_PY % self._versioneer_generated_versions)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then re-copy git/__init__.py into it and
    stamp the version into the copy."""
    _sdist.make_release_tree(self, base_dir, files)
    src = path.join("git", "__init__.py")
    assert path.exists(src), src
    dst = path.join(base_dir, src)
    # Break a possible hard link so stamping cannot edit the source tree.
    if hasattr(os, "link") and path.exists(dst):
        os.unlink(dst)
    self.copy_file(src, dst)
    _stamp_version(dst)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then re-copy lib/git/__init__.py into it
    and stamp the version into the copy."""
    _sdist.make_release_tree(self, base_dir, files)
    src = path.join('lib', 'git', '__init__.py')
    assert path.exists(src)
    dst = path.join(base_dir, src)
    # Break a possible hard link so stamping cannot edit the source tree.
    if hasattr(os, 'link') and path.exists(dst):
        os.unlink(dst)
    self.copy_file(src, dst)
    _stamp_version(dst)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then vendor the contrib dependencies into
    it at their pinned versions.

    Fix: ``subprocess.call`` ignored the exit status, so a failed
    download/extract silently produced a broken sdist; use
    ``subprocess.check_call`` so failures abort the build.
    """
    orig_sdist.make_release_tree(self, base_dir, files)
    # add additional dependencies in the required version
    for name, tar_src in self.contrib:
        tarball = tar_src.format(versions[name + '_version'])
        print("Downloading and adding {} sources from {}".format(name, tarball))
        # NOTE(review): shell pipeline with an interpolated URL; the URL
        # comes from the project's own version table, not user input.
        subprocess.check_call('wget -O - -q {} | tar xz -C {} --one-top-level=contrib/{} --strip-components=1'.format(
            tarball, base_dir, name), shell=True)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then rewrite its version file with the
    versioneer-generated values."""
    cfg = get_config_from_root(get_root())
    _sdist.make_release_tree(self, base_dir, files)
    # The copy in base_dir may be a hard link back into the source tree;
    # write_to_version_file replaces it with the expanded contents.
    target = os.path.join(base_dir, cfg.versionfile_source)
    print("UPDATING %s" % target)
    write_to_version_file(target, self._versioneer_generated_versions)
def make_release_tree(self, *args, **kwargs):
    """Build the sdist tree, run the static-assets build into it, and
    record the build timestamp in sentry-package.json."""
    release_dir = self.distribution.get_fullname()
    sdist.make_release_tree(self, *args, **kwargs)
    self.reinitialize_command('build_static', work_path=release_dir)
    self.run_command('build_static')
    manifest = os.path.join(release_dir, 'sentry-package.json')
    with open(manifest, 'w') as fp:
        json.dump({
            'createdAt': datetime.datetime.utcnow().isoformat() + 'Z',
        }, fp)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then vendor libosmium and protozero sources
    into it at their pinned versions.

    Fix: the return value of ``call()`` was ignored, so a failed
    download/extract silently produced a broken sdist; now a non-zero
    exit status raises.
    """
    orig_sdist.make_release_tree(self, base_dir, files)
    # checkout libosmium in the required version
    tarball = 'https://github.com/osmcode/libosmium/archive/v%s.tar.gz' % libosmium_version
    print("Downloading and adding libosmium sources from", tarball)
    if call('cd %s && wget -O - -q %s | tar xz' % (base_dir, tarball), shell=True) != 0:
        raise RuntimeError("Failed to fetch and unpack %s" % tarball)
    # checkout protozero in the required version
    tarball = 'https://github.com/mapbox/protozero/archive/v%s.tar.gz' % protozero_version
    print("Downloading and adding protozero sources from", tarball)
    if call('cd %s && wget -O - -q %s | tar xz' % (base_dir, tarball), shell=True) != 0:
        raise RuntimeError("Failed to fetch and unpack %s" % tarball)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then vendor the contrib dependencies into
    it at their pinned versions.

    Fix: ``subprocess.call`` ignored the exit status, so a failed
    download/extract silently produced a broken sdist; use
    ``subprocess.check_call`` so failures abort the build.
    """
    orig_sdist.make_release_tree(self, base_dir, files)
    # add additional dependencies in the required version
    for name, tar_src in self.contrib:
        tarball = tar_src.format(versions[name + '_version'])
        print("Downloading and adding {} sources from {}".format(
            name, tarball))
        # NOTE(review): shell pipeline with an interpolated URL; the URL
        # comes from the project's own version table, not user input.
        subprocess.check_call(
            'wget -O - -q {} | tar xz -C {} --one-top-level=contrib/{} --strip-components=1'
            .format(tarball, base_dir, name),
            shell=True)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then rewrite its VERSION file.

    The new contents go to a temporary file that is renamed into place:
    upstream sdist may have hard-linked VERSION into the tree, so an
    in-place write would also edit the source tree (the rename also
    sidesteps partial-write issues).
    """
    sdist.make_release_tree(self, base_dir, files)
    version_file = os.path.join(base_dir, 'VERSION')
    print('updating %s' % (version_file,))
    tmp_path = version_file + '.tmp'
    with open(tmp_path, 'w') as f:
        f.write('%s\n' % (PKG_VERSION,))
    os.rename(tmp_path, version_file)
def make_release_tree(self, *args, **kwargs):
    """Build the sdist tree, run the static-assets build into it, and
    record the build timestamp in lemur-package.json."""
    release_dir = self.distribution.get_fullname()
    sdist.make_release_tree(self, *args, **kwargs)
    self.reinitialize_command('build_static', work_path=release_dir)
    self.run_command('build_static')
    manifest = os.path.join(release_dir, 'lemur-package.json')
    with open(manifest, 'w') as fp:
        json.dump({
            'createdAt': datetime.datetime.utcnow().isoformat() + 'Z',
        }, fp)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then rewrite its VERSION file.

    Fix: the file was opened in binary mode (``'wb'``) but written a
    ``str``, which raises TypeError on Python 3; open in text mode.
    """
    import os
    sdist.make_release_tree(self, base_dir, files)
    version_file = os.path.join(base_dir, 'VERSION')
    print('updating %s' % (version_file,))
    # Write to temporary file first and rename over permanent not
    # just to avoid atomicity issues (not likely an issue since if
    # interrupted the whole sdist directory is only partially
    # written) but because the upstream sdist may have made a hard
    # link, so overwriting in place will edit the source tree.
    with open(version_file + '.tmp', 'w') as f:
        f.write('%s\n' % (pkg_version,))
    os.rename(version_file + '.tmp', version_file)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then write the saved-version file into it."""
    sdist.make_release_tree(self, base_dir, files)
    version_path = os.path.join(base_dir, SAVED_VERSION_FILE)
    # This could be a hard link, so try to delete it first. Is there any way
    # to do this atomically together with opening?
    try:
        os.remove(version_path)
    except OSError:
        pass
    with open(version_path, 'w') as fh:
        fh.write("# This file has been generated by setup.py.\n{}\n"
                 .format(version))
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then write the saved-version file into it."""
    sdist.make_release_tree(self, base_dir, files)
    version_path = os.path.join(base_dir, SAVED_VERSION_FILE)
    # This could be a hard link, so try to delete it first. Is there any way
    # to do this atomically together with opening?
    try:
        os.remove(version_path)
    except OSError:
        pass
    with open(version_path, 'w') as fh:
        fh.write("# This file has been generated by setup.py.\n{}\n".format(
            version))
def make_release_tree(self, base_dir, files):
    """Build the salt sdist tree, adding the salt-ssh marker when
    packaging for salt-ssh and hardcoding salt/_version.py."""
    if self.distribution.ssh_packaging:
        self.distribution.salt_ssh_packaging_file = PACKAGED_FOR_SALT_SSH_FILE
        self.run_command("write_salt_ssh_packaging_file")
        self.filelist.files.append(
            os.path.basename(PACKAGED_FOR_SALT_SSH_FILE))

    sdist.make_release_tree(self, base_dir, files)

    # Let's generate salt/_version.py to include in the sdist tarball
    self.distribution.running_salt_sdist = True
    version_path = os.path.join(base_dir, "salt", "_version.py")
    self.distribution.salt_version_hardcoded_path = version_path
    self.run_command("write_salt_version")
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, write buildbot_worker/VERSION, and copy the
    master tree's release notes into NEWS with the version substituted.

    Fix: ``open(fn, 'w').write(version)`` leaked the file handle (and on
    some implementations could leave the write unflushed); use ``with``.
    """
    sdist.make_release_tree(self, base_dir, files)
    # ensure there's a buildbot_worker/VERSION file
    fn = os.path.join(base_dir, 'buildbot_worker', 'VERSION')
    with open(fn, 'w') as f:
        f.write(version)
    # ensure that NEWS has a copy of the latest release notes, copied from
    # the master tree, with the proper version substituted
    src_fn = os.path.join('..', 'master', 'docs', 'relnotes/index.rst')
    with open(src_fn) as f:
        src = f.read()
    src = src.replace('|version|', version)
    dst_fn = os.path.join(base_dir, 'NEWS')
    with open(dst_fn, 'w') as f:
        f.write(src)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then rewrite its setup.py so the packaged
    copy carries a pinned version instead of use_scm_version."""
    sdist_orig.make_release_tree(self, base_dir, files)
    target = os.path.join(base_dir, "setup.py")
    with open(__file__) as fp:
        source_text = fp.read()
    release_version = self.distribution.version
    if not release_version:
        from setuptools_scm import get_version
        release_version = get_version(**scm_config())
    patched = source_text.replace("use_scm_version=scm_config,\n",
                                  'version="%s",\n' % release_version)
    # Remove first: the sdist copy may be a hard link into the source tree.
    os.remove(target)
    with open(target, "w") as fp:
        fp.write(patched)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then rewrite its setup.py so the packaged
    copy carries a pinned version instead of use_scm_version."""
    sdist_orig.make_release_tree(self, base_dir, files)
    target = os.path.join(base_dir, 'setup.py')
    with open(__file__) as fp:
        source_text = fp.read()
    release_version = self.distribution.version
    if not release_version:
        from setuptools_scm import get_version
        release_version = get_version(**scm_config())
    patched = source_text.replace('use_scm_version=scm_config,\n',
                                  'version="%s",\n' % release_version)
    # Remove first: the sdist copy may be a hard link into the source tree.
    os.remove(target)
    with open(target, 'w') as fp:
        fp.write(patched)
def make_release_tree(self, base_dir, files):
    """Build the salt sdist tree, adding the salt-ssh marker when
    packaging for salt-ssh and hardcoding salt/_version.py."""
    if self.distribution.ssh_packaging:
        self.distribution.salt_ssh_packaging_file = PACKAGED_FOR_SALT_SSH_FILE
        self.run_command('write_salt_ssh_packaging_file')
        self.filelist.files.append(
            os.path.basename(PACKAGED_FOR_SALT_SSH_FILE))

    if not IS_PY3 and not isinstance(base_dir, str):
        # Work around some bad code in distutils which logs unicode paths
        # against a str format string.
        base_dir = base_dir.encode('utf-8')

    sdist.make_release_tree(self, base_dir, files)

    # Let's generate salt/_version.py to include in the sdist tarball
    self.distribution.running_salt_sdist = True
    version_path = os.path.join(base_dir, 'salt', '_version.py')
    self.distribution.salt_version_hardcoded_path = version_path
    self.run_command('write_salt_version')
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, allowing the running script and template to
    live somewhere other than setup.py / MANIFEST.in: such alternates
    are copied into the tree under the canonical names."""
    self.mkpath(base_dir)
    # alternative script locations are made possible
    script_file = sys.argv[0]
    if script_file.lower() != "setup.py":
        files = [f for f in files if f.lower() != "setup.py"]
        self.copy_file(script_file, os.path.join(base_dir, "setup.py"))
    template = self.template
    if template is not None and template.lower() != "manifest.in":
        files = [f for f in files if f.lower() != "manifest.in"]
        self.copy_file(template, os.path.join(base_dir, "MANIFEST.in"))
    return setuptools_sdist.make_release_tree(self, base_dir, files)
def make_release_tree(self, base_dir, files): orig_sdist.make_release_tree(self, base_dir, files) # add additional dependecies in the required version for name, tar_src in self.contrib: tarball = tar_src.format(versions[name + '_version']) print("Downloading and adding {} sources from {}".format( name, tarball)) base = Path("-".join((name, versions[name + '_version']))) dest = Path(base_dir) / "contrib" / name with urllib.request.urlopen(tarball) as reader: with tarfile.open(fileobj=reader, mode='r|gz') as tf: for member in tf: fname = Path(member.name) if not fname.is_absolute(): fname = fname.relative_to(base) if member.isdir(): (dest / fname).mkdir(parents=True, exist_ok=True) elif member.isfile(): with tf.extractfile(member) as memberfile: with (dest / fname).open('wb') as of: of.write(memberfile.read())
def add_to_sdist(self=None, base_dir=os.curdir, files=()):
    """
    The custom part of the sdist command.

    >>> add_to_sdist(base_dir='/tmp')
    == Rendering:
    ...
    >>> add_to_sdist(base_dir='/tmp')
    == Rendering:
    ...
    >>> def boom(file=None):
    ...     raise OSError('File not found.')
    >>> import os
    >>> _exists = os.path.exists
    >>> os.path.exists = boom
    >>> add_to_sdist(base_dir='/tmp')
    == Rendering:
    ...
    >>> os.path.exists = _exists
    """
    # now locate _version.py in the new base_dir directory
    # (remembering that it may be a hardlink) and replace it with an
    # updated value
    self and _sdist.make_release_tree(self, base_dir, files)
    source_versionfile, build_versionfile = read_setup_cfg()
    target_versionfile = os.path.join(base_dir, build_versionfile)
    static_versionfile = versionfile.render_static_file() if versionfile else 'test_content'
    print("== Rendering:\n%s\n== To Versionfile: %s" % (static_versionfile, target_versionfile))
    try:
        # handles the hard link case correctly
        os.path.exists(target_versionfile) and os.unlink(target_versionfile)
        with open(target_versionfile, 'w') as fh:
            fh.write(static_versionfile)
    # Fix: was a bare ``except:`` which also swallowed KeyboardInterrupt
    # and SystemExit; OSError is what filesystem failures (and the
    # doctest's simulated failure) actually raise.
    except OSError:
        print("=== Could not render static _version.py to sdist!")
    self_target = join(base_dir, basename(__file__))
    print("== Updating: %s" % self_target)
    try:
        os.path.exists(self_target) or os.link(__file__, self_target)
    except OSError:
        print("=== Could not add %s to sdist!" % basename(__file__))
def make_release_tree(self, base_dir, files):
    """Build the sdist tree (excluding Cython-generated files when
    Cython is available), then minify the static assets in it."""
    release_files = self.__filter_files(files) if has_cython else files
    _sdist.make_release_tree(self, base_dir, release_files)
    minify_static_files(base_dir, release_files, self.static_exclude)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then write the version into the package's
    __init__ and its version file."""
    sdist.make_release_tree(self, base_dir, files)
    meta = self.distribution.metadata
    target_dir = join(base_dir, meta.name)
    write_version_into_init(target_dir, meta.version)
    write_version_file(target_dir, meta.version)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then dump the setup configuration into
    setup.json inside it."""
    sdist_orig.make_release_tree(self, base_dir, files)
    conf_path = os.path.join(base_dir, 'setup.json')
    with open(conf_path, 'w') as f:
        json.dump(conf, f)
def make_release_tree(self, base_dir, files):
    """Bake the VCS revision into the tree (skipped on dry runs), then
    perform the standard sdist copy."""
    if not self.dry_run:
        write_baked_revision(base_dir)
    sdist.make_release_tree(self, base_dir, files)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then record git metadata inside it."""
    setuptools_sdist.make_release_tree(self, base_dir, files)
    add_git_metadata(base_dir)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then apply the project patches to it."""
    sdist.make_release_tree(self, base_dir, files)
    self.apply_patches(base_dir)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, excluding Cython-generated files when
    Cython is available to regenerate them."""
    release_files = self.__filter_files(files) if has_cython else files
    _sdist.make_release_tree(self, base_dir, release_files)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then copy the frontend assets into place."""
    sdist.make_release_tree(self, base_dir, files)
    self.copy_frontend_files()
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then write the static version file into it."""
    sdist.make_release_tree(self, base_dir, files)
    version_target = os.path.join(base_dir, *STATIC_VERSION_PATH)
    write_version(version_target)
def make_release_tree(self, basedir, files):
    """Build the sdist tree, then build the lexer/parser tables inside
    it (via execute, so dry-run is honored)."""
    _sdist.make_release_tree(self, basedir, files)
    self.execute(
        _run_build_tables,
        (basedir,),
        msg="Build the lexing/parsing tables")
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then rewrite artiq/_version.py with the
    versioneer-generated version."""
    _sdist.make_release_tree(self, base_dir, files)
    target = os.path.join(base_dir, "artiq", "_version.py")
    print("UPDATING %s" % target)
    write_to_version_file(target, self._versioneer_generated_version)
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then write the version file into the
    (possibly dotted) package's directory."""
    sdist.make_release_tree(self, base_dir, files)
    package_parts = self.distribution.metadata.name.split(".")
    target_dir = join(base_dir, *package_parts)
    _write_version(target_dir, self.distribution.metadata.version)
def make_release_tree(self, basedir, files):
    """Regenerate the parser tables, then build the sdist tree."""
    clean_tables()
    build_tables()
    sdist.make_release_tree(self, basedir, files)
def make_release_tree(self, basedir, files):
    """Build the sdist tree, then prepare the source release inside it
    (via execute, so dry-run is honored)."""
    _sdist.make_release_tree(self, basedir, files)
    self.execute(
        prepare_release,
        (basedir, True),
        msg='Building the source release')
def make_release_tree(self, base_dir, files):
    """Build the sdist tree, then freeze the version in every
    distributed package."""
    sdist.make_release_tree(self, base_dir, files)
    for package in self.distribution.packages:
        make_version_static(base_dir, package)
def make_release_tree(self, basedir, files):
    """Build the sdist tree, then build the ISA/architecture cache
    inside it (via execute, so dry-run is honored)."""
    _sdist.make_release_tree(self, basedir, files)
    self.execute(
        _run_build_cache,
        (basedir,),
        msg="Build ISA and architecture cache")