def build(self):
    """Install the node project and stage it, plus the node binary, into installdir.

    NOTE(review): this variant ships a hard-coded ``roshubd`` entry point and
    deliberately skips ``_create_bins`` and the installed-package scan — looks
    like a project-specific fork; confirm before "fixing".
    """
    super().build()

    package_dir = self._install(rootdir=self.builddir)

    # Now move everything over to the plugin's installdir
    link_or_copy_tree(package_dir, self.installdir)
    # Copy in the node binary
    link_or_copy(
        os.path.join(self._npm_dir, "bin", "node"),
        os.path.join(self.installdir, "bin", "node"),
    )

    # Create binary entries
    # package_json is currently unused because _create_bins is disabled below.
    package_json = self._get_package_json(rootdir=self.builddir)
    # HACK: expose the roshubd launcher directly instead of generating bin
    # entries from package.json — TODO confirm the "../bin" layout is intended.
    link_or_copy(
        os.path.join(package_dir, "../bin", "roshubd"),
        os.path.join(self.installdir, "bin", "roshubd"),
    )
    #_create_bins(package_json, self.installdir)

    # Record the yarn lock file in the manifest when one was produced.
    lock_file_path = os.path.join(self.installdir, "yarn.lock")
    if os.path.isfile(lock_file_path):
        with open(lock_file_path) as lock_file:
            self._manifest["yarn-lock-contents"] = lock_file.read()

    # Get the names and versions of installed packages
    # installed_node_packages = self._get_installed_node_packages(self.installdir)
    # Placeholder: the real package scan above is disabled.
    self._manifest["node-packages"] = ["none"]
def install_stage_packages(
    cls, *, package_names: List[str], install_dir: str, base: str
) -> List[str]:
    """Fetch the requested stage packages and unpack them into install_dir.

    :param package_names: packages to stage (dependencies are resolved by apt).
    :param install_dir: directory the package contents are extracted into.
    :param base: snap base name; packages already shipped by the base are
        filtered out unless explicitly requested.
    :return: sorted ``name=version`` strings for every package staged.
    """
    logger.debug(f"Requested stage-packages: {sorted(package_names)!r}")

    installed: Set[str] = set()

    with AptCache(stage_cache=_STAGE_CACHE_DIR) as apt_cache:
        # Packages provided by the base are dropped from the set to fetch,
        # but explicitly requested names are always kept.
        filter_packages = set(get_packages_in_base(base=base))
        apt_cache.update()
        apt_cache.mark_packages(set(package_names))
        apt_cache.unmark_packages(
            required_names=set(package_names), filtered_names=filter_packages
        )
        for pkg_name, pkg_version, dl_path in apt_cache.fetch_archives(
            _DEB_CACHE_DIR
        ):
            logger.debug(f"Extracting stage package: {pkg_name}")
            installed.add(f"{pkg_name}={pkg_version}")

            with tempfile.TemporaryDirectory(suffix="deb-extract") as extract_dir:
                # Extract deb package.
                cls._extract_deb(str(dl_path), extract_dir)

                # Mark source of files.
                marked_name = f"{pkg_name}={pkg_version}"
                cls._mark_origin_stage_package(extract_dir, marked_name)

                # Stage files to install_dir.
                file_utils.link_or_copy_tree(extract_dir, install_dir)

    cls.normalize(install_dir)

    return sorted(installed)
def test_link_symlink_to_file(self):
    """A symlink to a file is reproduced as a symlink, not a copy."""
    link_path = os.path.join('foo', '2-link')
    os.symlink('2', link_path)

    file_utils.link_or_copy_tree('foo', 'qux')

    # Verify that the symlink remains a symlink
    self.assertThat(os.path.join('qux', '2-link'), tests.LinkExists('2'))
def test_link_symlink_to_file(self):
    """Symlinks to files inside the source tree survive the copy."""
    source_link = os.path.join("foo", "2-link")
    os.symlink("2", source_link)

    file_utils.link_or_copy_tree("foo", "qux")

    # The destination entry must still be a symlink pointing at "2".
    self.assertThat(os.path.join("qux", "2-link"), unit.LinkExists("2"))
def pull(self):
    """Populate source_dir from the source tree, hard-linking where possible."""
    src = self.source_abspath
    dst = self.source_dir
    file_utils.link_or_copy_tree(
        src, dst, ignore=self._ignore, copy_function=self.copy_function
    )
def test_link_symlink_to_dir(self):
    """A symlink to a directory is reproduced as a symlink."""
    os.symlink('bar', os.path.join('foo', 'bar-link'))

    file_utils.link_or_copy_tree('foo', 'qux')

    # Verify that the symlink remains a symlink
    copied_link = os.path.join('qux', 'bar-link')
    self.assertThat(copied_link, tests.LinkExists('bar'))
def test_link_symlink_to_dir(self):
    """Directory symlinks in the source remain symlinks in the destination."""
    link_path = os.path.join("foo", "bar-link")
    os.symlink("bar", link_path)

    file_utils.link_or_copy_tree("foo", "qux")

    self.assertThat(os.path.join("qux", "bar-link"), unit.LinkExists("bar"))
def build(self):
    """Run the Gradle build and stage its output directory into installdir."""
    super().build()

    if self._using_gradlew():
        command = ["./gradlew"]
    else:
        # The project ships no wrapper: provision our own Gradle distribution.
        self._gradle_tar.provision(self._gradle_dir, keep_zip=True)
        command = ["gradle"]

    self.run(
        command + self._get_proxy_options() + self.options.gradle_options,
        rootdir=self.builddir,
    )

    output_dir = os.path.join(self.builddir, self.options.gradle_output_dir)
    target_subdir = "jabref"

    def _copy(source, destination):
        # Hard-link into installdir when possible, falling back to copy.
        return file_utils.link_or_copy(source, destination, self.installdir)

    file_utils.link_or_copy_tree(
        output_dir,
        os.path.join(self.installdir, target_subdir),
        copy_function=_copy,
    )
    self._create_symlinks()
def _organize_filesets(fileset, base_dir):
    """Move or copy entries inside base_dir according to the organize map."""
    # Literal keys sort before glob keys so explicit entries are handled first.
    for key in sorted(fileset, key=lambda entry: ["*" in entry, entry]):
        pattern = os.path.join(base_dir, key)
        # Strip a leading slash so os.path.join produces a path under base_dir.
        dst = os.path.join(base_dir, fileset[key].lstrip("/"))

        for src in iglob(pattern, recursive=True):
            if os.path.isdir(src) and "*" not in key:
                file_utils.link_or_copy_tree(src, dst)
                # TODO create alternate organization location to avoid
                # deletions.
                shutil.rmtree(src)
            elif os.path.isfile(dst):
                raise errors.SnapcraftEnvironmentError(
                    "Trying to organize file {key!r} to {dst!r}, "
                    "but {dst!r} already exists".format(
                        key=key, dst=os.path.relpath(dst, base_dir)
                    )
                )
            else:
                os.makedirs(os.path.dirname(dst), exist_ok=True)
                shutil.move(src, dst)
def test_link_symlink_to_dir(self):
    """Symlinks pointing at directories are preserved by the copy."""
    os.symlink('bar', os.path.join('foo', 'bar-link'))
    file_utils.link_or_copy_tree('foo', 'qux')

    destination = os.path.join('qux', 'bar-link')
    self.assertThat(destination, tests.LinkExists('bar'))
def build(self):
    """Install the node project, stage it plus the node binary, and record the manifest."""
    super().build()

    package_dir = self._install(rootdir=self.builddir)

    # Migrate the installed package tree into the plugin's installdir.
    link_or_copy_tree(package_dir, self.installdir)
    # Ship the node interpreter alongside the project.
    link_or_copy(
        os.path.join(self._npm_dir, "bin", "node"),
        os.path.join(self.installdir, "bin", "node"),
    )

    # Generate bin/ entry points declared in package.json.
    package_json = self._get_package_json(rootdir=self.builddir)
    _create_bins(package_json, self.installdir)

    # Capture the yarn lock file in the manifest when one exists.
    lock_file_path = os.path.join(self.installdir, "yarn.lock")
    if os.path.isfile(lock_file_path):
        with open(lock_file_path) as lock_file:
            self._manifest["yarn-lock-contents"] = lock_file.read()

    if self.options.nodejs_package_manager == "npm":
        packages = self._get_installed_node_packages(self.installdir)
        self._manifest["node-packages"] = [
            "{}={}".format(name, version) for name, version in packages.items()
        ]
    else:
        # Skip this step if yarn is used, as it may produce different
        # dependency trees than npm
        self._manifest["node-packages"] = []
def build(self) -> None:
    """Fetch dependencies, build the release bundle, and install it under bin/."""
    super().build()

    self.run(["flutter", "pub", "get"])
    self.run(
        [
            "flutter",
            "build",
            "linux",
            "--release",
            "-v",
            "-t",
            self.options.flutter_target,
        ]
    )

    # Flutter only supports arm64 and amd64; probe the arch-suffixed output
    # directories first and fall back to the un-suffixed layout.
    bundle_dir_path = Path(self.builddir, "build/linux/release/bundle")
    for arch in ("x64", "arm64"):
        candidate = Path(self.builddir, f"build/linux/{arch}/release/bundle")
        if candidate.exists():
            bundle_dir_path = candidate
            break

    install_bin_dir_path = Path(self.installdir) / "bin"
    install_bin_dir_path.mkdir(exist_ok=True)

    # Now move everything over to the plugin's installdir
    file_utils.link_or_copy_tree(
        bundle_dir_path.as_posix(), install_bin_dir_path.as_posix()
    )
def test_link_directory_overwrite_file_raises(self):
    """Copying a tree over an existing regular file must fail loudly."""
    open('qux', 'w').close()

    with self.assertRaises(NotADirectoryError) as raised:
        file_utils.link_or_copy_tree('foo', 'qux')

    expected = "Cannot overwrite non-directory 'qux' with directory 'foo'"
    self.assertEqual(str(raised.exception), expected)
def unpack(self, unpackdir) -> None:
    """Extract every downloaded .deb into unpackdir, tagging file origins."""
    for pkg in glob.glob(os.path.join(self._downloaddir, "*.deb")):
        with tempfile.TemporaryDirectory() as temp_dir:
            # Unpack into a scratch directory first.
            self._extract_deb(pkg, temp_dir)
            # Record which package each extracted file came from.
            self._mark_origin_stage_package(
                temp_dir, self._extract_deb_name_version(pkg)
            )
            file_utils.link_or_copy_tree(temp_dir, unpackdir)
    self.normalize(unpackdir)
def provision(
    self,
    dst: str,
    clean_target: bool = True,
    keep_snap: bool = False,
    src: str = None,
) -> None:
    """
    Provision the snap source to dst.

    :param str dst: the destination directory to provision to.
    :param bool clean_target: clean dst before provisioning if True.
    :param bool keep_snap: keep the snap after provisioning is done if
                           True.
    :param str src: force a new source to use for extraction.
    raises errors.InvalidSnapError: when trying to provision an invalid
                                    snap.
    """
    if src:
        snap_file = src
    else:
        snap_file = os.path.join(self.source_dir, os.path.basename(self.source))
    snap_file = os.path.realpath(snap_file)

    if clean_target:
        # Park the snap outside dst so wiping dst does not destroy it,
        # then move it back once dst is recreated.
        # NOTE(review): NamedTemporaryFile().name only reserves a path
        # briefly — a race is theoretically possible here; confirm.
        tmp_snap = tempfile.NamedTemporaryFile().name
        shutil.move(snap_file, tmp_snap)
        shutil.rmtree(dst)
        os.makedirs(dst)
        shutil.move(tmp_snap, snap_file)

    # unsquashfs [options] filesystem [directories or files to extract]
    # options:
    # -force: if file already exists then overwrite
    # -dest <pathname>: unsquash to <pathname>
    with tempfile.TemporaryDirectory(
        prefix=os.path.dirname(snap_file)) as temp_dir:
        extract_command = [
            "unsquashfs",
            "-force",
            "-dest",
            temp_dir,
            snap_file
        ]
        self._run_output(extract_command)
        snap_name = _get_snap_name(temp_dir)
        # Rename meta and snap dirs from the snap
        # (suffixing with the snap name avoids clashes in dst).
        rename_paths = (os.path.join(temp_dir, d) for d in ["meta", "snap"])
        rename_paths = (d for d in rename_paths if os.path.exists(d))
        for rename in rename_paths:
            shutil.move(rename, "{}.{}".format(rename, snap_name))
        file_utils.link_or_copy_tree(source_tree=temp_dir, destination_tree=dst)

    if not keep_snap:
        os.remove(snap_file)
def unpack_stage_packages(
    cls, *, stage_packages_path: pathlib.Path, install_path: pathlib.Path
) -> None:
    """Unpack all .deb stage packages found in stage_packages_path."""
    for deb_path in stage_packages_path.glob("*.deb"):
        with tempfile.TemporaryDirectory(suffix="deb-extract") as extract_dir:
            # Unpack the archive into a scratch directory.
            cls._extract_deb(deb_path, extract_dir)
            # Tag each extracted file with its originating package.
            cls._mark_origin_stage_package(
                extract_dir, cls._extract_deb_name_version(deb_path)
            )
            # Migrate the files into the install tree.
            file_utils.link_or_copy_tree(extract_dir, install_path.as_posix())
    cls.normalize(str(install_path))
def _organize_filesets(part_name, fileset, base_dir, overwrite):
    """Organize files in base_dir per the fileset mapping.

    :param part_name: part name, used only in error messages.
    :param fileset: mapping of source key (may contain globs) to destination.
    :param base_dir: directory both sides of the mapping are relative to.
    :param overwrite: when True, existing destination files/dirs may be
        replaced (only for a single-source match).
    :raises errors.SnapcraftOrganizeError: on destination conflicts.
    """
    # Literal keys sort before glob keys so explicit entries win.
    for key in sorted(fileset, key=lambda x: ["*" in x, x]):
        src = os.path.join(base_dir, key)
        # Remove the leading slash if there so os.path.join
        # actually joins
        dst = os.path.join(base_dir, fileset[key].lstrip("/"))

        sources = iglob(src, recursive=True)

        # Keep track of the number of glob expansions so we can properly error if more
        # than one tries to organize to the same file
        src_count = 0
        for src in sources:
            src_count += 1

            if os.path.isdir(src) and "*" not in key:
                file_utils.link_or_copy_tree(src, dst)
                # TODO create alternate organization location to avoid
                # deletions.
                shutil.rmtree(src)
                continue
            elif os.path.isfile(dst):
                if overwrite and src_count <= 1:
                    # Single match and overwrite allowed: clear the way.
                    with contextlib.suppress(FileNotFoundError):
                        os.remove(dst)
                elif src_count > 1:
                    raise errors.SnapcraftOrganizeError(
                        part_name,
                        "multiple files to be organized into {!r}. If this is supposed "
                        "to be a directory, end it with a forward slash.".format(
                            os.path.relpath(dst, base_dir)
                        ),
                    )
                else:
                    raise errors.SnapcraftOrganizeError(
                        part_name,
                        "trying to organize file {key!r} to {dst!r}, but {dst!r} "
                        "already exists".format(
                            key=key, dst=os.path.relpath(dst, base_dir)
                        ),
                    )
            if os.path.isdir(dst) and overwrite:
                # Moving into an existing directory: remove any entry that
                # would collide with the incoming basename.
                real_dst = os.path.join(dst, os.path.basename(src))
                if os.path.isdir(real_dst):
                    shutil.rmtree(real_dst)
                else:
                    with contextlib.suppress(FileNotFoundError):
                        os.remove(real_dst)
            os.makedirs(os.path.dirname(dst), exist_ok=True)
            shutil.move(src, dst)
def build(self):
    """Install the node project, then HACK: replace node with an armhf build.

    NOTE(review): the tail of this method re-downloads Node for armhf via
    curl and overwrites the node binary staged earlier — presumably a
    cross-build workaround; confirm before cleaning up.
    """
    super().build()

    package_dir = self._install(rootdir=self.builddir)

    # Now move everything over to the plugin's installdir
    link_or_copy_tree(package_dir, self.installdir)
    # Copy in the node binary
    link_or_copy(
        os.path.join(self._npm_dir, "bin", "node"),
        os.path.join(self.installdir, "bin", "node"),
    )

    # Create binary entries
    package_json = self._get_package_json(rootdir=self.builddir)
    _create_bins(package_json, self.installdir)

    # Record the yarn lock file in the manifest when present.
    lock_file_path = os.path.join(self.installdir, "yarn.lock")
    if os.path.isfile(lock_file_path):
        with open(lock_file_path) as lock_file:
            self._manifest["yarn-lock-contents"] = lock_file.read()

    # Get the names and versions of installed packages
    installed_node_packages = self._get_installed_node_packages(
        self.installdir)
    self._manifest["node-packages"] = [
        "{}={}".format(name, installed_node_packages[name])
        for name in installed_node_packages
    ]

    # HACK: fetch an armhf Node release regardless of host arch.
    self._nodejs_release_uri = get_nodejs_release(
        self.options.nodejs_version, 'armhf')
    self._nodejs_tar_handle = None

    print('Downloading Node for armhf')
    cmd = [
        'curl', self._nodejs_release_uri, '--output',
        os.path.join(self._npm_dir,
                     os.path.basename(self._nodejs_release_uri))
    ]
    subprocess.check_output(cmd)

    print('Provisioning Node for armhf')
    self._nodejs_tar.provision(
        self._npm_dir, clean_target=False, keep_tarball=True)

    print('Copying Node for armhf')
    # Overwrites the node binary staged at the top of this method.
    link_or_copy(
        os.path.join(self._npm_dir, "bin", "node"),
        os.path.join(self.installdir, "bin", "node"),
    )
def _update(self):
    """Sync updated directories and files from self.source into source_dir."""
    src_root = self.source
    dst_root = self.source_dir

    # Directories go first so files copied afterwards have a place to land.
    for subdir in self._updated_directories:
        file_utils.link_or_copy_tree(
            os.path.join(src_root, subdir),
            os.path.join(dst_root, subdir),
            ignore=self._ignore,
            copy_function=self.copy_function,
        )

    for rel_path in self._updated_files:
        self.copy_function(
            os.path.join(src_root, rel_path),
            os.path.join(dst_root, rel_path),
        )
def pull(self):
    """Copy the local source tree, skipping snapcraft files and *.snap artifacts."""
    current_dir = os.getcwd()
    source_abspath = os.path.abspath(self.source)

    def ignore(directory, files):
        # Only filter entries at the source root or the project root;
        # everything deeper is copied verbatim.
        if directory not in (source_abspath, current_dir):
            return []
        ignored = copy.copy(common.SNAPCRAFT_FILES)
        snaps = glob.glob(os.path.join(directory, '*.snap'))
        if snaps:
            ignored += [os.path.basename(s) for s in snaps]
        return ignored

    file_utils.link_or_copy_tree(source_abspath, self.source_dir,
                                 ignore=ignore)
def _copy_symlinked_content(modules_dir):
    """Copy symlinked content.

    When running newer versions of npm, symlinks to the local tree are
    created from the part's installdir to the root of the builddir of the
    part (this only affects some build configurations in some projects)
    which is valid when running from the context of the part but invalid
    as soon as the artifacts migrate across the steps,
    i.e.; stage and prime.

    If modules_dir does not exist we simply return.
    """
    if not os.path.exists(modules_dir):
        return

    for entry in os.listdir(modules_dir):
        link_path = os.path.join(modules_dir, entry)
        if not os.path.islink(link_path):
            continue
        # Resolve the target before removing the link, then materialize
        # a real copy in its place.
        target = os.path.realpath(link_path)
        os.unlink(link_path)
        link_or_copy_tree(target, link_path)
def build(self) -> None:
    """Build the Flutter app in release mode and install the bundle under bin/."""
    super().build()

    build_cmd = [
        "flutter",
        "build",
        "linux",
        "--release",
        "-v",
        "-t",
        self.options.flutter_target,
    ]
    self.run(build_cmd)

    bundle_dir = pathlib.Path(self.builddir) / "build/linux/release/bundle"
    bin_dir = pathlib.Path(self.installdir) / "bin"
    bin_dir.mkdir(exist_ok=True)

    # Now move everything over to the plugin's installdir
    file_utils.link_or_copy_tree(bundle_dir.as_posix(), bin_dir.as_posix())
def _organize_filesets(fileset, base_dir):
    """Organize entries within base_dir per the fileset mapping."""
    # Handle literal keys before glob keys for deterministic ordering.
    for key in sorted(fileset, key=lambda entry: ['*' in entry, entry]):
        pattern = os.path.join(base_dir, key)
        dst = os.path.join(base_dir, fileset[key])

        for src in iglob(pattern, recursive=True):
            if os.path.isdir(src) and '*' not in key:
                file_utils.link_or_copy_tree(src, dst)
                # TODO create alternate organization location to avoid
                # deletions.
                shutil.rmtree(src)
            elif os.path.isfile(dst):
                raise EnvironmentError(
                    'Trying to organize file {key!r} to {dst!r}, '
                    'but {dst!r} already exists'.format(
                        key=key, dst=os.path.relpath(dst, base_dir)))
            else:
                os.makedirs(os.path.dirname(dst), exist_ok=True)
                shutil.move(src, dst)
def _organize_filesets(fileset, base_dir):
    """Apply the organize fileset to files under base_dir."""
    for key in sorted(fileset, key=lambda k: ["*" in k, k]):
        source_pattern = os.path.join(base_dir, key)
        # Remove the leading slash if there so os.path.join
        # actually joins
        destination = os.path.join(base_dir, fileset[key].lstrip("/"))

        for match in iglob(source_pattern, recursive=True):
            is_plain_dir = os.path.isdir(match) and "*" not in key
            if is_plain_dir:
                file_utils.link_or_copy_tree(match, destination)
                # TODO create alternate organization location to avoid
                # deletions.
                shutil.rmtree(match)
            elif os.path.isfile(destination):
                raise errors.SnapcraftEnvironmentError(
                    "Trying to organize file {key!r} to {dst!r}, "
                    "but {dst!r} already exists".format(
                        key=key, dst=os.path.relpath(destination, base_dir)
                    )
                )
            else:
                os.makedirs(os.path.dirname(destination), exist_ok=True)
                shutil.move(match, destination)
def test_link_subtree(self):
    """Copying a nested subtree reproduces its files."""
    file_utils.link_or_copy_tree('foo/bar', 'qux')

    for parts in (('3',), ('baz', '4')):
        self.assertTrue(os.path.isfile(os.path.join('qux', *parts)))
def test_link_directory_to_directory(self):
    """Copying a directory reproduces its entire hierarchy."""
    file_utils.link_or_copy_tree("foo", "qux")

    expected = [("2",), ("bar", "3"), ("bar", "baz", "4")]
    for parts in expected:
        self.assertTrue(os.path.isfile(os.path.join("qux", *parts)))
def test_link_directory_to_directory(self):
    """All nested files must appear in the destination tree."""
    file_utils.link_or_copy_tree('foo', 'qux')

    copied = [
        os.path.join('qux', '2'),
        os.path.join('qux', 'bar', '3'),
        os.path.join('qux', 'bar', 'baz', '4'),
    ]
    for path in copied:
        self.assertTrue(os.path.isfile(path))
def test_link_file_into_directory(self):
    """Passing a regular file as the source must raise NotADirectoryError."""
    os.mkdir('qux')

    with self.assertRaises(NotADirectoryError) as raised:
        file_utils.link_or_copy_tree('1', 'qux')

    expected_message = "'1' is not a directory"
    self.assertEqual(str(raised.exception), expected_message)
def test_link_subtree(self):
    """A subtree copy brings along its nested files."""
    file_utils.link_or_copy_tree("foo/bar", "qux")

    for rel_parts in (("3",), ("baz", "4")):
        self.assertTrue(os.path.isfile(os.path.join("qux", *rel_parts)))
def install_stage_packages(
    cls, *, package_names: List[str], install_dir: str
) -> List[str]:
    """Resolve, fetch and unpack stage packages into install_dir.

    :param package_names: packages to stage; entries may carry an explicit
        ``name=version`` and may be virtual packages to resolve.
    :param install_dir: directory the package contents are extracted into.
    :return: ``name=version`` strings for every package actually staged.
    :raises errors.PackageFetchError: when a package archive cannot be
        downloaded.
    """
    marked_packages: Dict[str, apt.package.Version] = dict()
    skipped_blacklisted: Set[str] = set()
    skipped_essential: Set[str] = set()

    logger.debug(f"Requested stage-packages: {sorted(package_names)!r}")

    # First scan all packages and set desired version, if specified.
    # We do this all at once in case it gets added as a dependency
    # along the way.
    for name in package_names:
        name, specified_version = repo.get_pkg_name_parts(name)
        package = cls._get_resolved_package(name)
        if name != package.name:
            logger.info(
                f"virtual stage-package {name!r} resolved to {package.name!r}"
            )
        if specified_version:
            cls._set_package_version(package, specified_version)

    # Second pass: mark each requested package and its dependency closure.
    for name in package_names:
        name, _ = repo.get_pkg_name_parts(name)
        package = cls._get_resolved_package(name)
        cls._mark_package_dependencies(
            package=package,
            marked_packages=marked_packages,
            skipped_blacklisted=skipped_blacklisted,
            skipped_essential=skipped_essential,
            unfiltered_packages=package_names,
        )

    marked = sorted(marked_packages.keys())
    logger.debug(f"Installing staged-packages {marked!r} to {install_dir!r}")

    if skipped_blacklisted:
        blacklisted = sorted(skipped_blacklisted)
        logger.debug(f"Skipping blacklisted packages: {blacklisted!r}")

    if skipped_essential:
        essential = sorted(skipped_essential)
        logger.debug(f"Skipping priority essential packages: {essential!r}")

    for pkg_name, pkg_version in marked_packages.items():
        try:
            dl_path = pkg_version.fetch_binary(cls._cache_dir)
        except apt.package.FetchError as e:
            raise errors.PackageFetchError(str(e))

        logger.debug(f"Extracting stage package: {pkg_name}")

        with tempfile.TemporaryDirectory() as temp_dir:
            # Extract deb package.
            cls._extract_deb(dl_path, temp_dir)

            # Mark source of files.
            marked_name = f"{pkg_name}:{pkg_version.version}"
            cls._mark_origin_stage_package(temp_dir, marked_name)

            # Stage files to install_dir.
            file_utils.link_or_copy_tree(temp_dir, install_dir)

    cls.normalize(install_dir)

    return [
        f"{pkg_name}={pkg_version}"
        for pkg_name, pkg_version in marked_packages.items()
    ]