def _lookup_by_mapping():
    """Return the init system matching the current distro, as a list.

    Looks up ``const.DIST_TO_INITSYS`` first by the distro's ID and, failing
    that, by the "ID_LIKE" field from /etc/os-release.  A version failover of
    'any' is used for rolling distros whose version cannot identify the init
    system (e.g. Arch, whose version is usually "rolling", and derivatives
    such as Manjaro or Antergos whose ID varies but whose ID_LIKE is `arch`).

    Returns a one-item list with the init system info on success, an empty
    list when the distro is mapped but the version is not, and (implicitly)
    None when the distro itself is unknown — preserved for callers that
    distinguish the two falsy results.
    """
    like = distro.like().lower()
    distribution_id = distro.id().lower()
    version = distro.major_version()
    # BUG FIX: `in ('arch')` was a substring test against the string 'arch'
    # (so e.g. 'a' or 'rc' matched); a one-element tuple needs the comma.
    if distribution_id in ('arch',):
        version = 'any'
    if like in ('arch',):
        version = 'any'
    init_sys = const.DIST_TO_INITSYS.get(
        distribution_id, const.DIST_TO_INITSYS.get(like))
    if init_sys:
        # BUG FIX: `[x] or []` was always truthy even when x is None, so the
        # `or []` arm was unreachable and [None] could leak out.  Return an
        # empty list explicitly when the version has no mapping.
        entry = init_sys.get(version)
        return [entry] if entry is not None else []
def set_distro():
    """Return a coarse platform-family string for the running system.

    Returns:
        'darwin' on macOS, or one of 'debian', 'fedora', 'arch' on Linux
        (derivatives are grouped under their parent family).

    Raises:
        ValueError: for unsupported operating systems or distributions.
    """
    # BUG FIX: the local was named `os`, shadowing the `os` module name;
    # renamed without changing behavior.
    platform_id = sys.platform
    if platform_id == 'darwin':
        return 'darwin'
    elif platform_id.startswith('linux'):
        try:
            import distro
            dist = distro.id()
            name = distro.name()
        except ImportError:
            dist = 'ubuntu'  # default value, will remove DISTRO
            name = 'Ubuntu'  # in future.
        # To add new distributions, refer to:
        # http://distro.readthedocs.io/en/latest/#distro.id
        # http://linuxmafia.com/faq/Admin/release-files.html
        if dist in ('ubuntu', 'debian'):
            return 'debian'
        elif dist in ('fedora', 'rhel', 'centos'):
            return 'fedora'
        elif dist == 'arch':
            return 'arch'
        else:
            raise ValueError(
                "Not supported for your Linux distribution: {}".format(name)
            )
    else:
        raise ValueError(
            "Not supported for your OS: {}".format(platform_id)
        )
def switch_repo(subscription, on=True):
    """Create or remove the Rockstor yum repo file for *subscription*.

    With on=True, writes /etc/yum.repos.d/Rockstor-<name>.repo, embedding
    appliance credentials in the baseurl when a password is configured.
    With on=False, removes that file if present.
    """
    repos_dir = '/etc/yum.repos.d'
    yum_file = '{}/Rockstor-{}.repo'.format(repos_dir, subscription.name)
    # Historically our base subscription url denotes our CentOS rpm repo.
    subscription_distro_url = subscription.url
    dist_id = distro.id()
    if dist_id == 'opensuse-leap':
        subscription_distro_url += '/leap/{}'.format(distro.version())
    elif dist_id == 'opensuse-tumbleweed':
        subscription_distro_url += '/tumbleweed'
    # Ensure /etc/yum.repos.d exists; a single level is enough here.
    # (os.makedirs(path) would cover missing intermediate levels.)
    if not os.path.isdir(repos_dir):
        os.mkdir(repos_dir)
    if not on:
        # Disabling: drop the repo file if it exists and we are done.
        if os.path.exists(yum_file):
            os.remove(yum_file)
        return
    if subscription.password is not None:
        baseurl_line = 'baseurl=http://%s:%s@%s\n' % (
            subscription.appliance.uuid,
            subscription.password,
            subscription_distro_url)
    else:
        baseurl_line = 'baseurl=http://%s\n' % subscription_distro_url
    content = [
        '[Rockstor-%s]\n' % subscription.name,
        'name=%s\n' % subscription.description,
        baseurl_line,
        'enabled=1\n',
        'gpgcheck=1\n',
        'gpgkey=file://%sconf/ROCKSTOR-GPG-KEY\n' % settings.ROOT_DIR,
        'metadata_expire=1h\n',
    ]
    with open(yum_file, 'w') as repo_file:
        repo_file.writelines(content)
    # Restrict file to rw- --- --- (600) via stat constants.
    os.chmod(yum_file, stat.S_IRUSR | stat.S_IWUSR)
def __init__(self):
    """Detect the running operating system and populate version fields."""
    self.os_version = None
    self.os_version_name = None
    # Query the platform name once instead of per-flag.
    system = platform.system()
    self.is_linux = system == "Linux"
    self.linux_distro = None
    self.is_windows = system == "Windows"
    self.is_macos = system == "Darwin"
    self.is_freebsd = system == "FreeBSD"
    self.is_solaris = system == "SunOS"
    if self.is_linux:
        import distro
        self.linux_distro = distro.id()
        self.os_version = Version(distro.version())
        codename = distro.codename()
        # distro reports "n/a" when no codename exists; normalise to "".
        self.os_version_name = "" if codename == "n/a" else codename
        if not self.os_version_name and self.linux_distro == "debian":
            self.os_version_name = self.get_debian_version_name(self.os_version)
    elif self.is_windows:
        self.os_version = self.get_win_os_version()
        self.os_version_name = self.get_win_version_name(self.os_version)
    elif self.is_macos:
        self.os_version = Version(platform.mac_ver()[0])
        self.os_version_name = self.get_osx_version_name(self.os_version)
    elif self.is_freebsd:
        self.os_version = self.get_freebsd_version()
        self.os_version_name = "FreeBSD %s" % self.os_version
    elif self.is_solaris:
        self.os_version = Version(platform.release())
        self.os_version_name = self.get_solaris_version_name(self.os_version)
def _pm_autodetect(self):
    """Resolve self.packagemanager when it is set to 'autodetect'.

    Uses the distro family (distro.like()) and falls back to the distro ID
    when no family is reported.  BUG FIX: distro.like() may contain several
    space-separated IDs (e.g. "ubuntu debian"), which the previous
    exact-string comparison could never match; tokens are now matched
    individually.  Leaves self.packagemanager untouched for unknown distros.
    """
    if self.packagemanager != 'autodetect':
        return
    family = distro.like()
    if family == '':
        family = distro.id()
    tokens = family.split()
    if any(token in ('debian', 'ubuntu') for token in tokens):
        self.packagemanager = 'apt'
    elif 'arch' in tokens:
        self.packagemanager = 'pacman'
def post(self, request, command):
    """Handle service-control POSTs: 'config', 'start' or 'stop'.

    'config' validates the root share and persists the configuration;
    'start' rewrites the docker systemd unit for the current distro and
    enables/starts it; 'stop' stops and disables the unit.
    """
    service = Service.objects.get(name=self.name)
    if command == 'config':
        config = request.data.get('config', None)
        root_share = config['root_share']
        self._validate_root(request, root_share)
        self._save_config(service, config)
    elif command == 'start':
        try:
            config = self._get_config(service)
        except Exception as e:
            logger.exception(e)
            e_msg = ('Cannot start without configuration. '
                     'Please configure (System->Services) and try again.')
            handle_exception(Exception(e_msg), request)
        share = self._validate_root(request, config['root_share'])
        mnt_pt = '{}{}'.format(settings.MNT_PT, share.name)
        if not share.is_mounted:
            mount_share(share, mnt_pt)
        docker_wrapper = '{}bin/docker-wrapper'.format(settings.ROOT_DIR)
        distro_id = distro.id()  # for Leap 15 <--> Tumbleweed moves.
        # Fall back to a generic unit template for unrecognised distros.
        if distro_id not in KNOWN_DISTRO_IDS:
            distro_id = 'generic'
        # TODO: Consider sourcing /usr/lib/systemd/system/docker.service
        inf = '{}/docker-{}.service'.format(settings.CONFROOT, distro_id)
        outf = '/etc/systemd/system/docker.service'
        with open(inf) as ino, open(outf, 'w') as outo:
            # Rewrite the distro unit template line-by-line into the live
            # systemd unit, patching specific directives as we go.
            for l in ino.readlines():
                if re.match('ExecStart=', l) is not None:
                    # Route dockerd through our wrapper and append the
                    # configured mount point as its argument.
                    outo.write('{} {}\n'.format(
                        l.strip().replace(DOCKERD, docker_wrapper, 1),
                        mnt_pt))
                elif re.match('Type=notify', l) is not None:
                    # Our docker wrapper use need NotifyAccess=all: avoids
                    # "Got notification message from PID ####1, but
                    # reception only permitted for main PID ####2"
                    outo.write(l)
                    outo.write('NotifyAccess=all\n')
                elif re.match('After=', l) is not None:
                    # Make docker start only after our bootstrap service.
                    outo.write('{} {}\n'.format(
                        l.strip(), 'rockstor-bootstrap.service'))
                else:
                    outo.write(l)
        if distro_id == 'rockstor':
            socket_file = '{}/docker.socket'.format(settings.CONFROOT)
            shutil.copy(socket_file, '/etc/systemd/system/docker.socket')
        systemctl(self.name, 'enable')
        systemctl(self.name, 'start')
    elif command == 'stop':
        systemctl(self.name, 'stop')
        systemctl(self.name, 'disable')
    return Response()
def update_check(subscription=None):
    """Return (installed_version, new_version, changelog_lines) for rockstor.

    When *subscription* is given, its repo file is (re)written first.  Parses
    `yum changelog` output; falls back to a `yum update --assumeno` probe for
    updates that ship without changelog entries (same-day/testing updates).
    Unconfigured repos (e.g. pending Leap releases) are logged and treated as
    "no update available" rather than raising.
    """
    if (subscription is not None):
        switch_repo(subscription)
    pkg = 'rockstor'
    version, date = rpm_build_info(pkg)
    if date is None:
        # None date signifies no rpm installed so list all changelog entries.
        date = 'all'
    log = False
    available = False
    new_version = None
    updates = []
    try:
        o, e, rc = run_command([YUM, 'changelog', date, pkg])
    except CommandException as e:
        # Catch as yet unconfigured repos ie Leap 15.1: error log accordingly.
        # Avoids breaking current version display and update channel selection.
        emsg = 'Error\\: Cannot retrieve repository metadata \\(repomd.xml\\)'
        if re.match(emsg, e.err[-2]) is not None:
            logger.error('Rockstor repo for distro.id ({}) version ({}) may '
                         'not exist: pending or deprecated.\nReceived: ({}).'
                         .format(distro.id(), distro.version(), e.err))
            new_version = version  # Explicitly set (flag) for code clarity.
            return version, new_version, updates
        # Otherwise re-raise as normal.
        # FIX: bare `raise` preserves the original traceback (was `raise e`).
        raise
    for l in o:
        if (re.search('Available Packages', l) is not None):
            available = True
        if (not available):
            continue
        if (new_version is None and (re.match('rockstor-', l) is not None)):
            new_version = l.split()[0].split(
                'rockstor-')[1].split('.x86_64')[0]
        if (log is True):
            updates.append(l)
        if (len(l.strip()) == 0):
            log = False
        # FIX: raw string avoids the invalid '\*' escape sequence warning.
        if (re.match(r'\* ', l) is not None):
            log = True
    if (new_version is None):
        new_version = version
        # do a second check which is valid for updates without changelog
        # updates. eg: same day updates, testing updates.
        o, e, rc = run_command([YUM, 'update', pkg, '--assumeno'], throw=False)
        if (rc == 1):
            for l in o:
                if (re.search('will be an update', l) is not None):
                    if (re.search('rockstor.x86_64', l) is not None):
                        new_version = l.strip().split()[3].split(':')[1]
    return version, new_version, updates
def install_help_msg(self):
    """
    The help message to show if the package is not installed. The help
    message shown depends on whether some class variables are present.
    """
    def _try_managers(*managers):
        # Return an install hint for the first manager that both has a
        # mapping in self.pkg_names and is present on PATH; else None.
        for manager in managers:
            pkg_name = self.pkg_names.get(manager, None)
            if pkg_name and which(manager) is not None:
                pkg_note = None
                if isinstance(pkg_name, (tuple, list)):
                    pkg_name, pkg_note = pkg_name
                msg = ('Try installing {0} with `{1} install {2}`.'
                       .format(self.name, manager, pkg_name))
                if pkg_note:
                    msg += ' Note: ' + pkg_note
                return msg

    message = ""
    if sys.platform == "win32":
        url = self.pkg_names.get("windows_url", None)
        if url:
            return ('Please check {0} for instructions to install {1}'
                    .format(url, self.name))
    elif sys.platform == "darwin":
        manager_message = _try_managers("brew", "port")
        return manager_message or message
    elif sys.platform.startswith("linux"):
        try:
            import distro
        except ImportError:
            setup_install(['distro'])
            import distro
        release = distro.id()
        if release in ('debian', 'ubuntu', 'linuxmint', 'raspbian'):
            manager_message = _try_managers('apt-get')
            if manager_message:
                return manager_message
        elif release in ('centos', 'rhel', 'redhat', 'fedora',
                         'scientific', 'amazon', ):
            manager_message = _try_managers('dnf', 'yum')
            if manager_message:
                return manager_message
        elif release in ('sles', 'opensuse'):
            manager_message = _try_managers('zypper')
            if manager_message:
                return manager_message
        # BUG FIX: `release in ('arch')` was a substring test against the
        # string 'arch' (so 'a', 'ar', 'ch', ... also matched); a one-element
        # tuple needs the trailing comma.
        elif release in ('arch',):
            manager_message = _try_managers('pacman')
            if manager_message:
                return manager_message
    return message
def finalize_options(self):
    """Finalise install options, defaulting to the 'deb' install layout on
    Debian-family distributions, then delegate to distutils."""
    deb_family = ('debian', 'ubuntu', 'linuxmint')
    if platform.system() == 'Linux':
        # linux_distribution has been removed in Python 3.8; we require
        # the third-party distro package for future handling.
        try:
            distribution = platform.linux_distribution()[0].lower()
        except AttributeError:
            try:
                distribution = distro.id()
            except NameError:
                distribution = 'unknown'
        if distribution in deb_family:
            # Maintain an explicit install-layout, but use deb by default
            self.install_layout = getattr(self, 'install_layout', None) or 'deb'
    distutils.command.install.install.finalize_options(self)
def identify_packages(files):
    """Organizes the files, using the distribution's package manager.
    """
    dist_name = distro.id()
    is_debian_like = dist_name in ('debian', 'ubuntu')
    is_rpm_like = (dist_name in ('centos', 'centos linux',
                                 'fedora', 'scientific linux')
                   or dist_name.startswith('red hat'))
    if is_debian_like:
        logger.info("Identifying Debian packages for %d files...", len(files))
        manager = DpkgManager()
    elif is_rpm_like:
        logger.info("Identifying RPM packages for %d files...", len(files))
        manager = RpmManager()
    else:
        # No supported package manager: nothing can be attributed.
        logger.info("Unknown distribution, can't identify packages")
        return files, []
    begin = time.time()
    manager.search_for_files(files)
    logger.debug("Assigning files to packages took %f seconds",
                 (time.time() - begin))
    return manager.unknown_files, listvalues(manager.packages)
def _default_platform_name(distutils_util_get_platform):
    """Guess a sane default platform name.

    On OS X and Windows, just uses the default platform name. On Linux,
    uses information from the `platform` module to try to make something
    reasonable.
    """
    def grab_version(string, num):
        """Grab the `num` most significant components of a version string.

        >>> grab_version('12.04.1', 2)
        '12.04'
        >>> grab_version('8.2', 1)
        '8'
        """
        return '.'.join(string.split('.')[:num])

    if platform.system() == 'Linux':
        dist = re.sub('linux$', '', distro.id().lower()).strip()
        version = distro.version()
        # Try to determine a good "release" name. This is highly dependent on
        # distribution and what guarantees they provide between versions.
        one_component = {'debian', 'rhel', 'centos', 'fedora', 'opensuse'}
        two_components = {'ubuntu', 'amzn'}
        release = None
        if dist in one_component:
            release = grab_version(version, 1)
        elif dist in two_components:
            release = grab_version(version, 2)
        if release:
            return 'linux_{dist}_{release}_{arch}'.format(
                dist=_sanitize_platform(dist),
                release=_sanitize_platform(release),
                arch=_sanitize_platform(platform.machine()),
            )
    # For Windows, OS X, or Linux distributions we couldn't identify, just
    # fall back to whatever pip normally uses.
    return _sanitize_platform(distutils_util_get_platform())
def platform_profiles(self):
    """Return the sorted list of "platform:<atom>" profiles for this host.

    Atoms include the distro ID, codename/release-derived variants,
    distro aliases (e.g. rhel <-> redhatenterpriseserver), family atoms
    (suse/redhat) and the package-manager family (dpkg/rpm/...).  Also
    sets self.platform to the matching package-manager backend.
    """
    if platform.system() == 'Darwin':
        atoms = set(['darwin'])
        # detect available macos package managers
        if os.system('which brew >/dev/null') == 0:
            atoms.add('brew')
            self.platform = Brew()
        return ["platform:%s" % (atom,) for atom in sorted(atoms)]
    distro_id = distro.id()
    if not distro_id:
        log = logging.getLogger(__name__)
        log.error('Unable to determine distro ID. '
                  'Does /etc/os-release exist or '
                  'is lsb_release installed?')
        raise Exception('Distro name not found')
    # NOTE(toabctl): distro can be more than one string (i.e. "SUSE LINUX")
    codename = distro.codename().lower()
    release = distro.version().lower()
    # NOTE(toabctl): space is a delimiter for bindep, so remove the spaces
    distro_id = "".join(distro_id.split()).lower()
    atoms = set([distro_id])
    atoms.update(self.codenamebits(distro_id, codename))
    atoms.update(self.releasebits(distro_id, release))
    if distro_id in ["debian", "ubuntu"]:
        atoms.add("dpkg")
        self.platform = Dpkg()
    # RPM distros seem to be especially complicated
    elif distro_id in ["amzn", "amazonami", "centos", "rhel",
                       "redhatenterpriseserver",
                       "redhatenterpriseworkstation",
                       "fedora", "opensuseproject", "opensuse",
                       "opensuse-leap", "opensuse-tumbleweed",
                       "sles", "suselinux"]:
        # Distro aliases: each known spelling also registers the atoms of
        # its alternate name(s), so bindep files can use either.
        if distro_id in ["redhatenterpriseserver",
                         "redhatenterpriseworkstation"]:
            # just short alias
            atoms.add("rhel")
            atoms.update(self.codenamebits("rhel", codename))
            atoms.update(self.releasebits("rhel", release))
        elif distro_id == 'rhel' and 'server' in distro.name().lower():
            atoms.add("redhatenterpriseserver")
            atoms.update(self.codenamebits("redhatenterpriseserver",
                                           codename))
            atoms.update(self.releasebits("redhatenterpriseserver",
                                          release))
        elif (distro_id == 'rhel' and
              'workstation' in distro.name().lower()):
            atoms.add("redhatenterpriseworkstation")
            atoms.update(self.codenamebits("redhatenterpriseworkstation",
                                           codename))
            atoms.update(self.releasebits("redhatenterpriseworkstation",
                                          release))
        elif "amzn" in distro_id:
            atoms.add("amazonami")
            atoms.update(self.codenamebits("amazonami", codename))
            atoms.update(self.releasebits("amazonami", release))
        elif "amazonami" in distro_id:
            atoms.add("amzn")
            atoms.update(self.codenamebits("amzn", codename))
            atoms.update(self.releasebits("amzn", release))
        elif "opensuse" in distro_id:
            # just short alias
            atoms.add("opensuse")
            atoms.update(self.codenamebits("opensuse", codename))
            atoms.update(self.releasebits("opensuse", release))
            atoms.add("opensuseproject")
            atoms.update(self.codenamebits("opensuseproject", codename))
            atoms.update(self.releasebits("opensuseproject", release))
        elif "sles" in distro_id:
            atoms.add("suselinux")
            atoms.update(self.codenamebits("suselinux", codename))
            atoms.update(self.releasebits("suselinux", release))
        elif "suselinux" in distro_id:
            atoms.add("sles")
            atoms.update(self.codenamebits("sles", codename))
            atoms.update(self.releasebits("sles", release))
        # Family aliases
        if 'suse' in distro_id or distro_id == 'sles':
            atoms.add("suse")
        else:
            atoms.add("redhat")
        atoms.add("rpm")
        self.platform = Rpm()
    elif distro_id in ["gentoo"]:
        atoms.add("emerge")
        self.platform = Emerge()
    elif distro_id in ["arch"]:
        atoms.add("pacman")
        self.platform = Pacman()
    elif distro_id in ["alpine"]:
        atoms.add("apk")
        self.platform = Apk()
    else:
        # Unrecognised distro: no package-manager backend available.
        self.platform = Unknown()
    return ["platform:%s" % (atom,) for atom in sorted(atoms)]
def get_os_string():
    """ Return distribution string, e.g. 'Debian_7.4'. """
    parts = [distro.id().capitalize(), distro.version()]
    # Drop empty components before joining; report 'unknown' when nothing
    # could be determined at all.
    joined = '_'.join(part for part in parts if part)
    return joined if joined else 'unknown'
else:
    # NOTE(review): this else/return belongs to a function whose start is
    # outside this view; indentation reconstructed — confirm against the
    # full file.
    return True


#
# Encoding
#
# Default text encoding of the running interpreter.
DEFAULT_ENCODING = sys.getdefaultencoding()

#
# Linux distro
#
if ON_LINUX:
    if distro:
        # Preferred source: the third-party `distro` package, when it was
        # importable at module load time.
        LINUX_DISTRO = distro.id()
    elif PYTHON_VERSION_INFO < (3, 7, 0):
        # platform.linux_distribution() existed prior to Python 3.8.
        LINUX_DISTRO = platform.linux_distribution()[0] or 'unknown'
    elif '-ARCH-' in platform.platform():
        LINUX_DISTRO = 'arch'  # that's the only one we need to know for now
    else:
        LINUX_DISTRO = 'unknown'
else:
    # Not Linux: no distro notion applies.
    LINUX_DISTRO = None

#
# Windows
#
if ON_WINDOWS:
def __init__(self, config: MiniDLNAIndicatorConfig, test_mode: bool) -> None:
    """Build the indicator: D-Bus single-instance guard, tray icon, all
    menu items and the background threads (process runner, filesystem
    monitor, update checker); finally triggers MiniDLNA detection.

    Raises RuntimeError when no D-Bus session is available and
    AlreadyRunningException when another instance owns the app bus name.
    """
    self.logger = logging.getLogger(__name__)  # type: logging.Logger

    # Claim a well-known D-Bus name so only one indicator instance runs.
    DBusGMainLoop(set_as_default=True)
    try:
        self.session_bus = dbus.SessionBus(dbus.mainloop.glib.DBusGMainLoop())
    except dbus.DBusException:
        raise RuntimeError(_("No D-Bus connection"))
    if self.session_bus.name_has_owner(APP_DBUS_DOMAIN):
        raise AlreadyRunningException()
    bus_name = dbus.service.BusName(APP_DBUS_DOMAIN, self.session_bus)
    dbus.service.Object.__init__(
        self,
        object_path=APP_DBUS_PATH,
        bus_name=bus_name
    )

    self.config = config
    self.test_mode = test_mode

    # Tray icon starts grey until MiniDLNA is detected/running.
    self.indicator = AppIndicator3.Indicator.new(
        APPINDICATOR_ID, MINIDLNA_ICON_GREY,
        AppIndicator3.IndicatorCategory.APPLICATION_STATUS)
    self.indicator.set_status(AppIndicator3.IndicatorStatus.ACTIVE)

    self.minidlna_config = MiniDLNAConfig(self, MINIDLNA_CONFIG_FILE)
    self.minidlna_path = None  # type: Optional[str]

    # Build menu items
    self.menu = Gtk.Menu()
    self.indicator.set_menu(self.menu)

    # On distros whose package manager we integrate with, offer in-place
    # install; otherwise link to the manual install instructions.
    if distro.id() in ["fedora", "centos", "rhel", "ubuntu", "mint"]:
        self.detect_menuitem = Gtk.MenuItem(
            _("MiniDLNA not installed; click here to install"))
        self.detect_menuitem.connect(
            'activate',
            lambda _: self.detect_minidlna(auto_start=True,
                                           ask_for_install=True))
    else:
        self.detect_menuitem = Gtk.MenuItem(
            _("MiniDLNA not installed; click here to show how to install"))
        self.detect_menuitem.connect(
            'activate',
            lambda _: self.run_xdg_open(
                None, "https://github.com/okelet/minidlnaindicator"))

    self.start_menuitem = Gtk.MenuItem(_("Start MiniDLNA"))
    self.start_menuitem.connect('activate',
                                lambda _: self.start_minidlna())

    self.start_reindex_menuitem = Gtk.MenuItem(
        _("Start and reindex MiniDLNA"))
    self.start_reindex_menuitem.connect(
        'activate', lambda _: self.start_minidlna(True))

    self.restart_menuitem = Gtk.MenuItem(_("Restart MiniDLNA"))
    self.restart_menuitem.connect('activate',
                                  lambda _: self.restart_minidlna())

    self.restart_reindex_menuitem = Gtk.MenuItem(
        _("Restart and reindex MiniDLNA"))
    self.restart_reindex_menuitem.connect(
        'activate', lambda _: self.restart_minidlna(True))

    self.stop_menuitem = Gtk.MenuItem(_("Stop MiniDLNA"))
    self.stop_menuitem.connect('activate', lambda _: self.stop_minidlna())

    self.weblink_menuitem = Gtk.MenuItem(
        _("Web interface (port {port})").format(
            port=self.minidlna_config.port))
    self.weblink_menuitem.connect('activate',
                                  self.on_weblink_menuitem_activated)

    self.showlog_menuitem = Gtk.MenuItem(_("Show MiniDLNA LOG"))
    self.showlog_menuitem.connect('activate',
                                  self.on_showlog_menuitem_activated)

    self.editconfig_menuitem = Gtk.MenuItem(
        _("Edit MiniDLNA configuration"))
    self.editconfig_menuitem.connect(
        'activate', self.on_editconfig_menuitem_activated)

    self.indicator_startup_menuitem = Gtk.CheckMenuItem(
        _("Autostart indicator"))
    self.indicator_startup_menuitem.connect(
        'activate', self.indicator_startup_menuitem_toggled)
    self.indicator_startup_menuitem.set_active(self.config.auto_start)

    self.update_available = None
    self.new_version_menuitem = Gtk.MenuItem(
        _("A new version of MiniDLNA has been detected; click here to show how to upgrade"))
    self.new_version_menuitem.connect(
        'activate', self.run_xdg_open,
        "https://github.com/okelet/minidlnaindicator")

    self.minidlna_help_menuitem = Gtk.MenuItem(_("MiniDLNA help"))
    self.minidlna_help_menuitem.connect(
        'activate', self.run_xdg_open,
        "https://help.ubuntu.com/community/MiniDLNA")

    self.indicator_help_menuitem = Gtk.MenuItem(
        _("MiniDLNA Indicator help"))
    self.indicator_help_menuitem.connect(
        'activate', self.run_xdg_open,
        "https://github.com/okelet/minidlnaindicator")

    self.item_quit = Gtk.MenuItem(_("Quit"))
    self.item_quit.connect('activate', self.quit)

    self.runner = ProcessRunner()
    self.runner.add_listener(self)

    self.rebuild_menu()

    # Init notifications before running minidlna
    Notify.init(APPINDICATOR_ID)

    # FS Monitor
    self.fs_monitor = FSMonitorThread()
    self.fs_monitor.add_listener(self)
    self.fs_monitor.start()

    # Update check
    self.update_checker = UpdateCheckThread(self.config,
                                            "minidlnaindicator",
                                            module_version,
                                            self.test_mode)
    self.update_checker.add_listener(self)
    self.update_checker.start()

    # Detect minidlna and rebuild menu
    self.detect_minidlna()
def build_pyvulkan():
    """use cmake to build and install the extension
    """
    if cmake_path is None:
        # cmake is missing: compose the most helpful install hint we can
        # for the current platform before failing the build.
        cmake_install_url = "https://cmake.org/install/"
        message = ("You can install cmake using the instructions at " +
                   cmake_install_url)
        msg_pkgmanager = ("You can install cmake on {0} using "
                          "`sudo {1} install cmake`.")
        if sys.platform == "darwin":
            pkgmanagers = ('brew', 'port')
            for manager in pkgmanagers:
                if find_executable(manager) is not None:
                    message = msg_pkgmanager.format('OSX', manager)
                    break
        elif sys.platform.startswith('linux'):
            try:
                import distro
            except ImportError as err:
                # Try installing `distro` on the fly so we can name the
                # right package manager; on failure keep the generic hint.
                import pip
                pip_exit = pip.main(['install', '-q', 'distro'])
                if pip_exit > 0:
                    log.debug("Unable to install `distro` to identify "
                              "the recommended command. Falling back "
                              "to default error message.")
                    # Sentinel: `distro` bound to the ImportError instance
                    # marks the lookup as unavailable below.
                    distro = err
                else:
                    import distro
            if not isinstance(distro, ImportError):
                distname = distro.id()
                if distname in ('debian', 'ubuntu'):
                    message = msg_pkgmanager.format(
                        distname.title(), 'apt-get')
                elif distname in ('fedora', 'centos', 'redhat'):
                    pkgmanagers = ("dnf", "yum")
                    for manager in pkgmanagers:
                        if find_executable(manager) is not None:
                            message = msg_pkgmanager.format(
                                distname.title(), manager)
                            break
        raise DistutilsSetupError(
            "Cannot find cmake, ensure it is installed and in the path.\n" +
            message + "\n"
            "You can also specify its path with --cmake parameter.")

    cmake_extra = []
    cmake_gen = []
    if sys.platform.startswith('linux'):
        this_file_dir = os.path.dirname(os.path.realpath(__file__))
        numpy_swig_inc_path = os.path.join(this_file_dir, 'numpy_swig')
        cmake_extra += ['-DNUMPY_SWIG_DIR=' + numpy_swig_inc_path]
        cmake_extra += ['-DCMAKE_BUILD_TYPE=RELEASE']
    elif sys.platform == "win32":
        # Hard-coded SDK/SWIG locations for the Windows build environment.
        cmake_gen = ['-G', 'Visual Studio 14 2015 Win64']
        cmake_extra += ['-DSWIG_DIR=C:/DEV/swigwin-3.0.12']
        cmake_extra += ['-DSWIG_EXECUTABLE=C:/dev/swigwin-3.0.12/swig.exe']
        cmake_extra += ['-DNUMPY_SWIG_DIR=C:/dev/pyvulkan/numpy_swig/']
        cmake_extra += ['-DVULKAN_SDK=c:/VulkanSDK/1.0.65.0/']
    if sys.version_info >= (3, 0):
        cmake_extra += ['-DPYTHON3=yes']
    platform_arch = platform.architecture()[0]
    log.info("Detected Python architecture: %s" % platform_arch)
    # make sure build artifacts are generated for the version of Python
    # currently running
    inc_dir = get_python_inc()
    lib_dir = get_config_var('LIBDIR')
    if (inc_dir != None):
        cmake_extra += ['-DPYTHON_INCLUDE_DIR=' + inc_dir]
    if (lib_dir != None):
        cmake_extra += ['-DCMAKE_LIBRARY_PATH=' + lib_dir]
    cmake_extra += ['-DCMAKE_INSTALL_PREFIX=../install']
    log.info("Detected platform: %s" % sys.platform)
    build_dir = os.path.join(script_dir, "./build")
    if os.path.exists(build_dir):
        # Always build from a clean tree.
        log.info('Removing build directory %s' % build_dir)
        rmtree(build_dir)
    try:
        os.makedirs(build_dir)
    except OSError:
        pass
    # cd build
    os.chdir(build_dir)
    log.info('Configuring cmake ...')
    cmake_cmd = [
        cmake_path,
        "..",
    ] + cmake_gen + cmake_extra
    if run_process(cmake_cmd):
        raise DistutilsSetupError("cmake configuration failed!")
    log.info('Build using cmake ...')
    cmake_cmd = [
        cmake_path,
        "--build", ".",
        "--config", cmake_config,
        "--target", "install",
    ]
    if run_process(cmake_cmd):
        raise DistutilsSetupError("cmake build failed!")
    # cd back where setup awaits
    os.chdir(script_dir)
def build(ctx, rebuild=False, race=False, build_include=None,
          build_exclude=None, puppy=False, development=True,
          precompile_only=False, skip_assets=False, embedded_path=None,
          rtloader_root=None, python_home_2=None, python_home_3=None,
          major_version='7', python_runtimes='3', arch='x64'):
    """
    Build the agent. If the bits to include in the build are not specified,
    the values from `invoke.yaml` will be used.

    Example invokation:
        inv agent.build --build-exclude=systemd
    """
    build_include = DEFAULT_BUILD_TAGS if build_include is None \
        else build_include.split(",")
    build_exclude = [] if build_exclude is None else build_exclude.split(",")

    ldflags, gcflags, env = get_build_flags(ctx, embedded_path=embedded_path,
                                            rtloader_root=rtloader_root,
                                            python_home_2=python_home_2,
                                            python_home_3=python_home_3,
                                            major_version=major_version,
                                            python_runtimes=python_runtimes,
                                            arch=arch)

    # Linux-only build tags must be excluded on other platforms.
    if not sys.platform.startswith('linux'):
        for ex in LINUX_ONLY_TAGS:
            if ex not in build_exclude:
                build_exclude.append(ex)

    # remove all tags that are only available on debian distributions
    distname = distro.id().lower()
    if distname not in REDHAT_AND_DEBIAN_DIST:
        for ex in REDHAT_AND_DEBIAN_ONLY_TAGS:
            if ex not in build_exclude:
                build_exclude.append(ex)

    if sys.platform == 'win32':
        py_runtime_var = get_win_py_runtime_var(python_runtimes)
        windres_target = "pe-x86-64"
        # Important for x-compiling
        env["CGO_ENABLED"] = "1"
        if arch == "x86":
            env["GOARCH"] = "386"
            windres_target = "pe-i386"
        # This generates the manifest resource. The manifest resource is
        # necessary for being able to load the ancient C-runtime that comes
        # along with Python 2.7
        # command = "rsrc -arch amd64 -manifest cmd/agent/agent.exe.manifest -o cmd/agent/rsrc.syso"
        ver = get_version_numeric_only(ctx, env, major_version=major_version)
        build_maj, build_min, build_patch = ver.split(".")
        command = "windmc --target {target_arch} -r cmd/agent cmd/agent/agentmsg.mc ".format(
            target_arch=windres_target)
        ctx.run(command, env=env)
        command = "windres --target {target_arch} --define {py_runtime_var}=1 --define MAJ_VER={build_maj} --define MIN_VER={build_min} --define PATCH_VER={build_patch} --define BUILD_ARCH_{build_arch}=1".format(
            py_runtime_var=py_runtime_var,
            build_maj=build_maj,
            build_min=build_min,
            build_patch=build_patch,
            target_arch=windres_target,
            build_arch=arch)
        # NOTE(review): there is no separating space between "...=1" above
        # and "-i" below; verify the final windres command line is
        # well-formed before relying on it.
        command += "-i cmd/agent/agent.rc -O coff -o cmd/agent/rsrc.syso"
        ctx.run(command, env=env)

    if puppy:
        # Puppy mode overrides whatever passed through `--build-exclude` and `--build-include`
        build_tags = get_default_build_tags(puppy=True)
    else:
        build_tags = get_build_tags(build_include, build_exclude)

    # Generating go source from templates by running go generate on ./pkg/status
    generate(ctx)

    cmd = "go build {race_opt} {build_type} -tags \"{go_build_tags}\" "
    cmd += "-o {agent_bin} -gcflags=\"{gcflags}\" -ldflags=\"{ldflags}\" {REPO_PATH}/cmd/agent"
    args = {
        "race_opt": "-race" if race else "",
        "build_type": "-a" if rebuild else ("-i" if precompile_only else ""),
        "go_build_tags": " ".join(build_tags),
        "agent_bin": os.path.join(BIN_PATH, bin_name("agent", android=False)),
        "gcflags": gcflags,
        "ldflags": ldflags,
        "REPO_PATH": REPO_PATH,
    }
    ctx.run(cmd.format(**args), env=env)

    # Remove cross-compiling bits to render config
    env.update({
        "GOOS": "",
        "GOARCH": "",
    })

    # Render the Agent configuration file template
    cmd = "go run {go_file} {build_type} {template_file} {output_file}"
    args = {
        "go_file": "./pkg/config/render_config.go",
        "build_type": "agent-py2py3" if has_both_python(python_runtimes)
                      else "agent-py3",
        "template_file": "./pkg/config/config_template.yaml",
        "output_file": "./cmd/agent/dist/datadog.yaml",
    }
    ctx.run(cmd.format(**args), env=env)

    # On Linux and MacOS, render the system-probe configuration file template
    if sys.platform != 'win32':
        cmd = "go run ./pkg/config/render_config.go system-probe ./pkg/config/config_template.yaml ./cmd/agent/dist/system-probe.yaml"
        ctx.run(cmd, env=env)

    if not skip_assets:
        refresh_assets(ctx, build_tags, development=development, puppy=puppy)
def main():
    """Read Filebeat-style module logs and forward them to Elasticsearch.

    Parses CLI args (-c/--config required, -v/--verbosity repeatable),
    configures logging, loads the YAML config, builds host metadata from
    the local platform/distro, and indexes each produced document.
    """
    # Certificate WARNING bug
    urllib3.disable_warnings()
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", required=True, help="Config file")
    parser.add_argument("-v", "--verbosity", action="count",
                        help="Increase output verbosity")
    args = parser.parse_args()
    if args.verbosity == 2:
        loglevel = logging.DEBUG
    elif args.verbosity == 1:
        loglevel = logging.INFO
    else:
        loglevel = logging.ERROR
    logging.basicConfig(
        level=loglevel,
        # filename='/var/log/elasticsearch-forwarder.log',
        format='%(asctime)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    with open(args.config, 'r', encoding="utf-8") as config_file:
        # SECURITY NOTE: yaml.Loader can construct arbitrary Python objects;
        # prefer yaml.safe_load if the config file is not fully trusted.
        config = yaml.load(config_file, Loader=yaml.Loader)
    modules = []
    for module_config in config['filebeat']['modules']:
        if module_config['module'] == 'apache':
            modules.append(Apache(module_config))
    config_elasticsearch = config['output']['elasticsearch']
    es = Elasticsearch(
        config_elasticsearch['hosts'],
        http_auth=(config_elasticsearch['username'],
                   config_elasticsearch['password']),
        scheme=config_elasticsearch['protocol'],
        port=config_elasticsearch['port'],
        ca_certs=config_elasticsearch['ssl']['certificate']['authorities'])
    # Static host/OS metadata attached to every forwarded document.
    host_fields = {
        'host': {
            'name': platform.node(),
            'hostname': platform.node(),
            "os": {
                "kernel": platform.release(),
                "codename": distro.codename(),
                "name": distro.name(),
                'family': distro.like(),
                "version": distro.version(),
                "platform": distro.id()
            },
            "architecture": platform.machine()
        }
    }
    extra_fields = config['extra_fields']
    index = config_elasticsearch['index']
    pipeline = config_elasticsearch['pipeline']
    #helpers.bulk(es, get_bulk_data(index, pipeline, host_fields, extra_fields))
    for doc in get_data(host_fields, extra_fields):
        # BUG FIX: previously logged the undefined name `data` (NameError);
        # log the current document instead.
        logging.info('Document: %s', doc)
        res = es.index(index=index, pipeline=pipeline, body=doc)
        logging.info("%s doc with id %s", res['result'], res['_id'])
def is_amzn2():
    """Return True when running on Amazon Linux 2.

    BUG FIX: the previous test `'2' in distro.version()` matched any version
    merely containing the digit 2 — e.g. Amazon Linux 1's "2018.03" — so it
    misidentified AL1 as AL2.  Amazon Linux 2 reports version "2"; compare
    the major version component instead.
    """
    if 'amzn' not in distro.id():
        return False
    return distro.version().split('.')[0] == '2'
from __future__ import print_function
from pprint import pformat
import distro


def pprint(obj):
    # Print pformat() output with each line indented by four spaces.
    # (Deliberately shadows the stdlib module name `pprint` in this script.)
    for line in pformat(obj).split("\n"):
        print(4 * " " + line)


# Dump every piece of information the `distro` package exposes about the
# current platform — useful as a quick diagnostic script.
print("os_release_info:")
pprint(distro.os_release_info())
print("lsb_release_info:")
pprint(distro.lsb_release_info())
print("distro_release_info:")
pprint(distro.distro_release_info())
print("id: {0}".format(distro.id()))
print("name: {0}".format(distro.name()))
print("name_pretty: {0}".format(distro.name(True)))
print("version: {0}".format(distro.version()))
print("version_pretty: {0}".format(distro.version(True)))
print("like: {0}".format(distro.like()))
print("codename: {0}".format(distro.codename()))
print("linux_distribution_full: {0}".format(distro.linux_distribution()))
print("linux_distribution: {0}".format(distro.linux_distribution(False)))
print("major_version: {0}".format(distro.major_version()))
print("minor_version: {0}".format(distro.minor_version()))
print("build_number: {0}".format(distro.build_number()))
cmd('apt-get -y -q update') # 安装必须的包 cmd('apt-get -y -q install git python3 python3-pip wget curl') # 安装非必须的包 try: # 更新一下openssl cmd('apt-get -y -q install openssl', allow_failure=True) except: pass # 如果安装了, 则可以启用http2 ppa_available = cmd( 'apt-get -y -q install software-properties-common python-software-properties', allow_failure=True) if distro.id() == 'ubuntu' and ppa_available: # 安装高版本的Apache2(支持http2), 仅限ubuntu cmd("""LC_ALL=C.UTF-8 add-apt-repository -y ppa:ondrej/apache2 && apt-key update && apt-get -y -q update && apt-get -y -q install apache2""") else: # debian 只有低版本的可以用 cmd("apt-get -y -q install apache2") cmd("""a2enmod rewrite mime include headers filter expires deflate autoindex setenvif ssl""" ) if not cmd("a2enmod http2", allow_failure=True): warnprint("[Warning!] your server does not support http2") sleep(0.5)
# Installation layout for cobbler.  The defaults below match Red Hat-family
# systems; the distro checks override them for SUSE and Debian families.
docpath = "share/man"
etcpath = "/etc/cobbler/"
libpath = "/var/lib/cobbler/"
logpath = "/var/log/"
completion_path = "/usr/share/bash-completion/completions"
statepath = "/tmp/cobbler_settings/devinstall"
httpd_service = "httpd.service"

# NOTE(review): substring match — any ID_LIKE containing "suse"
# (openSUSE, SLES, ...) selects the SUSE layout.
suse_release = "suse" in distro.like()
if suse_release:
    webconfig = "/etc/apache2/vhosts.d"
    webroot = "/srv/www/"
    http_user = "******"
    httpd_service = "apache2.service"
    defaultpath = "/etc/sysconfig/"
elif distro.id() in ("debian", "ubuntu"):
    webconfig = "/etc/apache2/conf-available"
    webroot = "/var/www/"
    http_user = "******"
    httpd_service = "apache2.service"
    defaultpath = "/etc/default/"
else:
    webconfig = "/etc/httpd/conf.d"
    webroot = "/var/www/"
    http_user = "******"
    defaultpath = "/etc/sysconfig/"

webcontent = webroot + "cobbler_webui_content/"
webimages = webcontent + "/images"

setup(
def build(ctx, rebuild=False, race=False, build_include=None, build_exclude=None,
          development=True, precompile_only=False, skip_assets=False,
          major_version='7', python_runtimes='3'):
    """
    Build the Android APK with gomobile, then run the Gradle build.

    If the build tags to include/exclude are not specified, the values from
    `invoke.yaml` are used.

    Example invocation: inv android.build
    """
    # ensure BIN_PATH exists
    if not os.path.exists(BIN_PATH):
        os.makedirs(BIN_PATH)

    # put the check confs in place
    assetconfigs(ctx)

    # Comma-separated CLI strings become tag lists; None means "use defaults".
    build_include = DEFAULT_BUILD_TAGS if build_include is None else build_include.split(",")
    build_exclude = [] if build_exclude is None else build_exclude.split(",")

    ldflags, gcflags, env = get_build_flags(ctx, major_version=major_version,
                                            python_runtimes=python_runtimes)

    # Exclude tags that only build on Linux when cross-building elsewhere.
    if not sys.platform.startswith('linux'):
        for ex in LINUX_ONLY_TAGS:
            if ex not in build_exclude:
                build_exclude.append(ex)

    # Generating go source from templates by running go generate on ./pkg/status
    generate(ctx)

    # remove all tags that are only available on debian distributions
    distname = distro.id().lower()
    if distname not in REDHAT_DEBIAN_SUSE_DIST:
        for ex in REDHAT_DEBIAN_SUSE_ONLY_TAGS:
            if ex not in build_exclude:
                build_exclude.append(ex)

    build_tags = get_default_build_tags(puppy=True)
    build_tags.add("android")

    # gomobile produces the native agent library for Android.
    cmd = "gomobile bind -target android {race_opt} {build_type} -tags \"{go_build_tags}\" "
    cmd += "-o {agent_bin} -gcflags=\"{gcflags}\" -ldflags=\"{ldflags}\" {REPO_PATH}/cmd/agent/android"
    args = {
        "race_opt": "-race" if race else "",
        "build_type": "-a" if rebuild else ("-i" if precompile_only else ""),
        "go_build_tags": " ".join(build_tags),
        "agent_bin": os.path.join(BIN_PATH, bin_name("ddagent", android=True)),
        "gcflags": gcflags,
        "ldflags": ldflags,
        "REPO_PATH": REPO_PATH,
    }
    ctx.run(cmd.format(**args), env=env)

    # Run the Gradle build from inside the Android project directory.
    pwd = os.getcwd()
    os.chdir("cmd/agent/android")
    if sys.platform == 'win32':
        cmd = "gradlew.bat --no-daemon build"
    else:
        cmd = "./gradlew --no-daemon build"
    ctx.run(cmd)
    os.chdir(pwd)

    # Copy the unsigned APK into bin/ under a versioned name.
    ver = get_version(ctx, include_git=True, git_sha_length=7,
                      major_version=major_version)
    outfile = "bin/agent/ddagent-{}-unsigned.apk".format(ver)
    shutil.copyfile(
        "cmd/agent/android/app/build/outputs/apk/release/app-release-unsigned.apk",
        outfile)
import shlex
import tempfile
import subprocess
import pkg_resources

import jinja2
import distro
import requests

from cloudify import ctx
from cloudify import exceptions
from cloudify.decorators import operation

import filebeat_plugin

# NOTE(review): this rebinds the name `distro` from the imported module to
# the id string returned by distro.id() — any later use of the module's API
# through this name would fail; confirm only the string is needed below.
distro = distro.id()

# Default location of the filebeat YAML configuration file.
FILEBEAT_CONFIG_FILE_DEFAULT = os.path.join(
    '/', 'etc', 'filebeat', 'filebeat.yml')
# Default installation directory for filebeat.
FILEBEAT_PATH_DEFAULT = os.path.join('/', 'opt', 'filebeat')


@operation
def install(filebeat_config_inputs,
            filebeat_config_file='',
            filebeat_install_path='',
            download_url='', **kwargs):
    """Installation operation. Downloading and installing filebeat packacge - default version is 0.12.0. Default installation dir is set to /opt/filebeat.
def is_debian_variant():
    """Return True when the host is Debian or a Debian-derived distribution."""
    ident = get_id_like() or distro.id()
    return 'debian' in ident
# Color helpers for terminal output.
GREEN = colored.green
YELLOW = colored.yellow
# Distributions this setup script knows how to handle (by id or ID_LIKE).
distros = ["ubuntu", "debian", "linuxmint", "arch", "endeavouros"]
figlet = Figlet(font='speed')

print(YELLOW(figlet.renderText('''GAME SETUP''')))

while True:
    try:
        if platform.system() != 'Linux':
            print("This is for Linux only")
            break
        print("Detecting your distro...")
        # FIXME: Too many if-else..
        if distro.id() in distros or distro.like() in distros:
            print(
                GREEN(
                    f"[{distro.id()}] distro detected... based on {distro.like()}"
                ))
            # Map the detected distro family onto a numeric selector.
            if distro.id() == "ubuntu" or distro.like() == "ubuntu":
                sys_distro = 1
            elif distro.id() == "arch" or distro.like() == "arch":
                sys_distro = 2
            # NOTE(review): `and` binds tighter than `or`, so this reads as
            # id == "debian" or (like == "debian" and id != "ubuntu") —
            # probably intended, but confirm the precedence is deliberate.
            elif distro.id() == "debian" or distro.like(
            ) == "debian" and distro.id() != "ubuntu":
                sys_distro = 3
            else:
                sys_distro = 0
            print(
def is_gentoo():
    """Return True when the running distribution identifies as Gentoo."""
    current = distro.id()
    return 'gentoo' in current
# * See the License for the specific language governing permissions and # * limitations under the License. import os import shutil import unittest import tempfile import distro from mock import patch from cloudify.mocks import MockCloudifyContext from .. import tasks disto_id = distro.id() TEMP_TELEGRAF = os.path.join(tempfile.gettempdir(), 'telegraf') class TesttelegrafPlugin(unittest.TestCase): def setUp(self): os.mkdir(TEMP_TELEGRAF) def tearDown(self): # Remove telegraf temp dir if os.path.exists(TEMP_TELEGRAF): shutil.rmtree(TEMP_TELEGRAF) @patch('telegraf_plugin.tasks.TELEGRAF_PATH_DEFAULT', TEMP_TELEGRAF) @patch('telegraf_plugin.tasks.ctx', MockCloudifyContext()) def test_download_telegraf(self):
def do_system_pkg(self, arg):
    """ install system package """
    sysname = platform.uname().system
    logging.debug(f"system is {sysname}")
    logging.info("install pkg")
    if sysname == "Darwin":
        # macOS: make sure the Xcode command line tools are set up first.
        subprocess.run(["xcode-select", "--install"])
        subprocess.run(["sudo", "xcodebuild", "-license"])
        output = subprocess.run(
            ["which", "brew"], stdout=subprocess.PIPE, encoding='utf-8')
        logging.info(f"check brew {output}")
        # BUGFIX: the original test was `if output is '':`, comparing the
        # CompletedProcess object itself to '' by identity — always False,
        # so Homebrew was never installed.  `which` prints nothing when brew
        # is missing, so inspect the captured stdout instead.
        if not output.stdout.strip():
            install_brew = subprocess.run([
                "ruby", "-e",
                "\"$(curl -fsSL https://raw.githubusercontent.com/Homebrew"
                "/install/master/install)\""
            ], stdout=subprocess.PIPE, encoding='utf-8')
            install_cask = subprocess.run([
                "brew", "tap", "caskroom/cask"
            ], stdout=subprocess.PIPE, encoding='utf-8')
            install_cask_font = subprocess.run([
                "brew", "tap", "caskroom/fonts"
            ], stdout=subprocess.PIPE, encoding='utf-8')
            logging.debug(install_brew)
            logging.debug(install_cask)
            logging.debug(install_cask_font)
        # Core CLI tooling.
        install_pkg = subprocess.run([
            "brew", "install", "tmux", "tig", "zsh"
        ], stdout=subprocess.PIPE, encoding='utf-8')
        logging.debug(install_pkg)
        install_vim = subprocess.run([
            "brew", "install", "vim", "--with-python3",
            "--with-override-system-vi"
        ], stdout=subprocess.PIPE, encoding='utf-8')
        logging.debug(install_vim)
        # Developer / powerline fonts via brew cask.
        install_font_hack = subprocess.run([
            "brew", "cask", "install", "font-hack", "font-iosevka",
            "font-fira-code", "font-noto-mono-for-powerline",
            "font-fira-mono-for-powerline"
        ], stdout=subprocess.PIPE, encoding='utf-8')
        logging.debug(install_font_hack)
    elif sysname == "Linux":
        # Linux: pick the package manager based on the detected distro.
        distro_name = distro.id()
        logging.debug(f"distro {distro_name}")
        if distro_name == "ubuntu":
            install_pkg = subprocess.run([
                "sudo", "apt", "install", "-y", "vim", "tig", "tmux", "zsh",
                "openssh-server"
            ], stdout=subprocess.PIPE, encoding='utf-8')
        elif distro_name == "manjaro":
            install_pkg = subprocess.run([
                "yaourt", "-S", "vim", "tig", "zsh", "openssh"
            ], stdout=subprocess.PIPE, encoding='utf-8')
        logging.debug(install_pkg)
ismacOS = sys.platform == 'darwin' # public isNix = uname()[0] in ('Linux', 'linux') isPyPy = '[PyPy ' in sys.version # platform.python_implementation() == 'PyPy' isPython2 = sys.version_info[0] == 2 isPython3 = sys.version_info[0] == 3 isPython37 = sys.version_info[:2] >= (3, 7) # for testLazy isWindows = sys.platform.startswith('win') try: # use distro only for Linux, not macOS, etc. if isNix: import distro # <https://PyPI.org/project/distro> else: raise ImportError _Nix = anStr(distro.id()).capitalize() # .name()? def nix_ver(): # *nix release try: # no subprocess.check_output ... v = distro.version() except AttributeError: # ... Python 2.6 v = '' return anStr(v), _os_bitstr except ImportError: _Nix = '' # not linux? def nix_ver(): # PYCHOK expected return _Nix, _os_bitstr
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import distro print 'os_release_info: {0}'.format(distro.os_release_info()) print 'lsb_release_info: {0}'.format(distro.lsb_release_info()) print 'distro_release_info: {0}'.format(distro.distro_release_info()) print 'id: {0}'.format(distro.id()) print 'name: {0}'.format(distro.name()) print 'name_pretty: {0}'.format(distro.name(True)) print 'version: {0}'.format(distro.version()) print 'version_pretty: {0}'.format(distro.version(True)) print 'like: {0}'.format(distro.like()) print 'codename: {0}'.format(distro.codename()) print 'linux_distribution_full: {0}'.format(distro.linux_distribution()) print 'linux_distribution: {0}'.format(distro.linux_distribution(False)) print 'major_version: {0}'.format(distro.major_version()) print 'minor_version: {0}'.format(distro.minor_version()) print 'build_number: {0}'.format(distro.build_number())
def build_dlib():
    """use cmake to build and install the extension

    Raises DistutilsSetupError when cmake is missing or when the cmake
    configure/build step fails.
    """
    if cmake_path is None:
        # cmake not found: build the most helpful install hint we can for
        # the current platform, then abort.
        cmake_install_url = "https://cmake.org/install/"
        message = ("You can install cmake using the instructions at " +
                   cmake_install_url)
        msg_pkgmanager = ("You can install cmake on {0} using "
                          "`sudo {1} install cmake`.")
        if sys.platform == "darwin":
            pkgmanagers = ('brew', 'port')
            for manager in pkgmanagers:
                if find_executable(manager) is not None:
                    message = msg_pkgmanager.format('OSX', manager)
                    break
        elif sys.platform.startswith('linux'):
            try:
                import distro
            except ImportError as err:
                # Try installing `distro` on the fly just to produce a
                # distribution-specific hint; fall back to the generic one.
                import pip
                pip_exit = pip.main(['install', '-q', 'distro'])
                if pip_exit > 0:
                    log.debug("Unable to install `distro` to identify "
                              "the recommended command. Falling back "
                              "to default error message.")
                    distro = err
                else:
                    import distro
            if not isinstance(distro, ImportError):
                distname = distro.id()
                if distname in ('debian', 'ubuntu'):
                    message = msg_pkgmanager.format(
                        distname.title(), 'apt-get')
                elif distname in ('fedora', 'centos', 'redhat'):
                    pkgmanagers = ("dnf", "yum")
                    for manager in pkgmanagers:
                        if find_executable(manager) is not None:
                            message = msg_pkgmanager.format(
                                distname.title(), manager)
                            break

        raise DistutilsSetupError(
            "Cannot find cmake, ensure it is installed and in the path.\n" +
            message + "\n"
            "You can also specify its path with --cmake parameter.")

    platform_arch = platform.architecture()[0]
    log.info("Detected Python architecture: %s" % platform_arch)
    # make sure build artifacts are generated for the version of Python currently running
    cmake_extra_arch = []
    if sys.version_info >= (3, 0):
        cmake_extra_arch += ['-DPYTHON3=yes']

    log.info("Detected platform: %s" % sys.platform)
    if sys.platform == "darwin":
        # build on OS X
        inc_dir = get_python_inc()
        cmake_extra_arch += [
            '-DPYTHON_INCLUDE_DIR={inc}'.format(inc=inc_dir)
        ]

        # by default, cmake will choose the system python lib in /usr/lib
        # this checks the sysconfig and will correctly pick up a brewed python lib
        # e.g. in /usr/local/Cellar
        py_ver = get_python_version()
        py_lib = os.path.join(get_config_var('LIBDIR'),
                              'libpython' + py_ver + '.dylib')
        cmake_extra_arch += ['-DPYTHON_LIBRARY={lib}'.format(lib=py_lib)]

    if platform_arch == '64bit' and sys.platform == "win32":
        # 64bit build on Windows
        if not generator_set:
            # see if we can deduce the 64bit default generator
            cmake_extra_arch += get_msvc_win64_generator()

        # help cmake to find Python library in 64bit Python in Windows
        # because cmake is 32bit and cannot find PYTHON_LIBRARY from registry.
        inc_dir = get_python_inc()
        cmake_extra_arch += [
            '-DPYTHON_INCLUDE_DIR={inc}'.format(inc=inc_dir)
        ]

        # this imitates cmake in path resolution
        py_ver = get_python_version()
        for ext in [
                py_ver.replace(".", "") + '.lib', py_ver + 'mu.lib',
                py_ver + 'm.lib', py_ver + 'u.lib'
        ]:
            py_lib = os.path.abspath(
                os.path.join(inc_dir, '../libs/', 'python' + ext))
            if os.path.exists(py_lib):
                cmake_extra_arch += [
                    '-DPYTHON_LIBRARY={lib}'.format(lib=py_lib)
                ]
                break

    # Always build into a clean directory.
    build_dir = os.path.join(script_dir, "./tools/python/build")
    if os.path.exists(build_dir):
        log.info('Removing build directory %s' % build_dir)
        rmtree(build_dir)

    try:
        os.makedirs(build_dir)
    except OSError:
        pass

    # cd build
    os.chdir(build_dir)
    log.info('Configuring cmake ...')
    cmake_cmd = [
        cmake_path,
        "..",
    ] + cmake_extra + cmake_extra_arch
    if run_process(cmake_cmd):
        raise DistutilsSetupError("cmake configuration failed!")

    log.info('Build using cmake ...')
    cmake_cmd = [
        cmake_path,
        "--build", ".",
        "--config", cmake_config,
        "--target", "install",
    ]
    if run_process(cmake_cmd):
        raise DistutilsSetupError("cmake build failed!")

    # cd back where setup awaits
    os.chdir(script_dir)
def drivers(args):
    """Build and install the TBS receiver drivers and firmware.

    Installs build prerequisites via the host's package manager, clones and
    builds the TBS media_build/linux_media trees, installs the resulting
    Media Tree modules, and installs the tuner firmware.
    """
    # Interactive installation? I.e., requires user to press "y/n"
    interactive = (not args.yes)

    if (os.geteuid() != 0 and not args.dry_run):
        util.fill_print("Some commands require root access and will prompt "
                        "for password")

    if (args.dry_run):
        util._print_header("Dry Run Mode")

    runner = util.ProcessRunner(logger, args.dry_run)

    # Install pre-requisites
    linux_release = platform.release()
    linux_headers = "linux-headers-" + linux_release
    kernel_devel = "kernel-devel-" + linux_release
    kernel_headers = "kernel-headers-" + linux_release
    apt_pkg_list = ["make", "gcc", "git", "patch", "patchutils",
                    "libproc-processtable-perl", linux_headers]
    dnf_pkg_list = ["make", "gcc", "git", "patch", "patchutils",
                    "perl-Proc-ProcessTable", "perl-Digest-SHA",
                    "perl-File-Copy-Recursive", kernel_devel, kernel_headers]
    yum_pkg_list = dnf_pkg_list

    # On dnf, not always the kernel-devel/headers package will be available for
    # the same version as the current kernel. Check:
    dnf_update_required = False
    if (which("dnf")):
        res_d = runner.run(["dnf", "list", kernel_devel],
                           stdout=subprocess.DEVNULL,
                           stderr=subprocess.DEVNULL, nocheck=True)
        res_h = runner.run(["dnf", "list", kernel_headers],
                           stdout=subprocess.DEVNULL,
                           stderr=subprocess.DEVNULL, nocheck=True)
        # In dry-run mode the runner returns no result objects to inspect.
        if (not args.dry_run):
            kernel_devel_unavailable = (res_d.returncode != 0)
            kernel_headers_unavailable = (res_h.returncode != 0)
            dnf_update_required = kernel_devel_unavailable or \
                kernel_headers_unavailable
            if (kernel_devel_unavailable):
                print("Could not find package {}".format(kernel_devel))
            if (kernel_headers_unavailable):
                print("Could not find package {}".format(kernel_headers))

    # If the target kernel-devel/kernel-headers versions are not available for
    # the current release, suggest to run dnf update:
    if (dnf_update_required):
        res = runner.run(["dnf", "list", "--upgrades", "kernel"],
                         stdout=subprocess.DEVNULL,
                         stderr=subprocess.DEVNULL, nocheck=True)
        kernel_update_available = (res.returncode == 0)
        if (kernel_update_available):
            print("Kernel update required")
            if (util._ask_yes_or_no("OK to run \"dnf update\"?")):
                cmd = ["dnf", "update"]
                runner.run(util.root_cmd(cmd))
                print("Please reboot to load the new kernel and try the " +
                      "command below again:")
                print("\n blocksat-cli deps tbs-drivers\n")
                sys.exit(1)
                # NOTE(review): unreachable — sys.exit(1) raises SystemExit
                # before this return executes.
                return
        else:
            logger.error("Could not find an available kernel update")
            sys.exit(1)
            # NOTE(review): unreachable, same as above.
            return

    _update_pkg_repo(interactive, args.dry_run)
    _install_packages(apt_pkg_list, dnf_pkg_list, yum_pkg_list,
                      interactive=interactive, update=False, dry=args.dry_run)

    # Clone the driver repositories
    driver_src_dir = os.path.join(args.cfg_dir, "src", "tbsdriver")
    media_build_dir = os.path.join(driver_src_dir, "media_build")
    media_dir = os.path.join(driver_src_dir, "media")
    if not os.path.exists(driver_src_dir):
        os.makedirs(driver_src_dir)
    if os.path.exists(media_build_dir):
        # Already cloned: just refresh it.
        runner.run(["git", "pull", "origin", "master"],
                   cwd=media_build_dir)
    else:
        runner.run(["git", "clone",
                    "https://github.com/tbsdtv/media_build.git"],
                   cwd=driver_src_dir)
    if os.path.exists(media_dir):
        runner.run(["git", "pull", "origin", "latest"],
                   cwd=media_dir)
    else:
        runner.run(["git", "clone", "--depth=1",
                    "https://github.com/tbsdtv/linux_media.git",
                    "-b", "latest", "./media"],
                   cwd=driver_src_dir)

    # Build the media drivers
    nproc = int(subprocess.check_output(["nproc"]).decode().rstrip())
    nproc_arg = "-j" + str(nproc)
    runner.run(["make", "cleanall"], cwd=media_build_dir)
    runner.run(["make", "dir", "DIR=../media"], cwd=media_build_dir)
    runner.run(["make", "allyesconfig"], cwd=media_build_dir)
    # FIXME: Temporary workaround for error "modpost: "__devm_regmap_init_sccb"
    # ov9650.ko undefined!": disable ov9650 from the build. The problem was
    # observed on kernel versions 5.3.7 and 5.7.7. Apply the workaround for any
    # version < 5.8.
    if (distro.id() == "fedora" and
            LooseVersion(linux_release) < LooseVersion('5.8')):
        runner.run(["sed", "-i",
                    "s/CONFIG_VIDEO_OV9650=m/CONFIG_VIDEO_OV9650=n/g",
                    "v4l/.config"], cwd=media_build_dir)
    runner.run(["make", nproc_arg], cwd=media_build_dir)

    # Delete the previous Media Tree installation
    media_lib_path = "/lib/modules/" + linux_release + \
        "/kernel/drivers/media/"
    runner.run(
        util.root_cmd(["rm", "-rf", media_lib_path]),
        cwd=media_build_dir
    )

    # Install the new Media Tree
    runner.run(util.root_cmd(["make", "install"]), cwd=media_build_dir)

    # Download the firmware
    tbs_linux_url = "https://www.tbsdtv.com/download/document/linux/"
    fw_tarball = "tbs-tuner-firmwares_v1.0.tar.bz2"
    fw_url = tbs_linux_url + fw_tarball
    _download_file(fw_url, driver_src_dir, args.dry_run)

    # Install the firmware
    runner.run(util.root_cmd(["tar", "jxvf", fw_tarball, "-C",
                              "/lib/firmware/"]),
               cwd=driver_src_dir)

    if (not args.dry_run):
        print("Installation completed successfully. Please reboot now.")
def is_suse_variant():
    """Return True when the host is SUSE or a SUSE-derived distribution."""
    ident = get_id_like() or distro.id()
    return 'suse' in ident
def get_os_distribution():
    """Return the machine-readable distro identifier (e.g. 'ubuntu')."""
    distribution_id = distro.id()
    return distribution_id
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
Module: Defaults
================

Isomer distribution default settings. Contains database setup, certificate
locations, platform details, service templates and a table of exit codes
for the management tool.

"""

import distro

# Upper-cased distro id of the host, e.g. "UBUNTU".
distribution = distro.id().upper()

# Database connection defaults and the matching CLI help strings.
db_host_default = "127.0.0.1:27017"
db_host_help = "Define hostname for database server (default: {})".format(
    db_host_default)
db_host_metavar = "<ip:port>"

db_default = "isomer"
db_help = "Define name of database (default: {})".format(db_default)
db_metavar = "<name>"

# Project source and package-index endpoints.
source_url = "https://github.com/isomeric/isomer"
source_api_url = "https://api.github.com/repos/isomeric/isomer"
pypi_api_url = "https://pypi.org/pypi/isomer/json"

# Human-readable release codename of the host distribution.
distribution_name = distro.codename()
import distro

# Sanity-check that the distro package correctly identifies a Buildroot system.
expected = {'name': 'Buildroot', 'id': 'buildroot'}
assert distro.name() == expected['name']
assert distro.id() == expected['id']
def main():
    """Install the build prerequisites for the detected platform.

    Parses CLI options, configures logging, detects the distribution via
    `distro`, and installs the required toolchain/library packages with the
    platform's package manager.  Returns 1 when the platform is unsupported.
    """
    # configure parser
    parser = optparse.OptionParser()
    parser.add_option('-v', '--verbose', action="count", dest='verbosity',
                      default=1, help='print more information to stdout')
    parser.add_option('-q', '--quiet', action='store_const', const=0,
                      dest='verbosity', help='print less information to stdout')
    parser.add_option('-p', '--package', action='store_true', dest='package',
                      default=True)
    parser.add_option('-n', '--no-package', action='store_false',
                      dest='package')
    (options, args) = parser.parse_args()

    # configure logging
    log = logging.getLogger()
    if options.verbosity >= 2:
        log.setLevel(logging.DEBUG)
    elif options.verbosity == 1:
        log.setLevel(logging.INFO)
    else:
        log.setLevel(logging.WARNING)
    ch = logging.StreamHandler()
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    ch.setFormatter(formatter)
    log.addHandler(ch)

    distro_id = distro.id()
    distro_major_version = distro.major_version()

    if distro_id in ['debian', 'ubuntu']:
        log.info('Detected: {0}'.format(distro_id))
        cmd = ['sudo', 'apt-get', 'update', '-y']
        build.run_cmd(cmd, check_rc='getting updates failed')
        # get prerequisites
        cmd = [
            'sudo', 'DEBIAN_FRONTEND=noninteractive', 'apt-get', 'install',
            '-y', 'curl', 'automake', 'make', 'autoconf2.13', 'texinfo',
            'help2man', 'git', 'gpg', 'lsb-release', 'libtool', 'libbz2-dev',
            'zlib1g-dev', 'libcurl4-gnutls-dev', 'libxml2-dev', 'pkg-config',
            'python3-dev', 'uuid-dev', 'libssl-dev', 'fuse', 'libfuse2',
            'libfuse-dev', 'libmicrohttpd-dev', 'unixodbc-dev'
        ]
        if distro_id in ['debian']:
            # Debian 11's default GCC is version 10.2.
            # Debian containers do not have "ps" command by default.
            cmd.extend(['g++', 'procps'])
        # At this point, we know we're dealing with some version of Ubuntu.
        elif distro_major_version == '20':
            # Compiling LLVM 13's libcxx requires at least GCC 10.
            cmd.extend(['gcc-10', 'g++-10'])
        else:
            # Ubuntu 18 does not have any issues compiling LLVM 13's libcxx
            # because it is using GCC 7 which does not support any C++20
            # features.
            cmd.append('g++')
        build.run_cmd(cmd, check_rc='installing prerequisites failed')
        cmd = ['sudo', 'apt-get', 'install', '-y', 'autoconf', 'rsync']
        build.run_cmd(cmd, check_rc='installing autoconf failed')
        cmd = ['sudo', 'apt-get', 'install', '-y', 'patchelf']
        build.run_cmd(cmd, check_rc='installing patchelf failed')
    elif distro_id in ['rocky', 'almalinux', 'centos', 'rhel', 'scientific']:
        log.info('Detected: {0}'.format(distro_id))
        # prep
        if distro_id in ['rocky', 'almalinux']:
            cmd = [
                'sudo', 'dnf', 'install', '-y', 'epel-release',
                'dnf-plugins-core'
            ]
            build.run_cmd(cmd, check_rc='rpm dnf install failed')
            cmd = [
                'sudo', 'dnf', 'config-manager', '--set-enabled', 'powertools'
            ]
            build.run_cmd(cmd, check_rc='rpm dnf config-manager failed')
            cmd = [
                'sudo', 'dnf', 'install', '-y', 'procps', 'redhat-lsb-core',
                'rsync'
            ]  # For ps, lsb_release, and rsync.
            build.run_cmd(cmd, check_rc='yum install failed')
        else:
            cmd = ['sudo', 'rpm', '--rebuilddb']
            build.run_cmd(cmd, check_rc='rpm rebuild failed')
            cmd = ['sudo', 'yum', 'clean', 'all']
            build.run_cmd(cmd, check_rc='yum clean failed')
        if distro_id not in ['rocky', 'almalinux']:
            cmd = ['sudo', 'yum', 'install', 'centos-release-scl-rh', '-y']
            build.run_cmd(cmd, check_rc='yum install failed')
            cmd = [
                'sudo', 'yum', 'update', '-y', 'glibc*', 'yum*', 'rpm*',
                'python*'
            ]
            build.run_cmd(cmd, check_rc='yum update failed')
        # get prerequisites
        cmd = [
            'sudo', 'yum', 'install', '-y', 'epel-release', 'wget', 'openssl',
            'ca-certificates'
        ]
        build.run_cmd(cmd, check_rc='installing epel failed')
        cmd = [
            'sudo', 'yum', 'install', '-y', 'curl', 'gcc-c++', 'git',
            'autoconf', 'automake', 'texinfo', 'help2man', 'rpm-build',
            'rubygems', 'ruby-devel', 'zlib-devel', 'fuse', 'fuse-devel',
            'bzip2-devel', 'libcurl-devel', 'libmicrohttpd-devel',
            'libxml2-devel', 'libtool', 'libuuid-devel', 'openssl-devel',
            'unixODBC-devel', 'patchelf'
        ]
        if distro_id in ['rocky', 'almalinux']:
            cmd.append('python36-devel')  # python39-devel also available.
        else:
            cmd.append('python3-devel')
        build.run_cmd(cmd, check_rc='installing prerequisites failed')
    # BUGFIX: was ['opensuse ', 'sles'] — the trailing space in 'opensuse '
    # could never match distro.id(), making this branch unreachable on
    # openSUSE systems.
    elif distro_id in ['opensuse', 'sles']:
        log.info('Detected: {0}'.format(distro_id))
        # get prerequisites
        cmd = [
            'sudo', 'zypper', 'install', '-y', 'curl', 'tar', 'gzip', 'git',
            'ruby-devel', 'libmicrohttpd-devel', 'makeinfo', 'rubygems',
            'libopenssl-devel', 'rpm-build', 'help2man', 'python-devel',
            'libbz2-devel', 'libcurl-devel', 'libxml2-devel', 'libtool',
            'libuuid-devel', 'uuid-devel', 'unixODBC-devel', 'cyrus-sasl',
            'patchelf'
        ]
        build.run_cmd(cmd, check_rc='installing prerequisites failed')
    else:
        if platform.mac_ver()[0] != '':
            log.info('Detected: {0}'.format(platform.mac_ver()[0]))
            # get prerequisites
            cmd = ['brew', 'install', 'git', 'help2man', 'texinfo', 'libtool']
            build.run_cmd(cmd, check_rc='installing prerequisites failed')
            cmd = ['brew', 'link', 'texinfo', '--force']
            build.run_cmd(cmd, check_rc='linking texinfo failed')
        else:
            log.error(
                'Cannot determine prerequisites for platform [{0}]'.format(
                    distro_id))
            return 1

    # get necessary ruby gems
    if options.package:
        install_rvm_and_ruby()
        install_fpm_gem()
def is_gentoo_variant():
    """Return True when the distro id contains 'gentoo'."""
    current = distro.id()
    return 'gentoo' in current
def build_dlib():
    """use cmake to build and install the extension

    Raises DistutilsSetupError when cmake is missing or when the cmake
    configure/build step fails.
    """
    if cmake_path is None:
        # cmake not found: build the most helpful install hint we can for
        # the current platform, then abort.
        cmake_install_url = "https://cmake.org/install/"
        message = ("You can install cmake using the instructions at " +
                   cmake_install_url)
        msg_pkgmanager = ("You can install cmake on {0} using "
                          "`sudo {1} install cmake`.")
        if sys.platform == "darwin":
            pkgmanagers = ('brew', 'port')
            for manager in pkgmanagers:
                if find_executable(manager) is not None:
                    message = msg_pkgmanager.format('OSX', manager)
                    break
        elif sys.platform.startswith('linux'):
            try:
                import distro
            except ImportError as err:
                # Try installing `distro` on the fly just to produce a
                # distribution-specific hint; fall back to the generic one.
                import pip
                pip_exit = pip.main(['install', '-q', 'distro'])
                if pip_exit > 0:
                    log.debug("Unable to install `distro` to identify "
                              "the recommended command. Falling back "
                              "to default error message.")
                    distro = err
                else:
                    import distro
            if not isinstance(distro, ImportError):
                distname = distro.id()
                if distname in ('debian', 'ubuntu'):
                    message = msg_pkgmanager.format(
                        distname.title(), 'apt-get')
                elif distname in ('fedora', 'centos', 'redhat'):
                    pkgmanagers = ("dnf", "yum")
                    for manager in pkgmanagers:
                        if find_executable(manager) is not None:
                            message = msg_pkgmanager.format(
                                distname.title(), manager)
                            break

        raise DistutilsSetupError(
            "Cannot find cmake, ensure it is installed and in the path.\n" +
            message + "\n"
            "You can also specify its path with --cmake parameter.")

    platform_arch = platform.architecture()[0]
    log.info("Detected Python architecture: %s" % platform_arch)
    # make sure build artifacts are generated for the version of Python currently running
    cmake_extra_arch = []
    if sys.version_info >= (3, 0):
        cmake_extra_arch += ['-DPYTHON3=yes']

    log.info("Detected platform: %s" % sys.platform)
    if sys.platform == "darwin":
        # build on OS X
        inc_dir = get_python_inc()
        cmake_extra_arch += ['-DPYTHON_INCLUDE_DIR={inc}'.format(inc=inc_dir)]

        # by default, cmake will choose the system python lib in /usr/lib
        # this checks the sysconfig and will correctly pick up a brewed python lib
        # e.g. in /usr/local/Cellar
        py_ver = get_python_version()
        py_lib = os.path.join(get_config_var('LIBDIR'),
                              'libpython'+py_ver+'.dylib')
        cmake_extra_arch += ['-DPYTHON_LIBRARY={lib}'.format(lib=py_lib)]

    if sys.platform == "win32":
        if platform_arch == '64bit' and not generator_set:
            # see if we can deduce the 64bit default generator
            cmake_extra_arch += get_msvc_win64_generator()
        inc_dir = get_python_inc()
        cmake_extra_arch += ['-DPYTHON_INCLUDE_DIR={inc}'.format(inc=inc_dir)]

        # this imitates cmake in path resolution
        py_ver = get_python_version()
        for ext in [py_ver.replace(".", "") + '.lib',
                    py_ver + 'mu.lib', py_ver + 'm.lib', py_ver + 'u.lib']:
            py_lib = os.path.abspath(os.path.join(inc_dir, '../libs/',
                                                  'python' + ext))
            if os.path.exists(py_lib):
                cmake_extra_arch += ['-DPYTHON_LIBRARY={lib}'.format(lib=py_lib)]
                break

    # Always build into a clean directory.
    build_dir = os.path.join(script_dir, "./tools/python/build")
    if os.path.exists(build_dir):
        log.info('Removing build directory %s' % build_dir)
        rmtree(build_dir)

    try:
        os.makedirs(build_dir)
    except OSError:
        pass

    # cd build
    os.chdir(build_dir)
    log.info('Configuring cmake ...')
    cmake_cmd = [
        cmake_path,
        "..",
    ] + cmake_extra + cmake_extra_arch
    if run_process(cmake_cmd):
        raise DistutilsSetupError("cmake configuration failed!")

    log.info('Build using cmake ...')
    cmake_cmd = [
        cmake_path,
        "--build", ".",
        "--config", cmake_config,
        "--target", "install",
    ]
    if run_process(cmake_cmd):
        raise DistutilsSetupError("cmake build failed!")

    # cd back where setup awaits
    os.chdir(script_dir)
def get_distro_id_base():
    """Return the distro family id: ID_LIKE when set, else the distro's own id."""
    family = distro.like()
    if family:
        return family
    return distro.id()
"""

from __future__ import division, print_function, unicode_literals

import distro
import logging
import subprocess

from reprounzip.unpackers.common.misc import UsageError
from reprounzip.utils import itervalues


logger = logging.getLogger('reprounzip')


# Distro id of the machine running the unpacker (e.g. 'ubuntu').
THIS_DISTRIBUTION = distro.id()


# Placeholder version string used when a package is not installed locally.
PKG_NOT_INSTALLED = "(not installed)"


class CantFindInstaller(UsageError):
    """Raised when no package installer can be selected for this system."""
    def __init__(self, msg="Can't select a package installer"):
        UsageError.__init__(self, msg)


class AptInstaller(object):
    """Installer for deb-based systems (Debian, Ubuntu).
    """
    def __init__(self, binary):
        # Name/path of the apt front-end binary to invoke (e.g. 'apt-get').
        self.bin = binary
def is_redhat_variant():
    """Return True for Red Hat family systems (RHEL, Fedora, Oracle)."""
    ident = get_id_like() or distro.id()
    return any(marker in ident for marker in ('rhel', 'fedora', 'oracle'))
def _print_package_info(pack, info, verbosity=1):
    """Print a human-readable report about a .rpz pack file.

    `pack` is the pack path object, `info` the parsed metadata dict, and
    `verbosity` controls how much detail is shown (higher = more).
    """
    print("Pack file: %s" % pack)
    print("\n----- Pack information -----")
    print("Compressed size: %s" % hsize(pack.size()))
    info_pack = info.get('pack')
    if info_pack:
        if 'total_size' in info_pack:
            print("Unpacked size: %s" % hsize(info_pack['total_size']))
        if 'total_paths' in info_pack:
            print("Total packed paths: %d" % info_pack['total_paths'])
        if verbosity >= 3:
            # Per-file-type breakdown only at the highest verbosity.
            print(" Files: %d" % info_pack['files'])
            print(" Directories: %d" % info_pack['dirs'])
            if info_pack.get('symlinks'):
                print(" Symbolic links: %d" % info_pack['symlinks'])
            if info_pack.get('hardlinks'):
                print(" Hard links: %d" % info_pack['hardlinks'])
            if info_pack.get('others'):
                print(" Unknown (what!?): %d" % info_pack['others'])
    print("\n----- Metadata -----")
    info_meta = info['meta']
    if verbosity >= 3:
        print("Total paths: %d" % info_meta['total_paths'])
        print("Listed packed paths: %d" % info_meta['packed_paths'])
    if info_meta.get('packages'):
        print("Total software packages: %d" % info_meta['packages'])
        print("Packed software packages: %d" % info_meta['packed_packages'])
        if verbosity >= 3:
            print("Files from packed software packages: %d" %
                  info_meta['packed_packages_files'])
            print("Files from unpacked software packages: %d" %
                  info_meta['unpacked_packages_files'])
    if 'architecture' in info_meta:
        print("Architecture: %s (current: %s)" %
              (info_meta['architecture'], platform.machine().lower()))
    if 'distribution' in info_meta:
        # Compare the pack's origin distribution with the current machine's.
        distribution = ' '.join(t for t in info_meta['distribution'] if t)
        current_distribution = [distro.id(), distro.version()]
        current_distribution = ' '.join(t for t in current_distribution if t)
        print("Distribution: %s (current: %s)" % (
            distribution, current_distribution or "(not Linux)"))
    if 'runs' in info:
        runs = info['runs']
        print("Runs (%d):" % len(runs))
        for run in runs:
            cmdline = ' '.join(shell_escape(a) for a in run['argv'])
            # A single default-named run is printed without its id.
            if len(runs) == 1 and run['id'] == "run0":
                print(" %s" % cmdline)
            else:
                print(" %s: %s" % (run['id'], cmdline))
            if verbosity >= 2:
                print(" wd: %s" % run['workingdir'])
                if 'signal' in run:
                    print(" signal: %d" % run['signal'])
                else:
                    print(" exitcode: %d" % run['exitcode'])
                if run.get('walltime') is not None:
                    print(" walltime: %s" % run['walltime'])
    inputs_outputs = info.get('inputs_outputs')
    if inputs_outputs:
        if verbosity < 2:
            # Compact single-line summary.
            print("Inputs/outputs files (%d): %s" % (
                len(inputs_outputs), ", ".join(sorted(inputs_outputs))))
        else:
            # One line per file, annotated with read/write direction.
            print("Inputs/outputs files (%d):" % len(inputs_outputs))
            for name, f in sorted(iteritems(inputs_outputs)):
                t = []
                if f['read_runs']:
                    t.append("in")
                if f['write_runs']:
                    t.append("out")
                print(" %s (%s): %s" % (name, ' '.join(t), f['path']))
    unpacker_status = info.get('unpacker_status')
    if unpacker_status:
        print("\n----- Unpackers -----")
        for s, n in [(COMPAT_OK, "Compatible"), (COMPAT_MAYBE, "Unknown"),
                     (COMPAT_NO, "Incompatible")]:
            # Non-compatible categories are only shown at verbosity >= 2.
            if s != COMPAT_OK and verbosity < 2:
                continue
            if s not in unpacker_status:
                continue
            upks = unpacker_status[s]
            print("%s (%d):" % (n, len(upks)))
            for upk_name, msg in upks:
                if msg is not None:
                    print(" %s (%s)" % (upk_name, msg))
                else:
                    print(" %s" % upk_name)
def is_arch():
    """Return True when the running distribution identifies as Arch."""
    distro_name = distro.id()
    return 'arch' in distro_name
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA import distro import sys # access /usr/share/pyshared on Debian # http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=715010 if distro.id() == 'debian': sys.path[0:0] = ['/usr/share/pyshared'] import grp import pwd import os import socket from dhcpy6d import UDPMulticastIPv6 from dhcpy6d.config import cfg from dhcpy6d.globals import (config_answer_queue, config_query_queue, IF_NAME, route_queue, volatile_answer_queue, volatile_query_queue) from dhcpy6d.log import log from dhcpy6d.handler import RequestHandler
import os
import shutil
import unittest
import tempfile
import subprocess

import yaml
import distro
from mock import patch

from cloudify.mocks import MockCloudifyContext

from .. import tasks

# Distro id of the host running the tests (drives package-manager expectations).
distro_id = distro.id()
PATH = os.path.join(os.path.expanduser('~'), 'cloudify-filebeat-plugin')
# Scratch filebeat install directory and config file used by the tests.
TEMP_FILEBEAT = os.path.join(tempfile.gettempdir(), 'filebeat')
CONFIG_FILE = os.path.join(TEMP_FILEBEAT, 'filebeat.yml')
installation_file = os.path.join('/', 'etc', 'init.d', 'filebeat')

# Valid filebeat configuration fixture.
# NOTE(review): the 'looging' key looks like a typo for 'logging' — confirm it
# matches what the plugin under test actually expects.
dict1_valid = {
    'inputs': {'shipper': None,
               'looging': {'rotateeverybytes': 10485760}},
    'outputs': {'logstash': {'hosts': ['http://localhost:5044'],
                             'bulk_max_size': 10,
                             'index': 'filebeat'},
                'elasticsearch': {'hosts': ['http://localhost:9200'],
def run(self):
    """Install Taichi and its dependencies on the current platform.

    Reads an optional build type ('default' or 'ci') from sys.argv, makes sure
    pip and the required Python/system packages are available, then clones or
    reuses the Taichi repository and configures the environment.

    Side effects: runs package managers via shell, mutates os.environ and the
    module-level ``build_type`` global, and may exit(-1) on a failed install.
    """
    assert get_os_name() in ['linux', 'osx', 'win'], \
        'Platform {} is not currently supported by this script. Please install manually.'.format(get_os_name())
    if len(sys.argv) > 1:
        self.build_type = sys.argv[1]
        print('Build type: ', self.build_type)
    else:
        self.build_type = 'default'
    # Mirror the build type into the module-level global for other helpers.
    global build_type
    build_type = self.build_type
    print('Build type = {}'.format(self.build_type))
    assert self.build_type in ['default', 'ci']

    check_command_existence('wget')
    try:
        import pip
        print('pip3 installation detected')
    except Exception as e:
        # pip missing: bootstrap it with get-pip.py (best effort).
        print(e)
        print('Installing pip3')
        execute_command('wget https://bootstrap.pypa.io/get-pip.py')
        subprocess.run([get_python_executable(), "get-pip.py", "--user"])
        execute_command('rm get-pip.py')

    # Python-level build/runtime dependencies.
    subprocess.run([get_python_executable(), "-m", "pip", "install", "--user",
                    "colorama", "numpy", "Pillow", "flask", "scipy", "pybind11",
                    "flask_cors", "GitPython", "yapf", "distro", "requests", "PyQt5"])
    print("importing numpy test:")
    ret = subprocess.run([get_python_executable(), "-c", "import numpy as np"])
    print("ret:", ret)

    execute_command('cmake --version')
    if get_os_name() == 'osx':
        # Check command existence
        check_command_existence('git')
        check_command_existence('cmake')
    elif get_os_name() == 'linux':
        check_command_existence('sudo')
        execute_command('sudo apt-get update')  # TODO: this works for Ubuntu only
        if self.build_type != 'ci':
            import distro
            dist = distro.id()
        else:
            # CI images are assumed to be Ubuntu; skip the distro probe.
            dist = 'ubuntu'
        # BUG FIX: the original passed the format template and the value as two
        # separate print() arguments, so the '{}' placeholder was never filled.
        print("Linux distribution '{}' detected".format(dist))
        if dist == 'ubuntu':
            if self.build_type == 'ci':
                execute_command('sudo apt-get install -y python3-dev libx11-dev')
            else:
                execute_command('sudo apt-get install -y python3-dev git build-essential cmake make g++ libx11-dev')
        elif dist == 'arch':
            execute_command('sudo pacman --needed -S git cmake make gcc')
        else:
            print("Unsupported Linux distribution.")
    subprocess.run([get_python_executable(), "-m", "pip", "install", "--user", "psutil"])

    self.detect_or_setup_repo()
    # TODO: Make sure there is no existing Taichi ENV
    set_env('TAICHI_NUM_THREADS', 8)
    set_env('TAICHI_REPO_DIR', self.repo_dir)
    set_env('PYTHONPATH',
            '$TAICHI_REPO_DIR/python/' + get_path_separator() + '$PYTHONPATH',
            '{}/python/'.format(self.repo_dir) + get_path_separator() + os.environ.get('PYTHONPATH', ''))
    set_env('PATH',
            '$TAICHI_REPO_DIR/bin/' + get_path_separator() + '$PATH',
            os.path.join(self.repo_dir, 'bin') + get_path_separator() + os.environ.get('PATH', ''))
    os.environ['PYTHONIOENCODING'] = 'utf-8'
    print('PYTHONPATH={}'.format(os.environ['PYTHONPATH']))
    execute_command('echo $PYTHONPATH')

    if test_installation():
        print(' Successfully Installed Taichi at {}.'.format(self.repo_dir))
        if get_os_name() != 'win':
            if execute_command('ti') != 0:
                print(' Warning: shortcut "ti" does not work.')
            if execute_command('taichi') != 0:
                print(' Warning: shortcut "taichi" does not work.')
            print(' Please execute')
            print(' source ~/.bashrc')
            print(' or restart your terminal.')
    else:
        print(' Error: installation failed.')
        exit(-1)
def write_configuration(directory, sort_packages, find_inputs_outputs,
                        overwrite=False):
    """Writes the canonical YAML configuration file.

    Reads the trace database under ``directory``, optionally groups traced
    files by distribution package, and writes (or appends runs to)
    ``directory / 'config.yml'``.

    :param directory: Path-like experiment directory containing trace.sqlite3.
    :param sort_packages: if true, attribute traced files to their packages.
    :param find_inputs_outputs: if true, compute the inputs/outputs section.
    :param overwrite: if true (or no config exists), write a fresh config;
        otherwise load the existing one and only append newly traced runs.
    """
    database = directory / 'trace.sqlite3'

    if PY3:
        # On PY3, connect() only accepts unicode
        conn = sqlite3.connect(str(database))
    else:
        conn = sqlite3.connect(database.path)
    conn.row_factory = sqlite3.Row

    # Reads info from database
    files, inputs, outputs = get_files(conn)

    # Identifies which file comes from which package
    if sort_packages:
        files, packages = identify_packages(files)
    else:
        packages = []

    # Writes configuration file
    config = directory / 'config.yml'
    # Recorded as [id, version] of the tracing host's Linux distribution.
    distribution = [distro.id(), distro.version()]
    cur = conn.cursor()
    if overwrite or not config.exists():
        runs = []
        # This gets all the top-level processes (p.parent ISNULL) and the first
        # executed file for that process (sorting by ids, which are
        # chronological)
        executions = cur.execute(
            '''
            SELECT e.name, e.argv, e.envp, e.workingdir, p.timestamp,
                p.exit_timestamp, p.exitcode
            FROM processes p
            JOIN executed_files e ON e.id=(
                SELECT id FROM executed_files e2
                WHERE e2.process=p.id
                ORDER BY e2.id
                LIMIT 1
            )
            WHERE p.parent ISNULL;
            ''')
    else:
        # Loads in previous config
        runs, oldpkgs, oldfiles = load_config(config,
                                              canonical=False,
                                              File=TracedFile)
        # Same query as previous block but only gets last process
        # (OFFSET skips the runs already present in the loaded config;
        # LIMIT 2147483647 is SQLite's idiom for "no limit" with OFFSET).
        executions = cur.execute(
            '''
            SELECT e.name, e.argv, e.envp, e.workingdir, p.timestamp,
                p.exit_timestamp, p.exitcode
            FROM processes p
            JOIN executed_files e ON e.id=(
                SELECT id FROM executed_files e2
                WHERE e2.process=p.id
                ORDER BY e2.id
                LIMIT 1
            )
            WHERE p.parent ISNULL
            ORDER BY p.id
            LIMIT 2147483647
            OFFSET ?;
            ''', (len(runs),))
    for (r_name, r_argv, r_envp, r_workingdir, r_start, r_end,
            r_exitcode) in executions:
        # Decodes command-line (NUL-separated, possibly NUL-terminated)
        argv = r_argv.split('\0')
        if not argv[-1]:
            argv = argv[:-1]
        # Decodes environment (same NUL-separated encoding as argv)
        envp = r_envp.split('\0')
        if not envp[-1]:
            envp = envp[:-1]
        environ = dict(v.split('=', 1) for v in envp)
        run = {'id': "run%d" % len(runs),
               'binary': r_name, 'argv': argv,
               'workingdir': unicode_(Path(r_workingdir)),
               'architecture': platform.machine().lower(),
               'distribution': distribution,
               'hostname': platform.node(),
               'system': [platform.system(), platform.release()],
               'environ': environ,
               'uid': os.getuid(),
               'gid': os.getgid()}
        # Bit 8 of the stored exitcode flags death-by-signal; the low byte
        # then holds the signal number instead of the exit status.
        if r_exitcode & 0x0100:
            run['signal'] = r_exitcode & 0xFF
        else:
            run['exitcode'] = r_exitcode & 0xFF
        if r_end is not None:
            run['walltime'] = (r_end - r_start) / 1.0E9  # ns to s
        runs.append(run)
    cur.close()
    conn.close()

    if find_inputs_outputs:
        inputs_outputs = compile_inputs_outputs(runs, inputs, outputs)
    else:
        inputs_outputs = {}

    save_config(config, runs, packages, files, reprozip_version,
                inputs_outputs)

    print("Configuration file written in {0!s}".format(config))
    print("Edit that file then run the packer -- "
          "use 'reprozip pack -h' for help")
import shlex import tempfile import subprocess import pkg_resources import jinja2 import distro import requests from cloudify import ctx from cloudify import exceptions from cloudify.decorators import operation import telegraf_plugin dist = distro.id() TELEGRAF_CONFIG_FILE_DEFAULT = os.path.join('/', 'etc', 'telegraf', 'telegraf.conf') TELEGRAF_PATH_DEFAULT = os.path.join('/', 'opt', 'telegraf') @operation def install(telegraf_config_inputs, telegraf_config_file='', telegraf_install_path='', download_url='', **kwargs): """Installation operation. Downloading and installing telegraf packacge - default version is 0.12.0. Default installation dir is set to /opt/telegraf.
def __init__(self, args):
    """Resolve the Qt install location and select the download URL for this host.

    Side effects: creates the Qt base and lock directories if missing.  Sets
    ``self.qtUrl`` unless VIRCADIA_USE_PREBUILT_QT is set, in which case it
    returns early.  Raises Exception for unsupported OS / CPU architecture /
    Linux distribution combinations.
    """
    self.args = args
    self.configFilePath = os.path.join(args.build_root, 'qt.cmake')
    self.version = os.getenv('VIRCADIA_USE_QT_VERSION', '5.15.2')
    self.assets_url = hifi_utils.readEnviromentVariableFromFile(args.build_root, 'EXTERNAL_BUILD_ASSETS')

    defaultBasePath = os.path.expanduser('~/hifi/qt')
    self.basePath = os.getenv('HIFI_QT_BASE', defaultBasePath)
    if not os.path.isdir(self.basePath):
        os.makedirs(self.basePath)
    self.path = os.path.join(self.basePath, self.version)
    self.fullPath = os.path.join(self.path, 'qt5-install')
    self.cmakePath = os.path.join(self.fullPath, 'lib/cmake')

    print("Using qt path {}".format(self.path))
    lockDir, lockName = os.path.split(self.path)
    lockName += '.lock'
    if not os.path.isdir(lockDir):
        os.makedirs(lockDir)

    self.lockFile = os.path.join(lockDir, lockName)

    if os.getenv('VIRCADIA_USE_PREBUILT_QT'):
        print("Using pre-built Qt5")
        return

    # OS dependent information
    system = platform.system()
    cpu_architecture = platform.machine()

    if 'Windows' == system:
        self.qtUrl = self.assets_url + '/dependencies/vcpkg/qt5-install-5.15.2-windows.tar.gz'
    elif 'Darwin' == system:
        self.qtUrl = self.assets_url + '/dependencies/vcpkg/qt5-install-5.15.2-macos.tar.gz'
    elif 'Linux' == system:
        import distro
        # BUG FIX: the original called distro.linux_distribution() here and
        # discarded the result; that API was removed in distro >= 1.9 and made
        # the whole constructor crash.  It also eagerly computed
        # int(distro.minor_version()), which raises ValueError on distros whose
        # minor version is empty; neither value was ever used.
        if 'x86_64' == cpu_architecture:
            if distro.id() == 'ubuntu':
                u_major = int(distro.major_version())
                if u_major == 18:
                    self.qtUrl = self.assets_url + '/dependencies/vcpkg/qt5-install-5.15.2-ubuntu-18.04-amd64.tar.xz'
                elif u_major > 19:
                    print("We don't support " + distro.name(pretty=True) + " yet. Perhaps consider helping us out?")
                    raise Exception('LINUX DISTRO IS NOT SUPPORTED YET!!!')
                else:
                    print("Sorry, " + distro.name(pretty=True) + " is old and won't be officially supported. Please consider upgrading.")
                    raise Exception('UNKNOWN LINUX DISTRO VERSION!!!')
            else:
                print("Sorry, " + distro.name(pretty=True) + " is not supported on x86_64. Please consider helping us out.")
                print("It's also possible to build Qt for your distribution, please see the documentation at:")
                print("https://github.com/vircadia/vircadia/tree/master/tools/qt-builder")
                raise Exception('UNKNOWN LINUX VERSION!!!')
        elif 'aarch64' == cpu_architecture:
            if distro.id() == 'ubuntu':
                u_major = int(distro.major_version())
                if u_major == 18:
                    self.qtUrl = 'http://motofckr9k.ddns.net/vircadia_packages/qt5-install-5.15.2-ubuntu-18.04-aarch64_test.tar.xz'
                elif u_major > 19:
                    print("We don't support " + distro.name(pretty=True) + " on aarch64 yet. Perhaps consider helping us out?")
                    raise Exception('LINUX DISTRO IS NOT SUPPORTED YET!!!')
                else:
                    print("Sorry, " + distro.name(pretty=True) + " is old and won't be officially supported. Please consider upgrading.")
                    raise Exception('UNKNOWN LINUX DISTRO VERSION!!!')
            elif distro.id() == 'debian':
                u_major = int(distro.major_version())
                if u_major == 10:
                    #self.qtUrl = self.assets_url + '/dependencies/vcpkg/qt5-install-5.12.3-ubuntu-16.04-with-symbols.tar.gz'
                    print("We don't support " + distro.name(pretty=True) + " on aarch64 yet. Perhaps consider helping us out?")
                    raise Exception('LINUX DISTRO IS NOT SUPPORTED YET!!!')
                elif u_major > 10:
                    print("We don't support " + distro.name(pretty=True) + " on aarch64 yet. Perhaps consider helping us out?")
                    raise Exception('LINUX DISTRO IS NOT SUPPORTED YET!!!')
                else:
                    print("Sorry, " + distro.name(pretty=True) + " is old and won't be officially supported. Please consider upgrading.")
                    raise Exception('UNKNOWN LINUX DISTRO VERSION!!!')
            else:
                print("Sorry, " + distro.name(pretty=True) + " is not supported on aarch64. Please consider helping us out.")
                print("It's also possible to build Qt for your distribution, please see the documentation at:")
                print("https://github.com/vircadia/vircadia/tree/master/tools/qt-builder")
                raise Exception('UNKNOWN LINUX VERSION!!!')
        else:
            raise Exception('UNKNOWN CPU ARCHITECTURE!!!')
    else:
        print("System : " + platform.system())
        # BUG FIX: platform.architecture() returns a (bits, linkage) tuple;
        # concatenating it to a str raised TypeError before the intended
        # diagnostic could be printed.
        print("Architecture: " + str(platform.architecture()))
        print("Machine : " + platform.machine())
        raise Exception('UNKNOWN OPERATING SYSTEM!!!')
try:
    import distro
    print(distro.id())
except ImportError:
    # `distro` not installed: fall back to the old stdlib helper.
    # NOTE(review): platform.linux_distribution() was removed in Python 3.8,
    # so this fallback only works on older interpreters — confirm targets.
    import platform
    print(platform.linux_distribution()[0].lower())
os.environ['DEBIAN_FRONTEND'] = "noninteractive" # 更新apt-get cmd('apt-get -y -q update') # 安装必须的包 cmd('apt-get -y -q install git python3 python3-pip wget curl') # 安装非必须的包 try: # 更新一下openssl cmd('apt-get -y -q install openssl', allow_failure=True) except: pass # 如果安装了, 则可以启用http2 ppa_available = cmd('apt-get -y -q install software-properties-common python-software-properties', allow_failure=True) if distro.id() == 'ubuntu' and ppa_available: # 安装高版本的Apache2(支持http2), 仅限ubuntu cmd("""LC_ALL=C.UTF-8 add-apt-repository -y ppa:ondrej/apache2 && apt-key update && apt-get -y -q update && apt-get -y -q install apache2""") else: # debian 只有低版本的可以用 cmd("apt-get -y -q install apache2") cmd("""a2enmod rewrite mime include headers filter expires deflate autoindex setenvif ssl""") if not cmd("a2enmod http2", allow_failure=True): warnprint("[Warning!] your server does not support http2") sleep(0.5)
#!/usr/bin/env python3
"""Regenerate the German and English Qt translation files for the client app."""
import pathlib
import subprocess
import os

import distro

script_dir = pathlib.Path(__file__).parent.resolve()
client_dir = os.path.join(script_dir, '..', 'client')
i18n_dir = os.path.join(script_dir, '..', 'client', 'app', 'i18n')
de_ts = os.path.join(i18n_dir, 'qml_de.ts')
en_ts = os.path.join(i18n_dir, 'qml_en.ts')

# Mint/Ubuntu ship the tool as plain `lupdate`; other distros name it `lupdate-qt5`.
if distro.id() in ("linuxmint", "ubuntu"):
    lupdate_tool = 'lupdate'
else:
    lupdate_tool = 'lupdate-qt5'

subprocess.run(
    [lupdate_tool, client_dir, '-ts', de_ts, '-ts', en_ts],
    check=True,
)
def get_os_util(cls):
    """Return an OS-util instance for the host distribution.

    Probes each registered subclass with the current ``distro.id()`` and
    instantiates the first match; raises InvalidAmphoraOperatingSystem when
    no subclass recognizes the distribution.
    """
    os_name = distro.id()
    matched = next(
        (sc for sc in cls._get_subclasses() if sc.is_os_name(os_name)),
        None,
    )
    if matched is not None:
        return matched(os_name)
    raise octavia_exceptions.InvalidAmphoraOperatingSystem(os_name=os_name)
def get_distro_family():
    """Return the distribution family name.

    Returns 'redhat' for RHEL-derived distributions (rhel, centos, fedora);
    otherwise returns the raw ``distro.id()`` value unchanged.
    """
    # Query distro.id() once; the original re-invoked it on every branch
    # (up to three calls per invocation) for the same value.
    distro_id = distro.id()
    if distro_id in ('rhel', 'centos', 'fedora'):
        return 'redhat'
    return distro_id