def os_distribution():
    if platform.system() == SYSTEM_WINDOWS:
        # windows distribution
        major, minor, build, code = _get_windows_version()
        if code in (VER_NT_DOMAIN_CONTROLLER, VER_NT_SERVER):
            # we are on server os
            release = None
            if major == 6:
                if minor == 0:
                    release = REL_2008
                elif minor == 1:
                    release = REL_2008R2
                elif minor == 2:
                    release = REL_2012
                elif minor == 3:
                    release = REL_2012R2
            distribution = (release, "{0}.{1}".format(major, minor), "WindowsServer")
        else:
            # we are on unsupported desktop os
            distribution = ("", "", "")
    else:
        # linux distribution
        PYTHON_VER = sys.version_info[0] * 10 + sys.version_info[1]
        if PYTHON_VER < 26:
            distribution = platform.dist()
        elif os.path.exists('/etc/redhat-release'):
            distribution = platform.dist()
        else:
            distribution = platform.linux_distribution()
    return distribution
def format_platform_info():
    platform_info = [
        'architecture: %s %s\n' % platform.architecture(),
        'machine: %s\n' % platform.machine(),
        'platform: %s\n' % platform.platform(),
    ]
    libc_ver = '%s: %s\n' % platform.libc_ver()
    if libc_ver.strip():
        platform_info.append(libc_ver)
    if platform.dist() != ('', '', ''):
        platform_info.append('GNU/Linux: %s\n' % ' '.join(platform.dist()))
    elif platform.mac_ver() != ('', ('', '', ''), ''):
        platform_info.append('Mac OS X: %s\n' % platform.mac_ver()[0])
    elif platform.win32_ver() != ('', '', '', ''):
        platform_info.append('Windows: %s\n' % platform.win32_ver()[0])
    platform_info.append('python_compiler: %s\n' % platform.python_compiler())
    platform_info.append(
        'python_implementation: %s\n' % platform.python_implementation())
    platform_info.append('locale: %s\n' % (locale.getlocale(),))
    platform_info.append('default encoding: %s\n' % sys.getdefaultencoding())
    platform_info.append('file system encoding: %s\n' % sys.getfilesystemencoding())
    return platform_info
def test_dist_deprecation(self):
    with self.assertWarns(PendingDeprecationWarning) as cm:
        platform.dist()
    self.assertEqual(str(cm.warning),
                     'dist() and linux_distribution() functions are '
                     'deprecated in Python 3.5 and will be removed in '
                     'Python 3.7')
def getHostName():
    fd = {}
    Host = socket.gethostname()
    OsRelease = platform.dist()[0] + platform.dist()[1]
    fd["HostName"] = Host
    fd["OsRelease"] = OsRelease
    return fd
def __init__(self):
    # ----------------- NIC INFO -----------------
    self.os = platform.dist()[0]
    # If system is "debian":
    if self.os == 'debian':
        self.hostname = socket.gethostname()
        self.iface = ni.interfaces()[1]
        self.ipaddress = ni.ifaddresses(self.iface)[ni.AF_INET][0]['addr']
        self.subnet = ni.ifaddresses(self.iface)[ni.AF_INET][0]['netmask']
        self.gateways = ni.gateways()['default'][ni.AF_INET][0]
        # --- OS INFO ---------------------
        self.os_ver = platform.dist()[1]
        self.mac = ''.join('%012x' % get_mac())
        self.ip_data = get_ip()
        self.path_ip = '/etc/network/interfaces'
        self.dns_file = '/etc/resolv.conf'
    # If system is "Arch Linux":
    else:
        self.hostname = socket.gethostname()
        self.iface = ni.interfaces()[1]
        self.ipaddress = ni.ifaddresses(self.iface)[ni.AF_INET][0]['addr']
        self.subnet = ni.ifaddresses(self.iface)[ni.AF_INET][0]['netmask']
        self.gateways = ni.gateways()['default'][ni.AF_INET][0]
        # --- OS INFO ---------------------
        self.os_ver = platform.dist()[1]
        self.mac = ''.join('%012x' % get_mac())
        self.ip_data = get_ip()
        self.path_ip = '/etc/netctl/eth0'
        self.dns_file = '/etc/resolv.conf'
    logger.debug('GET IP SETTING OK!')
def is_unity_16_04():
    if is_gtk():
        ver = platform.dist()[1].split('.')[0]
        dist = platform.dist()[0] + ' ' + ver
        if dist == 'Ubuntu 16' and \
                os.environ['XDG_CURRENT_DESKTOP'] == 'Unity':
            return True
    return False
def build_system(self):
    """ from https://github.com/Dreyer/pyinfo/blob/master/pyinfo.py """
    system = {
        'path': False,
        'os_path': False,
        'os_version': False,
        'version': False,
        'subversion': False,
        'prefix': False,
        'build_date': platform.python_build()[1],
        'executable': False,
        'compiler': platform.python_compiler(),
        'api_version': False,
        'implementation': platform.python_implementation(),
        'system': platform.system(),
    }
    if platform.dist()[0] != '' and platform.dist()[1] != '':
        system['os_version'] = '%s %s (%s %s)' % (
            platform.system(), platform.release(),
            platform.dist()[0].capitalize(), platform.dist()[1]
        )
    else:
        system['os_version'] = '%s %s' % (
            platform.system(), platform.release()
        )
    if hasattr(os, 'path'):
        system['os_path'] = os.environ['PATH']
    if hasattr(sys, 'version'):
        system['version'] = platform.python_version()
    if hasattr(sys, 'subversion'):
        system['subversion'] = ', '.join(sys.subversion)
    if hasattr(sys, 'prefix'):
        system['prefix'] = sys.prefix
    if hasattr(sys, 'path'):
        system['path'] = sys.path
    if hasattr(sys, 'executable'):
        system['executable'] = sys.executable
    if hasattr(sys, 'api_version'):
        # store under the key initialised above ('api_version', not 'api')
        system['api_version'] = sys.api_version
    self.system = system
def check_for_supported_waagent_and_distro_version():
    """
    Checks & returns if the installed waagent and the Linux distro/version are supported by this LAD.
    :rtype: bool
    :return: True iff so.
    """
    for notsupport in ('WALinuxAgent-2.0.5', 'WALinuxAgent-2.0.4', 'WALinuxAgent-1'):
        code, str_ret = waagent.RunGetOutput("grep 'GuestAgentVersion.*" + notsupport + "' /usr/sbin/waagent",
                                             chk_err=False)
        if code == 0 and str_ret.find(notsupport) > -1:
            hutil.log("cannot run this extension on " + notsupport)
            hutil.do_status_report(g_ext_op_type, "error", '1', "cannot run this extension on " + notsupport)
            return False
    if g_dist_config is None:
        msg = ("LAD does not support distro/version ({0}); not installed. This extension install/enable operation is "
               "still considered a success as it's an external error.").format(str(platform.dist()))
        hutil.log(msg)
        hutil.do_status_report(g_ext_op_type, "success", '0', msg)
        waagent.AddExtensionEvent(name=hutil.get_name(),
                                  op=g_ext_op_type,
                                  isSuccess=True,
                                  version=hutil.get_extension_version(),
                                  message="Can't be installed on this OS " + str(platform.dist()))
        return False
    return True
def get_properties(group):
    mem = psutil.virtual_memory()
    disk = psutil.disk_usage('/')
    properties = {}
    if group is None or group == 'fresh':
        if platform.machine().startswith('arm') and platform.system() == 'Linux':
            # raspberry pi
            properties["cpuTemp"] = get_rpi_cpu_temperature()
        properties["ramAvailable"] = int(mem.available / (1024 * 1024))
        properties["usedDiskSpaceRoot"] = int(disk.used / (1024 * 1024))
        properties["bootTime"] = datetime.datetime.fromtimestamp(psutil.boot_time()).strftime(Publisher.DATE_FORMAT)
        properties["cpuLoad"] = psutil.cpu_percent(interval=3)
    if group is None or group == 'seldom':
        if platform.system() == 'Darwin':
            # mac
            properties["release"] = platform.mac_ver()[0]
        elif platform.machine().startswith('arm') and platform.system() == 'Linux':
            # raspberry pi
            properties["distribution"] = "{} {}".format(platform.dist()[0], platform.dist()[1])
        for i in NET_INTERFACES:
            properties["{}IpAddress".format(i)] = get_ip(i)
        properties["totalDiskSpaceRoot"] = int(disk.total / (1024 * 1024))
        properties["hostname"] = platform.node()
        properties["machine"] = platform.machine()
        properties["system"] = platform.system()
        properties["cpuProcessorCount"] = psutil.cpu_count()
        properties["ramTotal"] = int(mem.total / (1024 * 1024))
    return properties
def __init__(self):
    self.port = PORT
    self.ip = IP
    self.dbpath = None
    self.index_only_acc_supported = 0
    self.nojournal = "False"
    self.replSet = False
    if "Ubuntu" in platform.dist():
        conf = open("/etc/mongodb.conf").readlines()
    elif "centos" in platform.dist() or "redhat" in platform.dist():
        conf = open("/etc/mongod.conf").readlines()
    for line in conf:
        if line.strip().startswith("port"):
            self.port = line.split("=")[1].strip()
        if line.strip().startswith("bind_ip"):
            self.ip = line.split("=")[1].strip()
        if line.strip().startswith("dbpath"):
            self.dbpath = line.split("=")[1].strip()
        if line.strip().startswith("#CA_MODIFIED_MONGO_SERVER=1"):
            self.index_only_acc_supported = 1
        if line.strip().startswith("nojournal"):
            self.nojournal = line.split("=")[1].strip('\n')
        if line.strip().startswith("replSet"):
            self.replSet = True
    return
def __init__(self, parsed_args): # TODO: put in environemnt checks, i.e., does virt-install exist, etc. # TODO: verify that we're running as root. # Save relative path to module. # This is necessary because we don't know where the site-packages # directory will live, so we have to determine that at runtime. package_directory = os.path.dirname(os.path.abspath(__file__)) # Load include_vars and funcs. varsyaml = os.path.join(package_directory, 'include_vars.yaml') include_vars_yaml = open(varsyaml).read() self.vars = yaml.load(include_vars_yaml) self.funcs = include_funcs.KVMInstallFuncs() # Check to see if we're on a supported platform. if platform.dist()[0] not in self.vars['supported_platforms']: raise Exception('unsupported platform: ' + platform.dist()[0]) # This make my IDE happy self.config = {} # Parse the config file and build our config object if parsed_args.verbose is True: print ' parsing config file' if parsed_args.configfile is None: parsed_args.configfile = self.vars['default_config'] self.config = self.funcs.parse_config(parsed_args) # Set up our random string and temp directory domain = string.ascii_letters + string.digits random8 = self.funcs.get_random(domain, 8) stdout, stderr, virsh_netdumpxml = self.funcs.setup_tmp(random8) self.config['stdout'] = stdout self.config['stderr'] = stderr self.config['virsh_netdumpxml'] = virsh_netdumpxml # If we have both a clone and image config directive, prefer LVM if 'clone' in self.config: if self.config['verbose'] is True: print ' setting up lvm' self.setup_lvm() else: if self.config['verbose'] is True: print ' setting up image' if 'image' in self.config: self.setup_image() else: raise Exception('you must specify either an LVM ' + 'or file base image with -c or -i') # Now set up the new network try: if self.config['verbose'] is True: print ' setting up network' self.setup_network() except Exception, e: raise Exception('setup network failed: ' + str(e))
def get_repo(self):
    if not os.path.isdir('repo/'):
        os.makedirs('repo/')
    if platform.architecture()[0] == '32bit':
        arch = 'x86'
    else:
        arch = 'x86_64'
    slackrepo = None
    f = open('/etc/slackpkg/mirrors', 'r')
    for line in f:
        x = line.find('#')
        if x == -1:
            slackrepo = line[:-2]
    f.close()
    gobject.idle_add(printInThread, 'Download pkg')
    os.popen('wget -q ' + slackrepo + '/PACKAGES.TXT -O repo/slackware.txt')
    os.popen('wget -q ' + slackrepo + '/extra/PACKAGES.TXT -O repo/slackware_extra.txt')
    os.popen('wget -q ' + slackrepo + '/pasture/PACKAGES.TXT -O repo/slackware_pasture.txt')
    os.popen('wget -q ' + slackrepo + '/patches/PACKAGES.TXT -O repo/slackware_patches.txt')
    os.popen('wget -q ' + slackrepo + '/testing/PACKAGES.TXT -O repo/slackware_testing.txt')
    os.popen('wget -q http://bear.alienbase.nl/mirrors/people/alien/sbrepos/' + platform.dist()[1] + '/' + arch + '/PACKAGES.TXT -O repo/alien.txt')
    # os.popen('wget -q http://www.slackware.com/~alien/slackbuilds/PACKAGES.TXT -O repo/slackbuild_alien.txt')  # non-official repo
    os.popen('wget -q http://bear.alienbase.nl/mirrors/people/alien/restricted_sbrepos/' + platform.dist()[1] + '/' + arch + '/PACKAGES.TXT -O repo/alien_restricted.txt')
    if platform.architecture()[0] == '32bit':
        os.popen('wget -q http://repository.slacky.eu/slackware-' + platform.dist()[1] + '/PACKAGES.TXT -O repo/slacky.txt')
    else:
        os.popen('wget -q http://repository.slacky.eu/slackware64-' + platform.dist()[1] + '/PACKAGES.TXT -O repo/slacky.txt')
    os.popen('wget -q http://slakfinder.org/slackpkg+/PACKAGES.TXT -O repo/slackpkg_plus.txt')
def setup_cctools_binaries(options):
    """Download the appropriate version of cctools and install"""
    for os_version in ('5', '6'):
        cctools_url = "http://uc3-data.uchicago.edu/parrot/" \
                      "cctools-current-x86_64-redhat%s.tar.gz" % (os_version)
        cctools_dir = download_tarball(cctools_url, options.bin_dir)
        if os_version == platform.dist()[1][0]:
            os.link(os.path.join(cctools_dir, 'bin', 'chirp_server'),
                    os.path.join(options.bin_dir, 'chirp_server'))
            os.link(os.path.join(cctools_dir, 'bin', 'chirp_server_hdfs'),
                    os.path.join(options.bin_dir, 'chirp_server_hdfs'))
            os.link(os.path.join(cctools_dir, 'bin', 'chirp'),
                    os.path.join(options.bin_dir, 'chirp'))
            sys_cctools_dir = cctools_dir
        shutil.copytree(cctools_dir, os.path.join(options.bin_dir, 'parrot'))
        current_dir = os.getcwd()
        os.chdir(options.bin_dir)
        shutil.rmtree(os.path.join('parrot', 'doc'))
        shutil.rmtree(os.path.join('parrot', 'share'))
        tarball = tarfile.open("parrot-sl%s.tar.gz" % os_version, mode='w:gz')
        tarball.add('parrot')
        tarball.close()
        shutil.rmtree('parrot')
        if os_version != platform.dist()[1][0]:
            shutil.rmtree(cctools_dir)
        os.chdir(current_dir)
    return os.path.abspath(sys_cctools_dir)
def generate_binary_package():
    _common_init()
    cleanup_workspace(['.rpm'], ['src.rpm'])
    logger = logging.getLogger("%s:generate_binary_package" % __name__)
    srpm = ""
    # find srpm
    files = os.listdir(".")
    for i in files:
        if i.endswith(".src.rpm"):
            srpm = i
            break
    if not srpm:
        logger.error("No src.rpm found")
        sys.exit(1)
    logger.info("Using %s" % srpm)
    arch = get_env("architecture")
    distri = get_env("distribution")
    logger.info("Building for distribution %s and architecture %s" % (distri, arch))
    if platform.dist()[0] == "fedora":
        from jpb.build_provider.mock import mock as cbuilder
    elif platform.dist()[0] == "SuSE":
        from jpb.build_provider.build import build as cbuilder
    elif platform.dist()[0] == "debian":
        from jpb.build_provider.mock import mock as cbuilder
    else:
        logger.error("Currently unsupported build platform")
        sys.exit(1)
    builder = cbuilder(config['WORKSPACE'], distribution=distri, architecture=arch)
    if not builder.build(srpm):
        logger.error("Build failed see log for details")
        sys.exit(1)
def linuxUnLoadInitServices(service):
    # UnLoad Init Services
    print "UnLoading service " + service
    useSYSTEMD = False
    if platform.dist()[0] == "Ubuntu" and LooseVersion(platform.dist()[1]) >= LooseVersion("15.0"):
        useSYSTEMD = True
    elif (platform.dist()[0] == "redhat" or platform.dist()[0] == "centos") and \
            LooseVersion(platform.dist()[1]) >= LooseVersion("7.0"):
        useSYSTEMD = True
    else:
        print "Unable to start service (" + service + ") at start up. OS(" + platform.dist()[0] + ") is unsupported."
        return
    if useSYSTEMD == True:
        serviceName = service + ".service"
        etcServiceConf = "/etc/systemd/system/" + serviceName
        if os.path.exists(etcServiceConf):
            os.system("/bin/systemctl stop " + serviceName)
        else:
            print "Unable to find " + etcServiceConf
    else:
        _initFile = "/etc/init.d/" + service
        if os.path.exists(_initFile):
            os.system("/etc/init.d/" + service + " stop")
        else:
            print "Unable to find " + _initFile
def do_prerun_checks():
    # make sure this is running on a supported OS
    if os.name not in ("nt", "posix"):
        logging.error("Build script only supports Linux or Windows at this time")
        sys.exit(1)
    if os.name == "posix" and platform.dist()[0] != "Ubuntu":
        logging.error("Non-Ubuntu install detected. Only Ubuntu Linux is supported at this time")
        sys.exit(1)
    # under *nix, script must be run as root
    if os.name == "posix" and os.geteuid() != 0:
        logging.error("This script must be run as root (use 'sudo' to run)")
        sys.exit(1)
    if os.name == "posix" and "SUDO_USER" not in os.environ:
        logging.error("Please use `sudo` to run this script.")
    # establish python version
    global PYTHON3_VER
    if os.name == "nt":
        PYTHON3_VER = "3.3"
    elif os.name == "posix" and platform.dist()[0] == "Ubuntu" and platform.linux_distribution()[1] == "12.04":
        # ubuntu 12.04 -- python 3.3 will be installed in install_dependencies, as flask requires it
        PYTHON3_VER = "3.3"
    else:
        allowed_vers = ["3.4", "3.3"]
        for ver in allowed_vers:
            if which("python%s" % ver):
                PYTHON3_VER = ver
                logging.info("Found Python version %s" % PYTHON3_VER)
                break
        else:
            logging.error("Cannot find your Python version in your path. You need one of the following versions: %s"
                          % ', '.join(allowed_vers))
            sys.exit(1)
def _versionchecklinux(package):
    if platform.dist()[0] == "Ubuntu" or platform.dist()[0] == "Debian":
        oldversion, msg = _versioncheckapt(package)
    else:
        log("Unsupported platform %s" % platform.dist()[0])
        sys.exit(0)
    return oldversion, msg
def installComponents(): def ErrorMessage(ec): logging.info("Installation ended with error, exit_code: %s" % str(ec)) sys.exit(1) currentPlatform = (platform.dist()[0].lower()) currentRelease = (platform.dist()[1].split('.')[0]) postgresURL = ('http://yum.postgresql.org/9.4/redhat/rhel-%s-x86_64/pgdg-%s94-9.4-1.noarch.rpm' % (currentRelease, currentPlatform)) ExitCodePostgres = call(['rpm', '-q', 'postgresql94'], stdout=PIPE) ExitCodeNonVersionPostgres = call(['rpm', '-q', 'postgresql'], stdout=PIPE) ExitCodeSqlplus = call(['rpm', '-q', 'oracle-instantclient12.1-sqlplus-12.1*'], stdout=PIPE) ExitCodeBasic = call(['rpm', '-q', 'oracle-instantclient12.1-basic-12.1*'], stdout=PIPE) ExitCodePostgresInstallRepo = call(['rpm', '-q', 'pgdg-centos94-9.4-1'], stdout=PIPE) try: if "postgresql" == install or "all" == install: if 0 == ExitCodeNonVersionPostgres: o = Popen(['rpm', '-q', 'postgresql'], stdout=PIPE) ovp, err = o.communicate() logging.info("You have installed %s, to work correctly, remove the old version of PostgreSQL" % ovp.strip()) logging.info("Installation failed because the tasks required under the terms of PostgreSQL 9.4") sys.exit(1) if 0 == ExitCodePostgres: logging.info('PostgreSQL 9.4 already installed') else: if 0 == ExitCodePostgresInstallRepo: logging.info("PostgreSQL 9.4 repo, already installed") else: ExitCodePostgresInstallRepo = call(['yum', 'install', postgresURL, '-y']) if 0 == ExitCodePostgresInstallRepo: ExitCodePostgresInstallPostgres = call(['yum', 'install', 'postgresql94', '-y']) if 0 != ExitCodePostgresInstallPostgres: ErrorMessage(ExitCodePostgresInstallPostgres) else: ErrorMessage(ExitCodePostgresInstallRepo) if "oracle" == install or "all" == install: if 0 == ExitCodeBasic: logging.info('oracle instantclient 12.1 basic already installed') else: ExitCodeBasicInstall = call(['yum', 'localinstall', '--nogpgcheck', '-y', '/opt/opflood/archive/oracle-instantclient12.1-basic-12.1.0.2.0-1.x86_64.rpm']) if 0 != ExitCodeBasicInstall: ErrorMessage(ExitCodeBasicInstall) if 0 == ExitCodeSqlplus: logging.info('oracle instantclient 12.1 sqlplus already installed') else: ExitCodeSqlplusInstall = call(['yum', 'localinstall', '--nogpgcheck', '-y', '/opt/opflood/archive/oracle-instantclient12.1-sqlplus-12.1.0.2.0-1.x86_64.rpm']) if 0 == ExitCodeSqlplusInstall: call(['cp', '/opt/opflood/archive/sqlplusenv.sh', '/etc/profile.d/sqlplusenv.sh']) logging.info('------------------------------------------------------------------------------------------') logging.info('| In order to successfully work with the Oracle database , you must supply a valid Hosts.|') logging.info('| Example /etc/hosts: |') logging.info('| 127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4 |') logging.info('| ::1 localhost localhost.localdomain localhost6 localhost6.localdomain6 |') logging.info('| 127.0.0.1 hostname |') logging.info('------------------------------------------------------------------------------------------') call(['su', '-c', 'source /etc/profile.d/sqlplusenv.sh'], shell=True, executable="/bin/bash") else: ErrorMessage(ExitCodeSqlplusInstall) except OSError, e: logging.info(e) sys.exit(1)
def install_tar(): """ :rtype : object """ release_tag = latest_github_tag() distro = check_distro() agent_tar_path = "/root/nova-agent/nova-agent-Linux-x86_64-%s.tar.gz" % release_tag nova_file = "nova-agent-Linux-x86_64-%s.tar.gz" % release_tag installer_path = "/root/nova-agent/" nova_agent__process_path = "/etc/init.d/nova-agent" nova_agent_path = "/usr/share/nova-agent/" nova_agent_bin_path = "/usr/sbin/nova-agent" agent_tar_path = "/root/nova-agent/%s" % nova_file installer_command = ".installer.sh" if (distro == 'FreeBSD'): nova_agent__process_path = "/etc/rc.d/nova-agent" nova_file = "nova-agent-FreeBSD-amd64-%s.tar.gz" % release_tag agent_tar_path = "/root/nova-agent/%s" % nova_file installer_command = "bash installer.sh" subprocess.call(["pkg_add", "-r", "bash"]) subprocess.call(["curl", "-LkO", "https://github.com/rackerlabs/openstack-guest-agents-unix/releases/download" "/v%s/nova-agent-FreeBSD-amd64-%s.tar.gz" % (release_tag, release_tag)]) else: subprocess.call(["curl", "-LkO", "https://github.com/rackerlabs/openstack-guest-agents-unix/releases/download" "/v%s/nova-agent-Linux-x86_64-%s.tar.gz" % (release_tag, release_tag)]) if os.path.exists(nova_agent__process_path): subprocess.call(["%s" % nova_agent__process_path, "stop"]) if os.path.exists(nova_agent_path): #shutil.move(nova_agent_path, "/tmp") subprocess.call(["rm", "-rf", "%s" % nova_agent_path]) if os.path.exists(nova_agent_bin_path): #shutil.move(nova_agent_path, "/tmp") subprocess.call(["rm", "-rf", "%s" % nova_agent_bin_path]) if not os.path.exists(installer_path): os.mkdir(installer_path) shutil.move(nova_file, installer_path) os.chdir(installer_path) subprocess.call(["tar", "-zvxf", "%s" % agent_tar_path]) time.sleep(10) if distro == 'FreeBSD': subprocess.call(["bash", "installer.sh"]) else: subprocess.call(["%s" % installer_command]) time.sleep(5) subprocess.call(["%s" % nova_agent__process_path, "start"]) subprocess.call(["rm", "-rf", "%s" % installer_path]) if 'redhat' in platform.dist(): if int(float(platform.dist()[1])) == 5: #if (set(['redhat', '5.6']).issubset(set(platform.dist()))): install_uuid()
def __init__(self):
    self.system = platform.system()
    self.distro = platform.dist()[0]
    self.distro_version = platform.dist()[1]
    self.host_name = socket.gethostname()
    self.ip_address = ni.ifaddresses('eth0')[2][0]['addr']
    self.mac_address = ni.ifaddresses('eth0')[17][0]['addr']
    self.users = UbuntuAuditor.get_all_users()
def get_rhel_version():
    default_rhel_version = "6"
    if platform.system() == 'Linux' and platform.dist()[0] == 'redhat':
        dist, release_str, name = platform.dist()
        release = release_str.split(".")[0]
        return release
    else:
        return default_rhel_version
def get_dist():
    if platform.dist()[0]:
        return platform.dist()[0].lower()
    elif platform.mac_ver()[0]:
        darwin_version = platform.mac_ver()[0].rsplit('.', 1)[0]
        return 'darwin%s' % darwin_version
    elif platform.win32_ver()[0]:
        return platform.win32_ver()[0].lower()
    return 'unknown'
def linuxType(self):
    """Uses various methods to determine Linux Type"""
    if platform.dist()[0] == self.rhel:
        return self.rhel
    # index 0 is the distro name; the original compared index 1 (the version string)
    elif platform.dist()[0] == self.ubu:
        return self.ubu
    else:
        return self.unknown_linux
def _versionchecklinux(package):
    if (platform.dist()[0] == "Ubuntu" or platform.dist()[0] == "Debian"):
        oldversion, msg = _versioncheckapt(package)
    # supported_dists is documented as a sequence of distro names, so pass a
    # one-element tuple rather than a bare string
    elif platform.dist(supported_dists=('boxeebox',))[0].lower() == "boxeebox":
        oldversion, msg = _versioncheckboxee(package)
    else:
        log("Unsupported platform %s" % platform.dist()[0])
        sys.exit(0)
    return oldversion, msg
def get_os(self):
    if sys.platform == "linux" or sys.platform == "linux2":
        return 'Linux-%s-%s' % (platform.dist()[0], platform.dist()[1])
    elif sys.platform == "darwin":
        return 'OSX-%s' % platform.mac_ver()[0]
    elif sys.platform == "win32":
        return platform.platform()
    else:
        return 'UNKNOWN'
def get_platform_dist():
    """
    :return: A human readable representation of platform.dist(), unknown if the module returned none / ''
    """
    if platform.dist() == ('', '', ''):
        return 'Unknown'
    return ' '.join(platform.dist())
def diagnostic_info():
    """Return diagnostic information as a string"""
    s = "BleachBit version %s" % bleachbit.APP_VERSION
    try:
        import gtk
        s += '\nGTK version %s' % '.'.join([str(x) for x in gtk.gtk_version])
    except:
        pass
    s += "\nlocal_cleaners_dir = %s" % bleachbit.local_cleaners_dir
    s += "\nlocale_dir = %s" % bleachbit.locale_dir
    s += "\noptions_dir = %s" % bleachbit.options_dir.decode(bleachbit.FSE)
    s += "\npersonal_cleaners_dir = %s" % bleachbit.personal_cleaners_dir.decode(bleachbit.FSE)
    s += "\nsystem_cleaners_dir = %s" % bleachbit.system_cleaners_dir
    s += "\nlocale.getdefaultlocale = %s" % str(locale.getdefaultlocale())
    if 'posix' == os.name:
        envs = ('DESKTOP_SESSION', 'LOGNAME', 'USER', 'SUDO_UID')
    if 'nt' == os.name:
        envs = ('APPDATA', 'LocalAppData', 'LocalAppDataLow', 'Music',
                'USERPROFILE', 'ProgramFiles', 'ProgramW6432', 'TMP')
    for env in envs:
        if os.getenv(env):
            s += "\nos.getenv('%s') = %s" % (env, os.getenv(env).decode(bleachbit.FSE))
        else:
            s += "\nos.getenv('%s') = %s" % (env, os.getenv(env))
    s += "\nos.path.expanduser('~') = %s" % bleachbit.expanduser('~').decode(bleachbit.FSE)
    if sys.platform.startswith('linux'):
        if hasattr(platform, 'linux_distribution'):
            s += "\nplatform.linux_distribution() = %s" % str(
                platform.linux_distribution())
        else:
            s += "\nplatform.dist() = %s" % str(platform.dist())
    # Mac OS X version names - dictionary "macosx_dict"
    macosx_dict = {'5': 'Leopard', '6': 'Snow Leopard', '7': 'Lion',
                   '8': 'Mountain Lion', '9': 'Mavericks', '10': 'Yosemite',
                   '11': 'El Capitan', '12': 'Sierra'}
    if sys.platform.startswith('darwin'):
        if hasattr(platform, 'mac_ver'):
            for key in macosx_dict:
                if platform.mac_ver()[0].split('.')[1] == key:
                    s += "\nplatform.mac_ver() = %s" % str(
                        platform.mac_ver()[0] + " (" + macosx_dict[key] + ")")
        else:
            s += "\nplatform.dist() = %s" % str(platform.dist())
    if 'nt' == os.name:
        s += "\nplatform.win32_ver[1]() = %s" % platform.win32_ver()[1]
    s += "\nplatform.platform = %s" % platform.platform()
    s += "\nplatform.version = %s" % platform.version()
    s += "\nsys.argv = %s" % sys.argv
    s += "\nsys.executable = %s" % sys.executable
    s += "\nsys.version = %s" % sys.version
    if 'nt' == os.name:
        s += "\nwin32com.shell.shell.IsUserAnAdmin() = %s" % shell.IsUserAnAdmin()
    s += "\n__file__ = %s" % __file__
    return s
def geisysteminfo():
    """"""
    print platform.system()
    print platform.version()
    print platform.architecture()
    print platform.node()
    print platform.java_ver()
    print platform.dist()
    print platform.python_version()
    print platform.win32_ver()
def installation_attributes(cloudify_agent, runner): if (not cloudify_agent.get('source_url') and not cloudify_agent.get('package_url')): if cloudify_agent['windows']: # no distribution difference in windows installation cloudify_agent['package_url'] = '{0}/packages/agents' \ '/cloudify-windows-agent.exe'\ .format(cloudify_utils.get_manager_file_server_url()) else: if not cloudify_agent.get('distro'): if cloudify_agent['local']: cloudify_agent['distro'] = platform.dist()[0].lower() elif cloudify_agent['remote_execution']: dist = runner.machine_distribution() cloudify_agent['distro'] = dist[0].lower() if not cloudify_agent.get('distro_codename'): if cloudify_agent['local']: cloudify_agent['distro_codename'] = platform.dist()[ 2].lower() elif cloudify_agent['remote_execution']: dist = runner.machine_distribution() cloudify_agent['distro_codename'] = dist[2].lower() if ('distro' in cloudify_agent and 'distro_codename' in cloudify_agent): cloudify_agent['package_url'] = '{0}/packages/agents' \ '/{1}-{2}-agent.tar.gz' \ .format(cloudify_utils.get_manager_file_server_url(), cloudify_agent['distro'], cloudify_agent['distro_codename']) if not cloudify_agent.get('basedir'): if cloudify_agent['local']: basedir = agent_utils.get_home_dir(cloudify_agent['user']) else: if cloudify_agent['windows']: # can't seem to figure out how to get the home_dir remotely # on windows. same was as fabric wont work because the # 'pwd' module does not exists in a windows python # installation. # TODO - maybe use some environment variables heuristics? basedir = \ agent_utils.get_windows_home_dir(cloudify_agent['user']) elif cloudify_agent['remote_execution']: basedir = runner.home_dir(cloudify_agent['user']) else: basedir = '~{0}'.format(cloudify_agent['user']) cloudify_agent['basedir'] = basedir directory_attributes(cloudify_agent)
def get_distro(self): '''It should be: DISTRIB_DESCRIPTION="UbuntuKylin 13.10''' #FILEPATH RELEASEPATH distro = "" if os.path.exists(RELEASEPATH): with open(RELEASEPATH, "r") as fsys: for line in fsys: if line.startswith("DISTRIB_DESCRIPTION"): tmp = line break # kobe: remove '"' and '\n' front = tmp.split('=')[1].replace('"', '').replace('\n', '') #(LP: #1240862) distro = front + '-' + platform.dist()[2] elif os.path.exists("/etc/os-release"): with open("/etc/os-release", "r") as fsys: for line in fsys: if line.startswith("PRETTY_NAME"): tmp = line break distro = tmp.split('=')[1].replace('"', '').replace('\n', '') else: a = platform.dist()[0] b = platform.dist()[1] c = platform.dist()[2] distro = '-'.join((a,b,c)) # if not os.path.exists(RELEASEPATH): # with open(FILEPATH, "r") as fsys: # for line in fsys: # if line.startswith("DISTRIB_DESCRIPTION"): # tmp = line # break # # kobe: remove '"' and '\n' # front = tmp.split('=')[1].replace('"', '').replace('\n', '') #(LP: #1240862) # if front.startswith("UbuntuKylin") or front.startswith("Ubuntu Kylin"): # distro = front + '-' + platform.dist()[2] # else: # a = platform.dist()[0] # b = platform.dist()[1] # c = platform.dist()[2] # distro = '-'.join((a,b,c)) # else: # with open(RELEASEPATH, "r") as fp: # for line in fp: # if line.startswith("DISTRIB_ID"): # tmp1 = line # elif line.startswith("DISTRIB_RELEASE"): # tmp2 = line # elif line.startswith("DISTRIB_CODENAME"): # tmp3 = line # # kobe: remove '"' and '\n' # id = tmp1.split('=')[1].replace('"', '').replace('\n', '') # release = tmp2.split('=')[1].replace('"', '').replace('\n', '') # codename = tmp3.split('=')[1].replace('"', '').replace('\n', '') # distro = '-'.join((id, release, codename)) return distro
def __init__(self): dist = platform.dist() self.user = getpass.getuser() self.file = False self.platform = {"system": platform.system(), "dist": (dist[0].lower(), re.search("[0-9]{1,10}", dist[1].lower()).group(0), dist[2].lower()), "release": platform.uname()[2], "uname": platform.uname()} self.minKernelVersion = { "ubuntu": "3.11.0-15-generic", "centos": "3.10.0-229.el7.x86_64" } self.genericInstallation = { "ubuntu": [ ['apt-get', 'update'], ["apt-get", "install", "-y", "apt-transport-https", "ca-certificates"], ["apt-key", "adv", "--keyserver", "hkp://p80.pool.sks-keyservers.net:80", "--recv-keys", "58118E89F3A912897C070ADBF76221572C52609D"], ["touch", "/etc/apt/sources.list.d/docker.list"] ], "centos": [ ["yum", "update","-y"] ] } self.dependenciesByVersion = { "ubuntu": { "16": { "commands": [ ["apt-get", "update"], ["apt-get", "install", "-y", "linux-image-extra-" + self.platform["release"]] ], "docker_file": "/etc/apt/sources.list.d/docker.list", "repo_content": ["deb https://apt.dockerproject.org/repo ubuntu-xenial main"] }, "14": { "commands": [ ["apt-get", "install", "-y", "apparmor"] ], "docker_file": "/etc/apt/sources.list.d/docker.list", "repo_content": ["deb https://apt.dockerproject.org/repo ubuntu-trusty main"] }, "12": { "commands": [["apt-get", "install", "-y", "apparmor"]], "min_kernel": "3.13", "update_kernel": [ ["apt-get", "install", "-y", "linux-image-generic-lts-trusty"] ], "docker_file": "/etc/apt/sources.list.d/docker.list", "repo_content": ["deb https://apt.dockerproject.org/repo ubuntu-precise main"] } }, "centos": { "7": { "docker_file": "/etc/yum.repos.d/docker.repo", "repo_content": [ "[dockerrepo]", "name=Docker Repository", "baseurl=https://yum.dockerproject.org/repo/main/centos/7/", "enabled=1", "gpgcheck=1", "gpgkey=https://yum.dockerproject.org/gpg" ] } } } self.afterDependenciesInstallation = { "ubuntu": [ ["apt-get", "update"], ["apt-get", "purge", "lxc-docker"], ["apt-cache", "policy", "docker-engine"], ["apt-get", "update"], ["apt-get", "install", "-y", "docker-engine"], ["service", "docker", "start"] ], "centos": [ ["yum", "update","-y"], ["yum", "install", "-y", "docker-engine"], ["service", "docker", "start"] ] } self.afterInstallationCommand = { "ubuntu": { "*": [["groupadd", "docker"], ["usermod", "-aG", "docker", self.user]] } }
def create_collectd_service(self): """ create a service for collectd installed :return: """ if self.os == "ubuntu" or self.os == "debian": print "found ubuntu ..." version = platform.dist()[1] print "ubuntu version: {0}".format(version) if version < "16.04": try: shutil.copyfile("/opt/collectd/init_scripts/ubuntu14.init", "/etc/init.d/collectd") except shutil.Error as err: print >> sys.stderr, err self._run_cmd("chmod +x /etc/init.d/collectd", shell=True) else: try: shutil.copyfile("/opt/collectd/init_scripts/ubuntu16.init", "/etc/systemd/system/collectd.service") if os.path.isfile( "/opt/collectd/init_scripts/collectd_default" ) and not os.path.isfile("/etc/default/collectd"): shutil.copyfile( "/opt/collectd/init_scripts/collectd_default", "/etc/default/collectd") except shutil.Error as err: print >> sys.stderr, err self._run_cmd("systemctl daemon-reload", shell=True, ignore_err=True) self._run_cmd("systemctl enable collectd", shell=True, ignore_err=True) elif self.os == "centos" or self.os == "redhat": print "found centos ..." version = platform.dist()[1] print "centos version: {0}".format(version) if version < "7.0": try: shutil.copyfile("/opt/collectd/init_scripts/centos6.init", "/etc/init.d/collectd") except shutil.Error as err: print >> sys.stderr, err self._run_cmd("chmod +x /etc/init.d/collectd", shell=True) else: try: shutil.copyfile("/opt/collectd/init_scripts/centos7.init", "/etc/systemd/system/collectd.service") if os.path.isfile( "/opt/collectd/init_scripts/collectd_default" ) and not os.path.isfile("/etc/default/collectd"): shutil.copyfile( "/opt/collectd/init_scripts/collectd_default", "/etc/default/collectd") except shutil.Error as err: print >> sys.stderr, err self._run_cmd("systemctl daemon-reload", shell=True, ignore_err=True) self._run_cmd("systemctl enable collectd", shell=True, ignore_err=True) print "terminate any old instance of collectd if available" self._run_cmd( "kill $(ps aux | grep -v grep | grep 'collectd' | awk '{print $2}')", shell=True, ignore_err=True)
def upload_file(self, command, pyversion, filename): schema, netloc, url, params, query, fragments = urlparse.urlparse( self.repository) if params or query or fragments: raise AssertionError('Incompatible url %s' % self.repository) if schema not in ('http', 'https'): raise AssertionError('unsupported schema ' + schema) if self.sign: gpg_args = ['gpg', '--detach-sign', '-a', filename] if self.identity: gpg_args[2:2] = ['--local-user', self.identity] spawn(gpg_args, dry_run=self.dry_run) f = open(filename, 'rb') try: content = f.read() finally: f.close() meta = self.distribution.metadata data = { ':action': 'file_upload', 'protcol_version': '1', 'name': meta.get_name(), 'version': meta.get_version(), 'content': (os.path.basename(filename), content), 'filetype': command, 'pyversion': pyversion, 'md5_digest': md5(content).hexdigest(), 'metadata_version': '1.0', 'summary': meta.get_description(), 'home_page': meta.get_url(), 'author': meta.get_contact(), 'author_email': meta.get_contact_email(), 'license': meta.get_licence(), 'description': meta.get_long_description(), 'keywords': meta.get_keywords(), 'platform': meta.get_platforms(), 'classifiers': meta.get_classifiers(), 'download_url': meta.get_download_url(), 'provides': meta.get_provides(), 'requires': meta.get_requires(), 'obsoletes': meta.get_obsoletes() } comment = '' if command == 'bdist_rpm': dist, version, id = platform.dist() if dist: comment = 'built for %s %s' % (dist, version) elif command == 'bdist_dumb': comment = 'built for %s' % platform.platform(terse=1) data['comment'] = comment if self.sign: data['gpg_signature'] = (os.path.basename(filename) + '.asc', open(filename + '.asc').read()) auth = 'Basic ' + standard_b64encode(self.username + ':' + self.password) boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' sep_boundary = '\n--' + boundary end_boundary = sep_boundary + '--' body = StringIO.StringIO() for key, value in data.items(): if not isinstance(value, list): value = [value] for value in value: if isinstance(value, tuple): fn = ';filename="%s"' % value[0] value = value[1] else: fn = '' body.write(sep_boundary) body.write('\nContent-Disposition: form-data; name="%s"' % key) body.write(fn) body.write('\n\n') body.write(value) if value and value[-1] == '\r': body.write('\n') body.write(end_boundary) body.write('\n') body = body.getvalue() self.announce('Submitting %s to %s' % (filename, self.repository), log.INFO) headers = { 'Content-type': 'multipart/form-data; boundary=%s' % boundary, 'Content-length': str(len(body)), 'Authorization': auth } request = Request(self.repository, data=body, headers=headers) try: result = urlopen(request) status = result.getcode() reason = result.msg if self.show_response: msg = '\n'.join(('-' * 75, r.read(), '-' * 75)) self.announce(msg, log.INFO) except socket.error as e: self.announce(str(e), log.ERROR) return except HTTPError as e: status = e.code reason = e.msg if status == 200: self.announce('Server response (%s): %s' % (status, reason), log.INFO) else: self.announce('Upload failed (%s): %s' % (status, reason), log.ERROR)
def setup(self, apps):
    distrib = platform.dist()[0].lower()
    version = platform.dist()[1].lower()
    arch = platform.processor()
    self.do_install(distrib, version, arch, apps)
def dist_ver():
    return platform.dist()[1]
def dist_name():
    return platform.dist()[0]
# A string used as a component of the pkg.depend.runpath value as a special
# token to determine where to insert the runpath that pkgdepend automatically
# generates.
PD_DEFAULT_RUNPATH = "$PKGDEPEND_RUNPATH"

# A String used for an action attribute to allow pkgdepend to bypass generation
# of dependencies against a given filename, eg. don't try to generate a
# dependency on dtracestubs from platform/i86pc/kernel/amd64/unix
PD_BYPASS_GENERATE = "pkg.depend.bypass-generate"

import platform
from . import util as os_util

osname = os_util.get_canonical_os_name()
ostype = os_util.get_canonical_os_type()
distro = platform.dist()[0].lower()

fragments = [distro, osname, ostype]
for fragment in fragments:
    modname = 'os_' + fragment
    # try the most-specific module name first (e.g. os_suse),
    # then try the more generic OS Name module (e.g. os_linux),
    # then the OS type module (e.g. os_unix)
    try:
        exec('from .{0} import *'.format(modname))
        break
    except ImportError:
        pass
else:
    raise ImportError("cannot find portable implementation class for os " +
def is_current_platform():
    return 'redhat' in platform.dist()
import argparse
import sys
import os
import re
import platform
from pyroute2 import IPRoute

from lib.config import Config
from lib.genesis import GEN_PATH
from lib.container import Container
import lib.logger as logger
from lib.utilities import sub_proc_exec, remove_line, get_netmask

IPR = IPRoute()
OPSYS = platform.dist()[0]
IFCFG_PATH = '/etc/sysconfig/network-scripts/'


def teardown_deployer_network(config_path=None):
    """Teardown the network elements on the deployer.
    This function is idempotent.
    """
    cfg = Config(config_path)
    global LOG
    LOG = logger.getlogger()
    LOG.debug('----------------------------------------')
    LOG.info('Teardown Docker networks')
    _remove_docker_networks(cfg)
    LOG.info('Teardown deployer management networks')
    dev_label = cfg.get_depl_netw_mgmt_device()
def get_libcarla_extensions(): include_dirs = ['dependencies/include'] library_dirs = ['dependencies/lib'] libraries = [] sources = ['source/libcarla/libcarla.cpp'] if os.name == "posix": if platform.dist()[0].lower() in ["ubuntu", "debian"]: pwd = os.path.dirname(os.path.realpath(__file__)) pylib = "libboost_python%d%d.a" % (sys.version_info.major, sys.version_info.minor) extra_link_args = [ os.path.join(pwd, 'dependencies/lib/libcarla_client.a'), os.path.join(pwd, 'dependencies/lib/librpc.a'), os.path.join(pwd, 'dependencies/lib/libboost_filesystem.a'), os.path.join(pwd, 'dependencies/lib', pylib) ] extra_compile_args = [ '-fPIC', '-std=c++14', '-Wno-missing-braces', '-DBOOST_ERROR_CODE_HEADER_ONLY', '-DLIBCARLA_WITH_PYTHON_SUPPORT', '-DLIBCARLA_ENABLE_LIFETIME_PROFILER', ] if 'TRAVIS' in os.environ and os.environ['TRAVIS'] == 'true': print('Travis CI build detected: disabling PNG support.') extra_link_args += ['-ljpeg', '-ltiff'] extra_compile_args += [ '-DLIBCARLA_IMAGE_WITH_PNG_SUPPORT=false' ] else: extra_link_args += ['-lpng', '-ljpeg', '-ltiff'] extra_compile_args += [ '-DLIBCARLA_IMAGE_WITH_PNG_SUPPORT=true' ] # @todo Why would we need this? include_dirs += ['/usr/lib/gcc/x86_64-linux-gnu/7/include'] library_dirs += ['/usr/lib/gcc/x86_64-linux-gnu/7'] extra_link_args += ['/usr/lib/gcc/x86_64-linux-gnu/7/libstdc++.a'] else: raise NotImplementedError elif os.name == "nt": sources += [x for x in walk('dependencies/include/carla', '*.cpp')] pwd = os.path.dirname(os.path.realpath(__file__)) pylib = "libboost_python%d%d-vc141-mt-x64-1_67.lib" % ( sys.version_info.major, sys.version_info.minor) extra_link_args = [ 'shlwapi.lib', os.path.join(pwd, 'dependencies/lib/rpc.lib'), os.path.join(pwd, 'dependencies/lib', pylib) ] # https://docs.microsoft.com/es-es/cpp/porting/modifying-winver-and-win32-winnt extra_compile_args = [ '/DBOOST_ALL_NO_LIB', '/DBOOST_PYTHON_STATIC_LIB', '/DBOOST_ERROR_CODE_HEADER_ONLY', '/D_WIN32_WINNT=0x0501', '/DLIBCARLA_WITH_PYTHON_SUPPORT' ] else: raise NotImplementedError def walk(folder, file_filter='*'): for root, _, filenames in os.walk(folder): for filename in fnmatch.filter(filenames, file_filter): yield os.path.join(root, filename) depends = [x for x in walk('source/libcarla')] depends += [x for x in walk('dependencies')] def make_extension(name, sources): return Extension(name, sources=sources, include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, language='c++14', depends=depends) print('compiling:\n - %s' % '\n - '.join(sources)) return [make_extension('carla.libcarla', sources)]
def test_dist_deprecation(self):
    with self.assertWarns(PendingDeprecationWarning) as cm:
        platform.dist()
    self.assertEqual(
        str(cm.warning),
        'dist() and linux_distribution() functions are '
        'deprecated in Python 3.5')
def is_redhat_centos_6_plus():
    import platform
    if platform.dist()[0] in ['redhat', 'centos'] and platform.dist()[1] > '6.0':
        return True
    return False
        (os.path.join(base_files, 'conf'), glob('conf/*.conf.*')),
        (os.path.join(base_files, 'collectors'), glob('conf/collectors/*')),
        (os.path.join(base_files, 'handlers'), glob('conf/handlers/*')),
    ]
    install_requires = [
        'ConfigObj',
        'psutil',
    ],
else:
    data_files = [
        ('share/diamond', ['LICENSE', 'README.md', 'version.txt']),
        ('share/diamond/user_scripts', []),
    ]
    distro = platform.dist(supported_dists=['system'])[0]
    distro_major_version = platform.dist(
        supported_dists=['system'])[1].split('.')[0]
    if running_under_virtualenv():
        data_files.append(('etc/diamond', glob('conf/*.conf.*')))
        data_files.append(
            ('etc/diamond/collectors', glob('conf/collectors/*')))
        data_files.append(('etc/diamond/handlers', glob('conf/handlers/*')))
    else:
        data_files.append(('/etc/diamond', glob('conf/*.conf.*')))
        data_files.append(
            ('/etc/diamond/collectors', glob('conf/collectors/*')))
        data_files.append(('/etc/diamond/handlers', glob('conf/handlers/*')))

    if distro == 'Ubuntu':
def detect_platform():
    base_mapping = {
        'gentoo base system': 'gentoo',
        'centos linux': 'centos',
        'mandriva linux': 'mandriva',
        'elementary os': 'ubuntu',
        'trisquel': 'ubuntu',
        'linaro': 'ubuntu',
        'linuxmint': 'ubuntu',
        'amazon': 'ubuntu',
        'redhat enterprise linux': 'rhel',
        'red hat enterprise linux server': 'rhel',
        'oracle linux server': 'rhel',
        'fedora': 'rhel',
        'olpc': 'rhel',
        'xo-system': 'rhel',
        'kali linux': 'debian',
    }

    platform_mapping = {
        'ubuntu': 'debian',
        'rhel': 'centos',
    }

    if hasattr(pyplatform, 'mac_ver') and pyplatform.mac_ver()[0] != '':
        return 'osx', 'osx'

    if pyplatform.system() != 'Linux':
        res = pyplatform.system().lower()
        return res, res

    dist = ''
    (maj, min, _) = pyplatform.python_version_tuple()
    maj = int(maj)
    min = int(min)
    if (maj * 10 + min) >= 36:
        import distro
        dist = distro.linux_distribution()[0].split()[0]
    elif (maj * 10 + min) >= 26:
        dist = pyplatform.linux_distribution()[0]
    else:
        dist = pyplatform.dist()[0]

    if dist == '':
        if os.path.exists('/etc/os-release'):
            release = open('/etc/os-release').read()
            if 'Arch Linux' in release:
                dist = 'arch'
    if dist == '':
        if os.path.exists('/etc/system-release'):
            release = open('/etc/system-release').read()
            if 'Amazon Linux AMI' in release:
                dist = 'centos'
    if dist == '':
        try:
            dist = subprocess.check_output(['strings', '-4', '/etc/issue'
                                            ]).split()[0].strip().decode()
        except subprocess.CalledProcessError as e:
            dist = 'unknown'

    res = dist.strip(' \'"\t\n\r').lower()
    if res in base_mapping:
        res = base_mapping[res]

    res_mapped = res
    if res in platform_mapping:
        res_mapped = platform_mapping[res]
    return res, res_mapped
def upload_file(self, command, pyversion, filename): # Makes sure the repository URL is compliant schema, netloc, url, params, query, fragments = \ urlparse.urlparse(self.repository) if params or query or fragments: raise AssertionError("Incompatible url %s" % self.repository) if schema not in ('http', 'https'): raise AssertionError("unsupported schema " + schema) # Sign if requested if self.sign: gpg_args = ["gpg", "--detach-sign", "-a", filename] if self.identity: gpg_args[2:2] = ["--local-user", self.identity] spawn(gpg_args, dry_run=self.dry_run) # Fill in the data - send all the meta-data in case we need to # register a new release f = open(filename, 'rb') try: content = f.read() finally: f.close() meta = self.distribution.metadata data = { # action ':action': 'file_upload', 'protcol_version': '1', # identify release 'name': meta.get_name(), 'version': meta.get_version(), # file content 'content': (os.path.basename(filename), content), 'filetype': command, 'pyversion': pyversion, 'md5_digest': md5(content).hexdigest(), # additional meta-data 'metadata_version': '1.0', 'summary': meta.get_description(), 'home_page': meta.get_url(), 'author': meta.get_contact(), 'author_email': meta.get_contact_email(), 'license': meta.get_licence(), 'description': meta.get_long_description(), 'keywords': meta.get_keywords(), 'platform': meta.get_platforms(), 'classifiers': meta.get_classifiers(), 'download_url': meta.get_download_url(), # PEP 314 'provides': meta.get_provides(), 'requires': meta.get_requires(), 'obsoletes': meta.get_obsoletes(), } comment = '' if command == 'bdist_rpm': dist, version, id = platform.dist() if dist: comment = 'built for %s %s' % (dist, version) elif command == 'bdist_dumb': comment = 'built for %s' % platform.platform(terse=1) data['comment'] = comment if self.sign: data['gpg_signature'] = (os.path.basename(filename) + ".asc", open(filename + ".asc").read()) # set up the authentication auth = "Basic " + standard_b64encode(self.username + ":" + self.password) # Build up the MIME payload for the POST data boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' sep_boundary = '\r\n--' + boundary end_boundary = sep_boundary + '--\r\n' body = StringIO.StringIO() for key, value in data.items(): # handle multiple entries for the same name if not isinstance(value, list): value = [value] for value in value: if isinstance(value, tuple): fn = ';filename="%s"' % value[0] value = value[1] else: fn = "" body.write(sep_boundary) body.write('\r\nContent-Disposition: form-data; name="%s"' % key) body.write(fn) body.write("\r\n\r\n") body.write(value) body.write(end_boundary) body = body.getvalue() self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO) # build the Request headers = { 'Content-type': 'multipart/form-data; boundary=%s' % boundary, 'Content-length': str(len(body)), 'Authorization': auth } request = Request(self.repository, data=body, headers=headers) # send the data try: result = urlopen(request) status = result.getcode() reason = result.msg if self.show_response: msg = '\n'.join(('-' * 75, result.read(), '-' * 75)) self.announce(msg, log.INFO) except socket.error, e: self.announce(str(e), log.ERROR) raise
api = {
    'latest': 'http://api.inpanel.org/?s=latest',
    'site_packages': 'http://api.inpanel.org/?s=site_packages',
    'download_package': 'http://api.inpanel.org/?s=site_packages&a=download'
}

system = None
distribution = None
distname = None
distversion = None
distarch = None

system = platform.system()
if hasattr(platform, 'linux_distribution'):
    dist = platform.linux_distribution()
else:
    dist = platform.dist()
distribution = dist[0]
distname = distribution.lower()
distversion = dist[1]
distversion = distversion[0:distversion.find('.', distversion.index('.') + 1)]
distarch = 'x86_64' if platform.machine() == 'x86_64' else 'i386'

__all__ = [
    'api', 'build', 'distname', 'distribution', 'distversion', 'distarch',
    'name', 'releasetime', 'version_info', 'version'
]

# distname = dist[0].lower()
# print(distname, platform_version, arch)
def create_configurator_service(self): """ create a service for collectd installed :return: """ print "create_configurator_Service started" print "OS is: {0}".format(self.os) if self.os == "ubuntu" or self.os == "debian": print "found ubuntu ..." version = platform.dist()[1] print "ubuntu version: {0}".format(version) if version < "16.04": try: shutil.copyfile( "/opt/configurator-exporter/init_scripts/configurator.conf", "/etc/init/configurator.conf") except shutil.Error as err: print >> sys.stderr, err # self._run_cmd("chmod +x /etc/init/configurator.conf", shell=True) else: try: shutil.copyfile( "/opt/configurator-exporter/init_scripts/configurator.service", "/etc/systemd/system/configurator.service") except shutil.Error as err: print >> sys.stderr, err self._run_cmd("systemctl daemon-reload", shell=True, ignore_err=True) self._run_cmd("systemctl enable configurator", shell=True, ignore_err=True) elif self.os == "centos" or self.os == "redhat": print "found centos ..." version = platform.dist()[1] print "centos version: {0}".format(version) if version < "7.0": try: shutil.copyfile( "/opt/configurator-exporter/init_scripts/configurator_centos6", "/etc/init.d/configurator") except shutil.Error as err: print >> sys.stderr, err self._run_cmd("chmod +x /etc/init.d/configurator", shell=True) else: try: shutil.copyfile( "/opt/configurator-exporter/init_scripts/configurator.service", "/etc/systemd/system/configurator.service") except shutil.Error as err: print >> sys.stderr, err self._run_cmd("systemctl daemon-reload", shell=True, ignore_err=True) self._run_cmd("systemctl enable configurator", shell=True, ignore_err=True) print "terminate any old instance of configurator if available" self._run_cmd( "kill $(ps aux | grep -v grep | grep 'api_server' | awk '{print $2}')", shell=True, ignore_err=True)
def is_current_platform():
    return 'fedora' in platform.dist()
def install_fluentd(self): """ install fluentd and start the service :return: """ distro, version, name = platform.dist() fluentd_file_name = "/tmp/install-fluentd.sh" if self.os == "ubuntu" or self.os == "debian": if self.os == "debian": cmd = "grep 'UBUNTU_CODENAME=' /etc/os-release" p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() name = out.split("UBUNTU_CODENAME=")[1].strip() print "install fluentd for ubuntu {0} {1}".format(version, name) fluentd_install_url_ubuntu = "https://toolbelt.treasuredata.com/sh/install-ubuntu-{0}-td-agent3.sh".format( name) # urllib.urlretrieve(fluentd_install_url_ubuntu.format(name), "/tmp/install-ubuntu-{0}-td-agent2.sh".format(name)) # self._run_cmd("sh /tmp/install-ubuntu-{0}-td-agent2.sh".format(name), shell=True) download_file(fluentd_install_url_ubuntu, fluentd_file_name, self.proxy) if self.proxy: self._add_proxy_for_curl_in_file(self.proxy, fluentd_file_name) self._run_cmd("sh {0}".format(fluentd_file_name), shell=True) self._run_cmd("echo '*.* @127.0.0.1:42186' >> /etc/rsyslog.conf", shell=True, ignore_err=True) self._run_cmd("sudo apt install -y build-essential", shell=True) self._run_cmd("sudo apt install -y automake autoconf libtool", shell=True) self._run_cmd("sudo apt install -y libgeoip-dev", shell=True) elif self.os in ["centos", "redhat"]: print "install fluentd for centos/redhat {0} {1}".format( version, name) # fluentd_install_url_centos = "https://toolbelt.treasuredata.com/sh/install-redhat-td-agent2.sh" fluentd_install_url_centos = "https://toolbelt.treasuredata.com/sh/install-redhat-td-agent3.sh" # urllib.urlretrieve(fluentd_install_url_centos, "/tmp/install-redhat-td-agent2.sh") # download_file(fluentd_install_url_centos, fluentd_file_name, self.proxy) if self.proxy: self._add_proxy_for_curl_in_file(self.proxy, fluentd_file_name) self._add_proxy_for_rpm_in_file(self.proxy, fluentd_file_name) self._run_cmd("sh {0}".format(fluentd_file_name), shell=True) self._run_cmd('sudo yum groupinstall -y "Development Tools"', shell=True) # self._run_cmd("sudo yum install -y geoip-devel", shell=True) self._run_cmd( "sudo yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm", shell=True, ignore_err=True) self._run_cmd( "sudo yum install -y https://rpmfind.net/linux/centos/7.7.1908/os/x86_64/Packages/GeoIP-devel-1.5.0-14.el7.x86_64.rpm", shell=True, ignore_err=True) """ self._run_cmd("yes | cp ./td-agent.conf /opt/td-agent/etc/td-agent/", shell=True) self._run_cmd("yes | cp ./td-agent.conf /etc/td-agent/", shell=True) """ self._run_cmd("sed -i '/port 8888/s/^/#/' /etc/td-agent/td-agent.conf", shell=True) self._run_cmd( "sed -i '/port 8888/s/^/#/' /opt/td-agent/etc/td-agent/td-agent.conf", shell=True) cmd = "usermod -a -G adm td-agent" print "Adding user td-agent to the group adm" self._run_cmd(cmd, ignore_err=True, shell=True) print "Install fluentd gems..." print "Install fluentd fluent-plugin-elasticsearch..." self._run_cmd( "/usr/sbin/td-agent-gem install fluent-plugin-elasticsearch", shell=True) print "Install fluentd fluent-plugin-multi-format-parser..." self._run_cmd( "/usr/sbin/td-agent-gem install fluent-plugin-multi-format-parser", shell=True) print "Install fluentd fluentd-plugin-geoip..." self._run_cmd("/usr/sbin/td-agent-gem install fluent-plugin-geoip", shell=True)
def osname():
    osstr = ""
    for i in platform.dist():
        if i:
            osstr += i + " "
    return osstr + "(" + subprocess.check_output(["uname", "-o"]).replace("\n", "").replace("\r", "") + ")"
def upload_file(self, command, pyversion, filename): # Sign if requested if self.sign: gpg_args = ["gpg", "--detach-sign", "-a", filename] if self.identity: gpg_args[2:2] = ["--local-user", self.identity] spawn(gpg_args, dry_run=self.dry_run) # Fill in the data - send all the meta-data in case we need to # register a new release content = open(filename,'rb').read() meta = self.distribution.metadata data = { # action ':action': 'file_upload', 'protcol_version': '1', # identify release 'name': meta.get_name(), 'version': meta.get_version(), # file content 'content': (os.path.basename(filename),content), 'filetype': command, 'pyversion': pyversion, 'md5_digest': md5(content).hexdigest(), # additional meta-data 'metadata_version' : '1.0', 'summary': meta.get_description(), 'home_page': meta.get_url(), 'author': meta.get_contact(), 'author_email': meta.get_contact_email(), 'license': meta.get_licence(), 'description': meta.get_long_description(), 'keywords': meta.get_keywords(), 'platform': meta.get_platforms(), 'classifiers': meta.get_classifiers(), 'download_url': meta.get_download_url(), # PEP 314 'provides': meta.get_provides(), 'requires': meta.get_requires(), 'obsoletes': meta.get_obsoletes(), } comment = '' if command == 'bdist_rpm': dist, version, id = platform.dist() if dist: comment = 'built for %s %s' % (dist, version) elif command == 'bdist_dumb': comment = 'built for %s' % platform.platform(terse=1) data['comment'] = comment if self.sign: data['gpg_signature'] = (os.path.basename(filename) + ".asc", open(filename+".asc").read()) # set up the authentication auth = "Basic " + standard_b64encode(self.username + ":" + self.password) # Build up the MIME payload for the POST data boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' sep_boundary = '\n--' + boundary end_boundary = sep_boundary + '--' body = StringIO.StringIO() for key, value in data.items(): # handle multiple entries for the same name if type(value) != type([]): value = [value] for value in value: if type(value) is tuple: fn = ';filename="%s"' % value[0] value = value[1] else: fn = "" body.write(sep_boundary) body.write('\nContent-Disposition: form-data; name="%s"'%key) body.write(fn) body.write("\n\n") body.write(value) if value and value[-1] == '\r': body.write('\n') # write an extra newline (lurve Macs) body.write(end_boundary) body.write("\n") body = body.getvalue() self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO) # build the Request # We can't use urllib2 since we need to send the Basic # auth right with the first request schema, netloc, url, params, query, fragments = \ urlparse.urlparse(self.repository) assert not params and not query and not fragments if schema == 'http': http = httplib.HTTPConnection(netloc) elif schema == 'https': http = httplib.HTTPSConnection(netloc) else: raise AssertionError, "unsupported schema "+schema data = '' loglevel = log.INFO try: http.connect() http.putrequest("POST", url) http.putheader('Content-type', 'multipart/form-data; boundary=%s'%boundary) http.putheader('Content-length', str(len(body))) http.putheader('Authorization', auth) http.endheaders() http.send(body) except socket.error, e: self.announce(str(e), log.ERROR) return
def get_os_version():
    '''format: ('Linux', 'centos', '6.7', '64bit')'''
    return (platform.system(), platform.dist()[0], platform.dist()[1],
            platform.architecture()[0])
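
# Hedged equivalent of get_os_version() built on the third-party 'distro'
# package (pip install distro), the usual stand-in for platform.dist() on
# Python 3.8+. The function name is an assumption; note that distro.id()
# returns lowercase identifiers ('centos', 'ubuntu'), which may differ in
# casing from what platform.dist() used to return.
import platform

import distro  # assumption: the 'distro' package is installed


def get_os_version_distro():
    '''format: ('Linux', 'centos', '6.7', '64bit')'''
    return (platform.system(), distro.id(), distro.version(),
            platform.architecture()[0])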
import os
import glob
import cPickle
import numpy as np
import random
import math
import platform

plat = platform.dist()[0]
if plat == "Ubuntu":
    base = "/home/supasorn/"
else:
    base = "/projects/grail/supasorn2nb/"


class DataLoader():
    def __init__(self, dim=3, batch_size=50, seq_length=300, reprocess=0):
        self.data_dir = "./data"
        self.pose_dir = base + "/face-singleview/data/Obama2/"

        self.dim = dim
        self.batch_size = batch_size
        self.seq_length = seq_length

        data_file = os.path.join(self.data_dir, "training.cpkl")
        if not (os.path.exists(data_file)) or reprocess:
            print "creating training data cpkl file from raw source"
            self.preprocess(self.pose_dir, data_file)

        self.load_preprocessed(data_file)
def upload_file(self, command, pyversion, filename):
    # Makes sure the repository URL is compliant
    schema, netloc, url, params, query, fragments = \
        urlparse(self.repository)
    if params or query or fragments:
        raise AssertionError("Incompatible url %s" % self.repository)

    if schema not in ('http', 'https'):
        raise AssertionError("unsupported schema " + schema)

    # Sign if requested
    if self.sign:
        gpg_args = ["gpg", "--detach-sign", "-a", filename]
        if self.identity:
            gpg_args[2:2] = ["--local-user", self.identity]
        spawn(gpg_args,
              dry_run=self.dry_run)

    # Fill in the data - send all the meta-data in case we need to
    # register a new release
    f = open(filename,'rb')
    try:
        content = f.read()
    finally:
        f.close()
    meta = self.distribution.metadata
    data = {
        # action
        ':action': 'file_upload',
        'protcol_version': '1',

        # identify release
        'name': meta.get_name(),
        'version': meta.get_version(),

        # file content
        'content': (os.path.basename(filename),content),
        'filetype': command,
        'pyversion': pyversion,
        'md5_digest': md5(content).hexdigest(),

        # additional meta-data
        'metadata_version' : '1.0',
        'summary': meta.get_description(),
        'home_page': meta.get_url(),
        'author': meta.get_contact(),
        'author_email': meta.get_contact_email(),
        'license': meta.get_licence(),
        'description': meta.get_long_description(),
        'keywords': meta.get_keywords(),
        'platform': meta.get_platforms(),
        'classifiers': meta.get_classifiers(),
        'download_url': meta.get_download_url(),
        # PEP 314
        'provides': meta.get_provides(),
        'requires': meta.get_requires(),
        'obsoletes': meta.get_obsoletes(),
        }
    comment = ''
    if command == 'bdist_rpm':
        dist, version, id = platform.dist()
        if dist:
            comment = 'built for %s %s' % (dist, version)
    elif command == 'bdist_dumb':
        comment = 'built for %s' % platform.platform(terse=1)
    data['comment'] = comment

    if self.sign:
        data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                 open(filename+".asc", "rb").read())

    # set up the authentication
    user_pass = (self.username + ":" + self.password).encode('ascii')
    # The exact encoding of the authentication string is debated.
    # Anyway PyPI only accepts ascii for both username or password.
    auth = "Basic " + standard_b64encode(user_pass).decode('ascii')

    # Build up the MIME payload for the POST data
    boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
    sep_boundary = b'\n--' + boundary.encode('ascii')
    end_boundary = sep_boundary + b'--'
    body = io.BytesIO()
    for key, value in data.items():
        title = '\nContent-Disposition: form-data; name="%s"' % key
        # handle multiple entries for the same name
        if type(value) != type([]):
            value = [value]
        for value in value:
            if type(value) is tuple:
                title += '; filename="%s"' % value[0]
                value = value[1]
            else:
                value = str(value).encode('utf-8')
            body.write(sep_boundary)
            body.write(title.encode('utf-8'))
            body.write(b"\n\n")
            body.write(value)
            if value and value[-1:] == b'\r':
                body.write(b'\n')  # write an extra newline (lurve Macs)
    body.write(end_boundary)
    body.write(b"\n")
    body = body.getvalue()

    self.announce("Submitting %s to %s" % (filename, self.repository),
                  log.INFO)

    # build the Request
    headers = {'Content-type':
                   'multipart/form-data; boundary=%s' % boundary,
               'Content-length': str(len(body)),
               'Authorization': auth}
    request = Request(self.repository, data=body,
                      headers=headers)
    # send the data
    try:
        result = urlopen(request)
        status = result.getcode()
        reason = result.msg
    except OSError as e:
        self.announce(str(e), log.ERROR)
        return
    except HTTPError as e:
        status = e.code
        reason = e.msg

    if status == 200:
        self.announce('Server response (%s): %s' % (status, reason),
                      log.INFO)
    else:
        self.announce('Upload failed (%s): %s' % (status, reason),
                      log.ERROR)
    if self.show_response:
        text = self._read_pypi_response(result)
        msg = '\n'.join(('-' * 75, text, '-' * 75))
        self.announce(msg, log.INFO)
def build():
    from sys import maxsize
    from os import environ
    environ = environ.copy()
    buildout_file = 'buildout-build.cfg'
    if system() == 'Linux':
        from platform import dist, linux_distribution
        _, version, distid = linux_distribution()
        dist_name = dist()[0].lower()
        if dist_name == 'ubuntu':
            if version == '16.04':
                buildout_file = 'buildout-build-ubuntu-16.04.cfg'
            else:
                buildout_file = 'buildout-build-ubuntu.cfg'
        if dist_name in ['redhat', 'centos']:
            if maxsize > 2**32:
                arch = execute_assert_success(["uname", "-i"]).get_stdout().lower()
                if 'ppc64le' in arch:
                    buildout_file = 'buildout-build-redhat-ppc64le.cfg'
                elif 'ppc64' in arch:
                    buildout_file = 'buildout-build-redhat-ppc64.cfg'
                else:
                    if version.startswith('4.'):
                        buildout_file = 'buildout-build-redhat-4-64bit.cfg'
                    else:
                        buildout_file = 'buildout-build-redhat-64bit.cfg'
            else:
                if version.startswith('4.'):
                    buildout_file = 'buildout-build-redhat-4-32bit.cfg'
                else:
                    buildout_file = 'buildout-build-redhat-32bit.cfg'
        if dist_name in ['suse']:
            if version in ['10']:
                buildout_file = 'buildout-build-suse-10.cfg'
            else:
                arch = execute_assert_success(["uname", "-i"]).get_stdout().lower()
                if 'ppc64le' in arch:
                    buildout_file = 'buildout-build-suse-ppc64le.cfg'
                elif 'ppc64' in arch:
                    buildout_file = 'buildout-build-suse-ppc64.cfg'
    elif system() == 'Darwin':
        from platform import mac_ver
        environ["MACOSX_DEPLOYMENT_TARGET"] = '.'.join(mac_ver()[0].split('.', 2)[:2])
        gcc_version = execute_assert_success(["gcc", "--version"]).get_stdout()
        if 'version 5.' in gcc_version:
            buildout_file = 'buildout-build-osx-xcode-5.cfg'
        elif 'version 6.' in gcc_version:
            buildout_file = 'buildout-build-osx-xcode-6.cfg'
        elif 'version 7.' in gcc_version:
            buildout_file = 'buildout-build-osx-xcode-7.cfg'
        elif 'version 8.' in gcc_version:
            buildout_file = 'buildout-build-osx-xcode-8.cfg'
        elif 'version 9.' in gcc_version:
            buildout_file = 'buildout-build-osx-xcode-8.cfg'
        else:
            buildout_file = 'buildout-build-osx.cfg'
    elif system() == 'Windows':
        if maxsize > 2**32:
            buildout_file = 'buildout-build-windows-64bit.cfg'
        else:
            buildout_file = 'buildout-build-windows.cfg'
    elif system() == "SunOS":
        if 'sparc' in execute_assert_success(["isainfo"]).get_stdout().lower():
            buildout_file = 'buildout-build-solaris-sparc.cfg'
        elif '64' in execute_assert_success(["isainfo", "-b"]).get_stdout():
            buildout_file = 'buildout-build-solaris-64bit.cfg'
        else:
            pass  # TODO support 32 bit
    elif system() == "AIX":
        from os import uname
        aix_version = "{0[3]}.{0[2]}".format(uname())
        if aix_version == "7.1":
            buildout_file = 'buildout-build-aix.cfg'
        elif aix_version == "7.2":
            buildout_file = 'buildout-build-aix-7.2.cfg'
    execte_buildout(buildout_file, environ)
def is_centos():
    """test if the platform is centos"""
    (dist, version, release) = platform.dist()
    if dist == "centos" and version not in ["6.5"]:
        print("WARNING: %s %s is not tested" % (dist, version))
    return dist == "centos"
import os, sys, argparse, glob, platform
from collections import OrderedDict

if platform.system() == 'Darwin':
    print(platform.dist())
    root = '/Volumes/Transcend/dataset/sintel2/'
    dest_root_folder = './split_scene'
    caffe_root = 'home/lwp/workspace/direct-intrinsics/modified_caffe/caffe'
    pretrained_model = '/home/lwp/workspace/caffe_model/vgg16.caffemodel'
    template_root = '/Users/albertxavier/Box Sync/Graduation Project/graduation-project/caffe/utils/example_folder/template/'
elif platform.dist()[0] == 'Ubuntu' and platform.dist()[1] == '14.04':  # zyypc
    print(platform.dist())
    root = '/home/lwp/workspace/sintel2'
    dest_root_folder = '/home/lwp/workspace/direct-intrinsics/training/split_scene_final'
    caffe_root = '/home/lwp/workspace/direct-intrinsics/modified_caffe/caffe'
    pretrained_model = '/home/lwp/workspace/caffe_model/vgg16.caffemodel'
    template_root = '/home/lwp/workspace/graduation-project/caffe/utils/example_folder/template'
elif platform.dist()[0] == 'debian':
    print(platform.dist())
    root = '/home/albertxavier/dataset/sintel2'
    dest_root_folder = '/home/albertxavier/workspace/direct-intrinsics/training/split_scene_final'
    caffe_root = '/home/albertxavier/workspace/direct-intrinsics/modified_caffe/caffe'
    pretrained_model = '/home/albertxavier/caffe_model/vgg16.caffemodel'
    template_root = '/home/albertxavier/workspace/graduation-project/caffe/utils/example_folder/template'

all_scenes = glob.glob(os.path.join(root, 'clean/*'))
for i, s in enumerate(all_scenes):
    s = s.split('/')
    all_scenes[i] = s[-1]

training_scenes = []
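
# Hypothetical refactor of the branch chain above: since every branch pins
# paths for one specific machine, a lookup keyed on the hostname avoids the
# OS-detection calls altogether. The hostnames used as keys are placeholders
# (assumptions); only the path values come from the script above.
import socket

MACHINE_ROOTS = {
    'zyypc': '/home/lwp/workspace/sintel2',
    'albertxavier-debian': '/home/albertxavier/dataset/sintel2',  # placeholder hostname
}

root = MACHINE_ROOTS.get(socket.gethostname(), '/Volumes/Transcend/dataset/sintel2/')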
def is_ubuntu():
    """test if the platform is ubuntu"""
    (dist, version, release) = platform.dist()
    if dist == 'Ubuntu' and version not in ["14.04"]:
        print("ERROR: %s %s is not tested" % (dist, version))
    return dist == 'Ubuntu'
def get_distro(self):
    return platform.dist()[0].replace('\"', '')
def CheckArchitecture(self):
    # Fill with Python info
    import sys
    self.archi_info.python_version = sys.version.replace('\n', '')

    # Fill with Platform info
    import platform
    self.archi_info.platform = platform.system()
    self.archi_info.release = platform.release()

    # Fill with number of cores
    import multiprocessing
    self.archi_info.ncores = multiprocessing.cpu_count()

    # Is Mac
    platform_text = "Platform: " + self.archi_info.platform + " " + self.archi_info.release + " "
    if self.archi_info.platform.lower() in ['darwin', 'mac', 'macosx']:
        self.archi_info.isMac = True
        platform_text += '\x1b[32m' + '[MAC/OSX mode]' + '\x1b[0m'
    else:
        self.archi_info.isMac = False
        platform_text += '\x1b[32m' + '[Linux mode]' + '\x1b[0m'
    self.logger.info(platform_text)

    # Info for debug mode
    if self.debug:

        # Machine general
        import platform
        self.logger.debug("")
        self.logger.debug("Machine - Cross platform information")
        self.logger.debug(
            StringTools.Left(" Machine type: ", 28) + str(platform.machine()))
        self.logger.debug(
            StringTools.Left(" Processor name: ", 28) + str(platform.processor()))
        self.logger.debug(
            StringTools.Left(" Platform: ", 28) + str(platform.platform()))
        self.logger.debug(
            StringTools.Left(" Platform release: ", 28) + str(platform.release()))
        self.logger.debug(
            StringTools.Left(" System: ", 28) + str(platform.system()))
        self.logger.debug(
            StringTools.Left(" Node: ", 28) + str(platform.node()))
        self.logger.debug(
            StringTools.Left(" Number of cores: ", 28) + str(self.archi_info.ncores))
        self.logger.debug("")

        # Machine OS
        self.logger.debug("Machine - OS-specific information")
        try:
            tmp = platform.java_ver()
        except:
            tmp = ''
        self.logger.debug(
            StringTools.Left(" Java version: ", 28) + str(tmp))
        try:
            tmp = platform.win32_ver()
        except:
            tmp = ''
        self.logger.debug(
            StringTools.Left(" Windows version: ", 28) + str(tmp))
        try:
            tmp = platform.mac_ver()
        except:
            tmp = ''
        self.logger.debug(
            StringTools.Left(" Mac Os version: ", 28) + str(tmp))
        try:
            tmp = platform.dist()
        except:
            tmp = ''
        self.logger.debug(
            StringTools.Left(" Unix distribution:", 28) + str(tmp))
        self.logger.debug("")

    return True
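
# Hedged, self-contained sketch of the OS-specific probe block above, with the
# repeated per-call try/except collapsed into a loop. It uses the standard
# logging module instead of the class's self.logger / StringTools helpers,
# which are not available outside CheckArchitecture(); the label widths are an
# assumption chosen to roughly match the original alignment.
import logging
import platform

logger = logging.getLogger(__name__)

for label, probe in [("Java version", platform.java_ver),
                     ("Windows version", platform.win32_ver),
                     ("Mac Os version", platform.mac_ver)]:
    try:
        value = probe()
    except Exception:
        value = ''
    logger.debug("  %-26s %s", label + ":", value)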