def run(self):
    if not self.source_dir:
        log.error('no source directory specified')
        return
    if not self.lang:
        log.error('no language code specified')
        return
    build_dir = os.path.join(self.build_dir, self.lang)
    self.mkpath(build_dir)
    for name in os.listdir(self.source_dir):
        base, ext = os.path.splitext(name)
        if ext.lower() != '.pot':
            continue
        src = os.path.join(self.source_dir, name)
        # Strip the trailing 't' of '.pot' to get the '.po' destination name.
        dest = os.path.join(build_dir, name[:-1])
        if os.path.exists(dest):
            command = 'msgmerge'
            options = ['--no-wrap', '--update', dest, src]
        else:
            command = 'msginit'
            options = [
                '--no-wrap',
                '--locale=%s' % self.lang,
                '--input=%s' % src,
                '--output-file=%s' % dest]
        log.info('generating %s', dest)
        if not self.dry_run:
            subprocess.check_call([command] + options)
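A command like this only takes effect once it is registered with setup(). A minimal sketch of the wiring, assuming the method above belongs to a distutils command class named build_trans (the class and package names are assumptions, not taken from the snippet):

# Hypothetical registration; the real class/option names come from the
# surrounding setup.py.
from setuptools import setup

setup(
    name='example-package',
    cmdclass={'build_trans': build_trans},  # assumed class name
)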
def run(self):
    has_npm = self.has_npm()
    if not has_npm:
        log.error("`npm` unavailable. If you're running this command using sudo, make sure `npm` is available to sudo")

    env = os.environ.copy()
    env['PATH'] = npm_path

    if self.should_run_npm_install():
        log.info("Installing build dependencies with npm. This may take a while...")
        # remove just jupyterlab so that it is always updated
        shutil.rmtree(os.path.join(self.node_modules, 'jupyterlab'),
                      ignore_errors=True)
        check_call(['npm', 'install'], cwd=node_root,
                   stdout=sys.stdout, stderr=sys.stderr)
        check_call(['npm', 'run', 'build'], cwd=node_root,
                   stdout=sys.stdout, stderr=sys.stderr)
        os.utime(self.node_modules, None)

    for t in self.targets:
        if not os.path.exists(t):
            msg = 'Missing file: %s' % t
            if not has_npm:
                msg += '\nnpm is required to build a development version of widgetsnbextension'
            raise ValueError(msg)

    # update package data in case this created new files
    update_package_data(self.distribution)
def run(self):
    # Let's update the bootstrap-script to the version defined to be
    # distributed. See BOOTSTRAP_SCRIPT_DISTRIBUTED_VERSION above.
    url = (
        'https://github.com/saltstack/salt-bootstrap/raw/{0}'
        '/bootstrap-salt.sh'.format(
            BOOTSTRAP_SCRIPT_DISTRIBUTED_VERSION
        )
    )
    req = urllib2.urlopen(url)
    deploy_path = os.path.join(
        SALTCLOUD_SOURCE_DIR, 'saltcloud', 'deploy', 'bootstrap-salt.sh'
    )
    if req.getcode() == 200:
        try:
            log.info(
                'Updating bootstrap-salt.sh.'
                '\n\tSource: {0}'
                '\n\tDestination: {1}'.format(url, deploy_path)
            )
            with open(deploy_path, 'w') as fp_:
                fp_.write(req.read())
        except (OSError, IOError), err:
            log.error(
                'Failed to write the updated script: {0}'.format(err)
            )
def run(self):
    jsdeps = self.distribution.get_command_obj('jsdeps')
    if not is_repo and all(os.path.exists(t) for t in jsdeps.targets):
        # sdist, nothing to do
        command.run(self)
        return

    try:
        self.distribution.run_command('jsdeps')
    except Exception as e:
        missing = [t for t in jsdeps.targets if not os.path.exists(t)]
        if strict or missing:
            log.warn('rebuilding js and css failed')
            if missing:
                log.error('missing files: %s' % missing)
            # HACK: Allow users who can't build the JS to still install vispy
            if not is_repo:
                raise e
            log.warn('WARNING: continuing installation WITHOUT nbextension javascript')
            # remove JS files from data_files so setuptools doesn't try to copy
            # non-existent files
            self.distribution.data_files = [
                x for x in self.distribution.data_files
                if 'jupyter' not in x[0]]
        else:
            log.warn('rebuilding js and css failed (not a problem)')
            log.warn(str(e))
    command.run(self)
    update_package_data(self.distribution)
def run(self):
    has_npm = self.has_npm()
    if not has_npm:
        log.error("`npm` unavailable. If you're running this command "
                  "using sudo, make sure `npm` is available to sudo")

    env = os.environ.copy()
    env['PATH'] = npm_path

    if self.should_run_npm_install():
        log.info("Installing build dependencies with npm. This may take "
                 "a while...")
        npmName = self.get_npm_name()
        check_call([npmName, 'install'], cwd=node_root,
                   stdout=sys.stdout, stderr=sys.stderr)
        os.utime(self.node_modules, None)

    for t in self.targets:
        if not os.path.exists(t):
            msg = 'Missing file: %s' % t
            if not has_npm:
                msg += ('\nnpm is required to build a development '
                        'version of a widget extension')
            raise ValueError(msg)

    # update package data in case this created new files
    update_package_data(self.distribution)
def run(self):
    has_npm = self.has_npm()
    if not has_npm:
        log.error("`npm` unavailable. If you're running this command using sudo, make sure `npm` is available to sudo")

    env = os.environ.copy()
    env['PATH'] = npm_path

    if self.should_run_npm_install():
        log.info("Installing build dependencies with npm. This may take a while...")
        check_call(['npm', 'install'], cwd=node_root,
                   stdout=sys.stdout, stderr=sys.stderr)
        os.utime(self.node_modules, None)

    if self.should_run_npm_pack():
        check_call(['npm', 'pack', node_root], cwd=pjoin(here, 'ipympl'),
                   stdout=sys.stdout, stderr=sys.stderr)
        files = glob.glob(tar_path)
        self.targets.append(tar_path if not files else files[0])

    for t in self.targets:
        if not os.path.exists(t):
            msg = 'Missing file: %s' % t
            if not has_npm:
                msg += '\nnpm is required to build a development version of widgetsnbextension'
            raise ValueError(msg)

    self.distribution.data_files = get_data_files()

    # update package data in case this created new files
    update_package_data(self.distribution)
def run(self):
    build.run(self)

    if self.distribution.without_gettext:
        return

    for po in glob.glob(os.path.join(PO_DIR, '*.po')):
        lang = os.path.basename(po[:-3])
        mo = os.path.join(MO_DIR, lang, 'terminator.mo')

        directory = os.path.dirname(mo)
        if not os.path.exists(directory):
            info('creating %s' % directory)
            os.makedirs(directory)

        if newer(po, mo):
            info('compiling %s -> %s' % (po, mo))
            try:
                rc = subprocess.call(['msgfmt', '-o', mo, po])
                if rc != 0:
                    raise Warning("msgfmt returned %d" % rc)
            except Exception as e:
                error("Building gettext files failed. Try setup.py --without-gettext [build|install]")
                error("Error: %s" % str(e))
                sys.exit(1)

    TOP_BUILDDIR = '.'
    INTLTOOL_MERGE = 'intltool-merge'
    desktop_in = 'data/terminator.desktop.in'
    desktop_data = 'data/terminator.desktop'
    # note: the variable gettext honours is LC_ALL (the original said C_ALL)
    os.system("LC_ALL=C " + INTLTOOL_MERGE + " -d -u -c " + TOP_BUILDDIR +
              "/po/.intltool-merge-cache " + TOP_BUILDDIR + "/po " +
              desktop_in + " " + desktop_data)
def _finalize_protobuf(self):
    if not os.path.isdir(self.with_protobuf_lib_dir):
        log.error("Protobuf library dir should be a directory")
        sys.exit(1)

    if not os.path.isdir(self.with_protobuf_include_dir):
        log.error("Protobuf include dir should be a directory")
        sys.exit(1)

    if not os.path.exists(self.protobuf_lib):
        os.makedirs(self.protobuf_lib)

    if not os.path.exists(self.protobuf_include):
        os.makedirs(self.protobuf_include)

    log.info("Copying Protobuf libraries")
    lib_files = glob(os.path.join(self.with_protobuf_lib_dir, "libprotobuf*"))
    for lib_file in lib_files:
        if os.path.isfile(lib_file):
            log.info("copying {0} -> {1}".format(lib_file, self.protobuf_lib))
            shutil.copy2(lib_file, self.protobuf_lib)

    log.info("Copying Protobuf header files")
    copy_tree(self.with_protobuf_include_dir, self.protobuf_include)

    # Remove all but static libraries to force static linking
    if os.name == "posix":
        log.info("Removing non-static Protobuf libraries from {0}"
                 "".format(self.protobuf_lib))
        for lib_file in os.listdir(self.protobuf_lib):
            lib_file_path = os.path.join(self.protobuf_lib, lib_file)
            if os.path.isfile(lib_file_path) and not lib_file.endswith(".a"):
                os.unlink(lib_file_path)
def package_maven():
    """Run the Maven package lifecycle."""
    if not os.getenv('JAVA_HOME'):
        # make sure Maven uses the same JDK which we have used to compile
        # and link the C code
        os.environ['JAVA_HOME'] = jdk_home_dir

    mvn_goal = 'package'
    log.info("Executing Maven goal '" + mvn_goal + "'")
    code = subprocess.call(['mvn', 'clean', mvn_goal, '-DskipTests'],
                           shell=platform.system() == 'Windows')
    if code:
        exit(code)

    # Copy JAR results to lib/*.jar
    if not os.path.exists(lib_dir):
        os.mkdir(lib_dir)
    target_dir = os.path.join(base_dir, 'target')
    jar_files = glob.glob(os.path.join(target_dir, '*.jar'))
    jar_files = [f for f in jar_files
                 if not (f.endswith('-sources.jar')
                         or f.endswith('-javadoc.jar'))]
    if not jar_files:
        log.error('Maven did not generate any JAR artifacts')
        exit(1)
    for jar_file in jar_files:
        build_dir = _build_dir()
        log.info("Copying " + jar_file + " -> " + build_dir)
        shutil.copy(jar_file, build_dir)
def check_flags(self):
    """
    Sanity check the compiler flags used to build the extensions
    """
    forbidden = None
    if os.environ.get("SAGE_FAT_BINARY") == "yes":
        # When building with SAGE_FAT_BINARY=yes, we should not
        # enable CPU features which do not exist on every CPU.
        # Such flags usually come from other libraries adding the
        # flags to the pkgconfig configuration. So if you hit these
        # errors, the problem is most likely with some external
        # library and not with Sage.
        import re
        # '-mcpu=' was misspelled '-mpcu=' in the original, which meant the
        # intended flag could never match.
        forbidden = re.compile(r"-march=|-mcpu=|-msse3|-msse4|-mpopcnt|-mavx")

    if forbidden is not None:
        errors = 0
        for ext in self.extensions:
            flags = ext.extra_compile_args
            for flag in flags:
                if forbidden.match(flag):
                    log.error("%s uses forbidden flag '%s'", ext.name, flag)
                    errors += 1
        if errors:
            raise RuntimeError("forbidden flags used")
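For illustration, a minimal sketch of how that regex behaves; the flag list is taken from the snippet above, and re.match anchors at the start of each flag:

import re

forbidden = re.compile(r"-march=|-mcpu=|-msse3|-msse4|-mpopcnt|-mavx")

# CPU-feature flags are caught, including ones the pattern only prefixes ...
assert forbidden.match("-march=native")
assert forbidden.match("-mavx2")        # '-mavx' matches as a prefix
# ... while unrelated flags pass through untouched
assert not forbidden.match("-O2")
assert not forbidden.match("-Wall")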
def run(self):
    has_npm = self.has_npm()
    if not has_npm:
        log.error(
            "`npm` unavailable. If you're running this command using sudo, make sure `npm` is available to sudo"
        )

    if self.should_run_npm_install():
        log.info("Installing build dependencies with npm. This may take a while...")
        check_call(["npm", "install"], cwd=pjoin(repo_root, "ipywidgets"),
                   stdout=sys.stdout, stderr=sys.stderr)
        os.utime(self.node_modules, None)

    env = os.environ.copy()
    env["PATH"] = npm_path

    if has_npm:
        log.info("Running `npm run build`...")
        check_call(
            ["npm", "run", "build"], cwd=pjoin(repo_root, "ipywidgets"),
            stdout=sys.stdout, stderr=sys.stderr
        )

    for t in self.targets:
        if not os.path.exists(t):
            msg = "Missing file: %s" % t
            if not has_npm:
                msg += "\nnpm is required to build a development version of ipywidgets"
            raise ValueError(msg)

    # update package data in case this created new files
    update_package_data(self.distribution)
def run(self):
    if not self.skip_build:
        self.run_command('build')

    # First ensure that files are group-writable; that makes it
    # easier to update the website from several accounts.
    log.info("Making files group-writable")
    for dirpath, dirnames, filenames in os.walk('htdocs'):
        for fn in dirnames + filenames:
            st = os.stat(os.path.join(dirpath, fn))
            mode = stat.S_IMODE(st.st_mode) | stat.S_IWGRP
            os.chmod(os.path.join(dirpath, fn), mode)

    # Use rsync to push the new version of the website to SF.net
    log.info("Running rsync")
    p = subprocess.Popen([
        'rsync', '-C', '-e', 'ssh', '--delete', '--delete-after',
        '-v', '-rltgoDz', '--exclude', 'cvs-snapshots', 'htdocs/',
        self.username + '@shell.sourceforge.net:/home/groups/p/py/pyobjc/htdocs/',
    ])
    status = p.wait()
    if status != 0:
        log.error("Rsync failed with exit code %s", status)
        raise DistutilsError("rsync failed")

    log.info("Update is ready, don't forget to check the website!")
def run(self):
    if not self.source_dir:
        log.error('no source directory specified')
        return
    for root, dirs, files in os.walk(self.source_dir):
        targ_dir = os.path.join(
            root.replace(self.source_dir, self.build_dir), 'LC_MESSAGES')
        for name in files:
            base, ext = os.path.splitext(name)
            if ext.lower() != '.po':
                continue
            src = os.path.join(root, name)
            targ = os.path.join(targ_dir, base + '.mo')
            # index 8 of the os.stat() tuple is st_mtime
            if os.path.exists(targ):
                targ_date = os.stat(targ)[8]
            else:
                targ_date = 0
            src_date = os.stat(src)[8]
            if targ_date > src_date:
                log.debug('skipping %s', targ)
                continue
            self.mkpath(targ_dir)
            log.info('compiling %s to %s', src, targ)
            if not self.dry_run:
                subprocess.check_call(
                    ['msgfmt', '--use-fuzzy',
                     '--output-file=%s' % targ, src])
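The command mirrors the source tree under build_dir and drops each .mo under an LC_MESSAGES directory, which is the layout gettext expects at runtime. A hypothetical before/after, assuming source_dir='po' and build_dir='build/locale':

# po/de/app.po  ->  build/locale/de/LC_MESSAGES/app.mo
# po/fr/app.po  ->  build/locale/fr/LC_MESSAGES/app.mo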
def _run_command(self, cmd, env=None):
    log.debug('running [%s]' % (' '.join(cmd),))
    try:
        return check_output(cmd, cwd=self.work_path, env=env)
    except Exception:
        log.error('command failed [%s] via [%s]' % (' '.join(cmd),
                                                    self.work_path,))
        raise
def run(self):
    has_npm = self.has_npm()
    if not has_npm:
        log.error("`npm` unavailable. If you're running this command using sudo, make sure `npm` is available to sudo")

    env = os.environ.copy()
    env['PATH'] = npm_path

    if self.should_run_npm_install():
        log.info("Installing build dependencies with npm. This may take a while...")
        check_call(['npm', 'install'], cwd=node_root,
                   stdout=sys.stdout, stderr=sys.stderr)
        os.utime(self.node_modules, None)

    for t in self.targets:
        if not os.path.exists(t):
            msg = 'Missing file: %s' % t
            if not has_npm:
                msg += '\nnpm is required to build a development version of nbmolviz-js'
            raise ValueError(msg)

    # update package data in case this created new files
    update_package_data(self.distribution)

    try:
        import notebook
        notebook.nbextensions.enable_nbextension_python('widgetsnbextension')
    except Exception as e:
        print('Failed to enable widgets: %s' % e)
def run(self):
    # skip this step if there are no libraries to build
    if not self.libraries or len(self.libraries) <= 0:
        return

    # ensure the destination folder exists
    log.info('Checking folder: ' + self._dest_folder)
    if not os.path.exists(self._dest_folder):
        log.info('Creating folder: ' + self._dest_folder)
        if not self.dry_run:
            os.makedirs(self._dest_folder)

    # check if the archive exists before downloading it
    archive_downloaded = False
    unqlite_archive_path = os.path.join(self._dest_folder,
                                        self._unqlite_archive_filename)
    log.info('Checking if archive downloaded: ' +
             self._unqlite_archive_filename)
    if not os.path.exists(unqlite_archive_path):
        try:
            log.info('Downloading {0} to folder {1}'.format(
                self._unqlite_download_url, self._dest_folder))
            if not self.dry_run:
                download = urlopen(self._unqlite_download_url)
                with open(unqlite_archive_path, 'wb') as local_archive_file:
                    local_archive_file.write(download.read())
                archive_downloaded = True
        except HTTPError, e:
            log.error('HTTP Error: {0} {1}'.format(
                e.code, self._unqlite_download_url))
            raise
        except URLError, e:
            log.error('URL Error: {0} {1}'.format(
                e.reason, self._unqlite_download_url))
            raise
def get_output(self, body, headers=None, include_dirs=None,
               libraries=None, library_dirs=None, lang="c"):
    """Try to compile, link to an executable, and run a program
    built from 'body' and 'headers'. Returns the exit status code
    of the program and its output.
    """
    from distutils.ccompiler import CompileError, LinkError
    self._check_compiler()
    exitcode, output = 255, ''
    try:
        src, obj, exe = self._link(body, headers, include_dirs,
                                   libraries, library_dirs, lang)
        exe = os.path.join('.', exe)
        exitstatus, output = exec_command(exe, execute_in='.')
        if hasattr(os, 'WEXITSTATUS'):
            exitcode = os.WEXITSTATUS(exitstatus)
            if os.WIFSIGNALED(exitstatus):
                sig = os.WTERMSIG(exitstatus)
                log.error('subprocess exited with signal %d' % (sig,))
                if sig == signal.SIGINT:
                    # control-C
                    raise KeyboardInterrupt
        else:
            exitcode = exitstatus
        log.info("success!")
    except (CompileError, LinkError):
        log.info("failure.")
    self._clean()
    return exitcode, output
def parse_config(cls):
    if not os.path.exists('setup.cfg'):
        return {}

    cfg = ConfigParser()

    try:
        cfg.read('setup.cfg')
    except Exception as e:
        if DEBUG:
            raise
        log.error(
            "Error reading setup.cfg: {0!r}\n{1} will not be "
            "automatically bootstrapped and package installation may fail."
            "\n{2}".format(e, PACKAGE_NAME, _err_help_msg))
        return {}

    if not cfg.has_section('ah_bootstrap'):
        return {}

    config = {}

    for option, type_ in CFG_OPTIONS:
        if not cfg.has_option('ah_bootstrap', option):
            continue
        if type_ is bool:
            value = cfg.getboolean('ah_bootstrap', option)
        else:
            value = cfg.get('ah_bootstrap', option)
        config[option] = value

    return config
def run(self):
    build.run(self)

    if self.distribution.without_gettext:
        return

    for po in glob.glob(os.path.join(PO_DIR, '*.po')):
        lang = os.path.basename(po[:-3])
        mo = os.path.join(MO_DIR, lang, 'terminator.mo')

        directory = os.path.dirname(mo)
        if not os.path.exists(directory):
            info('creating %s' % directory)
            os.makedirs(directory)

        if newer(po, mo):
            info('compiling %s -> %s' % (po, mo))
            try:
                rc = subprocess.call(['msgfmt', '-o', mo, po])
                if rc != 0:
                    raise Warning, "msgfmt returned %d" % rc
            except Exception, e:
                error("Building gettext files failed. Try setup.py --without-gettext [build|install]")
                error("Error: %s" % str(e))
                sys.exit(1)
def _get_vc_env(plat_spec):
    if os.getenv("DISTUTILS_USE_SDK"):
        return {
            key.lower(): value
            for key, value in os.environ.items()
        }

    vcvarsall, vcruntime = _find_vcvarsall(plat_spec)
    if not vcvarsall:
        raise DistutilsPlatformError("Unable to find vcvarsall.bat")

    try:
        out = subprocess.check_output(
            '"{}" {} && set'.format(vcvarsall, plat_spec),
            shell=True,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
        )
    except subprocess.CalledProcessError as exc:
        log.error(exc.output)
        raise DistutilsPlatformError("Error executing {}"
                                     .format(exc.cmd))

    env = {
        key.lower(): value
        for key, _, value in (line.partition('=') for line in out.splitlines())
        if key and value
    }

    if vcruntime:
        env['py_vcruntime_redist'] = vcruntime
    return env
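The helper runs vcvarsall.bat, dumps the resulting shell environment with `set`, and parses it into a dict of lower-cased variable names. A hedged sketch of a caller, assuming 'x86_amd64' is the plat_spec the local vcvarsall.bat accepts:

# Hypothetical call site: fetch the MSVC environment for a 64-bit
# cross-compile and inspect the PATH it configures.
vc_env = _get_vc_env('x86_amd64')
paths = vc_env.get('path', '').split(os.pathsep)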
def _check_available(self):
    try:
        self.check_btype_command("list")
        return True
    except CalledProcessError:
        log.error("Bucket types are not supported on this Riak node!")
        return False
def check_python(self, req):
    chk = VersionPredicate(req)
    ver = '.'.join([str(v) for v in sys.version_info[:2]])
    if not chk.satisfied_by(ver):
        log.error("Invalid python version, expected %s" % req)
        return False
    return True
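VersionPredicate comes from distutils.versionpredicate and accepts requirements of the form "Name (op version, op version)". A short sketch of the strings this check expects:

from distutils.versionpredicate import VersionPredicate

pred = VersionPredicate('Python (>=2.6, <3.0)')
print(pred.satisfied_by('2.7'))  # True
print(pred.satisfied_by('3.1'))  # False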
def run(self):
    if skip_npm:
        log.info('Skipping npm-installation')
        return
    node_package = path or HERE
    node_modules = pjoin(node_package, 'node_modules')
    is_yarn = os.path.exists(pjoin(node_package, 'yarn.lock'))

    npm_cmd = npm
    if npm is None:
        if is_yarn:
            npm_cmd = ['yarn']
        else:
            npm_cmd = ['npm']

    if not which(npm_cmd[0]):
        log.error("`{0}` unavailable. If you're running this command "
                  "using sudo, make sure `{0}` is available to sudo"
                  .format(npm_cmd[0]))
        return

    if force or is_stale(node_modules, pjoin(node_package, 'package.json')):
        log.info('Installing build dependencies with npm. This may '
                 'take a while...')
        run(npm_cmd + ['install'], cwd=node_package)
    if build_dir and source_dir and not force:
        should_build = is_stale(build_dir, source_dir)
    else:
        should_build = True
    if should_build:
        run(npm_cmd + ['run', build_cmd], cwd=node_package)
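The staleness checks above delegate to an is_stale helper that is not shown. A minimal sketch of plausible mtime-based semantics, purely an assumption rather than the project's actual implementation:

import os

def is_stale(target, source):
    """Assumed semantics: True if `target` is missing or older than
    `source`; the real helper may compare trees rather than two paths."""
    if not os.path.exists(target):
        return True
    return os.path.getmtime(target) < os.path.getmtime(source)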
def run(self):
    build.run(self)

    cherrytree_man_file = "linux/cherrytree.1"
    cherrytree_man_file_gz = cherrytree_man_file + ".gz"
    if newer(cherrytree_man_file, cherrytree_man_file_gz):
        if os.path.isfile(cherrytree_man_file_gz):
            os.remove(cherrytree_man_file_gz)
        import gzip
        f_in = open(cherrytree_man_file, 'rb')
        f_out = gzip.open(cherrytree_man_file_gz, 'wb')
        f_out.writelines(f_in)
        f_out.close()
        f_in.close()

    if self.distribution.without_gettext:
        return

    for po in glob.glob(os.path.join(PO_DIR, '*.po')):
        lang = os.path.basename(po[:-3])
        mo = os.path.join(MO_DIR, lang, 'cherrytree.mo')

        directory = os.path.dirname(mo)
        if not os.path.exists(directory):
            info('creating %s' % directory)
            os.makedirs(directory)

        if newer(po, mo):
            info('compiling %s -> %s' % (po, mo))
            try:
                rc = subprocess.call(['msgfmt', '-o', mo, po])
                if rc != 0:
                    raise Warning, "msgfmt returned %d" % rc
            except Exception, e:
                error("Building gettext files failed. Try setup.py --without-gettext [build|install]")
                error("Error: %s" % str(e))
                sys.exit(1)
def loadTestsFromModule(self, module):
    """Support test module and function name based filtering."""
    try:
        testsuites = ScanningLoader.loadTestsFromModule(self, module)
    except Exception:
        log.error('Failed to load tests from module %s', module)
        raise

    test_filter = getattr(__builtin__, '__test_filter')
    res = testsuites
    if test_filter['module'] is not None:
        name = module.__name__
        if name in test_filter['allowmods']:
            # a parent name space
            pass
        elif re.search(test_filter['module'], name):
            if test_filter['function'] is not None:
                res = filter_testsuites(testsuites)
            # add parents (and module itself)
            pms = name.split('.')
            for pm_idx in range(len(pms)):
                pm = '.'.join(pms[:pm_idx])
                if pm not in test_filter['allowmods']:
                    test_filter['allowmods'].append(pm)
        else:
            res = type(testsuites)()
    return res
def run(self):
    parser = self.arg_parser
    # parse options --file and --verbose
    self.options, self.argv = parser.parse_known_args(self.argv)
    log.set_verbosity(self.options.verbose)
    parser.add_argument(
        '-h', '--help',
        action='help', default=argparse.SUPPRESS,
        help=_('show this help message and exit'))
    self.subparser = parser.add_subparsers(help="target help", dest="target")
    try:
        # load native context
        with PakefileContext(self):
            file = self.options.file
            if file is None:
                if os.path.isfile(PAKEFILE_NAME):
                    file = PAKEFILE_NAME
                else:
                    self.load()
                    return
            # load root context
            with PakefileContext(self, file):
                self.load()
    except PakeError, e:
        log.error("Error: %s" % e.message)
def get_output(self, body, headers=None, include_dirs=None,
               libraries=None, library_dirs=None,
               lang="c", use_tee=None):
    """Try to compile, link to an executable, and run a program
    built from 'body' and 'headers'. Returns the exit status code
    of the program and its output.
    """
    # 2008-11-16, RemoveMe
    warnings.warn("\n+++++++++++++++++++++++++++++++++++++++++++++++++\n"
                  "Usage of get_output is deprecated: please do not \n"
                  "use it anymore, and avoid configuration checks \n"
                  "involving running executable on the target machine.\n"
                  "+++++++++++++++++++++++++++++++++++++++++++++++++\n",
                  DeprecationWarning, stacklevel=2)
    self._check_compiler()
    exitcode, output = 255, ''
    try:
        grabber = GrabStdout()
        try:
            src, obj, exe = self._link(body, headers, include_dirs,
                                       libraries, library_dirs, lang)
            grabber.restore()
        except Exception:
            output = grabber.data
            grabber.restore()
            raise
        exe = os.path.join('.', exe)
        try:
            # specify cwd arg for consistency with
            # historic usage pattern of exec_command()
            # also, note that exe appears to be a string,
            # which exec_command() handled, but we now
            # use a list for check_output() -- this assumes
            # that exe is always a single command
            output = subprocess.check_output([exe], cwd='.')
        except subprocess.CalledProcessError as exc:
            exitstatus = exc.returncode
            output = ''
        except OSError:
            # preserve the EnvironmentError exit status
            # used historically in exec_command()
            exitstatus = 127
            output = ''
        else:
            output = filepath_from_subprocess_output(output)
            exitstatus = 0  # success; otherwise exitstatus is unbound below
        if hasattr(os, 'WEXITSTATUS'):
            exitcode = os.WEXITSTATUS(exitstatus)
            if os.WIFSIGNALED(exitstatus):
                sig = os.WTERMSIG(exitstatus)
                log.error('subprocess exited with signal %d' % (sig,))
                if sig == signal.SIGINT:
                    # control-C
                    raise KeyboardInterrupt
        else:
            exitcode = exitstatus
        log.info("success!")
    except (CompileError, LinkError):
        log.info("failure.")
    self._clean()
    return exitcode, output
def run_command_hooks(cmd_obj, hook_kind):
    """Run hooks registered for that command and phase.

    *cmd_obj* is a finalized command object; *hook_kind* is either
    'pre_hook' or 'post_hook'.
    """
    hooks = getattr(cmd_obj, hook_kind, None)
    if not hooks:
        return

    for modname, hook in hooks:
        if isinstance(hook, str):
            try:
                hook_obj = resolve_name(hook)
            except ImportError as exc:
                raise DistutilsModuleError(
                    'cannot find hook {0}: {1}'.format(hook, exc))
        else:
            hook_obj = hook

        if not callable(hook_obj):
            raise DistutilsOptionError(
                'hook {0!r} is not callable'.format(hook))

        log.info('running {0} from {1} for {2} command'.format(
            hook_kind.rstrip('s'), modname, cmd_obj.get_command_name()))

        try:
            hook_obj(cmd_obj)
        except Exception:
            log.error('{0} command hook {1} raised an exception:\n'.format(
                hook_obj.__name__, cmd_obj.get_command_name()))
            log.error(traceback.format_exc())
            sys.exit(1)
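A minimal sketch of a hook this runner could invoke; the module and function names are hypothetical, but the call shape (the hook receives the finalized command object) follows from hook_obj(cmd_obj) above:

# In a hypothetical my_hooks.py, referenced from configuration by its
# dotted name 'my_hooks.announce_build' and resolved via resolve_name().
def announce_build(cmd_obj):
    print('about to run %s' % cmd_obj.get_command_name())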
def run(self):
    # Gen .in files with @PREFIX@ replaced
    for filename in ['udev-discover']:
        infile = open(filename + '.in', 'r')
        data = infile.read().replace('@PREFIX@', self.prefix)
        infile.close()

        outfile = open(filename, 'w')
        outfile.write(data)
        outfile.close()

    build.run(self)

    for po in glob.glob(os.path.join(PO_DIR, '*.po')):
        lang = os.path.basename(po[:-3])
        mo = os.path.join(MO_DIR, lang, 'udevdiscover.mo')

        directory = os.path.dirname(mo)
        if not os.path.exists(directory):
            info('creating %s' % directory)
            os.makedirs(directory)

        if newer(po, mo):
            info('compiling %s -> %s' % (po, mo))
            try:
                rc = subprocess.call(['msgfmt', '-o', mo, po])
                if rc != 0:
                    raise Warning, "msgfmt returned %d" % rc
            except Exception, e:
                error("Building gettext files failed. Try setup.py --without-gettext [build|install]")
                error("Error: %s" % str(e))
                sys.exit(1)
def _check_available(self):
    try:
        self.check_security_command("status")
        return True
    except CalledProcessError:
        log.error("Security is not supported on this Riak node!")
        return False
def run(self): log.info("Checking if upstream tar files exist.") if not os.path.exists(UPSTREAM_DIR): mkpath(UPSTREAM_DIR) for tarname, checksum, url in zip(UPSTREAM_TAR_NAMES, CHECKSUMS, UPSTREAM_TAR_URL): t_file = os.path.join(UPSTREAM_DIR, tarname) if not os.path.exists(t_file): log.warn( "File %s does not exist. Attempting to download from %s." % (t_file, url)) os.system("wget -O %s %s" % (t_file, url)) if not os.path.exists(t_file): log.error( """Download failed. You may wish to download the file "%s" manually from "%s" and place it in the "upstream/" directory.""" % (tarname, url)) sys.exit(1) if sha1sum(t_file) != checksum: log.error("Checksum for file %s is different." % t_file) sys.exit(1) log.info("Creating directories.") if os.path.exists("local"): remove_tree("local") mkpath(TMPDIR) log.info("Extracting tar files.") for tarname in UPSTREAM_TAR_NAMES: t_file = os.path.join(UPSTREAM_DIR, tarname) import tarfile tar = tarfile.open(t_file) tar.extractall(TMPDIR) tar.close() log.info("Applying patches.") if os.system( "patch -d %s < patches/0001-fix-compile-errors-fguptri.patch" % TMPDIR): log.error("Failed to apply patches.") sys.exit(1)
def run(self):
    if self.download_bootstrap_script is True:
        # Let's update the bootstrap-script to the version defined to be
        # distributed. See BOOTSTRAP_SCRIPT_DISTRIBUTED_VERSION above.
        url = ('https://github.com/saltstack/salt-bootstrap/raw/{0}'
               '/bootstrap-salt.sh'.format(
                   BOOTSTRAP_SCRIPT_DISTRIBUTED_VERSION))
        deploy_path = os.path.join(SETUP_DIRNAME, 'salt', 'cloud',
                                   'deploy', 'bootstrap-salt.sh')
        log.info('Updating bootstrap-salt.sh.'
                 '\n\tSource: {0}'
                 '\n\tDestination: {1}'.format(url, deploy_path))

        try:
            import requests
            req = requests.get(url)
            if req.status_code == 200:
                script_contents = req.text.encode(req.encoding)
            else:
                log.error(
                    'Failed to update the bootstrap-salt.sh script. HTTP '
                    'Error code: {0}'.format(req.status_code))
        except ImportError:
            req = urlopen(url)
            if req.getcode() == 200:
                script_contents = req.read()
            else:
                log.error(
                    'Failed to update the bootstrap-salt.sh script. HTTP '
                    'Error code: {0}'.format(req.getcode()))
        try:
            with open(deploy_path, 'w') as fp_:
                fp_.write(script_contents)
        except (OSError, IOError) as err:
            log.error(
                'Failed to write the updated script: {0}'.format(err))

    # Let the rest of the sdist command run
    Sdist.run(self)
def run(self):
    if getattr(self.distribution, 'salt_download_windows_dlls', None) is None:
        print('This command is not meant to be called on its own')
        exit(1)
    import platform
    import pip
    # pip has moved many things to `_internal` starting with pip 10
    if LooseVersion(pip.__version__) < LooseVersion('10.0'):
        from pip.utils.logging import indent_log  # pylint: disable=no-name-in-module
    else:
        from pip._internal.utils.logging import indent_log  # pylint: disable=no-name-in-module
    platform_bits, _ = platform.architecture()
    url = 'https://repo.saltstack.com/windows/dependencies/{bits}/{fname}.dll'
    dest = os.path.join(os.path.dirname(sys.executable), '{fname}.dll')
    with indent_log():
        for fname in ('libeay32', 'libsodium', 'ssleay32', 'msvcr120'):
            # See if the library is already on the system
            if find_library(fname):
                continue
            furl = url.format(bits=platform_bits[:2], fname=fname)
            fdest = dest.format(fname=fname)
            if not os.path.exists(fdest):
                log.info('Downloading {0}.dll to {1} from {2}'.format(
                    fname, fdest, furl))
                try:
                    import requests
                    from contextlib import closing
                    with closing(requests.get(furl, stream=True)) as req:
                        if req.status_code == 200:
                            with open(fdest, 'wb') as wfh:
                                for chunk in req.iter_content(chunk_size=4096):
                                    if chunk:  # filter out keep-alive new chunks
                                        wfh.write(chunk)
                                        wfh.flush()
                        else:
                            log.error(
                                'Failed to download {0}.dll to {1} from {2}'
                                .format(fname, fdest, furl))
                except ImportError:
                    req = urlopen(furl)
                    if req.getcode() == 200:
                        with open(fdest, 'wb') as wfh:
                            # Read in 4 KiB chunks; this loop works on both
                            # Python 2 and 3, so the broken Python 2 branch
                            # (which iterated over the bytes of a single
                            # read) is no longer needed.
                            while True:
                                chunk = req.read(4096)
                                if not chunk:
                                    break
                                wfh.write(chunk)
                                wfh.flush()
                    else:
                        log.error(
                            'Failed to download {0}.dll to {1} from {2}'
                            .format(fname, fdest, furl))
def readlines(name):
    return read(name).split('\n')


README = read('README.md')
LICENSE = readlines('LICENSE')[0].strip()
REQUIRED_PACKAGES = read('requirements.txt')

execute_requires = [
    'npm', 'node', 'powershell' if platform == 'win32' else 'bash'
]
for exe in execute_requires:
    if not find_executable(exe):
        log.error('%s should be installed.', exe)
        sys.exit(1)


class build_py(setuptools.command.build_py.build_py):
    def run(self):
        cmd = (['powershell', '-NoProfile', './scripts/build.ps1']
               if platform == 'win32' else ['bash', 'scripts/build.sh'])
        env = dict(os.environ)
        subprocess.check_call(cmd, env=env)
        return setuptools.command.build_py.build_py.run(self)


cmdclass = {
    'build_py': build_py,
}
def chk_petsc_dir(petsc_dir):
    if not os.path.isdir(petsc_dir):
        log.error('invalid PETSC_DIR: %s (ignored)' % petsc_dir)
        return None
    return petsc_dir
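A sketch of how such a validator is typically called; the environment-variable fallback is an assumption, not taken from the snippet:

# Hypothetical call site: accept PETSC_DIR from the environment and let
# the helper discard a bogus value.
petsc_dir = chk_petsc_dir(os.environ.get('PETSC_DIR', '/usr/lib/petsc'))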
def run(self):
    log.error(self.description)
import os
import subprocess
import sys
from collections import defaultdict  # needed for gphoto2_flags below

# python-gphoto2 version
version = '1.8.1'

# get gphoto2 library config
cmd = ['pkg-config', '--modversion', 'libgphoto2']
FNULL = open(os.devnull, 'w')
try:
    gphoto2_version = subprocess.check_output(
        cmd, stderr=FNULL, universal_newlines=True).split('.')
    gphoto2_version = tuple(map(int, gphoto2_version))[:3]
    gphoto2_version_str = '.'.join(map(str, gphoto2_version))
except Exception:
    error('ERROR: command "%s" failed', ' '.join(cmd))
    raise
gphoto2_flags = defaultdict(list)
for flag in subprocess.check_output(
        ['pkg-config', '--cflags', '--libs', 'libgphoto2'],
        universal_newlines=True).split():
    gphoto2_flags[flag[:2]].append(flag)
gphoto2_include = gphoto2_flags['-I']
gphoto2_libs = gphoto2_flags['-l']
gphoto2_lib_dirs = gphoto2_flags['-L']
for n in range(len(gphoto2_include)):
    if gphoto2_include[n].endswith('/gphoto2'):
        gphoto2_include[n] = gphoto2_include[n][:-len('/gphoto2')]

# create extension modules list
ext_modules = []
def get_output(
    self,
    body,
    headers=None,
    include_dirs=None,
    libraries=None,
    library_dirs=None,
    lang="c",
    use_tee=None,
):
    """Try to compile, link to an executable, and run a program
    built from 'body' and 'headers'. Returns the exit status code
    of the program and its output.
    """
    # 2008-11-16, RemoveMe
    warnings.warn(
        "\n+++++++++++++++++++++++++++++++++++++++++++++++++\n"
        "Usage of get_output is deprecated: please do not \n"
        "use it anymore, and avoid configuration checks \n"
        "involving running executable on the target machine.\n"
        "+++++++++++++++++++++++++++++++++++++++++++++++++\n",
        DeprecationWarning,
        stacklevel=2,
    )
    self._check_compiler()
    exitcode, output = 255, ""
    try:
        grabber = GrabStdout()
        try:
            src, obj, exe = self._link(
                body, headers, include_dirs, libraries, library_dirs, lang
            )
            grabber.restore()
        except Exception:
            output = grabber.data
            grabber.restore()
            raise
        exe = os.path.join(".", exe)
        try:
            # specify cwd arg for consistency with
            # historic usage pattern of exec_command()
            # also, note that exe appears to be a string,
            # which exec_command() handled, but we now
            # use a list for check_output() -- this assumes
            # that exe is always a single command
            output = subprocess.check_output([exe], cwd=".")
        except subprocess.CalledProcessError as exc:
            exitstatus = exc.returncode
            output = ""
        except OSError:
            # preserve the EnvironmentError exit status
            # used historically in exec_command()
            exitstatus = 127
            output = ""
        else:
            output = filepath_from_subprocess_output(output)
            exitstatus = 0  # success; otherwise exitstatus is unbound below
        if hasattr(os, "WEXITSTATUS"):
            exitcode = os.WEXITSTATUS(exitstatus)
            if os.WIFSIGNALED(exitstatus):
                sig = os.WTERMSIG(exitstatus)
                log.error("subprocess exited with signal %d" % (sig,))
                if sig == signal.SIGINT:
                    # control-C
                    raise KeyboardInterrupt
        else:
            exitcode = exitstatus
        log.info("success!")
    except (CompileError, LinkError):
        log.info("failure.")
    self._clean()
    return exitcode, output
# Make absolutely sure current working directory is looked in first
sys.path.insert(0, '.')

# Required Python Version
from mysql.utilities.common.tools import check_python_version
check_python_version()

# Require cx_Freeze
try:
    import cx_Freeze
    vercxfreeze = tuple([int(val) for val in cx_Freeze.version.split('.')])
    if not vercxfreeze >= _REQUIRED_CX_FREEZE:
        raise ImportError
except ImportError:
    log.error("Package cx_Freeze v{0} or later is required.".format(
        '.'.join([str(val) for val in _REQUIRED_CX_FREEZE])))
    sys.exit(1)

# Require Connector/Python
try:
    from mysql.connector import version
    if not version.VERSION >= _REQUIRED_MYCONNPY:
        raise ImportError
except ImportError:
    log.error("MySQL Connector/Python v{0} or later is required.".format(
        '.'.join([str(val) for val in _REQUIRED_MYCONNPY])))
    sys.exit(1)

from info import META_INFO, INSTALL

# cx_Freeze executables and configuration
raise RuntimeError("Could not get version from Git repo") from e # RX3D must be checked for very early as it changes imports if '--disable-rx3d' in sys.argv: RX3D = False sys.argv.remove('--disable-rx3d') from setuptools.command.build_ext import build_ext else: RX3D = True try: from Cython.Distutils import Extension as CyExtension from Cython.Distutils import build_ext import numpy except ImportError: log.error( "ERROR: RX3D wheel requires Cython and numpy. Please install beforehand" ) sys.exit(1) class CMakeAugmentedExtension(Extension): """ Definition of an extension that depends on a project to be built using CMake Notice by default the cmake project is installed to build/cmake_install """ def __init__(self, name, sources, cmake_proj_dir="", cmake_flags=None, cmake_collect_dirs=None,
def build_extension(self, ext):
    if self.distribution.lief_test:
        log.info("LIEF tests enabled!")
    fullname = self.get_ext_fullname(ext.name)
    jobs = self.parallel if self.parallel else 1

    cmake_args = ["-DLIEF_FORCE_API_EXPORTS=ON", "-DLIEF_PYTHON_API=on"]
    build_temp = self.build_temp
    cmake_library_output_directory = os.path.abspath(
        os.path.dirname(build_temp))
    cfg = 'RelWithDebInfo' if self.debug else 'Release'
    is64 = sys.maxsize > 2**32

    # Ninja ?
    build_with_ninja = False
    if self.has_ninja() and self.distribution.ninja:
        build_with_ninja = True

    if build_with_ninja:
        cmake_args += ["-G", "Ninja"]

    cmake_args += [
        '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={}'.format(
            cmake_library_output_directory),
        '-DPYTHON_EXECUTABLE={}'.format(sys.executable),
        '-DLIEF_PYTHON_API=on',
    ]

    # LIEF options
    # ============
    if self.distribution.lief_test:
        cmake_args += ["-DLIEF_TESTS=on"]

    if self.distribution.lief_no_json:
        log.info("LIEF JSON module disabled")
        cmake_args += ["-DLIEF_ENABLE_JSON=off"]

    if self.distribution.lief_no_logging:
        log.info("LIEF logging module disabled")
        cmake_args += ["-DLIEF_LOGGING=off"]

    if self.distribution.doc:
        log.info("LIEF documentation enabled")
        cmake_args += ["-DLIEF_DOC=on"]

    if self.debug:
        log.info("LIEF enables DEBUG messages")
        cmake_args += ["-DLIEF_LOGGING_DEBUG=on"]
    else:
        cmake_args += ["-DLIEF_LOGGING_DEBUG=off"]

    if self.distribution.lief_no_cache:
        cmake_args += ["-DLIEF_USE_CCACHE=off"]

    # Setup spdlog configuration flags if
    # the user provides --spdlog-dir
    if self.distribution.spdlog_dir is not None:
        cmake_args.append("-DLIEF_EXTERNAL_SPDLOG=ON")
        cmake_args.append("-Dspdlog_DIR={}".format(
            self.distribution.spdlog_dir))

    if self.distribution.lief_config_extra is not None and len(
            self.distribution.lief_config_extra) > 0:
        args = self.distribution.lief_config_extra.replace("\n", "")
        args = map(lambda a: a.strip(), args.split(";"))
        cmake_args += list(args)

    # Main formats
    # ============
    if self.distribution.lief_no_elf:
        log.info("LIEF ELF module disabled")
        cmake_args += ["-DLIEF_ELF=off"]

    if self.distribution.lief_no_pe:
        log.info("LIEF PE module disabled")
        cmake_args += ["-DLIEF_PE=off"]

    if self.distribution.lief_no_macho:
        log.info("LIEF MACH-O module disabled")
        cmake_args += ["-DLIEF_MACHO=off"]

    # Android formats
    # ===============
    if self.distribution.lief_no_oat or self.distribution.lief_no_android:
        log.info("LIEF OAT module disabled")
        cmake_args += ["-DLIEF_OAT=off"]

    if self.distribution.lief_no_dex or self.distribution.lief_no_android:
        log.info("LIEF DEX module disabled")
        cmake_args += ["-DLIEF_DEX=off"]

    if self.distribution.lief_no_vdex or self.distribution.lief_no_android:
        log.info("LIEF VDEX module disabled")
        cmake_args += ["-DLIEF_VDEX=off"]

    if self.distribution.lief_no_art or self.distribution.lief_no_android:
        log.info("LIEF ART module disabled")
        cmake_args += ["-DLIEF_ART=off"]

    build_args = ['--config', cfg]

    env = os.environ

    if os.getenv("CXXFLAGS", None) is not None:
        cmake_args += [
            '-DCMAKE_CXX_FLAGS={}'.format(os.getenv("CXXFLAGS")),
        ]

    if os.getenv("CFLAGS", None) is not None:
        cmake_args += [
            '-DCMAKE_C_FLAGS={}'.format(os.getenv("CFLAGS")),
        ]

    if platform.system() == "Windows":
        from setuptools import msvc
        cmake_args += [
            '-DCMAKE_BUILD_TYPE={}'.format(cfg),
            '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(
                cfg.upper(), cmake_library_output_directory),
            '-DLIEF_USE_CRT_RELEASE=MT',
        ]
        if build_with_ninja:
            arch = 'x64' if is64 else 'x86'
            ninja_env = msvc.msvc14_get_vc_env(arch)
            env.update(ninja_env)
        else:
            cmake_args += ['-A', 'x64'] if is64 else ['-A', 'win32']
            build_args += ['--', '/m']
    else:
        cmake_args += ['-DCMAKE_BUILD_TYPE={}'.format(cfg)]

    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)

    log.info("Platform: %s", platform.system())
    log.info("Wheel library: %s", self.get_ext_fullname(ext.name))

    # 1. Configure
    configure_cmd = ['cmake', ext.sourcedir] + cmake_args
    log.info(" ".join(configure_cmd))
    subprocess.check_call(configure_cmd, cwd=self.build_temp, env=env)

    # 2. Build
    targets = {
        'python_bindings': 'pyLIEF',
    }
    if self.distribution.sdk:
        targets['sdk'] = "package"

    if self.distribution.doc:
        targets['doc'] = "lief-doc"

    if platform.system() == "Windows":
        if self.distribution.lief_test:
            subprocess.check_call(configure_cmd, cwd=self.build_temp, env=env)
            if build_with_ninja:
                subprocess.check_call(
                    ['cmake', '--build', '.', '--target', "all"] + build_args,
                    cwd=self.build_temp, env=env)
            else:
                subprocess.check_call(
                    ['cmake', '--build', '.', '--target', "ALL_BUILD"] + build_args,
                    cwd=self.build_temp, env=env)
            subprocess.check_call(
                ['cmake', '--build', '.', '--target', "check-lief"] + build_args,
                cwd=self.build_temp, env=env)
        else:
            subprocess.check_call(
                ['cmake', '--build', '.', '--target', targets['python_bindings']] + build_args,
                cwd=self.build_temp, env=env)

        if 'sdk' in targets:
            subprocess.check_call(
                ['cmake', '--build', '.', '--target', targets['sdk']] + build_args,
                cwd=self.build_temp, env=env)
    else:
        if build_with_ninja:
            if self.distribution.lief_test:
                subprocess.check_call(configure_cmd, cwd=self.build_temp)
                subprocess.check_call(['ninja'], cwd=self.build_temp)
                subprocess.check_call(['ninja', "check-lief"],
                                      cwd=self.build_temp)
            else:
                subprocess.check_call(['ninja', targets['python_bindings']],
                                      cwd=self.build_temp, env=env)

            if 'sdk' in targets:
                subprocess.check_call(['ninja', targets['sdk']],
                                      cwd=self.build_temp, env=env)

            if 'doc' in targets:
                try:
                    subprocess.check_call(['ninja', targets['doc']],
                                          cwd=self.build_temp, env=env)
                except Exception as e:
                    log.error("Documentation failed: %s" % e)
        else:
            log.info("Using {} jobs".format(jobs))
            if self.distribution.lief_test:
                subprocess.check_call(configure_cmd, cwd=self.build_temp)
                subprocess.check_call(['make', '-j', str(jobs), "all"],
                                      cwd=self.build_temp)
                subprocess.check_call(['make', '-j', str(jobs), "check-lief"],
                                      cwd=self.build_temp)
            else:
                subprocess.check_call(
                    ['make', '-j', str(jobs), targets['python_bindings']],
                    cwd=self.build_temp, env=env)

            if 'sdk' in targets:
                subprocess.check_call(
                    ['make', '-j', str(jobs), targets['sdk']],
                    cwd=self.build_temp, env=env)

            if 'doc' in targets:
                try:
                    subprocess.check_call(
                        ['make', '-j', str(jobs), targets['doc']],
                        cwd=self.build_temp, env=env)
                except Exception as e:
                    log.error("Documentation failed: %s" % e)

    pylief_dst = os.path.join(
        self.build_lib,
        self.get_ext_filename(self.get_ext_fullname(ext.name)))
    libsuffix = pylief_dst.split(".")[-1]

    pylief_path = os.path.join(cmake_library_output_directory,
                               "{}.{}".format(PACKAGE_NAME, libsuffix))
    if platform.system() == "Windows":
        pylief_base = pathlib.Path(
            cmake_library_output_directory) / "Release" / "api" / "python"
        pylief_path = pylief_base / "Release" / "{}.{}".format(
            PACKAGE_NAME, libsuffix)
        if not pylief_path.is_file():
            pylief_path = pylief_base / "{}.{}".format(
                PACKAGE_NAME, libsuffix)
        pylief_path = pylief_path.as_posix()

    if not os.path.exists(self.build_lib):
        os.makedirs(self.build_lib)

    log.info("Copying {} into {}".format(pylief_path, pylief_dst))
    copy_file(pylief_path, pylief_dst,
              verbose=self.verbose, dry_run=self.dry_run)

    # SDK
    # ===
    if self.distribution.sdk:
        sdk_path = list(
            pathlib.Path(self.build_temp).rglob(
                "LIEF-*.{}".format(self.sdk_suffix())))
        if len(sdk_path) == 0:
            log.error("Unable to find SDK archive")
            sys.exit(1)

        sdk_path = str(sdk_path.pop())
        sdk_output = str(pathlib.Path(CURRENT_DIR) / "build")
        copy_file(sdk_path, sdk_output,
                  verbose=self.verbose, dry_run=self.dry_run)
def build_extension(self, ext):
    if self.distribution.lief_test:
        log.info("LIEF tests enabled!")
    fullname = self.get_ext_fullname(ext.name)
    filename = self.get_ext_filename(fullname)
    jobs = self.parallel if self.parallel else 1

    source_dir = ext.sourcedir
    build_temp = self.build_temp
    extdir = os.path.abspath(
        os.path.dirname(self.get_ext_fullpath(ext.name)))
    cmake_library_output_directory = os.path.abspath(
        os.path.dirname(build_temp))
    cfg = 'Debug' if self.debug else 'Release'
    is64 = sys.maxsize > 2**32

    cmake_args = [
        '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={}'.format(
            cmake_library_output_directory),
        '-DPYTHON_EXECUTABLE={}'.format(sys.executable),
        '-DLIEF_PYTHON_API=on',
    ]

    # LIEF options
    # ============
    if self.distribution.lief_test:
        cmake_args += ["-DLIEF_TESTS=on"]

    if self.distribution.lief_no_json:
        log.info("LIEF JSON module disabled")
        cmake_args += ["-DLIEF_ENABLE_JSON=off"]

    if self.distribution.lief_no_logging:
        log.info("LIEF logging module disabled")
        cmake_args += ["-DLIEF_LOGGING=off"]

    if self.distribution.doc:
        log.info("LIEF documentation enabled")
        cmake_args += ["-DLIEF_DOC=on"]

    # Main formats
    # ============
    if self.distribution.lief_no_elf:
        log.info("LIEF ELF module disabled")
        cmake_args += ["-DLIEF_ELF=off"]

    if self.distribution.lief_no_pe:
        log.info("LIEF PE module disabled")
        cmake_args += ["-DLIEF_PE=off"]

    if self.distribution.lief_no_macho:
        log.info("LIEF MACH-O module disabled")
        cmake_args += ["-DLIEF_MACHO=off"]

    # Android formats
    # ===============
    if self.distribution.lief_no_oat or self.distribution.lief_no_android:
        log.info("LIEF OAT module disabled")
        cmake_args += ["-DLIEF_OAT=off"]

    if self.distribution.lief_no_dex or self.distribution.lief_no_android:
        log.info("LIEF DEX module disabled")
        cmake_args += ["-DLIEF_DEX=off"]

    if self.distribution.lief_no_vdex or self.distribution.lief_no_android:
        log.info("LIEF VDEX module disabled")
        cmake_args += ["-DLIEF_VDEX=off"]

    if self.distribution.lief_no_art or self.distribution.lief_no_android:
        log.info("LIEF ART module disabled")
        cmake_args += ["-DLIEF_ART=off"]

    build_args = ['--config', cfg]

    if platform.system() == "Windows":
        cmake_args += [
            '-DCMAKE_BUILD_TYPE={}'.format(cfg),
            '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(
                cfg.upper(), cmake_library_output_directory),
            '-DLIEF_USE_CRT_RELEASE=MT',
        ]
        cmake_args += ['-A', 'x64'] if is64 else []

        # Specific to appveyor
        #if os.getenv("APPVEYOR", False):
        #    build_args += ['--', '/v:m']
        #    logger = os.getenv("MSBuildLogger", None)
        #    if logger:
        #        build_args += ['/logger:{}'.format(logger)]
        #else:
        build_args += ['--', '/m']
    else:
        cmake_args += ['-DCMAKE_BUILD_TYPE={}'.format(cfg)]

    env = os.environ.copy()

    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)

    build_with_ninja = False
    if self.has_ninja() and self.distribution.ninja:
        cmake_args += ["-G", "Ninja"]
        build_with_ninja = True

    # 1. Configure
    configure_cmd = ['cmake', ext.sourcedir] + cmake_args
    log.info(" ".join(configure_cmd))
    subprocess.check_call(configure_cmd, cwd=self.build_temp, env=env)

    # 2. Build
    targets = {
        'python_bindings': 'pyLIEF',
    }
    if self.distribution.sdk:
        targets['sdk'] = "package"
    if self.distribution.doc:
        targets['doc'] = "doc-lief"

    if platform.system() == "Windows":
        build_cmd = ['cmake', '--build', '.', '--target',
                     "lief_samples"] + build_args
        #log.info(" ".join(build_cmd))

        if self.distribution.lief_test:
            subprocess.check_call(
                ['cmake', '--build', '.', '--target', "lief_samples"] + build_args,
                cwd=self.build_temp, env=env)
            subprocess.check_call(configure_cmd, cwd=self.build_temp, env=env)
            subprocess.check_call(
                ['cmake', '--build', '.', '--target', "ALL_BUILD"] + build_args,
                cwd=self.build_temp, env=env)
            subprocess.check_call(
                ['cmake', '--build', '.', '--target', "check-lief"] + build_args,
                cwd=self.build_temp, env=env)
        else:
            subprocess.check_call(
                ['cmake', '--build', '.', '--target', targets['python_bindings']] + build_args,
                cwd=self.build_temp, env=env)

        if 'sdk' in targets:
            subprocess.check_call(
                ['cmake', '--build', '.', '--target', targets['sdk']] + build_args,
                cwd=self.build_temp, env=env)
    else:
        if build_with_ninja:
            if self.distribution.lief_test:
                subprocess.check_call(['ninja', "lief_samples"],
                                      cwd=self.build_temp)
                subprocess.check_call(configure_cmd, cwd=self.build_temp)
                subprocess.check_call(['ninja'], cwd=self.build_temp)
                subprocess.check_call(['ninja', "check-lief"],
                                      cwd=self.build_temp)
            else:
                subprocess.check_call(['ninja', targets['python_bindings']],
                                      cwd=self.build_temp)

            if 'sdk' in targets:
                subprocess.check_call(['ninja', targets['sdk']],
                                      cwd=self.build_temp)

            if 'doc' in targets:
                try:
                    subprocess.check_call(['ninja', targets['doc']],
                                          cwd=self.build_temp)
                except Exception as e:
                    log.error("Documentation failed: %s" % e)
        else:
            log.info("Using {} jobs".format(jobs))
            if self.distribution.lief_test:
                subprocess.check_call(
                    ['make', '-j', str(jobs), "lief_samples"],
                    cwd=self.build_temp)
                subprocess.check_call(configure_cmd, cwd=self.build_temp)
                subprocess.check_call(['make', '-j', str(jobs), "all"],
                                      cwd=self.build_temp)
                subprocess.check_call(['make', '-j', str(jobs), "check-lief"],
                                      cwd=self.build_temp)
            else:
                subprocess.check_call(
                    ['make', '-j', str(jobs), targets['python_bindings']],
                    cwd=self.build_temp)

            if 'sdk' in targets:
                subprocess.check_call(
                    ['make', '-j', str(jobs), targets['sdk']],
                    cwd=self.build_temp)

            if 'doc' in targets:
                try:
                    subprocess.check_call(
                        ['make', '-j', str(jobs), targets['doc']],
                        cwd=self.build_temp)
                except Exception as e:
                    log.error("Documentation failed: %s" % e)

    pylief_dst = os.path.join(
        self.build_lib,
        self.get_ext_filename(self.get_ext_fullname(ext.name)))
    libsuffix = pylief_dst.split(".")[-1]

    pylief_path = os.path.join(cmake_library_output_directory,
                               "{}.{}".format(PACKAGE_NAME, libsuffix))
    if platform.system() == "Windows":
        pylief_path = os.path.join(cmake_library_output_directory,
                                   "Release", "api", "python", "Release",
                                   "{}.{}".format(PACKAGE_NAME, libsuffix))

    if not os.path.exists(self.build_lib):
        os.makedirs(self.build_lib)

    log.info("Copying {} into {}".format(pylief_path, pylief_dst))
    copy_file(pylief_path, pylief_dst,
              verbose=self.verbose, dry_run=self.dry_run)

    # SDK
    # ===
    if self.distribution.sdk:
        sdk_path = list(
            pathlib.Path(self.build_temp).rglob(
                "LIEF-*.{}".format(self.sdk_suffix())))
        if len(sdk_path) == 0:
            log.error("Unable to find SDK archive")
            sys.exit(1)

        sdk_path = str(sdk_path.pop())
        sdk_output = str(pathlib.Path(CURRENT_DIR) / "build")
        copy_file(sdk_path, sdk_output,
                  verbose=self.verbose, dry_run=self.dry_run)
            except OSError, e:
                log.error(str(e))

        # Delete the directories. First reverse-sort the normalized paths by
        # length so that child directories are deleted before their parents.
        dirs = [os.path.normpath(dir) for dir in dirs]
        dirs.sort(key=len, reverse=True)
        for dir in dirs:
            try:
                log.info("Removing the directory '%s'." % dir)
                if not self.dry_run:
                    os.rmdir(dir)
            except OSError, e:
                if e.errno == errno.ENOTEMPTY:
                    log.info("Directory '%s' not empty; not removing." % dir)
                else:
                    log.error(str(e))


# setup can be called in different ways depending on what we're doing. (For
# example py2exe needs special handling.) These arguments are common between
# all the operations.
COMMON_SETUP_ARGS = {
    'name': APP_NAME,
    'license': 'Nmap License (https://nmap.org/book/man-legal.html)',
    'url': APP_WEB_SITE,
    'download_url': APP_DOWNLOAD_SITE,
    'author': 'Nmap Project',
    'maintainer': 'Nmap Project',
    'description': "%s frontend and results viewer" % NMAP_DISPLAY_NAME,
    'long_description': "%s is an %s frontend that is really useful "
        "for advanced users and easy to be used by newbies." % (
            APP_DISPLAY_NAME, NMAP_DISPLAY_NAME),
def initialize_options(self):
    try:
        raise RuntimeError("Sphinx must be installed for build_sphinx")
    except:
        log.error('error: Sphinx must be installed for build_sphinx')
        sys.exit(1)
def _finalize_protobuf(self):
    if not self.with_protobuf_include_dir:
        self.with_protobuf_include_dir = \
            os.environ.get("MYSQLXPB_PROTOBUF_INCLUDE_DIR")

    if not self.with_protobuf_lib_dir:
        self.with_protobuf_lib_dir = \
            os.environ.get("MYSQLXPB_PROTOBUF_LIB_DIR")

    if not self.with_protoc:
        self.with_protoc = os.environ.get("MYSQLXPB_PROTOC")

    if self.with_protobuf_include_dir:
        print("# Protobuf include directory: {0}"
              "".format(self.with_protobuf_include_dir))
        if not os.path.isdir(self.with_protobuf_include_dir):
            log.error("Protobuf include dir should be a directory")
            sys.exit(1)
    else:
        log.error("Unable to find Protobuf include directory.")
        sys.exit(1)

    if self.with_protobuf_lib_dir:
        print("# Protobuf library directory: {0}"
              "".format(self.with_protobuf_lib_dir))
        if not os.path.isdir(self.with_protobuf_lib_dir):
            log.error("Protobuf library dir should be a directory")
            sys.exit(1)
    else:
        log.error("Unable to find Protobuf library directory.")
        sys.exit(1)

    if self.with_protoc:
        print("# Protobuf protoc binary: {0}".format(self.with_protoc))
        if not os.path.isfile(self.with_protoc):
            log.error("Protobuf protoc binary is not valid.")
            sys.exit(1)
    else:
        log.error("Unable to find Protobuf protoc binary.")
        sys.exit(1)

    if not os.path.exists(self.protobuf_lib):
        os.makedirs(self.protobuf_lib)

    if not os.path.exists(self.protobuf_include):
        os.makedirs(self.protobuf_include)

    log.info("Copying Protobuf libraries")
    lib_files = glob(
        os.path.join(self.with_protobuf_lib_dir, "libprotobuf*"))
    for lib_file in lib_files:
        if os.path.isfile(lib_file):
            log.info("copying {0} -> {1}".format(lib_file, self.protobuf_lib))
            shutil.copy2(lib_file, self.protobuf_lib)

    log.info("Copying Protobuf header files")
    copy_tree(self.with_protobuf_include_dir, self.protobuf_include)

    # Remove all but static libraries to force static linking
    if os.name == "posix":
        log.info("Removing non-static Protobuf libraries from {0}"
                 "".format(self.protobuf_lib))
        for lib_file in os.listdir(self.protobuf_lib):
            lib_file_path = os.path.join(self.protobuf_lib, lib_file)
            if os.path.isfile(lib_file_path) and \
                    not lib_file.endswith((".a", ".dylib",)):
                os.unlink(lib_file_path)
    directive_defaults['binding'] = True
    cython_macros = [('CYTHON_TRACE', '1')]
else:
    cython_macros = None

pydigree_dir = os.path.dirname(__file__)
cydigree_dir = os.path.join(pydigree_dir, 'pydigree', 'cydigree')
cysources = [
    'pydigree/cydigree/cyfuncs.pyx',
    'pydigree/cydigree/datastructures.pyx',
    'pydigree/cydigree/vcfparse.pyx'
]
cysources = [os.path.join(pydigree_dir, x) for x in cysources]
if not all(os.path.exists(x) for x in cysources):
    error('ERROR: Cython sources not found! Giving up.')
    exit(1)

cyext = Extension('pydigree.cydigree.cyfuncs',
                  sources=[os.path.join(cydigree_dir, 'cyfuncs.pyx')],
                  include_dirs=[numpy.get_include()],
                  extra_compile_args=['-Wno-unused-function'],
                  define_macros=cython_macros)

dsext = Extension('pydigree.cydigree.datastructures',
                  sources=[os.path.join(cydigree_dir, 'datastructures.pyx')],
                  extra_compile_args=['-Wno-unused-function'],
                  define_macros=cython_macros)

vcfext = Extension('pydigree.cydigree.vcfparse',
                   sources=[os.path.join(cydigree_dir, 'vcfparse.pyx')],
def get_mysql_config_info(mysql_config):
    """Get MySQL information using mysql_config tool

    Returns a dict.
    """
    options = ['cflags', 'include', 'libs', 'libs_r', 'plugindir', 'version']

    cmd = [mysql_config] + ["--{0}".format(opt) for opt in options]

    try:
        proc = Popen(cmd, stdout=PIPE, universal_newlines=True)
        stdout, _ = proc.communicate()
    except OSError as exc:
        raise DistutilsExecError("Failed executing mysql_config: {0}".format(
            str(exc)))
    log.debug("# stdout: {0}".format(stdout))
    info = {}
    for option, line in zip(options, stdout.split('\n')):
        log.debug("# option: {0}".format(option))
        log.debug("# line: {0}".format(line))
        info[option] = line.strip()

    ver = info['version']
    if '-' in ver:
        ver, _ = ver.split('-', 2)

    info['version'] = tuple([int(v) for v in ver.split('.')[0:3]])
    libs = shlex.split(info['libs'])
    info['lib_dir'] = libs[0].replace('-L', '')
    info['libs'] = [lib.replace('-l', '') for lib in libs[1:]]
    log.debug("# info['libs']: ")
    for lib in info['libs']:
        log.debug("# {0}".format(lib))
    log.error("# info['libs']: {0}".format(info['libs']))
    libs = shlex.split(info['libs_r'])
    info['lib_r_dir'] = libs[0].replace('-L', '')
    info['libs_r'] = [lib.replace('-l', '') for lib in libs[1:]]

    info['include'] = info['include'].replace('-I', '')

    # Try to figure out the architecture
    info['arch'] = None
    if os.name == 'posix':
        pathname = os.path.join(info['lib_dir'], 'lib' + info['libs'][0]) + '*'
        libs = glob(pathname)
        log.debug("# libs: {0}".format(libs))
        for lib in libs:
            log.debug("#- {0}".format(lib))
        mysqlclient_libs = []
        for filepath in libs:
            _, filename = os.path.split(filepath)
            log.debug("# filename {0}".format(filename))
            if filename.startswith('libmysqlclient') and \
                    not os.path.islink(filepath) and \
                    '_r' not in filename and \
                    '.a' not in filename:
                mysqlclient_libs.append(filepath)
        mysqlclient_libs.sort()

        stdout = None
        try:
            log.debug("# mysqlclient_lib: {0}".format(mysqlclient_libs[-1]))
            for mysqlclient_lib in mysqlclient_libs:
                log.debug("#+ {0}".format(mysqlclient_lib))
            log.debug("# tested mysqlclient_lib[-1]: "
                      "{0}".format(mysqlclient_libs[-1]))
            proc = Popen(['file', '-L', mysqlclient_libs[-1]], stdout=PIPE,
                         universal_newlines=True)
            stdout, _ = proc.communicate()
            stdout = stdout.split(':')[1]
        except OSError as exc:
            raise DistutilsExecError(
                "Although the system seems POSIX, the file-command could not "
                "be executed: {0}".format(str(exc)))

        if stdout:
            if '64' in stdout:
                info['arch'] = "x86_64"
            else:
                info['arch'] = "i386"
        else:
            raise DistutilsExecError(
                "Failed getting output from the file-command")
    else:
        raise DistutilsExecError(
            "Cannot determine architecture on {0} systems".format(os.name))

    return info
def should_run_npm():
    """Test whether npm should be run"""
    if not which('npm'):
        log.error("npm unavailable")
        return False
    return is_stale(node_modules, pjoin(here, 'package.json'))
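A sketch of how such a guard is typically used; the install step shown is an assumption about the surrounding build code, not taken from the snippet:

# Hypothetical call site guarding the actual npm install.
if should_run_npm():
    check_call(['npm', 'install'], cwd=here)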
def setup_cfg_to_setup_kwargs(config, script_args=()): """Processes the setup.cfg options and converts them to arguments accepted by setuptools' setup() function. """ kwargs = {} # Temporarily holds install_requires and extra_requires while we # parse env_markers. all_requirements = {} for arg in D1_D2_SETUP_ARGS: if len(D1_D2_SETUP_ARGS[arg]) == 2: # The distutils field name is different than distutils2's. section, option = D1_D2_SETUP_ARGS[arg] elif len(D1_D2_SETUP_ARGS[arg]) == 1: # The distutils field name is the same thant distutils2's. section = D1_D2_SETUP_ARGS[arg][0] option = arg in_cfg_value = has_get_option(config, section, option) if not in_cfg_value: # There is no such option in the setup.cfg if arg == "long_description": in_cfg_value = has_get_option(config, section, "description_file") if in_cfg_value: in_cfg_value = split_multiline(in_cfg_value) value = '' for filename in in_cfg_value: description_file = open(filename) try: value += description_file.read().strip() + '\n\n' finally: description_file.close() in_cfg_value = value else: continue if arg in CSV_FIELDS: in_cfg_value = split_csv(in_cfg_value) if arg in MULTI_FIELDS: in_cfg_value = split_multiline(in_cfg_value) elif arg in MAP_FIELDS: in_cfg_map = {} for i in split_multiline(in_cfg_value): k, v = i.split('=') in_cfg_map[k.strip()] = v.strip() in_cfg_value = in_cfg_map elif arg in BOOL_FIELDS: # Provide some flexibility here... if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'): in_cfg_value = True else: in_cfg_value = False if in_cfg_value: if arg in ('install_requires', 'tests_require'): # Replaces PEP345-style version specs with the sort expected by # setuptools in_cfg_value = [ _VERSION_SPEC_RE.sub(r'\1\2', pred) for pred in in_cfg_value ] if arg == 'install_requires': # Split install_requires into package,env_marker tuples # These will be re-assembled later install_requires = [] requirement_pattern = '(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$' for requirement in in_cfg_value: m = re.match(requirement_pattern, requirement) requirement_package = m.group('package').strip() env_marker = m.group('env_marker').strip() install_requires.append((requirement_package, env_marker)) all_requirements[''] = install_requires elif arg == 'package_dir': in_cfg_value = {'': in_cfg_value} elif arg in ('package_data', 'data_files'): data_files = {} firstline = True prev = None for line in in_cfg_value: if '=' in line: key, value = line.split('=', 1) key, value = (key.strip(), value.strip()) if key in data_files: # Multiple duplicates of the same package name; # this is for backwards compatibility of the old # format prior to d2to1 0.2.6. prev = data_files[key] prev.extend(value.split()) else: prev = data_files[key.strip()] = value.split() elif firstline: raise errors.DistutilsOptionError( 'malformed package_data first line %r (misses ' '"=")' % line) else: prev.extend(line.strip().split()) firstline = False if arg == 'data_files': # the data_files value is a pointlessly different structure # from the package_data value data_files = data_files.items() in_cfg_value = data_files elif arg == 'cmdclass': cmdclass = {} dist = st_dist.Distribution() for cls_name in in_cfg_value: cls = resolve_name(cls_name) cmd = cls(dist) cmdclass[cmd.get_command_name()] = cls in_cfg_value = cmdclass kwargs[arg] = in_cfg_value # Transform requirements with embedded environment markers to # setuptools' supported marker-per-requirement format. 
    #
    # install_requires are treated as a special case of extras, before
    # being put back in the expected place
    #
    # fred =
    #     foo:marker
    #     bar
    # -> {'fred': ['bar'], 'fred:marker': ['foo']}
    if 'extras' in config:
        requirement_pattern = (
            r'(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$')
        extras = config['extras']
        # Add contents of test-requirements, if any, into an extra named
        # 'test' if one does not already exist.
        if 'test' not in extras:
            from pbr import packaging
            extras['test'] = "\n".join(
                packaging.parse_requirements(
                    packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':')
        for extra in extras:
            extra_requirements = []
            requirements = split_multiline(extras[extra])
            for requirement in requirements:
                m = re.match(requirement_pattern, requirement)
                extras_value = m.group('package').strip()
                env_marker = m.group('env_marker')
                extra_requirements.append((extras_value, env_marker))
            all_requirements[extra] = extra_requirements

    # Transform the full list of requirements into:
    # - install_requires, for those that have no extra and no
    #   env_marker
    # - named extras, for those with an extra name (which may include
    #   an env_marker)
    # - and as a special case, install_requires with an env_marker are
    #   treated as named extras where the name is the empty string
    extras_require = {}
    for req_group in all_requirements:
        for requirement, env_marker in all_requirements[req_group]:
            if env_marker:
                extras_key = '%s:(%s)' % (req_group, env_marker)
                # We do not want to poison wheel creation with locally
                # evaluated markers.  sdists always re-create the egg_info
                # and as such do not need to be guarded, and pip will never
                # call multiple setup.py commands at once.
                if 'bdist_wheel' not in script_args:
                    try:
                        if pkg_resources.evaluate_marker('(%s)' % env_marker):
                            extras_key = req_group
                    except SyntaxError:
                        log.error(
                            "Marker evaluation failed, see the following "
                            "error. For more information see: "
                            "http://docs.openstack.org/"
                            "developer/pbr/compatibility.html#evaluate-marker")
                        raise
            else:
                extras_key = req_group
            extras_require.setdefault(extras_key, []).append(requirement)

    kwargs['install_requires'] = extras_require.pop('', [])
    kwargs['extras_require'] = extras_require

    return kwargs
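# A minimal, self-contained sketch of the marker handling above: it calls
# pkg_resources.evaluate_marker() the same way the function does, with a
# hypothetical extra name ('fred') and requirement ('foo') that are not
# part of the original code.
import pkg_resources

requirement, env_marker = 'foo', "python_version >= '3'"
extras_key = 'fred:(%s)' % env_marker
try:
    if pkg_resources.evaluate_marker('(%s)' % env_marker):
        # Marker matches the running interpreter: file the requirement
        # under the plain extra name instead of the guarded key.
        extras_key = 'fred'
except SyntaxError:
    raise
print(extras_key)  # 'fred' on Python 3, the guarded key otherwise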
import sys

import distutils.core
from distutils.core import setup
from distutils.command.build_scripts import build_scripts as _build_scripts
from distutils.command.install import install as _install
from distutils.command.install_data import install_data as _install_data
from distutils.util import change_root
from distutils.file_util import DistutilsFileError
from distutils import log, dir_util

from info import META_INFO, INSTALL

# Check required Python version
if sys.version_info[0:2] not in [(2, 6), (2, 7)]:
    log.error("MySQL Utilities requires Python v2.6 or v2.7")
    sys.exit(1)

COMMANDS = {
    'cmdclass': {},
}

# Custom DistUtils command
try:
    from support.distribution.commands import (dist_deb, dist_rpm,
                                               bdist, build, sdist)
except ImportError:
    pass  # Use default when not available
else:
    COMMANDS['cmdclass'].update({
        'sdist': sdist.GenericSourceGPL,
def _run_cmake(self, ext):
    cmake = self._find_cmake()
    cfg = 'Debug' if self.debug else 'Release'
    self.outdir = os.path.abspath(ext.cmake_install_prefix)
    readline_flag = 'ON' if sys.platform[:6] == "darwin" else 'OFF'
    log.info("Building lib to: %s", self.outdir)
    cmake_args = [
        # Generic options only. Project options shall be passed as ext params
        '-DCMAKE_INSTALL_PREFIX=' + self.outdir,
        '-DPYTHON_EXECUTABLE=' + sys.executable,
        '-DCMAKE_BUILD_TYPE=' + cfg,
        '-DNRN_ENABLE_INTERNAL_READLINE=' + readline_flag,
    ] + ext.cmake_flags

    # RTD needs a quick config
    if self.docs and os.environ.get("READTHEDOCS"):
        cmake_args = ['-DNRN_ENABLE_MPI=OFF',
                      '-DNRN_ENABLE_INTERVIEWS=OFF']

    if self.cmake_prefix:
        cmake_args.append("-DCMAKE_PREFIX_PATH=" + self.cmake_prefix)
    if self.cmake_defs:
        cmake_args += ["-D" + opt for opt in self.cmake_defs.split(",")]

    build_args = ['--config', cfg, '--', '-j4']  # , 'VERBOSE=1']

    env = os.environ.copy()
    env['CXXFLAGS'] = "{} -DVERSION_INFO='{}'".format(
        env.get('CXXFLAGS', ''), self.distribution.get_version())
    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)

    try:
        # Configure project
        subprocess.Popen("echo $CXX", shell=True, stdout=subprocess.PIPE)
        log.info("[CMAKE] cmd: %s",
                 " ".join([cmake, ext.sourcedir] + cmake_args))
        subprocess.check_call([cmake, ext.sourcedir] + cmake_args,
                              cwd=self.build_temp,
                              env=env)
        if self.docs:
            # RTD will call sphinx for us. We just need notebooks and doxygen
            if os.environ.get("READTHEDOCS"):
                subprocess.check_call(['make', 'notebooks'],
                                      cwd=self.build_temp, env=env)
                subprocess.check_call(['make', 'doxygen'],
                                      cwd=self.build_temp, env=env)
            else:
                subprocess.check_call(['make', 'docs'],
                                      cwd=self.build_temp, env=env)
        else:
            subprocess.check_call(
                [cmake, '--build', '.', '--target', 'install'] + build_args,
                cwd=self.build_temp, env=env)
            subprocess.check_call([
                ext.cmake_install_prefix + '/bin/neurondemo', '-nopython',
                '-nogui', '-c', 'quit()'
            ], cwd=self.build_temp, env=env)
            # mac: libnrnmech of neurondemo needs to point to a relative
            # libnrniv
            REL_RPATH = ("@loader_path" if sys.platform[:6] == "darwin"
                         else "$ORIGIN")
            subprocess.check_call([
                ext.sourcedir + '/packaging/python/fix_demo_libnrnmech.sh',
                ext.cmake_install_prefix, REL_RPATH
            ], cwd=self.build_temp, env=env)
    except subprocess.CalledProcessError as exc:
        log.error("Status : FAIL. Log:\n%s", exc.output)
        raise
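# For orientation, the sequence above boils down to roughly these shell
# steps (paths and the Release configuration are illustrative; the exact
# flags come from the cmake_args built in the function):
#
#   cmake <sourcedir> -DCMAKE_INSTALL_PREFIX=<outdir> \
#         -DCMAKE_BUILD_TYPE=Release ...
#   cmake --build . --target install --config Release -- -j4
#   <install_prefix>/bin/neurondemo -nopython -nogui -c 'quit()'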
def __init__(self, compiler):
    log.debug("Compiler include_dirs: %s" % compiler.include_dirs)
    if hasattr(compiler, "initialize"):
        compiler.initialize()  # to set all variables
        log.debug("Compiler include_dirs after initialize: %s"
                  % compiler.include_dirs)
    self.compiler = compiler
    log.debug(sys.version)  # contains the compiler used to build this python

    # members with the info for the outside world
    self.include_dirs = get_include_dirs()
    self.objects = []
    self.libraries = []
    self.library_dirs = get_library_dirs()
    self.linker_flags = []
    self.compile_time_env = {}

    if self.compiler.compiler_type == 'msvc':
        if (sys.version_info.major, sys.version_info.minor) < (3, 3):
            # The check above is a nasty hack. We're using the python
            # version as a proxy for the MSVC version. 2008 doesn't have
            # stdint.h, so our bundled copy is needed. 2010 does.
            #
            # We need to add the path to the msvc includes
            msvc_2008_path = os.path.join(os.getcwd(), 'include',
                                          'msvc_2008')
            self.include_dirs.append(msvc_2008_path)
        elif (sys.version_info.major, sys.version_info.minor) < (3, 5):
            # Actually, it seems that appveyor doesn't have a stdint that
            # works, so even for 2010 we use our own (hacked) version of
            # stdint. This should be pretty safe in whatever case.
            msvc_2010_path = os.path.join(os.getcwd(), 'include',
                                          'msvc_2010')
            self.include_dirs.append(msvc_2010_path)

        # To avoid http://bugs.python.org/issue4431
        #
        # C:\Program Files\Microsoft
        # SDKs\Windows\v7.1\Bin\x64\mt.exe -nologo -manifest
        # C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe.manifest
        # -outputresource:C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe;1
        # C:\Users\appveyor\AppData\Local\Temp\1\pyfftw-9in6l66u\a.out.exe.manifest
        # : general error c1010070: Failed to load and parse
        # the manifest. The system cannot find the file specified.
        self.compiler.ldflags_shared.append('/MANIFEST')

    if get_platform().startswith('linux'):
        # at least libm is needed for the linker checks to succeed
        self.libraries.append('m')

    # main fftw3 header is required
    if not self.has_header(['fftw3.h'], include_dirs=self.include_dirs):
        raise CompileError("Could not find the FFTW header 'fftw3.h'")

    # mpi is optional
    # self.support_mpi = self.has_header(['mpi.h', 'fftw3-mpi.h'])
    # TODO enable check when wrappers are included in pyfftw
    self.support_mpi = False

    if self.support_mpi:
        try:
            import mpi4py
            self.include_dirs.append(mpi4py.get_include())
        except ImportError:
            log.error(
                "Could not import mpi4py. Skipping support for FFTW MPI.")
            self.support_mpi = False

    self.search_dependencies()
def log_error(self, msg, *args, **kw):
    # extra keyword arguments are accepted for interface compatibility
    # but intentionally ignored
    log.error(msg, *args)
def has_function(self, function, includes=None, objects=None, libraries=None,
                 include_dirs=None, library_dirs=None, linker_flags=None):
    '''Alternative implementation of distutils.ccompiler.has_function that
    deletes the output and hides calls to the compiler and linker.'''
    if includes is None:
        includes = []
    if objects is None:
        objects = self.objects
    if libraries is None:
        libraries = self.libraries
    if include_dirs is None:
        include_dirs = self.include_dirs
    if library_dirs is None:
        library_dirs = self.library_dirs
    if linker_flags is None:
        linker_flags = self.linker_flags

    msg = "Checking"
    if function:
        msg += " for %s" % function
    if includes:
        msg += " with includes " + str(includes)
    msg += "..."
    status = "no"

    log.debug("objects: %s" % objects)
    log.debug("libraries: %s" % libraries)
    log.debug("include dirs: %s" % include_dirs)

    import tempfile
    import shutil

    tmpdir = tempfile.mkdtemp(prefix='pyfftw-')
    try:
        fname = os.path.join(tmpdir, '%s.c' % function)
        with open(fname, 'w') as f:
            for inc in includes:
                f.write('#include <%s>\n' % inc)
            f.write("""\
int main() {
""")
            if function:
                f.write('%s();\n' % function)
            f.write("""\
return 0;
}""")

        # the root directory
        file_root = os.path.abspath(os.sep)

        try:
            # output file is stored relative to input file since
            # the output has the full directory, joining with the
            # file root gives the right directory
            stdout = os.path.join(tmpdir, "compile-stdout")
            stderr = os.path.join(tmpdir, "compile-stderr")
            with stdchannel_redirected(sys.stdout, stdout), \
                    stdchannel_redirected(sys.stderr, stderr):
                tmp_objects = self.compiler.compile(
                    [fname], output_dir=file_root,
                    include_dirs=include_dirs)
            with open(stdout, 'r') as f:
                log.debug(f.read())
            with open(stderr, 'r') as f:
                log.debug(f.read())
        except CompileError:
            with open(stdout, 'r') as f:
                log.debug(f.read())
            with open(stderr, 'r') as f:
                log.debug(f.read())
            return False
        except Exception as e:
            log.error(e)
            return False

        try:
            # additional objects should come last to resolve symbols;
            # linker order matters
            tmp_objects.extend(objects)
            stdout = os.path.join(tmpdir, "link-stdout")
            stderr = os.path.join(tmpdir, "link-stderr")
            with stdchannel_redirected(sys.stdout, stdout), \
                    stdchannel_redirected(sys.stderr, stderr):
                # TODO using link_executable, LDFLAGS that the
                # user can modify are ignored
                self.compiler.link_executable(
                    tmp_objects, 'a.out', output_dir=tmpdir,
                    libraries=libraries, extra_preargs=linker_flags,
                    library_dirs=library_dirs)
            with open(stdout, 'r') as f:
                log.debug(f.read())
            with open(stderr, 'r') as f:
                log.debug(f.read())
        except (LinkError, TypeError):
            with open(stdout, 'r') as f:
                log.debug(f.read())
            with open(stderr, 'r') as f:
                log.debug(f.read())
            return False
        except Exception as e:
            log.error(e)
            return False

        # no error, seems to work
        status = "ok"
        return True
    finally:
        shutil.rmtree(tmpdir)
        # log the result here so it is emitted on every exit path
        # (the trailing call in the original was unreachable after the
        # returns above)
        log.debug(msg + status)
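# has_function() above relies on a stdchannel_redirected() context manager
# defined elsewhere in the setup script.  A minimal sketch of such a
# helper, assuming the usual os.dup()/os.dup2() recipe (an illustration,
# not necessarily the exact implementation used here):
import contextlib
import os


@contextlib.contextmanager
def stdchannel_redirected(stdchannel, dest_filename):
    """Temporarily redirect an OS-level stdchannel (e.g. sys.stdout or
    sys.stderr) to a file, restoring it afterwards."""
    oldstdchannel = os.dup(stdchannel.fileno())
    dest_file = open(dest_filename, 'w')
    try:
        stdchannel.flush()
        os.dup2(dest_file.fileno(), stdchannel.fileno())
        yield
    finally:
        stdchannel.flush()
        os.dup2(oldstdchannel, stdchannel.fileno())
        os.close(oldstdchannel)
        dest_file.close()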
def wrapper(*args, **kwargs):
    # Parse configuration files
    func(*args, **kwargs)
    # Load the declarative requirements files config
    requirements = dist.command_options.get("requirements-files")
    if not requirements:
        log.info("No 'requirements-files' section was found. Nothing to do.")
        return
    supported_config_keys = (
        "setup_requires",
        "install_requires",
        "extras_require",
        "tests_require",
    )
    for cfgname, (cfgfile, requirements_file) in requirements.items():
        if cfgfile != "setup.cfg":
            # We only support configuring requirements files in setup.cfg
            continue
        if cfgname not in supported_config_keys:
            log.error(
                "The config key '%s' under 'requirements-files' is not "
                "supported. Allowed config keys: %s",
                cfgname,
                ", ".join("'{}'".format(key)
                          for key in supported_config_keys),
            )
            sys.exit(1)
        if cfgname == "extras_require":
            # Extras require supports mappings, let's parse that out
            if dist.extras_require is None:
                # If dist.extras_require is still None, make it a dictionary
                dist.extras_require = {}
            extras_require = dist.extras_require
            for line in requirements_file.splitlines():
                if not line.strip():
                    # Ignore empty lines
                    continue
                if "=" not in line:
                    log.error(
                        "Don't know how to parse the line '%s' for "
                        "extras_require under 'requirements-files'. "
                        "Should be "
                        "'<extras_key> = path/to/requirements.txt'",
                        line.strip(),
                    )
                    sys.exit(1)
                extras_key, extras_requirements_file = (
                    part.strip() for part in line.split("="))
                if extras_key not in extras_require:
                    extras_require[extras_key] = []
                extras_require[extras_key].extend(
                    _parse_requirements_file(
                        pathlib.Path(extras_requirements_file)))
            continue
        # The rest of the allowed config keys don't support mappings
        if getattr(dist, cfgname) is None:
            # If the dist value for the attribute is still None, make it
            # a list
            setattr(dist, cfgname, [])
        getattr(dist, cfgname).extend(
            _parse_requirements_file(
                pathlib.Path(requirements_file.strip())))
    # Be sure to call dist._finalize_requires again so that the
    # distribution class sets additional attributes
    dist._finalize_requires()
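# For reference, the setup.cfg layout the wrapper above parses (the shape
# is inferred from the checks in the code; the file names below are
# placeholders, not part of the original project):
#
#   [requirements-files]
#   install_requires = requirements/base.txt
#   tests_require = requirements/tests.txt
#   extras_require =
#       docs = requirements/docs.txt
#       cli = requirements/cli.txt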
def _run_cmake(self, ext):
    cmake = self._find_cmake()
    cfg = "Debug" if self.debug else "Release"
    self.outdir = os.path.abspath(ext.cmake_install_prefix)
    log.info("Building lib to: %s", self.outdir)
    cmake_args = [
        # Generic options only. Project options shall be passed as ext params
        "-DCMAKE_INSTALL_PREFIX=" + self.outdir,
        "-DPYTHON_EXECUTABLE=" + sys.executable,
        "-DCMAKE_BUILD_TYPE=" + cfg,
    ] + ext.cmake_flags

    # RTD needs a quick config
    if self.docs and os.environ.get("READTHEDOCS"):
        cmake_args = ["-DNRN_ENABLE_MPI=OFF",
                      "-DNRN_ENABLE_INTERVIEWS=OFF"]

    if self.cmake_prefix:
        cmake_args.append("-DCMAKE_PREFIX_PATH=" + self.cmake_prefix)
    if self.cmake_defs:
        cmake_args += ["-D" + opt for opt in self.cmake_defs.split(",")]

    build_args = ["--config", cfg, "--", "-j4"]  # , 'VERBOSE=1']

    env = os.environ.copy()
    env["CXXFLAGS"] = "{} -DVERSION_INFO='{}'".format(
        env.get("CXXFLAGS", ""), self.distribution.get_version())
    if not os.path.exists(self.build_temp):
        os.makedirs(self.build_temp)

    try:
        # Configure project
        subprocess.Popen("echo $CXX", shell=True, stdout=subprocess.PIPE)
        log.info("[CMAKE] cmd: %s",
                 " ".join([cmake, ext.sourcedir] + cmake_args))
        subprocess.check_call([cmake, ext.sourcedir] + cmake_args,
                              cwd=self.build_temp,
                              env=env)
        if self.docs:
            # RTD will call sphinx for us. We just need notebooks and doxygen
            if os.environ.get("READTHEDOCS"):
                subprocess.check_call(["make", "notebooks"],
                                      cwd=self.build_temp, env=env)
                subprocess.check_call(["make", "doxygen"],
                                      cwd=self.build_temp, env=env)
            else:
                subprocess.check_call(["make", "docs"],
                                      cwd=self.build_temp, env=env)
        else:
            subprocess.check_call(
                [cmake, "--build", ".", "--target", "install"] + build_args,
                cwd=self.build_temp,
                env=env,
            )
            subprocess.check_call(
                [
                    ext.cmake_install_prefix + "/bin/neurondemo",
                    "-nopython",
                    "-nogui",
                    "-c",
                    "quit()",
                ],
                cwd=self.build_temp,
                env=env,
            )
            # mac: libnrnmech of neurondemo needs to point to a relative
            # libnrniv
            REL_RPATH = ("@loader_path" if sys.platform[:6] == "darwin"
                         else "$ORIGIN")
            subprocess.check_call(
                [
                    ext.sourcedir + "/packaging/python/fix_demo_libnrnmech.sh",
                    ext.cmake_install_prefix,
                    REL_RPATH,
                ],
                cwd=self.build_temp,
                env=env,
            )
    except subprocess.CalledProcessError as exc:
        log.error("Status : FAIL. Log:\n%s", exc.output)
        raise
def _finalize_connector_c(self, connc_loc):
    """Finalize the --with-connector-c command line argument
    """
    platform = get_platform()
    self._mysql_config_info = None
    min_version = BuildExtDynamic.min_connector_c_version
    err_invalid_loc = "MySQL C API location is invalid; was %s"

    mysql_config = None
    err_version = "MySQL C API {0}.{1}.{2} or later required".format(
        *BuildExtDynamic.min_connector_c_version)

    if not os.path.exists(connc_loc):
        log.error(err_invalid_loc, connc_loc)
        sys.exit(1)

    if os.path.isdir(connc_loc):
        # if directory, and no mysql_config is available, figure out the
        # lib/ and include/ folders from the filesystem
        mysql_config = os.path.join(connc_loc, 'bin', 'mysql_config')
        if os.path.isfile(mysql_config) and \
                os.access(mysql_config, os.X_OK):
            connc_loc = mysql_config
            log.debug("# connc_loc: {0}".format(connc_loc))
        else:
            # Probably using MS Windows
            myversionh = os.path.join(connc_loc, 'include',
                                      'mysql_version.h')

            if not os.path.exists(myversionh):
                log.error("MySQL C API installation invalid "
                          "(mysql_version.h not found)")
                sys.exit(1)
            else:
                with open(myversionh, 'rb') as fp:
                    for line in fp.readlines():
                        if b'#define LIBMYSQL_VERSION' in line:
                            version = LooseVersion(
                                line.split()[2].replace(b'"', b'').decode()
                            ).version
                            if tuple(version) < min_version:
                                log.error(err_version)
                                sys.exit(1)
                            break

            # On Windows we check libmysql.dll
            if os.name == 'nt':
                lib = os.path.join(self.with_mysql_capi, 'lib',
                                   'libmysql.dll')
                connc_64bit = win_dll_is64bit(lib)
            # On OSX we check libmysqlclient.dylib
            elif 'macos' in platform:
                lib = os.path.join(self.with_mysql_capi, 'lib',
                                   'libmysqlclient.dylib')
                connc_64bit = unix_lib_is64bit(lib)
            # On other Unices we check libmysqlclient (follow symlinks)
            elif os.name == 'posix':
                connc_64bit = unix_lib_is64bit(connc_loc)
            else:
                raise OSError("Unsupported platform: %s" % os.name)

            include_dirs = [os.path.join(connc_loc, 'include')]
            if os.name == 'nt':
                libraries = ['libmysql']
            else:
                # distutils prepends the -l prefix itself, so the bare
                # library name is wanted here (not '-lmysqlclient')
                libraries = ['mysqlclient']
            library_dirs = os.path.join(connc_loc, 'lib')

            log.debug("# connc_64bit: {0}".format(connc_64bit))
            if connc_64bit:
                self.arch = 'x86_64'
            else:
                self.arch = 'i386'

    # We were given the location of the mysql_config tool (not on Windows)
    if not os.name == 'nt' and os.path.isfile(connc_loc) \
            and os.access(connc_loc, os.X_OK):
        mysql_config = connc_loc
        # Check mysql_config
        myc_info = get_mysql_config_info(mysql_config)
        log.debug("# myc_info: {0}".format(myc_info))

        if myc_info['version'] < min_version:
            log.error(err_version)
            sys.exit(1)

        include_dirs = myc_info['include']
        libraries = myc_info['libs']
        library_dirs = myc_info['lib_dir']
        self._mysql_config_info = myc_info
        self.arch = self._mysql_config_info['arch']
        connc_64bit = self.arch == 'x86_64'

    for include_dir in include_dirs:
        if not os.path.exists(include_dir):
            log.error(err_invalid_loc, connc_loc)
            sys.exit(1)

    # Set up the build_ext class
    self.include_dirs.extend(include_dirs)
    self.libraries.extend(libraries)
    self.library_dirs.append(library_dirs)

    # We try to offer a nice message when the architecture of Python
    # is not the same as MySQL Connector/C binaries.
    print("# self.arch: {0}".format(self.arch))
    if ARCH_64BIT != connc_64bit:
        log.error("Python is {0}, which does not match the "
                  "MySQL C API {1} architecture, type: {2}"
                  "".format(py_arch,
                            '64-bit' if connc_64bit else '32-bit',
                            self.arch))
        sys.exit(1)
def run(self):
    # get list of modules (Python) and extensions (SWIG)
    file_names = os.listdir(os.path.join('src', 'gphoto2'))
    file_names.sort()
    file_names = [os.path.splitext(x) for x in file_names]
    ext_names = [x[0] for x in file_names if x[1] == '.i']
    # get gphoto2 versions to be swigged
    gp_versions = get_gp_versions()
    self.announce('swigging gphoto2 versions %s' % str(gp_versions), 2)
    # do -builtin and not -builtin
    swig_bis = [False]
    cmd = ['swig', '-version']
    try:
        swig_version = str(
            subprocess.check_output(cmd, universal_newlines=True))
    except Exception:
        error('ERROR: command "%s" failed', ' '.join(cmd))
        raise
    for line in swig_version.split('\n'):
        if 'Version' in line:
            swig_version = tuple(map(int, line.split()[-1].split('.')))
            if swig_version != (2, 0, 11):
                swig_bis.append(True)
            break
    for use_builtin in swig_bis:
        # make options list
        swig_opts = ['-python', '-nodefaultctor', '-O', '-Wextra',
                     '-Werror']
        if use_builtin:
            swig_opts += ['-builtin', '-nofastunpack']
        # do each gphoto2 version
        for gp_version in gp_versions:
            doc_file = os.path.join(
                'src', 'gphoto2', 'common', 'doc-' + gp_version + '.i')
            # do Python 2 and 3
            for py_version in 2, 3:
                output_dir = os.path.join('src', 'swig')
                if use_builtin:
                    output_dir += '-bi'
                output_dir += '-py' + str(py_version)
                output_dir += '-gp' + gp_version
                self.mkpath(output_dir)
                gp_version_hex = '0x{:02x}{:02x}{:02x}'.format(
                    *map(int, gp_version.split('.')))
                version_opts = [
                    '-DGPHOTO2_VERSION=' + gp_version_hex,
                    '-outdir', output_dir,
                ]
                if os.path.isfile(doc_file):
                    version_opts.append(
                        '-DDOC_FILE=' + os.path.basename(doc_file))
                inc_dir = 'libgphoto2-' + gp_version
                if os.path.isdir(inc_dir):
                    version_opts.append('-I' + inc_dir)
                    version_opts.append(
                        '-I' + os.path.join(inc_dir, 'libgphoto2_port'))
                else:
                    version_opts += gphoto2_include
                if py_version >= 3:
                    version_opts.append('-py3')
                # do each swig module
                for ext_name in ext_names:
                    in_file = os.path.join('src', 'gphoto2',
                                           ext_name + '.i')
                    out_file = os.path.join(output_dir,
                                            ext_name + '_wrap.c')
                    self.spawn(['swig'] + swig_opts + version_opts +
                               ['-o', out_file, in_file])
                # create init module
                init_file = os.path.join(output_dir, '__init__.py')
                with open(init_file, 'w') as im:
                    im.write('__version__ = "{}"\n\n'.format(version))
                    im.write('''
class GPhoto2Error(Exception):
    """Exception raised by gphoto2 library errors

    Attributes:
        code (int): the gphoto2 error code
        string (str): corresponding error message
    """
    def __init__(self, code):
        string = gp_result_as_string(code)
        Exception.__init__(self, '[%d] %s' % (code, string))
        self.code = code
        self.string = string

''')
                    for name in ext_names:
                        im.write('from gphoto2.{} import *\n'.format(name))
                    im.write('''
__all__ = dir()
''')
    # store SWIG version
    info_file = os.path.join('src', 'info.txt')
    with open(info_file, 'w') as info:
        info.write('swig_version = {}\n'.format(repr(swig_version)))
def cfg_to_args(path='setup.cfg'):
    """ Distutils2 to distutils1 compatibility util.

    This method uses an existing setup.cfg to generate a dictionary of
    keywords that can be used by distutils.core.setup(**kwargs).

    :param path:
        The setup.cfg path.
    :raises DistutilsFileError:
        When the setup.cfg file is not found.
    """
    # The method source code really starts here.
    parser = configparser.SafeConfigParser()
    if not os.path.exists(path):
        raise DistutilsFileError("file '%s' does not exist" %
                                 os.path.abspath(path))
    parser.read(path)
    config = {}
    for section in parser.sections():
        config[section] = dict(parser.items(section))

    # Run setup_hooks, if configured
    setup_hooks = has_get_option(config, 'global', 'setup_hooks')
    package_dir = has_get_option(config, 'files', 'packages_root')

    # Add the source package directory to sys.path in case it contains
    # additional hooks, and to make sure it's on the path before any
    # existing installations of the package
    if package_dir:
        package_dir = os.path.abspath(package_dir)
        sys.path.insert(0, package_dir)

    try:
        if setup_hooks:
            setup_hooks = [
                hook for hook in split_multiline(setup_hooks)
                if hook != 'pbr.hooks.setup_hook']
            for hook in setup_hooks:
                hook_fn = resolve_name(hook)
                try:
                    hook_fn(config)
                except SystemExit:
                    log.error('setup hook %s terminated the installation'
                              % hook)
                except:
                    e = sys.exc_info()[1]
                    log.error('setup hook %s raised exception: %s\n' %
                              (hook, e))
                    log.error(traceback.format_exc())
                    sys.exit(1)

        # Run the pbr hook
        pbr.hooks.setup_hook(config)

        kwargs = setup_cfg_to_setup_kwargs(config)

        # Set default config overrides
        kwargs['include_package_data'] = True
        kwargs['zip_safe'] = False

        register_custom_compilers(config)

        ext_modules = get_extension_modules(config)
        if ext_modules:
            kwargs['ext_modules'] = ext_modules

        entry_points = get_entry_points(config)
        if entry_points:
            kwargs['entry_points'] = entry_points

        wrap_commands(kwargs)

        # Handle the [files]/extra_files option
        files_extra_files = has_get_option(config, 'files', 'extra_files')
        if files_extra_files:
            extra_files.set_extra_files(split_multiline(files_extra_files))
    finally:
        # Perform cleanup if any paths were added to sys.path
        if package_dir:
            sys.path.pop(0)

    return kwargs
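# A minimal sketch of how a setup.py would consume cfg_to_args() directly
# (pbr-based projects normally just pass pbr=True and setup_requires=['pbr']
# to setup() instead, which routes through this helper internally):
from distutils.core import setup

setup(**cfg_to_args())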
def run(self):
    platform_arch = platform.architecture()[0]
    log.info("Python architecture is %s" % platform_arch)

    build_type = "Debug" if OPTION_DEBUG else "Release"
    if OPTION_RELWITHDEBINFO:
        build_type = 'RelWithDebInfo'

    # Check env
    make_path = None
    make_generator = None
    if not OPTION_ONLYPACKAGE:
        if OPTION_MAKESPEC == "make":
            make_name = "make"
            make_generator = "Unix Makefiles"
        elif OPTION_MAKESPEC == "msvc":
            nmake_path = find_executable("nmake")
            if nmake_path is None or not os.path.exists(nmake_path):
                log.info("nmake not found. "
                         "Trying to initialize the MSVC env...")
                init_msvc_env(platform_arch, build_type)
            else:
                log.info("nmake was found in %s" % nmake_path)
            if OPTION_JOM:
                make_name = "jom"
                make_generator = "NMake Makefiles JOM"
            else:
                make_name = "nmake"
                make_generator = "NMake Makefiles"
        elif OPTION_MAKESPEC == "mingw":
            make_name = "mingw32-make"
            make_generator = "MinGW Makefiles"
        else:
            raise DistutilsSetupError("Invalid option --make-spec.")
        make_path = find_executable(make_name)
        if make_path is None or not os.path.exists(make_path):
            raise DistutilsSetupError(
                "You need the program \"%s\" on your system path to "
                "compile PySide." % make_name)

        if OPTION_CMAKE is None or not os.path.exists(OPTION_CMAKE):
            raise DistutilsSetupError(
                "Failed to find cmake. "
                "Please specify the path to cmake with --cmake parameter.")

    if OPTION_QMAKE is None or not os.path.exists(OPTION_QMAKE):
        raise DistutilsSetupError(
            "Failed to find qmake. "
            "Please specify the path to qmake with --qmake parameter.")

    # Prepare parameters
    py_executable = sys.executable
    py_version = "%s.%s" % (sys.version_info[0], sys.version_info[1])
    py_include_dir = get_config_var("INCLUDEPY")
    py_libdir = get_config_var("LIBDIR")
    py_prefix = get_config_var("prefix")
    if not py_prefix or not os.path.exists(py_prefix):
        py_prefix = sys.prefix
    if sys.platform == "win32":
        py_scripts_dir = os.path.join(py_prefix, "Scripts")
    else:
        py_scripts_dir = os.path.join(py_prefix, "bin")
    if py_libdir is None or not os.path.exists(py_libdir):
        if sys.platform == "win32":
            py_libdir = os.path.join(py_prefix, "libs")
        else:
            py_libdir = os.path.join(py_prefix, "lib")
    if py_include_dir is None or not os.path.exists(py_include_dir):
        if sys.platform == "win32":
            py_include_dir = os.path.join(py_prefix, "include")
        else:
            py_include_dir = os.path.join(py_prefix,
                                          "include/python%s" % py_version)
    dbgPostfix = ""
    if build_type == "Debug":
        dbgPostfix = "_d"
    if sys.platform == "win32":
        if OPTION_MAKESPEC == "mingw":
            py_library = os.path.join(
                py_libdir, "libpython%s%s.a" %
                (py_version.replace(".", ""), dbgPostfix))
        else:
            py_library = os.path.join(
                py_libdir, "python%s%s.lib" %
                (py_version.replace(".", ""), dbgPostfix))
    else:
        lib_exts = ['.so']
        if sys.platform == 'darwin':
            lib_exts.append('.dylib')
        if sys.version_info[0] > 2:
            lib_suff = getattr(sys, 'abiflags', None)
        else:  # Python 2
            lib_suff = ''
        lib_exts.append('.so.1')
        lib_exts.append('.a')  # static library as last gasp
        if sys.version_info[0] == 2 and dbgPostfix:
            # For Python2 add a duplicate set of extensions combined with
            # the dbgPostfix, so we test for both the debug version of
            # the lib and the normal one. This allows a debug PySide to
            # be built with a non-debug Python.
            lib_exts = [dbgPostfix + e for e in lib_exts] + lib_exts
        libs_tried = []
        for lib_ext in lib_exts:
            lib_name = "libpython%s%s%s" % (py_version, lib_suff, lib_ext)
            py_library = os.path.join(py_libdir, lib_name)
            if os.path.exists(py_library):
                break
            libs_tried.append(py_library)
        else:
            py_multiarch = get_config_var("MULTIARCH")
            if py_multiarch:
                try_py_libdir = os.path.join(py_libdir, py_multiarch)
                libs_tried = []
                for lib_ext in lib_exts:
                    lib_name = "libpython%s%s%s" % (py_version, lib_suff,
                                                    lib_ext)
                    py_library = os.path.join(try_py_libdir, lib_name)
                    if os.path.exists(py_library):
                        py_libdir = try_py_libdir
                        break
                    libs_tried.append(py_library)
                else:
                    raise DistutilsSetupError(
                        "Failed to locate the Python library with %s" %
                        ', '.join(libs_tried))
            else:
                raise DistutilsSetupError(
                    "Failed to locate the Python library with %s" %
                    ', '.join(libs_tried))
        if py_library.endswith('.a'):
            # Python was compiled as a static library
            log.error("Failed to locate a dynamic Python library, "
                      "using %s" % py_library)

    qtinfo = QtInfo(OPTION_QMAKE)
    qt_dir = os.path.dirname(OPTION_QMAKE)
    qt_version = qtinfo.version
    if not qt_version:
        log.error("Failed to query the Qt version with qmake %s"
                  % qtinfo.qmake_path)
        sys.exit(1)

    # Update the PATH environment variable
    update_env_path([py_scripts_dir, qt_dir])

    build_name = "py%s-qt%s-%s-%s" % (py_version, qt_version,
                                      platform.architecture()[0],
                                      build_type.lower())

    script_dir = os.getcwd()
    sources_dir = os.path.join(script_dir, "sources")
    build_dir = os.path.join(script_dir, "pyside_build", "%s" % build_name)
    install_dir = os.path.join(script_dir, "pyside_install",
                               "%s" % build_name)

    # Try to ensure that tools built by this script (such as shiboken)
    # are found before any that may already be installed on the system.
    update_env_path([os.path.join(install_dir, 'bin')])

    # Tell cmake to look here for *.cmake files
    os.environ['CMAKE_PREFIX_PATH'] = install_dir

    self.make_path = make_path
    self.make_generator = make_generator
    self.debug = OPTION_DEBUG
    self.script_dir = script_dir
    self.sources_dir = sources_dir
    self.build_dir = build_dir
    self.install_dir = install_dir
    self.qmake_path = OPTION_QMAKE
    self.py_executable = py_executable
    self.py_include_dir = py_include_dir
    self.py_library = py_library
    self.py_version = py_version
    self.build_type = build_type
    self.qtinfo = qtinfo
    self.site_packages_dir = get_python_lib(1, 0, prefix=install_dir)
    self.build_tests = OPTION_BUILDTESTS

    log.info("=" * 30)
    log.info("Package version: %s" % __version__)
    log.info("Build type: %s" % self.build_type)
    log.info("Build tests: %s" % self.build_tests)
    log.info("-" * 3)
    log.info("Make path: %s" % self.make_path)
    log.info("Make generator: %s" % self.make_generator)
    log.info("Make jobs: %s" % OPTION_JOBS)
    log.info("-" * 3)
    log.info("Script directory: %s" % self.script_dir)
    log.info("Sources directory: %s" % self.sources_dir)
    log.info("Build directory: %s" % self.build_dir)
    log.info("Install directory: %s" % self.install_dir)
    log.info("Python site-packages install directory: %s"
             % self.site_packages_dir)
    log.info("-" * 3)
    log.info("Python executable: %s" % self.py_executable)
    log.info("Python includes: %s" % self.py_include_dir)
    log.info("Python library: %s" % self.py_library)
    log.info("Python prefix: %s" % py_prefix)
    log.info("Python scripts: %s" % py_scripts_dir)
    log.info("-" * 3)
    log.info("Qt qmake: %s" % self.qmake_path)
    log.info("Qt version: %s" % qtinfo.version)
    log.info("Qt bins: %s" % qtinfo.bins_dir)
    log.info("Qt plugins: %s" % qtinfo.plugins_dir)
    log.info("-" * 3)
    log.info("OpenSSL libs: %s" % OPTION_OPENSSL)
    log.info("=" * 30)

    # Prepare folders
    if not os.path.exists(self.sources_dir):
        log.info("Creating sources folder %s..." % self.sources_dir)
        os.makedirs(self.sources_dir)
    if not os.path.exists(self.build_dir):
        log.info("Creating build folder %s..." % self.build_dir)
        os.makedirs(self.build_dir)
    if not os.path.exists(self.install_dir):
        log.info("Creating install folder %s..." % self.install_dir)
        os.makedirs(self.install_dir)

    if not OPTION_ONLYPACKAGE:
        # Build extensions
        for ext in ['shiboken', 'pyside', 'pyside-tools']:
            self.build_extension(ext)

        # Build patchelf if needed
        self.build_patchelf()

    # Prepare packages
    self.prepare_packages()

    # Build packages
    _build.run(self)