def run(self):
    """Run the test suite with nose and exit with 0 on success, 1 on failure.

    Side effects: chdirs into the testing directory and deletes the .pyc
    and extracted input-vector artifacts the tests leave behind.
    """
    # Need the nose module for testing
    import nose

    # cd into the testing directory. Otherwise, the src/binwalk
    # directory gets imported by nose which a) clutters the src
    # directory with a bunch of .pyc files and b) will fail anyway
    # unless a build/install has already been run which creates
    # the version.py file.
    testing_directory = os.path.join(MODULE_DIRECTORY, "testing", "tests")
    os.chdir(testing_directory)

    # Run the tests; nose returns True when all tests passed.
    retval = nose.core.run(argv=['--exe', '--with-coverage'])
    sys.stdout.write("\n")

    # Clean up the resulting pyc files in the testing directory
    for pyc in glob.glob("%s/*.pyc" % testing_directory):
        sys.stdout.write("removing '%s'\n" % pyc)
        os.remove(pyc)

    # Remove directories extracted from the test input vectors.
    input_vectors_directory = os.path.join(testing_directory, "input-vectors")
    for extracted_directory in glob.glob("%s/*.extracted" % input_vectors_directory):
        remove_tree(extracted_directory)

    # Was `if retval == True: sys.exit(0) else: sys.exit(1)`; comparing
    # to True with == is an anti-pattern, and the branch collapses to one line.
    sys.exit(0 if retval else 1)
def run(self):
    """Execute install command.

    Builds the documentation via make, delegates to the standard
    install_data command, then creates any configured empty directories
    and recursively replaces the configured directory trees.
    """
    # Build the docs before installing anything.
    subprocess.check_call(['make', '-C', 'doc'])
    install_data.run(self)  # Old style base class -- cannot use super()
    # Create empty directories, honoring a --root staging prefix if set.
    for directory in DIRECTORIES_TO_CREATE:
        if self.root:
            directory = change_root(self.root, directory)
        if not os.path.exists(directory):
            log.info("creating directory '%s'", directory)
            os.makedirs(directory)
    # Recursively overwrite directories: any existing target is removed
    # first so stale files from a previous install do not linger.
    for target, source in DIRECTORIES_TO_COPY:
        if self.root:
            target = change_root(self.root, target)
        if os.path.exists(target):
            remove_tree(target)
        log.info("recursive copy '%s' to '%s'", source, target)
        shutil.copytree(source, target, symlinks=True)
def locale_build(self):
    '''
    I want python setup.py build to create the compiled gettext mo files.
    The primary driver is the ability to create a windows msi installer
    which includes these files.
    '''
    info("COMPILING PO FILES (gettext translations)")
    if not os.path.isdir("src/openmolar/locale/"):
        warn("WARNING - language files are missing!")
    locale_dir = os.path.join(self.build_base, "locale")
    # Remove any previously compiled catalogs; best effort only.
    try:
        dir_util.remove_tree(locale_dir)
    except Exception:
        warn("unable to remove directory %s", locale_dir)
    self.mkpath(locale_dir)
    # One subdirectory per language, named after the .po file stem.
    for po_file in glob.glob("src/openmolar/locale/*.po"):
        file_ = os.path.split(po_file)[1]
        lang = file_.replace(".po", "")
        os.mkdir(os.path.join(locale_dir, lang))
        mo_file = os.path.join(locale_dir, lang, "openmolar.mo")
        # Compile the catalog with GNU gettext's msgfmt.
        commands = ["msgfmt", "-o", mo_file, po_file]
        info('executing %s' % " ".join(commands))
        try:
            p = subprocess.Popen(commands)
            p.wait()
        except IOError:
            # msgfmt binary missing (or not executable) -- warn, keep going.
            info('Error while running msgfmt on %s - '
                 'perhaps msgfmt (gettext) is not installed?' % po_file)
def run_old(self):
    """Run the distutils command.

    Verifies rpmbuild is executable (unless --dry-run), prepares the RPM
    top directory, builds one RPM per configured spec, and removes the
    build tree unless --keep-temp was given.

    Raises:
        DistutilsError: if rpmbuild cannot be executed.
    """
    # check whether we can execute rpmbuild
    if not self.dry_run:
        try:
            # 'with' closes the devnull handle; the original leaked it.
            with open(os.devnull, 'w') as devnull:
                subprocess.Popen(['rpmbuild', '--version'],
                                 stdin=devnull, stdout=devnull)
        except OSError:
            # Fixed typo: the message previously read "Cound not execute".
            raise DistutilsError("Could not execute rpmbuild. Make sure "
                                 "it is installed and in your PATH")

    mkpath(self.dist_dir)

    # build command: to get the build_base
    cmdbuild = self.get_finalized_command("build")
    cmdbuild.verbose = self.verbose
    self.build_base = cmdbuild.build_base
    self._populate_rpm_topdir(self.rpm_base)

    for name, rpm_spec in self.rpm_specs.items():
        self._prepare_distribution(name)
        self._create_rpm(rpm_name=name, spec=rpm_spec)

    if not self.keep_temp:
        remove_tree(self.build_base, dry_run=self.dry_run)
def run(self):
    """Extended clean: collects dist dirs, egg metadata, the virtualenv
    and __pycache__ directories (per the command's flags) and removes them."""
    _CleanCommand.run(self)
    # Accumulate in a set so the same directory is only removed once.
    dir_names = set()
    if self.dist:
        # Ask every *dist*-like command where it writes its output.
        for cmd_name, _ in self.distribution.get_command_list():
            if 'dist' in cmd_name:
                command = self.distribution.get_command_obj(cmd_name)
                command.ensure_finalized()
                if getattr(command, 'dist_dir', None):
                    dir_names.add(command.dist_dir)
    if self.eggs:
        for name in os.listdir(self.egg_base):
            if name.endswith('.egg-info'):
                dir_names.add(os.path.join(self.egg_base, name))
        for name in os.listdir(os.curdir):
            if name.endswith('.egg'):
                dir_names.add(name)
    if self.environment and self.virtualenv_dir:
        dir_names.add(self.virtualenv_dir)
    if self.pycache:
        for root, dirs, _ in os.walk(os.curdir):
            if '__pycache__' in dirs:
                dir_names.add(os.path.join(root, '__pycache__'))
    for dir_name in dir_names:
        if os.path.exists(dir_name):
            dir_util.remove_tree(dir_name, dry_run=self.dry_run)
        else:
            self.announce(
                'skipping {0} since it does not exist'.format(dir_name))
def run(self):
    """Extend the stock clean: also drop the egg-info metadata directory
    and walk the tree removing leftover build artifacts."""
    clean.run(self)
    egg_info_dir = os.path.join(here_dir, 'llvmlite.egg-info')
    # Remove generated package metadata, honoring --dry-run.
    if os.path.isdir(egg_info_dir):
        remove_tree(egg_info_dir, dry_run=self.dry_run)
    # The recursive cleanup actually deletes files, so skip it on --dry-run.
    if self.dry_run:
        return
    self._rm_walk()
def _check_dist(self):
    """Unpack the freshly built tarball into dist/distcheck and verify it
    can build, install and pass its test suite from the sdist alone."""
    # make sure the tarball builds
    assert self.get_archive_files()

    distcheck_dir = os.path.abspath(
        os.path.join(self.dist_dir, "distcheck"))
    # Start from a clean scratch area each run.
    if os.path.exists(distcheck_dir):
        dir_util.remove_tree(distcheck_dir)
    self.mkpath(distcheck_dir)

    archive = self.get_archive_files()[0]
    tfile = tarfile.open(archive, "r:gz")
    tfile.extractall(distcheck_dir)
    tfile.close()

    name = self.distribution.get_fullname()
    extract_dir = os.path.join(distcheck_dir, name)

    # Run the child setup.py from inside the extracted tree; always
    # restore the original working directory afterwards.
    old_pwd = os.getcwd()
    os.chdir(extract_dir)
    try:
        self.spawn([sys.executable, "setup.py", "build"])
        self.spawn([sys.executable, "setup.py", "install",
                    "--root",
                    os.path.join(distcheck_dir, "prefix"),
                    "--record",
                    os.path.join(distcheck_dir, "log.txt"),
                    ])
        self.spawn([sys.executable, "setup.py", "test"])
    finally:
        os.chdir(old_pwd)
def run(self):
    """Build the Sphinx docs, archive them as <name>-docs-<version>, and
    optionally GPG-sign the resulting archive."""
    # call build sphinx to build docs
    self.run_command("build_sphinx")
    cmd = self.get_finalized_command("build_sphinx")
    source_dir = cmd.builder_target_dir

    # copy to directory with appropriate name
    dist = self.distribution
    arc_name = "%s-docs-%s" % (dist.get_name(), dist.get_version())
    tmp_dir = os.path.join(self.build_dir, arc_name)
    if os.path.exists(tmp_dir):
        dir_util.remove_tree(tmp_dir, dry_run=self.dry_run)
    self.copy_tree(source_dir, tmp_dir, preserve_symlinks=True)

    # make archive from dir
    arc_base = os.path.join(self.dist_dir, arc_name)
    self.arc_filename = self.make_archive(arc_base, self.format,
                                          self.build_dir)

    # Sign if requested (detached ASCII-armored signature).
    if self.sign:
        gpg_args = ["gpg", "--detach-sign", "-a", self.arc_filename]
        if self.identity:
            # Insert the key selector right after "gpg".
            gpg_args[2:2] = ["--local-user", self.identity]
        spawn(gpg_args, dry_run=self.dry_run)

    # cleanup the staging copy unless --keep-temp was requested
    if not self.keep_temp:
        dir_util.remove_tree(tmp_dir, dry_run=self.dry_run)
def repo_downloads(repo, releases, tags):
    """ finds matching download for each release. Creates one if not available

    For each release, looks for an uploaded asset named "<repo>.zip"; when
    missing, downloads the tag's zipball, normalizes the top-level directory
    name to the repo name, re-zips it and uploads it as a release asset.

    Returns:
        dict mapping version string -> browser download URL (or None when
        the asset could not be created).
    """
    downloads = {}
    # for release in repo.iter_releases():
    #     name = release.tag_name
    for vers, release in releases.items():
        download_url = None
        download_asset = None
        download_asset_name = repo.name + ".zip"
        for asset in release.iter_assets():
            if asset.name == download_asset_name:
                download_asset = asset
                break
        if not download_asset:
            # Create download... this will take a while
            _log.warning('Generating new release download zip for %s:%s' % (repo.name, vers))
            zip_url = tags[vers].zipball_url
            temp_dir = tempfile.mkdtemp()
            try:
                zip_dlfile = os.path.join(temp_dir, download_asset_name)
                _log.warning('downloading')
                download(zip_url, zip_dlfile)
                if os.path.exists(zip_dlfile):
                    _log.warning('extracting')
                    # outdir = extract(zip_dlfile)
                    outdir = os.path.splitext(zip_dlfile)[0]
                    subprocess.check_output(['/usr/bin/unzip', zip_dlfile, '-d', outdir])
                    contents = os.listdir(outdir)
                    _log.warning('renaming')
                    # GitHub zipballs wrap everything in a "<owner>-<repo>-<sha>"
                    # directory; rename it to the plain repo name.
                    if len(contents) == 1 and os.path.isdir(os.path.join(outdir, contents[0])):
                        innerdir = contents[0]
                        newdir = os.path.join(outdir, innerdir)
                        if innerdir != repo.name:
                            os.rename(newdir, os.path.join(outdir, repo.name))
                        outdir = os.path.join(outdir, repo.name)
                    # Move the raw download aside so the new zip can take its name.
                    os.rename(zip_dlfile, zip_dlfile + ".dl")
                    _log.warning('zipping')
                    zipdir(dirPath=outdir, zipFilePath=zip_dlfile,
                           includeDirInZip=True, excludeDotFiles=True)
                    if os.path.exists(zip_dlfile):
                        with open(zip_dlfile, 'rb') as assetfile:
                            _log.warning('uploading')
                            download_asset = release.upload_asset(
                                content_type='application/zip, application/octet-stream',
                                name=download_asset_name,
                                asset=assetfile)
                    _log.warning('Finished new release download zip for %s:%s' % (repo.name, vers))
            except Exception:
                # Was a bare `except:` -- that also swallowed SystemExit and
                # KeyboardInterrupt. Keep the best-effort behavior but only
                # for real errors, and keep logging the failure.
                _log.exception("zip_url: %s" % zip_url)
            finally:
                remove_tree(temp_dir)
        if download_asset:
            download_url = download_asset.browser_download_url
        downloads[vers] = download_url
    return downloads
def setup_python3():
    """Taken from "distribute" setup.py.

    Copies the MANIFEST.in file set into build/src, runs 2to3 on the
    copied .py files, prepends build/src to sys.path, and returns the
    build/src path.
    """
    from distutils.filelist import FileList
    from distutils import dir_util, file_util, util
    from os.path import join, exists

    tmp_src = join("build", "src")
    # Not covered by "setup.py clean --all", so explicit deletion required.
    if exists(tmp_src):
        dir_util.remove_tree(tmp_src)
    # log.set_verbosity(1)
    fl = FileList()
    # Use a context manager so the manifest handle is closed (the
    # original `for line in open(...)` leaked the file object).
    with open("MANIFEST.in") as manifest:
        for line in manifest:
            if not line.strip():
                continue
            fl.process_template_line(line)
    dir_util.create_tree(tmp_src, fl.files)
    outfiles_2to3 = []
    for f in fl.files:
        outf, copied = file_util.copy_file(f, join(tmp_src, f), update=1)
        if copied and outf.endswith(".py"):
            outfiles_2to3.append(outf)

    util.run_2to3(outfiles_2to3)

    # arrange setup to use the copy
    sys.path.insert(0, tmp_src)

    return tmp_src
def build(self, platform='windows'):
    """Build both Java dist trees and compile the NSIS installer.

    Note: the `platform` parameter is currently unused but kept for
    interface compatibility.
    """
    log('Building from %s' % self.build_root)

    # prepare "dist" directory
    log('Cleaning old directories...')
    if os.path.isdir(os.path.join(red5_root, 'dist.java5')):
        remove_tree(os.path.join(red5_root, 'dist.java5'))
    if os.path.isdir(os.path.join(red5_root, 'dist.java6')):
        remove_tree(os.path.join(red5_root, 'dist.java6'))

    log('Compiling Java 1.5 version...')
    self.compile(self.ant_cmd, os.path.join(red5_root, 'build.xml'),
                 '1.5', 'dist-installer')
    os.renames(os.path.join(red5_root, '.', 'dist'),
               os.path.join(red5_root, '.', 'dist.java5'))

    log('Compiling Java 1.6 version...')
    os.environ['JAVACMD'] = os.path.join(JAVA6_HOME, 'bin', 'java.exe')
    self.compile(self.ant_cmd, os.path.join(red5_root, 'build.xml'),
                 '1.6', 'dist-installer')
    os.renames(os.path.join(red5_root, '.', 'dist'),
               os.path.join(red5_root, '.', 'dist.java6'))

    # build installer
    script = os.path.join(self.build_root, 'red5.nsi')
    cmd = NSIS_CMD
    # Quote paths containing spaces so the shell parses them as one token.
    if ' ' in cmd and not cmd[:1] == '"':
        cmd = '"' + cmd + '"'
    if ' ' in script and not script[:1] == '"':
        script = '"' + script + '"'
    log('Compiling installer, this may take some time...')
    # BUG FIX: os.system() takes a single command string; the original
    # called os.system(cmd, script), which raises TypeError.
    os.system('%s %s' % (cmd, script))
    log('Installer written')
def native_image_layout(dists, subdir, native_image_root, debug_gr_8964=False):
    """Populate <native_image_root>/<subdir> with symlinks (or copies) to
    the jar and sources-jar of every distribution in `dists`.

    With debug_gr_8964=True, emits verbose stat logging for GR-8964 debugging.
    No-op when `dists` is empty.
    """
    if not dists:
        return
    dest_path = join(native_image_root, subdir)
    # Cleanup leftovers from previous call
    if exists(dest_path):
        if debug_gr_8964:
            mx.log('[mx_substratevm.native_image_layout: remove_tree: ' + dest_path + ']')
        remove_tree(dest_path)
    mkpath(dest_path)

    # Create symlinks to conform with native-image directory layout scheme
    def symlink_jar(jar_path):
        # Link one jar into dest_path; in debug mode, log mode/mtime of
        # source and destination around the operation.
        if debug_gr_8964:
            def log_stat(prefix, file_name):
                file_stat = os.stat(file_name)
                mx.log(' ' + prefix + '.st_mode: ' + oct(file_stat.st_mode))
                mx.log(' ' + prefix + '.st_mtime: ' +
                       time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(file_stat.st_mtime)))
            dest_jar = join(dest_path, basename(jar_path))
            mx.log('[mx_substratevm.native_image_layout.symlink_jar: symlink_or_copy')
            mx.log(' src: ' + jar_path)
            log_stat('src', jar_path)
            mx.log(' dst: ' + dest_jar)
            symlink_or_copy(jar_path, dest_jar, debug_gr_8964)
            log_stat('dst', dest_jar)
            mx.log(']')
        else:
            symlink_or_copy(jar_path, join(dest_path, basename(jar_path)), debug_gr_8964)

    for dist in dists:
        mx.logv('Add ' + type(dist).__name__ + ' ' + str(dist) + ' to ' + dest_path)
        symlink_jar(dist.path)
        # Base libraries ship no sources; skip missing sources jars too.
        if not dist.isBaseLibrary() and dist.sourcesPath:
            symlink_jar(dist.sourcesPath)
def test_posix_dirs_inaccessible(self):
    # test if new dir is created if both implicit dirs are not valid
    tmpdir = tempfile.mkdtemp()
    try:
        d_dir = catalog.default_dir_posix(tmpdir)
        try:
            # Make the first default dir unusable (no permissions).
            os.chmod(d_dir, 0o000)
        except OSError:
            raise KnownFailureTest("Can't change permissions of default_dir.")
        d_dir2 = catalog.default_dir_posix(tmpdir)
        try:
            os.chmod(d_dir2, 0o000)
        except OSError:
            raise KnownFailureTest("Can't change permissions of default_dir.")
        # With both prior dirs inaccessible, a fresh one must be created...
        new_ddir = catalog.default_dir_posix(tmpdir)
        assert_(not (os.path.samefile(new_ddir, d_dir) or
                     os.path.samefile(new_ddir, d_dir2)))
        # ...and subsequent calls must reuse that same new directory.
        new_ddir2 = catalog.default_dir_posix(tmpdir)
        assert_(os.path.samefile(new_ddir, new_ddir2))
    finally:
        # NOTE(review): if the very first default_dir_posix() call raises,
        # d_dir/d_dir2 are unbound and this finally block itself NameErrors,
        # masking the original failure -- worth confirming/guarding.
        os.chmod(d_dir, 0o700)
        os.chmod(d_dir2, 0o700)
        remove_tree(tmpdir)
def main(base_dir): opts = parse_args() #for old_dir in glob(os.path.join(base_dir, '*/')): # if os.path.exists(old_dir): # remove_tree(old_dir) # print "Removing", old_dir for (zip_filename, unzip_dir, dest_dir) in lib_infos: zip_filename = os.path.join(base_dir, zip_filename) assert(dest_dir.strip(' /\\.') != '') dest_dir = os.path.join(base_dir, dest_dir) #Remove the old if os.path.exists(dest_dir): print "Removing", dest_dir remove_tree(dest_dir) if not opts.remove: if unzip_dir is not None: unzip(zip_filename, base_dir) unzip_dir = os.path.join(base_dir, unzip_dir) print "Moving %s to %s" % (unzip_dir, dest_dir) os.rename(unzip_dir, dest_dir) else: os.makedirs(dest_dir) unzip(zip_filename, dest_dir)
def run(self):
    """Remove generated artifacts: the compiled extension module, the
    generated wrapper source, and the build/dist trees, then delegate
    to walkAndClean for the rest."""
    # Plain files are unlinked; directories are removed recursively.
    compiled_module = pjoin('.', module_name + '.so')
    for stale_file in (compiled_module, './wrapper.cpp'):
        if os.path.exists(stale_file):
            os.remove(stale_file)
    for stale_tree in ('./build', './dist'):
        if os.path.exists(stale_tree):
            remove_tree(stale_tree)
    self.walkAndClean()
def clean():
    """
    Cleanup build directory and temporary files.
    """
    info("Clean - checking for objects to clean")

    if os.path.isdir("build"):
        info("Clean - removing build/")
        remove_tree("build", verbose=1)

    files = [
        "pyslurm/__init__.pyc",
        "pyslurm/bluegene.pxi",
        "pyslurm/pyslurm.c",
        "pyslurm/pyslurm.so",
    ]

    # Renamed loop variable from `file` (shadowed the builtin) to `path`.
    for path in files:
        if os.path.exists(path):
            if os.path.isfile(path):
                try:
                    info("Clean - removing %s" % path)
                    os.unlink(path)
                except OSError:
                    # Was a bare `except:`; only filesystem errors are
                    # expected here, anything else should propagate.
                    fatal("Clean - failed to remove %s" % path)
                    sys.exit(-1)
            else:
                fatal("Clean - %s is not a file !" % path)
                sys.exit(-1)

    info("Clean - completed")
def make_distribution(self): """Create the source distribution(s). First, we create the release tree with 'make_release_tree()'; then, we create all required archive files (according to 'self.formats') from the release tree. Finally, we clean up by blowing away the release tree (unless 'self.keep_temp' is true). The list of archive files created is stored so it can be retrieved later by 'get_archive_files()'. """ # Don't warn about missing meta-data here -- should be (and is!) # done elsewhere. base_dir = self.distribution.get_fullname() base_name = os.path.join(self.dist_dir, base_dir) self.make_release_tree(base_dir, self.filelist.files) archive_files = [] # remember names of files we create # tar archive must be created last to avoid overwrite and remove if "tar" in self.formats: self.formats.append(self.formats.pop(self.formats.index("tar"))) for fmt in self.formats: file = self.make_archive(base_name, fmt, base_dir=base_dir) archive_files.append(file) self.distribution.dist_files.append(("sdist", "", file)) self.archive_files = archive_files if not self.keep_temp: dir_util.remove_tree(base_dir, dry_run=self.dry_run)
def copy_sources():
    """Copy the C sources into the source directory.

    This rearranges the source files under the python distribution
    directory.
    """
    # Start from a clean ./src (it may not exist yet -- that's fine).
    try:
        dir_util.remove_tree("src/")
    except (IOError, OSError):
        pass

    # Mirror the native source trees.
    dir_util.copy_tree("../../arch", "src/arch/")
    dir_util.copy_tree("../../include", "src/include/")
    dir_util.copy_tree("../../msvc/headers", "src/msvc/headers")

    # Gather the loose top-level files that must ship with the package.
    patterns = (
        "../../*.[ch]",
        "../../*.mk",
        "../../Makefile",
        "../../LICENSE*",
        "../../README",
        "../../*.TXT",
        "../../RELEASE_NOTES",
        "../../make.sh",
        "../../CMakeLists.txt",
    )
    src = [name for pattern in patterns for name in glob.glob(pattern)]

    for filename in src:
        outpath = os.path.join("./src/", os.path.basename(filename))
        log.info("%s -> %s" % (filename, outpath))
        shutil.copy(filename, outpath)
def run(self):
    """Remove build byproducts: always the temp directory; with --all
    also the lib, bdist and scripts directories; finally try to drop
    the (possibly now empty) base build directory."""

    def _remove_dir(path, report_missing):
        # Remove a tree if present, otherwise log via the given function.
        if os.path.exists(path):
            remove_tree(path, dry_run=self.dry_run)
        else:
            report_missing("'%s' does not exist -- can't clean it", path)

    # remove the build/temp.<plat> directory (unless it's already gone)
    _remove_dir(self.build_temp, log.debug)

    if self.all:
        # remove build directories
        for directory in (self.build_lib, self.bdist_base, self.build_scripts):
            _remove_dir(directory, log.warn)

    # just for the heck of it, try to remove the base build directory:
    # we might have emptied it right now, but if not we don't care
    if not self.dry_run:
        try:
            os.rmdir(self.build_base)
            log.info("removing '%s'", self.build_base)
        except OSError:
            pass
def run(self):
    """Purge macOS droppings and compiled bytecode, remove the configured
    directory trees, then delete the generated packaging files."""
    # Shell out for the recursive file deletions.
    for command in ('find . -iname .ds_store -print -delete',
                    'find . -name \*.pyc -print -delete'):
        system(command)
    # Remove whole directory trees registered on this command.
    for tree in self.trees:
        if isdir(tree):
            remove_tree(tree)
    system('rm -rf MANIFEST.in PKG')
def _check_dist(self):
    """Extract the built tarball into dist/distcheck and verify it can
    run its tests, build, build docs, and install from the sdist alone."""
    assert self.get_archive_files()

    distcheck_dir = os.path.join(self.dist_dir, "distcheck")
    # Fresh scratch directory for every check.
    if os.path.exists(distcheck_dir):
        dir_util.remove_tree(distcheck_dir)
    self.mkpath(distcheck_dir)

    archive = self.get_archive_files()[0]
    tfile = tarfile.open(archive, "r:gz")
    tfile.extractall(distcheck_dir)
    tfile.close()

    name = self.distribution.get_fullname()
    extract_dir = os.path.join(distcheck_dir, name)

    old_pwd = os.getcwd()
    os.chdir(extract_dir)
    self.spawn([sys.executable, "setup.py", "test"])
    self.spawn([sys.executable, "setup.py", "build"])
    self.spawn([sys.executable, "setup.py", "build_sphinx"])
    self.spawn([sys.executable, "setup.py", "install",
                "--prefix", "../prefix", "--record", "../log.txt"])
    # NOTE(review): mutates the parent process environment permanently,
    # and unlike the sibling _check_dist there is no try/finally guarding
    # the chdir -- a failing spawn leaves the cwd changed.
    os.environ["LC_ALL"] = "C"
    self.spawn([sys.executable, "setup.py", "test", "--quick"])
    os.chdir(old_pwd)
def run(self):
    # NOTE: Python 2 code (`raise Exc, "msg"` syntax).
    # bdist_dumb-style run: install into a staging root, then archive it.
    if not self.skip_build:
        self.run_command('build')

    # Re-run 'install' redirected into the staging directory.
    install = self.reinitialize_command('install', reinit_subcommands=1)
    install.root = self.bdist_dir
    install.skip_build = self.skip_build
    install.warn_dir = 0

    log.info('installing to %s' % self.bdist_dir)
    self.run_command('install')

    archive_basename = '%s.%s' % (self.distribution.get_fullname(), self.plat_name)

    # OS/2 objects to any ':' characters in a filename (such as when
    # a timestamp is used in a version).
    if os.name == 'os2':
        archive_basename = archive_basename.replace(':', '-')

    pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
    if not self.relative:
        archive_root = self.bdist_dir
    elif self.distribution.has_ext_modules() and install.install_base != install.install_platbase:
        # Relative layout is impossible when pure and platform files
        # install under different prefixes.
        raise DistutilsPlatformError, "can't make a dumb built distribution where base and platbase are different (%s, %s)" % (repr(install.install_base), repr(install.install_platbase))
    else:
        archive_root = os.path.join(self.bdist_dir, ensure_relative(install.install_base))

    # Make the archive of the staged tree.
    filename = self.make_archive(pseudoinstall_root, self.format,
                                 root_dir=archive_root,
                                 owner=self.owner, group=self.group)
    # Distributions with extensions are tied to one Python version.
    if self.distribution.has_ext_modules():
        pyversion = get_python_version()
    else:
        pyversion = 'any'
    self.distribution.dist_files.append(('bdist_dumb', pyversion, filename))

    if not self.keep_temp:
        remove_tree(self.bdist_dir, dry_run=self.dry_run)
def run(self):
    # NOTE: Python 2 code (print statement; relies on eager map()).
    # Delete files matching the junk patterns anywhere under cwd, then
    # remove the generated dist/build/doc/egg-info/deb_dist directories.
    patterns = ["MANIFEST", "*~", "*flymake*", "*.pyc", "*.h"]
    cwd = os.getcwd()
    import fnmatch
    for path, subdirs, files in os.walk(cwd):
        for pattern in patterns:
            matches = fnmatch.filter(files, pattern)
            if matches:
                def delete(p):
                    print "removing %s" % p
                    os.remove(p)
                # NOTE(review): under Python 3, map() is lazy and nothing
                # would be deleted -- this function is Python 2 only.
                map(delete, [os.path.join(path, m) for m in matches])

    if os.path.exists("dist"):
        dir_util.remove_tree("dist")
    if os.path.exists("build"):
        dir_util.remove_tree("build")
    if os.path.exists(DOC_BUILD_PATH):
        dir_util.remove_tree(DOC_BUILD_PATH)

    # .egg info
    egg_info_dir = "bauble.egg-info"
    if os.path.exists(egg_info_dir):
        dir_util.remove_tree(egg_info_dir)

    # deb_dist - used by stdeb
    deb_dist = "deb_dist"
    if os.path.exists(deb_dist):
        dir_util.remove_tree(deb_dist)
def run(self):
    """Delete every path matched by the include patterns, except those
    under an excluded path; subpaths of an already-deleted tree are
    skipped rather than deleted twice."""
    #log.info("include_paths:\n%s", '\n'.join(self.included_paths))
    dirs = []
    for pat in self.included_paths:
        dirs += locate(normcase(pat), '.')
    if self.distribution.clean_exclude_paths:
        self.excluded_paths += map(abspath, self.distribution.clean_exclude_paths)
    # Reverse sort + pop() processes paths in ascending order, so a parent
    # is always seen before its children.
    dirs.sort(reverse=True)
    filtered_dirs = []
    while dirs:
        x = dirs.pop()
        if x not in self.excluded_paths:
            # for/else: only keep x if no exclude is a prefix of it.
            for exclude in self.excluded_paths:
                if x.startswith(exclude):
                    break
            else:
                filtered_dirs.append(x)
                # don't delete subdirectories of things we already deleted
                dirs = [d for d in dirs if not d.startswith(x)]
    log.info("removing:\n%s", '\n'.join(filtered_dirs))
    if not self.dry_run:
        for p in filtered_dirs:
            if os.path.isdir(p):
                remove_tree(p)
            else:
                os.remove(p)
    else:
        log.info("Dry Run!")
def run(self):
    """Run the regular clean, then optionally drop the copied C sources."""
    clean.run(self)
    if not self.sources:
        return
    if not os.path.exists('src'):
        log.info("'%s' does not exist -- can't clean it", 'src')
        return
    remove_tree('src', dry_run=self.dry_run)
def create_app():
    """Scaffold a new game app in the current directory from the engine's
    app_template, prompting before overwriting an existing scaffold, and
    create default settings/logging config files when missing."""
    cwd = os.getcwd()
    game_logic_path = os.path.join(cwd, 'game_logic')
    game_app_interface = os.path.join(cwd, 'game_app.py')
    app_template = os.path.join(cwd, 'engine', 'app_template')
    _game_logic_path_exists = os.path.exists(game_logic_path)
    _game_app_interface_exists = os.path.exists(game_app_interface)
    if _game_logic_path_exists or _game_app_interface_exists:
        # Destructive path: ask the user before wiping the old scaffold.
        answer = input(
            'game_app.py or game_logic module already exists. Continue? (y/n). ' +
            '\nWARNING: This will remove all contents of game_logic module, use at your own risk:'.upper()
        )
        if answer == 'y':
            if _game_app_interface_exists:
                os.remove(game_app_interface)
            if _game_logic_path_exists:
                remove_tree(game_logic_path)
            _copy_function(app_template, cwd)
        # any other answer: leave the existing scaffold untouched
    else:
        _copy_function(app_template, cwd)
    # Seed config files from their templates only if absent.
    if not os.path.exists('settings.yaml'):
        shutil.copy2('settings.yaml.template', 'settings.yaml')
    if not os.path.exists('logging.yaml'):
        shutil.copy2('logging.yaml.template', 'logging.yaml')
def run(self):
    """With --all, also remove the generated documentation directory,
    then delegate to the stock clean command."""
    if self.all and os.path.exists(self.build_doc):
        remove_tree(self.build_doc, dry_run=self.dry_run)
    elif self.all:
        log.debug("%s doesn't exist -- can't clean it", self.build_doc)
    _clean.run(self)
def cinterfacetutorial(native_image, args=None):
    """Build and run the tutorial for the C interface"""
    args = [] if args is None else args
    tutorial_proj = mx.dependency('com.oracle.svm.tutorial')
    cSourceDir = join(tutorial_proj.dir, 'native')
    buildDir = join(svmbuild_dir(), tutorial_proj.name, 'build')

    # clean / create output directory
    if exists(buildDir):
        remove_tree(buildDir)
    mkpath(buildDir)

    # Build the shared library from Java code
    native_image(['--shared', '-H:Path=' + buildDir, '-H:Name=libcinterfacetutorial',
                  '-H:CLibraryPath=' + tutorial_proj.dir,
                  '-cp', tutorial_proj.output_dir()] + args)

    # Build the C executable, linking against the freshly built library
    # and baking buildDir into the rpath so it is found at run time.
    mx.run(['cc', '-g', join(cSourceDir, 'cinterfacetutorial.c'),
            '-I' + buildDir,
            '-L' + buildDir, '-lcinterfacetutorial',
            '-ldl', '-Wl,-rpath,' + buildDir,
            '-o', join(buildDir, 'cinterfacetutorial')])

    # Start the C executable
    mx.run([buildDir + '/cinterfacetutorial'])
def install_offline(lib_name):
    """Install `lib_name` from a local .tar.gz archive found in `p_library`.

    Extracts the archive, runs its setup.py install, removes the extracted
    folder, and records the library in `library_err` when it cannot be found.
    """
    global library_err
    print('\n~~~ Installing `{}` from tar files ~~~'.format(lib_name))
    if not p_library:
        print('\n~~~ Cannot find `%s`! You MUST install it manually! ~~~\n' % (lib_name + '*.tar.gz'))
        library_err.append(lib_name)
        return

    # Context manager guarantees the archive handle is closed even if
    # extraction fails (original used open/close with a manual `del`).
    with tarfile.open(p_library[0]) as archive:
        print('Extracting `{}`...\nThis might take some time...\n'.format(p_library[0]))
        # The first member's top-level component is the extraction root.
        p_library_root = archive.getnames()[0].split(os.sep)[0]
        archive.extractall()

    # Install library
    tcr_proc = subprocess.Popen(
        [PYTHON_EXE, '-u', (cwd_path + os.sep + p_library_root + '/setup.py'), 'install', '-f'],
        cwd=cwd_path + os.sep + p_library_root)
    tcr_proc.wait()

    # Remove library folder (best effort; was a bare `except:` which also
    # swallowed SystemExit/KeyboardInterrupt).
    try:
        dir_util.remove_tree(cwd_path + os.sep + p_library_root)
    except Exception:
        pass

    if tcr_proc.returncode:
        # Fixed: the original referenced `import_name` here, which is not
        # defined in this function (the success branch uses lib_name).
        print('\n~~~ `%s` cannot be installed! It MUST be installed manually! ~~~\n' % lib_name)
    else:
        print('\n~~~ Successfully installed `{}` ~~~\n'.format(lib_name))
def make_distribution(self):
    """Create the commercial source distributions

    Builds the release tree, injects docs and commercial-license
    replacements, archives it in each requested format, and removes
    the tree unless --keep-temp was given.
    """
    dist_name = self.distribution.get_fullname()
    pkg_dir = os.path.join(self.dist_dir, dist_name)

    self.add_docs(os.path.join(pkg_dir, 'docs'))
    self.make_release_tree(pkg_dir, self.filelist.files)
    # Swap in commercial license headers / strip GPL text.
    self._prepare_commercial(pkg_dir, self.filelist.files)
    self._replace_gpl(pkg_dir, self.filelist.files)

    self.archive_files = []
    # plain tar must run last or the compressed formats would
    # overwrite / remove it.
    if 'tar' in self.formats:
        self.formats.append(self.formats.pop(self.formats.index('tar')))

    for fmt in self.formats:
        afile = self.make_archive(pkg_dir, fmt,
                                  root_dir=self.dist_dir,
                                  base_dir=dist_name)
        self.archive_files.append(afile)
        self.distribution.dist_files.append(('sdist_com', '', afile))

    if not self.keep_temp:
        remove_tree(pkg_dir, dry_run=self.dry_run)
print("Starting dist.\n") VERSION = __import__('dxlbootstrap').get_version() RELEASE_NAME = "dxlbootstrap-python-dist-" + str(VERSION) DIST_PY_FILE_LOCATION = os.path.dirname(os.path.realpath(__file__)) DIST_DIRECTORY = os.path.join(DIST_PY_FILE_LOCATION, "dist") DIST_DOCTMP_DIR = os.path.join(DIST_DIRECTORY, "doctmp") SETUP_PY = os.path.join(DIST_PY_FILE_LOCATION, "setup.py") DIST_LIB_DIRECTORY = os.path.join(DIST_DIRECTORY, "lib") DIST_RELEASE_DIR = os.path.join(DIST_DIRECTORY, RELEASE_NAME) # Remove the dist directory if it exists if os.path.exists(DIST_DIRECTORY): print("\nRemoving dist directory: " + DIST_DIRECTORY + "\n") remove_tree(DIST_DIRECTORY, verbose=1) # Make the dist directory print("\nMaking dist directory: " + DIST_DIRECTORY + "\n") os.makedirs(DIST_DIRECTORY) # Call Sphinx to create API doc print("\nCalling sphinx-apidoc\n") subprocess.check_call([ "sphinx-apidoc", "--force", "--separate", "--no-toc", "--output-dir=" + DIST_DOCTMP_DIR, os.path.join(DIST_PY_FILE_LOCATION, "dxlbootstrap") ]) # Delete generate files for f in os.listdir(DIST_DOCTMP_DIR):
def run(self):
    # NOTE: Python 2 code (string.upper, sys.version slicing).
    # bdist_wininst-style run: stage an install, zip it, wrap in an exe.

    # Binary installers can only be produced on the target platform.
    if (sys.platform != "win32" and
            (self.distribution.has_ext_modules() or
             self.distribution.has_c_libraries())):
        raise DistutilsPlatformError \
            ("distribution contains extensions and/or C libraries; "
             "must be compiled on a Windows 32 platform")

    if not self.skip_build:
        self.run_command('build')

    # Redirect 'install' into the staging directory.
    install = self.reinitialize_command('install', reinit_subcommands=1)
    install.root = self.bdist_dir
    install.skip_build = self.skip_build
    install.warn_dir = 0
    install.plat_name = self.plat_name

    install_lib = self.reinitialize_command('install_lib')
    # we do not want to include pyc or pyo files
    install_lib.compile = 0
    install_lib.optimize = 0

    if self.distribution.has_ext_modules():
        # If we are building an installer for a Python version other
        # than the one we are currently running, then we need to ensure
        # our build_lib reflects the other Python version rather than ours.
        # Note that for target_version!=sys.version, we must have skipped the
        # build step, so there is no issue with enforcing the build of this
        # version.
        target_version = self.target_version
        if not target_version:
            assert self.skip_build, "Should have already checked this"
            target_version = sys.version[0:3]
        plat_specifier = ".%s-%s" % (self.plat_name, target_version)
        build = self.get_finalized_command('build')
        build.build_lib = os.path.join(build.build_base, 'lib' + plat_specifier)

    # Use a custom scheme for the zip-file, because we have to decide
    # at installation time which scheme to use.
    for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
        value = string.upper(key)
        if key == 'headers':
            value = value + '/Include/$dist_name'
        setattr(install, 'install_' + key, value)

    log.info("installing to %s", self.bdist_dir)
    install.ensure_finalized()

    # avoid warning of 'install_lib' about installing
    # into a directory not in sys.path
    sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))

    install.run()

    del sys.path[0]

    # And make an archive relative to the root of the
    # pseudo-installation tree.
    from tempfile import mktemp
    # NOTE(review): mktemp() is documented as insecure (race between name
    # generation and use); retained here unchanged.
    archive_basename = mktemp()
    fullname = self.distribution.get_fullname()
    arcname = self.make_archive(archive_basename, "zip", root_dir=self.bdist_dir)
    # create an exe containing the zip-file
    self.create_exe(arcname, fullname, self.bitmap)
    if self.distribution.has_ext_modules():
        pyversion = get_python_version()
    else:
        pyversion = 'any'
    self.distribution.dist_files.append(
        ('bdist_wininst', pyversion, self.get_installer_filename(fullname)))
    # remove the zip-file again
    log.debug("removing temporary file '%s'", arcname)
    os.remove(arcname)

    if not self.keep_temp:
        remove_tree(self.bdist_dir, dry_run=self.dry_run)
# Parse the custom --trade/--all flags and strip them from sys.argv so
# distutils does not choke on unknown options.
if '--trade' in sys.argv:
    pkg.append('hft.trade')
    pkg_data['hft.trade'] = ['trade.dll']
    sys.argv.remove('--trade')
if '--all' in sys.argv:
    pkg.append('hft.trade')
    pkg.append('hft.market')
    pkg_data['hft.trade'] = ['trade.dll']
    pkg_data['hft.market'] = ['market.dll']
    sys.argv.remove('--all')

# pre-clean: remove each stale output directory independently. The
# original wrapped all three removals in one try/bare-except, so the
# first missing directory aborted cleaning of the remaining ones (and
# the bare except also swallowed SystemExit/KeyboardInterrupt).
for _stale_dir in ('build', 'dist', 'hft.egg-info'):
    try:
        dir_util.remove_tree(_stale_dir)
    except OSError:
        print("not full clean or directories not exist")

setup(
    name="hft",
    version="2.0.0",
    packages=pkg,
    package_data=pkg_data,
    install_requires=['tornado', 'redis'],
    distclass=BinaryDistribution,
    cmdclass=dict(build_py=custom_build_pyc)
)
def clean_man(self):
    """Delete the generated man-page directory under the build base, if any."""
    generated_man = os.path.join(self.build_base, 'man')
    if not os.path.exists(generated_man):
        return
    remove_tree(generated_man)
VERSION) DIST_PY_FILE_LOCATION = os.path.dirname(os.path.realpath(__file__)) DIST_DIRECTORY = os.path.join(DIST_PY_FILE_LOCATION, "dist") DIST_CONFIG_DIRECTORY = os.path.join(DIST_DIRECTORY, "config") DIST_DOCTMP_DIR = os.path.join(DIST_DIRECTORY, "doctmp") SETUP_PY = os.path.join(DIST_PY_FILE_LOCATION, "setup.py") DIST_LIB_DIRECTORY = os.path.join(DIST_DIRECTORY, "lib") DIST_RELEASE_DIR = os.path.join(DIST_DIRECTORY, RELEASE_NAME) CONFIG_RELEASE_DIR = os.path.join(DIST_DIRECTORY, CONFIG_RELEASE_NAME) SAMPLE_RELEASE_DIR = os.path.join(DIST_DIRECTORY, "sample") # Remove the dist directory if it exists if os.path.exists(DIST_DIRECTORY): print("\nRemoving dist directory: " + DIST_DIRECTORY + "\n") remove_tree(DIST_DIRECTORY, verbose=1) print("\nMaking dist directory: " + DIST_DIRECTORY + "\n") os.makedirs(DIST_DIRECTORY) print("\nCalling sphinx-apidoc\n") subprocess.check_call([ "sphinx-apidoc", "--force", "--separate", "--no-toc", "--output-dir=" + DIST_DOCTMP_DIR, os.path.join(DIST_PY_FILE_LOCATION, "dxlelasticsearchservice") ]) print("\nCopying conf.py, docutils.conf, and sdk directory\n") copy_file(os.path.join(DIST_PY_FILE_LOCATION, "doc", "conf.py"), os.path.join(DIST_DOCTMP_DIR, "conf.py")) copy_file(os.path.join(DIST_PY_FILE_LOCATION, "doc", "docutils.conf"),
def run(self):
    # bdist_msi-style run: stage an install, then wrap it in an MSI
    # database with find-Python/files/scripts/UI tables.
    if not self.skip_build:
        self.run_command('build')

    # Redirect 'install' into the staging prefix.
    install = self.reinitialize_command('install', reinit_subcommands=1)
    install.prefix = self.bdist_dir
    install.skip_build = self.skip_build
    install.warn_dir = 0

    install_lib = self.reinitialize_command('install_lib')
    # we do not want to include pyc or pyo files
    install_lib.compile = 0
    install_lib.optimize = 0

    if self.distribution.has_ext_modules():
        # If we are building an installer for a Python version other
        # than the one we are currently running, then we need to ensure
        # our build_lib reflects the other Python version rather than ours.
        # Note that for target_version!=sys.version, we must have skipped the
        # build step, so there is no issue with enforcing the build of this
        # version.
        target_version = self.target_version
        if not target_version:
            assert self.skip_build, "Should have already checked this"
            target_version = '%d.%d' % sys.version_info[:2]
        plat_specifier = ".%s-%s" % (self.plat_name, target_version)
        build = self.get_finalized_command('build')
        build.build_lib = os.path.join(build.build_base, 'lib' + plat_specifier)

    log.info("installing to %s", self.bdist_dir)
    install.ensure_finalized()

    # avoid warning of 'install_lib' about installing
    # into a directory not in sys.path
    sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))

    install.run()

    del sys.path[0]

    self.mkpath(self.dist_dir)
    fullname = self.distribution.get_fullname()
    installer_name = self.get_installer_filename(fullname)
    installer_name = os.path.abspath(installer_name)
    # Overwrite any previous installer of the same name.
    if os.path.exists(installer_name):
        os.unlink(installer_name)

    metadata = self.distribution.metadata
    author = metadata.author
    if not author:
        author = metadata.maintainer
    if not author:
        author = "UNKNOWN"
    version = metadata.get_version()
    # ProductVersion must be strictly numeric
    # XXX need to deal with prerelease versions
    sversion = "%d.%d.%d" % StrictVersion(version).version
    # Prefix ProductName with Python x.y, so that
    # it sorts together with the other Python packages
    # in Add-Remove-Programs (APR)
    fullname = self.distribution.get_fullname()
    if self.target_version:
        product_name = "Python %s %s" % (self.target_version, fullname)
    else:
        product_name = "Python %s" % (fullname)
    self.db = msilib.init_database(installer_name, schema,
                                   product_name, msilib.gen_uuid(),
                                   sversion, author)
    msilib.add_tables(self.db, sequence)
    # Populate the MSI Property table from the package metadata.
    props = [('DistVersion', version)]
    email = metadata.author_email or metadata.maintainer_email
    if email:
        props.append(("ARPCONTACT", email))
    if metadata.url:
        props.append(("ARPURLINFOABOUT", metadata.url))
    if props:
        add_data(self.db, 'Property', props)

    self.add_find_python()
    self.add_files()
    self.add_scripts()
    self.add_ui()
    self.db.Commit()

    if hasattr(self.distribution, 'dist_files'):
        tup = 'bdist_msi', self.target_version or 'any', fullname
        self.distribution.dist_files.append(tup)

    if not self.keep_temp:
        remove_tree(self.bdist_dir, dry_run=self.dry_run)
def run(self):
    """Build a .egg distribution: stage an install into bdist_dir, create
    stub loaders for extension modules, assemble EGG-INFO metadata, and zip
    the staging tree into self.egg_output.
    """
    # Generate metadata first
    self.run_command("egg_info")
    # We run install_lib before install_data, because some data hacks
    # pull their data path from the install_lib command.
    log.info("installing library code to %s", self.bdist_dir)
    instcmd = self.get_finalized_command('install')
    # Temporarily neutralise --root so paths are computed for the staging dir.
    old_root = instcmd.root
    instcmd.root = None
    if self.distribution.has_c_libraries() and not self.skip_build:
        self.run_command('build_clib')
    cmd = self.call_command('install_lib', warn_dir=0)
    instcmd.root = old_root

    all_outputs, ext_outputs = self.get_ext_outputs()
    self.stubs = []
    to_compile = []
    # For every compiled extension, write a .py stub loader next to it so
    # the egg can bootstrap the shared object at import time.
    for (p, ext_name) in enumerate(ext_outputs):
        filename, ext = os.path.splitext(ext_name)
        pyfile = os.path.join(self.bdist_dir,
                              strip_module(filename) + '.py')
        self.stubs.append(pyfile)
        log.info("creating stub loader for %s", ext_name)
        if not self.dry_run:
            write_stub(os.path.basename(ext_name), pyfile)
        to_compile.append(pyfile)
        # Record the path with '/' separators, as used inside zip archives.
        ext_outputs[p] = ext_name.replace(os.sep, '/')

    if to_compile:
        cmd.byte_compile(to_compile)
    if self.distribution.data_files:
        self.do_install_data()

    # Make the EGG-INFO directory
    archive_root = self.bdist_dir
    egg_info = os.path.join(archive_root, 'EGG-INFO')
    self.mkpath(egg_info)
    if self.distribution.scripts:
        script_dir = os.path.join(egg_info, 'scripts')
        log.info("installing scripts to %s", script_dir)
        self.call_command('install_scripts',
                          install_dir=script_dir, no_ep=1)

    self.copy_metadata_to(egg_info)
    # native_libs.txt lists the extension modules shipped inside the egg;
    # keep it in sync (write when there are outputs, remove when not).
    native_libs = os.path.join(egg_info, "native_libs.txt")
    if all_outputs:
        log.info("writing %s", native_libs)
        if not self.dry_run:
            ensure_directory(native_libs)
            libs_file = open(native_libs, 'wt')
            libs_file.write('\n'.join(all_outputs))
            libs_file.write('\n')
            libs_file.close()
    elif os.path.isfile(native_libs):
        log.info("removing %s", native_libs)
        if not self.dry_run:
            os.unlink(native_libs)

    write_safety_flag(os.path.join(archive_root, 'EGG-INFO'),
                      self.zip_safe())

    if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
        log.warn(
            "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )

    if self.exclude_source_files:
        self.zap_pyfiles()

    # Make the archive
    make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                 dry_run=self.dry_run, mode=self.gen_header())
    if not self.keep_temp:
        remove_tree(self.bdist_dir, dry_run=self.dry_run)

    # Add to 'Distribution.dist_files' so that the "upload" command works
    getattr(self.distribution, 'dist_files', []).append(
        ('bdist_egg', get_python_version(), self.egg_output))
def _copy_init_tpl_files(self, type):
    """Copy an init template family ('docker_init' or 'tests_init') into
    the build path.

    Resolves templates from the installed docker_console package and from
    the user's ~/.docker_console custom directory, validates the --tpl
    option (custom templates take precedence), renders every '*-tpl' file
    in a temporary working copy, and installs the result into
    self.config.BUILD_PATH.

    Fix: the two 'missing_msg' user-facing error strings read
    "does not exists." — corrected to "does not exist.".

    :param type: template family key, 'docker_init' or 'tests_init'.
    """
    # Per-family configuration: directory names and user-facing messages.
    init_tpl = {
        'docker_init': {
            'default_init_tpl_dir': 'docker_init_templates',
            'custom_init_tpl_dir': 'custom_docker_init_templates',
            'info_msg': 'You have to specify docker init template using option --tpl',
            'available_msg': 'Available %s docker init templates: %s',
            'missing_msg': "Docker init template '%s' does not exist."
        },
        'tests_init': {
            'default_init_tpl_dir': 'tests_init_templates',
            'custom_init_tpl_dir': 'custom_tests_init_templates',
            'info_msg': 'You have to specify tests init template using option --tpl',
            'available_msg': 'Available %s tests init templates: %s',
            'missing_msg': "Tests init template '%s' does not exist."
        }
    }
    # Templates shipped with the installed docker_console package.
    default_init_tpl_path = os.path.join(
        os.path.dirname(sys.modules['docker_console'].__file__),
        init_tpl[type]['default_init_tpl_dir'])
    # User-provided templates under ~/.docker_console.
    custom_init_tpl_path = os.path.join(
        os.path.expanduser('~'), '.docker_console',
        init_tpl[type]['custom_init_tpl_dir'])
    all_default_templates = []
    all_custom_templates = []
    for item in os.listdir(default_init_tpl_path):
        if os.path.isdir(os.path.join(default_init_tpl_path, item)):
            all_default_templates.append(item)
    if os.path.exists(custom_init_tpl_path):
        for item in os.listdir(custom_init_tpl_path):
            if os.path.isdir(os.path.join(custom_init_tpl_path, item)):
                all_custom_templates.append(item)
    if not cmd_options.init_template:
        # No --tpl given: list the available templates and stop.
        # NOTE(review): exits with status 0 on this error path; kept as-is
        # for backward compatibility with existing wrapper scripts.
        message(init_tpl[type]['info_msg'], 'error')
        message(
            init_tpl[type]['available_msg']
            % ('default', ', '.join(all_default_templates)), 'error')
        if len(all_custom_templates):
            message(
                init_tpl[type]['available_msg']
                % ('custom', ', '.join(all_custom_templates)), 'error')
        exit(0)
    if cmd_options.init_template and cmd_options.init_template not in all_default_templates and cmd_options.init_template not in all_custom_templates:
        # Unknown template name: report it and list the valid choices.
        message(init_tpl[type]['missing_msg'] % cmd_options.init_template, 'error')
        message(
            init_tpl[type]['available_msg']
            % ('default', ', '.join(all_default_templates)), 'error')
        if len(all_custom_templates):
            message(
                init_tpl[type]['available_msg']
                % ('custom', ', '.join(all_custom_templates)), 'error')
        exit(0)
    # Custom templates shadow bundled defaults with the same name.
    if cmd_options.init_template in all_custom_templates:
        temp_path = create_dir_copy(
            os.path.join(custom_init_tpl_path, cmd_options.init_template))
    else:
        temp_path = create_dir_copy(
            os.path.join(default_init_tpl_path, cmd_options.init_template))
    # Render every '*-tpl' file in the working copy before installing it.
    for root, dirs, files in os.walk(temp_path):
        for name in files:
            if name.endswith('-tpl'):
                src = os.path.join(root, name)
                self._init_tpl_render(src)
    create_dir_copy(temp_path, self.config.BUILD_PATH,
                    cmd_options.docker_init_replace_conf)
    dir_util.remove_tree(temp_path)
def run(self): if os.path.exists(gen_dir): remove_tree(gen_dir, dry_run=self.dry_run) # remove tool if os.path.exists(gen_tool): os.remove(gen_tool)
def remove_directories(self): if os.path.exists(self.working_path): dir_util.remove_tree(self.working_path, 0)
def delete_all_data(self): """ delete all data json and media files in tmp """ dir_import = os.path.join(settings.TMP_PRJ_DIR, self.dir_extract) remove_tree(dir_import)
def removeBuild(): print "removing old build..." if os.path.isfile('build'): remove_tree('build')
def clean_pykeyvi_build_directory(): if os.path.exists(keyvi_build_dir): remove_tree(keyvi_build_dir)
# standard modules import os, sys # install update if user downloaded an update batch "FROZEN application only" if hasattr(sys, 'frozen'): # like if application frozen by cx_freeze current_directory = os.path.dirname(sys.executable) # Should copy contents of PyIDM_update_files folder and overwrite PyIDM original files update_batch_path = os.path.join(current_directory, 'PyIDM_update_files') if os.path.isdir(update_batch_path): from distutils.dir_util import copy_tree, remove_tree copy_tree(update_batch_path, current_directory) print('done installing updates') # delete folder remove_tree(update_batch_path) # This code should stay on top to handle relative imports in case of direct call of pyIDM.py if __package__ is None: path = os.path.realpath(os.path.abspath(__file__)) sys.path.insert(0, os.path.dirname(path)) sys.path.insert(0, os.path.dirname(os.path.dirname(path))) __package__ = 'pyidm' import pyidm # check and auto install external modules from .dependency import install_missing_pkgs install_missing_pkgs() # local modules
def __init__(self, options, log):
    """Lay out the application bundle for packaging.

    Creates the platform-specific directory structure (a .app bundle on
    osx, a flat directory elsewhere), then copies in the app metadata,
    boot executable, resources, installer, optional modules and runtime.
    Mobile platforms (iphone/android) are skipped entirely.

    :param options: packaging options object (platform, destination,
        appdir, runtime_dir, manifest, ...); mutated: sets options.mobile
        and options.executable.
    :param log: callable log(options, message).
    """
    self.options = options
    self.logger = log
    self.appname = options.manifest['appname']
    log(options,
        "Packaging application named: %s, version: %s"
        % (self.appname, options.version))
    options.mobile = (options.platform == 'iphone'
                      or options.platform == 'android')
    if not options.mobile:
        if options.platform == 'osx':
            # osx: standard <App>.app/Contents/{MacOS,Resources} layout.
            self.base_dir = os.path.join(options.destination,
                                         ('%s.app' % self.appname))
            if not os.path.exists(self.base_dir):
                os.makedirs(self.base_dir)
            self.contents_dir = os.path.join(self.base_dir, 'Contents')
            # Wipe any previous packaging output before rebuilding.
            if os.path.exists(self.contents_dir):
                dir_util.remove_tree(self.contents_dir)
            os.makedirs(self.contents_dir)
            self.executable_dir = os.path.join(self.contents_dir, 'MacOS')
            os.makedirs(self.executable_dir)
            self.resources_dir = os.path.join(self.contents_dir, 'Resources')
            os.makedirs(self.resources_dir)
            self.lproj = os.path.join(self.resources_dir, 'English.lproj')
            os.makedirs(self.lproj)
        else:
            # win32/linux: base, contents and executable dirs all coincide.
            self.base_dir = os.path.join(options.destination, self.appname)
            self.contents_dir = os.path.join(options.destination, self.appname)
            self.executable_dir = os.path.join(options.destination, self.appname)
            if os.path.exists(self.contents_dir):
                dir_util.remove_tree(self.contents_dir)
            os.makedirs(self.base_dir)
            self.resources_dir = os.path.join(self.contents_dir, 'Resources')
            os.makedirs(self.resources_dir)

        # copy the tiapp
        tiapp = os.path.join(options.appdir, 'tiapp.xml')
        shutil.copy(tiapp, self.contents_dir)

        # copy the manifest
        manifest = os.path.join(options.appdir, 'manifest')
        shutil.copy(manifest, self.contents_dir)

        # copy the boot (kboot launcher, renamed per platform)
        if options.platform == 'win32':
            kboot = os.path.join(options.runtime_dir, 'template', 'kboot.exe')
            options.executable = os.path.join(self.executable_dir, 'installer.exe')
            shutil.copy(kboot, options.executable)
        else:
            kboot = os.path.join(options.runtime_dir, 'template', 'kboot')
            options.executable = os.path.join(self.executable_dir, self.appname)
            shutil.copy(kboot, options.executable)
        if options.platform == 'osx':
            shutil.copy(os.path.join(options.assets_dir, 'titanium.icns'),
                        self.lproj)
            shutil.copy(
                os.path.join(options.runtime_dir, 'template', 'MainMenu.nib'),
                self.lproj)

        # copy in the resources
        rdir = os.path.join(options.appdir, 'Resources')
        dir_util.copy_tree(rdir, self.resources_dir, preserve_symlinks=True)

        # copy the installer (platform-specific net installer payload)
        if options.platform == 'osx':
            installer = os.path.join(self.contents_dir, 'installer',
                                     'Installer App.app')
            if not os.path.exists(installer):
                os.makedirs(installer)
            netinstaller = os.path.join(options.runtime_dir, 'installer',
                                        'Installer App.app')
            dir_util.copy_tree(netinstaller, installer, preserve_symlinks=True)
        elif options.platform == 'win32':
            installer = os.path.join(self.contents_dir, 'installer')
            if not os.path.exists(installer):
                os.makedirs(installer)
            netinstaller = os.path.join(options.runtime_dir, 'installer',
                                        'Installer.exe')
            shutil.copy(netinstaller, installer)
        elif options.platform == 'linux':
            installer = os.path.join(self.contents_dir, 'installer')
            if not os.path.exists(installer):
                os.makedirs(installer)
            netinstaller = os.path.join(options.runtime_dir, 'installer',
                                        'installer')
            shutil.copy(netinstaller, installer)

        # if selected, write in the .installed file
        # (presumably a marker that skips first-run installation — TODO confirm)
        if options.no_install:
            f = open(os.path.join(self.contents_dir, '.installed'), 'w')
            f.write("")
            f.close()
        else:
            # copy the license
            if options.license_file:
                f = open(os.path.join(self.contents_dir, 'LICENSE.txt'), 'w')
                f.write(open(options.license_file).read())
                f.close()

        # copy in modules (this will be empty if network and no local ones)
        for p in options.module_paths:
            log(options, "adding module: %s/%s" % (p['name'], p['version']))
            d = os.path.join(self.contents_dir, 'modules', p['name'])
            if os.path.exists(d):
                dir_util.remove_tree(d)
            os.makedirs(d)
            dir_util.copy_tree(p['path'], d, preserve_symlinks=True)

        # copy in the runtime if not network install
        if options.type != 'network':
            log(options, "adding runtime: %s" % options.runtime)
            rd = os.path.join(self.contents_dir, 'runtime')
            if os.path.exists(rd):
                dir_util.remove_tree(rd)
            os.makedirs(rd)
            dir_util.copy_tree(options.runtime_dir, rd, preserve_symlinks=True)

        if options.platform == 'osx':
            # Fill in the Info.plist template placeholders for this app.
            plist = open(os.path.join(options.assets_dir, 'Info.plist')).read()
            plist = plist.replace('APPNAME', self.appname)
            plist = plist.replace('APPEXE', self.appname)
            plist = plist.replace('APPICON', 'titanium.icns')
            plist = plist.replace('APPID', options.manifest['appid'])
            plist = plist.replace('APPNIB', 'MainMenu')
            plist = plist.replace('APPVER', str(options.version))
            out_file = open(os.path.join(self.contents_dir, 'Info.plist'), 'w')
            out_file.write(plist)
            out_file.close()
    else:
        # Mobile targets are packaged elsewhere; nothing to do here.
        pass
def remove(path): print("cleaning %s" % path) try: remove_tree(path) except OSError: print("folder not found")
from distutils.dir_util import remove_tree from setuptools import setup, Extension import pybind11 import os ext_modules = [ Extension(name='ColorPy', sources=['Color.cpp'], include_dirs=[ pybind11.get_include(), ], language='c++') ] setup(ext_modules=ext_modules) # delete build & temp directory file_paths = ['temp', 'build'] for file_path in file_paths: if os.path.isdir(file_path): remove_tree(file_path)
def remove_dir(self, dir_to_del): if os.path.exists(dir_to_del): dir_util.remove_tree(dir_to_del, dry_run=self.dry_run)
selected = raw_input('Are you sure you want to continue? (yes/no): ') if selected.strip().lower() not in ['y', 'yes']: print('\nPlease backup your data, then restart the installer.') print('Exiting.\n') exit(0) # Backup CONFIG folder for server if os.path.exists(cfg_path): print('\nBack-up config folder (from `{}` to `{}`)...'.format( cfg_path, tmp_config)) shutil.move(cfg_path, tmp_config) # Deleting previous versions of Twister try: dir_util.remove_tree(INSTALL_PATH) print('Removed folder `{}`.'.format(INSTALL_PATH)) except: print('Warning! Cannot delete Twister dir `{}` !'.format(INSTALL_PATH)) try: os.makedirs(INSTALL_PATH) print('Created folder `{}`.'.format(INSTALL_PATH)) except: print('Warning! Cannot create Twister dir `{}` !'.format(INSTALL_PATH)) else: # Twister client path INSTALL_PATH = userHome(user_name) + '/twister/' cfg_path = INSTALL_PATH + 'config/'
def remove_tree(self): from distutils.dir_util import remove_tree if self.isdir(): remove_tree(abs(self)) else: self.remove()
def clean_tmpdir(tmpdir): # print('Clean up %s' % tmpdir) dir_util.remove_tree(tmpdir)
def compile_cpp(use_mingw=False, use_gpu=False, use_cuda=False, use_mpi=False,
                use_hdfs=False, boost_root=None, boost_dir=None,
                boost_include_dir=None, boost_librarydir=None,
                opencl_include_dir=None, opencl_library=None, nomp=False,
                bit32=False, integrated_opencl=False):
    """Compile the native lightgbm library in a fresh build_cpp directory.

    Translates the keyword flags into CMake -D options, then drives the
    platform-appropriate toolchain: MinGW or MSBuild/Visual Studio on
    Windows, cmake + make elsewhere. Raises via silent_call(raise_error=True)
    with an installation hint when a toolchain step fails.

    Side effects: recreates <CURRENT_DIR>/build_cpp, chdirs into it for the
    build and back to CURRENT_DIR at the end.
    """
    # Start from a clean out-of-source build directory.
    if os.path.exists(os.path.join(CURRENT_DIR, "build_cpp")):
        remove_tree(os.path.join(CURRENT_DIR, "build_cpp"))
    os.makedirs(os.path.join(CURRENT_DIR, "build_cpp"))
    os.chdir(os.path.join(CURRENT_DIR, "build_cpp"))
    logger.info("Starting to compile the library.")
    cmake_cmd = ["cmake", "../compile/"]
    if integrated_opencl:
        # Integrated OpenCL supersedes the standalone GPU build.
        use_gpu = False
        cmake_cmd.append("-D__INTEGRATE_OPENCL=ON")
    if use_gpu:
        cmake_cmd.append("-DUSE_GPU=ON")
        # Boost/OpenCL hints are only meaningful for the GPU build.
        if boost_root:
            cmake_cmd.append("-DBOOST_ROOT={0}".format(boost_root))
        if boost_dir:
            cmake_cmd.append("-DBoost_DIR={0}".format(boost_dir))
        if boost_include_dir:
            cmake_cmd.append(
                "-DBoost_INCLUDE_DIR={0}".format(boost_include_dir))
        if boost_librarydir:
            cmake_cmd.append("-DBOOST_LIBRARYDIR={0}".format(boost_librarydir))
        if opencl_include_dir:
            cmake_cmd.append(
                "-DOpenCL_INCLUDE_DIR={0}".format(opencl_include_dir))
        if opencl_library:
            cmake_cmd.append("-DOpenCL_LIBRARY={0}".format(opencl_library))
    elif use_cuda:
        cmake_cmd.append("-DUSE_CUDA=ON")
    if use_mpi:
        cmake_cmd.append("-DUSE_MPI=ON")
    if nomp:
        cmake_cmd.append("-DUSE_OPENMP=OFF")
    if use_hdfs:
        cmake_cmd.append("-DUSE_HDFS=ON")
    if system() in {'Windows', 'Microsoft'}:
        if use_mingw:
            if use_mpi:
                raise Exception(
                    'MPI version cannot be compiled by MinGW due to the miss of MPI library in it'
                )
            logger.info("Starting to compile with CMake and MinGW.")
            silent_call(
                cmake_cmd + ["-G", "MinGW Makefiles"],
                raise_error=True,
                error_msg=
                'Please install CMake and all required dependencies first')
            silent_call(["mingw32-make.exe", "_lightgbm"],
                        raise_error=True,
                        error_msg='Please install MinGW first')
        else:
            status = 1
            lib_path = os.path.join(CURRENT_DIR, "compile", "windows", "x64",
                                    "DLL", "lib_lightgbm.dll")
            # Fast path: with no special flags, try the checked-in VS
            # solution against several platform toolsets.
            if not any(
                (use_gpu, use_mpi, use_hdfs, nomp, bit32, integrated_opencl)):
                logger.info(
                    "Starting to compile with MSBuild from existing solution file."
                )
                platform_toolsets = ("v142", "v141", "v140")
                for pt in platform_toolsets:
                    status = silent_call([
                        "MSBuild",
                        os.path.join(CURRENT_DIR, "compile", "windows",
                                     "LightGBM.sln"),
                        "/p:Configuration=DLL", "/p:Platform=x64",
                        "/p:PlatformToolset={0}".format(pt)
                    ])
                    if status == 0 and os.path.exists(lib_path):
                        break
                    else:
                        # Clean the failed toolset's output before retrying.
                        clear_path(
                            os.path.join(CURRENT_DIR, "compile", "windows",
                                         "x64"))
                if status != 0 or not os.path.exists(lib_path):
                    logger.warning(
                        "Compilation with MSBuild from existing solution file failed."
                    )
            # Fallback: generate a fresh project with CMake for each known
            # Visual Studio generator until one succeeds.
            if status != 0 or not os.path.exists(lib_path):
                arch = "Win32" if bit32 else "x64"
                vs_versions = ("Visual Studio 16 2019",
                               "Visual Studio 15 2017",
                               "Visual Studio 14 2015")
                for vs in vs_versions:
                    logger.info("Starting to compile with %s (%s).", vs, arch)
                    status = silent_call(cmake_cmd + ["-G", vs, "-A", arch])
                    if status == 0:
                        break
                    else:
                        clear_path(os.path.join(CURRENT_DIR, "build_cpp"))
                if status != 0:
                    raise Exception("\n".join((
                        'Please install Visual Studio or MS Build and all required dependencies first',
                        LOG_NOTICE)))
                silent_call([
                    "cmake", "--build", ".", "--target", "_lightgbm",
                    "--config", "Release"
                ],
                            raise_error=True,
                            error_msg='Please install CMake first')
    else:  # Linux, Darwin (macOS), etc.
        logger.info("Starting to compile with CMake.")
        silent_call(
            cmake_cmd,
            raise_error=True,
            error_msg='Please install CMake and all required dependencies first'
        )
        silent_call(
            ["make", "_lightgbm", "-j4"],
            raise_error=True,
            error_msg=
            'An error has occurred while building lightgbm library file')
    os.chdir(CURRENT_DIR)
from argparse import ArgumentParser from platform import system """ Freeze your controller into a binary executable. Documentation: `tdw/Documentation/misc_frontend/freeze.md` """ if __name__ == "__main__": root_dir = Path.home().joinpath("tdw_build") if not root_dir.exists(): root_dir.mkdir() # Remove an existing frozen controller. output_dir = root_dir.joinpath("tdw_controller") if output_dir.exists(): dir_util.remove_tree(str(output_dir.resolve())) output_dir.mkdir(parents=True) parser = ArgumentParser() parser.add_argument( "--controller", type=str, default="example_controllers/minimal.py", help="The relative path from this script to your controller. " "Example: example_controllers/minimal.py") args = parser.parse_args() controller = Path(args.controller) # Parse ~ as the home directory. if str(controller.resolve())[0] == "~": controller = Path.home().joinpath(str(controller.resolve())[2:]) if not controller.exists():
def setUp(self): #Before each test is run, remove the existing files in out subdirectory. try: remove_tree("out") except FileNotFoundError: print("No out/ dir to remove")
def create_project(cls, name, description, user, privacy='private', tags=None, user_token='', type='app', category='model'): """ Create a new project :param name: str :param description: str :param user_ID: ObjectId :param is_private: boolean :param type: string (app/module/dataset) :param tags: list of string :param user_token: string :return: a new created project object """ if tags is None: tags = [] user_ID = user.user_ID # user_path = os.path.join(USER_DIR, user_ID) # project_path = os.path.join(USER_DIR, user_ID, name) # generate project dir project_path = cls.gen_dir(user_ID, name) temp_path = cls.gen_dir(user_ID, uuid.uuid4().hex) # init git repo cls.init_git_repo(user_ID, name) # clone to project dir repo = cls.clone(user_ID, name, project_path) # create template to temp path cookiecutter(cat_dict[category], no_input=True, output_dir=temp_path, extra_context={ "author_name": user_ID, "module_name": name, "module_type": category, "module_description": description, }) # copy temp project to project dir and remove temp dir copy_tree(os.path.join(temp_path, name), project_path) remove_tree(temp_path) # add all repo.git.add(A=True) # initial commit repo.index.commit('Initial Commit') repo.remote(name='origin').push() # auth jupyterhub with user token res = cls.auth_hub_user(user_ID, name, user_token) # create a new project object create_time = datetime.utcnow() return cls.repo.create_one( name=name, description=description, create_time=create_time, update_time=create_time, type=type, tags=tags, hub_token=res.get('token'), path=project_path, user=user, privacy=privacy, category=category, repo_path=f'http://{GIT_SERVER_IP}/repos/{user_ID}/{name}')
print("\nSplitting\n------------------------------------") print("\nSplitting sensitive root files...") linux_sensitives = listdir(ECLIPSE_DISTRO_LINUX_DIR) win32_sensitives = listdir(ECLIPSE_DISTRO_WIN32_DIR) i = 0 printProgressBar(i, len(linux_sensitives) + len(win32_sensitives)) for f in linux_sensitives: path = join(ECLIPSE_DISTRO_LINUX_DIR, f) if isfile(path): move_file(join(ECLIPSE_DISTRO_LINUX_DIR, f), ECLIPSE_LINUX_DIR) elif path.endswith("configuration"): copy_tree( longpath_win32_prefix + join(ECLIPSE_DISTRO_LINUX_DIR, f), longpath_win32_prefix + ECLIPSE_LINUX_DIR + slash + "configuration") remove_tree(longpath_win32_prefix + join(ECLIPSE_DISTRO_LINUX_DIR, f)) elif path.endswith("dropins"): copy_tree( longpath_win32_prefix + join(ECLIPSE_DISTRO_LINUX_DIR, f), longpath_win32_prefix + ECLIPSE_LINUX_DIR + slash + "dropins") remove_tree(longpath_win32_prefix + join(ECLIPSE_DISTRO_LINUX_DIR, f)) elif path.endswith("p2"): copy_tree( longpath_win32_prefix + join(ECLIPSE_DISTRO_LINUX_DIR, f), longpath_win32_prefix + ECLIPSE_LINUX_DIR + slash + "p2") remove_tree(longpath_win32_prefix + join(ECLIPSE_DISTRO_LINUX_DIR, f)) i += 1 printProgressBar(i, len(linux_sensitives) + len(win32_sensitives))
def remove_file(f): if os.path.isdir(f): remove_tree(f, dry_run=self.dry_run) elif os.path.exists(f): log.info("Would remove '%s'", f)
def remove_temp_dir(msg): logger.info(msg) if os.path.isdir(tmp_dir): dir_util.remove_tree(tmp_dir)
def remove_file(f): if os.path.isdir(f): remove_tree(f, dry_run=self.dry_run) elif os.path.exists(f): log.info("Removing '%s'", f) os.remove(f)
def run(self): self.spawn(['xcodebuild']) remove_tree('dist/PlotDevice.app.dSYM') print "done building PlotDevice.app in ./dist"