def copy_metadata_to(self, target_dir):
    """Copy the egg-info metadata files into *target_dir*.

    Only files from ``self.ei_cmd.filelist`` that live under the egg-info
    directory are copied; the egg-info prefix is stripped so they land
    directly under *target_dir*.
    """
    # Normalize the path so that a forward-slash in egg_info will still
    # match using startswith below (the unfixed version missed files on
    # Windows when egg_info used '/' separators).
    norm_egg_info = os.path.normpath(self.egg_info)
    prefix = os.path.join(norm_egg_info, '')
    for path in self.ei_cmd.filelist.files:
        if path.startswith(prefix):
            target = os.path.join(target_dir, path[len(prefix):])
            ensure_directory(target)
            self.copy_file(path, target)
def unpack_zipfile(filename, extract_dir, ignores=None):
    """Unpack zip *filename* into *extract_dir*.

    Entries whose name contains any substring in *ignores* are skipped, as
    are absolute paths and names containing ``..``.  Unix permission bits
    stored in each entry are restored on extracted files (best effort).
    """
    # avoid the mutable-default-argument trap of the original `ignores=[]`
    if ignores is None:
        ignores = []
    with zipfile.ZipFile(filename) as z:
        for info in z.infolist():
            name = info.filename
            # upper 16 bits of external_attr carry the unix mode bits
            perm = (info.external_attr >> 16) & 0o777
            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue
            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue
            # The original `continue` only restarted the inner for-loop, so
            # the ignore list was silently a no-op; any() applies it properly.
            if any(ignore in name for ignore in ignores):
                continue
            if name.endswith('/'):
                # directory entry
                pkg_resources.ensure_directory(target)
            else:
                # file entry
                pkg_resources.ensure_directory(target)
                data = z.read(info.filename)
                with open(target, 'wb') as f:
                    f.write(data)
                del data
                try:
                    # original called os.path.islink() with no argument and
                    # chmod'ed an undefined `mode`; use target/perm, and skip
                    # when the entry stored no unix bits at all
                    if perm and not os.path.islink(target):
                        os.chmod(target, perm)
                except OSError:
                    # permission restore is best-effort only
                    pass
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """"Unpack" a directory, using the same interface as for archives

    Raises ``UnrecognizedFormat`` if `filename` is not a directory
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % filename)

    # map each visited directory to (relative source prefix, output dir)
    mapping = {filename: ('', extract_dir)}
    for base, subdirs, filenames in os.walk(filename):
        rel_prefix, out_base = mapping[base]
        for subdir in subdirs:
            mapping[os.path.join(base, subdir)] = (
                rel_prefix + subdir + '/',
                os.path.join(out_base, subdir),
            )
        for fname in filenames:
            destination = progress_filter(
                rel_prefix + fname, os.path.join(out_base, fname))
            if not destination:
                # filter vetoed this entry
                continue
            ensure_directory(destination)
            source = os.path.join(base, fname)
            shutil.copyfile(source, destination)
            shutil.copystat(source, destination)
def unpack_zipfile(filename, extract_dir):
    """Unpack zip *filename* into *extract_dir*.

    Absolute entry names and names containing a ``..`` path component are
    skipped to prevent path traversal.
    """
    # `with` guarantees the archive is closed, replacing try/finally
    with zipfile.ZipFile(filename) as z:
        for info in z.infolist():
            name = info.filename
            # don't extract absolute paths or ones with a '..' component
            # (component check instead of substring, so legitimate names
            # merely containing ".." are not rejected — matches the
            # hardened unpack_zipfile variant elsewhere in this file)
            if name.startswith('/') or '..' in name.split('/'):
                continue
            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue
            if name.endswith('/'):
                # directory entry
                pkg_resources.ensure_directory(target)
            else:
                # file entry
                pkg_resources.ensure_directory(target)
                with open(target, 'wb') as f:
                    f.write(z.read(info.filename))
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack zip `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as
    determined by ``zipfile.is_zipfile()``).  See ``unpack_archive()`` for
    an explanation of the `progress_filter` argument.
    """
    if not zipfile.is_zipfile(filename):
        raise UnrecognizedFormat("%s is not a zip file" % (filename,))

    with ContextualZipFile(filename) as archive:
        for entry in archive.infolist():
            entry_name = entry.filename
            # refuse absolute paths and any '..' path component
            unsafe = entry_name.startswith('/') or '..' in entry_name.split('/')
            if unsafe:
                continue
            destination = progress_filter(
                entry_name, os.path.join(extract_dir, *entry_name.split('/')))
            if not destination:
                continue
            ensure_directory(destination)
            if not entry_name.endswith('/'):
                # regular file: write the decompressed bytes out
                with open(destination, 'wb') as out:
                    out.write(archive.read(entry.filename))
                # restore stored unix permission bits, if any
                mode_bits = entry.external_attr >> 16
                if mode_bits:
                    os.chmod(destination, mode_bits)
def _copy_executable_file_from_build_to_dist(extension):
    """Copy ``<executable><extension>`` from the build directory into
    ``dist/`` and return the destination path.

    Relies on ``executable`` and ``build_dir`` from the enclosing scope.
    """
    filename = executable + extension
    source = path.join(build_dir, filename)
    destination = path.join('dist', filename)
    ensure_directory(destination)
    copy(source, destination)
    return destination
def run(self):
    """Replace ``self.target`` with a fresh copy of ``self.source`` (plus
    regenerated egg metadata) and install namespace packages."""
    # Regenerate egg metadata before copying anything.
    self.run_command("egg_info")
    # A real directory is removed recursively; a symlink (or plain file)
    # is unlinked instead, so we never delete through the link.
    if os.path.isdir(self.target) and not os.path.islink(self.target):
        dir_util.remove_tree(self.target, dry_run=self.dry_run)
    elif os.path.exists(self.target):
        self.execute(os.unlink, (self.target,), "Removing " + self.target)
    if not self.dry_run:
        pkg_resources.ensure_directory(self.target)
    self.execute(
        self.copytree, (),
        "Copying %s to %s" % (self.source, self.target)
    )
    self.install_namespaces()
def handle_noargs(self, **options):
    """Write the bundled ``MAKEFILE`` template into the directory named by
    ``settings.XMLRPC_TRUSTED_CA_PATH``."""
    from django.conf import settings
    # renamed from `dir`, which shadowed the builtin
    ca_dir = os.path.abspath(settings.XMLRPC_TRUSTED_CA_PATH)
    loc = os.path.join(ca_dir, "Makefile")
    print("Writing the Makefile into directory %s..." % ca_dir)
    pkg_resources.ensure_directory(loc)
    # context manager closes the handle even if the write fails
    with open(loc, mode="w") as f:
        f.write(MAKEFILE)
    print("Done.")
def copy_libfullpython(self):
    # libfpython already includes all the added python modules and c
    # extensions, no need to compile anything; we just copy the library to
    # dist/ under the project's module name.
    library_name = self.get_python_module_name().split('.')[-1]
    # Exactly one '*fpython*' artifact (excluding .rsp response files) is
    # expected under build/embedded; the single-item unpack enforces that
    # and raises ValueError otherwise.
    [src] = [item for item in glob(path.join('build', 'embedded', '*fpython*'))
             if not item.endswith('.rsp')]
    # e.g. "libfpython27.a" -> "fpython27"; spliced out so the new name can
    # take its place in the destination filename
    old_library_name = path.basename(src).split('.')[0].replace('lib', '')
    dst = path.join('dist',
                    path.basename(src).replace(old_library_name, library_name))
    ensure_directory(dst)
    copy(src, dst)
    return dst
def write_pystick_variable_file(pystick_variable_filepath, python_files, c_extensions, scons_variables):
    """Render the pystick SCons variable file describing the python files,
    C extensions and build variables, plus a JSON twin (same basename with
    ``.scons`` replaced by ``.json``)."""
    from jinja2 import Template
    from json import dumps
    ensure_directory(pystick_variable_filepath)
    json_reference_dict = _write_json_files(
        path.dirname(pystick_variable_filepath), python_files, c_extensions)
    # merge the JSON file references over a copy of the scons variables
    merged = dict(scons_variables)
    merged.update(json_reference_dict)
    rendered = Template(SCONSCRIPT_TEMPLATE).render(
        repr=repr, sorted=sorted, environment_variables=merged)
    with open(pystick_variable_filepath, 'w') as handle:
        handle.write(rendered)
    with open(pystick_variable_filepath.replace('.scons', '.json'), 'w') as handle:
        handle.write(dumps(merged))
def copy_metadata_to(self, target_dir):
    "Copy metadata (egg info) to the target_dir"
    # normalize the path (so that a forward-slash in egg_info will
    # match using startswith below)
    egg_prefix = os.path.join(os.path.normpath(self.egg_info), '')
    prefix_len = len(egg_prefix)
    for source in self.ei_cmd.filelist.files:
        if not source.startswith(egg_prefix):
            continue
        destination = os.path.join(target_dir, source[prefix_len:])
        ensure_directory(destination)
        self.copy_file(source, destination)
def copy_static_libraries_from_isolated_python(self):
    """Copy the isolated Python's static libraries into ``self.static_libdir``.

    Skips debug builds (``*_g``), the python2.7 library itself, private
    (underscore-prefixed) libraries and, on Windows, pyexpat.  Existing
    destination files are left untouched.
    """
    extension = dict(Darwin='a', Linux='a', Windows="lib", AIX='a', SunOS='a')[system()]
    for src in glob(path.join(self.isolated_python_dirpath, 'lib*', '*.{}'.format(extension))):
        if src.rsplit('.', 1)[0].endswith("_g") or 'python2.7' in src:
            continue
        if path.basename(src).startswith('_'):
            continue
        if system() == 'Windows' and 'pyexpat' in src:
            continue
        dst = path.join(self.static_libdir, path.basename(src))
        ensure_directory(dst)
        # plain `if` replaces the original conditional-expression statement
        # (`copy(...) if not path.exists(dst) else None`)
        if not path.exists(dst):
            copy(src, dst)
def write_django_script(self, script_name, contents):
    """Write a django wrapper script into ``self.script_dir`` and mark it
    executable (0o755)."""
    log.info('Installing %s (django) script to %s', script_name, self.script_dir)
    target = os.path.join(self.script_dir, script_name)
    self.add_output(target)
    if not self.dry_run:
        pkg_resources.ensure_directory(target)
        # `with` guarantees the handle is closed on error
        with open(target, 'wt') as f:
            f.write(contents)
        # 0o755: Python 3-compatible octal literal (was Py2-only `0755`)
        os.chmod(target, 0o755)
def write_script(self, script_name, contents, mode="t", *ignored):
    """Write an executable file to the scripts directory"""
    log.info("Installing %s script to %s", script_name, self.install_dir)
    target = os.path.join(self.install_dir, script_name)
    self.outfiles.append(target)
    if not self.dry_run:
        ensure_directory(target)
        # "w" + mode yields "wt" (text) or "wb" (binary) per the caller
        with open(target, "w" + mode) as f:
            f.write(contents)
        # 0o755: Python 3-compatible octal literal (was Py2-only `0755`)
        chmod(target, 0o755)
def _copy_static_resources(html_path):
    """Copy the avocado_result_html static resources next to *html_path*,
    preserving the one-level directory layout under resources/static."""
    module = "avocado_result_html"
    base_path = "resources/static"
    html_dir = os.path.dirname(os.path.abspath(html_path))
    for top_dir in pkg_resources.resource_listdir(module, base_path):
        rsrc_dir = "%s/%s" % (base_path, top_dir)
        if not pkg_resources.resource_isdir(module, rsrc_dir):
            continue
        for rsrc_file in pkg_resources.resource_listdir(module, rsrc_dir):
            source = pkg_resources.resource_filename(
                module, "%s/%s" % (rsrc_dir, rsrc_file))
            dest = os.path.join(html_dir, top_dir, os.path.basename(source))
            pkg_resources.ensure_directory(dest)
            shutil.copy(source, dest)
def write_script(self, script_name, contents, mode="t", *ignored):
    """Write an executable file to the scripts directory"""
    from setuptools.command.easy_install import chmod, current_umask
    log.info("Installing %s script to %s", script_name, self.install_dir)
    target = os.path.join(self.install_dir, script_name)
    self.outfiles.append(target)
    mask = current_umask()
    if not self.dry_run:
        ensure_directory(target)
        # `with` replaces the manual open/close pair
        with open(target, "w" + mode) as f:
            f.write(contents)
        # honor the user's umask when making the script executable
        chmod(target, 0o777 - mask)
def handle_noargs(self, **options):
    """Create ``MEDIA_ROOT`` (plus the aggregate-logos subdirectory) and
    symlink the css/img/js static directories into it."""
    pkg_resources.ensure_directory(settings.MEDIA_ROOT)
    pkg_resources.ensure_directory(
        os.path.join(settings.MEDIA_ROOT, settings.AGGREGATE_LOGOS_DIR))
    media_dir = os.path.join(
        settings.SRC_DIR, "static", "expedient", "clearinghouse", "media")
    for d in "css", "img", "js":
        # renamed from `path` so os.path isn't shadowed locally
        src_path = os.path.join(media_dir, d)
        target = os.path.join(settings.MEDIA_ROOT, d)
        # only create the symlink if the target does not already exist
        if not os.access(target, os.F_OK):
            os.symlink(src_path, target)
    print("Created media directory and symlinks in %s"
          % settings.MEDIA_ROOT)
def _copy_static_resources(self):
    """Copy the avocado.core htmlresult static resources next to
    ``self.output``, preserving the one-level directory layout."""
    module = 'avocado.core'
    base_path = 'resources/htmlresult/static'
    output_dir = os.path.dirname(os.path.abspath(self.output))
    for top_dir in pkg_resources.resource_listdir(module, base_path):
        rsrc_dir = '%s/%s' % (base_path, top_dir)
        if not pkg_resources.resource_isdir(module, rsrc_dir):
            continue
        for rsrc_file in pkg_resources.resource_listdir(module, rsrc_dir):
            source = pkg_resources.resource_filename(
                module, '%s/%s' % (rsrc_dir, rsrc_file))
            dest = os.path.join(output_dir, top_dir, os.path.basename(source))
            pkg_resources.ensure_directory(dest)
            shutil.copy(source, dest)
def build_dependency(filepath):
    """:returns: base directory"""
    build_dir = path.abspath(path.join("build", "dependencies"))
    # ensure_directory creates the parents of its argument, hence the
    # dummy "x" leaf
    ensure_directory(path.join(build_dir, "x"))
    with chdir_context(build_dir):
        basename = path.basename(filepath)
        logger.info("building {}".format(basename))
        if basename.endswith("egg"):
            # we assume that all the egg sources are pure-python and don't
            # contain binaries; people obviously don't upload binary eggs
            # for POSIX systems to PyPI, but on Windows they do, and
            # therefore we already downloaded their source distributions
            # in a previous step in the build process of the recipe
            return path.join(build_dir, _unzip_egg(filepath))
        elif basename.endswith("zip"):
            return path.join(build_dir, _extract_and_build_zip(filepath))
        elif basename.endswith("gz"):
            return path.join(build_dir, _extract_and_build_tgz(filepath))
        else:
            # fail with context instead of a message-less RuntimeError()
            raise RuntimeError("unsupported dependency archive: %s" % basename)
def write_script(self, script_name, contents, mode="t", *ignored):
    """Write an executable file to the scripts directory"""
    # PEX vendoring switch: import from the real setuptools when running
    # unvendored, otherwise from the copy bundled inside pex.
    if "__PEX_UNVENDORED__" in __import__("os").environ:
        from setuptools.command.easy_install import chmod, current_umask  # vendor:skip
    else:
        from pex.third_party.setuptools.command.easy_install import chmod, current_umask

    log.info("Installing %s script to %s", script_name, self.install_dir)
    target = os.path.join(self.install_dir, script_name)
    self.outfiles.append(target)
    mask = current_umask()
    if not self.dry_run:
        ensure_directory(target)
        f = open(target, "w" + mode)
        f.write(contents)
        f.close()
        # apply the user's umask when marking the script executable
        chmod(target, 0o777 - mask)
def symlink_script(self, scriptname, ep):
    """Symlink script from scripts directory to entry point module"""
    from setuptools.command.easy_install import chmod, current_umask
    # build dest module path
    dest = os.path.join(self.dest_dir, *ep.module_name.split('.')) + '.py'
    if not os.path.exists(dest):
        raise ValueError("Module %s not found (entry_point: %s)" % (dest, ep))
    # ep.attrs ignored!
    target = os.path.join(self.install_dir, scriptname)
    # `dest` is rewritten as a path relative to the script's directory so
    # the installed tree stays relocatable
    dest = os.path.relpath(dest, os.path.dirname(target))
    log.info('symlink_script: %s -> %s', target, dest)
    self.outfiles.append(target)
    mask = current_umask()
    if not self.dry_run:
        ensure_directory(target)
        # replace a pre-existing link/file so os.symlink does not fail
        if os.path.exists(target):
            log.info('symlink_script: target exists: %s: replace', target)
            os.unlink(target)
        os.symlink(dest, target)
        # make dest module executable through target
        chmod(target, 0o777 - mask)
def bootstrap_local_settings(conf_dir=CONF_DIR):
    """
    Create a localsettings module in C{conf_dir}.

    @keyword conf_dir: location of the localsettings.py file. Defaults to
        CONF_DIR.
    """
    loc = os.path.join(conf_dir, "localsettings.py")
    pkg_resources.ensure_directory(loc)
    # never clobber an existing localsettings file
    if os.access(loc, os.F_OK):
        print("ERROR: Found localsettings already. "
              "Cowardly refusing to overwrite.")
        return
    print("Creating skeleton localsettings.py file. in %s" % conf_dir)
    # `with` guarantees the file is closed even if a write fails
    with open(loc, mode="w") as f:
        # write the conf dir location
        f.write("CONF_DIR = '%s'\n" % conf_dir)
        for item in REQUIRED_SETTINGS:
            for var in item[1]:
                f.write("%s = None\n" % var)
    print("Done.")
def write(
    self, target_directory=None, context=None, dry_mode=False, verbose_mode=False
):
    """Render ``self.source_filename`` with *context* and write the result
    to *target_directory*, skipping empty or byte-identical output."""
    target_filename = os.path.join(target_directory, self.target_filename)
    try:
        new_content = render_to_string(self.source_filename, context)
    except TemplateDoesNotExist as e:
        self.error("[ERROR]: %s: %s (does not exist)" % (self.source_filename, e))
        return
    except TemplateSyntaxError as e:
        self.error("[ERROR]: %s: %s (syntax error)" % (self.source_filename, e))
        return
    if len(new_content) == 0:
        self.notice("[SKIPPED]: %s (empty)" % self.source_filename)
        return
    # SHA-1 of the rendered text decides whether anything changed
    new_checksum = hashlib.sha1(new_content.encode("utf-8")).hexdigest()
    if os.path.isfile(target_filename):
        previous_checksum = self.checksum(target_filename)
        if new_checksum == previous_checksum:
            self.notice("[SKIPPED]: %s (identical)" % self.source_filename)
            return
        elif verbose_mode:
            # show a unified diff of what is about to change
            with open(target_filename, "r", encoding="utf-8") as fd:
                previous_content = fd.read()
            for line in unified_diff(
                previous_content.splitlines(),
                new_content.splitlines(),
                fromfile="%s-before" % target_filename,
                tofile="%s-after" % target_filename,
            ):
                self.stdout.write(line)
    if dry_mode:
        self.notice("[DRY]: %s -> %s" % (self.source_filename, target_filename))
        return
    self.notice("[OK] %s -> %s" % (self.source_filename, target_filename))
    pkg_resources.ensure_directory(target_filename)
    with open(target_filename, "w", encoding="utf-8") as fd:
        fd.write(new_content)
def write(
    self, target_directory=None, context=None, dry_mode=False, verbose_mode=False
):
    """Hard-link (or copy) the static source file into *target_directory*.

    ``context`` and ``verbose_mode`` are accepted only for interface
    compatibility with the template-based writers and are unused here.
    (The original contained no-op ``x = x`` self-assignments to silence
    inspections; removed.)
    """
    target_filename = os.path.join(target_directory, self.target_filename)
    source_absolute_path = finders.find(self.source_filename)
    new_checksum = self.checksum(source_absolute_path)
    if os.path.isfile(target_filename):
        previous_checksum = self.checksum(target_filename)
        if new_checksum == previous_checksum:
            self.notice("[SKIPPED]: %s (identical)" % source_absolute_path)
            return
    if dry_mode:
        self.notice("[DRY]: %s -> %s" % (source_absolute_path, target_filename))
        return
    self.notice("[OK] %s -> %s" % (source_absolute_path, target_filename))
    pkg_resources.ensure_directory(target_filename)
    try:
        # hard link is cheap and keeps the files in sync on one filesystem
        os.link(source_absolute_path, target_filename)
    except OSError:
        # cross-device link or unsupported filesystem: fall back to a copy
        shutil.copy(source_absolute_path, target_filename)
raise UnrecognizedFormat("%s is not a directory" % filename) paths = { filename: ('', extract_dir), } for base, dirs, files in os.walk(filename): src, dst = paths[base] for d in dirs: paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) for f in files: target = os.path.join(dst, f) target = progress_filter(src + f, target) if not target: # skip non-files continue ensure_directory(target) f = os.path.join(base, f) ======= raise UnrecognizedFormat("%s is not a directory" % (filename,)) paths = {filename:('',extract_dir)} for base, dirs, files in os.walk(filename): src,dst = paths[base] for d in dirs: paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) for f in files: name = src+f target = os.path.join(dst,f) target = progress_filter(src+f, target) if not target: continue # skip non-files
def run(self):
    """Build the egg: install code into the temp dir, write the EGG-INFO
    metadata (scripts, native libs, zip-safety flag) and zip the result."""
    # Generate metadata first
    self.run_command("egg_info")
    # We run install_lib before install_data, because some data hacks
    # pull their data path from the install_lib command.
    log.info("installing library code to %s" % self.bdist_dir)
    instcmd = self.get_finalized_command('install')
    # temporarily clear the install command's root while install_lib runs;
    # restored right after
    old_root = instcmd.root
    instcmd.root = None
    if self.distribution.has_c_libraries() and not self.skip_build:
        self.run_command('build_clib')
    cmd = self.call_command('install_lib', warn_dir=0)
    instcmd.root = old_root
    all_outputs, ext_outputs = self.get_ext_outputs()
    self.stubs = []
    to_compile = []
    # write a .py stub loader next to each extension and normalize the
    # recorded extension paths to forward slashes
    for (p, ext_name) in enumerate(ext_outputs):
        filename, ext = os.path.splitext(ext_name)
        pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py')
        self.stubs.append(pyfile)
        log.info("creating stub loader for %s" % ext_name)
        if not self.dry_run:
            write_stub(os.path.basename(ext_name), pyfile)
        to_compile.append(pyfile)
        ext_outputs[p] = ext_name.replace(os.sep, '/')
    if to_compile:
        cmd.byte_compile(to_compile)
    if self.distribution.data_files:
        self.do_install_data()
    # Make the EGG-INFO directory
    archive_root = self.bdist_dir
    egg_info = os.path.join(archive_root, 'EGG-INFO')
    self.mkpath(egg_info)
    if self.distribution.scripts:
        script_dir = os.path.join(egg_info, 'scripts')
        log.info("installing scripts to %s" % script_dir)
        self.call_command('install_scripts', install_dir=script_dir, no_ep=1)
    self.copy_metadata_to(egg_info)
    native_libs = os.path.join(egg_info, "native_libs.txt")
    if all_outputs:
        # record the extension outputs so the runtime can find them
        log.info("writing %s" % native_libs)
        if not self.dry_run:
            ensure_directory(native_libs)
            libs_file = open(native_libs, 'wt')
            libs_file.write('\n'.join(all_outputs))
            libs_file.write('\n')
            libs_file.close()
    elif os.path.isfile(native_libs):
        # stale file from a previous build with extensions
        log.info("removing %s" % native_libs)
        if not self.dry_run:
            os.unlink(native_libs)
    write_safety_flag(
        os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
    )
    if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
        log.warn(
            "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )
    if self.exclude_source_files:
        self.zap_pyfiles()
    # Make the archive
    make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                 dry_run=self.dry_run, mode=self.gen_header())
    if not self.keep_temp:
        remove_tree(self.bdist_dir, dry_run=self.dry_run)
    # Add to 'Distribution.dist_files' so that the "upload" command works
    getattr(self.distribution, 'dist_files', []).append(
        ('bdist_egg', get_python_version(), self.egg_output))
def get_cache_path(subfolder=''):
    """Return the package cache directory (created if missing).

    The original body did ``subfolder = get_cache_path()`` — unconditional
    self-recursion that always raised ``RecursionError``.  The subfolder is
    now an optional parameter, keeping the zero-argument call working.
    """
    path = pkg_resources.resource_filename(
        'syn_inflow', '/cache/' + subfolder + '/')
    pkg_resources.ensure_directory(path)
    return path
if not os.path.isdir(filename): <<<<<<< HEAD raise UnrecognizedFormat("%s is not a directory" % (filename,)) paths = {filename:('',extract_dir)} for base, dirs, files in os.walk(filename): src,dst = paths[base] for d in dirs: paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) for f in files: name = src+f target = os.path.join(dst,f) target = progress_filter(src+f, target) if not target: continue # skip non-files ensure_directory(target) f = os.path.join(base,f) ======= raise UnrecognizedFormat("%s is not a directory" % filename) paths = { filename: ('', extract_dir), } for base, dirs, files in os.walk(filename): src, dst = paths[base] for d in dirs: paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) for f in files: target = os.path.join(dst, f) target = progress_filter(src + f, target) if not target:
def get_cache_path():
    """Return the absolute path of the package cache directory, creating
    it if necessary."""
    cache_dir = pkg_resources.resource_filename('syn_inflow', '/cache')
    pkg_resources.ensure_directory(cache_dir)
    return cache_dir
def report(self, pipeline: AbstractPipeline):
    """Write evaluation artifacts (feature histograms plus ROC, PR and
    threshold curves) for every model in *pipeline* to the report cache."""
    # Set Directory path
    subfolder = get_cache_path()
    path = pkg_resources.resource_filename(
        'crcdal', 'cache/' + subfolder + '/reports/')
    pkg_resources.ensure_directory(path)
    # Hist Train
    fig, ax = plt.subplots(figsize=(40, 40))
    pipeline.train.hist(bins=100, ax=ax)
    fig.savefig(path + 'Hist_Train.png')
    # Hist Test
    fig, ax = plt.subplots(figsize=(40, 40))
    pipeline.test.hist(bins=100, ax=ax)
    fig.savefig(path + 'Hist_Test.png')
    # Results and Metric Dicts, keyed by each model's short_name
    fpr = {}
    tpr = {}
    roc_t = {}
    fpr_train = {}
    tpr_train = {}
    roc_t_train = {}
    p = {}
    r = {}
    pr_t = {}
    auc = {}
    auc_train = {}
    for model in pipeline.get_models():
        name = model.short_name
        preds_y_test, probs_y_test = model.predict(pipeline.test)
        preds_y_train, probs_y_train = model.predict(pipeline.train)
        # NOTE(review): the bare `except:` clauses below swallow *any*
        # failure and substitute zeros so reporting continues; consider
        # narrowing to the expected sklearn/IndexError cases.
        try:
            fp_rate, tp_rate, roc_threshold = metrics.roc_curve(
                pipeline.test_y, probs_y_test[:, 1])
        except:
            fp_rate, tp_rate, roc_threshold = [0, 0, 0]
        fpr.update({name: fp_rate})
        tpr.update({name: tp_rate})
        roc_t.update({name: roc_threshold})
        try:
            fp_rate_train, tp_rate_train, roc_threshold_train = metrics.roc_curve(
                pipeline.train_y, probs_y_train[:, 1])
        except:
            fp_rate_train, tp_rate_train, roc_threshold_train = [0, 0, 0]
        fpr_train.update({name: fp_rate_train})
        tpr_train.update({name: tp_rate_train})
        roc_t_train.update({name: roc_threshold_train})
        try:
            precision, recall, pr_thresholds = metrics.precision_recall_curve(
                pipeline.test_y, probs_y_test[:, 1])
        except:
            precision, recall, pr_thresholds = [0, 0, 0]
        p.update({name: precision})
        r.update({name: recall})
        pr_t.update({name: pr_thresholds})
        try:
            au_curve = metrics.roc_auc_score(pipeline.test_y, probs_y_test[:, 1])
        except:
            au_curve = 0
        auc.update({name: au_curve})
        try:
            au_curve_train = metrics.roc_auc_score(pipeline.train_y, probs_y_train[:, 1])
        except:
            au_curve_train = 0
        auc_train.update({name: au_curve_train})
    # ROC Curves - Test Set
    plt.figure(None, figsize=(10, 10))
    plt.plot([0, 1], [0, 1], 'k--')
    for model in pipeline.get_models():
        name = model.short_name
        plt.plot(fpr[name], tpr[name], label=str(name) + ': ' + str(auc[name]))
    plt.xlim(0, 1)
    plt.xlabel('False positive rate')
    plt.ylabel('True positive rate')
    plt.title('ROC curve')
    plt.legend(loc='best')
    plt.savefig(path + 'ROC_Curves_Test.png')
    # ROC Curves - Train Set
    plt.figure(None, figsize=(10, 10))
    plt.plot([0, 1], [0, 1], 'k--')
    for model in pipeline.get_models():
        name = model.short_name
        plt.plot(fpr_train[name], tpr_train[name],
                 label=str(name) + ': ' + str(auc_train[name]))
    plt.xlim(0, 1)
    plt.xlabel('False positive rate')
    plt.ylabel('True positive rate')
    plt.title('ROC curve')
    plt.legend(loc='best')
    plt.savefig(path + 'ROC_Curves_Train.png')
    # Precision Recall Curves
    plt.figure(None, figsize=(10, 10))
    for model in pipeline.get_models():
        name = model.short_name
        plt.plot(r[name], p[name], label=str(name))
    plt.xlabel('Recall rate')
    plt.ylabel('Precision rate')
    plt.legend(loc='best')
    plt.xlim([0, 1])
    plt.savefig(path + 'PR_Curves.png')
    # P&R Threshold Curves
    fig, axes = plt.subplots(nrows=1, ncols=2, sharex='all', sharey='all',
                             figsize=(20, 10))
    for model in pipeline.get_models():
        name = model.short_name
        pr_t2 = pr_t[name]
        # the except-fallback above stores plain ints; wrap for plotting
        if isinstance(pr_t[name], int):
            pr_t2 = [pr_t2]
        if len(pr_t2) > 1:
            # precision/recall arrays are one element longer than the
            # threshold array, so append 1 to match lengths for plotting
            pr_t2 = np.append(pr_t2, 1)
        # Precision
        axes[0].plot(pr_t2, p[name], label=str(name))
        # Recall
        axes[1].plot(pr_t2, r[name], label=str(name))
    axes[0].set_xlabel('Threshold')
    axes[0].set_ylabel('Precision rate')
    axes[1].set_xlabel('Threshold')
    axes[1].set_ylabel('Recall rate')
    axes[0].legend(loc='best')
    axes[1].legend(loc='best')
    axes[0].set_xlim([0, 1])
    axes[1].set_xlim([0, 1])
    plt.savefig(path + 'PR_Threshold_Curves.png')
    # True & False Rate Threshold Curves
    fig, axes = plt.subplots(nrows=1, ncols=2, sharex='all', sharey='all',
                             figsize=(20, 10))
    for model in pipeline.get_models():
        name = model.short_name
        # True Positive
        axes[0].plot(roc_t[name], tpr[name], label=str(name))
        # False Positive
        axes[1].plot(roc_t[name], fpr[name], label=str(name))
    axes[0].set_xlabel('Threshold')
    axes[0].set_ylabel('True positive rate')
    axes[1].set_xlabel('Threshold')
    axes[1].set_ylabel('False positive rate')
    axes[0].legend(loc='best')
    axes[1].legend(loc='best')
    axes[0].set_xlim([0, 1])
    axes[1].set_xlim([0, 1])
    plt.savefig(path + 'TF_Threshold_Curves.png')
def build_console_script(self, executable, module_name, callable_name, python_source_path):
    """Build a native console-script executable embedding Python that
    invokes ``module_name:callable_name``; returns the produced dist files."""
    def generate_executable_c_code():
        # render the C `main` template for this entry point
        source_filename = '{}.c'.format(executable)
        with open(source_filename, 'wb') as fd:
            fd.write(MAIN.format(executable, module_name, callable_name))
        return source_filename

    def generate_buildsystem_for_the_executable(source_filename):
        # we need to use the same build environment we used to build the
        # embedded python interpreter and add some stuff on top of it
        from .environment import write_pystick_variable_file, get_sorted_static_libraries, get_scons_variables
        from .environment import is_64bit
        from pprint import pformat
        from jinja2 import Template
        variables = get_scons_variables(self.static_libdir, self.options)
        variables['!LIBPATH' if '!LIBPATH' in variables else 'LIBPATH'].insert(0, self.embedded_python_build_dir)
        variables['!LIBS' if '!LIBS' in variables else 'LIBS'].insert(0, 'fpython27')
        # always generate pdb
        variables['!PDB'] = source_filename.replace('.c', '.pdb')
        # our generated C code for main uses headers from the Python source code
        variables.setdefault('CPPPATH', []).append([self.embedded_python_build_dir])
        variables.setdefault('CPPPATH', []).append([path.join(python_source_path, 'Include')])
        variables.setdefault('CPPDEFINES', []).append(["Py_BUILD_CORE"])
        with open('SConstruct', 'w') as fd:
            fd.write(Template(SCONSTRUCT).render(
                repr=repr, sorted=sorted,
                environment_variables=variables, source=source_filename))

    def compile_code_and_link_with_static_library():
        # scons runs in a separate process so its globals don't leak here
        run_in_another_process(scons, None)

    def _copy_executable_file_from_build_to_dist(extension):
        basename = executable + extension
        src = path.join(build_dir, basename)
        dst = path.join('dist', basename)
        ensure_directory(dst)
        copy(src, dst)
        return dst

    def copy_executable_to_dist():
        # '.exe' on Windows, no suffix elsewhere
        extension = dict(Windows='.exe').get(system(), '')
        return _copy_executable_file_from_build_to_dist(extension)

    def copy_pdb_to_dist():
        extension = dict(Windows='.pdb').get(system(), '')
        return _copy_executable_file_from_build_to_dist(extension)

    build_dir = path.join('build', 'executables', executable)
    ensure_directory(path.join(build_dir, executable))
    with chdir_context(build_dir):
        source_filename = generate_executable_c_code()
        generate_buildsystem_for_the_executable(source_filename)
        compile_code_and_link_with_static_library()
        # NOTE(review): scons is invoked a second time here, immediately
        # after compile_code_and_link_with_static_library() already ran it
        # — looks redundant; confirm whether the double build is intended.
        run_in_another_process(scons, None)
    files = [copy_executable_to_dist()]
    if os_name == 'nt':
        files.append(copy_pdb_to_dist())
    if self.should_sign_files():
        self.signtool.sign_executables_in_directory('dist')
    return files
def run(self):
    """Build the egg: install code into the temp dir, write the EGG-INFO
    metadata (scripts, native libs, zip-safety flag) and zip the result."""
    # Generate metadata first
    self.run_command("egg_info")
    # We run install_lib before install_data, because some data hacks
    # pull their data path from the install_lib command.
    log.info("installing library code to %s", self.bdist_dir)
    instcmd = self.get_finalized_command('install')
    # temporarily clear the install command's root while install_lib runs;
    # restored right after
    old_root = instcmd.root
    instcmd.root = None
    if self.distribution.has_c_libraries() and not self.skip_build:
        self.run_command('build_clib')
    cmd = self.call_command('install_lib', warn_dir=0)
    instcmd.root = old_root
    all_outputs, ext_outputs = self.get_ext_outputs()
    self.stubs = []
    to_compile = []
    # write a .py stub loader next to each extension and normalize the
    # recorded extension paths to forward slashes
    for (p, ext_name) in enumerate(ext_outputs):
        filename, ext = os.path.splitext(ext_name)
        pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py')
        self.stubs.append(pyfile)
        log.info("creating stub loader for %s", ext_name)
        if not self.dry_run:
            write_stub(os.path.basename(ext_name), pyfile)
        to_compile.append(pyfile)
        ext_outputs[p] = ext_name.replace(os.sep, '/')
    if to_compile:
        cmd.byte_compile(to_compile)
    if self.distribution.data_files:
        self.do_install_data()
    # Make the EGG-INFO directory
    archive_root = self.bdist_dir
    egg_info = os.path.join(archive_root, 'EGG-INFO')
    self.mkpath(egg_info)
    if self.distribution.scripts:
        script_dir = os.path.join(egg_info, 'scripts')
        log.info("installing scripts to %s", script_dir)
        self.call_command('install_scripts', install_dir=script_dir, no_ep=1)
    self.copy_metadata_to(egg_info)
    native_libs = os.path.join(egg_info, "native_libs.txt")
    if all_outputs:
        # record the extension outputs so the runtime can find them
        log.info("writing %s", native_libs)
        if not self.dry_run:
            ensure_directory(native_libs)
            libs_file = open(native_libs, 'wt')
            libs_file.write('\n'.join(all_outputs))
            libs_file.write('\n')
            libs_file.close()
    elif os.path.isfile(native_libs):
        # stale file from a previous build with extensions
        log.info("removing %s", native_libs)
        if not self.dry_run:
            os.unlink(native_libs)
    write_safety_flag(
        os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
    )
    if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
        log.warn(
            "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )
    if self.exclude_source_files:
        self.zap_pyfiles()
    # Make the archive
    make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                 dry_run=self.dry_run, mode=self.gen_header())
    if not self.keep_temp:
        remove_tree(self.bdist_dir, dry_run=self.dry_run)
    # Add to 'Distribution.dist_files' so that the "upload" command works
    getattr(self.distribution, 'dist_files', []).append(
        ('bdist_egg', get_python_version(), self.egg_output))