def _fast_link_helper(target, source, env, link_com):
    """Link `target` into a tmpfs scratch file first, then move it into place.

    Linking into `linking_tmp_dir` (a tmpfs mount chosen by
    create_fast_link_builders) avoids slow disk I/O during the final link;
    the finished binary is moved to its real path only on success.

    Args:
        target: scons target node list; target[0] is the output file.
        source: scons source node list (objects/libraries to link).
        env: scons environment (unused; required by the builder signature).
        link_com: Template-like object with FL_TARGET/FL_SOURCE placeholders.

    Returns:
        The link command's exit code (0 on success).
    """
    global linking_tmp_dir
    target_file = str(target[0])
    # Encode the target path into the temp name so leftovers are traceable.
    prefix_str = "blade_%s" % target_file.replace("/", "_").replace(".", "_")
    fd, temporary_file = tempfile.mkstemp(suffix='xianxian',
                                          prefix=prefix_str,
                                          dir=linking_tmp_dir)
    os.close(fd)

    sources = []
    for s in source:
        sources.append(str(s))
    link_com_str = link_com.substitute(FL_TARGET=temporary_file,
                                       FL_SOURCE=' '.join(sources))
    p = subprocess.Popen(link_com_str,
                         env=os.environ,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if std_out:
        print(std_out)
    if std_err:
        print(std_err)

    if p.returncode == 0:
        shutil.move(temporary_file, target_file)
        if not os.path.exists(target_file):
            # Typo fixed: was "genreate".
            warning("failed to generate %s in link on tmpfs mode" % target_file)
    else:
        # Clean up the temp file so failed links don't leak tmpfs space.
        if os.path.exists(temporary_file):
            os.remove(temporary_file)
        _blade_action_postfunc("failed while fast linking")
    return p.returncode
def __init__(self, blade_root_dir, distcc_hosts_list=None):
    """Probe the local build-acceleration tools (ccache/distcc/dccc).

    Args:
        blade_root_dir: absolute path of the blade root directory.
        distcc_hosts_list: optional explicit distcc host list; when empty
            or None, falls back to the DISTCC_HOSTS environment variable.
    """
    # `None` default instead of `[]`: a mutable default argument is shared
    # across all calls. Falsy values behave exactly as before.
    # ccache
    self.blade_root_dir = blade_root_dir
    self.ccache_installed = self._check_ccache_install()

    # distcc
    self.distcc_env_prepared = False
    self.distcc_installed = self._check_distcc_install()
    if distcc_hosts_list:
        self.distcc_host_list = distcc_hosts_list
    else:
        self.distcc_host_list = os.environ.get('DISTCC_HOSTS', '')
    if self.distcc_installed and self.distcc_host_list:
        self.distcc_env_prepared = True
    if self.distcc_installed and not self.distcc_host_list:
        warning("DISTCC_HOSTS not set but you have "
                "distcc installed, will just build locally")
    self.distcc_log_file = os.environ.get('DISTCC_LOG', '')
    if self.distcc_log_file:
        info("distcc log: %s" % self.distcc_log_file)

    # dccc (distributed link)
    self.dccc_env_prepared = True
    self.dccc_master = os.environ.get('MASTER_HOSTS', '')
    self.dccc_hosts_list = os.environ.get('DISTLD_HOSTS', '')
    self.dccc_installed = self._check_dccc_install()
    if self.dccc_installed:
        if not self.dccc_master and not self.dccc_hosts_list:
            self.dccc_env_prepared = False
            warning("MASTER_HOSTS and DISTLD_HOSTS not set "
                    "but you have dccc installed, will just build locally")
    else:
        self.dccc_env_prepared = False

    self.rules_buf = []
def __init__(self, blade_root_dir, distcc_hosts_list=None):
    """Detect which build accelerators (ccache, distcc, dccc) are usable.

    Args:
        blade_root_dir: absolute path of the blade root directory.
        distcc_hosts_list: optional distcc host list; DISTCC_HOSTS from the
            environment is used when this is empty or None.
    """
    # Mutable default `[]` replaced with None (shared-default pitfall);
    # the truthiness test below keeps the behavior identical.
    # ccache
    self.blade_root_dir = blade_root_dir
    self.ccache_installed = self._check_ccache_install()

    # distcc
    self.distcc_env_prepared = False
    self.distcc_installed = self._check_distcc_install()
    if distcc_hosts_list:
        self.distcc_host_list = distcc_hosts_list
    else:
        self.distcc_host_list = os.environ.get("DISTCC_HOSTS", "")
    if self.distcc_installed and self.distcc_host_list:
        self.distcc_env_prepared = True
    if self.distcc_installed and not self.distcc_host_list:
        warning("DISTCC_HOSTS not set but you have "
                "distcc installed, will just build locally")
    self.distcc_log_file = os.environ.get("DISTCC_LOG", "")
    if self.distcc_log_file:
        info("distcc log: %s" % self.distcc_log_file)

    # dccc (distributed link)
    self.dccc_env_prepared = True
    self.dccc_master = os.environ.get("MASTER_HOSTS", "")
    self.dccc_hosts_list = os.environ.get("DISTLD_HOSTS", "")
    self.dccc_installed = self._check_dccc_install()
    if self.dccc_installed:
        if not self.dccc_master and not self.dccc_hosts_list:
            self.dccc_env_prepared = False
            warning("MASTER_HOSTS and DISTLD_HOSTS not set "
                    "but you have dccc installed, will just build locally")
    else:
        self.dccc_env_prepared = False

    self.rules_buf = []
def _check_inctest_md5sum_file(self): """check the md5sum file size, remove it when it is too large. It is 2G by default. """ if os.path.exists(self.inctest_md5_file): if os.path.getsize(self.inctest_md5_file) > 2*1024*1024*1024: warning("Will remove the md5sum file for incremental test " "for it is oversized" ) os.remove(self.inctest_md5_file)
def _prepare_test_env(self, target):
    """Prepare the runfiles directory for a test target.

    Recreates the target's runfiles dir, symlinks the build profile dir and
    any prebuilt libraries into it, then symlinks every entry of the
    target's 'testdata' option (either a plain path or a
    (data_target, link_name) tuple) into the runfiles dir.

    Args:
        target: target dict with 'path', 'name' and 'options' keys.
    """
    # Start from a clean runfiles directory.
    shutil.rmtree(self._runfiles_dir(target), ignore_errors=True)
    os.mkdir(self._runfiles_dir(target))
    # add build profile symlink
    profile_link_name = os.path.basename(self.build_dir)
    os.symlink(os.path.abspath(self.build_dir),
               os.path.join(self._runfiles_dir(target), profile_link_name))
    # add pre build library symlink
    for prebuilt_file in self._get_prebuilt_files(target):
        os.symlink(os.path.abspath(prebuilt_file[0]),
                   os.path.join(self._runfiles_dir(target), prebuilt_file[1]))
    link_name_list = []
    for i in target['options']['testdata']:
        # An entry is either (data_target, link_name) or a single path
        # used for both roles.
        if isinstance(i, tuple):
            data_target = i[0]
            link_name = i[1]
        else:
            data_target = link_name = i
        # Silently skip entries that escape the source tree.
        if '..' in data_target:
            continue
        if link_name.startswith('//'):
            link_name = link_name[2:]
        # Reject link names that collide with / nest under earlier ones.
        err_msg, item = self.__check_link_name(link_name, link_name_list)
        if err_msg == "AMBIGUOUS":
            error_exit("Ambiguous testdata of //%s:%s: %s, exit..." % (
                target['path'], target['name'], link_name))
        elif err_msg == "INCOMPATIBLE":
            error_exit("%s could not exist with %s in testdata of //%s:%s" % (
                link_name, item, target['path'], target['name']))
        link_name_list.append(link_name)
        try:
            # Create parent dirs for the link; ignore "already exists".
            os.makedirs(os.path.dirname('%s/%s' % (
                self._runfiles_dir(target), link_name)))
        except os.error:
            pass
        if os.path.exists(os.path.abspath('%s/%s' % (
                self._runfiles_dir(target), link_name))):
            error_exit("%s already existed, could not prepare testdata for "
                       "//%s:%s" % (link_name, target['path'], target['name']))
        if data_target.startswith('//'):
            # '//' paths are workspace-rooted, not relative to the BUILD file.
            warning("Test data not in the same directory with BUILD file")
            data_target = data_target[2:]
            os.symlink(os.path.abspath(data_target),
                       '%s/%s' % (self._runfiles_dir(target), link_name))
        else:
            os.symlink(os.path.abspath("%s/%s" % (target['path'], data_target)),
                       '%s/%s' % (self._runfiles_dir(target), link_name))
def java_jar(name, srcs=None, deps=None, prebuilt=False, pre_build=False,
             **kwargs):
    """Register a java_jar target (scons_java_jar).

    Args:
        name: target name.
        srcs: list of java source files (defaults to empty).
        deps: list of dependency targets (defaults to empty).
        prebuilt: True when the jar is prebuilt rather than compiled.
        pre_build: deprecated alias of `prebuilt`.
        **kwargs: extra options, validated later by the target itself.
    """
    # None defaults avoid the shared-mutable-default pitfall; each call
    # gets a fresh list, behavior for callers is unchanged.
    if srcs is None:
        srcs = []
    if deps is None:
        deps = []
    target = JavaJarTarget(name,
                           srcs,
                           deps,
                           prebuilt or pre_build,
                           blade.blade,
                           kwargs)
    if pre_build:
        blade_util.warning("//%s:%s: 'pre_build' has been deprecated, "
                           "please use 'prebuilt'" % (target.data['path'],
                                                      target.data['name']))
    blade.blade.register_scons_target(target.key, target)
def _check_run_targets(self): """check that run command should have only one target. """ err = False targets = [] if len(self.targets) == 0: err = True elif self.targets[0].find(':') == -1: err = True if err: error_exit("Please specify a single target to run: " "blade run //target_path:target_name (or a_path:target_name)") if self.options.command == 'run' and len(self.targets) > 1: warning("run command will only take one target to build and run") if self.targets[0].startswith("//"): targets.append(self.targets[0][2:]) else: targets.append(self.targets[0]) self.targets = targets
def _check_run_targets(self): """check that run command should have only one target. """ err = False targets = [] if len(self.targets) == 0: err = True elif self.targets[0].find(':') == -1: err = True if err: error_exit( "Please specify a single target to run: " "blade run //target_path:target_name (or a_path:target_name)") if self.options.command == 'run' and len(self.targets) > 1: warning("run command will only take one target to build and run") if self.targets[0].startswith("//"): targets.append(self.targets[0][2:]) else: targets.append(self.targets[0]) self.targets = targets
def get_targets_rules(self):
    """Collect and return the scons build rules of all sorted targets.

    Test targets are skipped when the no_test option is set; targets
    without a registered scons object are skipped with a warning.
    """
    skip_tests = bool(getattr(self.options, 'no_test', False))
    test_types = ('cc_test', 'dynamic_cc_test')
    rules = []
    for key in self.sorted_targets_keys:
        target = self.all_targets_expanded[key]
        if not self._is_scons_object_type(target['type']):
            continue
        scons_object = self.scons_targets_map.get(key, None)
        if not scons_object:
            warning('not registered scons object, key %s' % str(key))
            continue
        if skip_tests and target['type'] in test_types:
            continue
        scons_object.scons_rules()
        rules.extend(scons_object.get_rules())
    return rules
def _check_srcs(self): """Check source files. Parameters ----------- None Returns ----------- None Description ----------- It will warn if one file belongs to two different targets. """ target_srcs_map = self.blade.get_target_srcs_map() allow_dup_src_type_list = ['cc_binary', 'cc_test', 'dynamic_cc_binary'] for s in self.data['srcs']: if '..' in s or s.startswith('/'): raise Exception, ( 'Invalid source file path: %s. ' 'can only be relative path, and must in current directory or ' 'subdirectories') % s src_key = os.path.normpath('%s/%s' % (self.data['path'], s)) src_value = '%s %s:%s' % ( self.data['type'], self.current_source_path, self.data['name']) if src_key in target_srcs_map: value_existed = target_srcs_map[src_key] if not (value_existed.split(': ')[0] in allow_dup_src_type_list and self.data['type'] in allow_dup_src_type_list): # Just warn here, not raising exception warning('Source file %s belongs to both %s and %s' % (s, target_srcs_map[src_key], src_value)) target_srcs_map[src_key] = src_value
def _get_version_info(self): """Gets svn root dir info. """ for root_dir in self.svn_roots: lc_all_env = os.environ lc_all_env['LC_ALL'] = 'POSIX' root_dir_realpath = os.path.realpath(root_dir) svn_working_dir = os.path.dirname(root_dir_realpath) svn_dir = os.path.basename(root_dir_realpath) if not os.path.exists("%s/.svn" % root_dir): warning("%s is not under version control" % root_dir) continue p = subprocess.Popen("svn info %s" % svn_dir, env=lc_all_env, cwd="%s" % svn_working_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) std_out, std_err = p.communicate() if p.returncode: warning("failed to get version control info in %s" % root_dir) else: self.svn_info_map[root_dir] = std_out.replace("\n", "\\n\\\n")
def _check_srcs(self): """Check source files. Parameters ----------- None Returns ----------- None Description ----------- It will warn if one file belongs to two different targets. """ target_srcs_map = self.blade.get_target_srcs_map() allow_dup_src_type_list = ['cc_binary', 'cc_test', 'dynamic_cc_binary'] for s in self.data['srcs']: if '..' in s or s.startswith('/'): raise Exception, ( 'Invalid source file path: %s. ' 'can only be relative path, and must in current directory or ' 'subdirectories') % s src_key = os.path.normpath('%s/%s' % (self.data['path'], s)) src_value = '%s %s:%s' % ( self.data['type'], self.current_source_path, self.data['name']) if src_key in target_srcs_map: value_existed = target_srcs_map[src_key] if not (value_existed.split(': ')[0] in allow_dup_src_type_list and self.data['type'] in allow_dup_src_type_list): # Just warn here, not raising exception warning( 'Source file %s belongs to both %s and %s' % ( s, target_srcs_map[src_key], src_value)) target_srcs_map[src_key] = src_value
def _fast_link_helper(target, source, env, link_com):
    """Fast-link helper: link into tmpfs, then move the result into place.

    Args:
        target: scons target node list; target[0] is the output file.
        source: scons source node list (objects/libraries to link).
        env: scons environment (unused; required by the builder signature).
        link_com: Template-like object with FL_TARGET/FL_SOURCE placeholders.

    Returns:
        The link command's exit code (0 on success).
    """
    global linking_tmp_dir
    target_file = str(target[0])
    # Derive a traceable temp-file prefix from the target path.
    prefix_str = "blade_%s" % target_file.replace("/", "_").replace(".", "_")
    fd, temporary_file = tempfile.mkstemp(suffix='xianxian',
                                          prefix=prefix_str,
                                          dir=linking_tmp_dir)
    os.close(fd)

    sources = []
    for s in source:
        sources.append(str(s))
    link_com_str = link_com.substitute(
        FL_TARGET=temporary_file, FL_SOURCE=' '.join(sources))
    p = subprocess.Popen(
        link_com_str,
        env=os.environ,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=True,
        universal_newlines=True)
    std_out, std_err = p.communicate()
    if std_out:
        print(std_out)
    if std_err:
        print(std_err)

    if p.returncode == 0:
        shutil.move(temporary_file, target_file)
        if not os.path.exists(target_file):
            # Typo fixed: was "genreate".
            warning("failed to generate %s in link on tmpfs mode" % target_file)
    else:
        # Remove the scratch file so failed links don't leak tmpfs space.
        if os.path.exists(temporary_file):
            os.remove(temporary_file)
        _blade_action_postfunc("failed while fast linking")
    return p.returncode
def create_fast_link_builders(env):
    """Creates fast link builders - Program and SharedLibrary.

    Probes for a tmpfs mount with enough free space. When one is found,
    the module global `linking_tmp_dir` is pointed at it and the
    fast-link builders are installed into `env`; otherwise the default
    builders are left untouched.
    """
    # Check requirement: list tmpfs mounts as "<use%> <mountpoint>".
    acquire_temp_place = "df | grep tmpfs | awk '{print $5, $6}'"
    p = subprocess.Popen(acquire_temp_place,
                         env=os.environ,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()

    # Do not try to overwrite builder with error
    if p.returncode:
        # Typo fixed: was "fullfilled".
        warning("you have link on tmp enabled, but it is not fulfilled to make it.")
        return

    # No tmpfs to do fastlink, will not overwrite the builder
    if not std_out:
        warning("you have link on tmp enabled, but there is no tmpfs to make it.")
        return

    # Use the first tmpfs mount reported.
    global linking_tmp_dir
    usage, linking_tmp_dir = tuple(std_out.splitlines(False)[0].split())

    # Do not try to do that if there is no memory space left
    usage = int(usage.replace("%", ""))
    if usage > 90:
        warning("you have link on tmp enabled, "
                "but there is not enough space on %s to make it." % linking_tmp_dir)
        return

    info("building in link on tmpfs mode")

    create_fast_link_sharelib_builder(env)
    create_fast_link_prog_builder(env)
def create_fast_link_builders(env):
    """Creates fast link builders - Program and SharedLibrary.

    Finds a tmpfs mount with enough free space and, when available,
    installs the fast-link builders into `env` with `linking_tmp_dir`
    pointing at that mount; otherwise leaves the default builders alone.
    """
    # Check requirement: emit each tmpfs mount as "<use%> <mountpoint>".
    acquire_temp_place = "df | grep tmpfs | awk '{print $5, $6}'"
    p = subprocess.Popen(
        acquire_temp_place,
        env=os.environ,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=True,
        universal_newlines=True)
    std_out, std_err = p.communicate()

    # Do not try to overwrite builder with error
    if p.returncode:
        # Typo fixed: was "fullfilled".
        warning("you have link on tmp enabled, but it is not fulfilled to make it.")
        return

    # No tmpfs to do fastlink, will not overwrite the builder
    if not std_out:
        warning("you have link on tmp enabled, but there is no tmpfs to make it.")
        return

    # Use the first one
    global linking_tmp_dir
    usage, linking_tmp_dir = tuple(std_out.splitlines(False)[0].split())

    # Do not try to do that if there is no memory space left
    usage = int(usage.replace("%", ""))
    if usage > 90:
        warning("you have link on tmp enabled, "
                "but there is not enough space on %s to make it." % linking_tmp_dir)
        return

    info("building in link on tmpfs mode")

    create_fast_link_sharelib_builder(env)
    create_fast_link_prog_builder(env)
def _check_kwargs(self, kwargs): if kwargs: warning("//%s:%s: unrecognized options %s" % (self.data['path'], self.data['name'], kwargs))
def generate_compliation_flags(self):
    """Generates compliation flags.

    Builds the CC/CXX/LD command strings (optionally prefixed with
    distcc/ccache/dccc wrappers), then emits scons rules configuring the
    cloned environments in self.env_list with warning, error and common
    compile/link flags, plus CacheDir/scache setup when ccache is absent.
    (Note: "compliation" typo is kept in the name for API compatibility.)
    """
    # Optional cross-toolchain prefix applied to all tool binaries.
    toolchain_dir = os.environ.get('TOOLCHAIN_DIR', '')
    if toolchain_dir and not toolchain_dir.endswith('/'):
        toolchain_dir += '/'
    cpp_str = toolchain_dir + os.environ.get('CPP', 'cpp')
    cc_str = toolchain_dir + os.environ.get('CC', 'gcc')
    cxx_str = toolchain_dir + os.environ.get('CXX', 'g++')
    ld_str = toolchain_dir + os.environ.get('LD', 'g++')
    self.ccflags_manager.set_cpp_str(cpp_str)

    (warn_cppflags, warn_cxxflags) = self.ccflags_manager.get_warning_ccflags()
    (err_cflags, err_cppflags,
     err_cxxflags) = self.ccflags_manager.get_error_ccflags()
    (cppflags_except_warning,
     linkflags) = self.ccflags_manager.get_flags_except_warning()

    # To modify CC, CXX, LD according to the building environment and
    # project configuration
    build_with_distcc = (self.distcc_enabled and
                         self.build_environment.distcc_env_prepared)
    cc_str = self._append_prefix_to_building_var(
        prefix="distcc", building_var=cc_str, condition=build_with_distcc)
    cxx_str = self._append_prefix_to_building_var(
        prefix="distcc", building_var=cxx_str, condition=build_with_distcc)

    build_with_ccache = self.build_environment.ccache_installed
    cc_str = self._append_prefix_to_building_var(
        prefix="ccache", building_var=cc_str, condition=build_with_ccache)
    cxx_str = self._append_prefix_to_building_var(
        prefix="ccache", building_var=cxx_str, condition=build_with_ccache)

    build_with_dccc = (self.dccc_enabled and
                       self.build_environment.dccc_env_prepared)
    ld_str = self._append_prefix_to_building_var(
        prefix="dccc", building_var=ld_str, condition=build_with_dccc)

    cc_env_str = "CC='%s', CXX='%s'" % (cc_str, cxx_str)
    ld_env_str = "LINK='%s'" % ld_str

    cc_config = configparse.blade_config.get_config('cc_config')
    extra_incs = cc_config['extra_incs']
    include_list = []
    for inc in extra_incs.split(" "):
        include_list.append("'%s'" % inc)
    extra_includes = ", ".join(include_list)

    for env in self.env_list:
        self._add_rule("%s = top_env.Clone()" % env)

    # env_list[0]: warning flags on, error(-as-warning) flags off.
    self._add_rule("""
%s.Replace(%s,
           CPPPATH=[%s, '%s', '%s'],
           CPPFLAGS=%s,
           CFLAGS=[],
           CXXFLAGS=%s,
           %s,
           LINKFLAGS=%s)
""" % (self.env_list[0],
       cc_env_str,
       extra_includes,
       self.build_dir,
       self.python_inc,
       warn_cppflags + cppflags_except_warning,
       warn_cxxflags,
       ld_env_str,
       linkflags))

    # env_list[1]: warning flags plus error flags.
    self._add_rule("""
%s.Replace(%s,
           CPPPATH=[%s, '%s', '%s'],
           CPPFLAGS=%s,
           CFLAGS=%s,
           CXXFLAGS=%s,
           %s,
           LINKFLAGS=%s)
""" % (self.env_list[1],
       cc_env_str,
       extra_includes,
       self.build_dir,
       self.python_inc,
       warn_cppflags + err_cppflags + cppflags_except_warning,
       err_cflags,
       warn_cxxflags + err_cxxflags,
       ld_env_str,
       linkflags))

    # env_list[2]: no warning flags at all.
    self._add_rule("""
%s.Replace(%s,
           CPPPATH=[%s, '%s', '%s'],
           CPPFLAGS=%s,
           CFLAGS=[],
           CXXFLAGS=[],
           %s,
           LINKFLAGS=%s)
""" % (self.env_list[2],
       cc_env_str,
       extra_includes,
       self.build_dir,
       self.python_inc,
       cppflags_except_warning,
       ld_env_str,
       linkflags))

    if hasattr(self.options, 'cache_dir') and self.options.cache_dir:
        # scons CacheDir/scache only matter when ccache is not installed.
        if not self.build_environment.ccache_installed:
            self._add_rule("CacheDir('%s')" % self.options.cache_dir)
            self._add_rule("blade_util.info('using cache directory %s')" % (
                self.options.cache_dir))
            if hasattr(self.options, 'cache_size') and (
                    self.options.cache_size != -1):
                self._add_rule("scache_manager = ScacheManager('%s', cache_limit=%s)" % (
                    self.options.cache_dir, self.options.cache_size))
                self._add_rule("Progress(scache_manager, interval=100)")
                self._add_rule("blade_util.info('scache size %d')" % (
                    self.options.cache_size))

    if not self.build_environment.ccache_installed:
        # No ccache and no configured cache dir: fall back to a default
        # scache location with a 1G limit.
        if hasattr(self.options, 'cache_dir') and (
                not self.options.cache_dir):
            default_cache_dir = os.path.expanduser("~/.bladescache")
            default_cache_size = 1024*1024*1024
            warning("there is no ccache and you don't have scache enabled, "
                    "use %s as current scache dir, scache size 1G" % (
                        default_cache_dir))
            self._add_rule("CacheDir('%s')" % default_cache_dir)
            self._add_rule("blade_util.info('using cache directory %s')" % (
                default_cache_dir))
            self._add_rule("scache_manager = ScacheManager('%s', "
                           "cache_limit=%d)" % (
                               default_cache_dir, default_cache_size))
            self._add_rule("Progress(scache_manager, interval=100)")

    if build_with_ccache:
        self.build_environment.setup_ccache_env(self.env_list)

    if build_with_distcc:
        self.build_environment.setup_distcc_env(self.env_list)

    for rule in self.build_environment.get_rules():
        self._add_rule(rule)
def _check_kwargs(self, kwargs): if kwargs: warning("//%s:%s: unrecognized options %s" % ( self.data['path'], self.data['name'], kwargs))
def generate_compliation_flags(self):
    """Generates compliation flags.

    Computes the CC/CXX/LD command strings (wrapping them with
    distcc/ccache/dccc when those accelerators are prepared) and emits
    the scons rules that configure each cloned environment in
    self.env_list with its warning/error/common flags, followed by
    CacheDir/scache setup when ccache is not installed.
    (The "compliation" typo is kept in the name for API compatibility.)
    """
    # Optional cross-toolchain prefix applied to every tool binary.
    toolchain_dir = os.environ.get('TOOLCHAIN_DIR', '')
    if toolchain_dir and not toolchain_dir.endswith('/'):
        toolchain_dir += '/'
    cpp_str = toolchain_dir + os.environ.get('CPP', 'cpp')
    cc_str = toolchain_dir + os.environ.get('CC', 'gcc')
    cxx_str = toolchain_dir + os.environ.get('CXX', 'g++')
    ld_str = toolchain_dir + os.environ.get('LD', 'g++')
    self.ccflags_manager.set_cpp_str(cpp_str)

    (warn_cppflags, warn_cxxflags) = self.ccflags_manager.get_warning_ccflags()
    (err_cflags, err_cppflags,
     err_cxxflags) = self.ccflags_manager.get_error_ccflags()
    (cppflags_except_warning,
     linkflags) = self.ccflags_manager.get_flags_except_warning()

    # To modify CC, CXX, LD according to the building environment and
    # project configuration
    build_with_distcc = (self.distcc_enabled and
                         self.build_environment.distcc_env_prepared)
    cc_str = self._append_prefix_to_building_var(
        prefix="distcc", building_var=cc_str, condition=build_with_distcc)
    cxx_str = self._append_prefix_to_building_var(
        prefix="distcc", building_var=cxx_str, condition=build_with_distcc)

    build_with_ccache = self.build_environment.ccache_installed
    cc_str = self._append_prefix_to_building_var(
        prefix="ccache", building_var=cc_str, condition=build_with_ccache)
    cxx_str = self._append_prefix_to_building_var(
        prefix="ccache", building_var=cxx_str, condition=build_with_ccache)

    build_with_dccc = (self.dccc_enabled and
                       self.build_environment.dccc_env_prepared)
    ld_str = self._append_prefix_to_building_var(prefix="dccc",
                                                 building_var=ld_str,
                                                 condition=build_with_dccc)

    cc_env_str = "CC='%s', CXX='%s'" % (cc_str, cxx_str)
    ld_env_str = "LINK='%s'" % ld_str

    cc_config = configparse.blade_config.get_config('cc_config')
    extra_incs = cc_config['extra_incs']
    include_list = []
    for inc in extra_incs.split(" "):
        include_list.append("'%s'" % inc)
    extra_includes = ", ".join(include_list)

    for env in self.env_list:
        self._add_rule("%s = top_env.Clone()" % env)

    # env_list[0]: warning flags on, error(-as-warning) flags off.
    self._add_rule("""
%s.Replace(%s,
           CPPPATH=[%s, '%s', '%s'],
           CPPFLAGS=%s,
           CFLAGS=[],
           CXXFLAGS=%s,
           %s,
           LINKFLAGS=%s)
""" % (self.env_list[0],
       cc_env_str,
       extra_includes,
       self.build_dir,
       self.python_inc,
       warn_cppflags + cppflags_except_warning,
       warn_cxxflags,
       ld_env_str,
       linkflags))

    # env_list[1]: warning flags plus error flags.
    self._add_rule("""
%s.Replace(%s,
           CPPPATH=[%s, '%s', '%s'],
           CPPFLAGS=%s,
           CFLAGS=%s,
           CXXFLAGS=%s,
           %s,
           LINKFLAGS=%s)
""" % (self.env_list[1],
       cc_env_str,
       extra_includes,
       self.build_dir,
       self.python_inc,
       warn_cppflags + err_cppflags + cppflags_except_warning,
       err_cflags,
       warn_cxxflags + err_cxxflags,
       ld_env_str,
       linkflags))

    # env_list[2]: no warning flags at all.
    self._add_rule("""
%s.Replace(%s,
           CPPPATH=[%s, '%s', '%s'],
           CPPFLAGS=%s,
           CFLAGS=[],
           CXXFLAGS=[],
           %s,
           LINKFLAGS=%s)
""" % (self.env_list[2],
       cc_env_str,
       extra_includes,
       self.build_dir,
       self.python_inc,
       cppflags_except_warning,
       ld_env_str,
       linkflags))

    if hasattr(self.options, 'cache_dir') and self.options.cache_dir:
        # scons CacheDir/scache are only relevant without ccache.
        if not self.build_environment.ccache_installed:
            self._add_rule("CacheDir('%s')" % self.options.cache_dir)
            self._add_rule("blade_util.info('using cache directory %s')" %
                           (self.options.cache_dir))
            if hasattr(self.options, 'cache_size') and (self.options.cache_size != -1):
                self._add_rule(
                    "scache_manager = ScacheManager('%s', cache_limit=%s)"
                    % (self.options.cache_dir, self.options.cache_size))
                self._add_rule("Progress(scache_manager, interval=100)")
                self._add_rule("blade_util.info('scache size %d')" %
                               (self.options.cache_size))

    if not self.build_environment.ccache_installed:
        # No ccache and no configured cache dir: fall back to a default
        # scache location with a 1G limit.
        if hasattr(self.options, 'cache_dir') and (not self.options.cache_dir):
            default_cache_dir = os.path.expanduser("~/.bladescache")
            default_cache_size = 1024 * 1024 * 1024
            warning(
                "there is no ccache and you don't have scache enabled, "
                "use %s as current scache dir, scache size 1G" % (default_cache_dir))
            self._add_rule("CacheDir('%s')" % default_cache_dir)
            self._add_rule("blade_util.info('using cache directory %s')" %
                           (default_cache_dir))
            self._add_rule("scache_manager = ScacheManager('%s', "
                           "cache_limit=%d)" %
                           (default_cache_dir, default_cache_size))
            self._add_rule("Progress(scache_manager, interval=100)")

    if build_with_ccache:
        self.build_environment.setup_ccache_env(self.env_list)

    if build_with_distcc:
        self.build_environment.setup_distcc_env(self.env_list)

    for rule in self.build_environment.get_rules():
        self._add_rule(rule)
def generate_python_binary(target, source, env):
    """Build a python egg for `target` from the given sources.

    source[0] is expected to be a setup.py (a default one is generated
    when it is missing); the next source is the package __init__.py and
    the remaining sources are copied into the package directory before
    `python setup.py bdist_egg` is run in the target directory.

    Returns:
        The failing subprocess's return code, or 0 on success.
    """
    setup_file = ''
    if not str(source[0]).endswith("setup.py"):
        warning("setup.py not existed to generate target %s, "
                "blade will generate a default one for you" %
                str(target[0]))
    else:
        setup_file = str(source[0])
    init_file = ''
    source_index = 2
    if not setup_file:
        # Without a setup.py, source[0] is already the __init__.py.
        source_index = 1
        init_file = str(source[0])
    else:
        init_file = str(source[1])

    init_file_dir = os.path.dirname(init_file)

    dep_source_list = []
    for s in source[source_index:]:
        dep_source_list.append(str(s))
    target_file = str(target[0])
    target_file_dir_list = target_file.split('/')
    # First path component is the build profile dir (e.g. build64_release).
    target_profile = target_file_dir_list[0]
    target_dir = '/'.join(target_file_dir_list[0:-1])

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    if setup_file:
        shutil.copyfile(setup_file, os.path.join(target_dir, 'setup.py'))
    else:
        target_name = os.path.basename(init_file_dir)
        if not target_name:
            error_exit("invalid package for target %s" % str(target[0]))
        # generate default setup.py for user
        setup_str = """
#!/usr/bin/env python
# This file was generated by blade

from setuptools import find_packages, setup


setup(
    name='%s',
    version='0.1.0',
    packages=find_packages(),
    zip_safe=True
)
""" % target_name
        default_setup_file = open(os.path.join(target_dir, 'setup.py'), "w")
        default_setup_file.write(setup_str)
        default_setup_file.close()

    # Remove any stale copy of the package before re-copying.
    package_dir = os.path.join(target_profile, init_file_dir)
    if os.path.exists(package_dir):
        shutil.rmtree(package_dir, ignore_errors=True)

    cmd = "cp -r %s %s" % (init_file_dir, target_dir)
    p = subprocess.Popen(cmd,
                         env={},
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to copy source files from %s to %s" % (
            init_file_dir, target_dir))
        return p.returncode

    # copy file to package_dir
    for f in dep_source_list:
        dep_file_basename = os.path.basename(f)
        dep_file_dir = os.path.dirname(f)
        sub_dir = ''
        sub_dir_list = dep_file_dir.split('/')
        if len(sub_dir_list) > 1:
            # Path below the first component, i.e. the sub-package path.
            sub_dir = '/'.join(dep_file_dir.split('/')[1:])
        if sub_dir:
            package_sub_dir = os.path.join(package_dir, sub_dir)
            if not os.path.exists(package_sub_dir):
                os.makedirs(package_sub_dir)
            # Each new sub-package needs an (empty) __init__.py.
            sub_init_file = os.path.join(package_sub_dir, "__init__.py")
            if not os.path.exists(sub_init_file):
                sub_f = open(sub_init_file, "w")
                sub_f.close()
            shutil.copyfile(f, os.path.join(package_sub_dir, dep_file_basename))

    make_egg_cmd = "python setup.py bdist_egg"
    p = subprocess.Popen(make_egg_cmd,
                         env={},
                         cwd=target_dir,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True,
                         universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to generate python binary in %s" % target_dir)
        return p.returncode
    return 0
def generate_python_binary(target, source, env):
    """Produce a python egg for `target`.

    Expects source[0] to be a setup.py (generating a default one when
    absent), the following source to be the package __init__.py, and the
    rest to be dependency sources that get copied into the package tree
    before running `python setup.py bdist_egg` inside the target dir.

    Returns:
        0 on success, otherwise the failing subprocess's return code.
    """
    setup_file = ''
    if not str(source[0]).endswith("setup.py"):
        warning("setup.py not existed to generate target %s, "
                "blade will generate a default one for you" %
                str(target[0]))
    else:
        setup_file = str(source[0])
    init_file = ''
    source_index = 2
    if not setup_file:
        # No setup.py given, so source[0] is the __init__.py itself.
        source_index = 1
        init_file = str(source[0])
    else:
        init_file = str(source[1])

    init_file_dir = os.path.dirname(init_file)

    dep_source_list = []
    for s in source[source_index:]:
        dep_source_list.append(str(s))
    target_file = str(target[0])
    target_file_dir_list = target_file.split('/')
    # Leading path component is the build profile dir.
    target_profile = target_file_dir_list[0]
    target_dir = '/'.join(target_file_dir_list[0:-1])

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    if setup_file:
        shutil.copyfile(setup_file, os.path.join(target_dir, 'setup.py'))
    else:
        target_name = os.path.basename(init_file_dir)
        if not target_name:
            error_exit("invalid package for target %s" % str(target[0]))
        # generate default setup.py for user
        setup_str = """
#!/usr/bin/env python
# This file was generated by blade

from setuptools import find_packages, setup


setup(
    name='%s',
    version='0.1.0',
    packages=find_packages(),
    zip_safe=True
)
""" % target_name
        default_setup_file = open(os.path.join(target_dir, 'setup.py'), "w")
        default_setup_file.write(setup_str)
        default_setup_file.close()

    # Clear any stale package copy before copying fresh sources.
    package_dir = os.path.join(target_profile, init_file_dir)
    if os.path.exists(package_dir):
        shutil.rmtree(package_dir, ignore_errors=True)

    cmd = "cp -r %s %s" % (init_file_dir, target_dir)
    p = subprocess.Popen(
        cmd,
        env={},
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=True,
        universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to copy source files from %s to %s" % (
            init_file_dir, target_dir))
        return p.returncode

    # copy file to package_dir
    for f in dep_source_list:
        dep_file_basename = os.path.basename(f)
        dep_file_dir = os.path.dirname(f)
        sub_dir = ''
        sub_dir_list = dep_file_dir.split('/')
        if len(sub_dir_list) > 1:
            # Everything below the first path component is the sub-package.
            sub_dir = '/'.join(dep_file_dir.split('/')[1:])
        if sub_dir:
            package_sub_dir = os.path.join(package_dir, sub_dir)
            if not os.path.exists(package_sub_dir):
                os.makedirs(package_sub_dir)
            # New sub-packages need an (empty) __init__.py to be importable.
            sub_init_file = os.path.join(package_sub_dir, "__init__.py")
            if not os.path.exists(sub_init_file):
                sub_f = open(sub_init_file, "w")
                sub_f.close()
            shutil.copyfile(f, os.path.join(package_sub_dir, dep_file_basename))

    make_egg_cmd = "python setup.py bdist_egg"
    p = subprocess.Popen(
        make_egg_cmd,
        env={},
        cwd=target_dir,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=True,
        universal_newlines=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        info(std_out)
        info(std_err)
        error_exit("failed to generate python binary in %s" % target_dir)
        return p.returncode
    return 0