def execute_build(self):
	"""
	Run the build: read the scripts, call the user pre/post build hooks,
	and process the tasks through :py:meth:`compile`.
	"""
	Logs.info("Waf: Entering directory `%s'", self.variant_dir)
	self.recurse([self.run_dir])
	self.pre_build()

	# the timer drives the elapsed-time display of the progress bar
	self.timer = Utils.Timer()
	try:
		self.compile()
	finally:
		# finish the progress bar line cleanly when writing to a terminal
		if self.progress_bar == 1 and sys.stderr.isatty():
			done = self.producer.processed or 1
			line = self.progress_line(done, done, Logs.colors.BLUE, Logs.colors.NORMAL)
			Logs.info(line, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2': Logs.colors.cursor_on})
		Logs.info("Waf: Leaving directory `%s'", self.variant_dir)

	# break the reference cycle between the producer and the build context
	try:
		self.producer.bld = None
		del self.producer
	except AttributeError:
		pass
	self.post_build()
def execute_build(self):
	"""
	Execute the build:

	* read the scripts via :py:meth:`waflib.Context.Context.recurse`
	* run the user functions registered through :py:meth:`waflib.Build.BuildContext.pre_build`
	* process the tasks with :py:meth:`waflib.Build.BuildContext.compile`
	* run the user functions registered through :py:meth:`waflib.Build.BuildContext.post_build`
	"""
	Logs.info("Waf: Entering directory `%s'" % self.variant_dir)
	self.recurse([self.run_dir])
	self.pre_build()

	# start the timer used for the elapsed-time display in the progress bar
	self.timer = Utils.Timer()
	try:
		self.compile()
	finally:
		# complete the progress bar line when attached to a terminal
		if self.progress_bar == 1 and sys.stderr.isatty():
			total = len(self.returned_tasks) or 1
			bar = self.progress_line(total, total, Logs.colors.BLUE, Logs.colors.NORMAL)
			Logs.info(bar, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2': Logs.colors.cursor_on})
		Logs.info("Waf: Leaving directory `%s'" % self.variant_dir)
	self.post_build()
def execute(self):
	"""List every build target, one per line, with an optional description column."""
	self.restore()
	if not self.all_envs:
		self.load_envs()
	self.recurse([self.run_dir])
	self.pre_build()
	self.timer = Utils.Timer()

	# posting the task generators populates the target name cache
	for group in self.groups:
		for tgen in group:
			poster = getattr(tgen, 'post', None)
			if poster is not None:
				poster()
	try:
		self.get_tgen_by_name('')  # force the cache initialization
	except Errors.WafError:
		pass

	names = sorted(self.task_gen_cache_names)
	# width of the widest target name, used to align the description column
	width = max(len(n) for n in names) if names else 0
	for name in names:
		gen = self.task_gen_cache_names[name]
		desc = getattr(gen, 'description', '')
		if desc:
			name = name.ljust(width)
			desc = ': %s' % desc
		Logs.pprint('GREEN', name, label=desc)
def execute_build(self):
	"""
	Execute the build:

	* read the scripts (:py:meth:`waflib.Context.Context.recurse`)
	* call :py:meth:`waflib.Build.BuildContext.pre_build` for the user hooks
	* process the tasks via :py:meth:`waflib.Build.BuildContext.compile`
	* call :py:meth:`waflib.Build.BuildContext.post_build` for the user hooks
	"""
	if not self.is_option_true('internal_dont_check_recursive_execution'):
		Logs.info("[WAF] Executing '%s' in '%s'" % (self.cmd, self.variant_dir))
	self.recurse([self.run_dir])
	self.pre_build()

	# the timer feeds the elapsed-time display of the progress bar
	self.timer = Utils.Timer()
	if self.progress_bar:
		sys.stderr.write(Logs.colors.cursor_off)  # hide the cursor while drawing
	try:
		self.compile()
	finally:
		if self.progress_bar == 1:
			count = len(self.returned_tasks) or 1
			self.to_log(self.progress_line(count, count, Logs.colors.BLUE, Logs.colors.NORMAL))
			print('')
			sys.stdout.flush()
			sys.stderr.write(Logs.colors.cursor_on)  # always restore the cursor
	self.post_build()
def execute(self):
	"""
	Print the name of every build target, one per line.

	See :py:func:`waflib.Context.Context.execute`.
	"""
	self.restore()
	if not self.all_envs:
		self.load_envs()
	self.recurse([self.run_dir])
	self.pre_build()

	# display the time elapsed in the progress bar
	self.timer = Utils.Timer()
	for g in self.groups:
		for tg in g:
			try:
				f = tg.post
			except AttributeError:
				pass
			else:
				f()
	try:
		# force the cache initialization; best effort, a failure must not
		# abort the listing
		self.get_tgen_by_name('')
	except Exception:
		# BUGFIX: was a bare 'except:' which also swallowed
		# KeyboardInterrupt and SystemExit
		pass
	lst = list(self.task_gen_cache_names.keys())
	lst.sort()
	for k in lst:
		Logs.pprint('GREEN', k)
def run_command(cmd_name):
	"""Create the context for *cmd_name*, attach the parsed options and execute it.

	:param cmd_name: name of the command to run
	:return: the context that executed the command
	"""
	ctx = Context.create_context(cmd_name)
	ctx.log_timer = Utils.Timer()
	ctx.cmd = cmd_name
	ctx.options = Options.options
	ctx.execute()
	return ctx
def execute_build(self):
	"""
	Execute the build:

	* read the scripts (see :py:meth:`waflib.Context.Context.recurse`)
	* run the :py:meth:`waflib.Build.BuildContext.pre_build` user hooks
	* process the tasks (:py:meth:`waflib.Build.BuildContext.compile`)
	* run the :py:meth:`waflib.Build.BuildContext.post_build` user hooks
	"""
	Logs.info("%s: Entering directory `%s'" % (scriptname, self.variant_dir))
	self.recurse([self.run_dir])
	self.pre_build()

	# timer for the elapsed-time display of the progress bar
	self.timer = Utils.Timer()
	if self.progress_bar:
		sys.stderr.write(Logs.colors.cursor_off)  # hide the cursor while drawing
	try:
		self.compile()
	finally:
		if self.progress_bar == 1:
			done = len(self.returned_tasks) or 1
			self.to_log(self.progress_line(done, done, Logs.colors.BLUE, Logs.colors.NORMAL))
			print('')
			sys.stdout.flush()
			sys.stderr.write(Logs.colors.cursor_on)  # always restore the cursor
		Logs.info("%s: Leaving directory `%s'" % (scriptname, self.variant_dir))
	self.post_build()
def execute(self):
	'''Entry point of the *cmake* command.'''
	self.restore()
	if not self.all_envs:
		self.load_envs()
	self.recurse([self.run_dir])
	self.pre_build()

	# post the task generators so the target name cache gets populated
	for grp in self.groups:
		for gen in grp:
			poster = getattr(gen, 'post', None)
			if poster is not None:
				poster()
	try:
		self.get_tgen_by_name('')  # force the cache initialization
	except Exception:
		pass

	self.cmake = True
	if self.options.clean:
		cleanup(self)
	else:
		export(self)
	self.timer = Utils.Timer()
def run_commands():
	"""Run 'init', then every queued command in order, then 'shutdown'."""
	run_command('init')
	while Options.commands:
		name = Options.commands.pop(0)
		clock = Utils.Timer()
		run_command(name)
		# the success message would interleave badly with the progress bar
		if not Options.options.progress_bar:
			elapsed = ' (%s)' % str(clock)
			Logs.info('%r finished successfully%s' % (name, elapsed))
	run_command('shutdown')
def generate_shaders_pak(ctx, game, assets_platform, shader_type, shader_list_file=None, shaders_pak_dir=None):
	"""
	Generate the shader caches for *game* on *assets_platform* and pack them.

	:param ctx: waf context providing the engine/launch node helpers
	:param game: name of the game project
	:param assets_platform: assets platform identifier
	:param shader_type: shader compiler target (e.g. 'D3D11')
	:param shader_list_file: accepted for interface compatibility; not used here
	:param shaders_pak_dir: optional destination node for the pak files; when
		None, build/<assets_platform>/<game> under the launch dir is used
	:return: the node of the directory containing the generated paks
	"""
	Logs.info('[INFO] Generate Shaders...')
	timer = Utils.Timer()

	if not is_shader_compiler_valid(ctx):
		ctx.fatal('[ERROR] Unable to connect to the remote shader compiler for generating shaders. Please check shadercachegen.cfg in the engine root directory to ensure r_ShaderCompilerServer is set to the correct IP address')

	shader_cache_gen_path = get_shader_cache_gen_path(ctx)
	gen_shaders_script = ctx.engine_node.find_resource('Tools/PakShaders/gen_shaders.py')
	command_args = [
		get_python_path(ctx),
		'"{}"'.format(gen_shaders_script.abspath()),
		'{}'.format(game),
		'{}'.format(assets_platform),
		'{}'.format(shader_type),
		'{}'.format(os.path.basename(os.path.dirname(shader_cache_gen_path))),
		'-g "{}"'.format(ctx.launch_dir),
		'-e "{}"'.format(ctx.engine_node.abspath())
	]
	if not run_subprocess(command_args, as_shell=True):
		ctx.fatal('[ERROR] Failed to generate {} shaders'.format(shader_type))

	pak_shaders_script = ctx.engine_node.find_resource('Tools/PakShaders/pak_shaders.py')
	# BUGFIX: the shaders_pak_dir argument used to be ignored and overwritten
	# unconditionally; honor it when the caller provides one (default None
	# keeps the previous behavior)
	if shaders_pak_dir is None:
		shaders_pak_dir = ctx.launch_node().make_node('build/{}/{}'.format(assets_platform, game).lower())
	# start from a clean destination directory
	if os.path.isdir(shaders_pak_dir.abspath()):
		shaders_pak_dir.delete()
	shaders_pak_dir.mkdir()

	shaders_source = ctx.launch_node().make_node('Cache/{}/{}/user/cache/shaders/cache'.format(game, assets_platform))
	command_args = [
		get_python_path(ctx),
		'"{}"'.format(pak_shaders_script.abspath()),
		'"{}"'.format(shaders_pak_dir.abspath()),
		'-s {}'.format(shader_type),
		'-r "{}"'.format(shaders_source.abspath())
	]
	if not run_subprocess(command_args, as_shell=True):
		ctx.fatal('[ERROR] Failed to pack {} shaders'.format(shader_type))

	Logs.info('[INFO] Finished Generate Shaders...({})'.format(timer))
	return shaders_pak_dir
def run_command(cmd_name):
	"""
	Execute a single command, like ``build``.

	Called by :py:func:`waflib.Scripting.run_commands`.

	:param cmd_name: command to execute
	:type cmd_name: string
	:return: the context that ran the command
	"""
	ctx = Context.create_context(cmd_name)
	ctx.log_timer = Utils.Timer()
	ctx.cmd = cmd_name
	ctx.options = Options.options  # provided for convenience
	ctx.execute()
	return ctx
def execute_build(self):
	"""Read the scripts, then run pre_build, compile and post_build."""
	Logs.info("Waf: Entering directory `%s'" % self.variant_dir)
	self.recurse([self.run_dir])
	self.pre_build()
	self.timer = Utils.Timer()  # elapsed time shown in the progress bar
	try:
		self.compile()
	finally:
		# complete the progress bar line when attached to a terminal
		if self.progress_bar == 1 and sys.stderr.isatty():
			done = len(self.returned_tasks) or 1
			line = self.progress_line(done, done, Logs.colors.BLUE, Logs.colors.NORMAL)
			Logs.info(line, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2': Logs.colors.cursor_on})
		Logs.info("Waf: Leaving directory `%s'" % self.variant_dir)
	self.post_build()
def execute(self):
	"""
	Print the name of each build target; a description column is shown for
	every task generator that carries a "description" attribute.

	See :py:func:`waflib.Build.BuildContext.execute`.
	"""
	self.restore()
	if not self.all_envs:
		self.load_envs()
	self.recurse([self.run_dir])
	self.pre_build()

	# display the time elapsed in the progress bar
	self.timer = Utils.Timer()

	# post the task generators to fill the target name cache
	for group in self.groups:
		for tgen in group:
			poster = getattr(tgen, 'post', None)
			if poster is not None:
				poster()
	try:
		self.get_tgen_by_name('')  # force the cache initialization
	except Errors.WafError:
		pass

	names = sorted(self.task_gen_cache_names)
	# left-justify to the widest target name so descriptions line up
	pad = max(len(n) for n in names) if names else 0
	for name in names:
		gen = self.task_gen_cache_names[name]
		desc = getattr(gen, 'description', '')
		if desc:
			name = name.ljust(pad)
			desc = ': %s' % desc
		Logs.pprint('GREEN', name, label=desc)
def execute_build(self):
	"""Build while hiding/restoring the terminal cursor around the progress bar."""
	Logs.info("Waf: Entering directory `%s'" % self.variant_dir)
	self.recurse([self.run_dir])
	self.pre_build()
	self.timer = Utils.Timer()
	if Options.options.progress_bar:
		sys.stderr.write(Logs.colors.cursor_off)  # hide the cursor while drawing
	try:
		self.compile()
	finally:
		if Options.options.progress_bar:
			sys.stderr.write(Logs.colors.cursor_on)  # always restore the cursor
			print('')
	Logs.info("Waf: Leaving directory `%s'" % self.variant_dir)
	self.post_build()
def execute_build(self):
	# overridden to suppress the "Entering/Leaving directory" messages (just because)
	self.recurse([self.run_dir])
	self.pre_build()
	self.timer = Utils.Timer()
	if Options.options.progress_bar:
		sys.stderr.write(Logs.colors.cursor_off)  # hide the cursor while drawing
	try:
		self.compile()
	finally:
		if Options.options.progress_bar:
			sys.stderr.write(Logs.colors.cursor_on)  # always restore the cursor
			print('')
	self.post_build()
def run_commands():
	"""
	Execute the commands that were given on the command-line.

	Called by :py:func:`waflib.Scripting.waf_entry_point` during the
	initialization, after :py:func:`waflib.Scripting.parse_options` ran.
	"""
	run_command('init')
	while Options.commands:
		name = Options.commands.pop(0)
		clock = Utils.Timer()
		run_command(name)
		# keep quiet while the progress bar owns the terminal
		if not Options.options.progress_bar:
			elapsed = ' (%s)' % str(clock)
			Logs.info('%r finished successfully%s' % (name, elapsed))
	run_command('shutdown')
def execute_build(self):
	"""Process the build: recurse the scripts, then pre_build, compile and post_build."""
	Logs.info("Waf: Entering directory `%s'" % self.variant_dir)
	self.recurse([self.run_dir])
	self.pre_build()
	self.timer = Utils.Timer()  # drives the elapsed-time display
	if self.progress_bar:
		sys.stderr.write(Logs.colors.cursor_off)  # hide the cursor while drawing
	try:
		self.compile()
	finally:
		if self.progress_bar == 1:
			total = len(self.returned_tasks) or 1
			self.to_log(self.progress_line(total, total, Logs.colors.BLUE, Logs.colors.NORMAL))
			print('')
			sys.stdout.flush()
			sys.stderr.write(Logs.colors.cursor_on)  # always restore the cursor
		Logs.info("Waf: Leaving directory `%s'" % self.variant_dir)
	self.post_build()
def execute(self):
	"""Print the name of every build target, one per line."""
	self.restore()
	if not self.all_envs:
		self.load_envs()
	self.recurse([self.run_dir])
	self.pre_build()
	self.timer = Utils.Timer()

	# post the task generators to populate the target name cache
	for group in self.groups:
		for tgen in group:
			poster = getattr(tgen, 'post', None)
			if poster is not None:
				poster()
	try:
		self.get_tgen_by_name('')  # force the cache initialization
	except Errors.WafError:
		pass
	for name in sorted(self.task_gen_cache_names.keys()):
		Logs.pprint('GREEN', name)
def make_dot(self):
	"""
	Write the task dependency graph to ``wscript.dot`` and schedule a
	``dot`` rule that converts it to ``wscript.ps``.
	"""
	self.restore()
	if not self.all_envs:
		self.load_envs()
	self.recurse([self.run_dir])
	self.pre_build()
	self.timer = Utils.Timer()

	# collect all tasks from the build iterator
	tasks = []
	for t in self.get_build_iterator():
		if len(t) > 0:
			tasks.extend(t)
		else:
			break

	# BUGFIX: the file handle was never closed; the with-statement
	# guarantees the .dot file is flushed/closed before the dot rule runs
	with open("wscript.dot", "w") as ofh:
		ofh.write("digraph tasks { rankdir=BT;\n")
		tmap = {}
		count = 0
		# one graph node per task
		for l in tasks:
			nname = "n" + str(count)
			tmap[id(l)] = nname
			count += 1
			name = getattr(l, 'name', str(type(l)))
			ofh.write(" " + nname + ' [label="' + name + '"];\n')
		# one edge per run_after dependency
		for l in tasks:
			for d in l.run_after:
				ofh.write(" " + tmap[id(l)] + " -> " + tmap[id(d)] + ";\n")
		ofh.write("}\n")

	# delete all the tasks except ours: keep only the dot->ps conversion rule
	g = []
	self.group_names['dot'] = g
	self.groups = [g]
	self.set_group(0)

	self(rule='dot -Tps -o ${TGT} ${SRC}', source='wscript.dot', target='wscript.ps', shell=True)
	self.compile()
def sync_3rd_party(self, third_party_subpath):
	"""
	Force-sync a third-party library folder from perforce.

	:param third_party_subpath: path of the library below the p4 repo root
	"""
	Logs.info("[INFO] Syncing library {} from perforce...".format(third_party_subpath))
	try:
		timer = Utils.Timer()
		sync_proc = subprocess.Popen(
			[self.p4_exe, '-p', self.p4_port, '-c', self.p4_workspace,
			 'sync', '-f', "{}/{}/...".format(self.p4_repo, third_party_subpath.rstrip('/'))],
			stdout=subprocess.PIPE,
			stderr=subprocess.PIPE)
		sync_out, sync_err = sync_proc.communicate()
		if not sync_err:
			Logs.info("[INFO] Library {} synced. ({})".format(third_party_subpath, str(timer)))
		else:
			# BUGFIX: p4 errors reported on stderr used to be dropped silently
			Logs.warn("[WARN] Unable to sync 3rd party path {}: {}".format(third_party_subpath, sync_err))
	except OSError as err:
		Logs.warn("[WARN] Unable to sync 3rd party path {}: {}".format(third_party_subpath, str(err)))
def get_shader_list(ctx, game, assets_platform, shader_type, shader_list_file=None):
	"""
	Ask the remote shader compiler server for the list of shaders.

	:param ctx: waf context providing the engine node helpers
	:param game: name of the game project
	:param assets_platform: assets platform identifier
	:param shader_type: shader compiler target
	:param shader_list_file: optional output file passed to the helper script via -s
	"""
	Logs.info('[INFO] Get the shader list from the shader compiler server...')
	timer = Utils.Timer()
	if not is_shader_compiler_valid(ctx):
		ctx.fatal('[ERROR] Unable to connect to the remote shader compiler to get shaders list. Please check shadercachegen.cfg in the engine root directory to ensure r_ShaderCompilerServer is set to the correct IP address')

	shader_cache_gen_path = get_shader_cache_gen_path(ctx)
	script_node = ctx.engine_node.find_resource('Tools/PakShaders/get_shader_list.py')
	args = [
		get_python_path(ctx),
		'"{}"'.format(script_node.abspath()),
		'{}'.format(game),
		'{}'.format(assets_platform),
		'{}'.format(shader_type),
		'{}'.format(os.path.basename(os.path.dirname(shader_cache_gen_path))),
		'-g "{}"'.format(ctx.launch_dir),
	]
	if shader_list_file:
		args += ['-s {}'.format(shader_list_file)]
	command = ' '.join(args)
	Logs.info('[INFO] Running command - {}'.format(command))
	try:
		subprocess.check_call(command, shell=True)
		Logs.info('[INFO] Finished getting the shader list from the shader compiler server...({})'.format(timer))
	except subprocess.CalledProcessError:
		Logs.error('[ERROR] Failed to get shader list for {}'.format(shader_type))
	return
def execute(self, *k, **kw):
	'''Executes the *export* command.

	The export command installs a special task process method enabling
	collection of the tasks being executed (i.e. the actual command lines),
	as well as a special *post_process* method invoked once the build has
	completed (see build_postfun). Note that a *clean* run is forced before
	exporting, so the task processing sequence can be (re)started.
	'''
	self.restore()
	if not self.all_envs:
		self.load_envs()
	self.recurse([self.run_dir])
	self.pre_build()

	# post the task generators to populate the target name cache
	for grp in self.groups:
		for gen in grp:
			poster = getattr(gen, 'post', None)
			if poster is not None:
				poster()
	try:
		self.get_tgen_by_name('')  # force the cache initialization
	except Exception:
		pass

	self.makefile = True
	if self.options.clean:
		self.do_clean()
	else:
		self.do_export()
	self.timer = Utils.Timer()
def run_command(cmd_name):
	"""
	Execute a single command. Called by :py:func:`waflib.Scripting.run_commands`.

	:param cmd_name: command to execute, like ``build``
	:type cmd_name: string
	:return: the context that ran the command
	"""
	ctx = Context.create_context(cmd_name)
	ctx.log_timer = Utils.Timer()
	ctx.options = Options.options  # provided for convenience
	ctx.cmd = cmd_name
	try:
		ctx.execute()
	except AttributeError as e:
		# IMPROVED: str(e) equals str(sys.exc_info()[1]) inside this handler;
		# the previous exc_info() call and its unused t/tb locals are gone
		if str(e) == "'Context' object has no attribute 'add_group'":
			# an unknown command produced a bare Context without build methods
			Logs.warn('[WARN] Received invalid command "%s" - please check your command line' % cmd_name)
			ctx.skip_finish_message = True
		else:
			raise
	return ctx
def generate_game_paks(ctx, game, job, assets_platform, source_path, target_path, is_obb=False, num_threads=0, zip_split=False, zip_maxsize=0, verbose=0):
	"""
	Run the rc pak job for the game assets and report the outcome.

	:param ctx: waf context handed through to run_rc_job
	:param game: name of the game project
	:param job: rc job description to execute
	:param assets_platform: assets platform identifier
	:param source_path: directory holding the source assets
	:param target_path: directory receiving the pak files
	:param is_obb: package as an Android OBB
	:param num_threads: worker threads for rc (0 = rc default)
	:param zip_split: split the zip output
	:param zip_maxsize: maximum size per zip when splitting
	:param verbose: rc verbosity level
	"""
	Logs.info('[INFO] Generate game paks...')
	timer = Utils.Timer()
	try:
		ret = run_rc_job(ctx, game, job, assets_platform, source_path, target_path, is_obb, num_threads, zip_split, zip_maxsize, verbose)
		if ret:
			Logs.info('[INFO] Finished generating game paks... ({})'.format(timer))
		else:
			Logs.error('[ERROR] Generating game paks failed.')
	except Exception as e:
		# BUGFIX: was a bare 'except:' that also swallowed KeyboardInterrupt/
		# SystemExit and hid the failure reason; narrow it and log the detail
		Logs.error('[ERROR] Generating game paks exception. {}'.format(e))
def execute_tasks(self):
	"""Start the timer shown in the progress bar, then process the tasks."""
	self.timer = Utils.Timer()
	self.compile()
def execute(self):
	"""
	Extended Context.execute to perform packaging on games and tools.

	For an executable package to be processed by this context the wscript
	file must implement the package_[platform] function (i.e.
	package_darwin_x64), which can call the package_game or package_tool
	methods on this context. Those functions will create the necessary
	package_task objects that will be executed after all directories have
	been recursed through. The package_game/tool functions accept keyword
	arguments that define how the package_task should package executables,
	resources, and assets that are needed. For more information about
	valid keyword arguments look at the package_task.__init__ method.
	"""
	# On windows when waf is run with incredibuild we get called multiple
	# times but only need to be executed once. The class-level flag makes
	# sure we run once per package command (either specified on the command
	# line or auto added); multiple package commands on the command line
	# each still get their own run.
	if getattr(self.__class__, 'is_running', False):
		return
	else:
		self.__class__.is_running = True

	# When the package_* functions are called they will set the group to
	# packaging then back to build. This way we can filter out the package
	# tasks and only execute them, not the build task_generators that will
	# be added as we recurse through the directories.
	self.add_group('build')
	self.add_group('packaging')
	self.set_group('build')

	self.project = self.get_bootstrap_game()

	self.restore()
	if not self.all_envs:
		self.load_envs()

	# The package command may be executed before SetupAssistant has
	# configured the project, which is valid; lumberyard.py signals this by
	# raising. Catch it and return so that builds can complete correctly.
	try:
		self.recurse([self.run_dir])
	except Exception:
		# BUGFIX: was a bare 'except:' which also trapped
		# KeyboardInterrupt/SystemExit
		Logs.info("Could not run the package command as the build has not been run yet.")
		return

	# display the time elapsed in the progress bar
	self.timer = Utils.Timer()

	group = self.get_group('packaging')

	# Generating the xcode project should only be done on macOS and if we
	# actually have something to package (len(group) > 0)
	if len(group) > 0 and self.is_option_true('run_xcode_for_packaging') and self.platform in ['darwin_x64', 'ios', 'appletv']:
		Logs.debug("package: checking for xcode project... ")
		platform = self.platform
		if 'darwin' in platform:
			platform = "mac"

		# Check if the Xcode solution exists. We need it to perform bundle
		# stuff (processing Info.plist and icon assets...)
		project_name_and_location = "/{}/{}.xcodeproj".format(
			getattr(self.options, platform + "_project_folder", None),
			getattr(self.options, platform + "_project_name", None))
		if not os.path.exists(self.path.abspath() + project_name_and_location):
			Logs.debug("package: running xcode_{} command to generate the project {}".format(platform, self.path.abspath() + project_name_and_location))
			run_command('xcode_' + platform)

	for task_generator in group:
		try:
			# fetching runnable_status filters out entries lacking the full
			# task interface, even though only scan/run are invoked below
			rs = task_generator.runnable_status
			scan = task_generator.scan
			run = task_generator.run
		except AttributeError:
			pass
		else:
			scan()
			run()
def __init__(self, global_context, cmd_argv, options_context, pkg, run_node):
	"""Set up the waf build context used by bento and register the default
	builders for extensions and compiled libraries."""
	super(BuildWafContext, self).__init__(global_context, cmd_argv, options_context, pkg, run_node)

	# parse the command line handed to this build context
	o, a = options_context.parser.parse_args(cmd_argv)
	if o.jobs:
		jobs = int(o.jobs)
	else:
		jobs = 1
	if o.verbose:
		verbose = int(o.verbose)
		zones = ["runner"]  # only show the "runner" debug zone when verbose
	else:
		verbose = 0
		zones = []
	if o.inplace:
		self.inplace = 1
	else:
		self.inplace = 0
	if o.progress_bar:
		self.progress_bar = True
	else:
		self.progress_bar = False

	# configure waf logging from the parsed options
	Logs.verbose = verbose
	Logs.init_log()
	if zones is None:
		Logs.zones = []
	else:
		Logs.zones = zones

	run_path = self.run_node.abspath()
	source_path = self.top_node.abspath()
	build_path = self.build_node.abspath()
	_init(run_path=run_path, source_path=source_path, build_path=build_path)

	# an "options" context must be created before the "build" one
	create_context("options").parse_args([])

	# create and configure the underlying waf build context
	waf_context = create_context("build")
	waf_context.restore()
	if not waf_context.all_envs:
		waf_context.load_envs()
	waf_context.jobs = jobs
	waf_context.timer = Utils.Timer()
	if self.progress_bar:
		waf_context.progress_bar = 1
	waf_context.bento_context = self
	self.waf_context = waf_context

	def _default_extension_builder(extension, **kw):
		# default builder for python C extensions (shared library)
		if not "features" in kw:
			kw["features"] = "c cshlib pyext bento"
		if not "source" in kw:
			kw["source"] = extension.sources[:]
		if not "name" in kw:
			kw["name"] = extension.name
		return self.waf_context(**kw)

	def _default_library_builder(library, **kw):
		# default builder for compiled static libraries
		if not "features" in kw:
			kw["features"] = "c cstlib pyext bento"
		if not "source" in kw:
			kw["source"] = library.sources[:]
		if not "name" in kw:
			kw["name"] = library.name
		return self.waf_context(**kw)

	self.builder_registry.register_category("extensions", _default_extension_builder)
	self.builder_registry.register_category("compiled_libraries", _default_library_builder)
def check_project_rules(bld):
	'''check the project rules - ensuring the targets are sane'''
	loops = {}
	# NOTE(review): inc_loops appears unused in this function
	inc_loops = {}

	tgt_list = get_tgt_list(bld)
	add_samba_attributes(bld, tgt_list)

	# the SHOWDEPS / SHOW_DUPLICATES display options force a full recheck
	force_project_rules = (Options.options.SHOWDEPS or Options.options.SHOW_DUPLICATES)
	# reuse the cached dependency information when possible
	if not force_project_rules and load_samba_deps(bld, tgt_list):
		return

	timer = Utils.Timer()
	bld.new_rules = True
	Logs.info("Checking project rules ...")
	debug('deps: project rules checking started')

	# dependency expansion pipeline; each stage logs its cumulative timing
	expand_subsystem_deps(bld)
	debug("deps: expand_subsystem_deps: %s" % str(timer))
	replace_grouping_libraries(bld, tgt_list)
	debug("deps: replace_grouping_libraries: %s" % str(timer))
	build_direct_deps(bld, tgt_list)
	debug("deps: build_direct_deps: %s" % str(timer))
	break_dependency_loops(bld, tgt_list)
	debug("deps: break_dependency_loops: %s" % str(timer))

	if Options.options.SHOWDEPS:
		show_dependencies(bld, Options.options.SHOWDEPS, set())

	calculate_final_deps(bld, tgt_list, loops)
	debug("deps: calculate_final_deps: %s" % str(timer))

	if Options.options.SHOW_DUPLICATES:
		show_object_duplicates(bld, tgt_list)

	# run the various attribute generators
	for f in [build_dependencies, build_includes, add_init_functions]:
		debug('deps: project rules checking %s', f)
		for t in tgt_list:
			f(t)
		debug("deps: %s: %s" % (f, str(timer)))

	debug('deps: project rules stage1 completed')

	# hard failures: duplicate sources or bad group ordering abort the build
	if not check_duplicate_sources(bld, tgt_list):
		Logs.error("Duplicate sources present - aborting")
		sys.exit(1)
	debug("deps: check_duplicate_sources: %s" % str(timer))

	if not bld.check_group_ordering(tgt_list):
		Logs.error("Bad group ordering - aborting")
		sys.exit(1)
	debug("deps: check_group_ordering: %s" % str(timer))

	show_final_deps(bld, tgt_list)
	debug("deps: show_final_deps: %s" % str(timer))

	debug('deps: project rules checking completed - %u targets checked', len(tgt_list))

	# cache the computed dependencies for the next run (not during install)
	if not bld.is_install:
		save_samba_deps(bld, tgt_list)
	debug("deps: save_samba_deps: %s" % str(timer))

	Logs.info("Project rules pass")
def make_dot(self):
	"""
	Dump the task and file dependency graph to ``wscript.dot`` and add a
	rule converting it to ``wscript.ps`` via graphviz.
	"""
	self.restore()
	if not self.all_envs:
		self.load_envs()
	self.recurse([self.run_dir])
	self.pre_build()
	self.timer = Utils.Timer()

	# collect all tasks from the build iterator
	tasks = []
	for t in self.get_build_iterator():
		if len(t) > 0:
			tasks.extend(t)
		else:
			break

	# IMPROVED: use a with-statement so the file is closed even if a write
	# fails (the original closed it only on the success path)
	with open("wscript.dot", "w") as ofh:
		ofh.write("digraph tasks { rankdir=BT;\n")
		tmap = {}
		count = 0
		for t in tasks:
			# one ellipse node per task
			nname = "n" + str(count)
			tmap[id(t)] = nname
			count += 1
			name = getattr(t, 'name', str(type(t)))
			ofh.write(" " + nname + ' [label="' + name + '"];\n')
			# one box node per input/output/dependency file, created once
			for a in ('inputs', 'outputs', 'dep_nodes'):
				l = getattr(t, a, [])
				if not len(l):
					continue
				for n in l:
					if id(n) not in tmap:
						nname = "f" + str(count)
						count += 1
						tmap[id(n)] = nname
						name = os.path.relpath(repr(n))
						ofh.write(" " + nname + ' [label="' + name + '",shape=box];\n')
		for t in tasks:
			# dashed edges for run_after ordering constraints
			for d in t.run_after:
				ofh.write(" " + tmap[id(t)] + " -> " + tmap[id(d)] + "[style=dashed];\n")
			# solid edges between tasks and their files; outputs point at the task
			for a in ('inputs', 'dep_nodes', 'outputs'):
				l = getattr(t, a, [])
				if not len(l):
					continue
				for n in l:
					if a == 'outputs':
						ofh.write(" " + tmap[id(n)] + " -> " + tmap[id(t)] + ";\n")
					else:
						ofh.write(" " + tmap[id(t)] + " -> " + tmap[id(n)] + ";\n")
		ofh.write("}\n")

	# delete all the tasks except ours: keep only the dot->ps conversion rule
	g = []
	self.group_names['dot'] = g
	self.groups = [g]
	self.set_group(0)

	self(rule='/usr/bin/dot -Tps -o ${TGT} ${SRC}', source=['wscript.dot'], target='wscript.ps', shell=True)
	self.compile()