def Clean(cls, context, optional_output_stream):
    """
    Handles the complexities associated with cleaning previously generated
    output, ultimately invoking _CleanImpl.
    """

    assert context

    output_stream = StreamDecorator(optional_output_stream)

    output_stream.write(cls._GetStatusText("Cleaning", context, cls.GetInputItems(context)))
    with output_stream.DoneManager() as dm:
        dm.result = cls._CleanImpl(context, dm.stream) or 0
        return dm.result
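# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the contract Clean imposes
# on derived classes. _CleanImpl receives the context and the DoneManager's
# stream and returns a result code, or None, which the 'or 0' above coerces to
# success. Names here are hypothetical.
#
#     @classmethod
#     def _CleanImpl(cls, context, output_stream):
#         for output_filename in context.get("output_filenames", []):
#             output_stream.write("Removing '{}'...\n".format(output_filename))
#             FileSystem.RemoveFile(output_filename)
#         # falling through returns None; Clean records it as 0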
def _CleanImplEx(cls, context, output_stream):
    output_stream = StreamDecorator(output_stream)

    input_items = set(cls.GetInputItems(context))

    for output_filename in context["output_filenames"]:
        if output_filename in input_items:
            continue

        if os.path.isfile(output_filename):
            output_stream.write("Removing '{}'...".format(output_filename))
            with output_stream.DoneManager():
                FileSystem.RemoveFile(output_filename)

    return super(MultipleOutputMixin, cls)._CleanImplEx(context, output_stream)
def Build(
    output_stream=sys.stdout,
):
    """Builds clang-formatProxy"""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        command_line = '"{script}" Compile "/input={input}" "/output_dir={output_dir}" /no_bundle {exclude_modules}'.format(
            script=CurrentShell.CreateScriptName("CxFreezeCompiler"),
            input=os.path.join(_script_dir, "clang-formatProxy.py"),
            output_dir=os.path.join(
                _script_dir,
                "..",
                "..",
                "Tools",
                "clang-formatProxy",
                "v1.0",
                CurrentShell.CategoryName,
            ),
            exclude_modules=" ".join(
                ['"/exclude_module={}"'.format(module) for module in EXCLUDE_MODULES],
            ),
        )

        dm.result = Process.Execute(command_line, dm.stream)

        return dm.result
def EntryPoint(
    arg=None,
    output_stream=sys.stdout,
):
    # Guard against the None default so the join below is safe
    args = arg or []
    del arg

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        python_script = os.path.join(os.path.dirname(wxglade.__file__), "wxglade.py")
        assert os.path.isfile(python_script), python_script

        dm.result = Process.Execute(
            'python "{}" {}'.format(
                python_script,
                ' '.join(['"{}"'.format(arg) for arg in args]),
            ),
            dm.stream,
        )

        return dm.result
def Normalize(
    script_filename_or_dir,
    output_stream=sys.stdout,
):
    """Normalizes a script so that it can be run from any location."""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResults: ",
        suffix='\n',
    ) as dm:
        if os.path.isfile(script_filename_or_dir):
            script_filenames = [script_filename_or_dir]
        elif os.path.isdir(script_filename_or_dir):
            script_filenames = list(FileSystem.WalkFiles(script_filename_or_dir, recurse=False))
        else:
            assert False

        for index, script_filename in enumerate(script_filenames):
            nonlocals = CommonEnvironment.Nonlocals(result=None)

            dm.stream.write(
                "Processing '{}' ({} of {})...".format(
                    script_filename,
                    index + 1,
                    len(script_filenames),
                ),
            )
            with dm.stream.DoneManager(
                done_suffix=lambda: PythonActivationActivity.NormalizeScriptResultStrings[nonlocals.result],
            ):
                nonlocals.result = PythonActivationActivity.NormalizeScript(script_filename)

        return dm.result
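# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): why a Nonlocals helper is
# used above. The done_suffix lambda runs when the DoneManager block completes,
# so it must observe a value assigned *inside* the block; on Python 2 (which
# has no 'nonlocal' keyword), a small attribute bag is the usual workaround.
# _ExampleNonlocals is a hypothetical stand-in for CommonEnvironment.Nonlocals.
class _ExampleNonlocals(object):
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

# nonlocals = _ExampleNonlocals(result=None)
# ...the earlier lambda can now read nonlocals.result after it is assigned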
def Clean(
    force=False,
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        for subdir in ["stage", "build"]:
            this_dir = os.path.join(boost_root, subdir)
            if not os.path.isdir(this_dir):
                continue

            if not force:
                dm.stream.write(
                    "Call this method with the '/force' flag to remove '{}'.\n".format(this_dir),
                )
                continue

            dm.stream.write("Removing '{}'...".format(this_dir))
            with dm.stream.DoneManager():
                FileSystem.RemoveTree(this_dir)

        return dm.result
def Build(
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        command_line_template = 'python "{script}" {{plugin}} "{input}" "{{output}}"'.format(
            script=_script_dir,
            input=os.path.join(_script_dir, "Featurizers.yaml"),
        )

        for index, (plugin, output_dir) in enumerate(_PLUGINS):
            dm.stream.write(
                "Generating '{}' ({} of {})...".format(plugin, index + 1, len(_PLUGINS)),
            )
            with dm.stream.DoneManager(suffix="\n") as this_dm:
                this_dm.result = Process.Execute(
                    command_line_template.format(
                        plugin=plugin,
                        output=output_dir,
                    ),
                    this_dm.stream,
                )

            if dm.result != 0:
                return dm.result

        return dm.result
def Lcov(
    bin_dir=None,
    not_llvm=False,
    output_dir=None,
    output_filename="lcov.info",
    type=None,
    output_stream=sys.stdout,
    verbose=False,
):
    """Generates a LCOV file based on *.gcno files"""

    # Guard against the None default so the append below is safe
    bin_dirs = bin_dir or []
    del bin_dir

    if not bin_dirs:
        bin_dirs.append(os.getcwd())

    if len(bin_dirs) > 1 and not output_dir:
        raise CommandLine.UsageException(
            "An 'output_dir' must be provided when multiple 'bin_dirs' are passed",
        )

    if len(bin_dirs) == 1 and not output_dir:
        output_dir = bin_dirs[0]

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        output_filename = os.path.join(output_dir, output_filename)

        dm.stream.write("Creating '{}'...".format(output_filename))
        with dm.stream.DoneManager() as this_dm:
            FileSystem.MakeDirs(output_dir)

            command_line = 'grcov {dirs} -o "{output_filename}"{llvm}{type}'.format(
                dirs=" ".join(['"{}"'.format(dir) for dir in bin_dirs]),
                output_filename=output_filename,
                llvm="" if not_llvm else " --llvm",
                type="" if type is None else " -t {}".format(type),
            )

            if verbose:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        Command Line:
                            {}

                        """,
                    ).format(command_line),
                )

            this_dm.result = Process.Execute(command_line, this_dm.stream)
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
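# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): reproduces the grcov command
# line that Lcov composes above, with hypothetical bin dirs, so the quoting and
# flag logic is easy to eyeball in isolation.
def _ExampleLcovCommandLine():
    bin_dirs = ["build/debug", "build/release"]     # hypothetical inputs
    output_filename = "lcov.info"
    not_llvm = False
    type = "lcov"

    return 'grcov {dirs} -o "{output_filename}"{llvm}{type}'.format(
        dirs=" ".join('"{}"'.format(dir) for dir in bin_dirs),
        output_filename=output_filename,
        llvm="" if not_llvm else " --llvm",
        type="" if type is None else " -t {}".format(type),
    )
    # -> grcov "build/debug" "build/release" -o "lcov.info" --llvm -t lcov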
def Clean(
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        FileSystem.RemoveTree(os.path.join(_script_dir, "..", "GeneratedCode"))
        return dm.result
def _CleanImplEx(cls, context, output_stream):
    if context["output_filename"] not in cls.GetInputItems(context) and os.path.isfile(context["output_filename"]):
        output_stream.write("Removing '{}'...".format(context["output_filename"]))
        with StreamDecorator(output_stream).DoneManager():
            FileSystem.RemoveFile(context["output_filename"])

    return super(SingleOutputMixin, cls)._CleanImplEx(context, output_stream)
def Publish(
    image_name,
    registry_name,
    tag=None,
    output_stream=sys.stdout,
):
    """Publishes previously built content to a docker registry"""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        prev_dir = os.getcwd()
        os.chdir(os.path.join(_script_dir, image_name))
        with CallOnExit(lambda: os.chdir(prev_dir)):
            image_name = "{}/{}".format(DOCKER_USER_NAME, image_name)

            image_id = _GetImageId(
                image_name,
                dm,
                tag=tag,
            )

            # _GetImageId returns None on failure; check it before composing the
            # new name (the string returned by format can never be None)
            if image_id is None:
                assert dm.result != 0
                return dm.result

            new_image_name = "{}/{}".format(registry_name, image_name.split("/")[-1])

            dm.stream.write("Renaming image...")
            with dm.stream.DoneManager() as this_dm:
                this_dm.result, output = Process.Execute(
                    "docker tag {} {}{}".format(
                        image_id,
                        new_image_name,
                        ":{}".format(tag) if tag else "",
                    ),
                )
                if this_dm.result != 0:
                    this_dm.stream.write(output)
                    return this_dm.result

            dm.stream.write("Pushing image...")
            with dm.stream.DoneManager(line_prefix=" ") as this_dm:
                this_dm.result = Process.Execute(
                    "docker push {}".format(new_image_name),
                    this_dm.stream,
                )
                if this_dm.result != 0:
                    return this_dm.result

        return dm.result
def _CommandLineImpl(
    compiler,
    inputs,
    functor,                                # def Func(context, output_stream) -> rval
    output_stream,
    compiler_kwargs,
    output_via_stderr=False,                # <Unused variable> pylint: disable = W0613
    output_start_line=None,                 # <Unused variable> pylint: disable = W0613
    output_end_line=None,                   # <Unused variable> pylint: disable = W0613
):
    assert compiler
    assert inputs
    assert output_stream

    result = compiler.ValidateEnvironment()
    if result:
        output_stream.write("{}\n".format(result.rstrip()))
        return -1

    # Execute
    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResult: ",
        suffix='\n',
        display_exceptions=False,
    ) as dm:
        dm.stream.write("\nGenerating context...")
        with dm.stream.DoneManager() as this_dm:
            try:
                inputs = [os.path.realpath(input) for input in inputs]  # <Redefining built-in type> pylint: disable = W0622

                contexts = list(compiler.GenerateContextItems(inputs, **compiler_kwargs))
            except Exception as ex:
                this_dm.result = -1

                if getattr(ex, "IsDiagnosticException", False):
                    this_dm.stream.write("{}\n".format(str(ex)))
                    contexts = []
                else:
                    raise

        for context in contexts:
            dm.stream.flush()

            result = functor(context, dm.stream)

            if dm.result == 0 or (dm.result > 0 and result < 0):
                dm.result = result

        return dm.result
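# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the result-merging rule used
# at the bottom of _CommandLineImpl. Negative values (errors) take precedence
# over positive ones (warnings), which take precedence over 0 (success).
def _ExampleMergeResults(results):
    merged = 0
    for result in results:
        if merged == 0 or (merged > 0 and result < 0):
            merged = result
    return merged

assert _ExampleMergeResults([0, 2, 0]) == 2     # a warning survives successes
assert _ExampleMergeResults([2, -1, 3]) == -1   # an error trumps warnings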
def _Impl(working_dir, output_stream, verbose, callback_func):
    if not os.path.isfile(os.path.join(working_dir, "CMakeLists.txt")):
        raise CommandLine.UsageException(
            "The directory '{}' does not contain the file 'CMakeLists.txt'".format(working_dir),
        )

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        build_dir_prefix = [
            "build",
            CurrentShell.CategoryName,
            os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_COMPILER_NAME"),
            os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_ARCHITECTURE"),
        ]

        configuration_types = ["Debug", "Release"]

        # Tests cannot execute in parallel, as they must be invoked from the build
        # dir (which impacts the global working directory)
        test_lock = threading.Lock()

        # ----------------------------------------------------------------------
        def Impl(task_index, output_stream, on_status_update):
            configuration = configuration_types[task_index]
            build_dir = os.path.join(*([working_dir] + build_dir_prefix + [configuration]))

            return callback_func(
                test_lock,
                configuration,
                build_dir,
                output_stream,
                on_status_update,
            )

        # ----------------------------------------------------------------------

        dm.result = TaskPool.Execute(
            [TaskPool.Task(configuration_type, Impl) for configuration_type in configuration_types],
            dm.stream,
            progress_bar=True,
            verbose=verbose,
        )

        return dm.result
def Build(
    image_name,
    tag=None,
    output_stream=sys.stdout,
    verbose=False,
):
    """Creates a docker image"""

    tags = tag
    del tag

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        prev_dir = os.getcwd()
        os.chdir(os.path.join(_script_dir, image_name))
        with CallOnExit(lambda: os.chdir(prev_dir)):
            image_name = "{}/{}".format(DOCKER_USER_NAME, image_name)

            dm.stream.write("Building docker image...")
            with dm.stream.DoneManager(
                line_prefix=" ",
                suffix="\n",
            ) as this_dm:
                this_dm.result = Process.Execute(
                    "docker build --tag {} .".format(image_name),
                    this_dm.stream,
                )
                if this_dm.result != 0:
                    return this_dm.result

            if tags:
                dm.stream.write("Applying tags...")
                with dm.stream.DoneManager() as tag_dm:
                    for index, tag in enumerate(tags):
                        tag_dm.stream.write(
                            "'{}' ({} of {})...".format(tag, index + 1, len(tags)),
                        )
                        with tag_dm.stream.DoneManager() as this_dm:
                            this_dm.result, output = Process.Execute(
                                "docker tag {image_name} {image_name}:{tag}".format(
                                    image_name=image_name,
                                    tag=tag,
                                ),
                            )
                            if this_dm.result != 0:
                                this_dm.stream.write(output)
                                return this_dm.result

        return dm.result
def _InvokeImplEx(cls, invoke_reason, context, status_stream, verbose_stream, verbose):
    command_line = cls.CreateInvokeCommandLine(context, verbose_stream)

    sink = six.moves.StringIO()

    result = Process.Execute(command_line, StreamDecorator([sink, verbose_stream]))

    if result != 0 and not verbose:
        status_stream.write(sink.getvalue())

    return result
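# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the capture-and-tee pattern
# used by _InvokeImplEx above. StreamDecorator accepts a list of streams (as
# also seen in _InvokeImpl later in this section), so output is captured in a
# sink for replay on failure while still flowing to the verbose stream.
# 'run_func' is a hypothetical callable that writes to the stream it is given.
def _ExampleCaptureAndTee(run_func, verbose_stream, status_stream, verbose):
    sink = six.moves.StringIO()

    result = run_func(StreamDecorator([sink, verbose_stream]))

    # Quiet runs only surface the captured output when something went wrong
    if result != 0 and not verbose:
        status_stream.write(sink.getvalue())

    return result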
def Clean(
    output_dir,
    output_stream=sys.stdout,
):
    if not os.path.isdir(output_dir):
        output_stream.write("'{}' does not exist.\n".format(output_dir))
        return 0

    output_stream.write("Removing '{}'...".format(output_dir))
    with StreamDecorator(output_stream).DoneManager():
        FileSystem.RemoveTree(output_dir)

    return 0
def CommandLineCleanOutputFilename(output_filename, output_stream):
    output_stream = StreamDecorator(output_stream)

    if not os.path.isfile(output_filename):
        output_stream.write("'{}' does not exist.\n".format(output_filename))
    else:
        output_stream.write("Removing '{}'...".format(output_filename))
        with output_stream.DoneManager():
            FileSystem.RemoveFile(output_filename)

    return 0
def List(
    root_dir,
    output_stream=sys.stdout,
):
    assert os.path.isdir(root_dir), root_dir
    assert output_stream

    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResults: ",
        suffix='\n',
    ) as dm:
        for build_info in _GetBuildInfos(root_dir, dm.stream):
            dm.stream.write(
                "{filename:<120} {priority}\n".format(
                    filename="{}:".format(build_info.filename),
                    priority=build_info.configuration.Priority,
                ),
            )

        return dm.result
def Clean(
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        output_dir = os.path.join(_script_dir, "Generated")

        if not os.path.isdir(output_dir):
            dm.stream.write("The output directory does not exist.\n")
        else:
            dm.stream.write("Removing '{}'...".format(output_dir))
            with dm.stream.DoneManager():
                FileSystem.RemoveTree(output_dir)

        return dm.result
def Clean(
    output_stream=sys.stdout,
):
    output_dir = os.path.join(_script_dir, "..", "GeneratedCode")

    if not os.path.isdir(output_dir):
        output_stream.write("'{}' does not exist.\n".format(output_dir))
    else:
        filenames = [
            "Compiler.ConditionalInvocationQueryMixin.data",
            "CppToJson_PythonJsonSerialization.py",
        ]

        output_stream.write("Removing content in '{}'...".format(output_dir))
        with StreamDecorator(output_stream).DoneManager():
            for filename in filenames:
                filename = os.path.join(output_dir, filename)
                FileSystem.RemoveFile(filename)

    return 0
def Verify(
    input,                                  # <Redefining built-in type> pylint: disable = W0622
    passing_score=None,
    output_stream=sys.stdout,
    verbose=False,
):
    """Verifies the given python input"""

    inputs = input
    del input

    return VerifierMod.CommandLineVerify(
        Verifier,
        inputs,
        StreamDecorator(output_stream),
        verbose,
        passing_score=passing_score,
    )
def Build(
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        input_file = os.path.join(_script_dir, "..", "SimpleSchema.g4")
        assert os.path.isfile(input_file), input_file

        output_dir = os.path.join(_script_dir, "..", "GeneratedCode")

        command_line = '{script} Compile Python3 -o "{output_dir}" -no-listener -visitor "{input_file}"'.format(
            script=CurrentShell.CreateScriptName("ANTLR"),
            output_dir=output_dir,
            input_file=input_file,
        )

        dm.result = Process.Execute(command_line, dm.stream)

        return dm.result
def Clean(
    configuration,
    output_dir,
    output_stream=sys.stdout,
):
    """Cleans previously built content"""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        if not os.path.isdir(output_dir):
            dm.stream.write("\nNothing to clean.\n")
        else:
            dm.stream.write("Removing '{}'...".format(output_dir))
            with dm.stream.DoneManager():
                FileSystem.RemoveTree(output_dir)

        return dm.result
def EntryPoint(
    zipped_input_filename,
    output_stream=sys.stdout,
):
    """Generates JSON files based on data previously pickled"""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        output_dir = os.path.join(_script_dir, "..", "GeneratedCode")

        FileSystem.RemoveTree(output_dir)
        FileSystem.MakeDirs(output_dir)

        df = _holiday_data_loader(zipped_input_filename)

        # with open('holidays.json', 'w') as f:
        #     f.write(df.to_json(orient='records', lines=True))

        allCountryNames = list(set(df['countryOrRegion']))

        for countryName in allCountryNames:
            dfByCountry = df.loc[df['countryOrRegion'] == countryName]

            date = [int(x.timestamp()) for x in list(dfByCountry['date'])]
            name = list(dfByCountry['normalizeHolidayName'])

            date_dict = {"Date": date}
            name_dict = {"Holiday": name}

            out = {}
            out.update(date_dict)
            out.update(name_dict)

            jsonPath = os.path.join(output_dir, "{}.json".format(countryName))
            with open(jsonPath, 'w') as f:
                json.dump(out, f)

        return dm.result
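# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the shape of each generated
# per-country JSON file, e.g. a hypothetical GeneratedCode/Norway.json:
#
#   {"Date": [1546300800, 1555286400], "Holiday": ["New Year's Day", "..."]}
#
# "Date" holds POSIX timestamps (seconds) and "Holiday" the matching holiday
# names, index-aligned with the dates.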
def Clean(
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        for index, (plugin, output_dir) in enumerate(_PLUGINS):
            output_dir = os.path.realpath(output_dir)

            dm.stream.write(
                "Processing '{}' ({} of {})...".format(plugin, index + 1, len(_PLUGINS)),
            )
            with dm.stream.DoneManager() as this_dm:
                if not os.path.isdir(output_dir):
                    this_dm.stream.write("'{}' does not exist.\n".format(output_dir))
                    continue

                this_dm.stream.write("Removing '{}'...".format(output_dir))
                with this_dm.stream.DoneManager():
                    FileSystem.RemoveTree(output_dir)

        return dm.result
def Build(
    force=False,
    output_stream=sys.stdout,
    verbose=False,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        dm.result = Process.Execute(
            '"{script}" Generate PythonYaml CodeCoverageFilter "{output_dir}" "/input={input_file}" /plugin_arg=no_serialization:True{force}{verbose}'.format(
                script=CurrentShell.CreateScriptName("SimpleSchemaGenerator"),
                output_dir=os.path.join(_script_dir, "..", "GeneratedCode"),
                input_file=os.path.join(_script_dir, "..", "CodeCoverageFilter.SimpleSchema"),
                force=" /force" if force else "",
                verbose=" /verbose" if verbose else "",
            ),
            dm.stream,
        )

        return dm.result
def _InvokeImpl(cls, invoke_reason, context, status_stream, verbose_stream, verbose):
    with status_stream.DoneManager(
        associated_stream=verbose_stream,
    ) as (this_dm, this_verbose_stream):
        generated_python_context = cls._GenerateScriptContent(context)
        assert generated_python_context

        temp_filename = CurrentShell.CreateTempFilename(".py")

        with open(temp_filename, 'w') as f:
            f.write(generated_python_context)

        if context["preserve_temp_dir"]:
            this_dm.stream.write("Writing to '{}'\n".format(temp_filename))
            cleanup_func = lambda: None
        else:
            cleanup_func = lambda: os.remove(temp_filename)

        try:
            sink = six.moves.StringIO()

            this_dm.result = cls._Compile(
                context,
                temp_filename,
                StreamDecorator([sink, this_verbose_stream]),
            )

            if this_dm.result != 0:
                if not verbose:
                    this_dm.stream.write(sink.getvalue())

            return this_dm.result
        finally:
            if this_dm.result == 0:
                cleanup_func()
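# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the keep-temp-file-on-failure
# pattern used by _InvokeImpl above, reduced to the standard library. The
# temporary script is only deleted when compilation succeeds, leaving evidence
# behind for debugging. 'compile_func' is a hypothetical callable.
import os
import tempfile

def _ExampleKeepTempOnFailure(content, compile_func):
    fd, temp_filename = tempfile.mkstemp(suffix=".py")
    with os.fdopen(fd, "w") as f:
        f.write(content)

    result = compile_func(temp_filename)
    if result == 0:
        os.remove(temp_filename)            # success: clean up
    # on failure, temp_filename is preserved for inspection

    return result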
def Execute(
    root_dir,
    output_dir,
    mode=None,
    debug_only=False,
    release_only=False,
    output_stream=sys.stdout,
    verbose=False,
):
    """Recursively calls Build files with the desired mode(s)"""

    assert os.path.isdir(root_dir), root_dir
    assert output_dir

    modes = mode or ["clean", "build"]
    del mode

    assert output_stream

    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResults: ",
        suffix='\n',
    ) as dm:
        build_infos = _GetBuildInfos(root_dir, dm.stream)
        if not build_infos:
            return dm.result

        # Find all the build files that have configurations that we can process
        build_configurations = []

        dm.stream.write("Processing build files...")
        with dm.stream.DoneManager(
            done_suffix=lambda: "{} found".format(inflect.no("configuration", len(build_configurations))),
        ) as this_dm:
            # ----------------------------------------------------------------------
            def GetSupportedConfigurations(configurations):
                # If there is a configuration that indicates completeness, execute that
                # and skip everything else.
                if COMPLETE_CONFIGURATION_NAME in configurations:
                    yield COMPLETE_CONFIGURATION_NAME
                    return

                # Iterate the argument (not the outer list under construction)
                for config in configurations:
                    config_lower = config.lower()

                    if (
                        (debug_only and "debug" in config_lower)
                        or (release_only and "release" in config_lower)
                        or (not debug_only and not release_only)
                    ):
                        yield config

            # ----------------------------------------------------------------------

            for build_info in build_infos:
                if not build_info.configuration.Configurations:
                    build_configurations.append((build_info.filename, build_info.configuration, None))
                else:
                    for config in GetSupportedConfigurations(build_info.configuration.Configurations):
                        build_configurations.append((build_info.filename, build_info.configuration, config))

        if not build_configurations:
            return dm.result

        dm.stream.write('\n')

        for mode_index, mode in enumerate(modes):
            dm.stream.write("Invoking '{}' ({} of {})...".format(mode, mode_index + 1, len(modes)))
            with dm.stream.DoneManager() as mode_dm:
                for build_index, (build_filename, config, configuration) in enumerate(build_configurations):
                    mode_dm.stream.write(
                        "Processing '{}'{} ({} of {})...".format(
                            build_filename,
                            " - '{}'".format(configuration) if configuration else '',
                            build_index + 1,
                            len(build_configurations),
                        ),
                    )
                    with mode_dm.stream.DoneManager() as build_dm:
                        build_output_dir = os.path.join(output_dir, config.SuggestedOutputDirLocation, configuration or "Build")
                        FileSystem.MakeDirs(build_output_dir)

                        command_line = 'python "{build_filename}" {mode}{configuration}{output_dir}'.format(
                            build_filename=build_filename,
                            mode=mode,
                            configuration=' "{}"'.format(configuration) if configuration else '',
                            output_dir=' "{}"'.format(build_output_dir) if config.RequiresOutputDir else '',
                        )

                        build_dm.result, output = Process.Execute(command_line)

                        # It is possible that the cleaning process deleted the output directory. Recreate it
                        # if necessary to store the log file.
                        FileSystem.MakeDirs(build_output_dir)

                        with open(os.path.join(build_output_dir, BUILD_LOG_TEMPLATE.format(mode=mode)), 'w') as f:
                            f.write(output)

                        if build_dm.result != 0:
                            build_dm.stream.write(output)
                        elif verbose:
                            build_dm.stream.write(StringHelpers.LeftJustify("INFO: {}".format(output), len("INFO: ")))

        return dm.result
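# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the command line Execute
# composes for each build file, shown with hypothetical values.
def _ExampleBuildCommandLine():
    build_filename = "C:/src/Foo/Build.py"      # hypothetical
    mode = "build"
    configuration = "Debug"
    build_output_dir = "C:/out/Foo/Debug"       # hypothetical
    requires_output_dir = True

    return 'python "{build_filename}" {mode}{configuration}{output_dir}'.format(
        build_filename=build_filename,
        mode=mode,
        configuration=' "{}"'.format(configuration) if configuration else '',
        output_dir=' "{}"'.format(build_output_dir) if requires_output_dir else '',
    )
    # -> python "C:/src/Foo/Build.py" build "Debug" "C:/out/Foo/Debug"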
def EntryPoint(
    plugin,
    input_filename,
    output_dir,
    include=None,
    exclude=None,
    output_stream=sys.stdout,
):
    """Generates content based on a configuration file according to the specified plugin"""

    plugin = PLUGINS[plugin]

    # ----------------------------------------------------------------------
    def ToRegex(value):
        try:
            return re.compile(value)
        except re.error:
            raise CommandLine.UsageException(
                "'{}' is not a valid regular expression".format(value),
            )

    # ----------------------------------------------------------------------

    # Guard against the None defaults so the comprehensions below are safe
    includes = [ToRegex(arg) for arg in (include or [])]
    del include

    excludes = [ToRegex(arg) for arg in (exclude or [])]
    del exclude

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        dm.stream.write("Reading input data...")
        with dm.stream.DoneManager() as this_dm:
            try:
                data = Serialization.Deserialize(input_filename)
            except Exception as e:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        ERROR: {}
                        {}
                        """,
                    ).format(
                        StringHelpers.LeftJustify(str(e), len("ERROR: ")),
                        str(getattr(e, "stack", None)),
                    ),
                )

                this_dm.result = -1
                return this_dm.result

        nonlocals = CommonEnvironment.Nonlocals(skipped=0)

        dm.stream.write("Preprocessing data...")
        with dm.stream.DoneManager(
            done_suffix=lambda: "{} were skipped".format(inflect.no("file", nonlocals.skipped)),
            suffix=lambda: "\n" if nonlocals.skipped else None,
        ) as this_dm:
            # ----------------------------------------------------------------------
            def NormalizeEnum(enum):
                # Simplify the provided enum structure by creating an ordered dictionary with names and values
                if hasattr(enum, "integer_values"):
                    if len(enum.integer_values) != len(enum.values):
                        raise Exception(
                            "When integer values are specified for an enum, the number of integers must match the number of enums ('{}', '{}')".format(
                                enum.values,
                                enum.integer_values,
                            ),
                        )

                    integer_values = enum.integer_values
                    del enum.integer_values
                else:
                    integer_values = list(range(enum.starting_index, enum.starting_index + len(enum.values)))
                    del enum.starting_index

                assert len(enum.values) == len(integer_values), (enum.values, integer_values)

                enum.values = OrderedDict([(k, v) for k, v in zip(enum.values, integer_values)])

                return enum

            # ----------------------------------------------------------------------

            # Get the global custom structs
            global_custom_struct_names = set()
            global_custom_structs = []

            for item in data.custom_structs:
                if item.name in global_custom_struct_names:
                    raise Exception("The custom struct '{}' has already been defined".format(item.name))

                global_custom_struct_names.add(item.name)
                global_custom_structs.append(item)

            # Get the global custom enums
            global_custom_enum_names = set()
            global_custom_enums = []

            for item in data.custom_enums:
                if item.name in global_custom_enum_names:
                    raise Exception("The custom enum '{}' has already been defined".format(item.name))

                global_custom_enum_names.add(item.name)
                global_custom_enums.append(NormalizeEnum(item))

            # If there are templates at play, preprocess the content and expand the values
            new_data = []

            for item in data.featurizers:
                if item.status != "Available":
                    this_dm.stream.write(
                        "The status for '{}' is set to '{}' and will not be processed.\n".format(
                            item.name,
                            item.status,
                        ),
                    )
                    nonlocals.skipped += 1
                    continue

                if excludes and any(exclude.match(item.name) for exclude in excludes):
                    this_dm.stream.write("'{}' has been explicitly excluded.\n".format(item.name))
                    nonlocals.skipped += 1
                    continue

                if includes and not any(include.match(item.name) for include in includes):
                    this_dm.stream.write("'{}' has not been included.\n".format(item.name))
                    nonlocals.skipped += 1
                    continue

                for enum in getattr(item, "custom_enums", []):
                    NormalizeEnum(enum)

                if not hasattr(item, "templates"):
                    assert item.type_mappings

                    for mapping in item.type_mappings:
                        new_item = copy.deepcopy(item)

                        new_item.input_type = mapping.input_type
                        new_item.output_type = mapping.output_type

                        new_data.append([new_item])

                    continue

                new_data_items = []

                for template in item.templates:
                    regex = re.compile(r"\b{}\b".format(template.name))

                    for template_type in template.types:
                        new_item = copy.deepcopy(item)
                        new_item.template = template_type

                        # Remove the template mapping and list of templates
                        del new_item.templates
                        del new_item.type_mappings

                        for configuration_param in getattr(new_item, "configuration_params", []):
                            configuration_param.type = regex.sub(template_type, configuration_param.type)

                        for custom_struct in getattr(new_item, "custom_structs", []):
                            if any(gcs for gcs in global_custom_structs if gcs.name == custom_struct.name):
                                raise Exception(
                                    "The custom structure '{}' in '{}' has already been defined as a global custom struct.\n".format(
                                        custom_struct.name,
                                        item.name,
                                    ),
                                )

                            for member in custom_struct.members:
                                member.type = regex.sub(template_type, member.type)

                        for custom_enum in getattr(new_item, "custom_enums", []):
                            if any(gce for gce in global_custom_enums if gce.name == custom_enum.name):
                                raise Exception(
                                    "The custom enum '{}' in '{}' has already been defined as a global custom enum.\n".format(
                                        custom_enum.name,
                                        item.name,
                                    ),
                                )

                            custom_enum.underlying_type = regex.sub(template_type, custom_enum.underlying_type)

                        for mapping in item.type_mappings:
                            # TODO: sub all types (for example: map<K, V>)
                            if not regex.search(mapping.input_type) and not regex.search(mapping.output_type):
                                continue

                            new_item.input_type = regex.sub(template_type, mapping.input_type)
                            if new_item.input_type != mapping.input_type:
                                new_item.input_type_template_mapping = OrderedDict([(template_type, template.name)])

                            new_item.output_type = regex.sub(template_type, mapping.output_type)
                            if new_item.output_type != mapping.output_type:
                                new_item.output_type_template_mapping = OrderedDict([(template_type, template.name)])

                            # This will end up copying one more time than needed, but I couldn't
                            # think of a better way for now.
                            new_data_items.append(copy.deepcopy(new_item))

                new_data.append(new_data_items)

            data = new_data

        # Validate parameters
        dm.stream.write("Validating types...")
        with dm.stream.DoneManager():
            for items in data:
                for item in items:
                    # ----------------------------------------------------------------------
                    def IsSupportedType(typename):
                        for potential_type in SUPPORTED_TYPES:
                            if hasattr(potential_type, "match"):
                                if potential_type.match(typename):
                                    return True
                            elif typename == potential_type:
                                return True

                        return False

                    # ----------------------------------------------------------------------
                    def IsCustomStructType(typename):
                        return any(
                            custom_struct
                            for custom_struct in itertools.chain(getattr(item, "custom_structs", []), global_custom_structs)
                            if custom_struct.name == typename
                        )

                    # ----------------------------------------------------------------------
                    def IsCustomEnumType(typename):
                        return any(
                            custom_enum
                            for custom_enum in itertools.chain(getattr(item, "custom_enums", []), global_custom_enums)
                            if custom_enum.name == typename
                        )

                    # ----------------------------------------------------------------------

                    input_type = item.input_type
                    if input_type.endswith("?"):
                        input_type = input_type[:-1]

                    if (
                        not IsSupportedType(input_type)
                        and not IsCustomStructType(input_type)
                        and not IsCustomEnumType(input_type)
                    ):
                        raise Exception(
                            "The input type '{}' defined in '{}' is not valid.".format(
                                input_type,
                                item.name,
                            ),
                        ) from None

                    output_type = item.output_type
                    if output_type.endswith("?"):
                        output_type = output_type[:-1]

                    if (
                        not IsSupportedType(output_type)
                        and not IsCustomStructType(output_type)
                        and not IsCustomEnumType(output_type)
                    ):
                        raise Exception(
                            "The output type '{}' defined in '{}' is not valid.".format(
                                output_type,
                                item.name,
                            ),
                        ) from None

        dm.stream.write("Generating content...")
        with dm.stream.DoneManager() as this_dm:
            FileSystem.MakeDirs(output_dir)

            # ----------------------------------------------------------------------
            def CalcHash(filename):
                hash = hashlib.sha256()

                with open(filename, "rb") as f:
                    while True:
                        block = f.read(4096)
                        if not block:
                            break

                        hash.update(block)

                return hash.digest()

            # ----------------------------------------------------------------------
            @contextlib.contextmanager
            def FileWriter(filename, mode):
                """\
                Method that writes to a temporary location and only copies to the intended
                destination if there are changes. This prevents full rebuilds (which are
                triggered based on timestamps) on files that haven't changed.
                """

                temp_filename = CurrentShell.CreateTempFilename()

                with open(temp_filename, mode) as f:
                    yield f

                if not os.path.isfile(filename) or CalcHash(temp_filename) != CalcHash(filename):
                    FileSystem.RemoveFile(filename)
                    shutil.move(temp_filename, filename)
                else:
                    FileSystem.RemoveFile(temp_filename)

            # ----------------------------------------------------------------------

            this_dm.result = plugin.Generate(
                FileWriter,
                global_custom_structs,
                global_custom_enums,
                data,
                output_dir,
                this_dm.stream,
            )
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
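# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the write-if-changed idea
# behind FileWriter above, reduced to the standard library. Writing to a temp
# file and only replacing the destination when the digest differs keeps
# timestamps stable, which avoids spurious timestamp-driven rebuilds.
import hashlib
import os
import shutil
import tempfile

def _ExampleWriteIfChanged(filename, content):
    fd, temp_filename = tempfile.mkstemp()
    with os.fdopen(fd, "w") as f:
        f.write(content)

    def digest(path):
        with open(path, "rb") as f:
            return hashlib.sha256(f.read()).digest()

    if not os.path.isfile(filename) or digest(temp_filename) != digest(filename):
        if os.path.isfile(filename):
            os.remove(filename)             # shutil.move won't overwrite everywhere
        shutil.move(temp_filename, filename)
    else:
        os.remove(temp_filename)            # unchanged; keep the old timestamp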
def EntryPoint(
    root_dir,
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResults: ",
        suffix='\n',
    ) as dm:
        repositories = []

        dm.stream.write("\nSearching for repositories in '{}'...".format(root_dir))
        with dm.stream.DoneManager(
            done_suffix=lambda: inflect.no("repository", len(repositories)),
        ):
            for scm, directory in EnumSCMs(root_dir):
                if scm.Name != "Mercurial":
                    continue

                repositories.append(directory)

        # Organize the repos
        dm.stream.write("Organizing...")
        with dm.stream.DoneManager():
            repo_dict = OrderedDict()

            common_prefix = FileSystem.GetCommonPath(*repositories)
            common_prefix_len = len(common_prefix)

            for repository in repositories:
                suffix = repository[common_prefix_len:]
                parts = suffix.split(os.path.sep)

                repo_name = parts[-1]
                prefixes = parts[:-1]

                rd = repo_dict
                for prefix in prefixes:
                    rd.setdefault(prefix, OrderedDict())
                    rd = rd[prefix]

                rd[repo_name] = repository

        # Write the content
        dm.stream.write("Writing TortoiseHg content...")
        with dm.stream.DoneManager():
            filename = os.path.join(os.getenv("APPDATA"), "TortoiseHg", "thg-reporegistry.xml")
            assert os.path.isfile(filename), filename

            with open(filename, 'w') as f:
                # ----------------------------------------------------------------------
                def GenerateContent(root, is_root):
                    items = []

                    for k, v in six.iteritems(root):
                        if isinstance(v, six.string_types):
                            items.append(
                                '<repo root="{}" shortname="{}" />\n'.format(
                                    v,
                                    os.path.basename(k),
                                ),
                            )
                        else:
                            tag_name = "allgroup" if is_root else "group"

                            items.append(
                                textwrap.dedent(
                                    """\
                                    <{tag_name} name="{name}">
                                      {content}
                                    </{tag_name}>
                                    """,
                                ).format(
                                    tag_name=tag_name,
                                    name=k,
                                    content=StringHelpers.LeftJustify(GenerateContent(v, False), 2).rstrip(),
                                ),
                            )

                    return ''.join(items)

                # ----------------------------------------------------------------------

                f.write(
                    textwrap.dedent(
                        """\
                        <?xml version="1.0" encoding="UTF-8"?>
                        <reporegistry>
                          <treeitem>
                        {}
                          </treeitem>
                        </reporegistry>
                        """,
                    ).format(
                        StringHelpers.LeftJustify(
                            GenerateContent(repo_dict, True).rstrip(),
                            4,
                            skip_first_line=False,
                        ),
                    ),
                )

        return dm.result
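# ----------------------------------------------------------------------
# Hedged sketch (not part of the original source): the shape of the XML that
# GenerateContent emits for a small hypothetical repo_dict.
#
#   repo_dict = OrderedDict([("Common", OrderedDict([("Repo", "C:/src/Common/Repo")]))])
#
# produces (inside <treeitem>):
#
#   <allgroup name="Common">
#     <repo root="C:/src/Common/Repo" shortname="Repo" />
#   </allgroup>
#
# Nested dictionaries deeper than the root level use <group> rather than
# <allgroup>, mirroring the is_root flag above.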