def Normalize( script_filename_or_dir, output_stream=sys.stdout, ):
    """Normalizes a script so that it can be run from any location."""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResults: ",
        suffix='\n',
    ) as dm:
        # Resolve the input into a list of script filenames: either the single
        # file provided, or every file directly inside the directory (no recursion).
        if os.path.isfile(script_filename_or_dir):
            filenames = [script_filename_or_dir]
        elif os.path.isdir(script_filename_or_dir):
            filenames = list(FileSystem.WalkFiles(script_filename_or_dir, recurse=False))
        else:
            assert False

        total = len(filenames)

        for item_index, filename in enumerate(filenames):
            # Fresh result holder per script; the done_suffix lambda reads it
            # after NormalizeScript has populated it.
            nonlocals = CommonEnvironment.Nonlocals(result=None)

            dm.stream.write("Processing '{}' ({} of {})...".format(filename, item_index + 1, total))
            with dm.stream.DoneManager(
                done_suffix=lambda: PythonActivationActivity.NormalizeScriptResultStrings[nonlocals.result],
            ):
                nonlocals.result = PythonActivationActivity.NormalizeScript(filename)

        return dm.result
def GetFilters(filename):
    """\
    Given a filename, will parse all coverage yaml files in its ancestors and create
    a list of includes and excludes based on the union of all filters found that
    apply to the file.
    """
    # Walk from the file's directory up to the filesystem root, collecting every
    # filter file encountered along the way (nearest directory first).
    yaml_filenames = []

    dirname = os.path.dirname(os.path.realpath(filename))
    while True:
        potential_filename = os.path.join(dirname, FILTER_FILENAME)
        if os.path.isfile(potential_filename):
            yaml_filenames.append(potential_filename)

        parent = os.path.dirname(dirname)
        if parent == dirname:
            # Reached the root; dirname of the root is itself.
            break
        dirname = parent

    includes = []
    excludes = []

    # ----------------------------------------------------------------------
    def ApplyFilter(filter_info):
        # Accumulate this filter's patterns; the return value indicates whether
        # processing should continue with filters found higher in the tree.
        includes.extend(filter_info.includes)
        excludes.extend(filter_info.excludes)
        return filter_info.continue_processing

    # ----------------------------------------------------------------------

    keep_going = True

    for yaml_filename in yaml_filenames:
        obj = _Load(yaml_filename)

        # An unconditional filter applies to every file; only a literal False
        # (not merely falsy) stops further processing here.
        if obj.filter is not None and ApplyFilter(obj.filter) is False:
            break

        # Named filters apply only when their glob matches this filename.
        for named_filter in obj.named_filters:
            if fnmatch(filename, named_filter.glob):
                keep_going = ApplyFilter(named_filter)
                if not keep_going:
                    break

        if not keep_going:
            break

    return includes, excludes
def test_Standard(self):
    """@clsinit must invoke __clsinit__ at class-creation time."""
    state = CommonEnvironment.Nonlocals(value=False)

    # ----------------------------------------------------------------------
    @clsinit
    class Initialized(object):
        @classmethod
        def __clsinit__(cls):
            state.value = True

    # ----------------------------------------------------------------------

    # By the time the class statement completes, the hook must have fired.
    self.assertTrue(state.value)
def test_SingleValue(self):
    """A single callable passed to CallOnExit runs when the block exits normally."""
    state = CommonEnvironment.Nonlocals(value=0)

    # ----------------------------------------------------------------------
    def OnExit():
        state.value = 1

    # ----------------------------------------------------------------------
    with CallOnExit(OnExit):
        pass

    self.assertEqual(state.value, 1)
def test_OnlyOnSuccess(self):
    """With the success-only flag set, the callback is skipped when the block raises."""
    state = CommonEnvironment.Nonlocals(value=0)

    # ----------------------------------------------------------------------
    def OnExit():
        state.value = 1

    # ----------------------------------------------------------------------
    try:
        with CallOnExit(True, OnExit):
            raise Exception("")
    except:
        pass

    # The exception propagated out of the block, so OnExit must not have run.
    self.assertEqual(state.value, 0)
def test_AlwaysCall(self):
    """Without the success-only flag, the callback runs even when the block raises."""
    state = CommonEnvironment.Nonlocals(value=0)

    # ----------------------------------------------------------------------
    def OnExit():
        state.value = 1

    # ----------------------------------------------------------------------
    try:
        with CallOnExit(OnExit):
            raise Exception("")
    except:
        pass

    # Despite the exception, OnExit must have been invoked.
    self.assertEqual(state.value, 1)
def test_Standard(self):
    """Attributes set on a Nonlocals object inside a nested function are visible to the caller."""
    state = CommonEnvironment.Nonlocals(x=10, y=20, z=30)

    # ----------------------------------------------------------------------
    def Modify():
        state.x = 100
        state.y = 200

    # ----------------------------------------------------------------------
    Modify()

    # x and y were reassigned by the closure; z was left untouched.
    self.assertEqual(state.x, 100)
    self.assertEqual(state.y, 200)
    self.assertEqual(state.z, 30)
def _EnumerateLinesImpl( cls, lines, include_next_line_func=False, ):
    """Yield (index, line) pairs, honoring 'clang-format off'/'clang-format on' comments.

    When include_next_line_func is True, each yielded tuple additionally carries a
    callable that lets the consumer reposition the enumeration cursor for the
    next iteration.
    """
    # The cursor lives in a Nonlocals wrapper so the SetNextLine closure can
    # rebind it from within the caller's code.
    nonlocals = CommonEnvironment.Nonlocals(index=0)

    if include_next_line_func:
        # ----------------------------------------------------------------------
        def SetNextLine(index):
            nonlocals.index = index

        # ----------------------------------------------------------------------
        def MakeResult(index, line):
            return index, line, SetNextLine
    else:
        def MakeResult(index, line):
            return index, line

    disabled = False

    while nonlocals.index < len(lines):
        current_index = nonlocals.index
        current_line = lines[current_index]

        # Advance before yielding so SetNextLine (invoked by the consumer)
        # takes effect on the following iteration.
        nonlocals.index += 1

        comments = cls._GetComments(current_line)
        if comments is not None:
            if not disabled and comments == "clang-format off":
                disabled = True
                continue
            elif comments == "clang-format on":
                disabled = False
                continue

        # Lines inside a disabled region are suppressed entirely.
        if disabled:
            continue

        yield MakeResult(current_index, current_line)
def test_MultipleValues(self):
    """Every callable passed to CallOnExit runs when the block exits."""
    state = CommonEnvironment.Nonlocals(value1=0, value2=0)

    # ----------------------------------------------------------------------
    def OnExitFirst():
        state.value1 = 1

    # ----------------------------------------------------------------------
    def OnExitSecond():
        state.value2 = 1

    # ----------------------------------------------------------------------
    with CallOnExit(OnExitFirst, OnExitSecond):
        pass

    self.assertEqual(state.value1, 1)
    self.assertEqual(state.value2, 1)
def _AcceptImpl(cls, element_or_elements, traverse, should_visit_func, lookup_map, child_visitation_lookup_map, visited, *args, **kwargs):
    """Visit one element (or a list of elements), dispatching by type, optionally recursing into children.

    `visited` is a shared set of element ids that prevents re-visiting across the
    whole recursion. Returns the first non-None result produced by a visitation
    callback, or None when no callback produced one.
    """
    # Normalize the input into a list so a single element and a list are handled uniformly.
    if isinstance(element_or_elements, list):
        elements = element_or_elements
    else:
        elements = [element_or_elements]

    for element in elements:
        # Dedupe by identity: the same element object is processed at most once.
        element_id = id(element)
        if element_id in visited:
            continue
        visited.add(element_id)

        if not should_visit_func(element):
            continue

        typ = type(element)
        if typ not in lookup_map:
            raise Exception("'{}' was not expected ({})".format(typ, element))

        cls.OnEnteringElement(element, *args, **kwargs)
        # OnExitingElement is guaranteed to run when this with-block exits,
        # even on early return or exception.
        with CallOnExit(lambda: cls.OnExitingElement(element, *args, **kwargs)):
            result = lookup_map[typ](element, *args, **kwargs)

            # Wrapped so CallVisited (below) can overwrite the result from its closure.
            nonlocals = CommonEnvironment.Nonlocals(result=result, )

            # NOTE(review): presumably ChildrenMixin marks elements that have
            # children and VariantElement children are deliberately not
            # traversed — confirm against the element class definitions.
            if traverse and isinstance(element, ChildrenMixin) and not isinstance(element, VariantElement):
                if typ not in child_visitation_lookup_map:
                    raise Exception("'{}' was not expected ({})".format(typ, element))

                visiting_func, visited_func = child_visitation_lookup_map[typ]

                # A visiting_func that returns exactly False suppresses child traversal.
                if visiting_func(element, *args, **kwargs) != False:
                    # ----------------------------------------------------------------------
                    def CallVisited():
                        # The "visited" callback may supply a result, but only
                        # if the type handler above did not already produce one.
                        visited_result = visited_func(element, *args, **kwargs)
                        if visited_result is not None and nonlocals.result is None:
                            nonlocals.result = visited_result

                    # ----------------------------------------------------------------------
                    # Children are visited first; CallVisited fires afterwards.
                    with CallOnExit(CallVisited):
                        for child in element.Children:
                            cls._AcceptImpl(child, traverse, should_visit_func, lookup_map, child_visitation_lookup_map, visited, *args, **kwargs)

            # First non-None result short-circuits the remaining elements.
            if nonlocals.result is not None:
                return nonlocals.result

    return None
def EntryPoint( plugin, input_filename, output_dir, include=None, exclude=None, output_stream=sys.stdout, ):
    """Generates content based on a configuration file according to the specified plugin.

    Pipeline: deserialize the input, preprocess/expand templated featurizers,
    validate input/output types, then hand everything to the plugin's Generate.
    Returns the DoneManager result code (0 on success).
    """
    # Resolve the plugin name into its implementation object.
    plugin = PLUGINS[plugin]

    # ----------------------------------------------------------------------
    def ToRegex(value):
        # Convert a user-supplied pattern into a compiled regex, surfacing
        # compilation failures as a usage error rather than a traceback.
        try:
            return re.compile(value)
        except:
            raise CommandLine.UsageException(
                "'{}' is not a valid regular expression".format(value),
            )

    # ----------------------------------------------------------------------
    # Compile the filters once, then drop the raw lists so they cannot be
    # used accidentally below.
    includes = [ToRegex(arg) for arg in include]
    del include
    excludes = [ToRegex(arg) for arg in exclude]
    del exclude

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        dm.stream.write("Reading input data...")
        with dm.stream.DoneManager() as this_dm:
            try:
                data = Serialization.Deserialize(input_filename)
            except Exception as e:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        ERROR: {}
                               {}
                        """,
                    ).format(
                        StringHelpers.LeftJustify(str(e), len("ERROR: ")),
                        str(getattr(e, "stack", None)),
                    ),
                )
                this_dm.result = -1
                return this_dm.result

        # Counter shared with the DoneManager suffix lambdas below.
        nonlocals = CommonEnvironment.Nonlocals(skipped=0, )

        dm.stream.write("Preprocessing data...")
        with dm.stream.DoneManager(
            done_suffix=lambda: "{} were skipped".format(inflect.no("file", nonlocals.skipped), ),
            suffix=lambda: "\n" if nonlocals.skipped else None,
        ) as this_dm:
            # ----------------------------------------------------------------------
            def NormalizeEnum(enum):
                # Simplify the provided enum structure by creating an ordered dictionary with names and values
                if hasattr(enum, "integer_values"):
                    if len(enum.integer_values) != len(enum.values):
                        raise Exception(
                            "When integer values are specified for an enum, the number of integers must match the number of enums ('{}', '{}')".format(enum.values, enum.integer_values))
                    integer_values = enum.integer_values
                    del enum.integer_values
                else:
                    # No explicit integers: assign sequential values starting at starting_index.
                    integer_values = list(range(enum.starting_index, enum.starting_index + len(enum.values)))
                    del enum.starting_index

                assert len(enum.values) == len(integer_values), (enum.values, integer_values)

                enum.values = OrderedDict([(k, v) for k, v in zip(enum.values, integer_values)])

                return enum

            # ----------------------------------------------------------------------

            # Get the global custom structs
            global_custom_struct_names = set()
            global_custom_structs = []

            for item in data.custom_structs:
                if item.name in global_custom_struct_names:
                    raise Exception("The custom struct '{}' has already been defined".format(item.name))

                global_custom_struct_names.add(item.name)
                global_custom_structs.append(item)

            # Get the global custom enums
            global_custom_enum_names = set()
            global_custom_enums = []

            for item in data.custom_enums:
                if item.name in global_custom_enum_names:
                    raise Exception("The custom enum '{}' has already been defined".format(item.name))

                global_custom_enum_names.add(item.name)
                global_custom_enums.append(NormalizeEnum(item))

            # If there are templates at play, preprocess the content and expand the values
            new_data = []

            for item in data.featurizers:
                # Skip items filtered out by status or the include/exclude patterns.
                if item.status != "Available":
                    this_dm.stream.write(
                        "The status for '{}' is set to '{}' and will not be processed.\n".format(
                            item.name,
                            item.status,
                        ),
                    )
                    nonlocals.skipped += 1
                    continue

                if excludes and any(exclude.match(item.name) for exclude in excludes):
                    this_dm.stream.write("'{}' has been explicitly excluded.\n".format(item.name), )
                    nonlocals.skipped += 1
                    continue

                if includes and not any(include.match(item.name) for include in includes):
                    this_dm.stream.write("'{}' has not been included.\n".format(item.name), )
                    nonlocals.skipped += 1
                    continue

                for enum in getattr(item, "custom_enums", []):
                    NormalizeEnum(enum)

                # Non-templated items: one expanded copy per type mapping.
                if not hasattr(item, "templates"):
                    assert item.type_mappings
                    for mapping in item.type_mappings:
                        new_item = copy.deepcopy(item)
                        new_item.input_type = mapping.input_type
                        new_item.output_type = mapping.output_type
                        new_data.append([new_item])
                    continue

                new_data_items = []

                # Templated items: substitute each concrete type for the template
                # name wherever it appears (params, struct members, enums, mappings).
                for template in item.templates:
                    regex = re.compile(r"\b{}\b".format(template.name))

                    for template_type in template.types:
                        new_item = copy.deepcopy(item)
                        new_item.template = template_type

                        # Remove the template mapping and list of templates
                        del new_item.templates
                        del new_item.type_mappings

                        for configuration_param in getattr(new_item, "configuration_params", [], ):
                            configuration_param.type = regex.sub(template_type, configuration_param.type, )

                        for custom_struct in getattr(new_item, "custom_structs", []):
                            if any(gcs for gcs in global_custom_structs if gcs.name == custom_struct.name):
                                raise Exception("The custom structure '{}' in '{}' has already been defined as a global custom struct.\n".format(custom_struct.name, item.name))

                            for member in custom_struct.members:
                                member.type = regex.sub(template_type, member.type)

                        for custom_enum in getattr(new_item, "custom_enums", []):
                            if any(gce for gce in global_custom_enums if gce.name == custom_enum.name):
                                raise Exception("The custom enum '{}' in '{}' has already been defined as a global custom enum.\n".format(custom_enum.name, item.name))

                            custom_enum.underlying_type = regex.sub(template_type, custom_enum.underlying_type)

                        for mapping in item.type_mappings:
                            # TODO: sub all types (for example: map<K, V>
                            if not regex.search(mapping.input_type) and not regex.search(mapping.output_type):
                                continue

                            new_item.input_type = regex.sub(template_type, mapping.input_type)
                            if new_item.input_type != mapping.input_type:
                                new_item.input_type_template_mapping = OrderedDict([(template_type, template.name), ], )

                            new_item.output_type = regex.sub(template_type, mapping.output_type)
                            if new_item.output_type != mapping.output_type:
                                new_item.output_type_template_mapping = OrderedDict([(template_type, template.name), ], )

                            # This will end up copying one more time than needed, but I couldn't think of a better way for now.
                            new_data_items.append(copy.deepcopy(new_item))

                new_data.append(new_data_items)

            data = new_data

        # Validate parameters
        dm.stream.write("Validating types...")
        with dm.stream.DoneManager():
            for items in data:
                for item in items:
                    # ----------------------------------------------------------------------
                    def IsSupportedType(typename):
                        # SUPPORTED_TYPES may mix plain strings and compiled
                        # regexes (anything with a .match attribute).
                        for potential_type in SUPPORTED_TYPES:
                            if hasattr(potential_type, "match"):
                                if potential_type.match(typename):
                                    return True
                            elif typename == potential_type:
                                return True
                        return False

                    # ----------------------------------------------------------------------
                    def IsCustomStructType(typename):
                        return any(custom_struct for custom_struct in itertools.chain(getattr(item, "custom_structs", []), global_custom_structs) if custom_struct.name == typename)

                    # ----------------------------------------------------------------------
                    def IsCustomEnumType(typename):
                        return any(custom_enum for custom_enum in itertools.chain(getattr(item, "custom_enums", []), global_custom_enums) if custom_enum.name == typename)

                    # ----------------------------------------------------------------------

                    # A trailing '?' marks an optional type; strip it before validation.
                    input_type = item.input_type
                    if input_type.endswith("?"):
                        input_type = input_type[:-1]

                    if (not IsSupportedType(input_type) and not IsCustomStructType(input_type) and not IsCustomEnumType(input_type)):
                        raise Exception(
                            "The input type '{}' defined in '{}' is not valid.".format(
                                input_type,
                                item.name,
                            ),
                        ) from None

                    output_type = item.output_type
                    if output_type.endswith("?"):
                        output_type = output_type[:-1]

                    if (not IsSupportedType(output_type) and not IsCustomStructType(output_type) and not IsCustomEnumType(output_type)):
                        raise Exception(
                            "The output type '{}' defined in '{}' is not valid.".format(
                                output_type,
                                item.name,
                            ),
                        ) from None

        dm.stream.write("Generating content...")
        with dm.stream.DoneManager() as this_dm:
            FileSystem.MakeDirs(output_dir)

            # ----------------------------------------------------------------------
            def CalcHash(filename):
                # SHA-256 digest of the file, read in 4 KB blocks.
                hash = hashlib.sha256()
                with open(filename, "rb") as f:
                    while True:
                        block = f.read(4096)
                        if not block:
                            break
                        hash.update(block)
                return hash.digest()

            # ----------------------------------------------------------------------
            @contextlib.contextmanager
            def FileWriter(filename, mode):
                """\
                Method that writes to a temporary location and only copies to the intended
                destination if there are changes. This prevents full rebuilds (which are
                triggered based on timestamps) on files that haven't changed.
                """
                temp_filename = CurrentShell.CreateTempFilename()
                with open(temp_filename, mode) as f:
                    yield f

                if not os.path.isfile(filename) or CalcHash(temp_filename) != CalcHash(filename):
                    FileSystem.RemoveFile(filename)
                    shutil.move(temp_filename, filename)
                else:
                    FileSystem.RemoveFile(temp_filename)

            # ----------------------------------------------------------------------

            this_dm.result = plugin.Generate(FileWriter, global_custom_structs, global_custom_enums, data, output_dir, this_dm.stream, )
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
def Impl():
    """Run the full code-coverage workflow for an already-built set of binaries.

    NOTE(review): this is a closure — `context`, `on_status_update`,
    `code_coverage_executor`, `execute_result`, `coverage_output`, and
    `command_line` all come from the enclosing scope, which is not visible here.
    Results are reported by mutating `execute_result`/`coverage_output`
    rather than via return value.
    """
    # Instrument the binaries
    on_status_update("Instrumenting Binaries")

    # ----------------------------------------------------------------------
    def Invoke(task_index, output_stream):
        # Instrument a single binary; task_index maps into output_filenames.
        output_filename = context["output_filenames"][task_index]
        return code_coverage_executor.PreprocessBinary(output_filename, output_stream)

    # ----------------------------------------------------------------------

    sink = six.moves.StringIO()

    execute_result.CoverageResult = TaskPool.Execute(
        [TaskPool.Task(output_filename, Invoke) for output_filename in context["output_filenames"]],
        sink,
        verbose=True,
    )

    coverage_output["Instrumenting Binaries"] = sink.getvalue()

    if execute_result.CoverageResult != 0:
        return

    # Start coverage
    coverage_output_filename = os.path.join(context["output_dir"], code_coverage_executor.DefaultFileName)

    on_status_update("Starting Coverage Monitor")

    sink = six.moves.StringIO()
    execute_result.CoverageResult = code_coverage_executor.StartCoverage(coverage_output_filename, sink)
    coverage_output["Starting Coverage Monitor"] = sink.getvalue()

    if execute_result.CoverageResult != 0:
        return

    # Execute the test(s)
    on_status_update("Testing")

    test_start_time = time.time()

    sink = six.moves.StringIO()
    execute_result.TestResult = code_coverage_executor.Execute(command_line, sink)
    execute_result.TestOutput = sink.getvalue()
    execute_result.TestTime = datetime.timedelta(seconds=(time.time() - test_start_time), )

    if execute_result.TestResult != 0:
        return

    # Stop code coverage monitoring and extract the results
    on_status_update("Stopping Coverage Monitor")

    sink = six.moves.StringIO()
    execute_result.CoverageResult = code_coverage_executor.StopCoverage(sink)
    coverage_output["Stopping Coverage Monitor"] = sink.getvalue()

    if execute_result.CoverageResult != 0:
        return

    # Process the results
    output_names = [os.path.basename(output_filename) for output_filename in context["output_filenames"]]
    # Pre-sized so concurrent tasks can write by index without a lock.
    all_results = [None] * len(output_names)

    # Countdown shared across extraction tasks; guarded by nonlocals_lock.
    nonlocals = CommonEnvironment.Nonlocals(remaining=len(output_names), )
    nonlocals_lock = threading.Lock()

    status_template = "Extracting Coverage Results ({} remaining)"

    on_status_update(status_template.format(nonlocals.remaining))

    # ----------------------------------------------------------------------
    # NOTE(review): this intentionally shadows the earlier Invoke helper,
    # which is no longer needed at this point.
    def Invoke(task_index, output_stream):
        output_filename = context["output_filenames"][task_index]

        # This is a filename that can be used to specify includes and excludes. Note that this
        # does not correspond to an actual file, as we don't have that information available.
        mock_filter_filename = os.path.join(
            context["input"],
            os.path.splitext(os.path.basename(output_filename))[0],
        )

        includes, excludes = CodeCoverageFilter.GetFilters(mock_filter_filename)

        this_result = code_coverage_executor.ExtractCoverageInfo(
            coverage_output_filename,
            output_filename,
            includes,
            excludes,
            output_stream,
        )
        # A non-tuple result is an error code and is propagated as-is.
        if not isinstance(this_result, tuple):
            return this_result

        covered, not_covered = this_result

        all_results[task_index] = (covered, not_covered)

        with nonlocals_lock:
            nonlocals.remaining -= 1
            on_status_update(status_template.format(nonlocals.remaining))

        return 0

    # ----------------------------------------------------------------------

    sink = six.moves.StringIO()

    execute_result.CoverageResult = TaskPool.Execute(
        [TaskPool.Task(output_name, Invoke) for output_name in output_names],
        sink,
        verbose=True,
    )

    coverage_output["Extracting Coverage Results"] = sink.getvalue()

    if execute_result.CoverageResult != 0:
        return

    # Concatenate the results
    on_status_update("Finalizing Results")

    total_covered = 0
    total_not_covered = 0

    all_percentages = OrderedDict()

    for output_name, (covered, not_covered) in zip(output_names, all_results):
        total_covered += covered
        total_not_covered += not_covered

        result_blocks = covered + not_covered

        # Per-binary entry: (percentage or None when there were no blocks, display string).
        all_percentages[output_name] = (
            None if not result_blocks else ((float(covered) / result_blocks) * 100.0),
            "{} of {} {} covered".format(covered, result_blocks, code_coverage_executor.Units),
        )

    total_blocks = total_covered + total_not_covered

    execute_result.CoverageDataFilename = coverage_output_filename
    # Guard against division by zero when nothing was instrumented.
    execute_result.CoveragePercentage = (float(total_covered) / total_blocks if total_blocks else 0.0) * 100.0
    execute_result.CoveragePercentages = all_percentages
def Generate(cls, simple_schema_generator, invoke_reason, input_filenames, output_filenames, name, elements, include_indexes, status_stream, verbose_stream, verbose, **custom_settings):
    """Write a Python module containing OrderedDict lookup tables for every enum with friendly values.

    For each fundamental element whose TypeInfo is an EnumTypeInfo with
    FriendlyValues, emits a forward mapping (value -> friendly value), a
    reversed mapping, and max key/value length constants for each.

    BUG FIX: the emitted `*_max_key_length` / `*_max_value_length` lines
    previously generated `len(max(d.keys(), len))`, which passes the builtin
    `len` as a second positional argument to `max` and raises a TypeError
    under Python 3. They now generate `len(max(d.keys(), key=len))` — the
    length of the longest key/value, as intended.
    """
    assert len(output_filenames) == 1, output_filenames
    output_filename = output_filenames[0]
    del output_filenames

    status_stream.write("Creating '{}'...".format(output_filename))
    with status_stream.DoneManager() as this_dm:
        with open(output_filename, "w") as f:
            # File header plus the single import the generated tables require.
            f.write(
                textwrap.dedent(
                    """\
                    {}

                    from collections import OrderedDict

                    """,
                ).format(
                    cls._GenerateFileHeader(
                        prefix="# ",
                        filename_prefix="<SimpleSchemaGenerator>/",
                    ),
                ),
            )

            # Tracks whether any enum content was written (closure-visible flag).
            nonlocals = CommonEnvironment.Nonlocals(wrote_value=False, )

            # ----------------------------------------------------------------------
            def OnCompoundVisitingChildren(element, *args, **kwargs):  # <Unused argument> pylint: disable = W0613
                # Don't visit children
                return False

            # ----------------------------------------------------------------------
            def OnSimpleVisitingChildren(element, *args, **kwargs):  # <Unused argument> pylint: disable = W0613
                # Don't visit children
                return False

            # ----------------------------------------------------------------------
            def OnFundamental(element, *args, **kwargs):  # <Unused argument> pylint: disable = W0613
                # Only enums with friendly value names produce output.
                if not isinstance(element.TypeInfo, EnumTypeInfo):
                    return
                if not element.TypeInfo.FriendlyValues:
                    return

                # NOTE: intentionally shadows the (unused here) `name` parameter.
                name = element.Name
                reversed_name = "{}_reversed".format(name)

                prefix = "{} = OrderedDict".format(name)
                reversed_prefix = "{} = OrderedDict".format(reversed_name)

                f.write(
                    textwrap.dedent(
                        """\
                        {prefix}{assignments}

                        {name}_max_key_length = len(max({name}.keys(), key=len))
                        {name}_max_value_length = len(max({name}.values(), key=len))

                        {reversed_prefix}{reversed_assignments}

                        {reversed_name}_max_key_length = len(max({reversed_name}.keys(), key=len))
                        {reversed_name}_max_value_length = len(max({reversed_name}.values(), key=len))

                        """,
                    ).format(
                        prefix=prefix,
                        assignments=StringHelpers.LeftJustify(
                            "([ {}\n ])".format(
                                "\n ".join([
                                    '( "{}", "{}" ),'.format(v, fv)
                                    for v, fv in six.moves.zip(element.TypeInfo.Values, element.TypeInfo.FriendlyValues)
                                ]),
                            ),
                            len(prefix),
                        ),
                        name=name,
                        reversed_prefix=reversed_prefix,
                        reversed_assignments=StringHelpers.LeftJustify(
                            "([ {}\n ])".format(
                                "\n ".join([
                                    '( "{}", "{}" ),'.format(fv, v)
                                    for v, fv in six.moves.zip(element.TypeInfo.Values, element.TypeInfo.FriendlyValues)
                                ]),
                            ),
                            len(reversed_prefix),
                        ),
                        reversed_name=reversed_name,
                    ),
                )

                nonlocals.wrote_value = True

            # ----------------------------------------------------------------------

            simple_element_visitor = Elements.CreateElementVisitor(
                on_fundamental_func=OnFundamental,
                on_compound_visiting_children_func=OnCompoundVisitingChildren,
                on_simple_visiting_children_func=OnSimpleVisitingChildren,
            )

            for include_index in include_indexes:
                element = elements[include_index]
                simple_element_visitor.Accept(element)

            # Leave a marker so an empty output file is self-explanatory.
            if not nonlocals.wrote_value:
                f.write("# No enum values with friendly names were found.\n")