def __CreateContext(context, plugin):
    """Parses the input files, applies the include/exclude filters, and stores
    the (pickled) results back into the context for later comparison.
    """
    elements = ParseFiles(
        context["inputs"],
        plugin,
        context["filter_unsupported_extensions"],
        context["filter_unsupported_attributes"],
    )

    # Compile full-match regular expressions for the filters
    include_regexes = [re.compile("^{}$".format(value)) for value in context["includes"]]
    exclude_regexes = [re.compile("^{}$".format(value)) for value in context["excludes"]]

    del context["includes"]
    del context["excludes"]

    include_indexes = range(len(elements))

    if exclude_regexes:
        include_indexes = [
            index
            for index in include_indexes
            if not any(regex.match(elements[index].Name) for regex in exclude_regexes)
        ]

    if include_regexes:
        include_indexes = [
            index
            for index in include_indexes
            if any(regex.match(elements[index].Name) for regex in include_regexes)
        ]

    # This is a bit strange, but to detect changes we need to compare the data in
    # the elements rather than the elements themselves (the elements will be
    # different object instances during each invocation). Therefore, save the data
    # via pickling and remove the elements; they are deserialized from the pickled
    # data before the plugin's Generate method is invoked.
    #
    # Pickling requires a fully qualified name, which is one level higher than the
    # current dir, so add that path. (Note that this doesn't feel right; there is
    # probably a better way to do this.)
    sys.path.insert(0, os.path.join(_script_dir, ".."))
    with CallOnExit(lambda: sys.path.pop(0)):
        context["pickled_elements"] = pickle.dumps(elements)

    context["include_indexes"] = include_indexes

    return context
def EnumeratePlugins():
    """Yields each plugin class found in the 'Plugins' directory next to this script."""
    plugin_dir = os.path.join(_script_dir, "Plugins")
    if not os.path.isdir(plugin_dir):
        raise Exception("'{}' is not a valid directory".format(plugin_dir))

    sys.path.insert(0, plugin_dir)
    with CallOnExit(lambda: sys.path.pop(0)):
        for entry in os.listdir(plugin_dir):
            entry_fullpath = os.path.join(plugin_dir, entry)
            if not os.path.isfile(entry_fullpath):
                continue

            module_name, extension = os.path.splitext(entry)

            # Only modules named '<Something>Plugin.py' (but not the base 'Plugin.py') qualify
            is_plugin_module = (
                extension == ".py"
                and module_name.endswith("Plugin")
                and module_name != "Plugin"
            )
            if not is_plugin_module:
                continue

            module = importlib.import_module(module_name)

            plugin_class = getattr(module, "Plugin", None)
            if plugin_class is None:
                raise Exception(
                    "The module '{}' does not contain a supported plugin".format(entry_fullpath),
                )

            error_string = plugin_class.IsValidEnvironment()
            if error_string is not None:
                sys.stdout.write("INFO: {}\n".format(error_string))
                continue

            yield plugin_class
def ExecuteCommands(
    cls,
    command_or_commands,
    output_stream,
    environment=None,
):
    """\
    Creates a temporary script file, writes the commands to that file, and
    then executes it.

    Returns the result and output generated during execution.
    """
    from CommonEnvironment.CallOnExit import CallOnExit
    from CommonEnvironment import FileSystem
    from CommonEnvironment import Process

    script_filename = cls.CreateTempFilename(cls.ScriptExtension)

    with open(script_filename, 'w') as f:
        f.write(cls.GenerateCommands(command_or_commands))

    # Remove the temporary script regardless of how execution goes
    with CallOnExit(lambda: FileSystem.RemoveFile(script_filename)):
        cls.MakeFileExecutable(script_filename)

        return Process.Execute(
            cls.DecorateInvokeScriptCommandLine(script_filename),
            output_stream,
            environment=environment,
        )
def _PushNewStackItem(self, ctx, declaration_type):
    """Creates a new Item for the current parse context, pushes it onto the
    stack, yields it, and pops it when the caller is done.
    """
    token_type = ctx.start.type

    if token_type == ctx.parser.LBRACK:
        item_type = Item.ItemType.Attribute
    elif token_type == ctx.parser.LPAREN:
        item_type = Item.ItemType.Definition
    else:
        item_type = Item.ItemType.Standard

    parent = self._GetStackParent()

    item = Item(
        declaration_type,
        item_type,
        parent,
        self._source_name,
        ctx.start.line,
        ctx.start.column + 1,
        is_external=self._is_external,
    )

    parent.items.append(item)
    self._stack.append(item)

    # Note that the explicit lambda seems to be necessary;
    #
    #   with CallOnExit(self._stack.pop):
    #       ...
    #
    # didn't modify the _stack. Strange.
    with CallOnExit(lambda: self._stack.pop()):
        yield item
def ExtractCoverageInfo(coverage_filename, binary_filename, includes, excludes, output_stream):
    """Converts coverage data to CSV via a PowerShell helper script and sums the
    covered / not-covered counts for the methods that pass the include/exclude
    fnmatch filters.

    Returns a non-zero exit code on failure, otherwise (covered, not_covered).
    """
    if excludes:
        excludes_func = lambda method_name: any(
            fnmatch(method_name, exclude) for exclude in excludes)
    else:
        excludes_func = lambda method_name: False

    if includes:
        includes_func = lambda method_name: any(
            fnmatch(method_name, include) for include in includes)
    else:
        includes_func = lambda method_name: True

    # ----------------------------------------------------------------------
    def ShouldInclude(method_name):
        # Excludes take precedence over includes
        return not excludes_func(method_name) and includes_func(method_name)

    # ----------------------------------------------------------------------

    temp_filename = CurrentShell.CreateTempFilename()

    # BUG FIX: the format string previously did not reference the 'filename'
    # argument (the CoverageToCsv.ps1 path), so the script to run was never
    # passed to PowerShell. Use the '{filename}' placeholder.
    command_line = '"{powershell}" -ExecutionPolicy Bypass -NoProfile -File "{filename}" "{coverage}" "{module}" > "{temp_filename}" 2>&1'.format(
        powershell=r"{}\syswow64\WindowsPowerShell\v1.0\powershell.exe".format(
            os.getenv("SystemRoot"),
        ),
        filename=os.path.join(_script_dir, "CoverageToCsv.ps1"),
        coverage=coverage_filename,
        module=os.path.basename(binary_filename),
        temp_filename=temp_filename,
    )

    result = Process.Execute(command_line, output_stream)
    if result != 0:
        return result

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        covered = 0
        not_covered = 0

        # Renamed from 'input' to avoid shadowing the builtin
        with open(temp_filename, "r") as input_file:
            reader = csv.reader(input_file)

            for row in reader:
                if not isinstance(row, (tuple, list)):
                    raise Exception(row)

                # A single-column row is an error message from the script
                if len(row) == 1:
                    raise Exception(row[0])

                method_name = row[1]
                if not ShouldInclude(method_name):
                    continue

                # The last two columns are the covered / not-covered counts
                covered += int(row[-2])
                not_covered += int(row[-1])

        return covered, not_covered
def Func():
    """Enters CallOnExit with two functors that both raise."""

    def RaiseFirst():
        raise Exception("Raising 1")

    def RaiseSecond():
        raise Exception("Raising 2")

    with CallOnExit(RaiseFirst, RaiseSecond):
        pass
def Build(
    image_name,
    tag=None,
    output_stream=sys.stdout,
    verbose=False,  # NOTE(review): not referenced in this body — verify whether it should gate output
):
    """Creates a docker image"""

    # 'tag' may hold multiple values; rename for clarity and free the name for loop use below
    tags = tag
    del tag

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Build from the directory named after the image; restore the cwd afterwards
        prev_dir = os.getcwd()
        os.chdir(os.path.join(_script_dir, image_name))
        with CallOnExit(lambda: os.chdir(prev_dir)):
            # Qualify the image name with the docker user
            image_name = "{}/{}".format(DOCKER_USER_NAME, image_name)

            dm.stream.write("Building docker image...")
            with dm.stream.DoneManager(
                line_prefix=" ",
                suffix="\n",
            ) as this_dm:
                this_dm.result = Process.Execute(
                    "docker build --tag {} .".format(image_name),
                    this_dm.stream,
                )
                if this_dm.result != 0:
                    return this_dm.result

            if tags:
                dm.stream.write("Applying tags...")
                with dm.stream.DoneManager() as tag_dm:
                    for index, tag in enumerate(tags):
                        tag_dm.stream.write(
                            "'{}' ({} of {})...".format(tag, index + 1, len(tags)),
                        )
                        with tag_dm.stream.DoneManager() as this_dm:
                            # No stream argument: Execute returns (result, output)
                            this_dm.result, output = Process.Execute(
                                "docker tag {image_name} {image_name}:{tag}".format(
                                    image_name=image_name,
                                    tag=tag,
                                ),
                            )
                            if this_dm.result != 0:
                                # Only surface the output on failure
                                this_dm.stream.write(output)
                                return this_dm.result

        return dm.result
def Publish(
    image_name,
    registry_name,
    tag=None,
    output_stream=sys.stdout,
):
    """Publishes previously built content to a docker registry"""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Operate from the directory named after the image; restore the cwd afterwards
        prev_dir = os.getcwd()
        os.chdir(os.path.join(_script_dir, image_name))
        with CallOnExit(lambda: os.chdir(prev_dir)):
            image_name = "{}/{}".format(DOCKER_USER_NAME, image_name)

            image_id = _GetImageId(
                image_name,
                dm,
                tag=tag,
            )

            # BUG FIX: the failure check previously tested 'new_image_name is None',
            # which could never be true (it is the result of str.format). The value
            # that can be None on failure is 'image_id'.
            if image_id is None:
                assert dm.result != 0
                return dm.result

            # Retarget the image at the provided registry
            new_image_name = "{}/{}".format(registry_name, image_name.split("/")[-1])

            dm.stream.write("Renaming image...")
            with dm.stream.DoneManager() as this_dm:
                # No stream argument: Execute returns (result, output)
                this_dm.result, output = Process.Execute(
                    "docker tag {} {}{}".format(
                        image_id,
                        new_image_name,
                        ":{}".format(tag) if tag else "",
                    ),
                )
                if this_dm.result != 0:
                    # Only surface the output on failure
                    this_dm.stream.write(output)
                    return this_dm.result

            dm.stream.write("Pushing image...")
            with dm.stream.DoneManager(line_prefix=" ", ) as this_dm:
                this_dm.result = Process.Execute(
                    "docker push {}".format(new_image_name),
                    this_dm.stream,
                )
                if this_dm.result != 0:
                    return this_dm.result

        return dm.result
def CreatePackageName():
    """Derives a synthetic or fully qualified package name for 'mod' (captured from
    the enclosing scope) so that relative imports behave as if the module were part
    of a proper package.
    """
    # Continue traversing parent dirs as long as there is an __init__.py file.
    name_parts = []

    filename = os.path.realpath(mod.__file__)
    if CurrentShell.IsSymLink(filename):
        filename = CurrentShell.ResolveSymLink(filename)
    filename = FileSystem.Normalize(filename)

    directory, name = os.path.split(filename)
    name = os.path.splitext(name)[0]

    # Each parent dir with an __init__.py contributes a package-name component
    while os.path.isfile(os.path.join(directory, "__init__.py")):
        directory, name = os.path.split(directory)
        name_parts.append(name)

    if not name_parts:
        # If we didn't find any __init__ files, it means that this isn't a file
        # that is part of a package. However, we still want to simulate package
        # behavior so that relative imports work as expected.
        if name == "__main__" or getattr(sys, "frozen", False):
            name = "___EntryPoint___"
        else:
            name = "___{}Lib___".format(name)

        assert name not in sys.modules
        # Placeholder entry so the synthetic name resolves in sys.modules
        sys.modules[name] = None

        return name

    # If here, we are looking at a file in a package. Ensure that the entire
    # package is included with fully qualified names.
    name_parts.reverse()

    for index, name_part in enumerate(name_parts):
        fully_qualified_name = '.'.join(name_parts[:index + 1])

        if fully_qualified_name not in sys.modules:
            # When we load this module, it will be loaded under 'name_part'.
            # Preserve the original module (if it exists).
            # NOTE(review): 'temporary_modules' comes from an enclosing scope not
            # visible here — presumably restored by the caller; verify.
            temporary_modules[name_part] = sys.modules.pop(name_part, None)

            sys.path.insert(0, directory)
            with CallOnExit(lambda: sys.path.pop(0)):
                # This will add the module name to sys.modules
                __import__(name_part)

            # Alias the loaded module under its fully qualified name as well
            sys.modules[fully_qualified_name] = sys.modules[name_part]

        directory = os.path.join(directory, name_part)

    return fully_qualified_name
def test_SingleValue(self):
    """A single functor is invoked when the block exits normally."""
    nonlocals = CommonEnvironment.Nonlocals(value=0)

    # ----------------------------------------------------------------------
    def OnExit():
        nonlocals.value = 1

    # ----------------------------------------------------------------------

    with CallOnExit(OnExit):
        pass

    self.assertEqual(nonlocals.value, 1)
def EnumTrackedFiles(cls, repo_root):
    """Yields the absolute path of every file tracked by git in repo_root."""
    temp_filename = CurrentShell.CreateTempFilename()

    result, output = cls.Execute(
        repo_root, 'git ls-files > "{}"'.format(temp_filename))
    assert result == 0, (result, output)
    assert os.path.isfile(temp_filename), temp_filename

    with CallOnExit(lambda: os.remove(temp_filename)):
        with open(temp_filename) as f:
            for line in f:
                stripped = line.strip()
                if stripped:
                    yield os.path.join(repo_root, stripped)
def test_OnlyOnSuccess(self):
    """A success-only functor is NOT invoked when the block raises."""
    nonlocals = CommonEnvironment.Nonlocals(value=0)

    # ----------------------------------------------------------------------
    def OnExit():
        nonlocals.value = 1

    # ----------------------------------------------------------------------

    try:
        with CallOnExit(True, OnExit):
            raise Exception("")
    except:
        pass

    self.assertEqual(nonlocals.value, 0)
def test_AlwaysCall(self):
    """By default the functor runs even when the block raises."""
    nonlocals = CommonEnvironment.Nonlocals(value=0)

    # ----------------------------------------------------------------------
    def OnExit():
        nonlocals.value = 1

    # ----------------------------------------------------------------------

    try:
        with CallOnExit(OnExit):
            raise Exception("")
    except:
        pass

    self.assertEqual(nonlocals.value, 1)
def GetAnySCM(
    path,
    raise_on_error=True,
    by_repository_id=False,  # If True, will use much faster search heuristics
):
    """Returns the SCM that is active for the provided dir or any of its ancestors."""
    if by_repository_id:
        # Use repository id filenames to determine when we are at a repo root. This
        # eliminates unnecessary calls to GetSCM.
        sys.path.insert(0, os.getenv("DEVELOPMENT_ENVIRONMENT_FUNDAMENTAL"))
        with CallOnExit(lambda: sys.path.pop(0)):
            from RepositoryBootstrap import Constants as RepositoryBootstrapConstants

        current = path
        while True:
            id_filename = os.path.join(
                current, RepositoryBootstrapConstants.REPOSITORY_ID_FILENAME)
            if os.path.isfile(id_filename):
                return GetSCM(current, raise_on_error=raise_on_error)

            parent = os.path.dirname(current)
            if parent == current:
                # Reached the filesystem root
                break
            current = parent
    else:
        current = path
        while True:
            potential_scm = GetSCM(current, raise_on_error=False)
            if potential_scm:
                return potential_scm

            parent = os.path.dirname(current)
            if parent == current:
                # Reached the filesystem root
                break
            current = parent

    raise Exception(
        "No SCMs are active for '{}' or its ancestors.".format(path))
def Impl(element, include_map_type):
    """Recursively records 'element' (and its ancestors and referenced items) in
    'include_map' (captured from the enclosing scope) with the given map type.
    """
    # Prevent infinite recursion when operating on structures that have
    # loops
    if element in stack:
        return

    dn = element.DottedName

    # Lower Type values take precedence; don't downgrade an existing entry
    include_map_value = include_map.get(dn, None)
    if include_map_value is not None and include_map_value.Type.value <= include_map_type.value:
        return

    include_map[dn] = cls.IncludeMapValue(element, include_map_type)

    # Ensure that all ancestors are included
    parent = element.Parent
    while parent:
        pdn = parent.DottedName

        # An already-present ancestor implies its ancestors are present too
        if pdn in include_map:
            break

        include_map[pdn] = cls.IncludeMapValue(
            parent, cls.IncludeMapType.Parent)

        parent = parent.Parent

    # Ensure that all children are included
    stack.append(element)
    with CallOnExit(lambda: stack.pop()):
        for potential_item_name in [
            "Children",
            "Bases",
            "Derived",
            "Reference",
        ]:
            potential_items = getattr(element, potential_item_name, None)
            if potential_items is None:
                continue

            # 'Reference' is a single element; normalize to a list
            if not isinstance(potential_items, list):
                potential_items = [potential_items]

            for item in potential_items:
                Impl(item, cls.IncludeMapType.Referenced)
def EntryPoint(
    arg,
    output_stream=sys.stdout,
):
    """clang-format-style shim: reads source content from stdin, writes it to a
    temp file, and invokes the project's Formatter script on it.

    Returns the formatter's exit code; the formatted output is written to
    output_stream.
    """
    args = arg
    del arg

    # One of the args will be the filename
    input_filename = None
    for arg in args:
        if arg.startswith("-assume-filename="):
            input_filename = arg[len("-assume-filename="):]
            break

    if input_filename is None:
        raise Exception("Unable to extract the filename from '{}'".format(args))

    # Write the contents from stdin to a temp file
    input_content = sys.stdin.read()
    assert input_content

    # Preserve the original extension so the formatter detects the language
    temp_filename = CurrentShell.CreateTempFilename(os.path.splitext(input_filename)[1])

    with open(temp_filename, "w") as f:
        f.write(input_content)

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        # Invoke the script
        script_name = "Formatter"
        if CurrentShell.CategoryName != "Linux":
            script_name = CurrentShell.CreateScriptName(script_name)

        # BUG FIX: the format string previously did not reference the 'filename'
        # argument, so the temp file to format was never passed on the command
        # line. Use the '{filename}' placeholder.
        command_line = '"{script}" Format "{filename}" /quiet "/plugin_arg=hint_filename:{original_filename}"'.format(
            script=script_name,
            filename=temp_filename,
            original_filename=input_filename.replace(":", "\\:"),
        )

        result, formatted_output = Process.Execute(command_line)

        output_stream.write(formatted_output)

        return result
def _BuildGenerator(
    source_dir,
    configuration,
    generator=_DEFAULT_GENERATOR,
):
    """Configures and builds the project with CMake in a temporary directory,
    yielding (build_dir, result, output). The directory is removed on exit.
    """
    build_dir = CurrentShell.CreateTempDirectory()
    with CallOnExit(lambda: FileSystem.RemoveTree(build_dir)):
        generator_flag = '-G "{}" '.format(generator) if generator else ""

        configure_command_line = 'cmake {generator}-S "{source_dir}" -B "{build_dir}" -DCppCommon_CMAKE_DEBUG_OUTPUT=On -DCMAKE_BUILD_TYPE={config}'.format(
            generator=generator_flag,
            source_dir=source_dir,
            build_dir=build_dir,
            config=configuration,
        )

        result, output = Process.Execute(configure_command_line)

        # Only build when configuration succeeded
        if result == 0:
            result, output = Process.Execute('cmake --build "{}"'.format(build_dir))

        yield build_dir, result, output
def test_MultipleValues(self):
    """All provided functors are invoked when the block exits."""
    nonlocals = CommonEnvironment.Nonlocals(
        value1=0,
        value2=0,
    )

    # ----------------------------------------------------------------------
    def SetFirst():
        nonlocals.value1 = 1

    # ----------------------------------------------------------------------
    def SetSecond():
        nonlocals.value2 = 1

    # ----------------------------------------------------------------------

    with CallOnExit(SetFirst, SetSecond):
        pass

    self.assertEqual(nonlocals.value1, 1)
    self.assertEqual(nonlocals.value2, 1)
def EnumSCMs(path):
    """
    Enumerates all SCMs that are active in the provided path or its descendants.

    Yields (scm, repo_root)
    """
    sys.path.insert(0, os.getenv("DEVELOPMENT_ENVIRONMENT_FUNDAMENTAL"))
    with CallOnExit(lambda: sys.path.pop(0)):
        from RepositoryBootstrap import Constants as RepositoryBootstrapConstants

        for root, directories, _ in os.walk(path):
            for scm in ALL_TYPES:
                if not scm.IsRoot(root):
                    continue

                yield scm, root

                # Don't search in subdirs, as there won't be any
                directories[:] = []
                continue

            if RepositoryBootstrapConstants.GENERATED_DIRECTORY_NAME in directories:
                # Don't search in generated dirs, as the symlinks will cause recursive enumerations
                directories.remove(RepositoryBootstrapConstants.GENERATED_DIRECTORY_NAME)
def _AcceptImpl(cls, element_or_elements, traverse, should_visit_func,
                lookup_map, child_visitation_lookup_map, visited, *args,
                **kwargs):
    """Dispatches each element to its visitor function (via lookup_map) and,
    when traversing, recurses into children. Returns the first non-None visitor
    result encountered, otherwise None.
    """
    # Normalize to a list
    if isinstance(element_or_elements, list):
        elements = element_or_elements
    else:
        elements = [element_or_elements]

    for element in elements:
        # Track visitation by object identity to tolerate cyclic structures
        element_id = id(element)
        if element_id in visited:
            continue
        visited.add(element_id)

        if not should_visit_func(element):
            continue

        typ = type(element)
        if typ not in lookup_map:
            raise Exception("'{}' was not expected ({})".format(
                typ, element))

        cls.OnEnteringElement(element, *args, **kwargs)
        # The lambda binds the current 'element'; CallOnExit fires within this
        # iteration, before 'element' is rebound.
        with CallOnExit(
                lambda: cls.OnExitingElement(element, *args, **kwargs)):
            result = lookup_map[typ](element, *args, **kwargs)

            # Nonlocals lets the CallVisited closure below update 'result'
            nonlocals = CommonEnvironment.Nonlocals(result=result, )

            if traverse and isinstance(element, ChildrenMixin) and not isinstance(
                    element, VariantElement):
                if typ not in child_visitation_lookup_map:
                    raise Exception("'{}' was not expected ({})".format(
                        typ, element))

                visiting_func, visited_func = child_visitation_lookup_map[
                    typ]

                # A visiting func returning False suppresses child traversal
                if visiting_func(element, *args, **kwargs) != False:
                    # ----------------------------------------------------------------------
                    def CallVisited():
                        visited_result = visited_func(
                            element, *args, **kwargs)
                        if visited_result is not None and nonlocals.result is None:
                            nonlocals.result = visited_result

                    # ----------------------------------------------------------------------

                    with CallOnExit(CallVisited):
                        for child in element.Children:
                            cls._AcceptImpl(child, traverse, should_visit_func,
                                            lookup_map, child_visitation_lookup_map,
                                            visited, *args, **kwargs)

            # First non-None result short-circuits the whole traversal
            if nonlocals.result is not None:
                return nonlocals.result

    return None
import CommonEnvironment from CommonEnvironment import Nonlocals from CommonEnvironment.CallOnExit import CallOnExit from CommonEnvironment.TypeInfo import Arity from CommonEnvironmentEx.Antlr4Helpers.ErrorListener import ErrorListener from CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- sys.path.insert(0, os.path.join(_script_dir, "..", "Grammar", "GeneratedCode")) with CallOnExit(lambda: sys.path.pop(0)): from SimpleSchemaLexer import SimpleSchemaLexer # <Unable to import> pylint: disable = E0401 from SimpleSchemaParser import SimpleSchemaParser # <Unable to import> pylint: disable = E0401 from SimpleSchemaVisitor import SimpleSchemaVisitor # <Unable to import> pylint: disable = E0401 with InitRelativeImports(): from .Item import Item, Metadata, MetadataValue, MetadataSource, ResolvedMetadata from ..Attributes import FUNDAMENTAL_ATTRIBUTE_INFO_MAP from .. import Exceptions from ...Plugin import ParseFlag # ---------------------------------------------------------------------- def Populate(source_name_content_generators,
def test_Constraints(self):
    """Exercises the constraint checks of each generated Deserialize_* method."""
    # directory_
    # Create a temp dir
    temp_dirname = os.path.join(os.getcwd(), str(uuid.uuid4()).replace("-", ""))
    assert not os.path.exists(temp_dirname), temp_dirname

    os.mkdir(temp_dirname)
    with CallOnExit(lambda: FileSystem.RemoveTree(temp_dirname)):
        # Compare case-insensitively; the filesystem may normalize case
        self.assertEqual(AllTypesYaml.Deserialize_directory_(os.path.basename(temp_dirname)).lower(), temp_dirname.lower())

    # ----------------------------------------------------------------------
    def CaseInsensitiveException(ExceptionType, regex, func):
        # Expects 'func' to raise ExceptionType whose message matches 'regex' (case-insensitive)
        try:
            func()
            self.assertFalse(True)
        except ExceptionType as ex:
            self.assertEqual(regex.lower(), str(ex).lower())

    # ----------------------------------------------------------------------

    CaseInsensitiveException(
        AllTypesYaml.DeserializeException,
        "'{}' is not a valid directory [directory_]".format(os.path.join(os.getcwd(), "Does Not Exist")),
        lambda: AllTypesYaml.Deserialize_directory_("Does Not Exist"),
    )

    # filename_
    # Create a temp filename
    temp_filename = os.path.join(os.getcwd(), str(uuid.uuid4()).replace("-", ""))
    assert not os.path.exists(temp_filename), temp_filename

    with open(temp_filename, "w") as f:
        f.write("Temp file")

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        self.assertEqual(AllTypesYaml.Deserialize_filename_('"{}"'.format(os.path.basename(temp_filename))).lower(), temp_filename.lower())

    CaseInsensitiveException(
        AllTypesYaml.DeserializeException,
        "'{}' is not a valid file [filename_]".format(os.path.join(os.getcwd(), "Does Not Exist")),
        lambda: AllTypesYaml.Deserialize_filename_("Does Not Exist"),
    )

    # filename_any_
    temp_dirname = os.path.join(os.getcwd(), str(uuid.uuid4()).replace("-", ""))
    assert not os.path.exists(temp_dirname), temp_dirname

    os.mkdir(temp_dirname)
    with CallOnExit(lambda: FileSystem.RemoveTree(temp_dirname)):
        self.assertEqual(AllTypesYaml.Deserialize_filename_any_('"{}"'.format(os.path.basename(temp_dirname))).lower(), temp_dirname.lower())

    temp_filename = os.path.join(os.getcwd(), str(uuid.uuid4()).replace("-", ""))
    assert not os.path.exists(temp_filename), temp_filename

    with open(temp_filename, "w") as f:
        f.write("Temp file")

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        self.assertEqual(AllTypesYaml.Deserialize_filename_any_('"{}"'.format(os.path.basename(temp_filename))).lower(), temp_filename.lower())

    self.assertRaisesRegex(AllTypesYaml.DeserializeException, re.escape("is not a valid file or directory"), lambda: AllTypesYaml.Deserialize_filename_any_("Does Not Exist"))

    # number_
    self.assertEqual(AllTypesYaml.Deserialize_number_("2"), 2)
    self.assertRaisesRegex(AllTypesYaml.DeserializeException, r"-30 is not >= -20.0", lambda: AllTypesYaml.Deserialize_number_("-30"))
    self.assertRaisesRegex(AllTypesYaml.DeserializeException, r"40 is not <= 20.0", lambda: AllTypesYaml.Deserialize_number_("40"))

    # int_
    self.assertEqual(AllTypesYaml.Deserialize_int_("10"), 10)
    self.assertRaisesRegex(AllTypesYaml.DeserializeException, r"-30 is not >= -20", lambda: AllTypesYaml.Deserialize_int_("-30"))
    self.assertRaisesRegex(AllTypesYaml.DeserializeException, r"40 is not <= 20", lambda: AllTypesYaml.Deserialize_int_("40"))

    # string_
    self.assertEqual(AllTypesYaml.Deserialize_string_("abc"), "abc")
    self.assertRaisesRegex(
        AllTypesYaml.DeserializeException,
        r"'a' is not a valid 'String' string - Value must have at least 2 characters, not have more than 4 characters",
        lambda: AllTypesYaml.Deserialize_string_("a"),
    )
    self.assertRaisesRegex(
        AllTypesYaml.DeserializeException,
        r"'abcde' is not a valid 'String' string - Value must have at least 2 characters, not have more than 4 characters",
        lambda: AllTypesYaml.Deserialize_string_("abcde"),
    )

    # string_regex_
    self.assertEqual(AllTypesYaml.Deserialize_string_regex_("bit"), "bit")
    self.assertEqual(AllTypesYaml.Deserialize_string_regex_("but"), "but")
    self.assertEqual(AllTypesYaml.Deserialize_string_regex_("bat"), "bat")
    self.assertRaisesRegex(
        AllTypesYaml.DeserializeException,
        r"'abc' is not a valid 'String' string - Value must match the regular expression 'b.t'",
        lambda: AllTypesYaml.Deserialize_string_regex_("abc"),
    )
def _CreateContext(cls, metadata, status_stream): jinja2_context = {} # Load the custom context defined in code for context_code in metadata["jinja2_context_code"]: dirname, basename = os.path.split(context_code) basename = os.path.splitext(basename)[0] sys.path.insert(0, dirname) with CallOnExit(lambda: sys.path.pop(0)): mod = importlib.import_module(basename) var = getattr(mod, context_code.var_name) del mod if isinstance(var, dict): for k, v in six.iteritems(var): jinja2_context[k] = v else: jinja2_context[context_code.var_name] = var del metadata["jinja2_context_code"] # Load the custom context for k, v in six.iteritems(metadata["jinja2_context"]): if len(v) == 1: jinja2_context[k] = v[0] else: jinja2_context[k] = v metadata["jinja2_context"] = jinja2_context # Calculate the hashes of the input filenames. We will use this information # during comparison to determine if an input file has changed. It appears # that this value isn't used, but it is actually used when comparing the # context of two different invocations. 
# ---------------------------------------------------------------------- def CalculateHash(input_filename): with open(input_filename, "rb") as f: return hashlib.sha256(f.read()).digest() # ---------------------------------------------------------------------- metadata["hashes"] = [ CalculateHash(input_filename) for input_filename in metadata["inputs"] ] # Get the output filenames if not metadata["preserve_dir_structure"]: # ---------------------------------------------------------------------- def GetBaseDir(input_filename): return '' # ---------------------------------------------------------------------- else: if len(metadata["inputs"]) == 1: common_prefix = os.path.dirname(metadata["inputs"][0]) else: common_prefix = FileSystem.GetCommonPath(*metadata["inputs"]) # ---------------------------------------------------------------------- def GetBaseDir(input_filename): return FileSystem.TrimPath(input_filename, common_prefix) # ---------------------------------------------------------------------- output_filenames = [] for input_filename in metadata["inputs"]: output_filenames.append( os.path.join( metadata["output_dir"], GetBaseDir(input_filename), '.'.join([ part for part in os.path.basename(input_filename).split(".") if part != "jinja2" ]), ), ) metadata["output_filenames"] = output_filenames return super(CodeGenerator, cls)._CreateContext(metadata, status_stream)
def _TestImpl(build_dir, output_stream):
    """Runs ctest from within the build dir (tests must be executed there)."""
    original_dir = os.getcwd()

    os.chdir(build_dir)
    with CallOnExit(lambda: os.chdir(original_dir)):
        return Process.Execute("ctest --parallel", output_stream)
def Execute():
    """\
    Uses cog (https://nedbatchelder.com/code/cog/) to update vscode's launch.json file.

    Example:
        Within 'launch.json':

            // [[[cog from CommonEnvironmentEx import VsCodeCogger; VsCodeCogger.Execute() ]]]
            // [[[end]]]

        From the command line:

            cog -r "<launch.json filename>"
    """
    # Get the files
    cog_filename = os.path.realpath(cog.inFile)
    assert os.path.isfile(cog_filename), cog_filename

    # Search the parent of the dir containing launch.json
    dirname = os.path.realpath(os.path.join(os.path.dirname(cog_filename), ".."))
    assert os.path.isdir(dirname), dirname

    filenames = FileSystem.WalkFiles(
        dirname,
        include_dir_names=lambda name: name.endswith("Tests") and name != "Tests",
        include_file_extensions=".py",
        exclude_file_names="__init__.py",
    )

    # Organize the files
    groups = OrderedDict()
    # Counts occurrences of each test basename so duplicates can be disambiguated below
    test_names_ctr = {}

    for filename in filenames:
        test_name = os.path.basename(filename)
        if test_name in test_names_ctr:
            test_names_ctr[test_name] += 1
        else:
            test_names_ctr[test_name] = 1

        assert filename.startswith(dirname), (filename, dirname)
        group = os.path.dirname(FileSystem.TrimPath(filename, dirname)).replace(os.path.sep, "/")
        groups.setdefault(group, []).append(filename)

    if not groups:
        return

    # Load the test parsers
    dynamic_test_parser_filename = os.getenv("DEVELOPMENT_ENVIRONMENT_TEST_PARSERS")
    assert os.path.isfile(dynamic_test_parser_filename), dynamic_test_parser_filename

    with open(dynamic_test_parser_filename) as f:
        test_parser_filenames = f.readlines()

    test_parsers = []

    for test_parser_filename in test_parser_filenames:
        test_parser_filename = test_parser_filename.strip()
        if not test_parser_filename:
            continue

        assert test_parser_filename, test_parser_filename
        assert os.path.isfile(test_parser_filename), test_parser_filename

        # NOTE: rebinds 'dirname' from above; it is not used for grouping after this point
        dirname, basename = os.path.split(test_parser_filename)
        basename = os.path.splitext(basename)[0]

        sys.path.insert(0, dirname)
        with CallOnExit(lambda: sys.path.pop(0)):
            mod = importlib.import_module(basename)

        parser = getattr(mod, "TestParser", None)
        assert parser is not None, test_parser_filename
        assert parser.Name in _CONFIGURATIONS, parser.Name

        test_parsers.append(parser)

    # Write the output
    cog.out(
        textwrap.dedent(
            """\
            // ----------------------------------------------------------------------
            // |
            // |  Cog Output
            // |
            // ----------------------------------------------------------------------
            // To regenerate this content:
            //
            //      cog -r "{}"
            // ----------------------------------------------------------------------
            // ----------------------------------------------------------------------
            // ----------------------------------------------------------------------
            """,
        ).format(cog.inFile.replace("\\", "\\\\")),
    )

    for group, filenames in groups.items():
        cog.out(
            textwrap.dedent(
                """\
                // ----------------------------------------------------------------------
                // |
                // |  {}
                // |
                // ----------------------------------------------------------------------
                """,
            ).format(group),
        )

        for filename in filenames:
            # Use the first parser that recognizes the file
            for parser in test_parsers:
                if parser.IsSupportedTestItem(filename):
                    dirname, basename = os.path.split(filename)

                    cog.out(
                        _CONFIGURATIONS[parser.Name].format(
                            filename=filename.replace(os.path.sep, "/"),
                            dirname=dirname.replace(os.path.sep, "/"),
                            basename=basename,
                            group=group,
                            # Append the group when the basename is ambiguous
                            name="{}{}".format(
                                os.path.splitext(basename)[0],
                                "" if test_names_ctr[basename] == 1 else " --- {}".format(group),
                            ),
                        ),
                    )

                    break

    cog.out("\n")
def Execute(
    command_line,
    optional_output_stream_or_functor=None,    # def Func(content) -> Bool
    convert_newlines=True,                     # Converts '\r\n' into '\n'
    line_delimited_output=False,               # Buffer calls to the provided functor by lines
    environment=None,                          # Environment vars to make available to the process
):
    """
    Invokes the given command line.

    Returns the exit code if optional_output_stream_or_functor is not None, otherwise
    ( <exit_code>, <output> )
    """

    assert command_line

    # When no stream/functor is provided, capture the output so it can be returned
    sink = None
    output = None

    if optional_output_stream_or_functor is None:
        sink = six.moves.StringIO()
        output = sink.write
    elif hasattr(optional_output_stream_or_functor, "write"):
        output_stream = optional_output_stream_or_functor
        output = output_stream.write
    else:
        output = optional_output_stream_or_functor

    if convert_newlines:
        newlines_original_output = output

        # ----------------------------------------------------------------------
        def ConvertNewlines(content):
            content = content.replace('\r\n', '\n')
            return newlines_original_output(content)

        # ----------------------------------------------------------------------

        output = ConvertNewlines

    if line_delimited_output:
        line_delimited_original_output = output

        internal_content = []

        # ----------------------------------------------------------------------
        def OutputFunctor(content):
            if '\n' in content:
                assert content.endswith('\n'), content

                content = "{}{}".format(''.join(internal_content), content)
                internal_content[:] = []

                return line_delimited_original_output(content)
            else:
                internal_content.append(content)

            return None

        # ----------------------------------------------------------------------
        def Flush():
            # Emit any partial line still buffered
            if internal_content:
                line_delimited_original_output(''.join(internal_content))
                internal_content[:] = []

        # ----------------------------------------------------------------------

        output = OutputFunctor
    else:
        # ----------------------------------------------------------------------
        def Flush():
            pass

        # ----------------------------------------------------------------------

    if environment and sys.version_info[0] == 2:
        # Keys and values must be strings, which can be a problem if the environment was extracted from unicode data
        import unicodedata

        # ----------------------------------------------------------------------
        def ConvertToString(item):
            return unicodedata.normalize('NFKD', item).encode('ascii', 'ignore')

        # ----------------------------------------------------------------------

        for key in list(six.iterkeys(environment)):
            value = environment[key]

            if isinstance(key, unicode):                # <Undefined variable> pylint: disable = E0602
                del environment[key]
                key = ConvertToString(key)

            if isinstance(value, unicode):              # <Undefined variable> pylint: disable = E0602
                value = ConvertToString(value)

            environment[key] = value

    result = subprocess.Popen(
        command_line,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        env=environment,
    )

    # States for the ANSI-escape/newline state machine below
    (
        CharacterStack_Escape,
        CharacterStack_LineReset,
        CharacterStack_Buffered,
    ) = range(3)

    # Handle differences between bytes and strings in Python 3
    if sys.version_info[0] == 2:
        char_to_value = lambda c: c
        is_ascii_letter = lambda c: c in string.ascii_letters
        is_newline = lambda c: c in ['\r', '\n']
        is_esc = lambda c: c == '\033'
        to_ascii_string = lambda c: ''.join(c)
    else:
        char_to_value = lambda c: ord(c)
        is_ascii_letter = lambda c: (c >= ord('a') and c <= ord('z')) or (c >= ord('A') and c <= ord('Z'))
        is_newline = lambda c: c in [10, 13]
        is_esc = lambda c: c == 27

        # ----------------------------------------------------------------------
        def ToAsciiString(c):
            result = bytearray(c)

            # NOTE(review): the "ansi" codec is Windows-only (an alias for the
            # active ANSI code page); on other platforms the decode attempt
            # raises LookupError, which is not caught here — verify intended.
            for codec in ["utf-8", "ansi"]:
                try:
                    return result.decode(codec)
                except UnicodeDecodeError:
                    pass

            # BUG FIX: 'raise UnicodeDecodeError()' with no arguments raises a
            # TypeError (the constructor requires 5 arguments). Construct the
            # exception properly so callers can still catch UnicodeDecodeError.
            raise UnicodeDecodeError("ascii", bytes(result), 0, len(result), "Unable to decode content with any supported codec")

        # ----------------------------------------------------------------------

        to_ascii_string = ToAsciiString

    with CallOnExit(Flush):
        try:
            character_stack = []
            character_stack_type = None

            hard_stop = False

            while True:
                if character_stack_type == CharacterStack_Buffered:
                    # Replay the single char buffered by the LineReset state
                    value = character_stack.pop()
                    assert not character_stack

                    character_stack_type = None
                else:
                    c = result.stdout.read(1)
                    if not c:
                        break

                    value = char_to_value(c)

                content = None

                if character_stack_type == CharacterStack_Escape:
                    character_stack.append(value)

                    # An ASCII letter terminates an ANSI escape sequence
                    if not is_ascii_letter(value):
                        continue

                    content = character_stack

                    character_stack = []
                    character_stack_type = None

                elif character_stack_type == CharacterStack_LineReset:
                    # Coalesce consecutive newline chars ('\r\n', '\n\n', ...)
                    if is_newline(value):
                        character_stack.append(value)
                        continue

                    content = character_stack

                    # The non-newline char is buffered and replayed next iteration
                    character_stack = [value]
                    character_stack_type = CharacterStack_Buffered

                else:
                    assert character_stack_type is None, character_stack_type

                    if is_esc(value):
                        character_stack.append(value)
                        character_stack_type = CharacterStack_Escape

                        continue
                    elif is_newline(value):
                        character_stack.append(value)
                        character_stack_type = CharacterStack_LineReset

                        continue

                    content = [value]

                assert content

                # A functor returning False requests a hard stop
                if output(to_ascii_string(content)) == False:
                    hard_stop = True
                    break

            if not hard_stop and character_stack:
                output(to_ascii_string(character_stack))

            result = result.wait() or 0

        except IOError:
            result = -1

    if sink is None:
        return result

    return result, sink.getvalue()
def EntryPoint(
    code_dir_or_doxygen_filename,
    output_dir,
    output_stream=sys.stdout,
    verbose=False,
):
    """Runs doxygen over one or more doxygen configuration files and collects the output.

    Args:
        code_dir_or_doxygen_filename: Either a single doxygen configuration file
            or a directory that is searched (recursively) for files with
            DOXYGEN_EXTENSION. Files with a sibling DOXYGEN_EXTENSION_IGNORE
            marker file are skipped.
        output_dir: Destination root; generated content is moved to
            "<output_dir>/<PROJECT_NAME>[/<PROJECT_VERSION>]".
        output_stream: Stream used for progress output.
        verbose: When True, doxygen's own output is echoed to the stream.

    Returns:
        The DoneManager result code (0 on success, non-zero on failure).
    """
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Get the doxygen files
        doxygen_files = []

        if os.path.isfile(code_dir_or_doxygen_filename):
            doxygen_files.append(code_dir_or_doxygen_filename)
        else:
            dm.stream.write(
                "Searching for doxygen files in '{}'...".format(
                    code_dir_or_doxygen_filename,
                ),
            )
            with dm.stream.DoneManager(
                done_suffix=lambda: "{} found".format(
                    inflect.no("file", len(doxygen_files)),
                ),
                suffix="\n",
            ) as this_dm:
                for fullpath in FileSystem.WalkFiles(
                    code_dir_or_doxygen_filename,
                    include_file_extensions=[DOXYGEN_EXTENSION],
                    traverse_exclude_dir_names=FileSystem.CODE_EXCLUDE_DIR_NAMES,
                ):
                    # A "<name><DOXYGEN_EXTENSION_IGNORE>" sibling file opts this
                    # configuration out of processing.
                    if not os.path.isfile(
                        "{}{}".format(
                            os.path.splitext(fullpath)[0],
                            DOXYGEN_EXTENSION_IGNORE,
                        ),
                    ):
                        doxygen_files.append(fullpath)

        if not doxygen_files:
            return dm.result

        # Process the files

        # ----------------------------------------------------------------------
        class GetDoxygenValueError(KeyError):
            """Exception raised when a doxygen tag is not found"""

            pass

        # ----------------------------------------------------------------------
        def GetDoxygenValue(tag, content):
            # Extracts the value of a "TAG = value" line from the doxygen
            # configuration content; raises GetDoxygenValueError when absent.
            match = re.search(
                r"{}[ \t]*=[ \t]*(?P<value>.*?)\r?\n".format(re.escape(tag)),
                content,
                re.IGNORECASE,
            )
            if not match:
                raise GetDoxygenValueError(
                    "Unable to find '{}' in the doxygen configuration file".format(tag),
                )

            return match.group("value")

        # ----------------------------------------------------------------------

        results = OrderedDict()

        dm.stream.write(
            "Processing {}...".format(
                inflect.no("doxygen file", len(doxygen_files)),
            ),
        )
        with dm.stream.DoneManager(
            suffix="\n",
        ) as doxygen_dm:
            for index, doxygen_file in enumerate(doxygen_files):
                doxygen_dm.stream.write(
                    "Processing '{}' ({} of {})...".format(
                        doxygen_file,
                        index + 1,
                        len(doxygen_files),
                    ),
                )
                with doxygen_dm.stream.DoneManager() as this_dm:
                    # doxygen resolves relative paths against the cwd, so run
                    # from the configuration file's directory.
                    prev_dir = os.getcwd()
                    os.chdir(os.path.dirname(doxygen_file))

                    with CallOnExit(lambda: os.chdir(prev_dir)):
                        # Execute
                        this_dm.result = Process.Execute(
                            'dot -c && doxygen "{}"'.format(doxygen_file),
                            StreamDecorator(this_dm.stream if verbose else None),
                        )
                        if this_dm.result != 0:
                            continue

                        # Extract data from the doxygen file
                        with open(doxygen_file) as f:
                            content = f.read()

                        project_name = GetDoxygenValue("PROJECT_NAME", content)

                        # Older doxygen files don't have a PROJECT_VERSION
                        try:
                            project_version = GetDoxygenValue("PROJECT_VERSION", content)
                        except GetDoxygenValueError:
                            project_version = GetDoxygenValue("PROJECT_NUMBER", content)

                        output_directory = GetDoxygenValue("OUTPUT_DIRECTORY", content)

                        source_dir = os.path.dirname(doxygen_file)
                        if output_directory:
                            # BUG FIX: was 'os.pth.join', which raises
                            # AttributeError for any config with a non-empty
                            # OUTPUT_DIRECTORY.
                            output_directory = os.path.join(source_dir, output_directory)

                        dest_dir = os.path.join(output_dir, project_name)
                        if project_version:
                            dest_dir = os.path.join(dest_dir, project_version)

                        # Values read from the config may be quoted and/or padded.
                        dest_dir = dest_dir.replace('"', "").strip()

                        FileSystem.MakeDirs(dest_dir)

                        # Move each generated output format that was enabled.
                        for content_type in [
                            "html",
                            "Latex",
                            "RTF",
                            "man",
                            "XML",
                        ]:
                            value = GetDoxygenValue(
                                "GENERATE_{}".format(content_type),
                                content,
                            )
                            if not value or value.lower() != "yes":
                                continue

                            output_name = GetDoxygenValue(
                                "{}_OUTPUT".format(content_type),
                                content,
                            )

                            source_fullpath = os.path.join(source_dir, output_name)
                            dest_fullpath = os.path.join(dest_dir, output_name)

                            if not os.path.isdir(source_fullpath):
                                this_dm.stream.write(
                                    "ERROR: The directory '{}' does not exist.\n".format(
                                        source_fullpath,
                                    ),
                                )
                                this_dm.result = -1
                                continue

                            FileSystem.RemoveTree(dest_fullpath)
                            shutil.move(source_fullpath, dest_fullpath)

                            results.setdefault(doxygen_file, OrderedDict())[content_type] = dest_fullpath

                        # Tagfile
                        value = GetDoxygenValue("GENERATE_TAGFILE", content)
                        if value:
                            source_fullpath = os.path.join(source_dir, value)
                            dest_fullpath = os.path.join(dest_dir, value)

                            if not os.path.isfile(source_fullpath):
                                this_dm.stream.write(
                                    "ERROR: The filename '{}' does not exist.\n".format(
                                        source_fullpath,
                                    ),
                                )
                                this_dm.result = -1
                                continue

                            FileSystem.RemoveFile(dest_fullpath)
                            shutil.move(source_fullpath, dest_fullpath)

                            results.setdefault(doxygen_file, OrderedDict())["tagfile"] = dest_fullpath

        # Generate the json file summarizing where everything was moved.
        output_filename = os.path.join(
            output_dir,
            "{}.json".format(os.path.splitext(_script_name)[0]),
        )

        dm.stream.write("Writing '{}'...".format(output_filename))
        with dm.stream.DoneManager() as this_dm:
            with open(output_filename, "w") as f:
                json.dump(results, f)

        return dm.result
def Build(
    force=False,
    no_squash=False,
    keep_temporary_image=False,
    output_stream=sys.stdout,
    preserve_ansi_escape_sequences=False,
):
    """Builds the Docker "base" image for this repository and, unless disabled,
    one "activated" image per activation configuration.

    Args:
        force: Rebuild even when no source changes were detected (also passes
            --no-cache to docker build).
        no_squash: Do not pass --squash to docker build for the base image.
        keep_temporary_image: Do not remove the intermediate committed image.
        output_stream: Stream used for progress output.
        preserve_ansi_escape_sequences: Forwarded to GenerateAnsiSequenceStream.

    Returns:
        The DoneManager result code (0 on success, non-zero on failure).

    NOTE(review): relies on module-level state (calling_dir, repository_name,
    repository_uri, no_now_tag, base_docker_image, docker_image_name,
    image_username/image_groupname, maintainer, repository_*_configurations,
    no_activated_image) — confirm against the module preamble, which is not
    visible in this chunk.
    """
    with StreamDecorator.GenerateAnsiSequenceStream(
        output_stream,
        preserve_ansi_escape_sequences=preserve_ansi_escape_sequences,
    ) as output_stream:
        with StreamDecorator(output_stream).DoneManager(
            line_prefix='',
            prefix="\nResults: ",
            suffix='\n',
        ) as dm:
            if not _VerifyDocker():
                dm.stream.write("ERROR: Ensure that docker is installed and available within this environment.\n")
                dm.result = -1

                return dm.result

            output_dir = os.path.join(calling_dir, "Generated")

            source_dir = os.path.join(output_dir, "Source")
            base_image_dir = os.path.join(output_dir, "Images", "Base")
            activated_image_dir = os.path.join(output_dir, "Images", "Activated")

            image_code_base = "/usr/lib/CommonEnvironmentImage"
            image_code_dir = "{}/{}".format(
                image_code_base,
                repository_name.replace('_', '/'),
            )

            # Optional date-based tag (YYYY.MM.DD) applied in addition to
            # "latest"-style tags.
            if no_now_tag:
                now_tag = None
            else:
                now = time.localtime()
                now_tag = "{0}.{1:02d}.{2:02d}".format(now[0], now[1], now[2])

            # Create the base image
            dm.stream.write("Creating base image...")
            with dm.stream.DoneManager(suffix='\n') as base_dm:
                FileSystem.MakeDirs(base_image_dir)

                # Get the source
                scm = GetAnySCM(calling_dir)

                if not os.path.isdir(source_dir):
                    base_dm.stream.write("Cloning source...")
                    with base_dm.stream.DoneManager() as this_dm:
                        # Ensure that the parent dir exists, but don't create the dir iteself.
                        FileSystem.MakeDirs(os.path.dirname(source_dir))

                        # Enlist in the repo. Clone into a temp dir first, then
                        # rename into place so a failed clone doesn't leave a
                        # partial source_dir behind.
                        temp_dir = CurrentShell.CreateTempDirectory()
                        FileSystem.RemoveTree(temp_dir)

                        this_dm.result, output = scm.Clone(repository_uri, temp_dir)
                        if this_dm.result != 0:
                            this_dm.stream.write(output)
                            return this_dm.result

                        os.rename(temp_dir, source_dir)

                    has_changes = True
                else:
                    # The repo exists
                    base_dm.stream.write("Updating source...")
                    with base_dm.stream.DoneManager() as this_dm:
                        this_dm.result, output = scm.Pull(source_dir)
                        if this_dm.result != 0:
                            this_dm.stream.write(output)
                            return this_dm.result

                        # Detect "no changes" by scraping SCM output; the
                        # phrases are SCM-specific.
                        has_changes = True

                        if scm.Name == "Mercurial":
                            if "no changes found" in output:
                                has_changes = False
                        elif scm.Name == "Git":
                            if "Already up-to-date" in output:
                                has_changes = False
                        else:
                            assert False, "Unsupported SCM: {}".format(scm.Name)

                        if has_changes:
                            this_dm.result, output = scm.Update(source_dir)
                            if this_dm.result != 0:
                                this_dm.stream.write(output)
                                return this_dm.result

                # Filter the source: copy it to a temp dir excluding VCS
                # metadata, generated content, and Windows-only files, then
                # swap it into place.
                filtered_source_dir = os.path.join(base_image_dir, "FilteredSource")

                if os.path.isdir(filtered_source_dir) and not force and not has_changes:
                    base_dm.stream.write("No source changes were detected.\n")
                else:
                    with base_dm.stream.SingleLineDoneManager(
                        "Filtering source...",
                    ) as this_dm:
                        temp_dir = CurrentShell.CreateTempDirectory()
                        FileSystem.RemoveTree(temp_dir)

                        FileSystem.CopyTree(
                            source_dir,
                            temp_dir,
                            excludes=[
                                "/.git",
                                "/.gitignore",
                                "/.hg",
                                "/.hgignore",
                                "*/Generated",
                                "*/__pycache__",
                                "*/Windows",
                                "/*/src",
                                "*.cmd",
                                "*.ps1",
                                "*.pyc",
                                "*.pyo",
                            ],
                            optional_output_stream=this_dm.stream,
                        )

                        FileSystem.RemoveTree(filtered_source_dir)
                        os.rename(temp_dir, filtered_source_dir)

                # 'docker image history' doubles as an existence check for the
                # base image.
                base_dm.stream.write("Verifying Docker base image...")
                with base_dm.stream.DoneManager() as this_dm:
                    this_dm.result, output = Process.Execute('docker image history "{}"'.format(base_docker_image))
                    if this_dm.result != 0:
                        this_dm.stream.write(output)
                        return this_dm.result

                base_dm.stream.write("Creating dockerfile...")
                with base_dm.stream.DoneManager():
                    setup_statement = "./Setup.sh{}".format('' if not repository_setup_configurations else ' {}'.format(' '.join(['"/configuration={}"'.format(configuration) for configuration in repository_setup_configurations])))

                    if repository_name == "Common_Environment":
                        # Common_Environment bootstraps itself directly.
                        commands = textwrap.dedent(
                            """\
                            RUN link /usr/bin/python3 /usr/bin/python

                            RUN adduser --disabled-password --disabled-login --gecos "" "{username}" \\
                             && addgroup "{groupname}" \\
                             && adduser "{username}" "{groupname}"

                            RUN cd {image_code_dir} \\
                             && {setup_statement}
                            """).format(
                            username=image_username,
                            groupname=image_groupname,
                            image_code_dir=image_code_dir,
                            setup_statement=setup_statement,
                        )
                    else:
                        # Other repos must activate Common_Environment before
                        # running their own setup; generate a helper script
                        # that is copied into the image and invoked at build.
                        import io

                        with io.open(
                            os.path.join(base_image_dir, "SetupEnvironmentImpl.sh"),
                            'w',
                            newline='\n',
                        ) as f:
                            f.write(textwrap.dedent(
                                """\
                                #!/bin/bash
                                . {image_code_base}/Common/Environment/Activate.sh python36
                                cd {image_code_dir}
                                {setup_statement}
                                rm --recursive {image_code_base}/Common/Environment/Generated/Linux/Default
                                """).format(
                                image_code_base=image_code_base,
                                image_code_dir=image_code_dir,
                                setup_statement=setup_statement,
                            ))

                        commands = textwrap.dedent(
                            """\
                            COPY SetupEnvironmentImpl.sh /tmp/SetupEnvironmentImpl.sh

                            RUN chmod a+x /tmp/SetupEnvironmentImpl.sh \\
                             && /tmp/SetupEnvironmentImpl.sh
                            """)

                    with open(os.path.join(base_image_dir, "Dockerfile"), 'w') as f:
                        f.write(textwrap.dedent(
                            """\
                            FROM {base_image}

                            COPY FilteredSource {image_code_dir}

                            {commands}

                            RUN chown -R {username}:{groupname} {image_code_dir} \\
                             && chmod g-s {image_code_dir}/Generated/Linux \\
                             && chmod 0750 {image_code_dir}/Generated/Linux \\
                             && chmod -R o-rwx {image_code_dir}

                            # Cleanup
                            RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

                            LABEL maintainer="{maintainer}"

                            # By default, run a bash prompt as the source code user
                            WORKDIR {image_code_dir}
                            CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ]
                            """).format(
                            base_image=base_docker_image,
                            commands=commands,
                            username=image_username,
                            groupname=image_groupname,
                            image_code_dir=image_code_dir,
                            maintainer=maintainer,
                        ))

                base_dm.stream.write("Building Docker image...")
                with base_dm.stream.DoneManager() as this_dm:
                    tags = [
                        "base",
                        "base_latest",
                    ]

                    if now_tag:
                        tags.append("base_{}".format(now_tag))

                    command_line = 'docker build "{dir}" {tags}{squash}{force}' \
                        .format(
                            dir=base_image_dir,
                            tags=' '.join(['--tag "{}:{}"'.format(docker_image_name, tag) for tag in tags]),
                            squash='' if no_squash else " --squash",
                            force=" --no-cache" if force else '',
                        )

                    this_dm.result = Process.Execute(command_line, this_dm.stream)
                    if this_dm.result != 0:
                        return this_dm.result

            if not no_activated_image:
                # Create the activated image(s): run the base image, activate
                # the environment inside it, capture the environment diffs,
                # commit the container, and bake the diffs into ENV statements.
                dm.stream.write("Creating activated image(s)...")
                with dm.stream.DoneManager() as all_activated_dm:
                    for index, configuration in enumerate(repository_activation_configurations):
                        all_activated_dm.stream.write("Creating activated image{} ({} of {})...".format(
                            '' if not configuration else " for the configuration '{}'".format(configuration),
                            index + 1,
                            len(repository_activation_configurations),
                        ))
                        with all_activated_dm.stream.DoneManager(suffix='\n') as activated_dm:
                            this_activated_dir = os.path.join(activated_image_dir, configuration or "Default")
                            FileSystem.MakeDirs(this_activated_dir)

                            unique_id = str(uuid.uuid4())

                            temp_image_name = "{}_image".format(unique_id)
                            temp_container_name = "{}_container".format(unique_id)

                            # Activate the image so we can extract the changes
                            activated_dm.stream.write("Activating...")
                            with activated_dm.stream.DoneManager(suffix='\n') as this_dm:
                                # NOTE(review): the 'bash -c "..."' quote in this
                                # command appears unbalanced (no closing '"'
                                # before the final single quote) — confirm
                                # against version control before relying on it.
                                command_line = 'docker run -it --name "{container_name}" "{image_name}:base_latest" /sbin/my_init -- /sbin/setuser "{username}" bash -c "cd {image_code_dir} && . ./Activate.sh {configuration} && pushd {image_code_base}/Common/Environment && python -m RepositoryBootstrap.EnvironmentDiffs After /decorate' \
                                    .format(
                                        container_name=temp_container_name,
                                        image_name=docker_image_name,
                                        configuration=configuration or '',
                                        username=image_username,
                                        image_code_dir=image_code_dir,
                                        image_code_base=image_code_base,
                                    )

                                sink = six.moves.StringIO()

                                this_dm.result = Process.Execute(command_line, StreamDecorator([sink, this_dm.stream]))
                                if this_dm.result != 0:
                                    return this_dm.result

                                sink = sink.getvalue()

                            activated_dm.stream.write("Extracting enviroment diffs...")
                            with activated_dm.stream.DoneManager():
                                # EnvironmentDiffs emits JSON between sentinel
                                # marker lines; pull it out of the captured
                                # output.
                                match = re.search(
                                    textwrap.dedent(
                                        """\
                                        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
                                        (?P<content>.+?)
                                        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
                                        """),
                                    sink,
                                    re.DOTALL | re.MULTILINE,
                                )
                                assert match, sink

                                environment_diffs = json.loads(match.group("content"))

                            # ----------------------------------------------------------------------
                            def RemoveTempContainer():
                                # Best-effort cleanup of the temp container;
                                # failures are reported but not fatal.
                                activated_dm.stream.write("Removing temp container...")
                                with activated_dm.stream.DoneManager() as this_dm:
                                    this_dm.result, output = Process.Execute('docker rm "{}"'.format(temp_container_name))
                                    if this_dm.result != 0:
                                        this_dm.stream.write(output)

                            # ----------------------------------------------------------------------

                            with CallOnExit(RemoveTempContainer):
                                # Commit the activated image
                                activated_dm.stream.write("Committing container...")
                                with activated_dm.stream.DoneManager() as this_dm:
                                    command_line = 'docker commit "{container_name}" "{image_name}"' \
                                        .format(
                                            container_name=temp_container_name,
                                            image_name=temp_image_name,
                                        )

                                    this_dm.result, output = Process.Execute(command_line)
                                    if this_dm.result != 0:
                                        this_dm.stream.write(output)
                                        return this_dm.result

                                # ----------------------------------------------------------------------
                                def RemoveTempImage():
                                    if keep_temporary_image:
                                        return

                                    activated_dm.stream.write("Removing temp image...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        this_dm.result, output = Process.Execute('docker rmi "{}"'.format(temp_image_name))
                                        if this_dm.result != 0:
                                            this_dm.stream.write(output)

                                # ----------------------------------------------------------------------

                                with CallOnExit(RemoveTempImage):
                                    # Create a new dockerfile. The temp image has all the harddrive changes
                                    # made during activation, but doesn't have the environment changes.
                                    activated_dm.stream.write("Creating dockerfile...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        with open(os.path.join(this_activated_dir, "Dockerfile"), 'w') as f:
                                            f.write(textwrap.dedent(
                                                """\
                                                FROM {temp_image_name}

                                                ENV {env}

                                                # By default, run a bash prompt as the source code user
                                                CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ]

                                                LABEL maintainer="{maintainer}"
                                                """).format(
                                                temp_image_name=temp_image_name,
                                                env='\\\n'.join(['  {}={} '.format(k, v) for k, v in six.iteritems(environment_diffs)]),
                                                image_code_dir=image_code_dir,
                                                maintainer=maintainer,
                                                username=image_username,
                                            ))

                                    activated_dm.stream.write("Building Docker image...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        tags = [
                                            "latest",
                                        ]

                                        if now_tag:
                                            tags.append(now_tag)

                                        # With multiple configurations, prefix
                                        # each tag with the configuration name.
                                        if len(repository_activation_configurations) > 1:
                                            tags = ["{}_{}".format(configuration, tag) for tag in tags]
                                            tags.insert(0, configuration)

                                        command_line = 'docker build "{dir}" {tags}{squash}{force}' \
                                            .format(
                                                dir=this_activated_dir,
                                                tags=' '.join(['--tag "{}:{}"'.format(docker_image_name, tag) for tag in tags]),
                                                squash='',  # <squash is not supported here> '' if no_squash else " --squash",
                                                force=" --no-cache" if force else '',
                                            )

                                        this_dm.result = Process.Execute(command_line, this_dm.stream)
                                        if this_dm.result != 0:
                                            return this_dm.result

            return dm.result
def Build(
    configuration,
    output_dir,
    release_build=False,
    prerelease_build_name=None,
    no_build_info=False,
    keep_temp_dir=False,
    cmake_generator=(
        # Default to Ninja except on universal_linux builds or when the
        # environment explicitly opts into cmake's default generator.
        None if os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY_CONFIGURATION") == "universal_linux" or os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_USE_DEFAULT_CMAKE_GENERATOR") else "Ninja"
    ),
    output_stream=sys.stdout,
    verbose=False,
):
    """Builds the Featurizer Shared Library"""

    # NOTE(review): the message below contains the typo "prerelese" — fix as a
    # separate, user-visible change.
    if release_build and prerelease_build_name:
        raise CommandLine.UsageException(
            "A prerelese build name cannot be provided with the 'release_build' flag",
        )

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Start from a clean output directory.
        FileSystem.RemoveTree(output_dir)
        FileSystem.MakeDirs(output_dir)

        # cmake works in a scratch directory; it is removed on exit unless
        # keep_temp_dir is set.
        temp_directory = CurrentShell.CreateTempDirectory()

        # ----------------------------------------------------------------------
        def CleanupTempDir():
            if keep_temp_dir:
                dm.stream.write(
                    "\nCMake output has been written to '{}'.\n".format(temp_directory),
                )
                return

            FileSystem.RemoveTree(temp_directory)

        # ----------------------------------------------------------------------

        with CallOnExit(CleanupTempDir):
            prev_dir = os.getcwd()
            os.chdir(temp_directory)

            with CallOnExit(lambda: os.chdir(prev_dir)):
                if not release_build:
                    if prerelease_build_name is None:
                        # This value should compare as:
                        #   "manual" < "pipeline"
                        prerelease_build_name = "manual"

                    if not no_build_info:
                        # Append a timestamp + configuration so repeated
                        # prerelease builds sort chronologically.
                        now = datetime.datetime.now()

                        prerelease_build_name = "{prerelease_build_name}.{year}.{month}.{day}.{hour}.{minute}.{second}.{configuration}".format(
                            year=now.year,
                            month=now.month,
                            day=now.day,
                            hour=now.hour,
                            minute=now.minute,
                            second=now.second,
                            prerelease_build_name=prerelease_build_name,
                            configuration=configuration.lower(),
                        )

                # Each activity is (description, command), where command is
                # either a shell command line string or a callable taking
                # (temp_directory, output_dir, output_stream).
                activities = [
                    (
                        "Generating cmake Files",
                        # NOTE(review): the 'temp_dir' kwarg below is not
                        # referenced by the format string — presumably a
                        # leftover; confirm before removing.
                        'cmake {generator}-DCMAKE_BUILD_TYPE={configuration} {prerelease_build_name} "{this_dir}"'.format(
                            generator='-G "{}" '.format(
                                cmake_generator,
                            ) if cmake_generator else "",
                            temp_dir=temp_directory,
                            configuration=configuration,
                            this_dir=_script_dir,
                            prerelease_build_name="" if not prerelease_build_name else "-DPRODUCT_VERSION_PRERELEASE_INFO={}".format(
                                prerelease_build_name,
                            ),
                        ),
                    ),
                    ("Building", "cmake --build ."),
                ]

                if (
                    os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY_CONFIGURATION")
                    == "universal_linux"
                ):
                    activities.append(
                        (
                            "Verifying Universal Linux Binaries",
                            "libcheck libFeaturizers.so",
                        ),
                    )

                activities += [
                    ("Copying Binaries", _CopyBinaries),
                    ("Copying Data", _CopyData),
                    ("Copying Headers", _CopyHeaders),
                ]

                for index, (activity, command_line) in enumerate(activities):
                    dm.stream.write(
                        "{} ({} of {})...".format(activity, index + 1, len(activities)),
                    )
                    with dm.stream.DoneManager(
                        suffix="\n" if verbose else None,
                    ) as this_dm:
                        # Always capture output in 'sink' so it can be dumped
                        # on failure even when not verbose.
                        sink = six.moves.StringIO()

                        output_streams = [sink]

                        if verbose:
                            output_streams.append(
                                StreamDecorator(
                                    this_dm.stream,
                                    line_prefix="INFO: ",
                                ),
                            )

                        this_output_stream = StreamDecorator(output_streams)

                        if callable(command_line):
                            this_dm.result = command_line(
                                temp_directory,
                                output_dir,
                                this_output_stream,
                            )
                        else:
                            this_dm.result = Process.Execute(
                                command_line,
                                this_output_stream,
                            )

                        if this_dm.result != 0:
                            if not verbose:
                                this_dm.stream.write(sink.getvalue())
                            return this_dm.result

        return dm.result
def Package(
    output_dir,
    build_dir,
    output_stream=sys.stdout,
    verbose=False,
):
    """Packages previously built content"""

    # 'build_dir' actually arrives as a list of build directories (one per
    # platform/architecture); rename to make that explicit.
    build_dirs = build_dir
    del build_dir

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # When packaging multiple build dirs, their build metadata must agree.
        if len(build_dirs) > 1:
            dm.stream.write("Ensuring that build data matches...")
            with dm.stream.DoneManager() as ensure_dm:
                ensure_dm.stream.write("Checking '{}'...".format(JSON_FILENAME))
                with ensure_dm.stream.DoneManager() as this_dm:
                    this_dm.result = (
                        0
                        if _CompareFiles(
                            this_dm.stream,
                            *[
                                os.path.join(build_dir, JSON_FILENAME)
                                for build_dir in build_dirs
                            ]
                        )
                        else -1
                    )
                    if this_dm.result != 0:
                        return this_dm.result

                ensure_dm.stream.write("Checking 'Data' directories...")
                with ensure_dm.stream.DoneManager() as this_dm:
                    this_dm.result = (
                        0
                        if _CompareDirectories(
                            this_dm.stream,
                            *[os.path.join(build_dir, "Data") for build_dir in build_dirs]
                        )
                        else -1
                    )
                    if this_dm.result != 0:
                        return this_dm.result

        dm.stream.write("Reading build configuration...")
        with dm.stream.DoneManager() as this_dm:
            # The dirs are verified identical above, so the first one's config
            # stands in for all of them.
            json_filename = os.path.join(build_dirs[0], JSON_FILENAME)

            if not os.path.isfile(json_filename):
                this_dm.stream.write(
                    "ERROR: The filename '{}' does not exist.\n".format(json_filename),
                )
                this_dm.result = -1
                return this_dm.result

            with open(json_filename) as f:
                build_config = json.load(f)

            # Augment the config with values consumed by the nuspec template.
            build_config["build_dir"] = build_dirs[0]
            build_config["data_dir"] = os.path.join(build_dirs[0], "Data", "**", "*.*")
            build_config["package_id"] = build_config["product_name"].replace(" ", ".")
            build_config["product_copyright"] = build_config["product_copyright"].replace(
                "(C)",
                "©",
            )

        # Generate the correct nuget file statements based on output in the build_dir
        dm.stream.write("Generating nuget file statements...")
        with dm.stream.DoneManager() as this_dm:
            # Maps a nuget runtime target (e.g. "runtimes/win-x64/native") to
            # the files from the build dir that produced it.
            nuget_file_statements = {}

            for build_dir in build_dirs:
                these_files = []
                value_type = None

                for item in os.listdir(build_dir):
                    # Classify each artifact by filename convention:
                    # Featurizers.dll -> Windows, libFeaturizers.so* -> Linux,
                    # libFeaturizers*.dylib -> macOS.
                    this_value_type = None

                    if item == "Featurizers.dll":
                        if "x86" in build_dir:
                            this_value_type = "runtimes/win-x86/native"
                        else:
                            this_value_type = "runtimes/win-x64/native"
                    elif item.startswith("libFeaturizers.so"):
                        this_value_type = "runtimes/linux-x64/native"
                    else:
                        name, ext = os.path.splitext(item)

                        if name.startswith("libFeaturizers") and ext == ".dylib":
                            this_value_type = "runtimes/osx-x64/native"

                    if this_value_type is not None:
                        # A single build dir must not mix runtime targets.
                        assert value_type is None or this_value_type == value_type, (
                            value_type,
                            item,
                            this_value_type,
                        )

                        value_type = this_value_type
                        these_files.append(os.path.join(build_dir, item))

                # NOTE(review): if a build dir contains no recognized binaries,
                # value_type is still None here and is stored under the None
                # key — confirm whether that case should instead be an error.
                if value_type in nuget_file_statements:
                    this_dm.stream.write(
                        "ERROR: The build directory '{}' overwrites previously captured content ({}: '{}').\n".format(
                            build_dir,
                            value_type,
                            nuget_file_statements[value_type],
                        ),
                    )
                    this_dm.result = -1
                    return this_dm.result

                nuget_file_statements[value_type] = these_files

            # Flatten into <file .../> XML elements for the nuspec template.
            file_statements = []

            for k, v in six.iteritems(nuget_file_statements):
                for filename in v:
                    file_statements.append(
                        '<file src="{}" target="{}" />'.format(filename, k),
                    )

            build_config["file_statements"] = "\n".join(file_statements)

        FileSystem.MakeDirs(output_dir)

        dm.stream.write("Writing nuspec file...")
        with dm.stream.DoneManager():
            nuspec_filename = os.path.join(output_dir, "Featurizers.nuspec")

            with open(nuspec_filename, "w") as f:
                f.write(_nuget_template.format(**build_config))

        dm.stream.write("Running nuget...")
        with dm.stream.DoneManager() as this_dm:
            # nuget resolves relative paths against the cwd, so run from the
            # output dir and restore the original cwd afterwards.
            prev_dir = os.getcwd()
            os.chdir(output_dir)

            with CallOnExit(lambda: os.chdir(prev_dir)):
                this_dm.result = Process.Execute(
                    'nuget.exe pack "{}"'.format(nuspec_filename),
                    this_dm.stream,
                )
                if this_dm.result != 0:
                    return this_dm.result

        return dm.result