def Build(
    image_name,
    tag=None,
    output_stream=sys.stdout,
    verbose=False,
):
    """Creates a docker image"""

    tags = tag
    del tag

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        prev_dir = os.getcwd()
        os.chdir(os.path.join(_script_dir, image_name))

        with CallOnExit(lambda: os.chdir(prev_dir)):
            image_name = "{}/{}".format(DOCKER_USER_NAME, image_name)

            dm.stream.write("Building docker image...")
            with dm.stream.DoneManager(
                line_prefix=" ",
                suffix="\n",
            ) as this_dm:
                this_dm.result = Process.Execute(
                    "docker build --tag {} .".format(image_name),
                    this_dm.stream,
                )
                if this_dm.result != 0:
                    return this_dm.result

            if tags:
                dm.stream.write("Applying tags...")
                with dm.stream.DoneManager() as tag_dm:
                    for index, tag in enumerate(tags):
                        tag_dm.stream.write(
                            "'{}' ({} of {})...".format(tag, index + 1, len(tags)),
                        )
                        with tag_dm.stream.DoneManager() as this_dm:
                            this_dm.result, output = Process.Execute(
                                "docker tag {image_name} {image_name}:{tag}".format(
                                    image_name=image_name,
                                    tag=tag,
                                ),
                            )
                            if this_dm.result != 0:
                                this_dm.stream.write(output)
                                return this_dm.result

        return dm.result
def Publish(
    image_name,
    registry_name,
    tag=None,
    output_stream=sys.stdout,
):
    """Publishes previously built content to a docker registry"""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        prev_dir = os.getcwd()
        os.chdir(os.path.join(_script_dir, image_name))

        with CallOnExit(lambda: os.chdir(prev_dir)):
            image_name = "{}/{}".format(DOCKER_USER_NAME, image_name)

            image_id = _GetImageId(
                image_name,
                dm,
                tag=tag,
            )

            # _GetImageId returns None (and sets a nonzero result) on failure
            if image_id is None:
                assert dm.result != 0
                return dm.result

            new_image_name = "{}/{}".format(registry_name, image_name.split("/")[-1])

            dm.stream.write("Renaming image...")
            with dm.stream.DoneManager() as this_dm:
                this_dm.result, output = Process.Execute(
                    "docker tag {} {}{}".format(
                        image_id,
                        new_image_name,
                        ":{}".format(tag) if tag else "",
                    ),
                )
                if this_dm.result != 0:
                    this_dm.stream.write(output)
                    return this_dm.result

            dm.stream.write("Pushing image...")
            with dm.stream.DoneManager(
                line_prefix=" ",
            ) as this_dm:
                this_dm.result = Process.Execute(
                    "docker push {}".format(new_image_name),
                    this_dm.stream,
                )
                if this_dm.result != 0:
                    return this_dm.result

        return dm.result
def ExecuteCommands(
    cls,
    command_or_commands,
    output_stream,
    environment=None,
):
    """\
    Creates a temporary script file, writes the commands to that file, and then
    executes it. Returns the result and output generated during execution.
    """

    from CommonEnvironment.CallOnExit import CallOnExit
    from CommonEnvironment import FileSystem
    from CommonEnvironment import Process

    temp_filename = cls.CreateTempFilename(cls.ScriptExtension)

    with open(temp_filename, 'w') as f:
        f.write(cls.GenerateCommands(command_or_commands))

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        cls.MakeFileExecutable(temp_filename)

        return Process.Execute(
            cls.DecorateInvokeScriptCommandLine(temp_filename),
            output_stream,
            environment=environment,
        )
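# ----------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): assuming this
# classmethod is exposed by the concrete shell object (e.g. `CurrentShell`) and
# that `GenerateCommands` accepts that shell's command objects, an invocation
# could look like the commented example below; the `Message` command type and
# import paths are assumptions for illustration only. The return value mirrors
# `Process.Execute`.
#
#   import sys
#
#   from CommonEnvironment.Shell.All import CurrentShell
#   from CommonEnvironment.Shell.Commands import Message
#
#   result = CurrentShell.ExecuteCommands(
#       [Message("Hello"), Message("World")],
#       sys.stdout,
#   )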
def Build(
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        command_line_template = 'python "{script}" {{plugin}} "{input}" "{{output}}"'.format(
            script=_script_dir,
            input=os.path.join(_script_dir, "Featurizers.yaml"),
        )

        for index, (plugin, output_dir) in enumerate(_PLUGINS):
            dm.stream.write(
                "Generating '{}' ({} of {})...".format(plugin, index + 1, len(_PLUGINS)),
            )
            with dm.stream.DoneManager(
                suffix="\n",
            ) as this_dm:
                this_dm.result = Process.Execute(
                    command_line_template.format(
                        plugin=plugin,
                        output=output_dir,
                    ),
                    this_dm.stream,
                )

            if dm.result != 0:
                return dm.result

        return dm.result
def _Compile(cls, context, script_filename, output_stream):
    command_line = 'python "{}" build_exe{}'.format(
        script_filename,
        '' if not context["distutils_args"] else " {}".format(
            ' '.join(['"{}"'.format(arg) for arg in context["distutils_args"]]),
        ),
    )

    result = Process.Execute(command_line, output_stream)
    if result == 0:
        if os.path.isdir("build"):
            subdirs = os.listdir("build")
            assert len(subdirs) == 1, subdirs

            source_dir = os.path.join("build", subdirs[0])

            # Remove empty dirs
            to_remove = []

            for root, dirs, _ in os.walk(source_dir):
                for dir in dirs:
                    fullpath = os.path.join(root, dir)

                    if os.path.isdir(fullpath) and not os.listdir(fullpath):
                        to_remove.append(fullpath)

            for dir in to_remove:
                FileSystem.RemoveTree(dir)

            FileSystem.RemoveTree(context["output_dir"])

            shutil.move(source_dir, context["output_dir"])
            FileSystem.RemoveTree("build")

    return result
def StopCoverage(self, output_stream):
    if not self._dirs:
        return 0

    # Copy any coverage data found below the output dir into the output dir itself
    output_dir = os.path.dirname(self._coverage_filename)

    for filename in FileSystem.WalkFiles(
        output_dir,
        include_file_extensions=[".gcda"],
    ):
        dest_filename = os.path.join(output_dir, os.path.basename(filename))
        if dest_filename == filename:
            continue

        if not os.path.isfile(dest_filename):
            shutil.copyfile(filename, dest_filename)

    return Process.Execute(
        '{script} Lcov {dirs} "/output_dir={output}"'.format(
            script=CurrentShell.CreateScriptName("ExtractCoverageInfo"),
            dirs=" ".join(['"/bin_dir={}"'.format(dir) for dir in self._dirs]),
            output=output_dir,
        ),
        output_stream,
    )
def Build(
    output_stream=sys.stdout,
):
    """Builds clang-formatProxy"""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        command_line = '"{script}" Compile "/input={input}" "/output_dir={output_dir}" /no_bundle {exclude_modules}'.format(
            script=CurrentShell.CreateScriptName("CxFreezeCompiler"),
            input=os.path.join(_script_dir, "clang-formatProxy.py"),
            output_dir=os.path.join(
                _script_dir,
                "..",
                "..",
                "Tools",
                "clang-formatProxy",
                "v1.0",
                CurrentShell.CategoryName,
            ),
            exclude_modules=" ".join(
                ['"/exclude_module={}"'.format(module) for module in EXCLUDE_MODULES],
            ),
        )

        dm.result = Process.Execute(command_line, dm.stream)
        if dm.result != 0:
            return dm.result

        return dm.result
def EntryPoint(
    arg=None,
    output_stream=sys.stdout,
):
    args = arg
    del arg

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        python_script = os.path.join(
            os.path.dirname(wxglade.__file__),
            "wxglade.py",
        )
        assert os.path.isfile(python_script), python_script

        dm.result = Process.Execute(
            'python "{}" {}'.format(
                python_script,
                ' '.join(['"{}"'.format(arg) for arg in args]),
            ),
            dm.stream,
        )

        return dm.result
def _TestImpl(self, configuration):
    for test_type in ["standard", "build_helper"]:
        with _BuildGenerator(
            os.path.join(
                os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_COMMON_ROOT"),
                "src",
                "CmakeLocalEndToEndTestsImpl",
                test_type,
                "exe",
            ),
            configuration,
        ) as (temp_dir, result, output):
            self.assertTrue(
                result == 0,
                msg=output,
            )

            found = False

            for potential_exe_name in ["Exe", "Exe.exe"]:
                exe_name = os.path.join(temp_dir, potential_exe_name)
                if os.path.isfile(exe_name):
                    found = True

                    result, output = Process.Execute('"{}" --success'.format(exe_name))
                    self.assertTrue(
                        result == 0,
                        msg=output,
                    )

                    break

            self.assertTrue(found)
def _BuildImpl(build_dir, output_stream):
    return Process.Execute(
        'cmake --build "{build_dir}"'.format(
            build_dir=build_dir,
        ),
        output_stream,
    )
def Lcov(
    bin_dir=None,
    not_llvm=False,
    output_dir=None,
    output_filename="lcov.info",
    type=None,
    output_stream=sys.stdout,
    verbose=False,
):
    """Generates a LCOV file based on *.gcno files"""

    bin_dirs = bin_dir
    del bin_dir

    if not bin_dirs:
        bin_dirs = [os.getcwd()]

    if len(bin_dirs) > 1 and not output_dir:
        raise CommandLine.UsageException(
            "An 'output_dir' must be provided when multiple 'bin_dirs' are provided",
        )

    if len(bin_dirs) == 1 and not output_dir:
        output_dir = bin_dirs[0]

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        output_filename = os.path.join(output_dir, output_filename)

        dm.stream.write("Creating '{}'...".format(output_filename))
        with dm.stream.DoneManager() as this_dm:
            FileSystem.MakeDirs(output_dir)

            command_line = 'grcov {dirs} -o "{output_filename}"{llvm}{type}'.format(
                dirs=" ".join(['"{}"'.format(dir) for dir in bin_dirs]),
                output_filename=output_filename,
                llvm="" if not_llvm else " --llvm",
                type="" if type is None else " -t {}".format(type),
            )

            if verbose:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        Command Line:
                            {}

                        """,
                    ).format(command_line),
                )

            this_dm.result = Process.Execute(command_line, this_dm.stream)
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
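# ----------------------------------------------------------------------
# Illustration (not part of the original source): for a single bin dir, the
# command line built above resembles the following (paths are made up):
#
#   grcov "/tmp/build/bin" -o "/tmp/build/bin/lcov.info" --llvm
#
# Passing `type="lcov"` appends ` -t lcov`, and `not_llvm=True` drops the
# `--llvm` flag.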
def RemoveTempImage():
    if keep_temporary_image:
        return

    activated_dm.stream.write("Removing temp image...")
    with activated_dm.stream.DoneManager() as this_dm:
        this_dm.result, output = Process.Execute('docker rmi "{}"'.format(temp_image_name))
        if this_dm.result != 0:
            this_dm.stream.write(output)
def ExtractCoverageInfo(coverage_filename, binary_filename, includes, excludes, output_stream):
    if excludes:
        excludes_func = lambda method_name: any(fnmatch(method_name, exclude) for exclude in excludes)
    else:
        excludes_func = lambda method_name: False

    if includes:
        includes_func = lambda method_name: any(fnmatch(method_name, include) for include in includes)
    else:
        includes_func = lambda method_name: True

    # ----------------------------------------------------------------------
    def ShouldInclude(method_name):
        return not excludes_func(method_name) and includes_func(method_name)

    # ----------------------------------------------------------------------

    temp_filename = CurrentShell.CreateTempFilename()

    command_line = '"{powershell}" -ExecutionPolicy Bypass -NoProfile -File "{filename}" "{coverage}" "{module}" > "{temp_filename}" 2>&1'.format(
        powershell=r"{}\syswow64\WindowsPowerShell\v1.0\powershell.exe".format(
            os.getenv("SystemRoot"),
        ),
        filename=os.path.join(_script_dir, "CoverageToCsv.ps1"),
        coverage=coverage_filename,
        module=os.path.basename(binary_filename),
        temp_filename=temp_filename,
    )

    result = Process.Execute(command_line, output_stream)
    if result != 0:
        return result

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        covered = 0
        not_covered = 0

        with open(temp_filename, "r") as input:
            reader = csv.reader(input)

            for row in reader:
                if not isinstance(row, (tuple, list)):
                    raise Exception(row)

                if len(row) == 1:
                    raise Exception(row[0])

                method_name = row[1]
                if not ShouldInclude(method_name):
                    continue

                covered += int(row[-2])
                not_covered += int(row[-1])

        return covered, not_covered
def _InvokeImplEx(cls, invoke_reason, context, status_stream, verbose_stream, verbose):
    command_line = cls.CreateInvokeCommandLine(context, verbose_stream)

    sink = six.moves.StringIO()

    result = Process.Execute(command_line, StreamDecorator([sink, verbose_stream]))

    if result != 0 and not verbose:
        status_stream.write(sink.getvalue())

    return result
def _BuildGenerator(
    source_dir,
    configuration,
    generator=_DEFAULT_GENERATOR,
):
    temp_dir = CurrentShell.CreateTempDirectory()
    with CallOnExit(lambda: FileSystem.RemoveTree(temp_dir)):
        command_line = 'cmake {generator}-S "{source_dir}" -B "{build_dir}" -DCppCommon_CMAKE_DEBUG_OUTPUT=On -DCMAKE_BUILD_TYPE={config}'.format(
            generator='-G "{}" '.format(generator) if generator else "",
            source_dir=source_dir,
            build_dir=temp_dir,
            config=configuration,
        )

        result, output = Process.Execute(command_line)

        if result == 0:
            result, output = Process.Execute('cmake --build "{}"'.format(temp_dir))

        yield temp_dir, result, output
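# ----------------------------------------------------------------------
# Note (not part of the original source): `_BuildGenerator` yields exactly once
# and is consumed as a context manager (see `_TestImpl` above), so in the
# original module it is presumably wrapped with `contextlib.contextmanager`.
# A minimal usage sketch, with a made-up source dir:
#
#   with _BuildGenerator("/path/to/source", "Debug") as (temp_dir, result, output):
#       assert result == 0, output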
def Build(
    force=False,
    output_stream=sys.stdout,
    verbose=False,
):
    return Process.Execute(
        '"{script}" Generate PythonJson CppToJson "{output_dir}" "/input={input}" /plugin_arg=no_serialization:true{force}{verbose}'.format(
            script=CurrentShell.CreateScriptName("SimpleSchemaGenerator"),
            output_dir=os.path.join(_script_dir, "..", "GeneratedCode"),
            input=os.path.join(_script_dir, "..", "CppToJson.SimpleSchema"),
            force=" /force" if force else "",
            verbose=" /verbose" if verbose else "",
        ),
        output_stream,
    )
def Build(
    output_dir,
    output_stream=sys.stdout,
):
    paths = []
    includes = []
    excludes = []

    command_line = '"{script}" Compile "/input={input}" "/output_dir={output_dir}" /no_bundle /no_optimize {paths}{includes}{excludes}'.format(
        script=CurrentShell.CreateScriptName("CxFreezeCompiler"),
        input=os.path.join(_script_dir, "Backup.py"),
        output_dir=output_dir,
        paths='' if not paths else " {}".format(' '.join(['"/path={}"'.format(path) for path in paths])),
        includes='' if not includes else " {}".format(' '.join(['"/include={}"'.format(include) for include in includes])),
        excludes='' if not excludes else " {}".format(' '.join(['"/exclude={}"'.format(exclude) for exclude in excludes])),
    )

    return Process.Execute(command_line, output_stream)
def Execute(
    cls,
    compiler,
    context,
    command_line,
    includes=None,
    excludes=None,
    verbose=False,
):
    start_time = time.time()

    result, output = Process.Execute(command_line)

    return cls.ExecuteResult(
        result,
        output,
        str(datetime.timedelta(seconds=(time.time() - start_time))),
    )
def _GetImageId(
    image_name,
    dm,
    tag=None,
):
    dm.stream.write("Retrieving image id...")
    with dm.stream.DoneManager() as this_dm:
        this_dm.result, output = Process.Execute(
            "docker images {}{} --quiet".format(
                image_name,
                ":{}".format(tag) if tag else "",
            ),
        )
        if this_dm.result != 0:
            this_dm.stream.write(output)
            return None

        lines = output.strip().split("\n")
        return lines[0]
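# ----------------------------------------------------------------------
# Illustration (not part of the original source): `docker images <name> --quiet`
# prints one image id per line, most recently created first, e.g.:
#
#   1f2d3c4b5a69
#   0a1b2c3d4e5f
#
# `_GetImageId` therefore returns the id of the most recent matching image.
# The ids shown above are made up.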
def EntryPoint(
    arg,
    output_stream=sys.stdout,
):
    args = arg
    del arg

    # One of the args will be the filename
    input_filename = None

    for arg in args:
        if arg.startswith("-assume-filename="):
            input_filename = arg[len("-assume-filename="):]
            break

    if input_filename is None:
        raise Exception("Unable to extract the filename from '{}'".format(args))

    # Write the contents from stdin to a temp file
    input_content = sys.stdin.read()
    assert input_content

    temp_filename = CurrentShell.CreateTempFilename(os.path.splitext(input_filename)[1])

    with open(temp_filename, "w") as f:
        f.write(input_content)

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        # Invoke the script
        script_name = "Formatter"

        if CurrentShell.CategoryName != "Linux":
            script_name = CurrentShell.CreateScriptName(script_name)

        command_line = '"{script}" Format "{filename}" /quiet "/plugin_arg=hint_filename:{original_filename}"'.format(
            script=script_name,
            filename=temp_filename,
            original_filename=input_filename.replace(":", "\\:"),
        )

        result, formatted_output = Process.Execute(command_line)

        output_stream.write(formatted_output)
        return result
def Build(
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        input_file = os.path.join(_script_dir, "..", "SimpleSchema.g4")
        assert os.path.isfile(input_file), input_file

        output_dir = os.path.join(_script_dir, "..", "GeneratedCode")

        command_line = '{script} Compile Python3 -o "{output_dir}" -no-listener -visitor "{input_file}"'.format(
            script=CurrentShell.CreateScriptName("ANTLR"),
            output_dir=output_dir,
            input_file=input_file,
        )

        dm.result = Process.Execute(command_line, dm.stream)
        return dm.result
def EntryPoint(
    print_command_line=False,
    no_status=False,
    force=False,
    output_stream=sys.stdout,
    verbose=False,
):
    inputs = []

    user_root = os.path.dirname(os.getenv("USERPROFILE"))

    for user in USERS_TO_BACKUP:
        inputs += [
            os.path.join(user_root, user, folder)
            for folder in ACCOUNT_DIRECTORIES_TO_BACKUP
        ]

    inputs += ADDITIONAL_DIRECTORIES_TO_BACKUP
    inputs += ADDITIONAL_FILES_TO_BACKUP

    for input in inputs:
        if not os.path.exists(input):
            raise Exception("'{}' is not a valid file or directory".format(input))

    backup_script = os.path.join(
        os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY"),
        "src",
        "Backup",
        "Backup.py",
    )
    assert os.path.isfile(backup_script), backup_script

    command_line = 'python "{script}" Mirror "{output_dir}" {input}{no_status}{force}{verbose} /preserve_ansi_escape_sequences'.format(
        script=backup_script,
        output_dir=OUTPUT_DIR,
        input=' '.join(['"/input={}"'.format(input) for input in inputs]),
        no_status='' if not no_status else " /no_status",
        force='' if not force else " /force",
        verbose='' if not verbose else " /verbose",
    )

    if print_command_line:
        output_stream.write(command_line)
        return

    return Process.Execute(command_line, output_stream)
def Execute(
    cls,
    repo_root,
    command,
    strip=False,
    newline=False,
):
    command = command.replace("git ", 'git -C "{}" '.format(repo_root))

    if cls.Diagnostics:
        sys.stdout.write("VERBOSE: {}\n".format(command))

    result, content = Process.Execute(
        command,
        environment=os.environ,
    )

    if strip:
        content = content.strip()

    if newline and not content.endswith('\n'):
        content += '\n'

    return result, content
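# ----------------------------------------------------------------------
# Hedged usage sketch (not part of the original source): assuming this
# classmethod lives on the Git-specific SCM wrapper (the class name used below
# is an assumption), the call rewrites the command to target the repo root and
# returns the exit code plus the captured output:
#
#   result, content = GitSCM.Execute(
#       "/path/to/repo",
#       "git status --porcelain",
#       strip=True,
#   )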
def Build(
    force=False,
    output_stream=sys.stdout,
    verbose=False,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        dm.result = Process.Execute(
            '"{script}" Generate PythonYaml CodeCoverageFilter "{output_dir}" "/input={input_file}" /plugin_arg=no_serialization:True{force}{verbose}'.format(
                script=CurrentShell.CreateScriptName("SimpleSchemaGenerator"),
                output_dir=os.path.join(_script_dir, "..", "GeneratedCode"),
                input_file=os.path.join(_script_dir, "..", "CodeCoverageFilter.SimpleSchema"),
                force=" /force" if force else "",
                verbose=" /verbose" if verbose else "",
            ),
            dm.stream,
        )

        return dm.result
def ResolveSymLink(cls, filename):
    # Python 2.+ doesn't support symlinks on Windows and there doesn't seem to be
    # a way to resolve a symlink without parsing the file, and libraries mentioned in
    # http://stackoverflow.com/questions/1447575/symlinks-on-windows/7924557#7924557
    # and https://github.com/sid0/ntfs seem to have problems. The only way I have found
    # to reliably get this info is to parse the output of 'dir' and extract the info.
    # This is horribly painful code.

    import re

    # <No name in module> pylint: disable = E0611
    # <Unable to import> pylint: disable = F0401
    import six.moves.cPickle as pickle

    from CommonEnvironment import Process

    filename = filename.replace('/', os.path.sep)

    assert cls.IsSymLink(filename)

    if not hasattr(cls, "_symlink_lookup"):
        cls._symlink_lookup = {}                    # <Attribute defined outside __init__> pylint: disable = W0201
        cls._symlink_redirection_maps = {}          # <Attribute defined outside __init__> pylint: disable = W0201

    if filename in cls._symlink_lookup:
        return cls._symlink_lookup[filename]

    # Are there any redirection maps that reside in the filename's path?
    path = os.path.split(filename)[0]
    while True:
        potential_map_filename = os.path.join(path, "symlink.redirection_map")
        if os.path.isfile(potential_map_filename):
            if potential_map_filename not in cls._symlink_redirection_maps:
                cls._symlink_redirection_maps[potential_map_filename] = pickle.loads(open(potential_map_filename).read())

            if filename in cls._symlink_redirection_maps[potential_map_filename]:
                return cls._symlink_redirection_maps[potential_map_filename][filename]

        new_path = os.path.split(path)[0]
        if new_path == path:
            break

        path = new_path

    # If here, there isn't a map filename so we have to do things the hard way.
    if os.path.isfile(filename):
        command_line = 'dir /AL "%s"' % filename
        is_match = lambda name: True
    else:
        command_line = 'dir /AL "%s"' % os.path.dirname(filename)
        is_match = lambda name: name == os.path.basename(filename)

    result, output = Process.Execute(command_line)
    assert result == 0

    regexp = re.compile(r".+<(?P<type>.+?)>\s+(?P<link>.+?)\s+\[(?P<filename>.+?)\]\s*")

    for line in output.split('\n'):
        match = regexp.match(line)
        if match:
            link = match.group("link")
            if not is_match(link):
                continue

            target_filename = match.group("filename")
            assert os.path.exists(target_filename), target_filename

            cls._symlink_lookup[filename] = target_filename
            return target_filename

    assert False, output
def Execute(
    root_dir,
    output_dir,
    mode=None,
    debug_only=False,
    release_only=False,
    output_stream=sys.stdout,
    verbose=False,
):
    """Recursively calls Build files with the desired mode(s)"""

    assert os.path.isdir(root_dir), root_dir
    assert output_dir

    modes = mode or ["clean", "build"]
    del mode

    assert output_stream

    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResults: ",
        suffix='\n',
    ) as dm:
        build_infos = _GetBuildInfos(root_dir, dm.stream)
        if not build_infos:
            return dm.result

        # Find all the build files that have configurations that we can process
        build_configurations = []

        dm.stream.write("Processing build files...")
        with dm.stream.DoneManager(
            done_suffix=lambda: "{} found".format(inflect.no("configuration", len(build_configurations))),
        ) as this_dm:
            # ----------------------------------------------------------------------
            def GetSupportedConfigurations(configurations):
                # If there is a configuration that indicates completeness, execute that
                # and skip everything else.
                if COMPLETE_CONFIGURATION_NAME in configurations:
                    yield COMPLETE_CONFIGURATION_NAME
                    return

                for config in configurations:
                    config_lower = config.lower()

                    if (
                        (debug_only and "debug" in config_lower)
                        or (release_only and "release" in config_lower)
                        or (not debug_only and not release_only)
                    ):
                        yield config

            # ----------------------------------------------------------------------

            for build_info in build_infos:
                if not build_info.configuration.Configurations:
                    build_configurations.append((
                        build_info.filename,
                        build_info.configuration,
                        None,
                    ))
                else:
                    for config in GetSupportedConfigurations(build_info.configuration.Configurations):
                        build_configurations.append((
                            build_info.filename,
                            build_info.configuration,
                            config,
                        ))

        if not build_configurations:
            return dm.result

        dm.stream.write('\n')

        for mode_index, mode in enumerate(modes):
            dm.stream.write("Invoking '{}' ({} of {})...".format(
                mode,
                mode_index + 1,
                len(modes),
            ))
            with dm.stream.DoneManager() as mode_dm:
                for build_index, (build_filename, config, configuration) in enumerate(build_configurations):
                    mode_dm.stream.write("Processing '{}'{} ({} of {})...".format(
                        build_filename,
                        " - '{}'".format(configuration) if configuration else '',
                        build_index + 1,
                        len(build_configurations),
                    ))
                    with mode_dm.stream.DoneManager() as build_dm:
                        build_output_dir = os.path.join(output_dir, config.SuggestedOutputDirLocation, configuration or "Build")
                        FileSystem.MakeDirs(build_output_dir)

                        command_line = 'python "{build_filename}" {mode}{configuration}{output_dir}'.format(
                            build_filename=build_filename,
                            mode=mode,
                            configuration=' "{}"'.format(configuration) if configuration else '',
                            output_dir=' "{}"'.format(build_output_dir) if config.RequiresOutputDir else '',
                        )

                        build_dm.result, output = Process.Execute(command_line)

                        # It is possible that the cleaning process deleted the output directory. Recreate it
                        # if necessary to store the log file.
                        FileSystem.MakeDirs(build_output_dir)

                        with open(os.path.join(build_output_dir, BUILD_LOG_TEMPLATE.format(mode=mode)), 'w') as f:
                            f.write(output)

                        if build_dm.result != 0:
                            build_dm.stream.write(output)
                        elif verbose:
                            build_dm.stream.write(StringHelpers.LeftJustify("INFO: {}".format(output), len("INFO: ")))

        return dm.result
def EntryPoint( code_dir_or_doxygen_filename, output_dir, output_stream=sys.stdout, verbose=False, ): with StreamDecorator(output_stream).DoneManager( line_prefix="", prefix="\nResults: ", suffix="\n", ) as dm: # Get the doxygen files doxygen_files = [] if os.path.isfile(code_dir_or_doxygen_filename): doxygen_files.append(code_dir_or_doxygen_filename) else: dm.stream.write( "Searching for doxygen files in '{}'...".format( code_dir_or_doxygen_filename, ), ) with dm.stream.DoneManager( done_suffix=lambda: "{} found".format( inflect.no("file", len(doxygen_files)), ), suffix="\n", ) as this_dm: for fullpath in FileSystem.WalkFiles( code_dir_or_doxygen_filename, include_file_extensions=[DOXYGEN_EXTENSION], traverse_exclude_dir_names=FileSystem. CODE_EXCLUDE_DIR_NAMES, ): if not os.path.isfile( "{}{}".format( os.path.splitext(fullpath)[0], DOXYGEN_EXTENSION_IGNORE, ), ): doxygen_files.append(fullpath) if not doxygen_files: return dm.result # Process the files # ---------------------------------------------------------------------- class GetDoxygenValueError(KeyError): """Exception raised when a doxygen tag is not found""" pass # ---------------------------------------------------------------------- def GetDoxygenValue(tag, content): match = re.search( r"{}[ \t]*=[ \t]*(?P<value>.*?)\r?\n".format(re.escape(tag)), content, re.IGNORECASE, ) if not match: raise GetDoxygenValueError( "Unable to find '{}' in the doxygen configuration file". format(tag), ) return match.group("value") # ---------------------------------------------------------------------- results = OrderedDict() dm.stream.write( "Processing {}...".format( inflect.no("doxygen file", len(doxygen_files))), ) with dm.stream.DoneManager(suffix="\n", ) as doxygen_dm: for index, doxygen_file in enumerate(doxygen_files): doxygen_dm.stream.write( "Processing '{}' ({} of {})...".format( doxygen_file, index + 1, len(doxygen_files), ), ) with doxygen_dm.stream.DoneManager() as this_dm: prev_dir = os.getcwd() os.chdir(os.path.dirname(doxygen_file)) with CallOnExit(lambda: os.chdir(prev_dir)): # Execute this_dm.result = Process.Execute( 'dot -c && doxygen "{}"'.format(doxygen_file), StreamDecorator( this_dm.stream if verbose else None), ) if this_dm.result != 0: continue # Extract data from the doxygen file with open(doxygen_file) as f: content = f.read() project_name = GetDoxygenValue("PROJECT_NAME", content) # Older doxygen files don't have a PROJECT_VERSION try: project_version = GetDoxygenValue( "PROJECT_VERSION", content) except GetDoxygenValueError: project_version = GetDoxygenValue( "PROJECT_NUMBER", content) output_directory = GetDoxygenValue( "OUTPUT_DIRECTORY", content) source_dir = os.path.dirname(doxygen_file) if output_directory: output_directory = os.pth.join( source_dir, output_directory) dest_dir = os.path.join(output_dir, project_name) if project_version: dest_dir = os.path.join(dest_dir, project_version) dest_dir = dest_dir.replace('"', "").strip() FileSystem.MakeDirs(dest_dir) for content_type in [ "html", "Latex", "RTF", "man", "XML", ]: value = GetDoxygenValue( "GENERATE_{}".format(content_type), content, ) if not value or value.lower() != "yes": continue output_name = GetDoxygenValue( "{}_OUTPUT".format(content_type), content, ) source_fullpath = os.path.join( source_dir, output_name) dest_fullpath = os.path.join(dest_dir, output_name) if not os.path.isdir(source_fullpath): this_dm.stream.write( "ERROR: The directory '{}' does not exist.\n" .format(source_fullpath, ), ) this_dm.result = -1 continue 
FileSystem.RemoveTree(dest_fullpath) shutil.move(source_fullpath, dest_fullpath) results.setdefault( doxygen_file, OrderedDict())[content_type] = dest_fullpath # Tagfile value = GetDoxygenValue("GENERATE_TAGFILE", content) if value: source_fullpath = os.path.join(source_dir, value) dest_fullpath = os.path.join(dest_dir, value) if not os.path.isfile(source_fullpath): this_dm.stream.write( "ERROR: The filename '{}' does not exist.\n" .format(source_fullpath, ), ) this_dm.result = -1 continue FileSystem.RemoveFile(dest_fullpath) shutil.move(source_fullpath, dest_fullpath) results.setdefault( doxygen_file, OrderedDict())["tagfile"] = dest_fullpath # Generate the json file output_filename = os.path.join( output_dir, "{}.json".format(os.path.splitext(_script_name)[0]), ) dm.stream.write("Writing '{}'...".format(output_filename)) with dm.stream.DoneManager() as this_dm: with open(output_filename, "w") as f: json.dump(results, f) return dm.result
def Install( lib_name, pip_arg=None, output_stream=sys.stdout, ): """ A replacement for pip install. Will ensure that already installed python libraries are not modified in-place, but rather considered as new libraries for the currently activated repository. """ pip_args = pip_arg; del pip_arg repo_root = os.getenv(RepositoryBootstrapConstants.DE_REPO_ROOT_NAME) scm = GetSCM(repo_root, raise_on_error=False) if not scm: output_stream.write("ERROR: No SCM is active for '{}'.\n".format(repo_root)) return -1 if scm.HasWorkingChanges(repo_root) or scm.HasUntrackedWorkingChanges(repo_root): output_stream.write("ERROR: Changes were detected in '{}'; please revert/shelve these changes and run this script again.\n".format(repo_root)) return -1 with StreamDecorator(output_stream).DoneManager( line_prefix='', prefix="\nResults: ", suffix='\n', ) as dm: pip_command_line = 'pip install "{}"{}'.format( lib_name, '' if not pip_args else " {}".format(' '.join(pip_args)), ) dm.stream.write("\nDetecting libraries...") with dm.stream.DoneManager( suffix='\n', ) as this_dm: libraries = [] # ---------------------------------------------------------------------- def OnOutput(line): this_dm.stream.write(line) if not line.startswith("Installing collected packages: "): return True line = line[len("Installing collected packages: "):] for library in line.split(','): library = library.strip() if library: libraries.append(library) return False # ---------------------------------------------------------------------- this_dm.result = Process.Execute( pip_command_line, OnOutput, line_delimited_output=True, ) if libraries: this_dm.result = 0 if this_dm.result != 0: return this_dm.result if not libraries: return dm.result dm.stream.write("Reverting local changes...") with dm.stream.DoneManager( suffix='\n', ) as this_dm: this_dm.result = scm.Clean(repo_root, no_prompt=True)[0] if this_dm.result != 0: return this_dm.result dm.stream.write("Reverting existing libraries...") with dm.stream.DoneManager( suffix='\n', ) as this_dm: python_lib_dir = os.path.join( os.getenv(RepositoryBootstrapConstants.DE_REPO_GENERATED_NAME), PythonActivationActivity.Name, _EnvironmentSettings().LibraryDir, ) assert os.path.isdir(python_lib_dir), python_lib_dir library_items = {} for name in os.listdir(python_lib_dir): fullpath = os.path.join(python_lib_dir, name) if not os.path.isdir(fullpath): continue library_items[name.lower()] = CurrentShell.IsSymLink(fullpath) # ---------------------------------------------------------------------- def RemoveItem(name): name_lower = name.lower() if library_items[name_lower]: this_dm.stream.write("Removing '{}' for upgrade.\n".format(name)) os.remove(os.path.join(python_lib_dir, name)) else: this_dm.stream.write("Removing temporary '{}'.\n".format(name)) FileSystem.RemoveTree(os.path.join(python_lib_dir, name)) del library_items[name_lower] # ---------------------------------------------------------------------- for library in libraries: potential_library_names = [ library, ] # Sometimes, a library's name will begin with a 'Py' but be saved in # the file system without the 'Py' prefix. Account for that scenario. if library.lower().startswith("py"): potential_library_names.append(library[len("py"):]) for potential_library_name in potential_library_names: potential_library_name_lower = potential_library_name.lower() if potential_library_name_lower not in library_items: continue RemoveItem(potential_library_name) # Is there dist- or egg-info as well? 
info_items = [] for item in six.iterkeys(library_items): if ( item.startswith(potential_library_name_lower) and (item.endswith(".dist-info") or item.endswith(".egg-info")) ): info_items.append(item) for info_item in info_items: RemoveItem(info_item) break dm.stream.write("Installing...") with dm.stream.DoneManager() as this_dm: this_dm.result = Process.Execute(pip_command_line, this_dm.stream) return dm.result
def Build( force=False, no_squash=False, keep_temporary_image=False, output_stream=sys.stdout, preserve_ansi_escape_sequences=False, ): with StreamDecorator.GenerateAnsiSequenceStream( output_stream, preserve_ansi_escape_sequences=preserve_ansi_escape_sequences, ) as output_stream: with StreamDecorator(output_stream).DoneManager( line_prefix='', prefix="\nResults: ", suffix='\n', ) as dm: if not _VerifyDocker(): dm.stream.write("ERROR: Ensure that docker is installed and available within this environment.\n") dm.result = -1 return dm.result output_dir = os.path.join(calling_dir, "Generated") source_dir = os.path.join(output_dir, "Source") base_image_dir = os.path.join(output_dir, "Images", "Base") activated_image_dir = os.path.join(output_dir, "Images", "Activated") image_code_base = "/usr/lib/CommonEnvironmentImage" image_code_dir = "{}/{}".format( image_code_base, repository_name.replace('_', '/'), ) if no_now_tag: now_tag = None else: now = time.localtime() now_tag = "{0}.{1:02d}.{2:02d}".format(now[0], now[1], now[2]) # Create the base image dm.stream.write("Creating base image...") with dm.stream.DoneManager(suffix='\n') as base_dm: FileSystem.MakeDirs(base_image_dir) # Get the source scm = GetAnySCM(calling_dir) if not os.path.isdir(source_dir): base_dm.stream.write("Cloning source...") with base_dm.stream.DoneManager() as this_dm: # Ensure that the parent dir exists, but don't create the dir iteself. FileSystem.MakeDirs(os.path.dirname(source_dir)) # Enlist in the repo. temp_dir = CurrentShell.CreateTempDirectory() FileSystem.RemoveTree(temp_dir) this_dm.result, output = scm.Clone(repository_uri, temp_dir) if this_dm.result != 0: this_dm.stream.write(output) return this_dm.result os.rename(temp_dir, source_dir) has_changes = True else: # The repo exists base_dm.stream.write("Updating source...") with base_dm.stream.DoneManager() as this_dm: this_dm.result, output = scm.Pull(source_dir) if this_dm.result != 0: this_dm.stream.write(output) return this_dm.result has_changes = True if scm.Name == "Mercurial": if "no changes found" in output: has_changes = False elif scm.Name == "Git": if "Already up-to-date" in output: has_changes = False else: assert False, "Unsupported SCM: {}".format(scm.Name) if has_changes: this_dm.result, output = scm.Update(source_dir) if this_dm.result != 0: this_dm.stream.write(output) return this_dm.result # Filter the source filtered_source_dir = os.path.join(base_image_dir, "FilteredSource") if os.path.isdir(filtered_source_dir) and not force and not has_changes: base_dm.stream.write("No source changes were detected.\n") else: with base_dm.stream.SingleLineDoneManager( "Filtering source...", ) as this_dm: temp_dir = CurrentShell.CreateTempDirectory() FileSystem.RemoveTree(temp_dir) FileSystem.CopyTree( source_dir, temp_dir, excludes=[ "/.git", "/.gitignore", "/.hg", "/.hgignore", "*/Generated", "*/__pycache__", "*/Windows", "/*/src", "*.cmd", "*.ps1", "*.pyc", "*.pyo", ], optional_output_stream=this_dm.stream, ) FileSystem.RemoveTree(filtered_source_dir) os.rename(temp_dir, filtered_source_dir) base_dm.stream.write("Verifying Docker base image...") with base_dm.stream.DoneManager() as this_dm: this_dm.result, output = Process.Execute('docker image history "{}"'.format(base_docker_image)) if this_dm.result != 0: this_dm.stream.write(output) return this_dm.result base_dm.stream.write("Creating dockerfile...") with base_dm.stream.DoneManager(): setup_statement = "./Setup.sh{}".format('' if not repository_setup_configurations else ' {}'.format(' '.join([ 
'"/configuration={}"'.format(configuration) for configuration in repository_setup_configurations ]))) if repository_name == "Common_Environment": commands = textwrap.dedent( """\ RUN link /usr/bin/python3 /usr/bin/python RUN adduser --disabled-password --disabled-login --gecos "" "{username}" \\ && addgroup "{groupname}" \\ && adduser "{username}" "{groupname}" RUN cd {image_code_dir} \\ && {setup_statement} """).format( username=image_username, groupname=image_groupname, image_code_dir=image_code_dir, setup_statement=setup_statement, ) else: import io with io.open( os.path.join(base_image_dir, "SetupEnvironmentImpl.sh"), 'w', newline='\n', ) as f: f.write(textwrap.dedent( """\ #!/bin/bash . {image_code_base}/Common/Environment/Activate.sh python36 cd {image_code_dir} {setup_statement} rm --recursive {image_code_base}/Common/Environment/Generated/Linux/Default """).format( image_code_base=image_code_base, image_code_dir=image_code_dir, setup_statement=setup_statement, )) commands = textwrap.dedent( """\ COPY SetupEnvironmentImpl.sh /tmp/SetupEnvironmentImpl.sh RUN chmod a+x /tmp/SetupEnvironmentImpl.sh \\ && /tmp/SetupEnvironmentImpl.sh """) with open(os.path.join(base_image_dir, "Dockerfile"), 'w') as f: f.write(textwrap.dedent( """\ FROM {base_image} COPY FilteredSource {image_code_dir} {commands} RUN chown -R {username}:{groupname} {image_code_dir} \\ && chmod g-s {image_code_dir}/Generated/Linux \\ && chmod 0750 {image_code_dir}/Generated/Linux \\ && chmod -R o-rwx {image_code_dir} # Cleanup RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* LABEL maintainer="{maintainer}" # By default, run a bash prompt as the source code user WORKDIR {image_code_dir} CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ] """).format( base_image=base_docker_image, commands=commands, username=image_username, groupname=image_groupname, image_code_dir=image_code_dir, maintainer=maintainer, )) base_dm.stream.write("Building Docker image...") with base_dm.stream.DoneManager() as this_dm: tags = [ "base", "base_latest", ] if now_tag: tags.append("base_{}".format(now_tag)) command_line = 'docker build "{dir}" {tags}{squash}{force}' \ .format( dir=base_image_dir, tags=' '.join([ '--tag "{}:{}"'.format(docker_image_name, tag) for tag in tags ]), squash='' if no_squash else " --squash", force=" --no-cache" if force else '', ) this_dm.result = Process.Execute(command_line, this_dm.stream) if this_dm.result != 0: return this_dm.result if not no_activated_image: # Create the activated image(s) dm.stream.write("Creating activated image(s)...") with dm.stream.DoneManager() as all_activated_dm: for index, configuration in enumerate(repository_activation_configurations): all_activated_dm.stream.write("Creating activated image{} ({} of {})...".format( '' if not configuration else " for the configuration '{}'".format(configuration), index + 1, len(repository_activation_configurations), )) with all_activated_dm.stream.DoneManager(suffix='\n') as activated_dm: this_activated_dir = os.path.join(activated_image_dir, configuration or "Default") FileSystem.MakeDirs(this_activated_dir) unique_id = str(uuid.uuid4()) temp_image_name = "{}_image".format(unique_id) temp_container_name = "{}_container".format(unique_id) # Activate the image so we can extract the changes activated_dm.stream.write("Activating...") with activated_dm.stream.DoneManager(suffix='\n') as this_dm: command_line = 'docker run -it --name "{container_name}" "{image_name}:base_latest" /sbin/my_init -- /sbin/setuser "{username}" bash -c 
"cd {image_code_dir} && . ./Activate.sh {configuration} && pushd {image_code_base}/Common/Environment && python -m RepositoryBootstrap.EnvironmentDiffs After /decorate' \ .format( container_name=temp_container_name, image_name=docker_image_name, configuration=configuration or '', username=image_username, image_code_dir=image_code_dir, image_code_base=image_code_base, ) sink = six.moves.StringIO() this_dm.result = Process.Execute(command_line, StreamDecorator([ sink, this_dm.stream, ])) if this_dm.result != 0: return this_dm.result sink = sink.getvalue() activated_dm.stream.write("Extracting enviroment diffs...") with activated_dm.stream.DoneManager(): match = re.search( textwrap.dedent( """\ //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// (?P<content>.+?) //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// """), sink, re.DOTALL | re.MULTILINE, ) assert match, sink environment_diffs = json.loads(match.group("content")) # ---------------------------------------------------------------------- def RemoveTempContainer(): activated_dm.stream.write("Removing temp container...") with activated_dm.stream.DoneManager() as this_dm: this_dm.result, output = Process.Execute('docker rm "{}"'.format(temp_container_name)) if this_dm.result != 0: this_dm.stream.write(output) # ---------------------------------------------------------------------- with CallOnExit(RemoveTempContainer): # Commit the activated image activated_dm.stream.write("Committing container...") with activated_dm.stream.DoneManager() as this_dm: command_line = 'docker commit "{container_name}" "{image_name}"' \ .format( container_name=temp_container_name, image_name=temp_image_name, ) this_dm.result, output = Process.Execute(command_line) if this_dm.result != 0: this_dm.stream.write(output) return this_dm.result # ---------------------------------------------------------------------- def RemoveTempImage(): if keep_temporary_image: return activated_dm.stream.write("Removing temp image...") with activated_dm.stream.DoneManager() as this_dm: this_dm.result, output = Process.Execute('docker rmi "{}"'.format(temp_image_name)) if this_dm.result != 0: this_dm.stream.write(output) # ---------------------------------------------------------------------- with CallOnExit(RemoveTempImage): # Create a new dockerfile. The temp image has all the harddrive changes # made during activation, but doesn't have the environment changes. 
activated_dm.stream.write("Creating dockerfile...") with activated_dm.stream.DoneManager() as this_dm: with open(os.path.join(this_activated_dir, "Dockerfile"), 'w') as f: f.write(textwrap.dedent( """\ FROM {temp_image_name} ENV {env} # By default, run a bash prompt as the source code user CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ] LABEL maintainer="{maintainer}" """).format( temp_image_name=temp_image_name, env='\\\n'.join([ ' {}={} '.format(k, v) for k, v in six.iteritems(environment_diffs) ]), image_code_dir=image_code_dir, maintainer=maintainer, username=image_username, )) activated_dm.stream.write("Building Docker image...") with activated_dm.stream.DoneManager() as this_dm: tags = [ "latest", ] if now_tag: tags.append(now_tag) if len(repository_activation_configurations) > 1: tags = [ "{}_{}".format(configuration, tag) for tag in tags ] tags.insert(0, configuration) command_line = 'docker build "{dir}" {tags}{squash}{force}' \ .format( dir=this_activated_dir, tags=' '.join([ '--tag "{}:{}"'.format(docker_image_name, tag) for tag in tags ]), squash='', # <squash is not supported here> '' if no_squash else " --squash", force=" --no-cache" if force else '', ) this_dm.result = Process.Execute(command_line, this_dm.stream) if this_dm.result != 0: return this_dm.result return dm.result
def _VerifyDocker():
    result, output = Process.Execute("docker version")

    return "Client:" in output and "Server:" in output
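# ----------------------------------------------------------------------
# Illustration (not part of the original source): `docker version` prints a
# "Client:" section and, when the daemon is reachable, a "Server:" section,
# which is what the check above relies on. A typical (abbreviated) output:
#
#   Client:
#    Version:    24.0.0
#    ...
#   Server:
#    Engine:
#     Version:   24.0.0
#    ...
#
# Version numbers above are made up; only the section headers matter here.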