def _Compile(cls, context, script_filename, output_stream):
    """Runs ``python <script> build_exe`` and moves the generated content into
    ``context["output_dir"]``.

    BUG FIX: the original read ``context["distutil_args"]`` in the guard but
    ``context["distutils_args"]`` in the join — one of the two lookups always
    raised KeyError. Both now use "distutils_args".

    Args:
        context (Dict[str, Any]): must contain "distutils_args" (sequence of
            extra command-line args) and "output_dir".
        script_filename (str): setup-style script to invoke.
        output_stream (file-like): receives subprocess output.

    Returns (int): the subprocess return code.
    """
    distutils_args = context["distutils_args"]

    command_line = 'python "{}" build_exe{}'.format(
        script_filename,
        '' if not distutils_args else " {}".format(
            ' '.join(['"{}"'.format(arg) for arg in distutils_args]),
        ),
    )

    result = Process.Execute(command_line, output_stream)

    if result == 0:
        if os.path.isdir("build"):
            # build_exe creates exactly one subdir under "build"
            subdirs = os.listdir("build")
            assert len(subdirs) == 1, subdirs

            source_dir = os.path.join("build", subdirs[0])

            # Remove empty dirs before publishing the output
            to_remove = []

            for root, dirs, _ in os.walk(source_dir):
                for dir in dirs:
                    fullpath = os.path.join(root, dir)
                    if os.path.isdir(fullpath) and not os.listdir(fullpath):
                        to_remove.append(fullpath)

            for dir in to_remove:
                FileSystem.RemoveTree(dir)

            # Replace any previous output with the fresh build
            FileSystem.RemoveTree(context["output_dir"])
            shutil.move(source_dir, context["output_dir"])

            FileSystem.RemoveTree("build")

    return result
def Clean(
    force=False,
    output_stream=sys.stdout,
):
    """Removes the boost 'stage' and 'build' directories; requires '/force'."""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        candidates = (os.path.join(boost_root, name) for name in ["stage", "build"])

        for this_dir in candidates:
            if not os.path.isdir(this_dir):
                continue

            if not force:
                # Deleting is destructive; make the caller opt in explicitly
                dm.stream.write(
                    "Call this method with the '/force' flag to remove '{}'.\n".format(this_dir),
                )
                continue

            dm.stream.write("Removing '{}'...".format(this_dir))
            with dm.stream.DoneManager():
                FileSystem.RemoveTree(this_dir)

        return dm.result
def Clean(context, status_stream):
    """Cleans content previously generated.

    The default behavior is to delete the output directory.

    Args:
        context (Dict[string, Any]): The Context to use when generating code.
        status_stream (file-like): Output stream used to generate short status messages.

    Returns (int): Return code.
    """
    assert "output_dir" in context, context
    output_dir = context["output_dir"]

    if os.path.isdir(output_dir):
        status_stream.write("Removing '{}'...".format(output_dir))
        with status_stream.DoneManager():
            FileSystem.RemoveTree(output_dir)
    else:
        status_stream.write(
            "The output directory '{}' does not exist.\n".format(output_dir),
        )

    return 0
def Clean(
    output_stream=sys.stdout,
):
    """Removes the 'GeneratedCode' directory that lives beside this script's parent."""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        generated_dir = os.path.join(_script_dir, "..", "GeneratedCode")
        FileSystem.RemoveTree(generated_dir)

        return dm.result
def _CopyData(temp_directory, output_dir, output_stream):
    """Replaces <output_dir>/Data with the Data directory produced under temp_directory."""
    destination = os.path.join(output_dir, "Data")

    FileSystem.RemoveTree(destination)
    FileSystem.CopyTree(os.path.join(temp_directory, "Data"), destination)

    return 0
def CleanupTempDir():
    """Deletes the temp dir, or reports its location when the caller asked to
    keep it (closure over keep_temp_dir / dm / temp_directory)."""
    if not keep_temp_dir:
        FileSystem.RemoveTree(temp_directory)
        return

    dm.stream.write(
        "\nCMake output has been written to '{}'.\n".format(temp_directory),
    )
def Impl(source_dir_or_filename, dest_dir):
    """Moves a file or directory into dest_dir, overwriting what is there."""
    if not os.path.isdir(source_dir_or_filename):
        # Plain file: ensure the destination exists, then move it in
        FileSystem.MakeDirs(dest_dir)
        shutil.move(source_dir_or_filename, dest_dir)
        return

    # shutil.move won't overwrite files, so use distutils (which will)
    import distutils.dir_util

    destination = os.path.join(dest_dir, os.path.basename(source_dir_or_filename))
    distutils.dir_util.copy_tree(source_dir_or_filename, destination)
    FileSystem.RemoveTree(source_dir_or_filename)
def CommandLineCleanOutputDir(output_dir, output_stream):
    """Command-line helper that removes output_dir when it exists."""
    stream = StreamDecorator(output_stream)

    if os.path.isdir(output_dir):
        stream.write("Removing '{}'...".format(output_dir))
        with stream.DoneManager():
            FileSystem.RemoveTree(output_dir)
    else:
        stream.write("'{}' does not exist.\n".format(output_dir))

    return 0
def RemoveItem(name):
    """Removes a library item from disk and forgets it (closure over
    library_items / this_dm / python_lib_dir).

    A truthy entry in library_items marks a tracked file (removed for upgrade);
    a falsy entry marks temporary content (removed as a tree).
    """
    key = name.lower()
    fullpath = os.path.join(python_lib_dir, name)

    if library_items[key]:
        this_dm.stream.write("Removing '{}' for upgrade.\n".format(name))
        os.remove(fullpath)
    else:
        this_dm.stream.write("Removing temporary '{}'.\n".format(name))
        FileSystem.RemoveTree(fullpath)

    del library_items[key]
def Clean(
    output_stream=sys.stdout,
):
    """Cleans previously built content."""
    target = os.path.join(calling_dir, "Generated")

    if os.path.isdir(target):
        FileSystem.RemoveTree(target)
        output_stream.write("'{}' has been removed.\n".format(target))
    else:
        output_stream.write("'{}' does not exist.\n".format(target))

    return 0
def Clean(
    output_dir,
    output_stream=sys.stdout,
):
    """Removes 'output_dir' (and everything under it) when present."""
    if os.path.isdir(output_dir):
        output_stream.write("Removing '{}'...".format(output_dir))
        with StreamDecorator(output_stream).DoneManager():
            FileSystem.RemoveTree(output_dir)
    else:
        output_stream.write("'{}' does not exist.\n".format(output_dir))

    return 0
def Clean(
    output_stream=sys.stdout,
):
    """Removes the 'Generated' directory next to this script."""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        target = os.path.join(_script_dir, "Generated")

        if not os.path.isdir(target):
            dm.stream.write("The output directory does not exist.\n")
            return dm.result

        dm.stream.write("Removing '{}'...".format(target))
        with dm.stream.DoneManager():
            FileSystem.RemoveTree(target)

        return dm.result
def Clean(
    configuration,
    output_dir,
    output_stream=sys.stdout,
):
    """Cleans previously built content"""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        if os.path.isdir(output_dir):
            dm.stream.write("Removing '{}'...".format(output_dir))
            with dm.stream.DoneManager():
                FileSystem.RemoveTree(output_dir)
        else:
            dm.stream.write("\nNothing to clean.\n")

        return dm.result
def EntryPoint(
    zipped_input_filename,
    output_stream=sys.stdout,
):
    """Generates JSON files based on data previously pickled"""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        output_dir = os.path.join(_script_dir, "..", "GeneratedCode")

        # Start from a clean slate
        FileSystem.RemoveTree(output_dir)
        FileSystem.MakeDirs(output_dir)

        df = _holiday_data_loader(zipped_input_filename)

        # One JSON file per country/region, holding parallel Date/Holiday lists
        for country in set(df['countryOrRegion']):
            rows = df.loc[df['countryOrRegion'] == country]

            payload = {}
            payload.update({"Date": [int(stamp.timestamp()) for stamp in list(rows['date'])]})
            payload.update({"Holiday": list(rows['normalizeHolidayName'])})

            with open(os.path.join(output_dir, "{}.json".format(country)), 'w') as f:
                json.dump(payload, f)

        return dm.result
def _BuildGenerator(
    source_dir,
    configuration,
    generator=_DEFAULT_GENERATOR,
):
    """Configures and builds 'source_dir' with cmake inside a temp directory,
    yielding (build_dir, result, output); the temp dir is removed afterwards."""
    temp_dir = CurrentShell.CreateTempDirectory()

    with CallOnExit(lambda: FileSystem.RemoveTree(temp_dir)):
        generator_fragment = '-G "{}" '.format(generator) if generator else ""

        configure_command = 'cmake {generator}-S "{source_dir}" -B "{build_dir}" -DCppCommon_CMAKE_DEBUG_OUTPUT=On -DCMAKE_BUILD_TYPE={config}'.format(
            generator=generator_fragment,
            source_dir=source_dir,
            build_dir=temp_dir,
            config=configuration,
        )

        result, output = Process.Execute(configure_command)

        # Only build when configuration succeeded; otherwise yield its failure
        if result == 0:
            result, output = Process.Execute('cmake --build "{}"'.format(temp_dir))

        yield temp_dir, result, output
def Clean(
    output_stream=sys.stdout,
):
    """Removes the output directory associated with every registered plugin."""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        total = len(_PLUGINS)

        for index, (plugin, output_dir) in enumerate(_PLUGINS):
            output_dir = os.path.realpath(output_dir)

            dm.stream.write(
                "Processing '{}' ({} of {})...".format(plugin, index + 1, total),
            )
            with dm.stream.DoneManager() as this_dm:
                if not os.path.isdir(output_dir):
                    this_dm.stream.write("'{}' does not exist.\n".format(output_dir))
                    continue

                this_dm.stream.write("Removing '{}'...".format(output_dir))
                with this_dm.stream.DoneManager():
                    FileSystem.RemoveTree(output_dir)

        return dm.result
def EntryPoint(
    code_dir_or_doxygen_filename,
    output_dir,
    output_stream=sys.stdout,
    verbose=False,
):
    """Runs doxygen over one or more doxygen configuration files and organizes
    the generated content under 'output_dir', writing a JSON index of results.

    BUG FIX: 'os.pth.join' -> 'os.path.join' when resolving OUTPUT_DIRECTORY;
    the original raised AttributeError for any doxygen file that set that tag.

    Args:
        code_dir_or_doxygen_filename: a doxygen file, or a directory to search
            for them (files with a sibling "<name><ignore ext>" marker are skipped).
        output_dir: destination root; content is placed under
            <output_dir>/<project name>[/<project version>].
        output_stream: stream for status output.
        verbose: echo doxygen output as it is produced.

    Returns (int): 0 on success.
    """
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Get the doxygen files
        doxygen_files = []

        if os.path.isfile(code_dir_or_doxygen_filename):
            doxygen_files.append(code_dir_or_doxygen_filename)
        else:
            dm.stream.write(
                "Searching for doxygen files in '{}'...".format(
                    code_dir_or_doxygen_filename,
                ),
            )
            with dm.stream.DoneManager(
                done_suffix=lambda: "{} found".format(
                    inflect.no("file", len(doxygen_files)),
                ),
                suffix="\n",
            ) as this_dm:
                for fullpath in FileSystem.WalkFiles(
                    code_dir_or_doxygen_filename,
                    include_file_extensions=[DOXYGEN_EXTENSION],
                    traverse_exclude_dir_names=FileSystem.CODE_EXCLUDE_DIR_NAMES,
                ):
                    # Skip files that have an "ignore" marker sitting beside them
                    if not os.path.isfile(
                        "{}{}".format(
                            os.path.splitext(fullpath)[0],
                            DOXYGEN_EXTENSION_IGNORE,
                        ),
                    ):
                        doxygen_files.append(fullpath)

        if not doxygen_files:
            return dm.result

        # Process the files

        # ----------------------------------------------------------------------
        class GetDoxygenValueError(KeyError):
            """Exception raised when a doxygen tag is not found"""
            pass

        # ----------------------------------------------------------------------
        def GetDoxygenValue(tag, content):
            # Tags look like "TAG = value" on their own line
            match = re.search(
                r"{}[ \t]*=[ \t]*(?P<value>.*?)\r?\n".format(re.escape(tag)),
                content,
                re.IGNORECASE,
            )

            if not match:
                raise GetDoxygenValueError(
                    "Unable to find '{}' in the doxygen configuration file".format(tag),
                )

            return match.group("value")

        # ----------------------------------------------------------------------

        results = OrderedDict()

        dm.stream.write(
            "Processing {}...".format(
                inflect.no("doxygen file", len(doxygen_files))),
        )
        with dm.stream.DoneManager(suffix="\n") as doxygen_dm:
            for index, doxygen_file in enumerate(doxygen_files):
                doxygen_dm.stream.write(
                    "Processing '{}' ({} of {})...".format(
                        doxygen_file,
                        index + 1,
                        len(doxygen_files),
                    ),
                )
                with doxygen_dm.stream.DoneManager() as this_dm:
                    # doxygen resolves relative paths against the cwd
                    prev_dir = os.getcwd()
                    os.chdir(os.path.dirname(doxygen_file))

                    with CallOnExit(lambda: os.chdir(prev_dir)):
                        # Execute
                        this_dm.result = Process.Execute(
                            'dot -c && doxygen "{}"'.format(doxygen_file),
                            StreamDecorator(this_dm.stream if verbose else None),
                        )
                        if this_dm.result != 0:
                            continue

                        # Extract data from the doxygen file
                        with open(doxygen_file) as f:
                            content = f.read()

                        project_name = GetDoxygenValue("PROJECT_NAME", content)

                        # Older doxygen files don't have a PROJECT_VERSION
                        try:
                            project_version = GetDoxygenValue("PROJECT_VERSION", content)
                        except GetDoxygenValueError:
                            project_version = GetDoxygenValue("PROJECT_NUMBER", content)

                        output_directory = GetDoxygenValue("OUTPUT_DIRECTORY", content)

                        source_dir = os.path.dirname(doxygen_file)
                        if output_directory:
                            # BUG FIX: was 'os.pth.join' (AttributeError)
                            output_directory = os.path.join(source_dir, output_directory)

                        dest_dir = os.path.join(output_dir, project_name)
                        if project_version:
                            dest_dir = os.path.join(dest_dir, project_version)

                        dest_dir = dest_dir.replace('"', "").strip()

                        FileSystem.MakeDirs(dest_dir)

                        for content_type in [
                            "html",
                            "Latex",
                            "RTF",
                            "man",
                            "XML",
                        ]:
                            value = GetDoxygenValue(
                                "GENERATE_{}".format(content_type),
                                content,
                            )
                            if not value or value.lower() != "yes":
                                continue

                            output_name = GetDoxygenValue(
                                "{}_OUTPUT".format(content_type),
                                content,
                            )

                            source_fullpath = os.path.join(source_dir, output_name)
                            dest_fullpath = os.path.join(dest_dir, output_name)

                            if not os.path.isdir(source_fullpath):
                                this_dm.stream.write(
                                    "ERROR: The directory '{}' does not exist.\n".format(
                                        source_fullpath,
                                    ),
                                )
                                this_dm.result = -1
                                continue

                            FileSystem.RemoveTree(dest_fullpath)
                            shutil.move(source_fullpath, dest_fullpath)

                            results.setdefault(doxygen_file, OrderedDict())[content_type] = dest_fullpath

                        # Tagfile
                        value = GetDoxygenValue("GENERATE_TAGFILE", content)
                        if value:
                            source_fullpath = os.path.join(source_dir, value)
                            dest_fullpath = os.path.join(dest_dir, value)

                            if not os.path.isfile(source_fullpath):
                                this_dm.stream.write(
                                    "ERROR: The filename '{}' does not exist.\n".format(
                                        source_fullpath,
                                    ),
                                )
                                this_dm.result = -1
                                continue

                            FileSystem.RemoveFile(dest_fullpath)
                            shutil.move(source_fullpath, dest_fullpath)

                            results.setdefault(doxygen_file, OrderedDict())["tagfile"] = dest_fullpath

        # Generate the json file
        output_filename = os.path.join(
            output_dir,
            "{}.json".format(os.path.splitext(_script_name)[0]),
        )

        dm.stream.write("Writing '{}'...".format(output_filename))
        with dm.stream.DoneManager() as this_dm:
            with open(output_filename, "w") as f:
                json.dump(results, f)

        return dm.result
def Build(
    force=False,
    no_squash=False,
    keep_temporary_image=False,
    output_stream=sys.stdout,
    preserve_ansi_escape_sequences=False,
):
    """Builds the repository's base docker image and, unless disabled, one
    "activated" image per activation configuration.

    Args:
        force: rebuild even when no source changes were detected (adds
            --no-cache to docker build).
        no_squash: suppress docker's --squash flag for the base image.
        keep_temporary_image: keep the intermediate committed image for debugging.
        output_stream: stream for status output.
        preserve_ansi_escape_sequences: forwarded to the ANSI stream decorator.

    Returns (int): 0 on success; the first failing subprocess/SCM result otherwise.

    NOTE(review): relies on module-level configuration not visible here
    (calling_dir, repository_name, repository_uri, base_docker_image,
    docker_image_name, image_username, image_groupname, maintainer, no_now_tag,
    no_activated_image, repository_setup_configurations,
    repository_activation_configurations) — confirm against the full file.
    """
    with StreamDecorator.GenerateAnsiSequenceStream(
        output_stream,
        preserve_ansi_escape_sequences=preserve_ansi_escape_sequences,
    ) as output_stream:
        with StreamDecorator(output_stream).DoneManager(
            line_prefix='',
            prefix="\nResults: ",
            suffix='\n',
        ) as dm:
            if not _VerifyDocker():
                dm.stream.write("ERROR: Ensure that docker is installed and available within this environment.\n")
                dm.result = -1

                return dm.result

            output_dir = os.path.join(calling_dir, "Generated")

            source_dir = os.path.join(output_dir, "Source")
            base_image_dir = os.path.join(output_dir, "Images", "Base")
            activated_image_dir = os.path.join(output_dir, "Images", "Activated")

            # Where the repository's code lives inside the image
            image_code_base = "/usr/lib/CommonEnvironmentImage"
            image_code_dir = "{}/{}".format(
                image_code_base,
                repository_name.replace('_', '/'),
            )

            if no_now_tag:
                now_tag = None
            else:
                # Date-based tag, e.g. "2020.01.31"
                now = time.localtime()
                now_tag = "{0}.{1:02d}.{2:02d}".format(now[0], now[1], now[2])

            # Create the base image
            dm.stream.write("Creating base image...")
            with dm.stream.DoneManager(suffix='\n') as base_dm:
                FileSystem.MakeDirs(base_image_dir)

                # Get the source
                scm = GetAnySCM(calling_dir)

                if not os.path.isdir(source_dir):
                    base_dm.stream.write("Cloning source...")
                    with base_dm.stream.DoneManager() as this_dm:
                        # Ensure that the parent dir exists, but don't create the dir iteself.
                        FileSystem.MakeDirs(os.path.dirname(source_dir))

                        # Enlist in the repo. Clone into a temp dir and rename so a
                        # partial clone never masquerades as a valid enlistment.
                        temp_dir = CurrentShell.CreateTempDirectory()
                        FileSystem.RemoveTree(temp_dir)

                        this_dm.result, output = scm.Clone(repository_uri, temp_dir)
                        if this_dm.result != 0:
                            this_dm.stream.write(output)
                            return this_dm.result

                        os.rename(temp_dir, source_dir)

                    has_changes = True
                else:
                    # The repo exists
                    base_dm.stream.write("Updating source...")
                    with base_dm.stream.DoneManager() as this_dm:
                        this_dm.result, output = scm.Pull(source_dir)
                        if this_dm.result != 0:
                            this_dm.stream.write(output)
                            return this_dm.result

                        has_changes = True

                        # Detect a no-op pull from the SCM's textual output
                        if scm.Name == "Mercurial":
                            if "no changes found" in output:
                                has_changes = False
                        elif scm.Name == "Git":
                            if "Already up-to-date" in output:
                                has_changes = False
                        else:
                            assert False, "Unsupported SCM: {}".format(scm.Name)

                        if has_changes:
                            this_dm.result, output = scm.Update(source_dir)
                            if this_dm.result != 0:
                                this_dm.stream.write(output)
                                return this_dm.result

                # Filter the source
                filtered_source_dir = os.path.join(base_image_dir, "FilteredSource")

                if os.path.isdir(filtered_source_dir) and not force and not has_changes:
                    base_dm.stream.write("No source changes were detected.\n")
                else:
                    with base_dm.stream.SingleLineDoneManager(
                        "Filtering source...",
                    ) as this_dm:
                        # Copy into a temp dir (minus SCM/build artifacts), then
                        # swap it into place atomically via rename
                        temp_dir = CurrentShell.CreateTempDirectory()
                        FileSystem.RemoveTree(temp_dir)

                        FileSystem.CopyTree(
                            source_dir,
                            temp_dir,
                            excludes=[
                                "/.git",
                                "/.gitignore",
                                "/.hg",
                                "/.hgignore",
                                "*/Generated",
                                "*/__pycache__",
                                "*/Windows",
                                "/*/src",
                                "*.cmd",
                                "*.ps1",
                                "*.pyc",
                                "*.pyo",
                            ],
                            optional_output_stream=this_dm.stream,
                        )

                        FileSystem.RemoveTree(filtered_source_dir)
                        os.rename(temp_dir, filtered_source_dir)

                base_dm.stream.write("Verifying Docker base image...")
                with base_dm.stream.DoneManager() as this_dm:
                    # Fails fast if the base image is not present locally
                    this_dm.result, output = Process.Execute('docker image history "{}"'.format(base_docker_image))
                    if this_dm.result != 0:
                        this_dm.stream.write(output)
                        return this_dm.result

                base_dm.stream.write("Creating dockerfile...")
                with base_dm.stream.DoneManager():
                    setup_statement = "./Setup.sh{}".format('' if not repository_setup_configurations else ' {}'.format(' '.join([
                        '"/configuration={}"'.format(configuration)
                        for configuration in repository_setup_configurations
                    ])))

                    if repository_name == "Common_Environment":
                        # The bootstrap repo can be set up directly
                        commands = textwrap.dedent(
                            """\
                            RUN link /usr/bin/python3 /usr/bin/python

                            RUN adduser --disabled-password --disabled-login --gecos "" "{username}" \\
                             && addgroup "{groupname}" \\
                             && adduser "{username}" "{groupname}"

                            RUN cd {image_code_dir} \\
                             && {setup_statement}
                            """).format(
                                username=image_username,
                                groupname=image_groupname,
                                image_code_dir=image_code_dir,
                                setup_statement=setup_statement,
                            )
                    else:
                        # Dependent repos must activate Common_Environment first;
                        # generate a helper script invoked from the Dockerfile.
                        import io

                        with io.open(
                            os.path.join(base_image_dir, "SetupEnvironmentImpl.sh"),
                            'w',
                            newline='\n',
                        ) as f:
                            f.write(textwrap.dedent(
                                """\
                                #!/bin/bash
                                . {image_code_base}/Common/Environment/Activate.sh python36
                                cd {image_code_dir}
                                {setup_statement}
                                rm --recursive {image_code_base}/Common/Environment/Generated/Linux/Default
                                """).format(
                                    image_code_base=image_code_base,
                                    image_code_dir=image_code_dir,
                                    setup_statement=setup_statement,
                                ))

                        commands = textwrap.dedent(
                            """\
                            COPY SetupEnvironmentImpl.sh /tmp/SetupEnvironmentImpl.sh

                            RUN chmod a+x /tmp/SetupEnvironmentImpl.sh \\
                             && /tmp/SetupEnvironmentImpl.sh
                            """)

                    with open(os.path.join(base_image_dir, "Dockerfile"), 'w') as f:
                        f.write(textwrap.dedent(
                            """\
                            FROM {base_image}

                            COPY FilteredSource {image_code_dir}

                            {commands}

                            RUN chown -R {username}:{groupname} {image_code_dir} \\
                             && chmod g-s {image_code_dir}/Generated/Linux \\
                             && chmod 0750 {image_code_dir}/Generated/Linux \\
                             && chmod -R o-rwx {image_code_dir}

                            # Cleanup
                            RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

                            LABEL maintainer="{maintainer}"

                            # By default, run a bash prompt as the source code user
                            WORKDIR {image_code_dir}
                            CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ]
                            """).format(
                                base_image=base_docker_image,
                                commands=commands,
                                username=image_username,
                                groupname=image_groupname,
                                image_code_dir=image_code_dir,
                                maintainer=maintainer,
                            ))

                base_dm.stream.write("Building Docker image...")
                with base_dm.stream.DoneManager() as this_dm:
                    tags = [
                        "base",
                        "base_latest",
                    ]

                    if now_tag:
                        tags.append("base_{}".format(now_tag))

                    command_line = 'docker build "{dir}" {tags}{squash}{force}' \
                        .format(
                            dir=base_image_dir,
                            tags=' '.join([
                                '--tag "{}:{}"'.format(docker_image_name, tag)
                                for tag in tags
                            ]),
                            squash='' if no_squash else " --squash",
                            force=" --no-cache" if force else '',
                        )

                    this_dm.result = Process.Execute(command_line, this_dm.stream)
                    if this_dm.result != 0:
                        return this_dm.result

            if not no_activated_image:
                # Create the activated image(s)
                dm.stream.write("Creating activated image(s)...")
                with dm.stream.DoneManager() as all_activated_dm:
                    for index, configuration in enumerate(repository_activation_configurations):
                        all_activated_dm.stream.write("Creating activated image{} ({} of {})...".format(
                            '' if not configuration else " for the configuration '{}'".format(configuration),
                            index + 1,
                            len(repository_activation_configurations),
                        ))
                        with all_activated_dm.stream.DoneManager(suffix='\n') as activated_dm:
                            this_activated_dir = os.path.join(activated_image_dir, configuration or "Default")
                            FileSystem.MakeDirs(this_activated_dir)

                            # Unique names so concurrent/failed runs can't collide
                            unique_id = str(uuid.uuid4())

                            temp_image_name = "{}_image".format(unique_id)
                            temp_container_name = "{}_container".format(unique_id)

                            # Activate the image so we can extract the changes
                            activated_dm.stream.write("Activating...")
                            with activated_dm.stream.DoneManager(suffix='\n') as this_dm:
                                # NOTE(review): the bash -c argument below does not appear to
                                # close its double quote before the final single quote —
                                # confirm against the original source.
                                command_line = 'docker run -it --name "{container_name}" "{image_name}:base_latest" /sbin/my_init -- /sbin/setuser "{username}" bash -c "cd {image_code_dir} && . ./Activate.sh {configuration} && pushd {image_code_base}/Common/Environment && python -m RepositoryBootstrap.EnvironmentDiffs After /decorate' \
                                    .format(
                                        container_name=temp_container_name,
                                        image_name=docker_image_name,
                                        configuration=configuration or '',
                                        username=image_username,
                                        image_code_dir=image_code_dir,
                                        image_code_base=image_code_base,
                                    )

                                # Tee output into a sink so the env diffs can be parsed below
                                sink = six.moves.StringIO()

                                this_dm.result = Process.Execute(command_line, StreamDecorator([
                                    sink,
                                    this_dm.stream,
                                ]))
                                if this_dm.result != 0:
                                    return this_dm.result

                                sink = sink.getvalue()

                            activated_dm.stream.write("Extracting enviroment diffs...")
                            with activated_dm.stream.DoneManager():
                                # The container prints the activated environment between
                                # these marker lines; parse the payload as JSON.
                                match = re.search(
                                    textwrap.dedent(
                                        """\
                                        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
                                        (?P<content>.+?)
                                        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
                                        """),
                                    sink,
                                    re.DOTALL | re.MULTILINE,
                                )
                                assert match, sink

                                environment_diffs = json.loads(match.group("content"))

                            # ----------------------------------------------------------------------
                            def RemoveTempContainer():
                                # Best effort: failures are reported but not fatal
                                activated_dm.stream.write("Removing temp container...")
                                with activated_dm.stream.DoneManager() as this_dm:
                                    this_dm.result, output = Process.Execute('docker rm "{}"'.format(temp_container_name))
                                    if this_dm.result != 0:
                                        this_dm.stream.write(output)

                            # ----------------------------------------------------------------------

                            with CallOnExit(RemoveTempContainer):
                                # Commit the activated image
                                activated_dm.stream.write("Committing container...")
                                with activated_dm.stream.DoneManager() as this_dm:
                                    command_line = 'docker commit "{container_name}" "{image_name}"' \
                                        .format(
                                            container_name=temp_container_name,
                                            image_name=temp_image_name,
                                        )

                                    this_dm.result, output = Process.Execute(command_line)
                                    if this_dm.result != 0:
                                        this_dm.stream.write(output)
                                        return this_dm.result

                                # ----------------------------------------------------------------------
                                def RemoveTempImage():
                                    if keep_temporary_image:
                                        return

                                    # Best effort: failures are reported but not fatal
                                    activated_dm.stream.write("Removing temp image...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        this_dm.result, output = Process.Execute('docker rmi "{}"'.format(temp_image_name))
                                        if this_dm.result != 0:
                                            this_dm.stream.write(output)

                                # ----------------------------------------------------------------------

                                with CallOnExit(RemoveTempImage):
                                    # Create a new dockerfile. The temp image has all the harddrive changes
                                    # made during activation, but doesn't have the environment changes.
                                    activated_dm.stream.write("Creating dockerfile...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        with open(os.path.join(this_activated_dir, "Dockerfile"), 'w') as f:
                                            f.write(textwrap.dedent(
                                                """\
                                                FROM {temp_image_name}

                                                ENV {env}

                                                # By default, run a bash prompt as the source code user
                                                CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ]

                                                LABEL maintainer="{maintainer}"
                                                """).format(
                                                    temp_image_name=temp_image_name,
                                                    env='\\\n'.join([
                                                        ' {}={} '.format(k, v)
                                                        for k, v in six.iteritems(environment_diffs)
                                                    ]),
                                                    image_code_dir=image_code_dir,
                                                    maintainer=maintainer,
                                                    username=image_username,
                                                ))

                                    activated_dm.stream.write("Building Docker image...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        tags = [
                                            "latest",
                                        ]

                                        if now_tag:
                                            tags.append(now_tag)

                                        if len(repository_activation_configurations) > 1:
                                            # Multiple configurations: prefix tags with the config name
                                            tags = [
                                                "{}_{}".format(configuration, tag)
                                                for tag in tags
                                            ]
                                            tags.insert(0, configuration)

                                        command_line = 'docker build "{dir}" {tags}{squash}{force}' \
                                            .format(
                                                dir=this_activated_dir,
                                                tags=' '.join([
                                                    '--tag "{}:{}"'.format(docker_image_name, tag)
                                                    for tag in tags
                                                ]),
                                                squash='',  # <squash is not supported here> '' if no_squash else " --squash",
                                                force=" --no-cache" if force else '',
                                            )

                                        this_dm.result = Process.Execute(command_line, this_dm.stream)
                                        if this_dm.result != 0:
                                            return this_dm.result

            return dm.result
def Build(
    configuration,
    output_dir,
    release_build=False,
    prerelease_build_name=None,
    no_build_info=False,
    keep_temp_dir=False,
    cmake_generator=(
        None
        if os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY_CONFIGURATION") == "universal_linux"
        or os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_USE_DEFAULT_CMAKE_GENERATOR")
        else "Ninja"
    ),
    output_stream=sys.stdout,
    verbose=False,
):
    """Builds the Featurizer Shared Library.

    Args:
        configuration: cmake build type; also folded (lower-cased) into the
            prerelease version string.
        output_dir: receives the built binaries/data/headers; deleted and
            recreated on every invocation.
        release_build: when True, no prerelease info is embedded (mutually
            exclusive with prerelease_build_name).
        prerelease_build_name: optional prerelease label; defaults to "manual".
        no_build_info: suppress the date/time/config components of the label.
        keep_temp_dir: keep (and report) the cmake working dir for inspection.
        cmake_generator: cmake -G argument; None lets cmake pick its default.
        output_stream: stream for status output.
        verbose: echo tool output as it is produced.

    Returns (int): 0 on success, or the first failing activity's result.
    """
    if release_build and prerelease_build_name:
        raise CommandLine.UsageException(
            "A prerelese build name cannot be provided with the 'release_build' flag",
        )

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Start with a clean output dir
        FileSystem.RemoveTree(output_dir)
        FileSystem.MakeDirs(output_dir)

        temp_directory = CurrentShell.CreateTempDirectory()

        # ----------------------------------------------------------------------
        def CleanupTempDir():
            # Preserve the cmake output for post-mortem inspection when requested
            if keep_temp_dir:
                dm.stream.write(
                    "\nCMake output has been written to '{}'.\n".format(temp_directory),
                )
                return

            FileSystem.RemoveTree(temp_directory)

        # ----------------------------------------------------------------------

        with CallOnExit(CleanupTempDir):
            # cmake is invoked with the temp dir as cwd
            prev_dir = os.getcwd()
            os.chdir(temp_directory)

            with CallOnExit(lambda: os.chdir(prev_dir)):
                if not release_build:
                    if prerelease_build_name is None:
                        # This value should compare as:
                        #   "manual" < "pipeline"
                        prerelease_build_name = "manual"

                    if not no_build_info:
                        now = datetime.datetime.now()

                        prerelease_build_name = "{prerelease_build_name}.{year}.{month}.{day}.{hour}.{minute}.{second}.{configuration}".format(
                            year=now.year,
                            month=now.month,
                            day=now.day,
                            hour=now.hour,
                            minute=now.minute,
                            second=now.second,
                            prerelease_build_name=prerelease_build_name,
                            configuration=configuration.lower(),
                        )

                # Each activity is (display name, shell command or callable)
                activities = [
                    (
                        "Generating cmake Files",
                        'cmake {generator}-DCMAKE_BUILD_TYPE={configuration} {prerelease_build_name} "{this_dir}"'.format(
                            generator='-G "{}" '.format(
                                cmake_generator,
                            ) if cmake_generator else "",
                            temp_dir=temp_directory,  # NOTE(review): not referenced by the template; presumably vestigial
                            configuration=configuration,
                            this_dir=_script_dir,
                            prerelease_build_name="" if not prerelease_build_name else "-DPRODUCT_VERSION_PRERELEASE_INFO={}".format(
                                prerelease_build_name,
                            ),
                        ),
                    ),
                    ("Building", "cmake --build ."),
                ]

                if (
                    os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY_CONFIGURATION")
                    == "universal_linux"
                ):
                    activities.append(
                        (
                            "Verifying Universal Linux Binaries",
                            "libcheck libFeaturizers.so",
                        ),
                    )

                # Callable activities receive (temp_directory, output_dir, stream)
                activities += [
                    ("Copying Binaries", _CopyBinaries),
                    ("Copying Data", _CopyData),
                    ("Copying Headers", _CopyHeaders),
                ]

                for index, (activity, command_line) in enumerate(activities):
                    dm.stream.write(
                        "{} ({} of {})...".format(activity, index + 1, len(activities)),
                    )
                    with dm.stream.DoneManager(
                        suffix="\n" if verbose else None,
                    ) as this_dm:
                        # Capture all output so it can be replayed on failure
                        sink = six.moves.StringIO()

                        output_streams = [sink]

                        if verbose:
                            output_streams.append(
                                StreamDecorator(
                                    this_dm.stream,
                                    line_prefix="INFO: ",
                                ),
                            )

                        this_output_stream = StreamDecorator(output_streams)

                        if callable(command_line):
                            this_dm.result = command_line(
                                temp_directory,
                                output_dir,
                                this_output_stream,
                            )
                        else:
                            this_dm.result = Process.Execute(
                                command_line,
                                this_output_stream,
                            )

                        if this_dm.result != 0:
                            if not verbose:
                                # Replay the captured output that was suppressed
                                this_dm.stream.write(sink.getvalue())

                            return this_dm.result

        return dm.result
def ExtractCoverageInfo(
    self,
    coverage_filename,
    binary_filename,
    includes,
    excludes,
    output_stream,
):
    """Extracts covered/uncovered line counts for a single binary.

    Args:
        coverage_filename: rebound internally to the generated lcov.info file;
            the incoming value is not read.
        binary_filename: binary whose sibling .gcno/.gcda files are collected.
        includes / excludes: glob-style "::"-separated method filters;
            excludes take precedence.
        output_stream: receives the conversion tool's output.

    Returns:
        (covered, not_covered) on success, or a non-zero int error code if the
        external conversion tool fails.
    """
    # This is a hack. The names extracted from the coverage files are mangled
    # while the names provided in includes and excludes are in the glob format.
    # Split the glob and then determine matches by checking to see if each component
    # is in the mangled name. There is a lot that could go wrong with this, but
    # hopefully it is good enough.

    # ----------------------------------------------------------------------
    def ProcessFilter(value):
        # Drop glob wildcards; keep only the literal components
        return [part for part in value.split("::") if part != "*"]

    # ----------------------------------------------------------------------
    def Matches(value, parts):
        # Substring containment of every component approximates a glob match
        for part in parts:
            if part not in value:
                return False
        return True

    # ----------------------------------------------------------------------

    if excludes:
        excludes = [ProcessFilter(exclude) for exclude in excludes]
        excludes_func = lambda method_name: any(
            Matches(method_name, exclude) for exclude in excludes)
    else:
        excludes_func = lambda method_name: False

    if includes:
        includes = [ProcessFilter(include) for include in includes]
        includes_func = lambda method_name: any(
            Matches(method_name, include) for include in includes)
    else:
        includes_func = lambda method_name: True

    # ----------------------------------------------------------------------
    def ShouldInclude(method_name):
        # Excludes take precedence over includes
        return not excludes_func(method_name) and includes_func(method_name)

    # ----------------------------------------------------------------------

    # grcov will parse every file in the directory which isn't what we want here. Move the coverage
    # files for this binary to a temp dir, parse that dir, and then remove it.
    temp_directory = CurrentShell.CreateTempDirectory()

    with CallOnExit(lambda: FileSystem.RemoveTree(temp_directory)):
        # ----------------------------------------------------------------------
        def GetCoverageFilename(ext):
            # Finds a sibling of binary_filename whose basename starts with the
            # binary's basename and whose extension matches 'ext'
            dirname, basename = os.path.split(binary_filename)
            basename = os.path.splitext(basename)[0]

            for item in os.listdir(dirname):
                fullpath = os.path.join(dirname, item)
                if not os.path.isfile(fullpath):
                    continue

                this_basename, this_ext = os.path.splitext(item)
                if this_ext == ext and this_basename.startswith(basename):
                    return fullpath

            return None

        # ----------------------------------------------------------------------

        gcno_filename = GetCoverageFilename(".gcno")
        assert gcno_filename and os.path.isfile(gcno_filename), (
            binary_filename, gcno_filename)

        shutil.copyfile(
            gcno_filename,
            os.path.join(temp_directory, os.path.basename(gcno_filename)),
        )

        gcda_filename = GetCoverageFilename(".gcda")
        assert gcda_filename and os.path.isfile(gcda_filename), (
            binary_filename, gcda_filename)

        shutil.copyfile(
            gcda_filename,
            os.path.join(temp_directory, os.path.basename(gcda_filename)),
        )

        # Convert the content
        result = Process.Execute(
            '{} Lcov "/bin_dir={}" /type=ade'.format(
                CurrentShell.CreateScriptName("ExtractCoverageInfo"),
                temp_directory,
            ),
            output_stream,
        )
        if result != 0:
            return result

        # Note that the coverage files for all output was generated when coverage was stopped.
        # These coverage files are used to extract coverage percentages for display purposes.
        # Don't let the output name of the file fool you - these files are different from the
        # globally generated coverage file.
        coverage_filename = os.path.join(temp_directory, "lcov.info")
        assert os.path.isfile(coverage_filename), coverage_filename

        # Parse the file. NOTE(review): despite the lcov name, each line is
        # parsed as a standalone JSON object ("/type=ade" output) — confirm.
        covered = 0
        not_covered = 0

        with open(coverage_filename) as f:
            for line in f.readlines():
                content = json.loads(line)

                if "method" not in content:
                    continue

                content = content["method"]

                if ("name" not in content or "total_covered" not in content
                        or "total_uncovered" not in content):
                    continue

                if not ShouldInclude(content["name"]):
                    continue

                covered += content["total_covered"]
                not_covered += content["total_uncovered"]

    return covered, not_covered
def RemoveTemporaryArtifacts(context):
    """Deletes transient build output (currently just 'Testing') under the
    context's output directory."""
    for name in ["Testing"]:
        FileSystem.RemoveTree(os.path.join(context["output_dir"], name))
def Callback(test_lock, configuration, build_dir, output_stream, on_status_update):
    """Generates CMake output for one configuration, writes a Clean.py helper into it,
    and optionally builds and tests the result.

    NOTE(review): `force`, `command_line_template`, `build`, `test`, `_PrintHeader`,
    `_BuildImpl`, and `_TestImpl` are not parameters — they appear to come from an
    enclosing scope not visible in this chunk; confirm against the surrounding function.

    Args:
        test_lock: Lock acquired so that only one configuration tests at a time.
        configuration: Value substituted into `command_line_template`.
        build_dir (str): Directory that receives the generated output.
        output_stream (file-like): Receives subprocess and status output.
        on_status_update (callable): Invoked with a short status string.

    Returns (int): 0 on success; a non-zero process result (or 1) on failure.
    """
    on_status_update("Generating")
    _PrintHeader("Generate Output", output_stream)

    # Refuse to clobber an existing output dir unless the caller forced it.
    if os.path.isdir(build_dir):
        if not force:
            output_stream.write(
                "The output dir '{}' already exists and will not be overwritten.\n".format(
                    build_dir,
                ),
            )
            return 1

        FileSystem.RemoveTree(build_dir)

    FileSystem.MakeDirs(build_dir)

    # Run the generator (e.g. cmake) in the new directory.
    result = Process.Execute(
        command_line_template.format(
            build_dir=build_dir,
            configuration=configuration,
        ),
        output_stream,
    )
    if result != 0:
        return result

    # Create a python file that can be used to clean the directory.
    # Everything present right after generation is recorded as "expected";
    # the generated Clean.py deletes anything that appears later.
    existing_items = os.listdir(build_dir)
    assert existing_items

    with open(os.path.join(build_dir, "Clean.py"), "w") as f:
        # Literal braces inside this template are escaped as '{{' / '}}';
        # only {existing_items_list} is substituted below.
        f.write(
            textwrap.dedent(
                """\
                #!/usr/bin/env python

                import os
                import sys

                import CommonEnvironment
                from CommonEnvironment import CommandLine
                from CommonEnvironment import FileSystem
                from CommonEnvironment.StreamDecorator import StreamDecorator

                # ----------------------------------------------------------------------
                _script_fullpath = CommonEnvironment.ThisFullpath()
                _script_dir, _script_name = os.path.split(_script_fullpath)
                # ----------------------------------------------------------------------

                @CommandLine.EntryPoint
                @CommandLine.Constraints(
                    output_stream=None,
                )
                def EntryPoint(
                    all=False,
                    output_stream=sys.stdout,
                ):
                    with StreamDecorator(output_stream).DoneManager(
                        line_prefix="",
                        prefix="\\nResults: ",
                        suffix="\\n",
                    ) as dm:
                        existing_items = set([{existing_items_list}])

                        for item in os.listdir(_script_dir):
                            if item in existing_items or item == _script_name:
                                continue

                            fullpath = os.path.join(_script_dir, item)

                            dm.stream.write("Removing '{{}}'...".format(fullpath))
                            with dm.stream.DoneManager():
                                FileSystem.RemoveItem(fullpath)

                        cmake_dirs = os.path.join(_script_dir, "CMakeFiles")

                        if all:
                            dm.stream.write("Removing '{{}}'...".format(cmake_dirs))
                            with dm.stream.DoneManager():
                                FileSystem.RemoveTree(cmake_dirs)
                        else:
                            dirs_to_delete = []

                            for fullpath, _ in FileSystem.WalkDirs(
                                cmake_dirs,
                                include_dir_names=[lambda name: os.path.splitext(name)[1] == ".dir"],
                            ):
                                dirs_to_delete.append(fullpath)

                            for dir_to_delete in dirs_to_delete:
                                dm.stream.write("Removing '{{}}'...".format(dir_to_delete))
                                with dm.stream.DoneManager():
                                    FileSystem.RemoveTree(dir_to_delete)

                        return dm.result

                # ----------------------------------------------------------------------
                # ----------------------------------------------------------------------
                # ----------------------------------------------------------------------

                if __name__ == "__main__":
                    try:
                        sys.exit(CommandLine.Main())
                    except KeyboardInterrupt:
                        pass
                """,
            ).format(
                existing_items_list=", ".join(
                    ['"{}"'.format(existing_item) for existing_item in existing_items],
                ),
            ),
        )

    if build:
        on_status_update("Building")
        _PrintHeader("Build Output", output_stream)

        result = _BuildImpl(build_dir, output_stream)
        if result != 0:
            return result

    if test:
        # Serialize test execution across configurations via the shared lock.
        on_status_update("Testing (Waiting)")
        _PrintHeader("Test Output", output_stream)

        with test_lock:
            on_status_update("Testing")

            result = _TestImpl(build_dir, output_stream)
            if result != 0:
                return result

    return 0
def test_Constraints(self):
    """Verifies that each generated deserializer enforces its declared constraints:
    directory/file existence, numeric min/max, string length, and regex matching.

    Valid inputs round-trip (paths compared case-insensitively, since path
    normalization may change case on some platforms); invalid inputs raise
    AllTypesYaml.DeserializeException with the expected message.
    """

    # directory_

    # Create a temp dir with a collision-proof random name in the cwd
    temp_dirname = os.path.join(os.getcwd(), str(uuid.uuid4()).replace("-", ""))
    assert not os.path.exists(temp_dirname), temp_dirname

    os.mkdir(temp_dirname)
    with CallOnExit(lambda: FileSystem.RemoveTree(temp_dirname)):
        self.assertEqual(AllTypesYaml.Deserialize_directory_(os.path.basename(temp_dirname)).lower(), temp_dirname.lower())

    # ----------------------------------------------------------------------
    def CaseInsensitiveException(ExceptionType, regex, func):
        # Asserts that func() raises ExceptionType whose message matches
        # `regex` exactly (despite the name, this is a case-insensitive
        # string comparison, not a regex match).
        try:
            func()
            self.fail("'{}' was not raised".format(ExceptionType))
        except ExceptionType as ex:
            self.assertEqual(regex.lower(), str(ex).lower())

    # ----------------------------------------------------------------------

    CaseInsensitiveException(
        AllTypesYaml.DeserializeException,
        "'{}' is not a valid directory [directory_]".format(os.path.join(os.getcwd(), "Does Not Exist")),
        lambda: AllTypesYaml.Deserialize_directory_("Does Not Exist"),
    )

    # filename_

    # Create a temp filename
    temp_filename = os.path.join(os.getcwd(), str(uuid.uuid4()).replace("-", ""))
    assert not os.path.exists(temp_filename), temp_filename

    with open(temp_filename, "w") as f:
        f.write("Temp file")

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        self.assertEqual(AllTypesYaml.Deserialize_filename_('"{}"'.format(os.path.basename(temp_filename))).lower(), temp_filename.lower())

    CaseInsensitiveException(
        AllTypesYaml.DeserializeException,
        "'{}' is not a valid file [filename_]".format(os.path.join(os.getcwd(), "Does Not Exist")),
        lambda: AllTypesYaml.Deserialize_filename_("Does Not Exist"),
    )

    # filename_any_ (accepts either a directory or a file)

    temp_dirname = os.path.join(os.getcwd(), str(uuid.uuid4()).replace("-", ""))
    assert not os.path.exists(temp_dirname), temp_dirname

    os.mkdir(temp_dirname)
    with CallOnExit(lambda: FileSystem.RemoveTree(temp_dirname)):
        self.assertEqual(AllTypesYaml.Deserialize_filename_any_('"{}"'.format(os.path.basename(temp_dirname))).lower(), temp_dirname.lower())

    temp_filename = os.path.join(os.getcwd(), str(uuid.uuid4()).replace("-", ""))
    assert not os.path.exists(temp_filename), temp_filename

    with open(temp_filename, "w") as f:
        f.write("Temp file")

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        self.assertEqual(AllTypesYaml.Deserialize_filename_any_('"{}"'.format(os.path.basename(temp_filename))).lower(), temp_filename.lower())

    self.assertRaisesRegex(AllTypesYaml.DeserializeException, re.escape("is not a valid file or directory"), lambda: AllTypesYaml.Deserialize_filename_any_("Does Not Exist"))

    # number_ (min -20.0, max 20.0)
    self.assertEqual(AllTypesYaml.Deserialize_number_("2"), 2)
    self.assertRaisesRegex(AllTypesYaml.DeserializeException, r"-30 is not >= -20.0", lambda: AllTypesYaml.Deserialize_number_("-30"))
    self.assertRaisesRegex(AllTypesYaml.DeserializeException, r"40 is not <= 20.0", lambda: AllTypesYaml.Deserialize_number_("40"))

    # int_ (min -20, max 20)
    self.assertEqual(AllTypesYaml.Deserialize_int_("10"), 10)
    self.assertRaisesRegex(AllTypesYaml.DeserializeException, r"-30 is not >= -20", lambda: AllTypesYaml.Deserialize_int_("-30"))
    self.assertRaisesRegex(AllTypesYaml.DeserializeException, r"40 is not <= 20", lambda: AllTypesYaml.Deserialize_int_("40"))

    # string_ (length between 2 and 4 characters)
    self.assertEqual(AllTypesYaml.Deserialize_string_("abc"), "abc")
    self.assertRaisesRegex(
        AllTypesYaml.DeserializeException,
        r"'a' is not a valid 'String' string - Value must have at least 2 characters, not have more than 4 characters",
        lambda: AllTypesYaml.Deserialize_string_("a"),
    )
    self.assertRaisesRegex(
        AllTypesYaml.DeserializeException,
        r"'abcde' is not a valid 'String' string - Value must have at least 2 characters, not have more than 4 characters",
        lambda: AllTypesYaml.Deserialize_string_("abcde"),
    )

    # string_regex_ (must match 'b.t')
    self.assertEqual(AllTypesYaml.Deserialize_string_regex_("bit"), "bit")
    self.assertEqual(AllTypesYaml.Deserialize_string_regex_("but"), "but")
    self.assertEqual(AllTypesYaml.Deserialize_string_regex_("bat"), "bat")
    self.assertRaisesRegex(
        AllTypesYaml.DeserializeException,
        r"'abc' is not a valid 'String' string - Value must match the regular expression 'b.t'",
        lambda: AllTypesYaml.Deserialize_string_regex_("abc"),
    )