def _Compile(cls, context, script_filename, output_stream):
    command_line = 'python "{}" build_exe{}'.format(
        script_filename,
        '' if not context["distutils_args"] else " {}".format(
            ' '.join(['"{}"'.format(arg) for arg in context["distutils_args"]]),
        ),
    )

    result = Process.Execute(command_line, output_stream)

    if result == 0:
        if os.path.isdir("build"):
            subdirs = os.listdir("build")
            assert len(subdirs) == 1, subdirs

            source_dir = os.path.join("build", subdirs[0])

            # Remove empty dirs
            to_remove = []

            for root, dirs, _ in os.walk(source_dir):
                for dir in dirs:
                    fullpath = os.path.join(root, dir)

                    if os.path.isdir(fullpath) and not os.listdir(fullpath):
                        to_remove.append(fullpath)

            for dir in to_remove:
                FileSystem.RemoveTree(dir)

            FileSystem.RemoveTree(context["output_dir"])

            shutil.move(source_dir, context["output_dir"])
            FileSystem.RemoveTree("build")

    return result
def _CopyData(temp_directory, output_dir, output_stream):
    output_dir = os.path.join(output_dir, "Data")

    FileSystem.RemoveTree(output_dir)
    FileSystem.CopyTree(os.path.join(temp_directory, "Data"), output_dir)

    return 0
def GetRelativeFiles(dir):
    output_stream.write("Processing files in '{}'...".format(dir))
    with output_stream.DoneManager():
        results = {}

        for filename in FileSystem.WalkFiles(dir):
            assert filename.startswith(dir), (filename, dir)
            results[FileSystem.TrimPath(filename, dir)] = _CalculateHash(filename)

        return results
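# A minimal, standard-library-only sketch of the same "relative path -> content hash"
# mapping built by GetRelativeFiles above; the helper name 'HashFilesUnder' and the
# SHA-256 choice are illustrative assumptions, not part of the library API used here.
import hashlib
import os


def HashFilesUnder(root):
    results = {}

    for current_root, _, filenames in os.walk(root):
        for filename in filenames:
            fullpath = os.path.join(current_root, filename)

            hasher = hashlib.sha256()
            with open(fullpath, "rb") as f:
                for chunk in iter(lambda: f.read(65536), b""):
                    hasher.update(chunk)

            results[os.path.relpath(fullpath, root)] = hasher.hexdigest()

    return results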
def Impl(source_dir_or_filename, dest_dir):
    # shutil.move won't overwrite files, so use distutils (which will)
    if os.path.isdir(source_dir_or_filename):
        import distutils.dir_util

        distutils.dir_util.copy_tree(
            source_dir_or_filename,
            os.path.join(dest_dir, os.path.basename(source_dir_or_filename)),
        )
        FileSystem.RemoveTree(source_dir_or_filename)
    else:
        FileSystem.MakeDirs(dest_dir)
        shutil.move(source_dir_or_filename, dest_dir)
def FileWriter(filename, mode):
    """\
    Method that writes to a temporary location and only copies to the intended
    destination if there are changes. This prevents full rebuilds (which are
    triggered based on timestamps) on files that haven't changed.
    """

    temp_filename = CurrentShell.CreateTempFilename()

    with open(temp_filename, mode) as f:
        yield f

    if not os.path.isfile(filename) or CalcHash(temp_filename) != CalcHash(filename):
        FileSystem.RemoveFile(filename)
        shutil.move(temp_filename, filename)
    else:
        FileSystem.RemoveFile(temp_filename)
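# A self-contained sketch of the same "write to a temp file, replace only on change"
# technique using only the standard library; the real FileWriter above relies on
# CurrentShell/FileSystem helpers and is presumably wrapped as a context manager in
# its original module, which is not shown here.
import contextlib
import filecmp
import os
import shutil
import tempfile


@contextlib.contextmanager
def file_writer(filename, mode="w"):
    fd, temp_filename = tempfile.mkstemp()
    os.close(fd)

    with open(temp_filename, mode) as f:
        yield f

    # Only touch the destination (and its timestamp) if the content changed
    if not os.path.isfile(filename) or not filecmp.cmp(temp_filename, filename, shallow=False):
        if os.path.isfile(filename):
            os.remove(filename)
        shutil.move(temp_filename, filename)
    else:
        os.remove(temp_filename)


# Usage:
#   with file_writer("generated.txt") as f:
#       f.write("content")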
def ExecuteCommands(
    cls,
    command_or_commands,
    output_stream,
    environment=None,
):
    """\
    Creates a temporary script file, writes the commands to that file, and then
    executes it. Returns the result and output generated during execution.
    """

    from CommonEnvironment.CallOnExit import CallOnExit
    from CommonEnvironment import FileSystem
    from CommonEnvironment import Process

    temp_filename = cls.CreateTempFilename(cls.ScriptExtension)

    with open(temp_filename, 'w') as f:
        f.write(cls.GenerateCommands(command_or_commands))

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        cls.MakeFileExecutable(temp_filename)

        return Process.Execute(
            cls.DecorateInvokeScriptCommandLine(temp_filename),
            output_stream,
            environment=environment,
        )
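# A minimal, standard-library-only sketch of the same pattern used by ExecuteCommands
# (write commands to a temporary script, make it executable, run it, clean up).
# The helper name 'run_shell_commands' and the bash shebang are illustrative,
# POSIX-oriented assumptions rather than the library's actual shell abstraction.
import os
import stat
import subprocess
import tempfile


def run_shell_commands(commands, environment=None):
    fd, script = tempfile.mkstemp(suffix=".sh")
    try:
        with os.fdopen(fd, "w") as f:
            f.write("#!/bin/bash\n")
            f.write("\n".join(commands))
            f.write("\n")

        # Mark the script executable for the current user
        os.chmod(script, os.stat(script).st_mode | stat.S_IXUSR)

        completed = subprocess.run(
            [script],
            env=environment,
            capture_output=True,
            text=True,
        )
        return completed.returncode, completed.stdout
    finally:
        os.remove(script)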
def _CreateContext(cls, metadata): metadata["output_dir"] = os.path.realpath(metadata["output_dir"]) FileSystem.MakeDirs(metadata["output_dir"]) return super(ConditionalInvocationQueryMixin, cls)._CreateContext(metadata)
def Clean(
    force=False,
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        for subdir in ["stage", "build"]:
            this_dir = os.path.join(boost_root, subdir)

            if not os.path.isdir(this_dir):
                continue

            if not force:
                dm.stream.write(
                    "Call this method with the '/force' flag to remove '{}'.\n".format(this_dir),
                )
                continue

            dm.stream.write("Removing '{}'...".format(this_dir))
            with dm.stream.DoneManager():
                FileSystem.RemoveTree(this_dir)

        return dm.result
def StopCoverage(self, output_stream):
    if not self._dirs:
        return 0

    # Move coverage data to this dir
    output_dir = os.path.dirname(self._coverage_filename)

    for filename in FileSystem.WalkFiles(
        output_dir,
        include_file_extensions=[".gcda"],
    ):
        dest_filename = os.path.join(output_dir, os.path.basename(filename))
        if dest_filename == filename:
            continue

        if not os.path.isfile(dest_filename):
            shutil.copyfile(filename, dest_filename)

    return Process.Execute(
        '{script} Lcov {dirs} "/output_dir={output}"'.format(
            script=CurrentShell.CreateScriptName("ExtractCoverageInfo"),
            dirs=" ".join(['"/bin_dir={}"'.format(dir) for dir in self._dirs]),
            output=output_dir,
        ),
        output_stream,
    )
def Normalize(
    script_filename_or_dir,
    output_stream=sys.stdout,
):
    """Normalizes a script so that it can be run from any location."""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResults: ",
        suffix='\n',
    ) as dm:
        if os.path.isfile(script_filename_or_dir):
            script_filenames = [script_filename_or_dir]
        elif os.path.isdir(script_filename_or_dir):
            script_filenames = list(FileSystem.WalkFiles(script_filename_or_dir, recurse=False))
        else:
            assert False

        for index, script_filename in enumerate(script_filenames):
            nonlocals = CommonEnvironment.Nonlocals(result=None)

            dm.stream.write(
                "Processing '{}' ({} of {})...".format(
                    script_filename,
                    index + 1,
                    len(script_filenames),
                ),
            )
            with dm.stream.DoneManager(
                done_suffix=lambda: PythonActivationActivity.NormalizeScriptResultStrings[nonlocals.result],
            ):
                nonlocals.result = PythonActivationActivity.NormalizeScript(script_filename)

        return dm.result
def Clean(context, status_stream):
    """Cleans content previously generated.

    The default behavior is to delete the output directory.

    Args:
        context (Dict[string, Any]): The Context to use when generating code.
        status_stream (file-like): Output stream used to generate short status messages.

    Returns (int): Return code.
    """

    assert "output_dir" in context, context
    output_dir = context["output_dir"]

    if not os.path.isdir(output_dir):
        status_stream.write("The output directory '{}' does not exist.\n".format(output_dir))
    else:
        status_stream.write("Removing '{}'...".format(output_dir))
        with status_stream.DoneManager():
            FileSystem.RemoveTree(output_dir)

    return 0
def _CreateContext(cls, metadata): metadata["output_filename"] = os.path.realpath( metadata["output_filename"]) FileSystem.MakeDirs(os.path.dirname(metadata["output_filename"])) return super(SingleOutputMixin, cls)._CreateContext(metadata)
def _GetBuildInfos(root_dir, output_stream):
    root_dir = os.path.realpath(root_dir)

    build_infos = []

    output_stream.write("\nSearching for build files...")
    with output_stream.DoneManager(
        done_suffix=lambda: "{} found".format(inflect.no("build file", len(build_infos))),
    ):
        name, ext = os.path.splitext(BUILD_FILENAME)

        for fullpath in FileSystem.WalkFiles(
            root_dir,
            include_file_base_names=[name],
            include_file_extensions=[ext],
        ):
            if os.path.exists(os.path.join(os.path.dirname(fullpath), BUILD_FILENAME_IGNORE)):
                continue

            build_infos.append(
                _BuildInfo(
                    fullpath,
                    Configuration.FromBuildFile(
                        fullpath,
                        strip_path=root_dir,
                    ),
                ),
            )

        build_infos.sort(key=lambda item: item.configuration.Priority)

    return build_infos
def Lcov(
    bin_dir=None,
    not_llvm=False,
    output_dir=None,
    output_filename="lcov.info",
    type=None,
    output_stream=sys.stdout,
    verbose=False,
):
    """Generates a LCOV file based on *.gcno files"""

    bin_dirs = bin_dir or []
    del bin_dir

    if not bin_dirs:
        bin_dirs.append(os.getcwd())

    if len(bin_dirs) > 1 and not output_dir:
        raise CommandLine.UsageException(
            "An 'output_dir' must be provided when multiple 'bin_dirs' are provided",
        )

    if len(bin_dirs) == 1 and not output_dir:
        output_dir = bin_dirs[0]

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        output_filename = os.path.join(output_dir, output_filename)

        dm.stream.write("Creating '{}'...".format(output_filename))
        with dm.stream.DoneManager() as this_dm:
            FileSystem.MakeDirs(output_dir)

            command_line = 'grcov {dirs} -o "{output_filename}"{llvm}{type}'.format(
                dirs=" ".join(['"{}"'.format(dir) for dir in bin_dirs]),
                output_filename=output_filename,
                llvm="" if not_llvm else " --llvm",
                type="" if type is None else " -t {}".format(type),
            )

            if verbose:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        Command Line:
                            {}

                        """,
                    ).format(command_line),
                )

            this_dm.result = Process.Execute(command_line, this_dm.stream)
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
def CleanupTempDir():
    if keep_temp_dir:
        dm.stream.write(
            "\nCMake output has been written to '{}'.\n".format(temp_directory),
        )
        return

    FileSystem.RemoveTree(temp_directory)
def Clean(
    output_stream=sys.stdout,
):
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        FileSystem.RemoveTree(os.path.join(_script_dir, "..", "GeneratedCode"))

        return dm.result
def _CreateContext(cls, metadata):
    for index, output_filename in enumerate(metadata["output_filenames"]):
        metadata["output_filenames"][index] = os.path.realpath(output_filename)
        FileSystem.MakeDirs(os.path.dirname(metadata["output_filenames"][index]))

    return super(MultipleOutputMixin, cls)._CreateContext(metadata)
def Create(cls, output_dir):
    if os.path.isdir(output_dir):
        raise Exception(
            "The directory '{}' already exists and will not be overwritten".format(output_dir),
        )

    FileSystem.MakeDirs(output_dir)

    return cls.Execute(os.getcwd(), 'git init "{}"'.format(output_dir))
def ExtractCoverageInfo(coverage_filename, binary_filename, includes, excludes, output_stream):
    if excludes:
        excludes_func = lambda method_name: any(fnmatch(method_name, exclude) for exclude in excludes)
    else:
        excludes_func = lambda method_name: False

    if includes:
        includes_func = lambda method_name: any(fnmatch(method_name, include) for include in includes)
    else:
        includes_func = lambda method_name: True

    # ----------------------------------------------------------------------
    def ShouldInclude(method_name):
        return not excludes_func(method_name) and includes_func(method_name)

    # ----------------------------------------------------------------------

    temp_filename = CurrentShell.CreateTempFilename()

    command_line = '"{powershell}" -ExecutionPolicy Bypass -NoProfile -File "{filename}" "{coverage}" "{module}" > "{temp_filename}" 2>&1'.format(
        powershell=r"{}\syswow64\WindowsPowerShell\v1.0\powershell.exe".format(
            os.getenv("SystemRoot"),
        ),
        filename=os.path.join(_script_dir, "CoverageToCsv.ps1"),
        coverage=coverage_filename,
        module=os.path.basename(binary_filename),
        temp_filename=temp_filename,
    )

    result = Process.Execute(command_line, output_stream)
    if result != 0:
        return result

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        covered = 0
        not_covered = 0

        with open(temp_filename, "r") as input:
            reader = csv.reader(input)

            for row in reader:
                if not isinstance(row, (tuple, list)):
                    raise Exception(row)

                if len(row) == 1:
                    raise Exception(row[0])

                method_name = row[1]
                if not ShouldInclude(method_name):
                    continue

                covered += int(row[-2])
                not_covered += int(row[-1])

        return covered, not_covered
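# A small, self-contained illustration of the include/exclude filtering used above:
# a method name is kept when it matches no exclude pattern and at least one include
# pattern (or any name at all, when no includes are given). The sample names and
# patterns below are illustrative only.
from fnmatch import fnmatch


def should_include(method_name, includes=None, excludes=None):
    if excludes and any(fnmatch(method_name, pattern) for pattern in excludes):
        return False
    if includes:
        return any(fnmatch(method_name, pattern) for pattern in includes)
    return True


assert should_include("MyNamespace::Add", includes=["MyNamespace::*"]) is True
assert should_include("MyNamespace::Add", excludes=["*::Add"]) is False
assert should_include("Other::Sub") is True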
def _CleanImplEx(cls, context, output_stream): if context["output_filename"] not in cls.GetInputItems( context) and os.path.isfile(context["output_filename"]): output_stream.write("Removing '{}'...".format( context["output_filename"])) with StreamDecorator(output_stream).DoneManager(): FileSystem.RemoveFile(context["output_filename"]) return super(SingleOutputMixin, cls)._CleanImplEx(context, output_stream)
def Save(self):
    data = pickle.dumps(self)
    data = base64.b64encode(data)
    data = str(data)

    filename = self._GetPersistedFilename(self.Context)

    FileSystem.MakeDirs(os.path.dirname(filename))

    with open(filename, 'w') as f:
        f.write(self.TEMPLATE.format(data=data))
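# A standard-library sketch of the pickle + base64 round trip that Save relies on;
# the TEMPLATE text and _GetPersistedFilename are not shown here, so the payload
# handling below is an assumption for illustration only.
import base64
import pickle

state = {"name": "example", "result": 0}

encoded = base64.b64encode(pickle.dumps(state)).decode("ascii")   # text-safe payload
restored = pickle.loads(base64.b64decode(encoded))                # inverse operation

assert restored == state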
def RemoveItem(name):
    name_lower = name.lower()

    if library_items[name_lower]:
        this_dm.stream.write("Removing '{}' for upgrade.\n".format(name))
        os.remove(os.path.join(python_lib_dir, name))
    else:
        this_dm.stream.write("Removing temporary '{}'.\n".format(name))
        FileSystem.RemoveTree(os.path.join(python_lib_dir, name))

    del library_items[name_lower]
def CommandLineCleanOutputFilename(output_filename, output_stream):
    output_stream = StreamDecorator(output_stream)

    if not os.path.isfile(output_filename):
        output_stream.write("'{}' does not exist.\n".format(output_filename))
    else:
        output_stream.write("Removing '{}'...".format(output_filename))
        with output_stream.DoneManager():
            FileSystem.RemoveFile(output_filename)

    return 0
def EntryPoint(
    zipped_input_filename,
    output_stream=sys.stdout,
):
    """Generates JSON files based on data previously pickled"""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        output_dir = os.path.join(_script_dir, "..", "GeneratedCode")

        FileSystem.RemoveTree(output_dir)
        FileSystem.MakeDirs(output_dir)

        df = _holiday_data_loader(zipped_input_filename)

        # with open('holidays.json', 'w') as f:
        #     f.write(df.to_json(orient='records', lines=True))

        allCountryNames = list(set(df['countryOrRegion']))

        for countryName in allCountryNames:
            dfByCountry = df.loc[df['countryOrRegion'] == countryName]

            date = [int(x.timestamp()) for x in list(dfByCountry['date'])]
            name = list(dfByCountry['normalizeHolidayName'])

            date_dict = {"Date": date}
            name_dict = {"Holiday": name}

            out = {}
            out.update(date_dict)
            out.update(name_dict)

            jsonPath = os.path.join(output_dir, "{}.json".format(countryName))
            with open(jsonPath, 'w') as f:
                json.dump(out, f)

        return dm.result
def CreatePackageName():
    # Continue traversing parent dirs as long as there is an __init__.py file.
    name_parts = []

    filename = os.path.realpath(mod.__file__)

    if CurrentShell.IsSymLink(filename):
        filename = CurrentShell.ResolveSymLink(filename)

    filename = FileSystem.Normalize(filename)

    directory, name = os.path.split(filename)
    name = os.path.splitext(name)[0]

    while os.path.isfile(os.path.join(directory, "__init__.py")):
        directory, name = os.path.split(directory)
        name_parts.append(name)

    if not name_parts:
        # If we didn't find any __init__ files, it means that this isn't a file
        # that is part of a package. However, we still want to simulate package
        # behavior so that relative imports work as expected.
        if name == "__main__" or getattr(sys, "frozen", False):
            name = "___EntryPoint___"
        else:
            name = "___{}Lib___".format(name)

        assert name not in sys.modules
        sys.modules[name] = None

        return name

    # If here, we are looking at a file in a package. Ensure that the entire
    # package is included with fully qualified names.
    name_parts.reverse()

    for index, name_part in enumerate(name_parts):
        fully_qualified_name = '.'.join(name_parts[:index + 1])

        if fully_qualified_name not in sys.modules:
            # When we load this module, it will be loaded under 'name_part'.
            # Preserve the original module (if it exists).
            temporary_modules[name_part] = sys.modules.pop(name_part, None)

            sys.path.insert(0, directory)
            with CallOnExit(lambda: sys.path.pop(0)):
                # This will add the module name to sys.modules
                __import__(name_part)

            sys.modules[fully_qualified_name] = sys.modules[name_part]

        directory = os.path.join(directory, name_part)

    return fully_qualified_name
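# A self-contained sketch of the core trick above: import a directory as a module
# via sys.path, then register the loaded module in sys.modules under a fully
# qualified package name so that dotted lookups resolve. The temporary package
# 'demo_pkg' and the alias 'outer.demo_pkg' are purely illustrative.
import importlib
import os
import sys
import tempfile

root = tempfile.mkdtemp()
pkg_dir = os.path.join(root, "demo_pkg")
os.makedirs(pkg_dir)

with open(os.path.join(pkg_dir, "__init__.py"), "w") as f:
    f.write("VALUE = 42\n")

sys.path.insert(0, root)
try:
    module = importlib.import_module("demo_pkg")      # loaded under its plain name
finally:
    sys.path.pop(0)

# Alias the loaded module under a fully qualified name as well
sys.modules["outer.demo_pkg"] = module

assert sys.modules["outer.demo_pkg"].VALUE == 42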
def Clean(
    output_dir,
    output_stream=sys.stdout,
):
    if not os.path.isdir(output_dir):
        output_stream.write("'{}' does not exist.\n".format(output_dir))
        return 0

    output_stream.write("Removing '{}'...".format(output_dir))
    with StreamDecorator(output_stream).DoneManager():
        FileSystem.RemoveTree(output_dir)

    return 0
def Clean(
    output_stream=sys.stdout,
):
    """Cleans previously built content."""

    potential_dir = os.path.join(calling_dir, "Generated")

    if not os.path.isdir(potential_dir):
        output_stream.write("'{}' does not exist.\n".format(potential_dir))
    else:
        FileSystem.RemoveTree(potential_dir)
        output_stream.write("'{}' has been removed.\n".format(potential_dir))

    return 0
def _CopyHeaders(temp_directory, output_dir, output_stream):
    output_files = []

    output_files += list(
        FileSystem.WalkFiles(
            _script_dir,
            include_file_extensions=[".h"],
            include_file_base_names=[lambda basename: basename.startswith("SharedLibrary_")],
            recurse=False,
        ),
    )

    output_files += list(
        FileSystem.WalkFiles(
            os.path.join(_script_dir, "GeneratedCode"),
            include_file_extensions=[".h"],
            include_file_base_names=[lambda basename: basename.startswith("SharedLibrary_")],
            exclude_file_names=["SharedLibrary_PointerTable.h"],
            recurse=False,
        ),
    )

    for index, output_file in enumerate(output_files):
        output_stream.write(
            "Copying '{}' ({} of {})...".format(output_file, index + 1, len(output_files)),
        )
        with output_stream.DoneManager():
            shutil.copyfile(
                output_file,
                os.path.join(output_dir, os.path.basename(output_file)),
            )

    return 0
def Clone(cls, uri, output_dir, branch=None):
    if os.path.isdir(output_dir):
        raise Exception(
            "The directory '{}' already exists and will not be overwritten.".format(output_dir),
        )

    clone_path, clone_name = os.path.split(output_dir)

    FileSystem.MakeDirs(clone_path)

    return cls.Execute(
        clone_path,
        'git clone{branch} "{uri}" "{name}"'.format(
            branch=' --branch "{}"'.format(branch) if branch else '',
            uri=uri,
            name=clone_name,
        ),
    )
def _CleanImplEx(cls, context, output_stream):
    output_stream = StreamDecorator(output_stream)

    input_items = set(cls.GetInputItems(context))

    for output_filename in context["output_filenames"]:
        if output_filename in input_items:
            continue

        if os.path.isfile(output_filename):
            output_stream.write("Removing '{}'...".format(output_filename))
            with output_stream.DoneManager():
                FileSystem.RemoveFile(output_filename)

    return super(MultipleOutputMixin, cls)._CleanImplEx(context, output_stream)