def read_from_file(self, data_file, append_flag = False, filter = None, lagramge_flag = False):
    """Read a whitespace-separated data table from ``data_file`` into ``self.data``.

    The first line names the variables (space separated); each further line
    holds one value per variable.  Missing values are encoded as the literal
    string ``"?"`` and stored as-is; everything else is stored as float.

    :param data_file: path of the file to read.
    :param append_flag: when True, append rows to already-loaded data and
        keep the existing variable header (unless nothing was read yet).
    :param filter: optional collection of row indices to keep; other rows
        are skipped.  NOTE: shadows the ``filter`` builtin — the name is kept
        for interface compatibility.
    :param lagramge_flag: when True the file is in Lagramge format and the
        trailing newline must be stripped manually.
    """
    from distutils.text_file import TextFile
    f = TextFile(filename = data_file)
    # Only (re)read the variable header when not appending, or when no
    # data has been loaded yet.
    read_vars_flag = not append_flag or (self.length == 0)
    line = f.readline()
    if read_vars_flag:
        if lagramge_flag:
            # BUGFIX: was line[-1].split(" ") (splits only the LAST
            # character); strip the trailing newline instead, matching
            # the value-line handling below.
            self.vars = line[:-1].split(" ")
        else:
            self.vars = line.split(" ")
        for v in self.vars:
            self.data[v] = []
    line_index = -1
    for line in f.readlines():
        line_index = line_index + 1
        # Py3 fix: '<>' replaced by the portable 'is not None' test.
        if (filter is not None) and (line_index not in filter):
            continue
        if lagramge_flag:
            vals = line[:-1].split(" ")
        else:
            vals = line.split(" ")
        for i in range(len(vals)):
            if vals[i] != "?":
                # float() replaces the removed string.atof().
                self.data[self.vars[i]].append(float(vals[i]))
            else:
                self.data[self.vars[i]].append(vals[i])
        self.length = self.length + 1
    f.close()
def read_template(self):
    """Read and parse the manifest template (usually "MANIFEST.in").

    Each logical line is handed to ``self.filelist.process_template_line``,
    which updates the file list in place; malformed lines produce a warning
    instead of aborting.
    """
    log.info("reading manifest template '%s'", self.template)
    reader = TextFile(
        self.template,
        strip_comments=1, skip_blanks=1, join_lines=1,
        lstrip_ws=1, rstrip_ws=1, collapse_join=1,
    )
    try:
        # TextFile.readline() returns None at end of file.
        for line in iter(reader.readline, None):
            try:
                self.filelist.process_template_line(line)
            # process_template_line raises DistutilsTemplateError for
            # malformed lines, or ValueError from convert_path.
            except (DistutilsTemplateError, ValueError) as msg:
                self.warn("%s, line %d: %s"
                          % (reader.filename, reader.current_line, msg))
    finally:
        reader.close()
def parse_manifestin(template):
    """This function parses template file (usually MANIFEST.in).

    Returns the list of files selected by the template, or an empty list
    when the template file does not exist.
    """
    if not os.path.exists(template):
        return []
    reader = TextFile(template, strip_comments=1, skip_blanks=1,
                      join_lines=1, lstrip_ws=1, rstrip_ws=1,
                      collapse_join=1)
    selected = FileList()
    try:
        # TextFile.readline() returns None at end of file.
        for line in iter(reader.readline, None):
            try:
                selected.process_template_line(line)
            # process_template_line raises DistutilsTemplateError for
            # malformed lines, or ValueError from convert_path.
            except (DistutilsTemplateError, ValueError) as msg:
                print("%s, line %d: %s"
                      % (reader.filename, reader.current_line, msg))
        return selected.files
    finally:
        reader.close()
def _data_files():
    """List files to be copied to the TAU Commander installation.

    Start with the files listed in MANIFEST.in, then exclude files that
    should not be installed.

    :return: list of ``(directory, [path])`` tuples suitable for the
        ``data_files`` argument of ``setup()``.
    """
    from distutils.filelist import FileList
    from distutils.text_file import TextFile
    from distutils.errors import DistutilsTemplateError
    filelist = FileList()
    template = TextFile(os.path.join(PACKAGE_TOPDIR, 'MANIFEST.in'),
                        strip_comments=1, skip_blanks=1, join_lines=1,
                        lstrip_ws=1, rstrip_ws=1, collapse_join=1)
    try:
        while True:
            line = template.readline()
            if line is None:
                break
            try:
                filelist.process_template_line(line)
            except (DistutilsTemplateError, ValueError) as err:
                # BUGFIX: was a Python 2 print statement (syntax error
                # under Python 3); the single-argument parenthesised
                # form behaves identically on both.
                print("%s, line %d: %s" % (template.filename, template.current_line, err))
    finally:
        template.close()
    excluded = ['Makefile', 'VERSION', 'MANIFEST.in', '*Miniconda*']
    data_files = []
    for path in filelist.files:
        # for/else: append only when no exclusion pattern matched.
        for excl in excluded:
            if fnmatch.fnmatchcase(path, excl):
                break
        else:
            data_files.append((os.path.dirname(path), [path]))
    return data_files
def read_template(self):
    """Read and parse the manifest template named by ``self.template``
    (usually "MANIFEST.in").

    Parsing and processing are delegated to ``self.filelist``, which
    updates itself accordingly; malformed lines are reported through
    ``self.warn`` rather than raised.
    """
    log.info("reading manifest template '%s'", self.template)
    options = dict(strip_comments=1, skip_blanks=1, join_lines=1,
                   lstrip_ws=1, rstrip_ws=1, collapse_join=1)
    template = TextFile(self.template, **options)
    try:
        line = template.readline()
        # readline() yields None at end of file.
        while line is not None:
            try:
                self.filelist.process_template_line(line)
            # DistutilsTemplateError for malformed lines, ValueError
            # from the lower-level convert_path function.
            except (DistutilsTemplateError, ValueError) as msg:
                self.warn("%s, line %d: %s"
                          % (template.filename, template.current_line, msg))
            line = template.readline()
    finally:
        template.close()
def read_template (self):
    """Read and parse manifest template file named by self.template.

    (usually "MANIFEST.in") The parsing and processing is done by
    'self.filelist', which updates itself accordingly.
    """
    log.info("reading manifest template '%s'", self.template)
    template = TextFile(self.template, strip_comments=1, skip_blanks=1,
                        join_lines=1, lstrip_ws=1, rstrip_ws=1,
                        collapse_join=1)
    # BUGFIX: 'except E, msg' is Python-2-only syntax; use 'as'.
    # BUGFIX: the TextFile was never closed; ensure it is via finally.
    try:
        while 1:
            line = template.readline()
            if line is None:            # end of file
                break
            try:
                self.filelist.process_template_line(line)
            except DistutilsTemplateError as msg:
                self.warn("%s, line %d: %s"
                          % (template.filename, template.current_line, msg))
    finally:
        template.close()
def read_template(self):
    """Read and parse manifest template file named by self.template.

    (usually "MANIFEST.in") The parsing and processing is done by
    'self.filelist', which updates itself accordingly.
    """
    log.info("reading manifest template '%s'", self.template)
    template = TextFile(self.template, strip_comments=1, skip_blanks=1,
                        join_lines=1, lstrip_ws=1, rstrip_ws=1,
                        collapse_join=1)
    # BUGFIX: 'except E, msg' is Python-2-only syntax; use 'as'.
    # BUGFIX: the TextFile was never closed; ensure it is via finally.
    try:
        while 1:
            line = template.readline()
            if line is None:            # end of file
                break
            try:
                self.filelist.process_template_line(line)
            except DistutilsTemplateError as msg:
                self.warn("%s, line %d: %s"
                          % (template.filename, template.current_line, msg))
    finally:
        template.close()
def build_manifest(cls, manifest, path=None):
    """Build the set of files selected by MANIFEST.in-style lines.

    :param manifest: iterable of template lines (joined with newlines).
    :param path: optional directory to chdir into while the template is
        processed; the previous working directory is restored afterwards.
    :return: set of selected file paths.
    """
    infile = StringIO()
    infile.write('\n'.join(manifest))
    infile.seek(0)
    template = TextFile(file=infile, strip_comments=1, skip_blanks=1,
                        join_lines=1, lstrip_ws=1, rstrip_ws=1,
                        collapse_join=1)
    lines = template.readlines()
    filelist = FileList()
    # BUGFIX: oldpath must be bound before the try block — if
    # os.chdir(path) raised, the finally clause hit a NameError.
    oldpath = None
    try:
        if path is not None and not path == os.getcwd():
            oldpath = os.getcwd()
            os.chdir(path)
        for line in lines:
            filelist.process_template_line(line)
    finally:
        if oldpath is not None:
            os.chdir(oldpath)
    return set(filelist.files)
def read_setup_file (filename): from distutils.sysconfig import \ parse_makefile, expand_makefile_vars, _variable_rx from distutils.text_file import TextFile from distutils.util import split_quoted # First pass over the file to gather "VAR = VALUE" assignments. vars = parse_makefile(filename) # Second pass to gobble up the real content: lines of the form # <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...] file = TextFile(filename, strip_comments=1, skip_blanks=1, join_lines=1, lstrip_ws=1, rstrip_ws=1) extensions = [] while 1: line = file.readline() if line is None: # eof break if _variable_rx.match(line): # VAR=VALUE, handled in first pass continue if line[0] == line[-1] == "*": file.warn("'%s' lines not handled yet" % line) continue #print "original line: " + line line = expand_makefile_vars(line, vars) words = split_quoted(line) #print "expanded line: " + line # NB. this parses a slightly different syntax than the old # makesetup script: here, there must be exactly one extension per # line, and it must be the first word of the line. I have no idea # why the old syntax supported multiple extensions per line, as # they all wind up being the same. module = words[0] ext = Extension(module, []) append_next_word = None for word in words[1:]: if append_next_word is not None: append_next_word.append(word) append_next_word = None continue suffix = os.path.splitext(word)[1] switch = word[0:2] ; value = word[2:] if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): # hmm, should we do something about C vs. C++ sources? # or leave it up to the CCompiler implementation to # worry about? ext.sources.append(word) elif switch == "-I": ext.include_dirs.append(value) elif
def _get_included_files(package_masks):
    """Collect module files for the given package masks plus every file
    selected by MANIFEST.in, with distutils logging silenced."""
    previous_threshold = set_threshold(ERROR)
    collected = FileList()
    collected.extend(_iter_package_modules(package_masks))
    manifest = TextFile('MANIFEST.in', strip_comments=1, skip_blanks=1,
                        join_lines=1, lstrip_ws=1, rstrip_ws=1,
                        collapse_join=1)
    for template_line in manifest.readlines():
        collected.process_template_line(template_line)
    set_threshold(previous_threshold)
    return collected.files
def read_dependencies(requirements=missing):
    """Return the lines of a requirements file.

    ``None`` means "no requirements"; omitting the argument selects the
    default ``requirements.txt``.  A missing file yields an empty list.
    """
    if requirements is missing:
        requirements = 'requirements.txt'
    elif requirements is None:
        return []
    if not os.path.isfile(requirements):
        return []
    reader = TextFile(requirements, lstrip_ws=True)
    try:
        return reader.readlines()
    finally:
        reader.close()
def add_versions(self, version_filename):
    """
    class method to populate the .image_info file with the versions
    of the image.
    """
    self.logger.debug("adding the versions of the iso image")
    img_version_path = os.path.join(self.pkg_img_path, version_filename)
    # append the .image_info file with the version file information
    with open(self.img_info_path, "a") as img_fh:
        # Only leading whitespace is stripped (lstrip_ws); rstrip_ws is
        # not set, so lines keep whatever trailing whitespace TextFile
        # leaves on them.
        version_fh = TextFile(filename=img_version_path, lstrip_ws=True)
        version_line = version_fh.readline()
        # TextFile.readline() returns None at EOF, which ends the loop.
        # NOTE(review): an empty string line would also be falsy and end
        # the loop early, and the written line may already carry a
        # newline before the explicit '\n' — confirm intended format.
        while version_line:
            img_fh.write(version_line + '\n')
            version_line = version_fh.readline()
def get_urlimage_dictionary(self, path):
    '''method to retrieve the image/url mappings from the imageurl.txt
    file

    Returns a dict mapping absolute image filenames to their URLs.
    '''
    mappings = dict()
    reader = TextFile(filename=os.path.join(path, self.URLMAPPING),
                      lstrip_ws=True)
    for entry in reader.readlines():
        # Only lines of the form "<image>=<url>" are mappings.
        if '=' not in entry:
            continue
        filename, _sep, url = entry.partition('=')
        mappings[os.path.join(path, filename)] = url
    return mappings
def get_file_list(self):
    """Check that every file found in the source tree is also listed in
    the manifest, warning (and pausing) when files are missing.

    Builds a second FileList from all files on disk, applies the
    MANIFEST.in template and prune rules to it, then compares it with
    the manifest produced by the normal sdist machinery.

    NOTE(review): Python 2 only (raw_input / itertools.imap).
    """
    self._get_file_list()
    manifest_filelist = self.filelist
    source_filelist = FileList()
    # Reuse the already-computed file census when available.
    allfiles = self.filelist.allfiles
    if allfiles:
        source_filelist.set_allfiles(allfiles)
    else:
        source_filelist.findall()
    source_filelist.extend(source_filelist.allfiles)
    # Check for template (usually "MANIFEST.in")
    if os.path.isfile(self.template):
        template = TextFile(self.template, strip_comments=1,
                            skip_blanks=1, join_lines=1, lstrip_ws=1,
                            rstrip_ws=1, collapse_join=1)
        while 1:
            line = template.readline()
            if line is None:            # end of file
                break
            try:
                source_filelist.process_template_line(line)
            except DistutilsTemplateError:
                # Already been warned by "real" filelist
                pass
    if self.prune:
        # prune_file_list() operates on self.filelist, so swap the
        # source list in temporarily and always restore the manifest.
        try:
            self.filelist = source_filelist
            self.prune_file_list()
        finally:
            self.filelist = manifest_filelist
    source_filelist.sort()
    source_filelist.remove_duplicates()
    # Ensure file paths are formatted the same
    # This removes any dot-slashes and converts all slashes to
    # OS-specific separators.
    manifest = set(map(os.path.normpath, manifest_filelist.files))
    missing = False
    for filename in imap(os.path.normpath, source_filelist.files):
        if filename not in manifest:
            self.warn('missing from source distribution: %s' % filename)
            missing = True
    if missing:
        # Block until the user acknowledges the incomplete manifest.
        raw_input('WARNING: Not all source files in distribution! '
                  ' Press <Enter> to continue.')
def read_manifest_in(reporter, filelist=None, dirname=os.getcwd(),
                     filename="MANIFEST.in",
                     exclude_patterns=(r'/(RCS|CVS|\.svn|\.hg)/.*',)):
    """return a list of files matching the MANIFEST.in"""
    # NOTE(review): the dirname default is evaluated once at import
    # time, not per call — confirm that is intended.
    absfile = join(dirname, filename)
    if not exists(absfile):
        return []
    orig_dir = os.getcwd()
    os.chdir(dirname)
    if filelist is None:
        filelist = FileList()
    # Swallow FileList/distutils warnings so they do not reach the
    # console; the reporter hooks are currently disabled.
    def warn(msg, *args):
        if args:
            try:
                msg %= args
            except TypeError as ex:
                raise TypeError(str((ex, msg, args)))
        #reporter.warning(absfile,None,msg)
    filelist.warn = warn
    __warn = distutils.log.warn
    distutils.log.warn = warn
    try:
        template = TextFile(filename, strip_comments=1, skip_blanks=1,
                            join_lines=1, lstrip_ws=1, rstrip_ws=1,
                            collapse_join=1)
        while 1:
            line = template.readline()
            if line is None:            # end of file
                break
            try:
                filelist.process_template_line(line)
            except DistutilsTemplateError:
                #reporter.error(absfile, template.current_line, msg)
                pass
        filelist.sort()
        filelist.remove_duplicates()
        for pattern in exclude_patterns:
            filelist.exclude_pattern(pattern, is_regex=1)
        # Strip any leading './' prefixes for stable relative paths.
        return [path.replace('./', '') for path in filelist.files]
    finally:
        # Always restore the patched warn hook and the original cwd.
        distutils.log.warn = __warn
        os.chdir(orig_dir)
def read_template(self):
    """Parse the manifest template and feed each logical line to
    ``self.filelist``; malformed lines are warned about, not raised."""
    log.info("reading manifest template '%s'", self.template)
    tf = TextFile(self.template, strip_comments=1, skip_blanks=1,
                  join_lines=1, lstrip_ws=1, rstrip_ws=1,
                  collapse_join=1)
    try:
        line = tf.readline()
        # readline() yields None once the template is exhausted.
        while line is not None:
            try:
                self.filelist.process_template_line(line)
            except (DistutilsTemplateError, ValueError) as msg:
                self.warn('%s, line %d: %s'
                          % (tf.filename, tf.current_line, msg))
            line = tf.readline()
    finally:
        tf.close()
    return
def parse_cache(filename):
    """Read a configure cache file into a substitutions dict.

    A missing cache file is not an error and yields an empty dict;
    malformed lines raise ConfigureError.
    """
    substitutions = {}
    try:
        with open(filename, 'r') as cache:
            reader = TextFile(filename, cache, strip_comments=True,
                              lstrip_ws=True, rstrip_ws=True,
                              skip_blanks=True, join_lines=True)
            # readline() returns None once the cache is exhausted.
            for entry in iter(reader.readline, None):
                match = config_pattern.fullmatch(entry)
                if not match:
                    raise ConfigureError("{}, {}".format(
                        filename,
                        reader.gen_error("Invalid configure cache")))
                substitutions[match.group("name")] = match.group("value")
    except FileNotFoundError:
        # No cache yet: start with an empty substitution table.
        pass
    return substitutions
def scan_xml(self, f, xpath_expressions, namespaces=None):
    """Report, via TextFile.warn, every element of ``f`` matched by one
    of the given XPath expressions."""
    namespaces = namespaces or {}
    text_file = TextFile(f)
    tree = ElementTree.parse(f)
    for expression in xpath_expressions:
        # Python 2.6's element tree doesn't support findall with namespaces;
        # we aren't currently using namespaces so put in a shim to be
        # compatible.  If we ever need to specify namespaces, we are not
        # going to be able to run this code on 2.6.
        if namespaces:
            matches = tree.findall(expression, namespaces)
        else:
            matches = tree.findall(expression)
        for element in matches:
            # NOTE(review): 'sourceline' is an lxml element attribute,
            # not provided by xml.etree — confirm which parser is used.
            text_file.warn("Found '%s' match" % expression, element.sourceline)
    text_file.close()
def simulate(self, data_sets, init_state = None, window = 0, output = sys.stdout):
    """Simulate the model over the given data sets.

    With ``window == 0`` the work is delegated to ``fit_params`` with no
    restarts.  Otherwise a sliding window of the given width is moved
    over each data set, the model is recursively simulated on each
    window (via a temporary "model.out" file) and the simulated values
    at selected times are echoed to ``output``.

    NOTE(review): Python 2 only ('print >>' syntax, string.atof).
    """
    from distutils.text_file import TextFile
    from string import split, atof
    if window == 0:
        # No windowing: a single fit/simulation pass produces the output.
        self.fit_params(data_sets, init_state = init_state,
                        n_tf_restarts = 0, n_fs_restarts = 0,
                        output = output)
        return
    first_flag = True
    for data_set in data_sets:
        for i in range(data_set.length - window):
            # Times whose simulated values should be printed: the end of
            # the current window, plus the initial time on the very first
            # window (and -1 once, as a header marker).
            ptime = [data_set.time[i + window]]
            if i == 0:
                ptime = ptime + [data_set.time[0]]
            if first_flag:
                ptime = ptime + [-1]
                first_flag = False
            ds_window = data_set.subset(range(i, i + window + 1),
                                        file_suffix = "w")
            ds_window.write_to_file()
            # Recursively simulate just this window into model.out.
            f = open("model.out", "w")
            self.simulate([ds_window], output = f)
            f.close()
            f = TextFile(filename = "model.out")
            simulation_flag = False
            for line in f.readlines():
                # Lines between a "*_sim" header and an "SSE, " line hold
                # the simulated trajectory.
                if line[-4:] == "_sim":
                    simulation_flag = True
                    if (-1 in ptime):
                        print >> output, line
                    continue
                if line[:5] == "SSE, ":
                    simulation_flag = False
                    continue
                if simulation_flag:
                    fields = line.split(" ")
                    # Echo only rows whose time stamp is of interest.
                    if atof(fields[0]) in ptime:
                        print >> output, line
            f.close()
def read_requirements_txt(path):
    """
    read only entries from requirements.txt in the form::

        pkg
        pkg==0.1.0
        pkg<=0.1.0
        pkg>=0.1.0

    this parser reads any non-comment, non -* line
    """
    requirements = []
    _path = os.path.join(SETUPPY_PATH, path)
    # BUGFIX: tf must be bound before the try block — if TextFile()
    # raised, the finally clause hit a NameError instead of the
    # original error.
    tf = None
    try:
        tf = TextFile(_path)
        requirements_txt = (x.lstrip() for x in tf.readlines())
        for line in requirements_txt:
            # Skip option lines such as '-r other.txt' or '--index-url'.
            if not line.startswith('-'):
                requirements.append(line)
    finally:
        tf and tf.close()
    return requirements
def read_requirements_txt(path):
    """
    read only entries from requirements.txt in the form::

        pkg
        pkg==0.1.0
        pkg<=0.1.0
        pkg>=0.1.0

    this parser reads any non-comment, non -* line
    """
    requirements = []
    _path = os.path.join(SETUPPY_PATH, path)
    # BUGFIX: tf must be bound before the try block — if TextFile()
    # raised, the finally clause hit a NameError instead of the
    # original error.
    tf = None
    try:
        tf = TextFile(_path)
        requirements_txt = (x.lstrip() for x in tf.readlines())
        for line in requirements_txt:
            # Skip option lines such as '-r other.txt' or '--index-url'.
            if not line.startswith("-"):
                requirements.append(line)
    finally:
        tf and tf.close()
    return requirements
def scan_file(self, f, regexs):
    """Scan file ``f`` for every regex in ``regexs`` and report each
    match with its line number."""
    # Use TextFile since it has a nice function to print a warning with
    # the offending line's number.
    text_file = TextFile(f)
    # Thanks to http://stackoverflow.com/a/17502838/6124862
    contents = '\n'.join(text_file.readlines())
    for pattern in regexs:
        compiled = re.compile(pattern, flags=re.MULTILINE | re.DOTALL)
        for found in compiled.finditer(contents):
            # Count the newlines before the match to recover its line.
            line_no = contents.count('\n', 0, found.start())
            text_file.warn("Found '%s' match" % pattern, line_no)
    text_file.close()
def _parse_template_file(filename, path=None):
    """Process a MANIFEST.in-style template file and return the list of
    files it selects.

    :param filename: path of the template file to parse.
    :param path: optional directory to chdir into while the template
        lines are processed; the previous cwd is restored afterwards.
    :return: list of selected file paths.
    """
    template = TextFile(filename, strip_comments=1, skip_blanks=1,
                        join_lines=1, lstrip_ws=1, rstrip_ws=1,
                        collapse_join=1)
    lines = template.readlines()
    filelist = FileList()
    # BUGFIX: oldpath must be bound before the try block — if
    # os.chdir(path) raised, the finally clause hit a NameError.
    oldpath = None
    try:
        if path is not None and not path == os.getcwd():
            oldpath = os.getcwd()
            os.chdir(path)
        for line in lines:
            filelist.process_template_line(line)
    finally:
        if oldpath is not None:
            os.chdir(oldpath)
    return filelist.files
def test_class(self):
    """Run TextFile over the shared TEST_DATA fixture with each option
    combination and compare the produced lines with the expected ones."""
    # Expected output for each option combination (1..6 below).
    result1 = [
        '# test file\n', '\n', 'line 3 \\\n',
        '# intervening comment\n',
        ' continues on next line\n'
    ]
    result2 = ['\n', 'line 3 \\\n', ' continues on next line\n']
    result3 = [
        '# test file\n', 'line 3 \\\n',
        '# intervening comment\n',
        ' continues on next line\n'
    ]
    result4 = ['line 3 \\', ' continues on next line']
    result5 = ['line 3 continues on next line']
    result6 = ['line 3 continues on next line']

    def test_input(count, description, file, expected_result):
        # Helper: read every line and compare with the expected list.
        result = file.readlines()
        self.assertEqual(result, expected_result)

    tmpdir = self.mkdtemp()
    filename = os.path.join(tmpdir, 'test.txt')
    out_file = open(filename, 'w')
    try:
        out_file.write(TEST_DATA)
    finally:
        out_file.close()

    # 1: no option processing at all.
    in_file = TextFile(filename, strip_comments=0, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(1, 'no processing', in_file, result1)
    finally:
        in_file.close()

    # 2: comments stripped only.
    in_file = TextFile(filename, strip_comments=1, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(2, 'strip comments', in_file, result2)
    finally:
        in_file.close()

    # 3: blank lines stripped only.
    in_file = TextFile(filename, strip_comments=0, skip_blanks=1,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(3, 'strip blanks', in_file, result3)
    finally:
        in_file.close()

    # 4: TextFile's defaults.
    in_file = TextFile(filename)
    try:
        test_input(4, 'default processing', in_file, result4)
    finally:
        in_file.close()

    # 5: join continuation lines without collapsing whitespace.
    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1)
    try:
        test_input(5, 'join lines without collapsing', in_file, result5)
    finally:
        in_file.close()

    # 6: join continuation lines and collapse the join whitespace.
    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1, collapse_join=1)
    try:
        test_input(6, 'join lines with collapsing', in_file, result6)
    finally:
        in_file.close()
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
    if g is None:
        g = {}
    done = {}
    notdone = {}        # values still containing '$' references
    while 1:
        line = fp.readline()
        if line is None:                # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            # IDIOM: str.strip() / 'in' replace the deprecated
            # string.strip() and dict.has_key() (identical behaviour,
            # valid on both Python 2 and 3).
            v = v.strip()
            if "$" in v:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    pass
                done[n] = v

    # do variable interpolation here
    while notdone:
        # BUGFIX: iterate a snapshot of the keys — the body deletes
        # entries, and mutating a dict while iterating its live view
        # raises RuntimeError on Python 3.
        for name in list(notdone.keys()):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                else:
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]
    fp.close()

    # save the results in the global dictionary
    g.update(done)
    return g
def parse_makefile(fn, existing_modules=None):
    """Parse an Android.mk-style Makefile into a ModulePool.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
    local_modules = ModulePool()
    if existing_modules:
        local_modules.pool.update(existing_modules.pool)
    variable_pool = AndroidMKVariablePool(fn)
    current_module = None
    within_func_definition = False
    while 1:
        line = fp.readline()
        if line is None:                # eof
            break
        # 'include $(CLEAR_VARS)' starts a fresh module section.
        if line.upper() == "include $(CLEAR_VARS)".upper():
            current_module = None
        # Track 'define' ... 'endef' function bodies so their contents
        # are not parsed as variable assignments.
        if line.lower().startswith("define "):
            within_func_definition = True
        if line.lower().strip() == "endef":
            within_func_definition = False
        if within_func_definition:
            #don't parse the line if we're within function definition
            continue
        match = makefile_parser.VariablePool.VAR_ASSIGNMENT_RX.match(line)
        if match:
            (var_name, var_value) = variable_pool.add_variable(match)
            if not current_module:
                current_module = Module()
                current_module.directory = os.path.dirname(fn)
            if var_name == "LOCAL_MODULE":
                var_value = variable_pool.eval_expression(var_value)
                # Merge with an identically-named module seen earlier.
                temp_module = local_modules.find_module(var_value)
                if temp_module and temp_module != current_module:
                    temp_module.src = current_module.src
                    temp_module.depends = current_module.depends
                    current_module = temp_module
                current_module.name = var_value
                local_modules.add_module(current_module)
            if var_name == "LOCAL_SRC_FILES":
                if current_module:
                    current_module.src = var_value
            if var_name == "LOCAL_STATIC_LIBRARIES" or \
               var_name == "LOCAL_SHARED_LIBRARIES":
                if current_module:
                    for i in var_value.split():
                        current_module.depends.append(i)
    fp.close()
    # update values in modules: expand each dependency expression and
    # splice multi-word expansions into the list in place (empty
    # expansions are removed).
    for (key, item) in local_modules.pool.items():
        number = len(item.depends)
        index = 0
        while index < number:
            value = item.depends[index]
            value = variable_pool.eval_expression(value)
            items = value.split()
            if len(items) > 0:
                item.depends[index] = items[0]
                item.depends.extend(items[1:])
                number += len(items) - 1
                index += 1
            else:
                item.depends.pop(index)
                number -= 1
    #print variable_pool.immediate_variables
    return local_modules
def makefile(fileobj, dct=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.
    """
    fp = TextFile(file=fileobj, strip_comments=1, skip_blanks=1, join_lines=1)
    if dct is None:
        dct = {}
    done = {}
    notdone = {}        # values still containing '$' references
    while 1:
        line = fp.readline()
        if line is None:                # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = str.strip(v)
            if "$" in v:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    pass
                done[n] = v
                # A later plain assignment supersedes a pending one.
                try:
                    del notdone[n]
                except KeyError:
                    pass
    fp.close()

    # do variable interpolation here
    while notdone:
        for name in list(notdone.keys()):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                else:
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = str.strip(value)
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference;
                # just drop it since we can't deal
                del notdone[name]

    # save the results in the global dictionary
    dct.update(done)
    return dct
def parse_makefile(fn, g_vars=None, g_rules=None, g_actions=None):
    """Parse a Makefile-style file.

    Collects all of the variable definitions, rules and actions
    associated with rules.

    :param fn: path of the makefile to parse.
    :param g_vars: optional dict to receive variable definitions.
    :param g_rules: optional dict to receive rule -> prerequisites.
    :param g_actions: optional dict to receive rule -> action lines.
    :return: tuple ``(g_vars, g_rules, g_actions)``.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1,
                  errors="surrogateescape")
    if g_vars is None:
        g_vars = {}
    if g_rules is None:
        g_rules = {}
    if g_actions is None:
        g_actions = {}
    done = {}
    rules = {}
    actions = {}
    line = fp.readline()
    while line is not None:
        vmatch = _variable_rx.match(line)
        rmatch = _rule_rx.match(line)
        if vmatch:
            n, v = vmatch.group(1, 2)
            v = v.strip()
            try:
                v = int(v)
            except ValueError:
                # insert literal `$'
                done[n] = v.replace('$$', '$')
            else:
                done[n] = v
            line = fp.readline()
        elif rmatch:
            n, v = rmatch.group(1, 4)
            rules[n] = v
            ## Collect the actions: every following line up to the next
            ## variable assignment or rule belongs to this rule.
            collected = []
            line = fp.readline()
            while line is not None:
                m = _variable_rx.match(line) or _rule_rx.match(line)
                if m is None:
                    collected.append(line.lstrip())
                    line = fp.readline()
                else:
                    break
            actions[n] = collected
        else:
            line = fp.readline()
    fp.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip().replace('\t', ' ')

    # save the results in the global dictionary
    g_vars.update(done)
    g_rules.update(rules)
    g_actions.update(actions)
    return (g_vars, g_rules, g_actions)
def parse_requirements(filename):
    """Return the cleaned-up lines of a requirements file."""
    # TextFile handles comment stripping and line joining for us.
    with open(filename, 'r') as handle:
        reader = TextFile(filename, handle)
        return reader.readlines()
"""distutils.command.sdist
"""distutils.extension
def parse_makefile(fn, g = None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
    if g is None:
        g = {}
    done = {}
    notdone = {}        # values still containing '$' references
    while 1:
        line = fp.readline()
        if line is None:                # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # '$$' is a literal '$' in make; ignore it when deciding
            # whether the value still needs interpolation.
            tmpv = v.replace('$$', '')
            if '$' in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # do variable interpolation here
    while notdone:
        # BUGFIX: iterate a snapshot of the keys — the loop body deletes
        # entries from notdone, and mutating a dict while iterating its
        # live keys() view raises RuntimeError on Python 3.
        for name in list(notdone.keys()):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                else:
                    done[n] = item = ''
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if '$' in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it
                del notdone[name]
    fp.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    g.update(done)
    return g
def test_class(self):
    """Run TextFile over TEST_DATA with each option combination and
    compare the produced lines with the expected ones."""
    # old tests moved from text_file.__main__
    # so they are really called by the buildbots

    # result 1: no fancy options
    result1 = ['# test file\n', '\n', 'line 3 \\\n',
               '# intervening comment\n',
               ' continues on next line\n']

    # result 2: just strip comments
    result2 = ["\n", "line 3 \\\n", " continues on next line\n"]

    # result 3: just strip blank lines
    result3 = ["# test file\n", "line 3 \\\n",
               "# intervening comment\n",
               " continues on next line\n"]

    # result 4: default, strip comments, blank lines,
    # and trailing whitespace
    result4 = ["line 3 \\", " continues on next line"]

    # result 5: strip comments and blanks, plus join lines (but don't
    # "collapse" joined lines
    result5 = ["line 3 continues on next line"]

    # result 6: strip comments and blanks, plus join lines (and
    # "collapse" joined lines
    result6 = ["line 3 continues on next line"]

    def test_input(count, description, file, expected_result):
        # Helper: read every line and compare with the expected list.
        result = file.readlines()
        self.assertEqual(result, expected_result)

    tmpdir = self.mkdtemp()
    filename = os.path.join(tmpdir, "test.txt")
    out_file = open(filename, "w")
    try:
        out_file.write(TEST_DATA)
    finally:
        out_file.close()

    in_file = TextFile(filename, strip_comments=0, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(1, "no processing", in_file, result1)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(2, "strip comments", in_file, result2)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=0, skip_blanks=1,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(3, "strip blanks", in_file, result3)
    finally:
        in_file.close()

    in_file = TextFile(filename)
    try:
        test_input(4, "default processing", in_file, result4)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1)
    try:
        test_input(5, "join lines without collapsing", in_file, result5)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1, collapse_join=1)
    try:
        test_input(6, "join lines with collapsing", in_file, result6)
    finally:
        in_file.close()
def read_setup_file(filename):
    """Read a Setup file and return a list of Extension instances.

    A first pass (parse_makefile) collects VAR=VALUE assignments; a
    second pass parses one extension per line: the module name first,
    followed by source files, preprocessor arguments and library
    switches.
    """
    from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx
    from distutils.text_file import TextFile
    from distutils.util import split_quoted
    vars = parse_makefile(filename)
    file = TextFile(filename, strip_comments=1, skip_blanks=1,
                    join_lines=1, lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []
        while 1:
            line = file.readline()
            if line is None:                # eof
                break
            if _variable_rx.match(line):    # VAR=VALUE, handled in first pass
                continue
            if line[0] == line[-1] == '*':
                file.warn("'%s' lines not handled yet" % line)
                continue
            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)
            # Exactly one extension per line; its name is the first word.
            module = words[0]
            ext = Extension(module, [])
            append_next_word = None
            for word in words[1:]:
                if append_next_word is not None:
                    append_next_word.append(word)
                    append_next_word = None
                    continue
                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]
                if suffix in ('.c', '.cc', '.cpp', '.cxx', '.c++', '.m', '.mm'):
                    ext.sources.append(word)
                elif switch == '-I':
                    ext.include_dirs.append(value)
                elif switch == '-D':
                    # IDIOM: value.find() replaces the removed
                    # string.find() (identical result).
                    equals = value.find('=')
                    if equals == -1:        # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:                   # "-DFOO=blah"
                        ext.define_macros.append((value[0:equals],
                                                  value[equals + 2:]))
                elif switch == '-U':
                    ext.undef_macros.append(value)
                elif switch == '-C':        # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == '-l':
                    ext.libraries.append(value)
                elif switch == '-L':
                    ext.library_dirs.append(value)
                elif switch == '-R':
                    ext.runtime_library_dirs.append(value)
                elif word == '-rpath':
                    append_next_word = ext.runtime_library_dirs
                elif word == '-Xlinker':
                    append_next_word = ext.extra_link_args
                elif word == '-Xcompiler':
                    append_next_word = ext.extra_compile_args
                elif switch == '-u':
                    ext.extra_link_args.append(word)
                    if not value:
                        append_next_word = ext.extra_link_args
                # BUGFIX: removed a duplicated, unreachable second copy
                # of the '-Xcompiler' and '-u' branches that shadowed
                # the two branches above (dead code).
                elif suffix in ('.a', '.so', '.sl', '.o', '.dylib'):
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)
            extensions.append(ext)
    finally:
        file.close()
    return extensions
def read_setup_file(filename):
    """Reads a Setup file and returns Extension instances.

    A first pass (parse_makefile) gathers VAR=VALUE assignments; the
    second pass parses one extension per line: the module name first,
    then source files, preprocessor arguments and library switches.
    """
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)
    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename, strip_comments=1, skip_blanks=1,
                    join_lines=1, lstrip_ws=1, rstrip_ws=1)
    extensions = []

    while True:
        line = file.readline()
        if line is None:                # eof
            break
        if _variable_rx.match(line):    # VAR=VALUE, handled in first pass
            continue

        if line[0] == line[-1] == "*":
            file.warn("'%s' lines not handled yet" % line)
            continue

        line = expand_makefile_vars(line, vars)
        words = split_quoted(line)

        # NB. this parses a slightly different syntax than the old
        # makesetup script: here, there must be exactly one extension per
        # line, and it must be the first word of the line. I have no idea
        # why the old syntax supported multiple extensions per line, as
        # they all wind up being the same.

        module = words[0]
        ext = Extension(module, [])
        append_next_word = None

        for word in words[1:]:
            if append_next_word is not None:
                # Previous switch (-rpath/-Xlinker/...) consumes this word.
                append_next_word.append(word)
                append_next_word = None
                continue

            suffix = os.path.splitext(word)[1]
            switch = word[0:2] ; value = word[2:]

            if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                # hmm, should we do something about C vs. C++ sources?
                # or leave it up to the CCompiler implementation to
                # worry about?
                ext.sources.append(word)
            elif switch == "-I":
                ext.include_dirs.append(value)
            elif switch == "-D":
                equals = value.find("=")
                if equals == -1:        # bare "-DFOO" -- no value
                    ext.define_macros.append((value, None))
                else:                   # "-DFOO=blah"
                    ext.define_macros.append((value[0:equals],
                                              value[equals+2:]))
            elif switch == "-U":
                ext.undef_macros.append(value)
            elif switch == "-C":        # only here 'cause makesetup has it!
                ext.extra_compile_args.append(word)
            elif switch == "-l":
                ext.libraries.append(value)
            elif switch == "-L":
                ext.library_dirs.append(value)
            elif switch == "-R":
                ext.runtime_library_dirs.append(value)
            elif word == "-rpath":
                append_next_word = ext.runtime_library_dirs
            elif word == "-Xlinker":
                append_next_word = ext.extra_link_args
            elif word == "-Xcompiler":
                append_next_word = ext.extra_compile_args
            elif switch == "-u":
                ext.extra_link_args.append(word)
                if not value:
                    append_next_word = ext.extra_link_args
            elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                # NB. a really faithful emulation of makesetup would
                # append a .o file to extra_objects only if it
                # had a slash in it; otherwise, it would s/.o/.c/
                # and append it to sources. Hmmmm.
                ext.extra_objects.append(word)
            else:
                file.warn("unrecognized argument '%s'" % word)

        extensions.append(ext)

    return extensions
"""Provide access to Python's configuration information. The specific
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    reader = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1,
                      errors="surrogateescape")

    if g is None:
        g = {}
    done = {}        # fully-resolved variables
    notdone = {}     # values still containing $(...) references

    # Pass 1: collect every "VAR = VALUE" assignment.
    while True:
        line = reader.readline()
        if line is None:  # eof
            break
        assign = _variable_rx.match(line)
        if not assign:
            continue
        key, raw = assign.group(1, 2)
        raw = raw.strip()
        # `$$' is a literal `$' in make; ignore those when deciding
        # whether the value still needs expansion.
        if "$" in raw.replace('$$', ''):
            notdone[key] = raw
        else:
            try:
                done[key] = int(raw)
            except ValueError:
                # insert literal `$'
                done[key] = raw.replace('$$', '$')

    # Variables with a 'PY_' prefix in the makefile.  These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works,
    # even if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    # Pass 2: repeatedly substitute one $(...) reference at a time until
    # nothing unresolved remains.
    while notdone:
        for name in list(notdone):
            value = notdone[name]
            ref = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if not ref:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]
                continue

            target = ref.group(1)
            found = True
            if target in done:
                replacement = str(done[target])
            elif target in notdone:
                # get it on a subsequent round
                found = False
            elif target in os.environ:
                # do it like make: fall back to environment
                replacement = os.environ[target]
            elif target in renamed_variables:
                if (name.startswith('PY_')
                        and name[3:] in renamed_variables):
                    replacement = ""
                elif 'PY_' + target in notdone:
                    found = False
                else:
                    replacement = str(done['PY_' + target])
            else:
                done[target] = replacement = ""

            if not found:
                continue

            after = value[ref.end():]
            value = value[:ref.start()] + replacement + after
            if "$" in after:
                # more references left; keep it pending
                notdone[name] = value
            else:
                try:
                    value = int(value)
                except ValueError:
                    done[name] = value.strip()
                else:
                    done[name] = value
                del notdone[name]
                if (name.startswith('PY_')
                        and name[3:] in renamed_variables):
                    name = name[3:]
                    if name not in done:
                        done[name] = value

    reader.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    g.update(done)
    return g
def parse_requirements(filename: str): """Return requirements from requirements file.""" # Ref: https://stackoverflow.com/a/42033122/ return TextFile( filename=str(Path(__file__).with_name(filename))).readlines()
def test_class(self): result1 = ['# test file\n', '\n', 'line 3 \\\n', '# intervening comment\n', ' continues on next line\n'] result2 = ['\n', 'line 3 \\\n', ' continues on next line\n'] result3 = ['# test file\n', 'line 3 \\\n', '# intervening comment\n', ' continues on next line\n'] result4 = ['line 3 \\', ' continues on next line'] result5 = ['line 3 continues on next line'] result6 = ['line 3 continues on next line'] def test_input(count, description, file, expected_result): result = file.readlines() self.assertEqual(result, expected_result) tmpdir = self.mkdtemp() filename = os.path.join(tmpdir, 'test.txt') out_file = open(filename, 'w') try: out_file.write(TEST_DATA) finally: out_file.close() in_file = TextFile(filename, strip_comments=0, skip_blanks=0, lstrip_ws=0, rstrip_ws=0) try: test_input(1, 'no processing', in_file, result1) finally: in_file.close() in_file = TextFile(filename, strip_comments=1, skip_blanks=0, lstrip_ws=0, rstrip_ws=0) try: test_input(2, 'strip comments', in_file, result2) finally: in_file.close() in_file = TextFile(filename, strip_comments=0, skip_blanks=1, lstrip_ws=0, rstrip_ws=0) try: test_input(3, 'strip blanks', in_file, result3) finally: in_file.close() in_file = TextFile(filename) try: test_input(4, 'default processing', in_file, result4) finally: in_file.close() in_file = TextFile(filename, strip_comments=1, skip_blanks=1, join_lines=1, rstrip_ws=1) try: test_input(5, 'join lines without collapsing', in_file, result5) finally: in_file.close() in_file = TextFile(filename, strip_comments=1, skip_blanks=1, join_lines=1, rstrip_ws=1, collapse_join=1) try: test_input(6, 'join lines with collapsing', in_file, result6) finally: in_file.close()
def read_setup_file(filename):
    """Read a makesetup-style Setup file and return Extension instances.

    The file is scanned twice: once for "VAR = VALUE" assignments
    (via parse_makefile) and once for module lines of the form
        <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    """
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)
    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename,
                    strip_comments=1, skip_blanks=1, join_lines=1,
                    lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []

        while True:
            line = file.readline()
            if line is None:                # eof
                break
            if _variable_rx.match(line):    # VAR=VALUE, handled in first pass
                continue

            if line[0] == line[-1] == "*":
                file.warn("'%s' lines not handled yet" % line)
                continue

            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension per
            # line, and it must be the first word of the line.  I have no idea
            # why the old syntax supported multiple extensions per line, as
            # they all wind up being the same.
            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    # Previous switch (-rpath/-Xlinker/-Xcompiler/-u) said the
                    # next word belongs to it.
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]

                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++",
                              ".m", ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:        # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:                   # "-DFOO=blah"
                        # BUGFIX: was value[equals+2:], which silently dropped
                        # the first character of the macro's value.
                        ext.define_macros.append(
                            (value[0:equals], value[equals + 1:]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":        # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        # "-u SYMBOL": symbol comes as the next word.
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)

            extensions.append(ext)
    finally:
        file.close()

    return extensions
def test_class(self): result1 = ["# test file\n", "\n", "line 3 \\\n", "# intervening comment\n", " continues on next line\n"] result2 = ["\n", "line 3 \\\n", " continues on next line\n"] result3 = ["# test file\n", "line 3 \\\n", "# intervening comment\n", " continues on next line\n"] result4 = ["line 3 \\", " continues on next line"] result5 = ["line 3 continues on next line"] result6 = ["line 3 continues on next line"] def test_input(count, description, file, expected_result): result = file.readlines() self.assertEqual(result, expected_result) tmpdir = self.mkdtemp() filename = os.path.join(tmpdir, "test.txt") out_file = open(filename, "w") try: out_file.write(TEST_DATA) finally: out_file.close() in_file = TextFile(filename, strip_comments=0, skip_blanks=0, lstrip_ws=0, rstrip_ws=0) try: test_input(1, "no processing", in_file, result1) finally: in_file.close() in_file = TextFile(filename, strip_comments=1, skip_blanks=0, lstrip_ws=0, rstrip_ws=0) try: test_input(2, "strip comments", in_file, result2) finally: in_file.close() in_file = TextFile(filename, strip_comments=0, skip_blanks=1, lstrip_ws=0, rstrip_ws=0) try: test_input(3, "strip blanks", in_file, result3) finally: in_file.close() in_file = TextFile(filename) try: test_input(4, "default processing", in_file, result4) finally: in_file.close() in_file = TextFile(filename, strip_comments=1, skip_blanks=1, join_lines=1, rstrip_ws=1) try: test_input(5, "join lines without collapsing", in_file, result5) finally: in_file.close() in_file = TextFile(filename, strip_comments=1, skip_blanks=1, join_lines=1, rstrip_ws=1, collapse_join=1) try: test_input(6, "join lines with collapsing", in_file, result6) finally: in_file.close()