def _data_files():
    """List files to be copied to the TAU Commander installation.

    Start with the files listed in MANIFEST.in, then exclude files that
    should not be installed.  Returns a list of (directory, [path])
    pairs suitable for the ``data_files`` setup() argument.
    """
    from distutils.filelist import FileList
    from distutils.text_file import TextFile
    from distutils.errors import DistutilsTemplateError
    filelist = FileList()
    template = TextFile(os.path.join(PACKAGE_TOPDIR, 'MANIFEST.in'),
                        strip_comments=1, skip_blanks=1, join_lines=1,
                        lstrip_ws=1, rstrip_ws=1, collapse_join=1)
    try:
        while True:
            line = template.readline()
            if line is None:
                # end of template
                break
            try:
                filelist.process_template_line(line)
            except (DistutilsTemplateError, ValueError) as err:
                # Report the malformed line but keep processing.
                # Bug fix: the original used the Python 2 `print ...`
                # statement, a SyntaxError on Python 3; the print()
                # function below works on both.
                print("%s, line %d: %s"
                      % (template.filename, template.current_line, err))
    finally:
        template.close()
    excluded = ['Makefile', 'VERSION', 'MANIFEST.in', '*Miniconda*']
    data_files = []
    for path in filelist.files:
        for excl in excluded:
            if fnmatch.fnmatchcase(path, excl):
                break
        else:
            # No exclusion pattern matched: install this file.
            data_files.append((os.path.dirname(path), [path]))
    return data_files
def parse_manifestin(template):
    """Parse a MANIFEST.in-style file and return the resulting file list.

    Returns an empty list when *template* does not exist.  Malformed
    template lines are reported on stdout and skipped.
    """
    if not os.path.exists(template):
        return []
    manifest = TextFile(template, strip_comments=1, skip_blanks=1,
                        join_lines=1, lstrip_ws=1, rstrip_ws=1,
                        collapse_join=1)
    file_list = FileList()
    try:
        # TextFile.readline() yields None once the file is exhausted.
        for line in iter(manifest.readline, None):
            try:
                file_list.process_template_line(line)
            except (DistutilsTemplateError, ValueError) as msg:
                # Malformed line, or a bad path from the lower-level
                # convert_path function: report it and keep going.
                print("%s, line %d: %s"
                      % (manifest.filename, manifest.current_line, msg))
        return file_list.files
    finally:
        manifest.close()
def read_from_file(self, data_file, append_flag=False, filter=None,
                   lagramge_flag=False):
    """Read whitespace-separated data columns from *data_file*.

    The first line names the variables (unless *append_flag* is set and
    data is already present, in which case the header line is skipped).
    Numeric fields are converted to float; "?" fields are kept verbatim.
    *filter*, when given, is a collection of 0-based data-line indices
    to keep.

    Fixed for Python 3: the removed ``<>`` operator became ``!=`` and
    the removed ``string.atof`` became the ``float`` builtin.
    """
    from distutils.text_file import TextFile
    f = TextFile(filename=data_file)
    # Only (re)read the variable names when starting fresh.
    read_vars_flag = not append_flag or (self.length == 0)
    line = f.readline()
    if read_vars_flag:
        if lagramge_flag:
            # NOTE(review): line[-1] is a single character, while the
            # value branch below uses line[:-1]; confirm this asymmetry
            # is intended for the lagramge file format.
            self.vars = line[-1].split(" ")
        else:
            self.vars = line.split(" ")
        for v in self.vars:
            self.data[v] = []
    line_index = -1
    for line in f.readlines():
        line_index = line_index + 1
        if (filter is not None) and (line_index not in filter):
            continue
        if lagramge_flag:
            vals = line[:-1].split(" ")
        else:
            vals = line.split(" ")
        for i in range(len(vals)):
            if vals[i] != "?":
                # float() replaces the long-removed string.atof().
                self.data[self.vars[i]].append(float(vals[i]))
            else:
                # Missing value marker is stored as-is.
                self.data[self.vars[i]].append(vals[i])
        self.length = self.length + 1
    f.close()
def read_template(self):
    """Read and parse manifest template file named by self.template.

    (usually "MANIFEST.in") The parsing and processing is done by
    'self.filelist', which updates itself accordingly.
    """
    log.info("reading manifest template '%s'", self.template)
    template = TextFile(self.template, strip_comments=1, skip_blanks=1,
                        join_lines=1, lstrip_ws=1, rstrip_ws=1,
                        collapse_join=1)
    try:
        while True:
            line = template.readline()
            if line is None:
                # end of file
                break
            try:
                self.filelist.process_template_line(line)
            # Bug fix: process_template_line can raise a
            # DistutilsTemplateError for malformed lines, OR a
            # ValueError from the lower-level convert_path function;
            # previously the ValueError escaped and aborted the whole
            # manifest read instead of warning about the one bad line.
            except (DistutilsTemplateError, ValueError) as msg:
                self.warn("%s, line %d: %s" %
                          (template.filename, template.current_line, msg))
    finally:
        template.close()
def read_template(self):
    """Read and parse manifest template file named by self.template.

    (usually "MANIFEST.in") The parsing and processing is done by
    'self.filelist', which updates itself accordingly.
    """
    log.info("reading manifest template '%s'", self.template)
    template = TextFile(self.template,
                        strip_comments=1, skip_blanks=1, join_lines=1,
                        lstrip_ws=1, rstrip_ws=1, collapse_join=1)
    try:
        # readline() returns None once the template is exhausted.
        for line in iter(template.readline, None):
            try:
                self.filelist.process_template_line(line)
            # process_template_line() can raise DistutilsTemplateError
            # for malformed lines, or a ValueError from the lower-level
            # convert_path function.
            except (DistutilsTemplateError, ValueError) as msg:
                self.warn("%s, line %d: %s" %
                          (template.filename, template.current_line, msg))
    finally:
        template.close()
def read_dependencies(requirements=missing):
    """Return the dependency lines read from *requirements*.

    ``None`` yields an empty list; the ``missing`` sentinel falls back
    to ``requirements.txt``.  A path that is not a regular file also
    yields an empty list.
    """
    if requirements is None:
        return []
    path = 'requirements.txt' if requirements is missing else requirements
    if not os.path.isfile(path):
        return []
    text = TextFile(path, lstrip_ws=True)
    try:
        return text.readlines()
    finally:
        text.close()
def scan_file(self, f, regexs):
    """Scan file *f* against each regex, emitting a warning per match."""
    # Use TextFile since it has a nice function to print a warning with
    # the offending line's number.
    tf = TextFile(f)
    # Thanks to http://stackoverflow.com/a/17502838/6124862
    body = '\n'.join(tf.readlines())
    for pattern in regexs:
        compiled = re.compile(pattern, flags=re.MULTILINE | re.DOTALL)
        for hit in compiled.finditer(body):
            # The number of newlines before the match locates its line
            # (0-based, as in the original).
            line_no = body.count('\n', 0, hit.start())
            tf.warn("Found '%s' match" % pattern, line_no)
    tf.close()
def scan_xml(self, f, xpath_expressions, namespaces=None):
    """Scan XML file *f* with each XPath expression, warning per match."""
    namespaces = namespaces or {}
    tf = TextFile(f)
    tree = ElementTree.parse(f)
    for expr in xpath_expressions:
        # Python 2.6's element tree doesn't support findall with
        # namespaces; we aren't currently using namespaces so keep this
        # shim for compatibility.  If we ever need to specify
        # namespaces, we are not going to be able to run this on 2.6.
        if namespaces:
            matches = tree.findall(expr, namespaces)
        else:
            matches = tree.findall(expr)
        for elem in matches:
            # NOTE(review): `sourceline` exists on lxml elements but not
            # on stdlib xml.etree elements -- confirm which ElementTree
            # this module imports.
            tf.warn("Found '%s' match" % expr, elem.sourceline)
    tf.close()
def simulate(self, data_sets, init_state=None, window=0, output=sys.stdout):
    """Simulate the model over sliding windows of each data set.

    With ``window == 0`` this delegates to fit_params() with no
    restarts.  Otherwise each window of ``window + 1`` samples is
    written out, simulated recursively into "model.out", and the
    simulated lines whose leading timestamp falls in ``ptime`` are
    echoed to *output*.

    Fixed for Python 3: ``print >> f, x`` became ``print(x, file=f)``
    and the removed ``string.atof`` became the ``float`` builtin.
    """
    from distutils.text_file import TextFile
    if window == 0:
        self.fit_params(data_sets, init_state=init_state,
                        n_tf_restarts=0, n_fs_restarts=0, output=output)
        return
    first_flag = True
    for data_set in data_sets:
        for i in range(data_set.length - window):
            # Timestamps whose simulated values should be reported.
            ptime = [data_set.time[i + window]]
            if i == 0:
                ptime = ptime + [data_set.time[0]]
                if first_flag:
                    # -1 marks "also echo the *_sim header lines" for
                    # the very first window only.
                    ptime = ptime + [-1]
                    first_flag = False
            ds_window = data_set.subset(range(i, i + window + 1),
                                        file_suffix="w")
            ds_window.write_to_file()
            f = open("model.out", "w")
            self.simulate([ds_window], output=f)
            f.close()
            f = TextFile(filename="model.out")
            simulation_flag = False
            for line in f.readlines():
                if line[-4:] == "_sim":
                    # Start of a simulated-values section.
                    simulation_flag = True
                    if -1 in ptime:
                        print(line, file=output)
                    continue
                if line[:5] == "SSE, ":
                    # End of the simulated-values section.
                    simulation_flag = False
                    continue
                if simulation_flag:
                    fields = line.split(" ")
                    if float(fields[0]) in ptime:
                        print(line, file=output)
            f.close()
def read_requirements_txt(path):
    """
    read only entries from requirements.txt in the form::

        pkg
        pkg==0.1.0
        pkg<=0.1.0
        pkg>=0.1.0

    this parser reads any non-comment, non -* line
    """
    requirements = []
    _path = os.path.join(SETUPPY_PATH, path)
    # Bug fix: `tf` must be pre-bound, otherwise the finally clause
    # raised NameError when the TextFile constructor itself failed.
    tf = None
    try:
        tf = TextFile(_path)
        requirements_txt = (x.lstrip() for x in tf.readlines())
        for line in requirements_txt:
            # Skip option lines such as "-r other.txt" / "-e .".
            if not line.startswith("-"):
                requirements.append(line)
    finally:
        if tf is not None:
            tf.close()
    return requirements
def parse_makefile(fn, existing_modules=None):
    """Parse an Android.mk-style Makefile into a ModulePool.

    Returns a ModulePool of the modules declared in *fn*; when
    *existing_modules* is given, its pool seeds the result so modules
    can be merged across files.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
    local_modules = ModulePool()
    if existing_modules:
        # Seed with previously discovered modules so re-declarations merge.
        local_modules.pool.update(existing_modules.pool)
    variable_pool = AndroidMKVariablePool(fn)
    current_module = None
    within_func_definition = False
    while 1:
        line = fp.readline()
        if line is None:
            # eof
            break
        if line.upper() == "include $(CLEAR_VARS)".upper():
            # CLEAR_VARS starts a fresh module section.
            current_module = None
        if line.lower().startswith("define "):
            within_func_definition = True
        if line.lower().strip() == "endef":
            within_func_definition = False
        if within_func_definition:
            # don't parse the line if we're within function definition
            continue
        match = makefile_parser.VariablePool.VAR_ASSIGNMENT_RX.match(line)
        if match:
            (var_name, var_value) = variable_pool.add_variable(match)
            if not current_module:
                current_module = Module()
                current_module.directory = os.path.dirname(fn)
            if var_name == "LOCAL_MODULE":
                var_value = variable_pool.eval_expression(var_value)
                temp_module = local_modules.find_module(var_value)
                if temp_module and temp_module != current_module:
                    # Merge what we collected so far into the known module.
                    temp_module.src = current_module.src
                    temp_module.depends = current_module.depends
                    current_module = temp_module
                current_module.name = var_value
                local_modules.add_module(current_module)
            if var_name == "LOCAL_SRC_FILES":
                if current_module:
                    current_module.src = var_value
            if var_name == "LOCAL_STATIC_LIBRARIES" or \
               var_name == "LOCAL_SHARED_LIBRARIES":
                if current_module:
                    for i in var_value.split():
                        current_module.depends.append(i)
    fp.close()
    # update values in modules: expand each dependency expression; an
    # expansion may produce several names (extend) or none (drop).
    for (key, item) in local_modules.pool.items():
        number = len(item.depends)
        index = 0
        while index < number:
            value = item.depends[index]
            value = variable_pool.eval_expression(value)
            items = value.split()
            if len(items) > 0:
                item.depends[index] = items[0]
                item.depends.extend(items[1:])
                number += len(items) - 1
                index += 1
            else:
                item.depends.pop(index)
                number -= 1
    #print variable_pool.immediate_variables
    return local_modules
"""Provide access to Python's configuration information.  The specific
configuration variables available depend heavily on the platform and
configuration.
"""
def read_setup_file(filename):
    """Parse a makesetup-style Setup file and return Extension objects.

    The first pass collects "VAR = VALUE" assignments; the second pass
    turns each remaining line (one extension per line, module name
    first) into an Extension.
    """
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)
    from distutils.text_file import TextFile
    from distutils.util import split_quoted
    vars = parse_makefile(filename)
    file = TextFile(filename, strip_comments=1, skip_blanks=1,
                    join_lines=1, lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []
        while 1:
            line = file.readline()
            if line is None:
                # eof
                break
            if _variable_rx.match(line):
                # VAR=VALUE, handled in the first pass
                continue
            if line[0] == line[-1] == '*':
                file.warn("'%s' lines not handled yet" % line)
                continue
            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)
            # One extension per line; the module name is the first word.
            module = words[0]
            ext = Extension(module, [])
            append_next_word = None
            for word in words[1:]:
                if append_next_word is not None:
                    # Previous switch (-rpath/-Xlinker/-Xcompiler)
                    # consumes this word.
                    append_next_word.append(word)
                    append_next_word = None
                    continue
                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]
                if suffix in ('.c', '.cc', '.cpp', '.cxx', '.c++',
                              '.m', '.mm'):
                    ext.sources.append(word)
                elif switch == '-I':
                    ext.include_dirs.append(value)
                elif switch == '-D':
                    # str.find replaces the removed string.find() helper.
                    equals = value.find('=')
                    if equals == -1:
                        # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:
                        # "-DFOO=blah".  Bug fix: the macro value starts
                        # one character after '=', not two ("blah", not
                        # "lah" as value[equals + 2:] produced).
                        ext.define_macros.append(
                            (value[0:equals], value[equals + 1:]))
                elif switch == '-U':
                    ext.undef_macros.append(value)
                elif switch == '-C':
                    # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == '-l':
                    ext.libraries.append(value)
                elif switch == '-L':
                    ext.library_dirs.append(value)
                elif switch == '-R':
                    ext.runtime_library_dirs.append(value)
                elif word == '-rpath':
                    append_next_word = ext.runtime_library_dirs
                elif word == '-Xlinker':
                    append_next_word = ext.extra_link_args
                elif word == '-Xcompiler':
                    append_next_word = ext.extra_compile_args
                elif switch == '-u':
                    ext.extra_link_args.append(word)
                    if not value:
                        append_next_word = ext.extra_link_args
                # NOTE: duplicated '-Xcompiler' / '-u' elif branches that
                # followed here were unreachable (identical earlier
                # conditions always matched first) and were removed.
                elif suffix in ('.a', '.so', '.sl', '.o', '.dylib'):
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)
            extensions.append(ext)
    finally:
        file.close()
    return extensions
def test_class(self):
    """Exercise TextFile's option combinations against TEST_DATA."""
    # old tests moved from text_file.__main__
    # so they are really called by the buildbots
    # result 1: no fancy options
    result1 = ['# test file\n', '\n', 'line 3 \\\n',
               '# intervening comment\n', ' continues on next line\n']
    # result 2: just strip comments
    result2 = ["\n", "line 3 \\\n", " continues on next line\n"]
    # result 3: just strip blank lines
    result3 = ["# test file\n", "line 3 \\\n", "# intervening comment\n",
               " continues on next line\n"]
    # result 4: default, strip comments, blank lines,
    # and trailing whitespace
    result4 = ["line 3 \\", " continues on next line"]
    # result 5: strip comments and blanks, plus join lines (but don't
    # "collapse" joined lines
    result5 = ["line 3 continues on next line"]
    # result 6: strip comments and blanks, plus join lines (and
    # "collapse" joined lines
    result6 = ["line 3 continues on next line"]

    def test_input(count, description, file, expected_result):
        # Shared assertion helper for every option combination below.
        result = file.readlines()
        self.assertEqual(result, expected_result)

    tmpdir = self.mkdtemp()
    filename = os.path.join(tmpdir, "test.txt")
    out_file = open(filename, "w")
    try:
        out_file.write(TEST_DATA)
    finally:
        out_file.close()

    in_file = TextFile(filename, strip_comments=0, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(1, "no processing", in_file, result1)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(2, "strip comments", in_file, result2)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=0, skip_blanks=1,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(3, "strip blanks", in_file, result3)
    finally:
        in_file.close()

    in_file = TextFile(filename)
    try:
        test_input(4, "default processing", in_file, result4)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1)
    try:
        test_input(5, "join lines without collapsing", in_file, result5)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1, collapse_join=1)
    try:
        test_input(6, "join lines with collapsing", in_file, result6)
    finally:
        in_file.close()
def read_setup_file (filename):
    """Parse a makesetup-style Setup file and return Extension objects."""
    from distutils.sysconfig import \
        parse_makefile, expand_makefile_vars, _variable_rx
    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename, strip_comments=1, skip_blanks=1,
                    join_lines=1, lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []
        while 1:
            line = file.readline()
            if line is None:
                # eof
                break
            if _variable_rx.match(line):
                # VAR=VALUE, handled in first pass
                continue
            if line[0] == line[-1] == "*":
                file.warn("'%s' lines not handled yet" % line)
                continue
            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension
            # per line, and it must be the first word of the line.  I
            # have no idea why the old syntax supported multiple
            # extensions per line, as they all wind up being the same.
            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    # The previous switch consumes this word.
                    append_next_word.append(word)
                    append_next_word = None
                    continue
                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]
                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++",
                              ".m", ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    # str.find replaces the deprecated string.find().
                    equals = value.find("=")
                    if equals == -1:
                        # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:
                        # "-DFOO=blah".  Bug fix: the macro value starts
                        # one character after '=', not two ("blah", not
                        # "lah" as value[equals+2:] produced).
                        ext.define_macros.append((value[0:equals],
                                                  value[equals+1:]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":
                    # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        append_next_word = ext.extra_link_args
                # NOTE: duplicated "-Xcompiler" / "-u" elif branches that
                # followed here were unreachable (identical earlier
                # conditions always matched first) and were removed.
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)
            extensions.append(ext)
    finally:
        file.close()
    return extensions
def makefile(fileobj, dct=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.
    """
    fp = TextFile(file=fileobj, strip_comments=1, skip_blanks=1,
                  join_lines=1)
    if dct is None:
        dct = {}
    done = {}      # fully resolved variables
    notdone = {}   # variables whose value still contains a "$" reference
    while 1:
        line = fp.readline()
        if line is None:
            # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = str.strip(v)
            if "$" in v:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    pass
                done[n] = v
                # A later plain assignment overrides a pending one.
                try:
                    del notdone[n]
                except KeyError:
                    pass
    fp.close()
    # do variable interpolation here
    while notdone:
        for name in list(notdone.keys()):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                else:
                    # Undefined variable expands to the empty string.
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = str.strip(value)
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference;
                # just drop it since we can't deal
                del notdone[name]
    # save the results in the global dictionary
    dct.update(done)
    return dct
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
    if g is None:
        g = {}
    done = {}      # fully resolved variables
    notdone = {}   # variables whose value still references another one
    while 1:
        line = fp.readline()
        if line is None:
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # '$$' is a literal '$' in make; ignore it when deciding
            # whether the value still needs interpolation.
            tmpv = v.replace('$$', '')
            if '$' in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v
    # Variable interpolation: resolve pending values round by round.
    while notdone:
        # Bug fix: iterate over a snapshot -- entries are deleted inside
        # the loop, which raises RuntimeError ("dictionary changed size
        # during iteration") on Python 3 when iterating the live view.
        for name in list(notdone.keys()):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # resolved on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                else:
                    done[n] = item = ''
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if '$' in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it
                del notdone[name]
    fp.close()
    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()
    g.update(done)
    return g
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
    if g is None:
        g = {}
    done = {}      # fully resolved variables
    notdone = {}   # variables whose value still references another one
    while 1:
        line = fp.readline()
        if line is None:
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # '$$' is a literal '$' in make; ignore it when deciding
            # whether the value still needs interpolation.
            tmpv = v.replace('$$', '')
            if '$' in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v
    # Variable interpolation: resolve pending values round by round.
    while notdone:
        # Bug fix: iterate over a snapshot -- entries are deleted inside
        # the loop, which raises RuntimeError ("dictionary changed size
        # during iteration") on Python 3 when iterating the live view.
        for name in list(notdone.keys()):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # resolved on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                else:
                    done[n] = item = ''
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if '$' in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it
                del notdone[name]
    fp.close()
    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()
    g.update(done)
    return g
def test_class(self):
    """Exercise TextFile's option combinations against TEST_DATA."""
    # Expected readlines() output for each option combination below.
    result1 = ["# test file\n", "\n", "line 3 \\\n",
               "# intervening comment\n", " continues on next line\n"]
    result2 = ["\n", "line 3 \\\n", " continues on next line\n"]
    result3 = ["# test file\n", "line 3 \\\n", "# intervening comment\n",
               " continues on next line\n"]
    result4 = ["line 3 \\", " continues on next line"]
    result5 = ["line 3 continues on next line"]
    result6 = ["line 3 continues on next line"]

    def test_input(count, description, file, expected_result):
        # Shared assertion helper.
        result = file.readlines()
        self.assertEqual(result, expected_result)

    tmpdir = self.mkdtemp()
    filename = os.path.join(tmpdir, "test.txt")
    out_file = open(filename, "w")
    try:
        out_file.write(TEST_DATA)
    finally:
        out_file.close()

    # 1: no processing at all
    in_file = TextFile(filename, strip_comments=0, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(1, "no processing", in_file, result1)
    finally:
        in_file.close()

    # 2: just strip comments
    in_file = TextFile(filename, strip_comments=1, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(2, "strip comments", in_file, result2)
    finally:
        in_file.close()

    # 3: just strip blank lines
    in_file = TextFile(filename, strip_comments=0, skip_blanks=1,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(3, "strip blanks", in_file, result3)
    finally:
        in_file.close()

    # 4: default options
    in_file = TextFile(filename)
    try:
        test_input(4, "default processing", in_file, result4)
    finally:
        in_file.close()

    # 5: join continued lines, without collapsing the join
    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1)
    try:
        test_input(5, "join lines without collapsing", in_file, result5)
    finally:
        in_file.close()

    # 6: join continued lines and collapse the join
    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1, collapse_join=1)
    try:
        test_input(6, "join lines with collapsing", in_file, result6)
    finally:
        in_file.close()
def test_class(self):
    """Exercise TextFile's option combinations against TEST_DATA."""
    # old tests moved from text_file.__main__
    # so they are really called by the buildbots
    # result 1: no fancy options
    result1 = [
        '# test file\n', '\n', 'line 3 \\\n', '# intervening comment\n',
        ' continues on next line\n'
    ]
    # result 2: just strip comments
    result2 = ["\n", "line 3 \\\n", " continues on next line\n"]
    # result 3: just strip blank lines
    result3 = [
        "# test file\n", "line 3 \\\n", "# intervening comment\n",
        " continues on next line\n"
    ]
    # result 4: default, strip comments, blank lines,
    # and trailing whitespace
    result4 = ["line 3 \\", " continues on next line"]
    # result 5: strip comments and blanks, plus join lines (but don't
    # "collapse" joined lines
    result5 = ["line 3 continues on next line"]
    # result 6: strip comments and blanks, plus join lines (and
    # "collapse" joined lines
    result6 = ["line 3 continues on next line"]

    def test_input(count, description, file, expected_result):
        # Shared assertion helper for every option combination below.
        result = file.readlines()
        self.assertEqual(result, expected_result)

    tmpdir = self.mkdtemp()
    filename = os.path.join(tmpdir, "test.txt")
    out_file = open(filename, "w")
    try:
        out_file.write(TEST_DATA)
    finally:
        out_file.close()

    in_file = TextFile(filename, strip_comments=0, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(1, "no processing", in_file, result1)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(2, "strip comments", in_file, result2)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=0, skip_blanks=1,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(3, "strip blanks", in_file, result3)
    finally:
        in_file.close()

    in_file = TextFile(filename)
    try:
        test_input(4, "default processing", in_file, result4)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1)
    try:
        test_input(5, "join lines without collapsing", in_file, result5)
    finally:
        in_file.close()

    in_file = TextFile(filename, strip_comments=1,
                       skip_blanks=1, join_lines=1, rstrip_ws=1,
                       collapse_join=1)
    try:
        test_input(6, "join lines with collapsing", in_file, result6)
    finally:
        in_file.close()
def read_setup_file(filename):
    """Reads a Setup file and returns Extension instances."""
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)
    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename, strip_comments=1, skip_blanks=1,
                    join_lines=1, lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []
        while True:
            line = file.readline()
            if line is None:
                # eof
                break
            if _variable_rx.match(line):
                # VAR=VALUE, handled in first pass
                continue
            if line[0] == line[-1] == "*":
                file.warn("'%s' lines not handled yet" % line)
                continue
            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension
            # per line, and it must be the first word of the line.  I
            # have no idea why the old syntax supported multiple
            # extensions per line, as they all wind up being the same.
            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    # The previous switch consumes this word.
                    append_next_word.append(word)
                    append_next_word = None
                    continue
                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]
                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++",
                              ".m", ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:
                        # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:
                        # "-DFOO=blah".  Bug fix: the macro value starts
                        # one character after '=', not two ("blah", not
                        # "lah" as value[equals + 2:] produced).
                        ext.define_macros.append(
                            (value[0:equals], value[equals + 1:]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":
                    # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)
            extensions.append(ext)
    finally:
        file.close()
    return extensions
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    # surrogateescape keeps undecodable bytes round-trippable.
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1,
                  errors="surrogateescape")
    if g is None:
        g = {}
    done = {}      # fully resolved variables
    notdone = {}   # variables whose value still references another one
    while True:
        line = fp.readline()
        if line is None:
            # eof
            break
        m = re.match(_variable_rx, line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')
            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v
    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
    # do variable interpolation here
    while notdone:
        for name in list(notdone):
            value = notdone[name]
            m = re.search(_findvar1_rx, value) or re.search(
                _findvar2_rx, value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                elif n in renamed_variables:
                    if name.startswith(
                            'PY_') and name[3:] in renamed_variables:
                        item = ""
                    elif 'PY_' + n in notdone:
                        found = False
                    else:
                        item = str(done['PY_' + n])
                else:
                    # Undefined variable expands to the empty string.
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]
                        if name.startswith('PY_') \
                           and name[3:] in renamed_variables:
                            name = name[3:]
                            if name not in done:
                                done[name] = value
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]
    fp.close()
    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()
    # save the results in the global dictionary
    g.update(done)
    return g
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is used
    instead of a new dictionary.

    Modernized off Python 2-only APIs: ``dict.has_key`` became ``in``
    and ``string.strip(v)`` became ``v.strip()`` -- both forms also run
    on Python 2.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
    if g is None:
        g = {}
    done = {}      # fully resolved variables
    notdone = {}   # variables whose value still references another one
    while 1:
        line = fp.readline()
        if line is None:
            # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            if "$" in v:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    pass
                done[n] = v
    # do variable interpolation here
    while notdone:
        # Iterate a snapshot: entries are deleted inside the loop
        # (required on Python 3, harmless on Python 2).
        for name in list(notdone.keys()):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                else:
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]
    fp.close()
    # save the results in the global dictionary
    g.update(done)
    return g
def test_class(self):
    """Exercise TextFile's option combinations against TEST_DATA."""
    # Expected readlines() output for each option combination below.
    result1 = ['# test file\n', '\n', 'line 3 \\\n',
               '# intervening comment\n', ' continues on next line\n']
    result2 = ['\n', 'line 3 \\\n', ' continues on next line\n']
    result3 = ['# test file\n', 'line 3 \\\n', '# intervening comment\n',
               ' continues on next line\n']
    result4 = ['line 3 \\', ' continues on next line']
    result5 = ['line 3 continues on next line']
    result6 = ['line 3 continues on next line']

    def test_input(count, description, file, expected_result):
        # Shared assertion helper.
        result = file.readlines()
        self.assertEqual(result, expected_result)

    tmpdir = self.mkdtemp()
    filename = os.path.join(tmpdir, 'test.txt')
    out_file = open(filename, 'w')
    try:
        out_file.write(TEST_DATA)
    finally:
        out_file.close()

    # 1: no processing at all
    in_file = TextFile(filename, strip_comments=0, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(1, 'no processing', in_file, result1)
    finally:
        in_file.close()

    # 2: just strip comments
    in_file = TextFile(filename, strip_comments=1, skip_blanks=0,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(2, 'strip comments', in_file, result2)
    finally:
        in_file.close()

    # 3: just strip blank lines
    in_file = TextFile(filename, strip_comments=0, skip_blanks=1,
                       lstrip_ws=0, rstrip_ws=0)
    try:
        test_input(3, 'strip blanks', in_file, result3)
    finally:
        in_file.close()

    # 4: default options
    in_file = TextFile(filename)
    try:
        test_input(4, 'default processing', in_file, result4)
    finally:
        in_file.close()

    # 5: join continued lines, without collapsing the join
    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1)
    try:
        test_input(5, 'join lines without collapsing', in_file, result5)
    finally:
        in_file.close()

    # 6: join continued lines and collapse the join
    in_file = TextFile(filename, strip_comments=1, skip_blanks=1,
                       join_lines=1, rstrip_ws=1, collapse_join=1)
    try:
        test_input(6, 'join lines with collapsing', in_file, result6)
    finally:
        in_file.close()
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.

    ``$(VAR)`` / ``${VAR}`` references are resolved iteratively,
    falling back to the environment; variables with a ``PY_`` prefix
    get special handling so that the unprefixed name resolves too.
    """
    from distutils.text_file import TextFile
    # surrogateescape lets us round-trip makefiles with undecodable bytes
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape")
    if g is None:
        g = {}
    done = {}     # fully resolved name -> value
    notdone = {}  # values still containing $-references
    while True:
        line = fp.readline()
        if line is None:  # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')
            if "$" in tmpv:
                # real variable reference remains -- resolve it later
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    # do variable interpolation here
    while notdone:
        # snapshot of keys so entries can be deleted during iteration
        for name in list(notdone):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                elif n in renamed_variables:
                    if name.startswith('PY_') and name[3:] in renamed_variables:
                        # PY_X referencing X: avoid infinite self-expansion
                        item = ""
                    elif 'PY_' + n in notdone:
                        found = False
                    else:
                        item = str(done['PY_' + n])
                else:
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        # more references remain -- keep it in notdone
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]

                        # also publish PY_X under the plain name X,
                        # unless X was independently defined
                        if name.startswith('PY_') \
                           and name[3:] in renamed_variables:
                            name = name[3:]
                            if name not in done:
                                done[name] = value
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]

    fp.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    g.update(done)
    return g
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.

    Variable references in values are expanded make-style, with the
    process environment as a fallback; unresolvable references are
    silently discarded.
    """
    from distutils.text_file import TextFile
    # TextFile strips comments/blanks and joins continuation lines.
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)

    if g is None:
        g = {}
    done = {}     # resolved name -> value
    notdone = {}  # values that still contain $-references

    try:
        while True:
            line = fp.readline()
            if line is None:  # eof
                break
            m = _variable_rx.match(line)
            if m:
                n, v = m.group(1, 2)
                # v.strip() instead of string.strip(v): the string-module
                # function form was removed in Python 3.
                v = v.strip()
                if "$" in v:
                    notdone[n] = v
                else:
                    try:
                        v = int(v)
                    except ValueError:
                        pass
                    done[n] = v
    finally:
        # Reading is finished; close before interpolation so the file
        # handle cannot leak if substitution raises.
        fp.close()

    # do variable interpolation here
    while notdone:
        # list() snapshot: deleting from a dict while iterating its live
        # keys view raises RuntimeError on Python 3.
        for name in list(notdone):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                else:
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]

    # save the results in the global dictionary
    g.update(done)
    return g
def read_setup_file(filename):
    """Parse a makesetup-style "Setup" file and return a list of
    Extension instances.

    Variable assignments are gathered in a first pass (parse_makefile)
    and expanded into each module line before that line is split into
    sources, compiler switches and linker switches.
    """
    from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx
    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass: collect VAR=VALUE assignments for expansion below.
    # (renamed from `vars`/`file` to avoid shadowing builtins)
    makefile_vars = parse_makefile(filename)

    setup_file = TextFile(filename,
                          strip_comments=1, skip_blanks=1, join_lines=1,
                          lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []
        while True:
            line = setup_file.readline()
            if line is None:  # eof
                break
            if _variable_rx.match(line):
                # VAR=VALUE line -- already handled by parse_makefile
                continue

            if line[0] == line[-1] == '*':
                setup_file.warn("'%s' lines not handled yet" % line)
                continue

            line = expand_makefile_vars(line, makefile_vars)
            words = split_quoted(line)

            # First word is the module name; the rest are sources,
            # compiler switches and linker switches.
            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    # previous switch (-rpath/-Xlinker/-Xcompiler/-u)
                    # consumes this word as its argument
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2]
                value = word[2:]

                if suffix in ('.c', '.cc', '.cpp', '.cxx', '.c++',
                              '.m', '.mm'):
                    ext.sources.append(word)
                elif switch == '-I':
                    ext.include_dirs.append(value)
                elif switch == '-D':
                    # str method instead of Python-2-only string.find()
                    equals = value.find('=')
                    if equals == -1:  # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:             # "-DFOO=blah"
                        # was value[equals + 2:], which silently dropped
                        # the first character of the macro value
                        # (off-by-one fix)
                        ext.define_macros.append((value[0:equals],
                                                  value[equals + 1:]))
                elif switch == '-U':
                    ext.undef_macros.append(value)
                elif switch == '-C':
                    # only here because makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == '-l':
                    ext.libraries.append(value)
                elif switch == '-L':
                    ext.library_dirs.append(value)
                elif switch == '-R':
                    ext.runtime_library_dirs.append(value)
                elif word == '-rpath':
                    append_next_word = ext.runtime_library_dirs
                elif word == '-Xlinker':
                    append_next_word = ext.extra_link_args
                elif word == '-Xcompiler':
                    append_next_word = ext.extra_compile_args
                elif switch == '-u':
                    # NOTE: duplicated, unreachable copies of the
                    # -Xcompiler and -u branches were removed here.
                    ext.extra_link_args.append(word)
                    if not value:
                        # "-u SYMBOL": symbol follows as next word
                        append_next_word = ext.extra_link_args
                elif suffix in ('.a', '.so', '.sl', '.o', '.dylib'):
                    ext.extra_objects.append(word)
                else:
                    setup_file.warn("unrecognized argument '%s'" % word)

            extensions.append(ext)
    finally:
        setup_file.close()

    return extensions
def parse_makefile(fn, conf, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.

    Unlike the plain variant, this one also understands ``+=``
    append assignments and conditional assignments (matched by
    _variable_conditional_rx); `conf` maps condition names to truthy
    flags, and a conditional variable N is stored as 'N-yes' or 'N-no'
    depending on its condition.
    """
    from distutils.text_file import TextFile
    # surrogateescape preserves undecodable bytes in the makefile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape")
    if g is None:
        g = {}
    done = {}     # resolved name -> value
    notdone = {}  # values still containing $-references
    while True:
        line = fp.readline()
        if line is None:  # eof
            break
        n = cond = app = v = None
        m = _variable_rx.match(line)
        if m:
            # plain assignment: name, append-flag, value
            n, app, v = m.group(1, 2, 3)
        else:
            m = _variable_conditional_rx.match(line)
            if m:
                # conditional assignment: name, condition, append-flag, value
                n, cond, app, v = m.group(1, 2, 3, 4)
        if v:
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')
            if cond:
                # suffix the name with the condition's truth value
                boolean = cond in conf and conf[cond]
                n += ('-yes' if boolean else '-no')
            if "$" in tmpv:
                # real variable reference remains -- resolve later
                notdone[n] = v
            else:
                v = v.replace('$$', '$')
                if app:
                    # += append; space-join with any earlier value
                    if n not in done:
                        done[n] = v
                    else:
                        done[n] += ' ' + v
                else:
                    done[n] = v

    # hacky. just assume they were all +=
    notdone_done = {}

    # do variable interpolation here
    while notdone:
        # snapshot of keys so entries can be deleted during iteration
        for name in list(notdone):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                else:
                    # park empties in notdone_done; merged below
                    notdone_done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        notdone_done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]

    # merge deferred results, appending to any existing value
    # (consistent with the "assume they were all +=" hack above)
    for k in notdone_done:
        if k in done:
            done[k] += ' ' + notdone_done[k]
        else:
            done[k] = notdone_done[k]

    fp.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    g.update(done)
    return g