def check_text(text, temp_root, logger=None):
    """Check code for style requirements using PyLama.

    Writes *text* to a temporary ``.py`` file inside *temp_root*, runs the
    configured linters over it, and returns the parsed, sorted error list.

    :param text: Python source code to analyze.
    :param temp_root: directory in which the temporary work directory is made.
    :param logger: optional logger; results are emitted at DEBUG level.
    :return: list of error dicts sorted by (line, place).
    """
    import py_compile

    # A fresh temporary directory scopes the temp file and is cleaned up
    # automatically when the context exits.
    with tempfile.TemporaryDirectory(dir=temp_root) as temp_dir:
        # delete=False so the file survives close(); the surrounding
        # TemporaryDirectory removes it afterwards.
        code_file = tempfile.NamedTemporaryFile(
            suffix='.py', dir=temp_dir, mode='w', delete=False)
        code_file.write(text)
        # Close so the linters (and the compiler below) can open it.
        code_file.close()

        # First check whether the file byte-compiles at all, i.e. whether it
        # is free of syntax errors.  NOTE: this must be py_compile.compile()
        # -- the builtin compile() has a different signature and no
        # ``doraise`` keyword, so the original call always raised TypeError
        # and reported every submission as uncompilable.
        compiled = True
        try:
            py_compile.compile(code_file.name, doraise=True)
        except (py_compile.PyCompileError, SyntaxError, ValueError):
            compiled = False

        # Configure and run pylama on the temp directory.
        pylama_options = {
            'linters': ['pep257', 'pydocstyle', 'pycodestyle', 'pyflakes',
                        'pylint'],
            'ignore': list(ignored_errors.keys()),
        }
        pylama_path = temp_dir
        options = parse_options([pylama_path], **pylama_options)
        errors = check_path(options, rootdir='.')

        # Parse and sort the errors received (by line, then column).
        results = pylama_parser(errors, compiled)
        results.sort(key=lambda x: (int(x['line']), int(x['place'])))
        if logger:
            logger.debug(results)
        return results
def test_code_quality():
    """Test various code quality metrics."""
    previous_dir = os.getcwd()
    try:
        # Run pylama from the project root so relative paths resolve.
        project_root = os.path.dirname(os.path.dirname(__file__))
        os.chdir(project_root)
        sources = get_python_source_paths(project_root)
        lint_options = parse_options(sources, **PYLAMA_OPTION_OVERRIDES)
        found = check_path(lint_options, rootdir='.')
        if found:
            divider = '-' * 80
            print(divider)
            for err in found:
                print_pylama_error(err, project_root)
            print(divider)
        assert not found, "%s code quality errors detected." % len(found)
    finally:
        # Always restore the caller's working directory.
        os.chdir(previous_dir)
def process_paths(options, candidates=None, error=True):
    """Process files and log errors.

    Runs pylama over the configured paths, normalizes each error record,
    logs it, and (by default) exits with a non-zero status when any error
    was found.

    :param options: parsed pylama options object (must expose ``abspath``).
    :param candidates: optional explicit list of files to check.
    :param error: when True, terminate the process with exit code 1 if any
        errors were found (0 otherwise).
    :return: the list of pylama error objects (only when *error* is False).
    """
    errors = pylama.check_path(
        options, rootdir=pylamaconfig.CURDIR, candidates=candidates)
    pattern = "%(filename)s:%(lnum)s:%(col)s:%(type)s:%(number)s:%(text)s"
    for er in errors:
        text = er._info["text"].split()
        # Guard against an empty message: text[0] would raise IndexError.
        if text and PATTERN_NUMBER.match(text[0]):
            # The message starts with its error code; strip it, since the
            # code is reported separately via the "number" field.
            er._info["text"] = " ".join(text[1:])
        if not er._info["number"]:
            # Placeholder code for linters that emit no error number.
            # (Plain literal: the original f-string had no placeholders.)
            er._info["number"] = "0000"
        if options.abspath:
            er._info["filename"] = ospath.abspath(er.filename)
        # Lazy %-style formatting: the dict is interpolated by logging.
        LOGGER.warning(pattern, er._info)
    if error:
        sys.exit(int(bool(errors)))
    return errors
def lint(path):
    """Run pyflakes over *path* (async mode) and return the pylama errors."""
    settings = {'linters': ['pyflakes'], 'async': True}
    parsed = parse_options([path], **settings)
    return check_path(parsed, rootdir=".")
def test_checkpath():
    """check_path finds errors in dummy.py and reports a bare filename."""
    target = op.abspath('dummy.py')
    errors = check_path(parse_options([target]))
    assert errors
    assert errors[0].filename == 'dummy.py'
def python_file_to_json_meta(python_file_path):
    """Take python source code string and extract meta-data as json file.

    Reads the file at *python_file_path* and builds a dict of metadata:
    paths, size/line statistics, filesystem permissions, naive content
    heuristics (prints, tabs, shebang, set_trace), a SHA-1 of the source,
    PyLama lint results, TODO/FIXME markers, fades markers, URLs found in
    the text, and whatever PyParse extracts from the file.
    """
    python_file_path = os.path.abspath(python_file_path)
    log.debug("INPUT: Reading Python file {0}.".format(python_file_path))
    # utf-8-sig transparently strips a BOM if the file has one.
    with open(python_file_path, encoding="utf-8-sig") as python_file:
        python_code, json_meta = python_file.read(), {}

    # --- Provenance and path information ---
    json_meta["generator"] = __doc__.splitlines()[0] + " " + __version__
    json_meta["relpath"] = os.path.relpath(python_file_path)
    json_meta["basename"] = os.path.basename(python_file_path)
    json_meta["dirname"], all_fades = os.path.dirname(python_file_path), []
    json_meta["fullpath"], json_meta["is_index"] = python_file_path, False

    # --- Size and content statistics ---
    json_meta["lines_total"] = len(python_code.splitlines())
    # Characters excluding newline characters.
    json_meta["characters"] = len(python_code.replace("\n", ""))
    json_meta["kilobytes"] = int(os.path.getsize(python_file_path) / 1024)
    # Non-blank lines that are not pure comment lines.
    json_meta["lines_code"] = len([_ for _ in python_code.splitlines() if len(
        _.strip()) and not _.strip().startswith("#")])
    json_meta["words"] = len([_ for _ in re.sub(
        "[^a-zA-Z0-9 ]", "", python_code).split(" ") if _ != ""])
    # NOTE: counts punctuation characters anywhere in the file, including
    # inside strings and comments.
    json_meta["punctuations"] = len(
        [_ for _ in python_code if _ in punctuation])

    # --- Filesystem attributes ---
    # Last three octal digits of st_mode, stored as a plain int (e.g. 644).
    json_meta["permissions"] = int(oct(os.stat(python_file_path).st_mode)[-3:])
    json_meta["writable"] = os.access(python_file_path, os.W_OK)
    json_meta["executable"] = os.access(python_file_path, os.X_OK)
    json_meta["readable"] = os.access(python_file_path, os.R_OK)
    json_meta["symlink"] = os.path.islink(python_file_path)
    json_meta["sha1"] = sha1(python_code.encode("utf-8")).hexdigest()

    # --- Substring heuristics (cheap, may false-positive inside strings) ---
    json_meta["import_procedural"] = "__import__(" in python_code
    json_meta["has_set_trace"] = ".set_trace()" in python_code
    json_meta["has_print"] = "print(" in python_code
    json_meta["has_tab"] = "\t" in python_code
    # NOTE(review): findall returns a list (truthy/falsy), not a bool, and
    # '^' only matches the very start of the file without re.MULTILINE.
    json_meta["has_shebang"] = re.findall('^#!/.*python', python_code)

    # --- Timestamps (ISO format, microseconds stripped) ---
    json_meta["accessed"] = datetime.utcfromtimestamp(os.path.getatime(
        python_file_path)).isoformat(" ").split(".")[0]
    json_meta["modified"] = datetime.utcfromtimestamp(os.path.getmtime(
        python_file_path)).isoformat(" ").split(".")[0]

    # --- PyLama lint results ---
    # Workaround for a mysterious "file not found" on PyLama: run it from
    # the file's own directory, then restore the working directory.
    old_dir = os.getcwd()
    os.chdir(os.path.dirname(python_file_path))
    json_meta["pylama"] = [
        pylama_error.__dict__["_info"]  # dict with PyLama Errors from linters
        for pylama_error in check_path(parse_options([python_file_path]))
    ] if check_path and parse_options else []  # empty list if no PyLama
    os.chdir(old_dir)  # restore cwd (see workaround note above)
    if len(json_meta["pylama"]) and json_meta["lines_total"]:
        # Rough "defect density": source lines per reported lint error.
        json_meta["lines_per_bug"] = int(
            json_meta["lines_total"] / len(json_meta["pylama"]))

    # --- TODO/FIXME/OPTIMIZE/BUG markers (same keys as the pylama list) ---
    regex_for_todo, all_todo = r"( # TODO| # FIXME| # OPTIMIZE| # BUG)", []
    for index, line in enumerate(python_code.splitlines()):
        if re.findall(regex_for_todo, line):
            all_todo.append({
                "lnum": index + 1, "text": line.strip(),
                "type": re.findall(regex_for_todo, line)[0].replace(
                    "#", "").strip().lower()})
    if len(all_todo):
        json_meta["todo"] = all_todo  # all TODO/FIXME/etc markers in the code

    # --- fades dependency markers: https://github.com/PyAr/fades ---
    for index, line in enumerate(python_code.splitlines()):
        if re.findall(r"( # fades)", line):
            all_fades.append({"lnum": index + 1, "text": line.strip(),
                              "type": line.split("#")[1].strip()})
    if len(all_fades):
        json_meta["fades"] = all_fades

    # URLs appearing anywhere in the source text.
    json_meta["links"] = re.findall(r"(?P<url>https?://[^\s]+)", python_code)

    # Merge in whatever PyParse extracts from the file
    # ("some_code_entity": "value_of_that_entity", ...).
    for key, value in PyParse().parse_file(python_file_path).items():
        json_meta[key] = value

    return json_meta  # return the Big Ol' JSON