def find_errors(self, output):
    """Parse errors from linter's output.

    `output` is expected to be a JSON array of rst-lint result objects.
    Yields one `LintMatch` per entry; `level >= 3` maps to 'error',
    anything lower to 'warning'.  On malformed JSON, logs the raw
    output, notifies failure, and yields nothing.
    """
    try:
        content = json.loads(output)
    except ValueError:
        logger.error(
            "JSON Decode error: We expected JSON from 'rst-lint', "
            "but instead got this:\n{}\n\n".format(output))
        self.notify_failure()
        return

    for entry in content:
        # (removed a leftover `print(entry)` debug statement that wrote
        # every parsed entry to stdout on each lint pass)
        filename = entry.get('source', None)
        yield LintMatch(
            match=None,
            line=entry['line'] - 1,  # zero indexed
            col=None,
            error=None,
            warning=None,
            message=entry['message'],
            near=None,
            filename=filename,
            error_type='error' if entry['level'] >= 3 else 'warning',
            code=None)
def find_errors(self, output):
    """Parse errors from linter's output."""
    try:
        # It is possible that users output debug messages to stdout, so we
        # only parse the last line, which is hopefully the actual eslint
        # output.
        # https://github.com/SublimeLinter/SublimeLinter-eslint/issues/251
        payload = output.rstrip().split('\n')[-1]
        content = json.loads(payload)
    except ValueError:
        logger.error("JSON Decode error: We expected JSON from 'eslint', "
                     "but instead got this:\n{}\n\n"
                     "Be aware that we only parse the last line of above "
                     "output.".format(output))
        self.notify_failure()
        return

    if logger.isEnabledFor(logging.INFO):
        import pprint
        logger.info(
            '{} output:\n{}'.format(self.name, pprint.pformat(content)))

    for file_result in content:
        filename = file_result.get('filePath', None)
        if filename == '<text>':
            # eslint reports stdin input under the pseudo-path '<text>'
            filename = 'stdin'

        for item in file_result['messages']:
            # Skip eslint's informational "File ignored" notices.
            if item['message'].startswith('File ignored'):
                continue

            # Entries without a line cannot be positioned in the view;
            # surface them in the log instead.
            if 'line' not in item:
                logger.error(item['message'])
                self.notify_failure()
                continue

            col = item.get('column', None)
            if col is not None:
                # apply line_col_base manually
                col = col - 1

            yield LintMatch(
                match=item,
                filename=filename,
                line=item['line'] - 1,  # apply line_col_base manually
                col=col,
                error_type='error' if item['severity'] == 2 else 'warning',
                code=item.get('ruleId', ''),
                message=item['message'],
            )
def find_errors(self, output):
    """
    Override find_errors() so we can parse the JSON instead of using
    a regex.

    Staticcheck reports errors as a stream of JSON objects, one per
    line, so we parse them one line at a time and yield a `LintMatch`
    for each.  Lines that fail to parse are logged and skipped.
    """
    for raw_line in output.splitlines():
        try:
            data = json.loads(raw_line)
        except Exception as e:
            logger.warning(e)
            self.notify_failure()
            # Bug fix: the original fell through here, dereferencing
            # `data` from the previous iteration (or raising NameError
            # on the first line).  Skip unparseable lines instead.
            continue

        message = data['message']
        # Clean up the dependency message.
        if message.startswith("could not analyze dependency"):
            message = message.split('[')[0]

        # Apply line_col_base and ensure we don't yield negative
        # line/col values.
        # NOTE(review): column also subtracts line_col_base[0], not
        # [1], matching the original — confirm whether that is intended.
        line = max(data['location']['line'] - self.line_col_base[0], 0)
        col = max(data['location']['column'] - self.line_col_base[0], 0)

        yield LintMatch(
            code=data['code'],
            filename=data['location']['file'],
            line=line,
            col=col,
            error_type=data['severity'],
            message=message,
        )
def find_errors(self, output):
    """Parse CSL's JSON output and yield one LintMatch per message."""
    try:
        parsed = json.loads(output)
    except ValueError:
        logger.error("JSON Decode error: We expected JSON from CSL, "
                     "but instead got this:\n{}\n\n".format(output))
        self.notify_failure()
        return

    for issue in parsed['messages']:
        # Prefer the first line; fall back to the last line when the
        # message only reports where the problem ends.
        if 'firstLine' in issue:
            line = issue['firstLine'] - self.line_col_base[0]
        else:
            line = issue['lastLine'] - self.line_col_base[0]

        # Column defaults to 0 when CSL gives no firstColumn.
        if 'firstColumn' in issue:
            col = issue['firstColumn'] - self.line_col_base[1]
        else:
            col = 0

        yield LintMatch(
            match=issue,
            line=line,
            col=col,
            message=issue['message'],
            error_type='error',
            code='',
        )