Example #1
    def get_tarred_file(self):
        tar_file_link, intern_path = self.file_link.split("//")
        log.debug("Get file {} from tar.gz archive {}".format(
            intern_path, tar_file_link))
        tar_base_name = os.path.basename(tar_file_link)
        tar_prefix = tar_base_name.split(".")[0]
        tar_root_dir = os.path.join(self.job_dir, tar_prefix)
        self.file_path = os.path.join(tar_root_dir, intern_path)

        if os.path.exists(self.file_path + ".gz"):
            log.debug("File {} is already downloaded".format(self.file_path +
                                                             ".gz"))
            return self.file_path + ".gz"
        if not os.path.exists(tar_root_dir):
            os.makedirs(tar_root_dir)
        tar_file_path = os.path.join(self.job_dir, tar_base_name)
        if not os.path.exists(tar_file_path):
            web = Web(url=self.file_url)
            req = web.get()
            if req is None or int(req.status_code) != 200:
                return None
            else:
                with open(tar_file_path, "w") as f:
                    f.write(req.text)
        if self._extract(tar_file_path, tar_root_dir, intern_path):
            with open(self.file_path, 'r') as f:
                with gzip.open(self.file_path + ".gz", 'wt') as zipped_file:
                    zipped_file.writelines(f)
            os.remove(self.file_path)
            self.file_path += ".gz"
            return self.file_path
        else:
            return None
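
A note on the link format this method assumes: file_link packs the archive link and the path inside the archive into one string, joined by a double slash. A tiny illustration, with invented values:

    # Invented file_link value; archive link and inner path joined by "//".
    file_link = "logs.tar.xz//var/log/messages"
    tar_file_link, intern_path = file_link.split("//")
    # tar_file_link == "logs.tar.xz", intern_path == "var/log/messages"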
Example #2
    def _get_console(self, job):
        console_name = config.ACTIVE_PLUGIN_CONFIG.console_name
        if isinstance(console_name, list):
            console_name = console_name[0]
        path = os.path.join(self.down_path, job["log_hash"], console_name)
        if os.path.exists(path):
            log.debug("Console is already here: {}".format(path))
            return path

        console_names = config.ACTIVE_PLUGIN_CONFIG.console_name
        if not isinstance(console_names, list):
            console_names = [console_names]
        for console_name in console_names:
            web = Web(job["log_url"] + "/" + console_name, timeout=7)
            log.debug(
                "Trying to download console: {}".format(job["log_url"] + "/" +
                                                        console_name))
            req = web.get(ignore404=True)
            if req is None or int(req.status_code) != 200:
                log.error(
                    "Failed to retrieve console: {}".format(job["log_url"] +
                                                            "/" +
                                                            console_name))
            else:
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                with gzip.open(path, "wb") as f:
                    f.write(req.content)
                break
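        # for/else: this else belongs to the for loop above and runs only
        # when the loop finished without "break", i.e. when no console
        # candidate could be downloaded.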
        else:
            return None
        return path
Example #3
File: utils.py Project: sshnaidm/sova
    def get(self, ignore404=False):
        """Get web file

            Sometimes console.html is gzipped on logs server and console.html
            is not available anymore, so here it silently fails when trying to
            download console.html and then tries to get console.html.gz
            We don't want redundant error messages in console

        :param ignore404: not to show error message if got 404 error
        :return: request obj
        """
        log.debug("GET {url} with ignore404={i}".format(
            url=self.url, i=str(ignore404)))
        try:
            req = requests.get(self.url, timeout=self.timeout)
        except ConnectionError:
            log.error("Connection error when retriving {}".format(self.url))
            return None
        except Timeout:
            log.error("Timeout reached when retriving {}".format(self.url))
            return None
        except Exception as e:
            log.error("Unknown error when retriving {}: {}".format(
                self.url, str(e)))
            return None
        if int(req.status_code) != 200:
            if not (ignore404 and int(req.status_code) == 404):
                log.warn("Page {url} got status {code}".format(
                    url=self.url, code=req.status_code))
        return req
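
A minimal usage sketch for this method; the import path and URL are assumptions, not part of the project:

    # Hypothetical import path and URL, for illustration only.
    from utils import Web

    web = Web(url="http://logs.example.com/42/console.html")
    req = web.get(ignore404=True)       # a 404 here is expected, stay quiet
    if req is None or req.status_code == 404:
        # Fall back to the gzipped console, as the docstring describes.
        req = Web(url="http://logs.example.com/42/console.html.gz").get()
    if req is not None and req.status_code == 200:
        print(req.text[:100])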
Example #4
File: utils.py Project: sshnaidm/sova
    def get_tarred_file(self):
        tar_file_link, intern_path = self.file_link.split("//")
        log.debug("Get file {} from tar.gz archive {}".format(intern_path,
                                                              tar_file_link))
        tar_base_name = os.path.basename(tar_file_link)
        tar_prefix = tar_base_name.split(".")[0]
        tar_root_dir = os.path.join(self.job_dir, tar_prefix)
        self.file_path = os.path.join(tar_root_dir, intern_path)

        if os.path.exists(self.file_path + ".gz"):
            log.debug("File {} is already downloaded".format(
                self.file_path + ".gz"))
            return self.file_path + ".gz"
        if not os.path.exists(tar_root_dir):
            os.makedirs(tar_root_dir)
        tar_file_path = os.path.join(self.job_dir, tar_base_name)
        if not os.path.exists(tar_file_path):
            web = Web(url=self.file_url)
            req = web.get()
            if req is None or int(req.status_code) != 200:
                return None
            else:
                with open(tar_file_path, "w") as f:
                    f.write(req.text)
        if self._extract(tar_file_path, tar_root_dir, intern_path):
            with open(self.file_path, 'r') as f:
                with gzip.open(self.file_path + ".gz", 'wt') as zipped_file:
                    zipped_file.writelines(f)
            os.remove(self.file_path)
            self.file_path += ".gz"
            return self.file_path
        else:
            return None
Example #5
    def get(self, ignore404=False):
        """Get web file

            Sometimes console.html is gzipped on logs server and console.html
            is not available anymore, so here it silently fails when trying to
            download console.html and then tries to get console.html.gz
            We don't want redundant error messages in console

        :param ignore404: not to show error message if got 404 error
        :return: request obj
        """
        log.debug("GET {url} with ignore404={i}".format(url=self.url,
                                                        i=str(ignore404)))
        try:
            req = requests.get(self.url, timeout=self.timeout)
        except ConnectionError:
            log.error("Connection error when retriving {}".format(self.url))
            return None
        except Timeout:
            log.error("Timeout reached when retriving {}".format(self.url))
            return None
        except Exception as e:
            log.error("Unknown error when retriving {}: {}".format(
                self.url, str(e)))
            return None
        if int(req.status_code) != 200:
            if not (ignore404 and int(req.status_code) == 404):
                log.warn("Page {url} got status {code}".format(
                    url=self.url, code=req.status_code))
        return req
Example #6
 def exe(self, cmd):
     log.debug("Executing cmd by ssh: {cmd}".format(cmd=cmd))
     try:
         stdin, stdout, stderr = self.ssh_cl.exec_command(cmd)
     except paramiko.ssh_exception.SSHException as e:
         log.error("SSH command failed: {}\n{}".format(cmd, e))
         return None, None, None
     return stdin, stdout.read(), stderr.read()
Example #7
 def exe(self, cmd):
     log.debug("Executing cmd by ssh: {cmd}".format(cmd=cmd))
     try:
         stdin, stdout, stderr = self.ssh_cl.exec_command(cmd)
     except paramiko.ssh_exception.SSHException as e:
         log.error("SSH command failed: {}\n{}".format(cmd, e))
         return None, None, None
     except EOFError as e:
         log.error("SSH command failed with: {}\n{}".format(cmd, e))
         return None, None, None
     return stdin, stdout.read(), stderr.read()
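
A short sketch of how callers consume the returned triple; it mirrors the [1:] slice used in get_project_patches (examples #12-#13), and the connection details are placeholders:

    # Placeholder host/port/user; SSH is the class from examples #10-#11.
    ssh = SSH(host="review.example.org", port=29418, user="ci", timeout=10)
    out, err = ssh.exe("gerrit version")[1:]   # drop stdin, keep output bytes
    if err:
        log.error("Error with ssh: {}".format(err))
    ssh.close()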
Example #8
 def _extract(self, tar, root_dir, file_path):
     log.debug("Extracting file {} from {} in {}".format(
         file_path, tar, root_dir))
     try:
         with contextlib.closing(lzma.LZMAFile(tar)) as xz:
             with tarfile.open(fileobj=xz) as f:
                 f.extract(file_path, path=root_dir)
         return True
     except Exception as e:
         log.error("Error when untarring file {} from {} in {}:{}".format(
             file_path, tar, root_dir, e))
         return False
Example #9
File: utils.py Project: sshnaidm/sova
 def _extract(self, tar, root_dir, file_path):
     log.debug("Extracting file {} from {} in {}".format(
         file_path, tar, root_dir))
     try:
         with contextlib.closing(lzma.LZMAFile(tar)) as xz:
             with tarfile.open(fileobj=xz) as f:
                 f.extract(file_path, path=root_dir)
         return True
     except Exception as e:
         log.error("Error when untarring file {} from {} in {}:{}".format(
             file_path, tar, root_dir, e))
         return False
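
The same pattern in standalone form: a sketch that streams an xz-compressed tarball and extracts a single member (paths are invented):

    import contextlib
    import lzma
    import tarfile

    def extract_member(archive, member, dest):
        # Stream-decompress the .tar.xz and pull out just one member.
        with contextlib.closing(lzma.LZMAFile(archive)) as xz:
            with tarfile.open(fileobj=xz) as tf:
                tf.extract(member, path=dest)

    extract_member("logs.tar.xz", "var/log/messages", "/tmp/extracted")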
Example #10
File: utils.py Project: odyssey4me/sova
 def __init__(self,
              host, port, user, timeout=None, key=None, key_path=None):
     self.ssh_cl = paramiko.SSHClient()
     self.ssh_cl.set_missing_host_key_policy(paramiko.AutoAddPolicy())
     log.debug("Executing ssh {user}@{host}:{port}".format(
         user=user, host=host, port=port))
     self.ssh_cl.connect(hostname=host,
                         port=port,
                         username=user,
                         timeout=timeout,
                         pkey=key,
                         key_filename=key_path)
Example #11
File: utils.py Project: sshnaidm/sova
 def __init__(self,
              host, port, user, timeout=None, key=None, key_path=None):
     self.ssh_cl = paramiko.SSHClient()
     self.ssh_cl.set_missing_host_key_policy(paramiko.AutoAddPolicy())
     log.debug("Executing ssh {user}@{host}:{port}".format(
         user=user, host=host, port=port))
     self.ssh_cl.connect(hostname=host,
                         port=port,
                         username=user,
                         timeout=timeout,
                         pkey=key,
                         key_filename=key_path)
Example #12
File: utils.py Project: odyssey4me/sova
    def get_project_patches(self, projects):
        def filtered(x):
            return [json.loads(i.decode(errors='ignore'))
                    for i in x.splitlines()
                    if 'project' in i.decode(errors='ignore')]

        def calc_date(x):
            return (
                datetime.datetime.today() - datetime.timedelta(days=x)
            ).date().strftime("%Y-%m-%d")

        data = []

        cmd_template = ('gerrit query '
                        'status:reviewed '
                        'project:{project} '
                        'branch:{branch} '
                        '--comments '
                        '--format JSON '
                        'limit:{limit} '
                        '--patch-sets '
                        '--current-patch-set ')
        if self.period:
            cmd_template += ' after:"{date}"'.format(
                date=calc_date(self.period))
        for proj in projects:
            # Start SSH for every project from scratch because SSH timeout
            self.ssh = SSH(host=config.GERRIT_HOST,
                           port=config.GERRIT_PORT,
                           user=config.GERRIT_USER,
                           timeout=config.GERRIT_REQ_TIMEOUT,
                           key_path=self.key_path)
            for branch in config.GERRIT_BRANCHES:
                command = cmd_template.format(
                    project=proj,
                    branch=branch,
                    limit=config.GERRIT_PATCH_LIMIT)
                out, err = self.ssh.exe(command)[1:]
                if err:
                    log.error("Error with ssh:{}".format(err))
                real_data = filtered(out) if out else []
                log.debug("Length of result is {}".format(len(real_data)))
                data += real_data
            self.ssh.close()
            # Let's not ddos Gerrit
            time.sleep(1)
        return data
Example #13
File: utils.py Project: sshnaidm/sova
    def get_project_patches(self, projects):
        def filtered(x):
            return [json.loads(i.decode(errors='ignore'))
                    for i in x.splitlines()
                    if 'project' in i.decode(errors='ignore')]

        def calc_date(x):
            return (
                datetime.datetime.today() - datetime.timedelta(days=x)
            ).date().strftime("%Y-%m-%d")

        data = []

        cmd_template = ('gerrit query '
                        'status:reviewed '
                        'project:{project} '
                        'branch:{branch} '
                        '--comments '
                        '--format JSON '
                        'limit:{limit} '
                        '--patch-sets '
                        '--current-patch-set ')
        if self.period:
            cmd_template += ' after:"{date}"'.format(
                date=calc_date(self.period))
        for proj in projects:
            # Start SSH for every project from scratch because SSH timeout
            self.ssh = SSH(host=config.GERRIT_HOST,
                           port=config.GERRIT_PORT,
                           user=config.GERRIT_USER,
                           timeout=config.GERRIT_REQ_TIMEOUT,
                           key_path=self.key_path)
            for branch in config.GERRIT_BRANCHES:
                command = cmd_template.format(
                    project=proj,
                    branch=branch,
                    limit=config.GERRIT_PATCH_LIMIT)
                out, err = self.ssh.exe(command)[1:]
                if err:
                    log.error("Error with ssh:{}".format(err))
                real_data = filtered(out) if out else []
                log.debug("Length of result is {}".format(len(real_data)))
                data += real_data
            self.ssh.close()
            # Let's not ddos Gerrit
            time.sleep(1)
        return data
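
What filtered() is guarding against: gerrit query --format JSON emits one JSON object per line and terminates with a stats record that has no "project" key. A sketch with fabricated sample output:

    import json

    # Fabricated sample of Gerrit's JSON-lines output, for illustration.
    sample = (b'{"project": "openstack/tripleo-ci", "number": "12345"}\n'
              b'{"type": "stats", "rowCount": 1}\n')
    changes = [json.loads(line.decode(errors="ignore"))
               for line in sample.splitlines()
               if "project" in line.decode(errors="ignore")]
    # Only the change record survives; the trailing stats line is dropped.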
Example #14
File: utils.py Project: odyssey4me/sova
 def get_regular_file(self):
     log.debug("Get regular file {}".format(self.file_link))
     self.file_name = os.path.basename(
         self.file_link).split(".gz")[0] + ".gz"
     self.file_original_path = os.path.join(
         self.job_dir,
         os.path.basename(self.file_link))
     self.file_path = os.path.join(self.job_dir, self.file_name)
     if os.path.exists(self.file_path):
         log.debug("File {} is already downloaded".format(self.file_path))
     elif os.path.exists(self.file_original_path + "_404"):
         log.debug("File {} was saved as 404".format(self.file_path))
         return None
     else:
         if "." not in self.file_url.split("/")[-1]:
             file_try1 = self.file_url + ".gz"
         else:
             file_try1 = self.file_url
         web = Web(url=file_try1)
         req = web.get(ignore404=True)
         if req is None or int(req.status_code) == 404:
             if req is None:
                 log.warning(
                     "Failed to retrieve URL, request is None: {}".format(
                         file_try1))
             if self.file_url.endswith(".html"):
                 file_try2 = self.file_url
             elif self.file_url.endswith(".txt"):
                 file_try2 = self.file_url[:-4] + ".log"
             else:
                 log.warn("Failed to retrieve URL, tried once: {}".format(
                     file_try1))
                 if req is not None:
                     open(self.file_original_path + "_404", "a").close()
                 return None
             web = Web(url=file_try2)
             log.debug("Trying to download raw file {}".format(file_try2))
             req = web.get()
             if req is None or int(req.status_code) != 200:
                 log.warn("Failed to retrieve URL, tried twice: {}".format(
                     file_try2))
                 if req is not None and int(req.status_code) == 404:
                     open(self.file_original_path + "_404", "a").close()
                 return None
         elif int(req.status_code) not in (200, 404):
             log.warning(
                 "Failed to retrieve URL, request failure: {} {}".format(
                     file_try1, req.status_code))
             return None
         if int(req.status_code) == 200:
             with gzip.open(self.file_path, "wt") as f:
                 f.write(req.text)
     return self.file_path
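
The retry ladder this method walks, sketched with an invented URL:

    # Invented URL; shows which candidate URLs are tried, in order.
    file_url = "http://logs.example.com/42/postci.txt"
    last_segment = file_url.split("/")[-1]
    # First try: append .gz when the last segment has no extension dot.
    file_try1 = file_url + ".gz" if "." not in last_segment else file_url
    # On a 404: .html URLs are retried as-is, .txt URLs are retried as .log.
    file_try2 = file_url[:-4] + ".log" if file_url.endswith(".txt") else file_url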
Example #15
File: utils.py Project: sshnaidm/sova
 def get_regular_file(self):
     log.debug("Get regular file {}".format(self.file_link))
     self.file_name = os.path.basename(
         self.file_link).split(".gz")[0] + ".gz"
     self.file_path = os.path.join(self.job_dir, self.file_name)
     if os.path.exists(self.file_path):
         log.debug("File {} is already downloaded".format(self.file_path))
     elif os.path.exists(self.file_path + "_404"):
         log.debug("File {} was saved as 404".format(self.file_path))
         return None
     else:
         if "." not in self.file_url.split("/")[-1]:
             file_try1 = self.file_url + ".gz"
         else:
             file_try1 = self.file_url
         web = Web(url=file_try1)
         req = web.get(ignore404=True)
         if req is None or int(req.status_code) == 404:
             if req is None:
                 log.warning(
                     "Failed to retrieve URL, request is None: {}".format(
                         file_try1))
             if self.file_url.endswith(".html"):
                 file_try2 = self.file_url
             elif self.file_url.endswith(".txt"):
                 file_try2 = self.file_url[:-4] + ".log"
             else:
                 log.warn("Failed to retrieve URL, tried once: {}".format(
                     file_try1))
                 if req is not None:
                     open(self.file_path + "_404", "a").close()
                 return None
             web = Web(url=file_try2)
             log.debug("Trying to download raw file {}".format(file_try2))
             req = web.get()
             if req is None or int(req.status_code) != 200:
                 log.warn("Failed to retrieve URL, tried twice: {}".format(
                     file_try2))
                 if req is not None and int(req.status_code) == 404:
                     open(self.file_path + "_404", "a").close()
                 return None
         elif int(req.status_code) not in (200, 404):
             log.warning(
                 "Failed to retrieve URL, request failure: {} {}".format(
                     file_try1, req.status_code))
             return None
         if int(req.status_code) == 200:
             with gzip.open(self.file_path, "wt") as f:
                 f.write(req.text)
     return self.file_path
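
The "_404" suffix implements a small negative cache: a zero-byte marker file is touched next to the expected file so later runs skip URLs already known to be missing. The idiom in isolation, with an invented path:

    import os

    marker = "/tmp/job_dir/postci.log_404"    # invented path
    open(marker, "a").close()                 # touch the marker after a 404
    if os.path.exists(marker):
        print("known 404, skipping download")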
Example #16
 def get_regular_file(self):
     log.debug("Get regular file {}".format(self.file_link))
     self.file_name = os.path.basename(
         self.file_link).split(".gz")[0] + ".gz"
     self.file_path = os.path.join(self.job_dir, self.file_name)
     if os.path.exists(self.file_path):
         log.debug("File {} is already downloaded".format(self.file_path))
         return self.file_path
     else:
         web = Web(url=self.file_url)
         ignore404 = self.file_link == "/console.html"
         req = web.get(ignore404=ignore404)
         if (req
                 and req.status_code != 200
                 and self.file_link == "/console.html"):
             self.file_url += ".gz"
             web = Web(url=self.file_url)
             log.debug("Trying to download gzipped console")
             req = web.get()
         if not req or req.status_code != 200:
             log.warn("Failed to retrieve URL: {}".format(self.file_url))
             return None
         else:
             with gzip.open(self.file_path, "wt") as f:
                 f.write(req.text)
         return self.file_path
Example #17
 def _get_jenkins_console(x):
     consoles_dir = os.path.join(config.DOWNLOAD_PATH, "jenkins_cons")
     if not os.path.exists(consoles_dir):
         os.makedirs(consoles_dir)
     file_path = os.path.join(
         consoles_dir,
         "_".join((x.rstrip("/").split("/")[-2:])) + ".gz")
     if os.path.exists(file_path):
         log.debug("Using cached Jenkins console: %s", file_path)
         with gzip.open(file_path, "rt") as f:
             return f.read()
     elif os.path.exists(file_path + "_404"):
         log.debug("Jenkins console cache is 404: %s", file_path)
         return None
     full_url = x + "/" + "consoleText"
     www = Web(full_url, timeout=5)
     page = www.get()
     if page and page.status_code == 404:
         log.error("Jenkins console has 404 error: %s", full_url)
         open(file_path + "_404", 'a').close()
     elif page:
         with gzip.open(file_path, "wt") as f:
             f.write(page.text)
         log.debug("Saved jenkins console cache to: %s", file_path)
         return page.content.decode('utf-8')
     else:
         log.error("Failed to get Jenkins console: %s", full_url)
         return None
Example #18
 def _get_console(self, job):
     path = os.path.join(
         self.down_path, job["log_hash"], "console.html.gz")
     if os.path.exists(path):
         log.debug("Console is already here: {}".format(path))
         return path
     web = Web(job["log_url"] + "/console.html")
     req = web.get(ignore404=True)
     if req is not None and req.status_code == 404:
         url = job["log_url"] + "/console.html.gz"
         web = Web(url=url)
         log.debug("Trying to download gzipped console")
         req = web.get()
     if not req or req.status_code != 200:
         log.error("Failed to retrieve console: {}".format(job["log_url"]))
         return None
     else:
         if not os.path.exists(os.path.dirname(path)):
             os.makedirs(os.path.dirname(path))
         with gzip.open(path, "wb") as f:
             f.write(req.content)
     return path
Example #19
def add_log_url_to_cache(key, value):
    cache.set(key, value, expire=CACHE_TIMEOUT)
    log.debug("Added to cache URL %s", value)
Example #20
 def close(self):
     log.debug("Closing SSH connection")
     self.ssh_cl.close()
Example #21
def get_log_url_from_cache(key):
    if key in cache:
        url = cache[key]
        log.debug("Getting from cache URL %s", url)
        return url
    return None
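
Judging by the calls above (set() with expire=, the in operator, item access), cache behaves like a diskcache.Cache; that backend is an assumption here, as are the directory and timeout:

    from diskcache import Cache   # assumed backend

    cache = Cache("/tmp/sova-cache")   # invented directory
    CACHE_TIMEOUT = 3600               # invented timeout, in seconds
    cache.set("job-log-hash", "http://logs.example.com/42/",
              expire=CACHE_TIMEOUT)
    if "job-log-hash" in cache:
        print(cache["job-log-hash"])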
Example #22
def analyze(job, down_path):
    def line_match(pat, line):
        if isinstance(pat, re.Pattern):
            if not pat.search(line):
                return False
            elif pat.search(line).groups():
                return pat.search(line).group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re.Pattern):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    console = JobFile(job, path=down_path, offline=DEBUG).get_file()
    if not console:
        message['text'] = 'No console file'
        message['msg'] = {'No console file': 'infra'}
        message['tags'] = ['infra']
        return message
    files = PATTERNS.keys()
    for file in files:
        jfile = JobFile(job, path=down_path, file_link=file, offline=DEBUG
                        ).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                        jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[file]:
                        if (line_match(p["pattern"], line) and
                           p["msg"] not in msg):
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update({p["msg"].format(
                                line_match(p["pattern"], line)): p["tag"]})
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(compile_logstash(
                                    line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(
        msg=" ".join(sorted(msg)),
        delim="||" if message['reason'] else "XX",
        date=job.datetime,
        job_type=job.name,
        log_url=job.log_url
    )
    return message
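
analyze() expects every PATTERNS entry to carry the keys read above: pattern (a string or compiled regex), msg (a format string filled from the regex group, if any), tag, id, and logstash. A sketch of one plausible entry, with invented values:

    import re

    PATTERNS = {
        "/console.html": [
            {
                "id": 1,                                  # invented id
                "pattern": re.compile(r"ERROR: (\S+) failed"),
                "msg": "Task {} failed.",                 # filled from group(1)
                "tag": "code",
                "logstash": "ERROR:",  # text quoted into the logstash query
            },
        ],
    }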
Example #23
def analyze(job, down_path, num):
    def line_match(pat, line, exclude=None):
        exclude = exclude or []
        if any([i in line for i in exclude]):
            return False
        if isinstance(pat, re.Pattern):
            if not pat.search(line):
                return False
            elif pat.search(line).groups():
                return pat.search(line).group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re.Pattern):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    def filter_by_job_name(job_name, job_files):
        if "multinode" in job_name:
            job_files = [f for f in job_files if "ironic" not in f]
        return job_files

    log.debug("Starting task {}".format(num))
    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
        'success': job.status == 'SUCCESS',
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    if message['success']:
        message['text'] = 'SUCCESS'
        message['msg'] = {'SUCCESS': ''}
        message['reason'] = False
        message['tags'] = ['']
        return message
    console_names = ACTIVE_PLUGIN_CONFIG.console_name
    if not isinstance(console_names, list):
        console_names = [console_names]
    for console_name in console_names:
        console = JobFile(job,
                          path=down_path,
                          file_link=console_name,
                          offline=DEBUG).get_file()
        if console:
            break

    if not console:
        message['text'] = 'Failed to fetch logs'
        message['msg'] = {'Failed to fetch logs': 'infra'}
        message['tags'] = ['infra']
        log.debug("Failed to fetch logs %s", job.log_url)
        return message
    file_pointers = PATTERNS.keys()
    files = [PLUGIN[f] for f in file_pointers]
    REV_PLUGIN = {v: k for k, v in PLUGIN.items()}
    for file in filter_by_job_name(job.name, files):
        jfile = JobFile(job, path=down_path, file_link=file,
                        offline=DEBUG).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                    jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[REV_PLUGIN[file]]:
                        line_matched = (line_match(
                            p["pattern"], line, exclude=p.get("exclude"))
                                        and p["msg"].lower()
                                        not in [i.lower() for i in msg])
                        if line_matched:
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update(
                                {
                                    p["msg"].format(
                                        line_match(p["pattern"], line)):
                                    p["tag"]
                                })
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(
                                    compile_logstash(line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(msg=" ".join(sorted(msg)),
                                   delim="||" if message['reason'] else "XX",
                                   date=job.datetime,
                                   job_type=job.name,
                                   log_url=job.log_url)
    return message
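
In this variant PATTERNS is keyed by logical file names and PLUGIN maps those names to real log paths; REV_PLUGIN inverts the mapping to look patterns back up by path. A sketch of the assumed shape, with invented values:

    # Invented mapping; the real one comes from the active plugin config.
    PLUGIN = {"console": "/console.html",
              "ironic": "/logs/ironic-conductor.txt.gz"}
    REV_PLUGIN = {v: k for k, v in PLUGIN.items()}   # path -> logical name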
Example #24
File: analysis.py Project: sshnaidm/sova
def analyze(job, down_path, num):
    def line_match(pat, line, exclude=None):
        exclude = exclude or []
        if any([i in line for i in exclude]):
            return False
        if isinstance(pat, re.Pattern):
            if not pat.search(line):
                return False
            elif pat.search(line).groups():
                return pat.search(line).group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re.Pattern):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    def filter_by_job_name(job_name, job_files):
        if "multinode" in job_name:
            job_files = [f for f in job_files if "ironic" not in f]
        return job_files

    log.debug("Starting task {}".format(num))
    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
        'success': job.status == 'SUCCESS',
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    if message['success']:
        message['text'] = 'SUCCESS'
        message['msg'] = {'SUCCESS': ''}
        message['reason'] = False
        message['tags'] = ['']
        return message
    console_names = ACTIVE_PLUGIN_CONFIG.console_name
    if not isinstance(console_names, list):
        console_names = [console_names]
    for console_name in console_names:
        console = JobFile(job, path=down_path, file_link=console_name,
                          offline=DEBUG).get_file()
        if console:
            break

    if not console:
        message['text'] = 'Failed to fetch logs'
        message['msg'] = {'Failed to fetch logs': 'infra'}
        message['tags'] = ['infra']
        return message
    file_pointers = PATTERNS.keys()
    files = [PLUGIN[f] for f in file_pointers]
    REV_PLUGIN = {v: k for k, v in PLUGIN.items()}
    for file in filter_by_job_name(job.name, files):
        jfile = JobFile(job, path=down_path, file_link=file, offline=DEBUG
                        ).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                    jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[REV_PLUGIN[file]]:
                        line_matched = (line_match(
                            p["pattern"], line, exclude=p.get("exclude")
                        ) and p["msg"].lower() not in [i.lower() for i in msg])
                        if line_matched:
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update({p["msg"].format(
                                line_match(p["pattern"], line)): p["tag"]})
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(compile_logstash(
                                    line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(
        msg=" ".join(sorted(msg)),
        delim="||" if message['reason'] else "XX",
        date=job.datetime,
        job_type=job.name,
        log_url=job.log_url
    )
    return message
Example #25
File: utils.py Project: sshnaidm/sova
 def close(self):
     log.debug("Closing SSH connection")
     self.ssh_cl.close()