Example #1
    def _get_console(self, job):
        console_name = config.ACTIVE_PLUGIN_CONFIG.console_name
        if isinstance(console_name, list):
            console_name = console_name[0]
        path = os.path.join(self.down_path, job["log_hash"], console_name)
        if os.path.exists(path):
            log.debug("Console is already here: {}".format(path))
            return path

        console_names = config.ACTIVE_PLUGIN_CONFIG.console_name
        if not isinstance(console_names, list):
            console_names = [console_names]
        for console_name in console_names:
            web = Web(job["log_url"] + "/" + console_name, timeout=7)
            log.debug(
                "Trying to download console: {}".format(job["log_url"] + "/" +
                                                        console_name))
            req = web.get(ignore404=True)
            if req is None or int(req.status_code) != 200:
                log.error(
                    "Failed to retrieve console: {}".format(job["log_url"] +
                                                            "/" +
                                                            console_name))
            else:
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                with gzip.open(path, "wb") as f:
                    f.write(req.content)
                break
        else:
            return None
        return path
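
A side note on the for/else used above: the else branch runs only when the loop finishes without a break, i.e. when none of the configured console names could be downloaded. A standalone illustration of the idiom (the helper name and data are made up):

def first_even(numbers):
    # The else clause belongs to the for loop: it runs only if no break happened.
    for n in numbers:
        if n % 2 == 0:
            found = n
            break
    else:
        return None
    return found

print(first_even([3, 5, 8]))  # 8
print(first_even([3, 5, 7]))  # None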
Example #2
 def _get_index(self):
     web = Web(self.per_url)
     req = web.get()
     if not req or req.status_code != 200:
         log.error("Can not retrieve periodic page {}".format(self.per_url))
         return None
     return req.content
Example #3
 def _get_jenkins_console(x):
     consoles_dir = os.path.join(config.DOWNLOAD_PATH, "jenkins_cons")
     if not os.path.exists(consoles_dir):
         os.makedirs(consoles_dir)
     file_path = os.path.join(
         consoles_dir,
         "_".join((x.rstrip("/").split("/")[-2:])) + ".gz")
     if os.path.exists(file_path):
         log.debug("Using cached Jenkins console: %s", file_path)
         with gzip.open(file_path, "rt") as f:
             return f.read()
     elif os.path.exists(file_path + "_404"):
         log.debug("Jenkins console cache is 404: %s", file_path)
         return None
     full_url = x + "/" + "consoleText"
     www = Web(full_url, timeout=5)
     page = www.get()
     if page and page.status_code == 404:
         log.error("Jenkins console has 404 error: %s", full_url)
         open(file_path + "_404", 'a').close()
     elif page:
         with gzip.open(file_path, "wt") as f:
             f.write(page.text)
         log.debug("Saved jenkins console cache to: %s", file_path)
         return page.content.decode('utf-8')
     else:
         log.error("Failed to get Jenkins console: %s", full_url)
         return None
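
A worked example of the cache-file naming used above: the last two path components of the Jenkins build URL (job name and build number) become the file name. The URL and directory below are illustrative.

import os

url = "https://jenkins.example.org/job/periodic-validate/1234/"
name = "_".join(url.rstrip("/").split("/")[-2:]) + ".gz"
print(name)                                     # periodic-validate_1234.gz
print(os.path.join("/tmp/jenkins_cons", name))  # /tmp/jenkins_cons/periodic-validate_1234.gz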
Example #4
 def exe(self, cmd):
     log.debug("Executing cmd by ssh: {cmd}".format(cmd=cmd))
     try:
         stdin, stdout, stderr = self.ssh_cl.exec_command(cmd)
     except paramiko.ssh_exception.SSHException as e:
         log.error("SSH command failed: {}\n{}".format(cmd, e))
         return None, None, None
     return stdin, stdout.read(), stderr.read()
Example #5
 def get_jobs(self):
     index = self._get_index()
     if index:
         jobs = self.parse_index(index)[:self.limit]
     else:
         jobs = []
     for j in jobs:
         raw = self._get_more_data(j)
         if raw is None:
             log.error("Failed to process job {}".format(repr(j)))
         else:
             yield PeriodicJob(**raw)
Example #6
 def exe(self, cmd):
     log.debug("Executing cmd by ssh: {cmd}".format(cmd=cmd))
     try:
         stdin, stdout, stderr = self.ssh_cl.exec_command(cmd)
     except paramiko.ssh_exception.SSHException as e:
         log.error("SSH command failed: {}\n{}".format(cmd, e))
         return None, None, None
     except EOFError as e:
         log.error("SSH command failed with: {}\n{}".format(cmd, e))
         return None, None, None
     return stdin, stdout.read(), stderr.read()
Example #7
 def _extract(self, tar, root_dir, file_path):
     log.debug("Extracting file {} from {} in {}".format(
         file_path, tar, root_dir))
     try:
         with contextlib.closing(lzma.LZMAFile(tar)) as xz:
             with tarfile.open(fileobj=xz) as f:
                 f.extract(file_path, path=root_dir)
         return True
     except Exception as e:
         log.error("Error when untarring file {} from {} in {}:{}".format(
             file_path, tar, root_dir, e))
         return False
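
A self-contained sketch of the lzma + tarfile pattern used above: build a small .tar.xz under a temporary directory, then extract a single member from it, as _extract() does. All names here are illustrative.

import contextlib
import lzma
import os
import tarfile
import tempfile

tmp = tempfile.mkdtemp()
member = "var/log/undercloud.log"
src = os.path.join(tmp, member)
os.makedirs(os.path.dirname(src))
with open(src, "w") as f:
    f.write("example log line\n")

# Create the compressed archive.
tar_path = os.path.join(tmp, "logs.tar.xz")
with tarfile.open(tar_path, "w:xz") as t:
    t.add(src, arcname=member)

# Extract one member the same way _extract() does.
out_dir = os.path.join(tmp, "extracted")
with contextlib.closing(lzma.LZMAFile(tar_path)) as xz:
    with tarfile.open(fileobj=xz) as f:
        f.extract(member, path=out_dir)
print(open(os.path.join(out_dir, member)).read())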
Example #8
 def _get_index(self):
     web = Web(self.per_url)
     req = web.get()
     if req is None or int(req.status_code) != 200:
         log.warning("Trying again to download periodic page ".format(
             self.per_url))
         req = web.get()
         if req is None or int(req.status_code) != 200:
             log.error("Can not retrieve periodic page {}".format(
                 self.per_url))
             return None
     return req.content
Example #9
 def get_jobs(self):
     index = self._get_index()
     if index:
         jobs = self.parse_index(index)[:self.limit]
     else:
         jobs = []
     for j in jobs:
         raw = self._get_more_data(j)
         if raw is None:
             log.error("Failed to process job {}".format(repr(j)))
         else:
             yield PeriodicJob(**raw)
Example #10
 def _extract(self, tar, root_dir, file_path):
     log.debug("Extracting file {} from {} in {}".format(
         file_path, tar, root_dir))
     try:
         with contextlib.closing(lzma.LZMAFile(tar)) as xz:
             with tarfile.open(fileobj=xz) as f:
                 f.extract(file_path, path=root_dir)
         return True
     except Exception as e:
         log.error("Error when untarring file {} from {} in {}:{}".format(
             file_path, tar, root_dir, e))
         return False
Example #11
 def get_build_page(self):
     web = Web(url=self.build)
     req = web.get()
     if req is None:
         log.error("Jenkins page {} is unavailable".format(self.build))
         return None
     if int(req.status_code) != 200:
         return None
     else:
         self.file_path = os.path.join(self.job_dir, "build_page.gz")
         with gzip.open(self.file_path, "wt") as f:
             f.write(req.text)
         return self.file_path
Example #12
 def get_build_page(self):
     web = Web(url=self.build)
     req = web.get()
     if req is None:
         log.error("Jenkins page {} is unavailable".format(self.build))
         return None
     if int(req.status_code) != 200:
         return None
     else:
         self.file_path = os.path.join(self.job_dir, "build_page.gz")
         with gzip.open(self.file_path, "wt") as f:
             f.write(req.text)
         return self.file_path
Example #13
    def get_project_patches(self, projects):
        def filtered(x):
            return [json.loads(i.decode(errors='ignore'))
                    for i in x.splitlines()
                    if 'project' in i.decode(errors='ignore')]

        def calc_date(x):
            return (
                datetime.datetime.today() - datetime.timedelta(days=x)
            ).date().strftime("%Y-%m-%d")

        data = []

        cmd_template = ('gerrit query '
                        'status:reviewed '
                        'project:{project} '
                        'branch:{branch} '
                        '--comments '
                        '--format JSON '
                        'limit:{limit} '
                        '--patch-sets '
                        '--current-patch-set ')
        if self.period:
            cmd_template += ' after:"{date}"'.format(
                date=calc_date(self.period))
        for proj in projects:
            # Start SSH for every project from scratch because of SSH timeouts
            self.ssh = SSH(host=config.GERRIT_HOST,
                           port=config.GERRIT_PORT,
                           user=config.GERRIT_USER,
                           timeout=config.GERRIT_REQ_TIMEOUT,
                           key_path=self.key_path)
            for branch in config.GERRIT_BRANCHES:
                command = cmd_template.format(
                    project=proj,
                    branch=branch,
                    limit=config.GERRIT_PATCH_LIMIT)
                out, err = self.ssh.exe(command)[1:]
                if err:
                    log.error("Error with ssh:{}".format(err))
                real_data = filtered(out) if out else []
                log.debug("Length of result is {}".format(len(real_data)))
                data += real_data
            self.ssh.close()
            # Let's not DDoS Gerrit
            time.sleep(1)
        return data
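
A worked example of the query string the template above produces for one project/branch pair; the project name, branch, and limit below are illustrative.

cmd_template = ('gerrit query '
                'status:reviewed '
                'project:{project} '
                'branch:{branch} '
                '--comments '
                '--format JSON '
                'limit:{limit} '
                '--patch-sets '
                '--current-patch-set ')
print(cmd_template.format(project="openstack/tripleo-ci",
                          branch="master",
                          limit=100))
# gerrit query status:reviewed project:openstack/tripleo-ci branch:master \
# --comments --format JSON limit:100 --patch-sets --current-patch-set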
Example #14
    def get_project_patches(self, projects):
        def filtered(x):
            return [json.loads(i.decode(errors='ignore'))
                    for i in x.splitlines()
                    if 'project' in i.decode(errors='ignore')]

        def calc_date(x):
            return (
                datetime.datetime.today() - datetime.timedelta(days=x)
            ).date().strftime("%Y-%m-%d")

        data = []

        cmd_template = ('gerrit query '
                        'status:reviewed '
                        'project:{project} '
                        'branch:{branch} '
                        '--comments '
                        '--format JSON '
                        'limit:{limit} '
                        '--patch-sets '
                        '--current-patch-set ')
        if self.period:
            cmd_template += ' after:"{date}"'.format(
                date=calc_date(self.period))
        for proj in projects:
            # Start SSH for every project from scratch because of SSH timeouts
            self.ssh = SSH(host=config.GERRIT_HOST,
                           port=config.GERRIT_PORT,
                           user=config.GERRIT_USER,
                           timeout=config.GERRIT_REQ_TIMEOUT,
                           key_path=self.key_path)
            for branch in config.GERRIT_BRANCHES:
                command = cmd_template.format(
                    project=proj,
                    branch=branch,
                    limit=config.GERRIT_PATCH_LIMIT)
                out, err = self.ssh.exe(command)[1:]
                if err:
                    log.error("Error with ssh:{}".format(err))
                real_data = filtered(out) if out else []
                log.debug("Length of result is {}".format(len(real_data)))
                data += real_data
            self.ssh.close()
            # Let's not DDoS Gerrit
            time.sleep(1)
        return data
Example #15
 def _get_list(self):
     result = []
     for per_url in config.PERIODIC_LOGS_URL:
         for page in range(self.pages):
             url = per_url + ('&skip=%d' % int(page * 50) if page else '')
             web = Web(url=url)
             req = web.get()
             if req is None or int(req.status_code) != 200:
                 log.warning(
                     "Trying again to download periodic page {}".format(url))
                 req = web.get()
                 if req is None or int(req.status_code) != 200:
                     log.error(
                         "Can not retrieve periodic page {}".format(url))
                     continue
             try:
                 result += req.json()
             except json.decoder.JSONDecodeError as e:
                 log.error("Can't parse JSON of %s: %s" % (url, e))
     return result
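
An illustration of the paging above: page 0 uses the base URL as-is, and later pages append &skip=<page * 50>. The base URL is illustrative.

per_url = "https://logs.example.org/periodic/?format=json"
for page in range(3):
    url = per_url + ('&skip=%d' % int(page * 50) if page else '')
    print(url)
# https://logs.example.org/periodic/?format=json
# https://logs.example.org/periodic/?format=json&skip=50
# https://logs.example.org/periodic/?format=json&skip=100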
Example #16
 def _get_list(self):
     result = []
     for per_url in config.PERIODIC_LOGS_URL:
         for page in range(self.pages):
             url = per_url + ('&skip=%d' % int(page * 50) if page else '')
             web = Web(url=url)
             req = web.get()
             if req is None or int(req.status_code) != 200:
                 log.warning(
                     "Trying again to download periodic page {}".format(url))
                 req = web.get()
                 if req is None or int(req.status_code) != 200:
                     log.error(
                         "Can not retrieve periodic page {}".format(url))
                     continue
             try:
                 result += req.json()
             except json.decoder.JSONDecodeError as e:
                 log.error("Can't parse JSON of %s: %s" % (url, e))
     return result
Example #17
    def get(self, ignore404=False):
        """Get web file

            Sometimes console.html is gzipped on logs server and console.html
            is not available anymore, so here it silently fails when trying to
            download console.html and then tries to get console.html.gz
            We don't want redundant error messages in console

        :param ignore404: not to show error message if got 404 error
        :return: request obj
        """
        log.debug("GET {url} with ignore404={i}".format(
            url=self.url, i=str(ignore404)))
        try:
            req = requests.get(self.url, timeout=self.timeout)
        except ConnectionError:
            log.error("Connection error when retriving {}".format(self.url))
            return None
        except Timeout:
            log.error("Timeout reached when retriving {}".format(self.url))
            return None
        except Exception as e:
            log.error("Unknown error when retriving {}: {}".format(
                self.url, str(e)))
            return None
        if int(req.status_code) != 200:
            if not (ignore404 and int(req.status_code) == 404):
                log.warn("Page {url} got status {code}".format(
                    url=self.url, code=req.status_code))
        return req
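
This get() method belongs to the Web helper that the other examples instantiate. Below is a minimal sketch of the surrounding class, assuming only what get() actually uses (self.url, self.timeout, and requests' ConnectionError/Timeout exceptions); the default timeout value is invented here.

import requests
from requests.exceptions import ConnectionError, Timeout


class Web(object):
    """Thin wrapper around requests used by the examples above (sketch)."""

    def __init__(self, url, timeout=8):  # default timeout is illustrative
        self.url = url
        self.timeout = timeout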
Example #18
    def get(self, ignore404=False):
        """Get web file

            Sometimes console.html is gzipped on logs server and console.html
            is not available anymore, so here it silently fails when trying to
            download console.html and then tries to get console.html.gz
            We don't want redundant error messages in console

        :param ignore404: not to show error message if got 404 error
        :return: request obj
        """
        log.debug("GET {url} with ignore404={i}".format(url=self.url,
                                                        i=str(ignore404)))
        try:
            req = requests.get(self.url, timeout=self.timeout)
        except ConnectionError:
            log.error("Connection error when retriving {}".format(self.url))
            return None
        except Timeout:
            log.error("Timeout reached when retriving {}".format(self.url))
            return None
        except Exception as e:
            log.error("Unknown error when retriving {}: {}".format(
                self.url, str(e)))
            return None
        if int(req.status_code) != 200:
            if not (ignore404 and int(req.status_code) == 404):
                log.warn("Page {url} got status {code}".format(
                    url=self.url, code=req.status_code))
        return req
Example #19
 def _get_console(self, job):
     path = os.path.join(
         self.down_path, job["log_hash"], "console.html.gz")
     if os.path.exists(path):
         log.debug("Console is already here: {}".format(path))
         return path
     web = Web(job["log_url"] + "/console.html")
     req = web.get(ignore404=True)
     if req is not None and req.status_code == 404:
         url = job["log_url"] + "/console.html.gz"
         web = Web(url=url)
         log.debug("Trying to download gzipped console")
         req = web.get()
     if not req or req.status_code != 200:
         log.error("Failed to retrieve console: {}".format(job["log_url"]))
         return None
     else:
         if not os.path.exists(os.path.dirname(path)):
             os.makedirs(os.path.dirname(path))
         with gzip.open(path, "wb") as f:
             f.write(req.content)
     return path
Example #20
    def _get_more_data(self, j):
        def delta(e, s):
            return (self._parse_ts(e) - self._parse_ts(s)).seconds / 60

        start = end = None
        j.update({
            'status': 'FAILURE',
            'fail': True,
            'branch': '',
            'length': 0,
        })
        console = self._get_console(j)
        if not console:
            log.error("Failed to get console for periodic {}".format(repr(j)))
            return None
        else:
            finput = fileinput.FileInput(console,
                                         openhook=fileinput.hook_compressed)
            for line in finput:
                line = line.decode()
                if "Finished: SUCCESS" in line:
                    j['fail'] = False
                    j['status'] = 'SUCCESS'
                elif "Finished: FAILURE" in line:
                    j['fail'] = True
                    j['status'] = 'FAILURE'
                elif "Finished: ABORTED" in line:
                    j['fail'] = True
                    j['status'] = 'ABORTED'
                if branch_re.search(line):
                    j['branch'] = branch_re.search(line).group(1)
                if 'Started by user' in line:
                    start = ts_re.search(line).group(1)
                if "Finished: " in line:
                    end = ts_re.search(line).group(1)
            j['length'] = delta(end, start) if start and end else 0
            finput.close()
        return j
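
One caveat worth noting about delta() above: timedelta.seconds only covers the sub-day remainder, so a run spanning more than 24 hours would be under-reported; total_seconds() does not have that limitation. A quick check:

import datetime

d = datetime.timedelta(days=1, minutes=5)
print(d.seconds / 60)          # 5.0  (the day part is dropped)
print(d.total_seconds() / 60)  # 1445.0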
Example #21
def retrieve_log_from_swift(log_string):
    # RDO Zuul doesn't store in SWIFT, so it's a direct link to logs
    if "logs.rdoproject.org" in log_string:
        return log_string
    elif "zuul.opendev.org" in log_string:
        # retrieve JSON info from current link
        build_re = BUILD_ID.search(log_string)
        if build_re:
            build_id = build_re.group(1)
        else:
            # failed to parse URL
            log.error("Failed to parse URL=%s", log_string)
            return None
        log_url = ("https://zuul.opendev.org/api/tenant/openstack/build/%s" %
                   build_id)
        from_cache = get_log_url_from_cache(log_url)
        if from_cache:
            return from_cache
        web = Web(log_url)
        req = web.get()
        try:
            json_data = req.json()
        except Exception as e:
            log.error(
                "Exception when decoding JSON from SWIFT URL of Zuul"
                " %s: %s", log_url, str(e))
            return None
        job_log_url = json_data.get('log_url')
        if not job_log_url:
            log.error('log_url is not in data %s: %s', log_url, json_data)
            return None
        add_log_url_to_cache(log_url, job_log_url)
        return job_log_url
    else:
        # unknown log link
        log.error("Unknown log link: %s", log_string)
        return None
Example #22
def analyze(job, down_path, num):
    def line_match(pat, line, exclude=None):
        exclude = exclude or []
        if any([i in line for i in exclude]):
            return False
        if isinstance(pat, re._pattern_type):
            if not pat.search(line):
                return False
            elif pat.search(line).groups():
                return pat.search(line).group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re._pattern_type):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    def filter_by_job_name(job_name, job_files):
        if "multinode" in job_name:
            job_files = [f for f in job_files if "ironic" not in f]
        return job_files

    log.debug("Starting task {}".format(num))
    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
        'success': job.status == 'SUCCESS',
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    if message['success']:
        message['text'] = 'SUCCESS'
        message['msg'] = {'SUCCESS': ''}
        message['reason'] = False
        message['tags'] = ['']
        return message
    console_names = ACTIVE_PLUGIN_CONFIG.console_name
    if not isinstance(console_names, list):
        console_names = [console_names]
    for console_name in console_names:
        console = JobFile(job, path=down_path, file_link=console_name,
                          offline=DEBUG).get_file()
        if console:
            break

    if not console:
        message['text'] = 'Failed to fetch logs'
        message['msg'] = {'Failed to fetch logs': 'infra'}
        message['tags'] = ['infra']
        return message
    file_pointers = PATTERNS.keys()
    files = [PLUGIN[f] for f in file_pointers]
    REV_PLUGIN = {v: k for k, v in PLUGIN.items()}
    for file in filter_by_job_name(job.name, files):
        jfile = JobFile(job, path=down_path, file_link=file, offline=DEBUG
                        ).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                    jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[REV_PLUGIN[file]]:
                        line_matched = (line_match(
                            p["pattern"], line, exclude=p.get("exclude")
                        ) and p["msg"].lower() not in [i.lower() for i in msg])
                        if line_matched:
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update({p["msg"].format(
                                line_match(p["pattern"], line)): p["tag"]})
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(compile_logstash(
                                    line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(
        msg=" ".join(sorted(msg)),
        delim="||" if message['reason'] else "XX",
        date=job.datetime,
        job_type=job.name,
        log_url=job.log_url
    )
    return message
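
For reference, a sketch of the shape of a single PATTERNS entry as implied by the accesses in analyze() above; only the keys ("pattern", "msg", "tag", "id", "logstash", optional "exclude") follow from the code, the values below are illustrative.

import re

example_pattern = {
    "id": 1,
    "pattern": re.compile(r"ERROR.*?(Failed to build \S+)"),  # or a plain substring
    "msg": "Build failure: {}.",
    "tag": "code",
    "logstash": "Failed to build",  # falsy value disables the logstash link
    "exclude": ["DEBUG"],           # optional: lines containing these are skipped
}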
Example #23
    def _get_more_data(self, j):
        def delta(e, s):
            return (self._parse_ts(e) - self._parse_ts(s)).seconds / 60

        start = end = last = None
        j.update({
            'status': 'FAILURE',
            'fail': True,
            'branch': '',
            'length': 0,
        })
        console = self._get_console(j)
        if not console:
            log.error("Failed to get console for job {}".format(repr(j)))
            return None
        else:
            finput = fileinput.FileInput(console,
                                         openhook=fileinput.hook_compressed)
            for line in finput:
                line = line.decode()
                if ('|  SUCCESSFULLY FINISHED' in line):
                    j['fail'] = False
                    j['status'] = 'SUCCESS'
                elif ('|  *** FAILED' in line):
                    j['fail'] = True
                    j['status'] = 'FAILURE'
                elif ("Finished: ABORTED" in line
                      or '[Zuul] Job complete, result: ABORTED' in line):
                    j['fail'] = True
                    j['status'] = 'ABORTED'
                if '  Pipeline:' in line:
                    j['pipeline'] = (pipe_re.search(line).group(1)
                                     if pipe_re.search(line) else '')
                if ("Build step 'Execute shell' marked "
                        "build as failure") in line:
                    j['status'] = 'FAILURE'
                    j['fail'] = True
                if ("dlrnapi --url" in line
                        and dlrnapi_success_re.search(line)):
                    job_state = dlrnapi_success_re.search(line).group(1)
                    if job_state == "false":
                        j['status'] = 'FAILURE'
                        j['fail'] = True
                    elif job_state == "true":
                        j['fail'] = False
                        j['status'] = 'SUCCESS'
                if branch_re.search(line):
                    j['branch'] = branch_re.search(line).group(1)
                try:
                    if ('Started by user' in line
                            or '[Zuul] Launched by' in line
                            or '| PRE-RUN START' in line):
                        start = ts_re.search(line).group(1)
                    if ("|  Run completed" in line
                            or '[Zuul] Job complete' in line
                            or '| POST-RUN START' in line):
                        end = ts_re.search(line).group(1)
                except Exception as e:
                    log.error(e)
                    return None
                if ts_re.search(line):
                    last = ts_re.search(line).group(1)
            end = end or last
            j['length'] = delta(end, start) if start and end else 0
            j['ts'] = self._parse_ts(end) if end else j['ts']
            finput.close()
        return j
Example #24
def analyze(job, down_path):
    def line_match(pat, line):
        if isinstance(pat, re._pattern_type):
            if not pat.search(line):
                return False
            elif pat.search(line).groups():
                return pat.search(line).group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re._pattern_type):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    console = JobFile(job, path=down_path, offline=DEBUG).get_file()
    if not console:
        message['text'] = 'No console file'
        message['msg'] = {'No console file': 'infra'}
        message['tags'] = ['infra']
        return message
    files = PATTERNS.keys()
    for file in files:
        jfile = JobFile(job, path=down_path, file_link=file, offline=DEBUG
                        ).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                        jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[file]:
                        if (line_match(p["pattern"], line) and
                           p["msg"] not in msg):
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update({p["msg"].format(
                                line_match(p["pattern"], line)): p["tag"]})
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(compile_logstash(
                                    line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(
        msg=" ".join(sorted(msg)),
        delim="||" if message['reason'] else "XX",
        date=job.datetime,
        job_type=job.name,
        log_url=job.log_url
    )
    return message
Example #25
def analyze(job, down_path, num):
    def line_match(pat, line, exclude=None):
        exclude = exclude or []
        if any([i in line for i in exclude]):
            return False
        if isinstance(pat, re._pattern_type):
            if not pat.search(line):
                return False
            elif pat.search(line).groups():
                return pat.search(line).group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re._pattern_type):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    def filter_by_job_name(job_name, job_files):
        if "multinode" in job_name:
            job_files = [f for f in job_files if "ironic" not in f]
        return job_files

    log.debug("Starting task {}".format(num))
    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
        'success': job.status == 'SUCCESS',
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    if message['success']:
        message['text'] = 'SUCCESS'
        message['msg'] = {'SUCCESS': ''}
        message['reason'] = False
        message['tags'] = ['']
        return message
    console_names = ACTIVE_PLUGIN_CONFIG.console_name
    if not isinstance(console_names, list):
        console_names = [console_names]
    for console_name in console_names:
        console = JobFile(job,
                          path=down_path,
                          file_link=console_name,
                          offline=DEBUG).get_file()
        if console:
            break

    if not console:
        message['text'] = 'Failed to fetch logs'
        message['msg'] = {'Failed to fetch logs': 'infra'}
        message['tags'] = ['infra']
        log.debug("Failed to fetch logs %s", job.log_url)
        return message
    file_pointers = PATTERNS.keys()
    files = [PLUGIN[f] for f in file_pointers]
    REV_PLUGIN = {v: k for k, v in PLUGIN.items()}
    for file in filter_by_job_name(job.name, files):
        jfile = JobFile(job, path=down_path, file_link=file,
                        offline=DEBUG).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                    jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[REV_PLUGIN[file]]:
                        line_matched = (line_match(
                            p["pattern"], line, exclude=p.get("exclude"))
                                        and p["msg"].lower()
                                        not in [i.lower() for i in msg])
                        if line_matched:
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update(
                                {
                                    p["msg"].format(
                                        line_match(p["pattern"], line)):
                                    p["tag"]
                                })
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(
                                    compile_logstash(line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(msg=" ".join(sorted(msg)),
                                   delim="||" if message['reason'] else "XX",
                                   date=job.datetime,
                                   job_type=job.name,
                                   log_url=job.log_url)
    return message