Example 1
    def get(self, ignore404=False):
        """Get web file

            Sometimes console.html is gzipped on logs server and console.html
            is not available anymore, so here it silently fails when trying to
            download console.html and then tries to get console.html.gz
            We don't want redundant error messages in console

        :param ignore404: not to show error message if got 404 error
        :return: request obj
        """
        log.debug("GET {url} with ignore404={i}".format(url=self.url,
                                                        i=str(ignore404)))
        try:
            req = requests.get(self.url, timeout=self.timeout)
        except ConnectionError:
            log.error("Connection error when retriving {}".format(self.url))
            return None
        except Timeout:
            log.error("Timeout reached when retriving {}".format(self.url))
            return None
        except Exception as e:
            log.error("Unknown error when retriving {}: {}".format(
                self.url, str(e)))
            return None
        if int(req.status_code) != 200:
            if not (ignore404 and int(req.status_code) == 404):
                log.warn("Page {url} got status {code}".format(
                    url=self.url, code=req.status_code))
        return req
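
The example assumes a small `Web` wrapper around `requests` plus a module-level logger. A minimal sketch of that assumed context (only `Web`, `self.url`, and `self.timeout` appear above; the constructor and default timeout are guesses):

import logging

import requests
# Note: the bare ConnectionError/Timeout caught above must be the requests
# exceptions; the built-in ConnectionError would not catch requests failures.
from requests.exceptions import ConnectionError, Timeout

log = logging.getLogger(__name__)


class Web(object):
    """Hypothetical holder for a URL to fetch (a reconstruction, not the
    original class)."""

    def __init__(self, url, timeout=10):
        self.url = url
        self.timeout = timeout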
Example 2
 def get_regular_file(self):
     log.debug("Get regular file {}".format(self.file_link))
     self.file_name = os.path.basename(
         self.file_link).split(".gz")[0] + ".gz"
     self.file_path = os.path.join(self.job_dir, self.file_name)
     if os.path.exists(self.file_path):
         log.debug("File {} is already downloaded".format(self.file_path))
         return self.file_path
     else:
         web = Web(url=self.file_url)
         ignore404 = self.file_link == "/console.html"
         req = web.get(ignore404=ignore404)
         if (req
                 and req.status_code != 200
                 and self.file_link == "/console.html"):
             self.file_url += ".gz"
             web = Web(url=self.file_url)
             log.debug("Trying to download gzipped console")
             req = web.get()
         if not req or req.status_code != 200:
             log.warn("Failed to retrieve URL: {}".format(self.file_url))
             return None
         else:
             with gzip.open(self.file_path, "wt") as f:
                 f.write(req.text)
         return self.file_path
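
Note that the body is always stored gzip-compressed, whatever form it was served in; a caller would reopen the cached copy in text mode, for example (the path is illustrative):

import gzip

# Read back the cached copy written above; "rt" matches the "wt" used
# when saving, so lines come out as str, not bytes.
with gzip.open("jobs/1234/console.html.gz", "rt") as f:
    for line in f:
        print(line, end="")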
Example 3
 def get_regular_file(self):
     log.debug("Get regular file {}".format(self.file_link))
     self.file_name = os.path.basename(
         self.file_link).split(".gz")[0] + ".gz"
     self.file_original_path = os.path.join(
         self.job_dir,
         os.path.basename(self.file_link))
     self.file_path = os.path.join(self.job_dir, self.file_name)
     if os.path.exists(self.file_path):
         log.debug("File {} is already downloaded".format(self.file_path))
     elif os.path.exists(self.file_original_path + "_404"):
         log.debug("File {} was saved as 404".format(self.file_path))
         return None
     else:
         if "." not in self.file_url.split("/")[-1]:
             file_try1 = self.file_url + ".gz"
         else:
             file_try1 = self.file_url
         web = Web(url=file_try1)
         req = web.get(ignore404=True)
         if req is None or int(req.status_code) == 404:
             if req is None:
                 log.warn(
                     "Failed to retrieve URL, request is None: {}".format(
                         file_try1))
             if self.file_url.endswith(".html"):
                 file_try2 = self.file_url
             elif self.file_url.endswith(".txt"):
                 file_try2 = self.file_url[:-4] + ".log"
             else:
                 log.warn("Failed to retrieve URL, tried once: {}".format(
                     file_try1))
                 if req is not None:
                     open(self.file_original_path + "_404", "a").close()
                 return None
             web = Web(url=file_try2)
             log.debug("Trying to download raw file {}".format(file_try2))
             req = web.get()
             if req is None or int(req.status_code) != 200:
                 log.warn("Failed to retrieve URL, tried twice: {}".format(
                     file_try2))
                 if req is not None and int(req.status_code) == 404:
                     open(self.file_original_path + "_404", "a").close()
                 return None
         elif int(req.status_code) not in (200, 404):
             log.warn(
                 "Failed to retrieve URL, request failure: {} {}".format(
                     file_try1, req.status_code))
             return None
         if int(req.status_code) == 200:
             with gzip.open(self.file_path, "wt") as f:
                 f.write(req.text)
     return self.file_path
Example 4
 def get_regular_file(self):
     log.debug("Get regular file {}".format(self.file_link))
     self.file_name = os.path.basename(
         self.file_link).split(".gz")[0] + ".gz"
     self.file_path = os.path.join(self.job_dir, self.file_name)
     if os.path.exists(self.file_path):
         log.debug("File {} is already downloaded".format(self.file_path))
     elif os.path.exists(self.file_path + "_404"):
         log.debug("File {} was saved as 404".format(self.file_path))
         return None
     else:
         if "." not in self.file_url.split("/")[-1]:
             file_try1 = self.file_url + ".gz"
         else:
             file_try1 = self.file_url
         web = Web(url=file_try1)
         req = web.get(ignore404=True)
         if req is None or int(req.status_code) == 404:
             if req is None:
                 log.warn(
                     "Failed to retrieve URL, request is None: {}".format(
                         file_try1))
             if self.file_url.endswith(".html"):
                 file_try2 = self.file_url
             elif self.file_url.endswith(".txt"):
                 file_try2 = self.file_url[:-4] + ".log"
             else:
                 log.warn("Failed to retrieve URL, tried once: {}".format(
                     file_try1))
                 if req is not None:
                     open(self.file_path + "_404", "a").close()
                 return None
             web = Web(url=file_try2)
             log.debug("Trying to download raw file {}".format(file_try2))
             req = web.get()
             if req is None or int(req.status_code) != 200:
                 log.warn("Failed to retrieve URL, tried twice: {}".format(
                     file_try2))
                 if req is not None and int(req.status_code) == 404:
                     open(self.file_path + "_404", "a").close()
                 return None
         elif int(req.status_code) not in (200, 404):
             log.warn(
                 "Failed to retrieve URL, request failure: {} {}".format(
                     file_try1, req.status_code))
             return None
         if int(req.status_code) == 200:
             with gzip.open(self.file_path, "wt") as f:
                 f.write(req.text)
     return self.file_path
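
The `_404` marker used in the two examples above implements a simple negative cache: an empty marker file records that the URL returned 404, so later runs skip the request entirely. The same idea in isolation (the names and the `download` callable are illustrative, not from the original code):

import os


def fetch_cached(path, download):
    """Fetch to `path` unless a previous run already got it, or got a 404."""
    marker = path + "_404"
    if os.path.exists(path):
        return path          # positive cache hit: file already downloaded
    if os.path.exists(marker):
        return None          # negative cache hit: URL is a known 404
    if download(path):       # assumed to return True on HTTP 200
        return path
    open(marker, "a").close()  # remember the 404 for subsequent runs
    return None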
Example 5
def analyze(job, down_path, num):
    def line_match(pat, line, exclude=None):
        exclude = exclude or []
        if any([i in line for i in exclude]):
            return False
        if isinstance(pat, re.Pattern):
            match = pat.search(line)
            if not match:
                return False
            elif match.groups():
                return match.group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re.Pattern):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    def filter_by_job_name(job_name, job_files):
        if "multinode" in job_name:
            job_files = [f for f in job_files if "ironic" not in f]
        return job_files

    log.debug("Starting task {}".format(num))
    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
        'success': job.status == 'SUCCESS',
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    if message['success']:
        message['text'] = 'SUCCESS'
        message['msg'] = {'SUCCESS': ''}
        message['reason'] = False
        message['tags'] = ['']
        return message
    console_names = ACTIVE_PLUGIN_CONFIG.console_name
    if not isinstance(console_names, list):
        console_names = [console_names]
    console = None
    for console_name in console_names:
        console = JobFile(job,
                          path=down_path,
                          file_link=console_name,
                          offline=DEBUG).get_file()
        if console:
            break

    if not console:
        message['text'] = 'Failed to fetch logs'
        message['msg'] = {'Failed to fetch logs': 'infra'}
        message['tags'] = ['infra']
        log.debug("Failed to fetch logs %s", job.log_url)
        return message
    file_pointers = PATTERNS.keys()
    files = [PLUGIN[f] for f in file_pointers]
    REV_PLUGIN = {v: k for k, v in PLUGIN.items()}
    for file in filter_by_job_name(job.name, files):
        jfile = JobFile(job, path=down_path, file_link=file,
                        offline=DEBUG).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                    jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[REV_PLUGIN[file]]:
                        line_matched = (line_match(
                            p["pattern"], line, exclude=p.get("exclude"))
                                        and p["msg"].lower()
                                        not in [i.lower() for i in msg])
                        if line_matched:
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update(
                                {
                                    p["msg"].format(
                                        line_match(p["pattern"], line)):
                                    p["tag"]
                                })
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(
                                    compile_logstash(line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(msg=" ".join(sorted(msg)),
                                   delim="||" if message['reason'] else "XX",
                                   date=job.datetime,
                                   job_type=job.name,
                                   log_url=job.log_url)
    return message
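
`PATTERNS` itself is not shown in these examples; judging by the keys the loop reads (`pattern`, `msg`, `tag`, `id`, `logstash`, and an optional `exclude` list), an entry plausibly looks like the following sketch (all values invented for illustration):

import re

PATTERNS = {
    "console": [  # hypothetical file-pointer key
        {
            "id": 1,
            # compiled patterns go through regex matching, plain strings
            # through a substring test (see line_match above)
            "pattern": re.compile(r"Stack (\S+) CREATE_FAILED"),
            "msg": "Stack {} failed to create.",  # formatted with group(1)
            "tag": "code",
            "logstash": "CREATE_FAILED",   # falsy skips the logstash URL
            "exclude": ["DEBUG"],          # lines containing these are skipped
        },
    ],
}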
Example 6
def analyze(job, down_path):
    def line_match(pat, line):
        if isinstance(pat, re.Pattern):
            match = pat.search(line)
            if not match:
                return False
            elif match.groups():
                return match.group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re.Pattern):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    console = JobFile(job, path=down_path, offline=DEBUG).get_file()
    if not console:
        message['text'] = 'No console file'
        message['msg'] = {'No console file': 'infra'}
        message['tags'] = ['infra']
        return message
    files = PATTERNS.keys()
    for file in files:
        jfile = JobFile(job, path=down_path, file_link=file, offline=DEBUG
                        ).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                        jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[file]:
                        if (line_match(p["pattern"], line) and
                           p["msg"] not in msg):
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update({p["msg"].format(
                                line_match(p["pattern"], line)): p["tag"]})
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(compile_logstash(
                                    line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(
        msg=" ".join(sorted(msg)),
        delim="||" if message['reason'] else "XX",
        date=job.datetime,
        job_type=job.name,
        log_url=job.log_url
    )
    return message
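
`line_match` returns the first capture group when a compiled pattern has groups, a plain boolean when it has none, and falls back to a substring test for string patterns. A standalone copy of the helper (it is nested inside `analyze` above) shows the three cases:

import re


def line_match(pat, line):
    # standalone copy of the nested helper, for illustration
    if isinstance(pat, re.Pattern):
        match = pat.search(line)
        if not match:
            return False
        return match.group(1) if match.groups() else True
    if isinstance(pat, str):
        return pat in line


assert line_match(re.compile(r"ERROR: (\S+)"), "ERROR: overcloud") == "overcloud"
assert line_match(re.compile(r"FAILED"), "job FAILED") is True
assert line_match("Timeout", "Read Timeout on undercloud") is True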
Example 7
def analyze(job, down_path, num):
    def line_match(pat, line, exclude=None):
        exclude = exclude or []
        if any([i in line for i in exclude]):
            return False
        if isinstance(pat, re.Pattern):
            match = pat.search(line)
            if not match:
                return False
            elif match.groups():
                return match.group(1)
            else:
                return True
        if isinstance(pat, str):
            return pat in line

    def compile_logstash(line, pat_stash):
        if isinstance(pat_stash, re.Pattern):
            return 'message:"' + pat_stash.search(line).group() + '"'
        else:
            return 'message:"' + pat_stash + '"'

    def filter_by_job_name(job_name, job_files):
        if "multinode" in job_name:
            job_files = [f for f in job_files if "ironic" not in f]
        return job_files

    log.debug("Starting task {}".format(num))
    message = {
        "text": '',
        "tags": set(),
        "msg": dict(),
        "reason": True,
        "job": job,
        "periodic": "periodic" in job.name,
        'patterns': set(),
        'logstash_url': set(),
        'success': job.status == 'SUCCESS',
    }
    templ = ("{date}\t"
             "{job_type:38}\t"
             "{delim}\t"
             "{msg:60}\t"
             "{delim}\t"
             "log: {log_url}")

    msg = dict()
    if message['success']:
        message['text'] = 'SUCCESS'
        message['msg'] = {'SUCCESS': ''}
        message['reason'] = False
        message['tags'] = ['']
        return message
    console_names = ACTIVE_PLUGIN_CONFIG.console_name
    if not isinstance(console_names, list):
        console_names = [console_names]
    console = None
    for console_name in console_names:
        console = JobFile(job, path=down_path, file_link=console_name,
                          offline=DEBUG).get_file()
        if console:
            break

    if not console:
        message['text'] = 'Failed to fetch logs'
        message['msg'] = {'Failed to fetch logs': 'infra'}
        message['tags'] = ['infra']
        return message
    file_pointers = PATTERNS.keys()
    files = [PLUGIN[f] for f in file_pointers]
    REV_PLUGIN = {v: k for k, v in PLUGIN.items()}
    for file in filter_by_job_name(job.name, files):
        jfile = JobFile(job, path=down_path, file_link=file, offline=DEBUG
                        ).get_file()
        if not jfile:
            log.warn("File {} is not downloaded, "
                     "skipping its patterns".format(file))
            continue
        else:
            try:
                log.debug("Opening file for scan: {}".format(jfile))
                finput = fileinput.FileInput(
                    jfile, openhook=fileinput.hook_compressed)
                for line in finput:
                    line = line.decode()
                    for p in PATTERNS[REV_PLUGIN[file]]:
                        line_matched = (line_match(
                            p["pattern"], line, exclude=p.get("exclude")
                        ) and p["msg"].lower() not in [i.lower() for i in msg])
                        if line_matched:
                            log.debug("Found pattern {} in file {}:{}".format(
                                repr(p), file, jfile))
                            msg.update({p["msg"].format(
                                line_match(p["pattern"], line)): p["tag"]})
                            message['tags'].add(p["tag"])
                            message['patterns'].add(p['id'])
                            if p['logstash']:
                                message['logstash_url'].add(compile_logstash(
                                    line, p['logstash']))
                finput.close()

            except Exception as e:
                log.error("Exception when parsing {}: {}".format(
                    jfile, str(e)))
                msg = {"Error when parsing logs.": 'info'}
                message['reason'] = False
                message['tags'].add("info")
    if not msg:
        log.debug("No patterns in job files {}".format(job))
        msg = {"Reason was NOT FOUND.": 'info'}
        message['reason'] = False
        message['tags'].add("info")
    if not [i for i in message['tags'] if i not in ('info', '')]:
        message['reason'] = False
        msg.update({"Please investigate.": 'info'})
    message['msg'] = msg
    message['logstash_url'] = urlize_logstash(message['logstash_url'])
    message['text'] = templ.format(
        msg=" ".join(sorted(msg)),
        delim="||" if message['reason'] else "XX",
        date=job.datetime,
        job_type=job.name,
        log_url=job.log_url
    )
    return message
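
`compile_logstash` turns a matched line into a `message:"..."` query term; `urlize_logstash` (not shown) presumably joins those terms into a search URL. A standalone copy of the helper illustrates both branches:

import re


def compile_logstash(line, pat_stash):
    # standalone copy of the nested helper, for illustration
    if isinstance(pat_stash, re.Pattern):
        return 'message:"' + pat_stash.search(line).group() + '"'
    return 'message:"' + pat_stash + '"'


line = "ERROR: rsync failed with code 23"
print(compile_logstash(line, re.compile(r"rsync failed with code \d+")))
# message:"rsync failed with code 23"
print(compile_logstash(line, "rsync failed"))
# message:"rsync failed"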