def _parse_saw_node(self, messages):
    pat_saw_node = re.compile(
        r"Node '(.*)' saw that node '(.*)' (went down|came up)")
    for message in messages:
        m = pat_saw_node.search(message)
        if m:
            on_node = extract_nodename(m.group(1),
                                       self.timeline.default_node_name)
            action_node = extract_nodename(m.group(2),
                                           self.timeline.default_node_name)
            action = m.group(3)
            descr = '{} {}'.format(action_node, action)
            self.timeline.add_event(
                Event(message, 'fail', descr,
                      self.timeline.default_node_name, node_name=on_node))
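# Hedged illustration (hypothetical line, derived from the regex above) of the
# ns_server-style message shape this method matches; the node names are the
# "ns_1@host" strings that extract_nodename() normalizes:
#
#   Node 'ns_1@10.1.1.1' saw that node 'ns_1@10.1.1.2' went down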
def parse_zip_file(zip_file):
    timeline = Timeline()
    try:
        ci = zipfile.ZipFile(zip_file, 'r')
    except (IOError, zipfile.BadZipfile):
        print('Could not open file: {}'.format(zip_file), file=sys.stderr)
        return
    for name in ci.namelist():
        # determine a default nodename that can be used when parsing
        # cannot otherwise determine the nodename
        if not timeline.default_node_name:
            nodename = extract_nodename(name, 'unnamed_node')
            # strip cbcollect_info timestamp from nodename
            nodename = re.sub(r'_[0-9]{8}-[0-9]{6}$', '', nodename)
            timeline.default_node_name = nodename
        # determine if the file included in this zip can be parsed
        # by one of the modules. if so, add to tasks.
        logname = os.path.split(name)[-1]
        if logname == 'couchbase.log':
            timeline.collection_time = datetime.datetime(
                *ci.getinfo(name).date_time).isoformat()
        try:
            LOG_MODULES[logname](io.BufferedReader(ci.open(name),
                                                   MAX_BUFFER_SIZE),
                                 timeline)
        except KeyError:
            pass
    ci.close()
    return timeline
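# Hedged usage sketch (not part of the original source): parse_zip_file() above
# takes a path to a cbcollect_info zip and returns a populated Timeline, or
# None if the zip cannot be opened. The helper name below is hypothetical, and
# anything about how Timeline exposes its contents beyond the attributes
# assigned above is an assumption.
def _print_timeline_summary(zip_path):
    timeline = parse_zip_file(zip_path)
    if timeline is None:
        return
    print('node: {}'.format(timeline.default_node_name))
    # collection_time is only assigned when a couchbase.log entry is present,
    # so fall back to a placeholder rather than assuming the attribute exists
    print('collected: {}'.format(getattr(timeline, 'collection_time',
                                         'unknown')))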
def _parse_buckets_not_ready(self, instances):
    # both halves of each split pattern are raw strings so the regex escapes
    # (\[, \") are not interpreted as string escape sequences
    pat_bucket_not_ready = re.compile(
        r"The following buckets became not ready on node "
        r"'(.*)': \[(.*)\], .*\]$")
    pat_bucket_not_ready_mult = re.compile(
        r"The following buckets became not ready on node '"
        r"(.*)': \[\"([^\"]*)\",$")
    pat_bucket_not_ready_mult_middle = re.compile(r' *"(.*)",$')
    pat_bucket_not_ready_mult_end = re.compile(r' *"(.*)"\]')
    for instance in instances:
        single = pat_bucket_not_ready.search(instance[0])
        multi = pat_bucket_not_ready_mult.search(instance[0])
        if single:
            on = extract_nodename(single.group(1),
                                  self.timeline.default_node_name)
            buckets = single.group(2).replace('"', '')
            self.timeline.add_event(
                Event(instance[0], 'fail',
                      'buckets not ready on {}: `{}`'.format(on, buckets),
                      self.timeline.default_node_name))
        elif multi:
            on = extract_nodename(multi.group(1),
                                  self.timeline.default_node_name)
            buckets = multi.group(2).replace('"', '')
            for line in instance[1:]:
                m = pat_bucket_not_ready_mult_end.search(line)
                if m:
                    buckets += ', ' + m.group(1)
                    self.timeline.add_event(
                        Event(instance[0], 'fail',
                              'buckets not ready on {}: `{}`'.format(
                                  on, buckets),
                              self.timeline.default_node_name))
                    continue
                m = pat_bucket_not_ready_mult_middle.search(line)
                if m:
                    buckets += ', ' + m.group(1)
                    continue
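# Hedged illustration (hypothetical lines, shaped from the regexes above): the
# single-line form carries the whole bucket list on one line, while the
# multi-line form opens the list on the first line of the instance, continues
# it on middle lines, and closes it with a ']':
#
#   The following buckets became not ready on node 'ns_1@10.1.1.1': ["default"], ... ["default"]
#
#   The following buckets became not ready on node 'ns_1@10.1.1.1': ["beer-sample",
#     "default",
#     "travel-sample"]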
def _parse_node_add(self, messages):
    pat_node_add = re.compile(r"adding node '(.*)' to nodes_wanted")
    for message in messages:
        m = pat_node_add.search(message)
        if m:
            added_node = extract_nodename(m.group(1),
                                          self.timeline.default_node_name)
        else:
            added_node = 'unknown_node'
        self.timeline.add_event(
            Event(message, 'topology',
                  'added node {}'.format(added_node),
                  self.timeline.default_node_name))
def _parse_failover_complete(self, messages):
    pat_manual_failover = re.compile(r'Failed over \'(.*)\': ok')
    pat_node_manual_failover = re.compile(r'info:message\((.*)\) - Failed')
    for message in messages:
        m = pat_manual_failover.search(message)
        if m:
            action_node = extract_nodename(m.group(1),
                                           self.timeline.default_node_name)
        else:
            action_node = 'unknown_node'
        m = pat_node_manual_failover.search(message)
        if m:
            on_node = extract_nodename(m.group(1),
                                       self.timeline.default_node_name)
        else:
            on_node = 'unknown_node'
        self.timeline.add_event(
            Event(message, 'topology',
                  '{} was failed over'.format(action_node),
                  self.timeline.default_node_name, node_name=on_node))
def _parse_auto_failover(self, messages):
    pat_failover = re.compile(r'Node (.*) was automatically failovered')
    pat_node_failover = re.compile(r'info:message\((.*)\) - Node')
    for message in messages:
        m = pat_failover.search(message)
        if m:
            action_node = extract_nodename(m.group(1),
                                           self.timeline.default_node_name)
        else:
            action_node = 'unknown_node'
        m = pat_node_failover.search(message)
        if m:
            on_node = extract_nodename(m.group(1),
                                       self.timeline.default_node_name)
        else:
            on_node = 'unknown_node'
        self.timeline.add_event(
            Event(message, 'topology',
                  '{} was automatically failed over'.format(action_node),
                  self.timeline.default_node_name, node_name=on_node))
def _parse_starting_failover(self, messages):
    pat_failover_start = re.compile(r"Starting failing over '(.*)'")
    for message in messages:
        m = pat_failover_start.search(message)
        if m:
            action_node = extract_nodename(m.group(1),
                                           self.timeline.default_node_name)
        else:
            action_node = 'unknown_node'
        self.timeline.add_event(
            Event(message, 'topology',
                  "starting failing over '{}'".format(action_node),
                  self.timeline.default_node_name))
def _parse_bucket_loads(self, messages):
    pat_bucket_load = re.compile(
        r'Bucket "(.*)" loaded on node \'(.*)\' in ([0-9]+ seconds)\.')
    for message in messages:
        m = pat_bucket_load.search(message)
        if m:
            bucket = m.group(1)
            on_node = extract_nodename(m.group(2),
                                       self.timeline.default_node_name)
            t = m.group(3)
            descr = 'bucket {} loaded on {} in {}'.format(bucket, on_node, t)
            self.timeline.add_event(
                Event(message, 'bucket', descr,
                      self.timeline.default_node_name))