def processResults(self, cmd, result):
    """Parse command output, preferring Nagios over Cacti.

    Try the Nagios parser first. If it produces no datapoint values,
    try the Cacti parser. If Cacti also produces no values, keep the
    Nagios results, since a failure is more likely a Nagios parse
    problem and the Nagios parser puts more data in the event. Both
    parsers share the same exit-code-based event-severity logic.
    """
    nagios = ParsedResults()
    Nagios().processResults(cmd, nagios)

    chosen = nagios
    if not nagios.values:
        cacti = ParsedResults()
        Cacti().processResults(cmd, cacti)
        if cacti.values:
            # Cacti yielded datapoints; use its results instead.
            chosen = cacti

    result.events.extend(chosen.events)
    result.values.extend(chosen.values)
def testPsZen5278(self):
    """
    Jira 5278 - defunct process matching

    A process set built from a 'defunct' regex must keep matching the
    defunct processes on a second collection and report 'Process up'.
    """
    deviceConfig = Object()
    deviceConfig.device = 'localhost'
    cmd = Object()
    cmd.deviceConfig = deviceConfig
    cmd.command = 'command'
    cmd.includeRegex = ".*defunct.*"
    cmd.excludeRegex = "nothing"
    cmd.replaceRegex = ".*"
    cmd.replacement = "defunct"
    cmd.primaryUrlPath = "url"
    cmd.displayName = "defunct process set"
    cmd.eventKey = "bar"
    cmd.severity = 1
    cmd.generatedId = "url_" + md5("defunct").hexdigest().strip()
    p1 = Object()
    p1.id = 'cpu_cpu'
    p1.data = dict(id='url_defunct',
                   alertOnRestart=False,
                   failSeverity=0)
    cmd.points = [p1]
    cmd.result = Object()
    cmd.result.output = """ PID   RSS        TIME COMMAND
28835916        00:00:00 <defunct>
28967020  1788  00:00:00 sshd: root@sherwood
29622478   448  00:00:08 aioserver
29688042        00:00:00 <defunct>
"""
    results = ParsedResults()
    parser = ps()
    parser.processResults(cmd, results)

    # Second collection: only the defunct processes remain.
    results = ParsedResults()
    cmd.result.output = """ PID   RSS        TIME COMMAND
29688042        00:00:00 <defunct>
28835916        00:00:00 <defunct>
"""
    parser.processResults(cmd, results)

    # Ensure that we can track defunct processes
    for ev in results.events:
        message = ev['summary']
        if message.find('defunct') >= 0:
            # BUG FIX: the failure message previously contained an
            # unfilled '%s' placeholder; interpolate the summary so a
            # failure actually shows which message was wrong.
            self.assert_(
                message.find('Process up') >= 0,
                "'%s' did not contain 'Process up'" % message)
        else:
            raise AssertionError("unexpected event")
def _parseResults(self, resultList, cacheableDS):
    """
    Interpret the results retrieved from the commands and pass on
    the datapoint values and events.

    @parameter resultList: results of running the commands in a DeferredList
    @type resultList: array of (boolean, datasource)
    @parameter cacheableDS: other datasources that can use the same results
    @type cacheableDS: dictionary of arrays of datasources
    """
    self.state = SshPerformanceCollectionTask.STATE_PARSE_DATA
    results = []
    for success, datasource in resultList:
        parsedResults = ParsedResults()
        if not success:
            # In this case, our datasource is actually a defer.Failure
            reason = datasource
            # The failed datasource travels in the Failure's args.
            datasource, = reason.value.args
            msg = "Datasource %s command timed out" % datasource.name
            event = self._makeCmdEvent(datasource, msg, event_key='Timeout')
        else:
            # clear our timeout event
            msg = "Datasource %s command timed out" % datasource.name
            event = self._makeCmdEvent(datasource, msg, severity=Clear,
                                       event_key='Timeout')
        # Re-use our results for any similar datasources
        cache = cacheableDS.get(datasource.command, [])
        if datasource.name == "OSProcess/ps":
            # We need to process all OSProcess in a special way to
            # avoid more than one OSProcess matching the same process.
            # NOTE(review): this path never appends the timeout/clear
            # 'event' built above -- confirm that is intentional.
            process_datasources = [datasource]
            process_datasources.extend(cache)
            process_parseable_results = \
                self._process_os_processes_results_in_sequence(
                    process_datasources, datasource.result)
            results.extend(process_parseable_results)
            continue
        for ds in cache:
            # Each cached datasource parses its own copy of the result
            # into its own ParsedResults container.
            ds.result = copy(datasource.result)
            self._processDatasourceResults(ds, parsedResults)
            results.append((ds, parsedResults))
            parsedResults = ParsedResults()
        # Finally parse the originating datasource and attach the
        # timeout (or clearing) event to its results.
        self._processDatasourceResults(datasource, parsedResults)
        parsedResults.events.append(event)
        results.append((datasource, parsedResults))
    return results
def step_processReturns(self):
    """Verifies that a 'repopulated' process set is handled correctly.
    """
    lines = (
        " PID RSS TIME COMMAND",
        "124 1 00:00:00 someJob a b c",
        "456 1 00:00:00 someOtherJob 1 2 3",
    )
    self.cmd.result.output = '\n'.join(lines)

    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)

    # Three datapoints: cpu, mem and count.
    self.assertEqual(len(parsed.values), 3)

    datapoint, value = _getDatapoint(parsed.values, "cpu")
    self.assertIsNotNone(datapoint)
    self.assertEqual(value, 0)

    datapoint, value = _getDatapoint(parsed.values, "mem")
    self.assertIsNotNone(datapoint)
    self.assertEqual(value, 2048)

    datapoint, value = _getDatapoint(parsed.values, "count")
    self.assertIsNotNone(datapoint)
    self.assertEqual(value, 2)

    # Both events are clear-severity process-status events.
    self.assertEqual(len(parsed.events), 2)
    for event in parsed.events:
        self.assertEqual(event["severity"], 0)
        self.assertEqual(event["component"], "Job")
        self.assertEqual(event["eventGroup"], "Process")
        self.assertEqual(event["eventClass"], "/Status/OSProcess")
def test_SystemDEvents(self):
    """Systemd service output maps to up/down service events."""
    self.cmd.result.output = SYSTEMD_OUTPUT

    def summary_for(unit):
        # Point the emulated command at the given unit and parse.
        self.cmd.component = unit
        self.cmd.points[0]['data']['id'] = unit
        parsed = ParsedResults()
        service().processResults(self.cmd, parsed)
        return parsed.events[0]['summary']

    # Test Event is Down
    self.assertEqual(summary_for('abrt-oops'), 'OS Service is down')
    # Test Event is Up
    self.assertEqual(summary_for('abrt-ccpp'), 'OS Service is up')
def test_UpstartEvents(self):
    """Upstart service output maps to up/down service events."""
    self.cmd.result.output = UPSTART_OUTPUT

    def summary_for(component, point_id):
        # Component name and datapoint id may differ (path mangling).
        self.cmd.component = component
        self.cmd.points[0]['data']['id'] = point_id
        parsed = ParsedResults()
        service().processResults(self.cmd, parsed)
        return parsed.events[0]['summary']

    # Test Event is Down
    self.assertEqual(
        summary_for('plymouth-shutdown', 'plymouth-shutdown'),
        'OS Service is down')
    # Test Event is Up
    self.assertEqual(
        summary_for('tty (_dev_tty6)', 'tty (/dev/tty6)'),
        'OS Service is up')
def parse_result(self, config, result):
    """Run the configured parser over a command result.

    Builds an emulated Zencommand Cmd object (WinCmd) from the first
    datasource's configuration, feeds it the command output, and
    returns the ParsedResults the parser produced.
    """
    dsconf = config.datasources[0]
    parserLoader = dsconf.params['parser']
    log.debug('Trying to use the %s parser' % parserLoader.pluginName)

    # Build emulated Zencommand Cmd object
    cmd = WinCmd()
    cmd.name = '{}/{}'.format(dsconf.template, dsconf.datasource)
    cmd.command = dsconf.params['script']
    cmd.ds = dsconf.datasource
    cmd.device = dsconf.params['servername']
    cmd.component = dsconf.params['contextcompname']

    # Add the device id to the config for compatibility with parsers
    config.device = config.id
    cmd.deviceConfig = config
    cmd.deviceConfig.name = config.id

    # Add the component id to the points array for compatibility with parsers
    for point in dsconf.points:
        point.component = cmd.component
        cmd.points.append(point)

    cmd.usePowershell = dsconf.params['usePowershell']
    cmd.result.output = '\n'.join(result.stdout)
    cmd.result.exitCode = result.exit_code

    parsed = ParsedResults()
    parserLoader.create().processResults(cmd, parsed)
    return parsed
def test_DatapointsProcessed(self):
    """Verifies that the processResults method populates the second
    argument ('results') with the expected values.
    """
    self.cmd.result.output = "\n".join((
        " PID RSS TIME COMMAND",
        "345 1 00:00:30 someJob a b c",
    ))
    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)

    # Exactly one process-status event is emitted.
    self.assertEqual(len(parsed.events), 1)
    event = parsed.events[0]
    for field, expected in (
            ("component", "Job"),
            ("eventGroup", "Process"),
            ("eventClass", "/Status/OSProcess"),
            ("severity", 0)):
        self.assertEqual(event.get(field), expected)

    # One datapoint each for cpu, mem and count.
    self.assertEqual(len(parsed.values), 3)
    for name, expected in (("cpu", 30), ("mem", 1024), ("count", 1)):
        datapoint, value = _getDatapoint(parsed.values, name)
        self.assertIsNotNone(datapoint)
        self.assertEqual(value, expected)
def testStatus(self):
    """The status command yields one event and no datapoint values."""
    parsed = ParsedResults()
    cmd = self._getStatusCmd(0, 'cmd_status.txt')
    RabbitMQCTLParser().processResults(cmd, parsed)
    self.assertEquals(0, len(parsed.values))
    self.assertEquals(1, len(parsed.events))
def testPsZen5278(self):
    """
    Jira 5278 - defunct process matching
    """
    deviceConfig = Object()
    deviceConfig.device = 'localhost'

    cmd = Object()
    cmd.deviceConfig = deviceConfig
    cmd.command = 'command'

    point = Object()
    point.id = 'cpu_cpu'
    point.data = dict(processName='<defunct>',
                      ignoreParams=False,
                      alertOnRestart=False,
                      failSeverity=0)
    cmd.points = [point]

    cmd.result = Object()
    cmd.result.output = """ PID   RSS        TIME COMMAND
28835916        00:00:00 <defunct>
28967020  1788  00:00:00 sshd: root@sherwood
29622478   448  00:00:08 aioserver
29688042        00:00:00 <defunct>
"""

    parsed = ParsedResults()
    ps().processResults(cmd, parsed)

    # Ensure that we can track defunct processes
    for event in parsed.events:
        summary = event['summary']
        if 'defunct' not in summary:
            raise AssertionError("unexpected event")
        assert summary.find('Process running') >= 0
def _parse_result(self, datasources, response):
    """Parse a successful command response for a group of datasources.

    OSProcess/ps datasources are delegated to the sequential handler
    so that no two OSProcess datasources match the same process; every
    other datasource is parsed independently into its own
    ParsedResults.
    """
    ds = datasources[0]
    log.debug(
        "Command succeeded "
        "device=%s interval=%s datasource=%s elapsed-seconds=%.2f",
        self._devId,
        self.interval,
        ",".join(ds.name for ds in datasources),
        ds.lastStop - ds.lastStart,
    )

    # Process all OSProcess/ps datasources to avoid more than one
    # OSProcess matching the same process
    if ds.name == "OSProcess/ps":
        return self._process_os_processes_results_in_sequence(
            datasources, response,
        )

    parsed_pairs = []
    for datasource in datasources:
        parsed = ParsedResults()
        self._processDatasourceResults(datasource, response, parsed)
        parsed_pairs.append((datasource, parsed))
    return parsed_pairs
def testPsCase15745(self):
    """
    Case 15745
    """
    deviceConfig = Object()
    deviceConfig.device = 'localhost'

    cmd = Object()
    cmd.deviceConfig = deviceConfig
    cmd.command = 'command'

    point = Object()
    point.id = 'cpu_cpu'
    point.data = dict(processName='oracleYAMDB1 (LOCAL=NO)',
                      ignoreParams=False,
                      alertOnRestart=True,
                      failSeverity=3)
    cmd.points = [point]

    cmd.result = Object()
    cmd.result.output = """ PID   RSS        TIME COMMAND
483362 146300 22:58:11 /usr/local/test/oracleYAMDB1 (LOCAL=NO)
495844 137916 22:45:57 /usr/bin/sendmail: MTA: accepting connections
520290 1808 00:00:00 /usr/sbin/aixmibd
"""

    parsed = ParsedResults()
    ps().processResults(cmd, parsed)

    # Oracle process with parenthesis in args should be detected
    for event in parsed.events:
        summary = event['summary']
        if 'oracleYAMDB1' not in summary:
            raise AssertionError("unexpected event")
        assert summary.find('Process running') >= 0
def _process_os_processes_results_in_sequence(self, datasources, response):
    """
    Process OSProcesses in sequence order to avoid more than
    one OSProcess match the same process

    @param datasources: list of OSProcess datasources
    @type datasources: List[Cmd]
    @param response: the results of the command
    @type response: IRunner (typically SshRunner)
    """
    process_parseable_results = []
    # Sort the datasources by sequence.
    # NOTE(review): this sorts the caller's list in place -- confirm
    # callers do not rely on the original ordering.
    datasources.sort(key=lambda x: x.sequence)
    already_matched = []
    # Now we process datasources in sequence order
    for ds in datasources:
        parsed = ParsedResults()
        # Each datasource gets its own copy of the raw response.
        ds.result = copy(response)
        # Temporary attribute acts as a side channel: the parser reads
        # (and appends to) the processes already claimed by earlier
        # datasources in the sequence.
        ds.already_matched_cmdAndArgs = already_matched
        self._processDatasourceResults(ds, response, parsed)
        # Snapshot the accumulated matches, then remove the temporary
        # attribute so it does not leak beyond this call.
        already_matched = ds.already_matched_cmdAndArgs[:]
        del ds.already_matched_cmdAndArgs
        process_parseable_results.append((ds, parsed))
    return process_parseable_results
def testListChannels_none(self):
    """An empty channel list still yields datapoints and one event."""
    parsed = ParsedResults()
    cmd = self._getListChannelsCmd(0, 'cmd_list_channels_none.txt')
    RabbitMQCTLParser().processResults(cmd, parsed)
    self.assertEquals(3, len(parsed.values))
    self.assertEquals(1, len(parsed.events))
def testPsCase15745(self):
    """
    Case 15745 - an Oracle process with parentheses in its arguments
    must match the process-set regex and be reported as up on a
    subsequent collection.
    """
    deviceConfig = Object()
    deviceConfig.device = 'localhost'
    cmd = Object()
    cmd.deviceConfig = deviceConfig
    cmd.command = 'command'
    cmd.includeRegex = ".*oracle.*"
    cmd.excludeRegex = "nothing"
    cmd.replaceRegex = ".*"
    cmd.replacement = "sendmail"
    cmd.primaryUrlPath = "url"
    cmd.displayName = "oracle process set"
    cmd.eventKey = "bar"
    cmd.severity = 1
    cmd.generatedId = "url_" + md5("sendmail").hexdigest().strip()
    p1 = Object()
    p1.id = 'cpu_cpu'
    p1.data = dict(id='url_oracle',
                   alertOnRestart=True,
                   failSeverity=3)
    cmd.points = [p1]
    cmd.result = Object()
    cmd.result.output = """ PID   RSS        TIME COMMAND
483362 146300 22:58:11 /usr/local/test/oracleYAMDB1 (LOCAL=NO)
495844 137916 22:45:57 /usr/bin/sendmail: MTA: accepting connections
520290 1808 00:00:00 /usr/sbin/aixmibd
"""
    results = ParsedResults()
    parser = ps()
    parser.processResults(cmd, results)

    # Second collection: only the oracle process remains.
    results = ParsedResults()
    cmd.result.output = """ PID   RSS        TIME COMMAND
483362 146300 22:58:11 /usr/local/test/oracleYAMDB1 (LOCAL=NO)
"""
    parser.processResults(cmd, results)

    # Oracle process with parenthesis in args should be detected
    for ev in results.events:
        summary = ev['summary']
        # BUG FIX: the failure message previously contained an
        # unfilled '%s' placeholder; interpolate the summary so a
        # failure actually shows which event was wrong.
        self.assert_(summary.find('Process up') >= 0,
                     "'%s' is not up" % summary)
def testPsCase10733(self):
    """
    Case 10733
    """
    deviceConfig = Object()
    deviceConfig.device = 'localhost'

    cmd = Object()
    cmd.deviceConfig = deviceConfig
    cmd.command = 'command'
    cmd.includeRegex = ".*bogo.*"
    cmd.excludeRegex = "nothing"
    cmd.replaceRegex = ".*"
    cmd.replacement = "bogo"
    cmd.primaryUrlPath = "url"
    cmd.displayName = "foo"
    cmd.eventKey = "bar"
    cmd.severity = 1
    cmd.generatedId = "url_" + md5("bogo").hexdigest().strip()

    # One datapoint per metric, all bound to the same process set.
    cmd.points = []
    for dp_name in ('cpu', 'mem', 'count'):
        point = Object()
        point.id = dp_name
        point.data = dict(id='url_bogo',
                          alertOnRestart=True,
                          failSeverity=3)
        cmd.points.append(point)

    cmd.result = Object()
    cmd.result.output = """ PID   RSS        TIME COMMAND
483362 146300 22:58:11 /usr/local/bin/bogoApplication --conf bogo.conf instance5
495844 137916 22:45:57 /usr/local/bin/bogoApplication --conf bogo.conf instance6
508130 138196 22:23:08 /usr/local/bin/bogoApplication --conf bogo.conf instance3
520290 1808 00:00:00 /usr/sbin/aixmibd
561300 140440 22:13:15 /usr/local/bin/bogoApplication --conf bogo.conf instance4
561301 140440 22:13:15 /usr/local/bin/bogoApplication --conf bogo.conf instance4
561302 140440 22:13:15 /usr/local/bin/wrapper bogoApplication --conf bogo.conf instance4
749772 3652 00:00:00 /bin/nmon_aix53 -f -A -P -V -m /tmp
"""

    results = ParsedResults()
    ps().processResults(cmd, results)

    self.assertEquals(len(results.values), 3)
    self.assertEquals(len(results.events), 0)

    # Six matching processes; cpu/mem are summed over all of them.
    expected = (('count', 6), ('cpu', 485221.0), ('mem', 843732.0))
    for dp, value in results.values:
        for name, want in expected:
            if name in dp.id:
                self.assertEquals(value, want)
                break
        else:
            raise AssertionError("unexpected value")
def testListQueues(self):
    """Queue listing yields 55 datapoint values and one event."""
    parsed = ParsedResults()
    cmd = self._getListQueuesCmd(0, 'cmd_list_queues.txt')
    RabbitMQCTLParser().processResults(cmd, parsed)
    self.assertEquals(55, len(parsed.values))
    self.assertEquals(1, len(parsed.events))
def testListConnections_notRunning(self):
    """A non-running broker yields no values, just one event."""
    parsed = ParsedResults()
    cmd = self._getListConnectionsCmd(
        2, 'cmd_list_connections_not_running.txt')
    RabbitMQCTLParser().processResults(cmd, parsed)
    self.assertEquals(0, len(parsed.values))
    self.assertEquals(1, len(parsed.events))
def parser_test(self):
    """Run a zenpack parser against recorded output and compare the
    produced events/values to the matching .expected fixture.

    NOTE(review): 'expected', 'device', 'parser_name', 'datasource',
    'components' and 'output' are not parameters -- presumably they
    are closure variables bound by an enclosing test factory; confirm.
    """
    if expected is None:
        self.fail("missing or bad: {}/parsers/{}/{}.expected".format(
            device, parser_name, datasource))
    expected_events = expected.get("events", [])
    expected_values = expected.get("values", {})
    if not expected_events and not expected_values:
        self.fail("no events or values: {}/parsers/{}/{}.expected".format(
            device, parser_name, datasource))
    # Import the parser class by name from the zenpack's parsers package
    # (class name matches the module name).
    parser_module = importlib.import_module(
        ".parsers.{}".format(parser_name), zenpack_name())
    parser_class = getattr(parser_module, parser_name)
    parser = parser_class()
    results = ParsedResults()
    # Amazon Linux mounts cgroups at /cgroup rather than /sys/fs/cgroup.
    if 'amazon-linux' in device:
        cgroup_path = '/cgroup'
    else:
        cgroup_path = '/sys/fs/cgroup'
    for component in components:
        # Build an emulated Cmd object for this component.
        cmd = Object()
        cmd.device = device
        cmd.ds = datasource
        cmd.command = expected.get("command", "")
        cmd.component = component
        cmd.points = []
        cmd.result = Object()
        cmd.result.output = output
        for dp, v in expected_values.get(component, {}).items():
            point = Object()
            point.id = dp
            point.component = component
            point.data = parser.dataForParser(Context(cgroup_path), None)
            # Stash the expected value on the point for reference.
            point.expected = v
            cmd.points.append(point)
        parser.processResults(cmd, results)
    self.assertItemsEqual(expected_events, results.events)
    # Pivot (point, value) pairs into {component: {datapoint: value}}.
    actual_values = {}
    for point, value in results.values:
        if point.component not in actual_values:
            actual_values[point.component] = {}
        actual_values[point.component][point.id] = value
    self.assertEqual(expected_values, actual_values)
def _parseResults(self, resultList, cacheableDS):
    """
    Interpret the results retrieved from the commands and pass on
    the datapoint values and events.

    @parameter resultList: results of running the commands in a DeferredList
    @type resultList: array of (boolean, datasource)
    @parameter cacheableDS: other datasources that can use the same results
    @type cacheableDS: dictionary of arrays of datasources
    """
    self.state = SshPerformanceCollectionTask.STATE_PARSE_DATA
    parseableResults = []
    for success, datasource in resultList:
        results = ParsedResults()
        if not success:
            # In this case, our datasource is actually a defer.Failure
            reason = datasource
            # The failed datasource travels in the Failure's args.
            datasource, = reason.value.args
            msg = "Datasource %s command timed out" % (datasource.name)
            ev = self._makeCmdEvent(datasource, msg, event_key='Timeout')
            results.events.append(ev)
        else:
            # clear our timeout event
            msg = "Datasource %s command timed out" % datasource.name
            ev = self._makeCmdEvent(datasource, msg, severity=Clear,
                                    event_key='Timeout')
        # Re-use our results for any similar datasources
        cachedDsList = cacheableDS.get(datasource.command)
        if cachedDsList:
            for ds in cachedDsList:
                # Each cached datasource parses its own copy of the
                # result into a fresh ParsedResults container.
                ds.result = copy(datasource.result)
                results = ParsedResults()
                self._processDatasourceResults(ds, results)
                parseableResults.append((ds, results))
            # Start a fresh container for the originating datasource.
            results = ParsedResults()
        # NOTE(review): on the failure path with no cached datasources,
        # 'ev' was already appended above and is appended again here --
        # confirm whether the duplicate timeout event is intentional.
        results.events.append(ev)
        self._processDatasourceResults(datasource, results)
        parseableResults.append((datasource, results))
    return parseableResults
def _failure_error_result(self, failure, datasource):
    """Build ParsedResults describing a command that failed to run.

    Emits an error event carrying the failure's type and reason, plus
    a clearing timeout event (the command failed; it did not time out).
    """
    event = makeCmdEvent(self._devId, datasource, "Could not run command")
    event["error"] = str(failure.type)
    event["reason"] = str(failure.value)

    # Clear the timeout event since this command didn't time out.
    timeout_clear = makeCmdTimeoutEvent(
        self._devId, datasource, severity=Clear)

    parsed = ParsedResults()
    parsed.events.extend([event, timeout_clear])
    return parsed
def test_NoResultsOnFailure(self):
    """If the ps command has a non-zero exit code, disregard the output.
    The result is a missed collection.
    """
    self.cmd.result.exitCode = 1
    self.cmd.result.output = ""

    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)

    # Neither values nor events are produced on failure.
    self.assertEqual(len(parsed.values), 0)
    self.assertEqual(len(parsed.events), 0)
def testCacti(self):
    """Cacti parser extracts single and multiple datapoint values."""
    deviceConfig = Object()
    deviceConfig.device = 'localhost'
    cmd = Object()
    cmd.deviceConfig = deviceConfig

    point = Object()
    point.id = 'cacti_single_result'
    point.data = dict(processName='someJob a b c',
                      ignoreParams=False,
                      alertOnRestart=True,
                      failSeverity=3)
    cmd.points = [point]

    cmd.result = Object()
    cmd.result.output = "77"
    cmd.result.exitCode = 2
    cmd.severity = 2
    cmd.command = "testCactiPlugin"
    cmd.name = "testCactiPlugin"
    cmd.eventKey = "cactiKey"
    cmd.eventClass = "/Cmd"
    cmd.component = "zencommand"

    # Single bare value maps onto the single datapoint.
    parsed = ParsedResults()
    self.parser.processResults(cmd, parsed)
    self.assertEquals(1, len(parsed.values))
    self.assertEquals(77, int(parsed.values[0][1]))

    # Now test multiple data points
    point2 = Object()
    point2.id = 'cacti_multi_result'
    point2.data = dict(processName='someJob a b c',
                       ignoreParams=False,
                       alertOnRestart=True,
                       failSeverity=3)
    cmd.points.append(point2)
    cmd.result.output = "cacti_single_result:77 cacti_multi_result: 4.03E02"

    parsed = ParsedResults()
    self.parser.processResults(cmd, parsed)
    self.assertEquals(2, len(parsed.values))
    values = [pair[1] for pair in parsed.values]
    self.assertTrue(77.0 in values)
    self.assertTrue(403.0 in values)
def test_defuncts(self):
    """Defunct processes are tracked across collection cycles and
    reported as up on the second collection.
    """
    self.cmd.result.output = '\n'.join((
        " PID RSS TIME COMMAND",
        "288 00:00:00 <defunct>",
        "289 1788 00:00:00 sshd: root@sherwood",
        "294 448 00:00:08 aioserver",
        "296 00:00:00 <defunct>",
    ))
    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)
    self.assertEqual(len(parsed.values), 1)
    self.assertEqual(len(parsed.events), 0)

    # Reset 'result' for second collection
    self.cmd.result = Object(**{"exitCode": 0})
    self.cmd.result.output = '\n'.join((
        " PID RSS TIME COMMAND",
        "288 00:00:00 <defunct>",
        "296 00:00:00 <defunct>",
    ))
    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)
    self.assertEqual(len(parsed.values), 1)
    self.assertEqual(len(parsed.events), 2)

    # Both events are clear 'Process up' events for the defunct set.
    for event in parsed.events:
        summary = event.get("summary", "")
        self.assertIn("Process up", summary)
        self.assertIn("defunct", summary)
        self.assertEqual(event.get("severity"), 0)
def test_parsesShortTimestamp(self):
    """Verifies that abbreviated timestamps are parsed correctly.
    """
    self.cmd.result.output = "\n".join((
        " PID RSS TIME COMMAND",
        "345 1 10:23 notherJob1 a b c",
    ))
    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)

    # 10 minutes 23 seconds -> 623 seconds of CPU time.
    cpu_seconds = None
    for datapoint, value in parsed.values:
        if datapoint["id"] == "cpu":
            cpu_seconds = value
            break
    self.assertEqual(cpu_seconds, 623)
def test_sumsCpuTimes(self):
    """Verifies that the CPU times of multiple instances of the same
    process are summed together.
    """
    fixture = (
        " PID RSS TIME COMMAND",
        "345 1 10:23 notherJob1 a b c",
        "123 1 00:05:01 someJob a b c",
        "657 1 2-03:00:00 someJob a b c",
        "8766 1 00:10:00 unrelatedTask a b c",
    )
    self.cmd.result.output = "\n".join(fixture)

    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)

    # 623 + 301 + 183600 seconds across the matched processes.
    _, cpu_total = _getDatapoint(parsed.values, "cpu")
    self.assertEqual(cpu_total, 184524)
def test_sumsMemory(self):
    """Verifies that the memory usage of multiple instances of the same
    process are summed together.
    """
    fixture = (
        " PID RSS TIME COMMAND",
        "345 1 10:23 notherJob1 a b c",
        "123 1 00:05:01 someJob a b c",
        "657 1 2-03:00:00 someJob a b c",
        "8766 1 00:10:00 unrelatedTask a b c",
    )
    self.cmd.result.output = "\n".join(fixture)

    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)

    # 1 KB RSS per matched process, three processes -> 3072 bytes.
    _, mem_total = _getDatapoint(parsed.values, "mem")
    self.assertEqual(mem_total, 3072)
def test_countsProcesses(self):
    """Verifies that the count of multiple instances of the same
    process is correct.
    """
    fixture = (
        " PID RSS TIME COMMAND",
        "345 1 10:23 notherJob1 a b c",
        "123 1 00:05:01 someJob a b c",
        "657 1 2-03:00:00 someJob a b c",
        "8766 1 00:10:00 unrelatedTask a b c",
    )
    self.cmd.result.output = "\n".join(fixture)

    parsed = ParsedResults()
    ps().processResults(self.cmd, parsed)

    # Three of the four listed processes belong to the process set.
    _, process_count = _getDatapoint(parsed.values, "count")
    self.assertEqual(process_count, 3)
def _unexpected_error_result(self, response, datasource):
    """Build ParsedResults for a command that returned an unexpected
    result.

    The event carries the stringified response; the full command line
    is attached only when _showfullcommand is set.
    """
    event = makeCmdEvent(
        self._devId, datasource, "Unexpected result from command",
    )
    event["result"] = str(response)
    if self._showfullcommand:
        event["command"] = datasource.command

    # Clear the timeout event since this command didn't time out.
    timeout_clear = makeCmdTimeoutEvent(
        self._devId, datasource, severity=Clear)

    parsed = ParsedResults()
    parsed.events.extend([event, timeout_clear])
    return parsed
def setUp(self):
    """Build an emulated Cmd object wired to the JSON parser."""
    device = Object()
    device.device = 'localhost'

    self.cmd = Object()
    self.cmd.deviceConfig = device
    self.cmd.name = "testDataSource"
    self.cmd.parser = "JSON"
    self.cmd.severity = 2
    self.cmd.eventKey = 'testEventKey'
    self.cmd.eventClass = '/Cmd'
    self.cmd.command = "testJSONCommand"
    self.cmd.points = []

    self.cmd.result = Object()
    self.cmd.result.exitCode = 0

    self.parser = JSON()
    self.results = ParsedResults()