import crashstatsutils
import json
import jydoop

setupjob = crashstatsutils.dosetupjob([('meta_data', 'json'),
                                       ('processed_data', 'json')])


def map(k, meta_data, processed_data, context):
    """
    Group and count Windows NT crashes by (signature, graphics vendor).

    Emits the following counter keys, each with a count of 1:
      'unprocessed'        -- the crash has no processed_data yet
      'jsonerror'          -- meta_data or processed_data is not valid JSON
      'notwindows'         -- os_name is not 'Windows NT'
      (signature, vendor)  -- one count per matching crash
    """
    if processed_data is None:
        context.write('unprocessed', 1)
        return

    try:
        meta = json.loads(meta_data)
        processed = json.loads(processed_data)
    except ValueError:
        # json.loads raises ValueError on malformed input.  The original
        # bare `except:` also swallowed unrelated errors (even
        # KeyboardInterrupt/SystemExit); catch only the JSON failure.
        context.write('jsonerror', 1)
        return

    if processed.get('os_name', None) != 'Windows NT':
        context.write('notwindows', 1)
        return

    signature = processed.get('signature')
    vendor = meta.get('AdapterVendorID', None)
    context.write((signature, vendor), 1)

combine = jydoop.sumreducer
import crashstatsutils
import jydoop
import json
from org.python.core.util import StringUtil

# No columns are requested up front; the map function pulls what it needs
# directly from the current HBase result via the job context.
setupjob = crashstatsutils.dosetupjob([])


def map(k, context):
    """
    Total the byte lengths of all minidumps attached to a crash report.

    Reads the raw HBase row from the job context: parses meta_data:json for
    product/version, the plugin-hang flag, and the list of additional
    minidumps, then sums the value length of the main dump plus each extra
    dump.  Missing dump columns are tallied in `err`.
    """
    result = context.cx.getCurrentValue()
    # meta_data:json holds the crash's submitted annotations as JSON bytes.
    meta_data = StringUtil.fromBytes(result.getValue("meta_data", "json"))
    meta = json.loads(meta_data)
    product = meta['ProductName']
    version = meta['Version']
    # PluginHang is the string "1" when the crash was a plugin hang.
    ispluginhang = meta.get('PluginHang', None) == "1"
    err = 0
    # Main minidump lives at raw_data:dump; a missing column counts as an
    # error and contributes zero length.
    kv = result.getColumnLatest("raw_data", "dump")
    if kv is None:
        err += 1
        dumplen = 0
    else:
        dumplen = kv.getValueLength()
    # additional_minidumps is a comma-separated list of suffixes; each one
    # is stored as raw_data:upload_file_minidump_<suffix>.
    if "additional_minidumps" in meta:
        extradumps = meta["additional_minidumps"].split(",")
        for extradump in extradumps:
            extrakv = result.getColumnLatest("raw_data",
                                             "upload_file_minidump_" + extradump)
            if extrakv is None:
                err += 1
            else:
                extralen = extrakv.getValueLength()
                dumplen += extralen
    # NOTE(review): the visible code ends here without any context.write();
    # the function appears truncated -- confirm against the original source.
import crashstatsutils
import jydoop
import json
import csv
import dateutil.parser
from datetime import datetime

# Filter thresholds used by the map guards below.
cutoff = 50 * 2**20          # 50 MiB; its use is not visible in this chunk
cutoffbuild = 20131207       # minimum buildid (first 8 chars, YYYYMMDD)
cutoffdate = datetime(2013, 12, 12, 0, 30)

setupjob = crashstatsutils.dosetupjob([('meta_data', 'json'),
                                       ('processed_data', 'json')])


def map(k, meta_data, processed_data, context):
    """
    Filter crashes down to processed nightly-channel reports at or past
    the build-id and processing-date cutoffs.
    """
    # Skip crashes that have not been processed yet.
    if processed_data is None:
        return
    meta = json.loads(meta_data)
    # Only the nightly release channel is of interest.
    if meta.get('ReleaseChannel', None) != 'nightly':
        return
    # buildid begins with a YYYYMMDD date; drop builds older than the cutoff.
    if int(meta.get('buildid', '0')[:8]) < cutoffbuild:
        return
    processed = json.loads(processed_data)
    pdate = dateutil.parser.parse(processed['date_processed'])
    if pdate < cutoffdate:
        return
    # NOTE(review): the visible code ends here without emitting output --
    # likely truncated; confirm against the original source.
import crashstatsutils import jydoop import json from org.python.core.util import StringUtil setupjob = crashstatsutils.dosetupjob([]) def map(k, context): result = context.cx.getCurrentValue() meta_data = StringUtil.fromBytes(result.getValue("meta_data", "json")) meta = json.loads(meta_data) product = meta['ProductName'] version = meta['Version'] ispluginhang = meta.get('PluginHang', None) == "1" err = 0 kv = result.getColumnLatest("raw_data", "dump") if kv is None: err += 1 dumplen = 0 else: dumplen = kv.getValueLength() if "additional_minidumps" in meta: extradumps = meta["additional_minidumps"].split(",") for extradump in extradumps: extrakv = result.getColumnLatest( "raw_data", "upload_file_minidump_" + extradump) if extrakv is None: err += 1