def testSingleValueEmpty(self):
    """singleValueSql on a cursor that yields no rows must raise
    SQLDidNotReturnSingleValue."""
    try:
        cur = TestEmptyCursor()
        db.singleValueSql(cur, "")
        assert False, "must raise SQLDidNotReturnSingleValue"
    except db.SQLDidNotReturnSingleValue:
        # Expected path: the empty result set triggers the exception.
        # ('except E as e' / no binding replaces the Py2-only 'except E, e'.)
        pass
def testSingleValueEmpty(self):
    """singleValueSql on a cursor that yields no rows must raise
    SQLDidNotReturnSingleValue."""
    try:
        cur = TestEmptyCursor()
        db.singleValueSql(cur, "")
        assert False, "must raise SQLDidNotReturnSingleValue"
    except db.SQLDidNotReturnSingleValue:
        # Expected path; Py2-only 'except E, e' syntax replaced and the
        # unused exception binding dropped.
        pass
def latestEntryBeforeOrEqualTo(connection, aDate, product, version):
    """
    Retrieve the closest report date containing the provided product and
    version that does not exceed the provided date.

    We append a day to the max(report_date) to ensure that we capture
    reports to the end of the day, not the beginning.

    Falls back to aDate (and rolls the transaction back) when the lookup
    fails or returns no rows.
    """
    sql = """
        SELECT
            max(report_date) + 1
        FROM tcbs
        JOIN product_versions USING (product_version_id)
        WHERE tcbs.report_date <= %s
            AND product_name = %s
            AND version_string = %s
    """
    cursor = connection.cursor()
    try:
        result = db.singleValueSql(cursor, sql, (aDate, product, version))
        connection.commit()
    except Exception:
        # Narrowed from a bare 'except:' — still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        result = None
        connection.rollback()
    return result or aDate
def latestEntryBeforeOrEqualTo(connection, aDate, product, version):
    """
    Retrieve the closest report date containing the provided product and
    version that does not exceed the provided date.

    We append a day to the max(report_date) to ensure that we capture
    reports to the end of the day, not the beginning.

    On any database error the transaction is rolled back and aDate is
    returned unchanged.
    """
    sql = """
        SELECT
            max(report_date) + 1
        FROM tcbs
        JOIN product_versions USING (product_version_id)
        WHERE tcbs.report_date <= %s
            AND product_name = %s
            AND version_string = %s
    """
    cursor = connection.cursor()
    try:
        result = db.singleValueSql(cursor, sql, (aDate, product, version))
        connection.commit()
    except Exception:
        # Narrowed from a bare 'except:' clause.
        result = None
        connection.rollback()
    return result or aDate
def latestEntryBeforeOrEqualTo(aCursor, aDate, productdims_id):
    """Return the latest top_crashes_by_signature window_end that is <= aDate
    for the given productdims_id, or aDate itself when the lookup fails or
    finds nothing."""
    sql = """
        select
            max(window_end)
        from
            top_crashes_by_signature tcbs
        where
            tcbs.window_end <= %s
            and tcbs.productdims_id = %s
    """
    try:
        result = db.singleValueSql(aCursor, sql, (aDate, productdims_id))
        if result:
            return result
        return aDate
    except Exception:
        # Narrowed from a bare 'except:' clause; keeps the same
        # fall-back-to-aDate behavior on any query failure.
        return aDate
def whichTCBS(aCursor, dbParams, product, version):
    '''
    Answers a boolean indicating if the old top crashes by signature should
    be used.

    Raises ValueError when product_selector has no row for the
    (product, version) pair.  dbParams is kept for interface compatibility
    but is no longer forwarded: the query is now driven by bound parameters.
    '''
    sql = """
        /* socorro.services.topCrashBySignatures useTCBSClassic */
        SELECT which_table
        FROM product_selector
        WHERE product_name = %s
          AND version_string = %s"""
    try:
        # Bound parameters instead of '%' string interpolation — the old
        # form quoted product/version by hand and was open to SQL injection.
        return db.singleValueSql(aCursor, sql, (product, version))
    except db.SQLDidNotReturnSingleValue:
        logger.info("No record in product_selector for %s %s."
                    % (product, version))
        # 'raise ValueError(...)' replaces the Py2-only 'raise E, msg' form.
        raise ValueError("No record of %s %s" % (product, version))
def latestEntryBeforeOrEqualTo(aCursor, aDate, product, version):
    """
    Retrieve the closest report date containing the provided product and
    version that does not exceed the provided date.

    Returns aDate unchanged when the lookup fails or finds nothing.
    """
    sql = """
        SELECT
            max(report_date)
        FROM tcbs
        JOIN product_versions USING (product_version_id)
        WHERE tcbs.report_date <= %s
            AND product_name = %s
            AND version_string = %s
    """
    try:
        result = db.singleValueSql(aCursor, sql, (aDate, product, version))
    except Exception:
        # Narrowed from a bare 'except:' clause.
        result = None
    return result or aDate
def getId(self, product, version):
    """Return the product_version_id for (product, version), memoizing hits
    in self.cache.

    Raises KeyError when the pair is unknown (the underlying exception is
    logged via util.reportExceptionAndContinue first).
    """
    try:
        return self.cache[(product, version)]
    except KeyError:
        connection = self.database.connection()
        cursor = connection.cursor()
        sql = """
            select
                product_version_id as id
            from product_info
            where product_name = %s
                  and version_string = %s
        """
        try:
            # 'product_id' rather than 'id' to avoid shadowing the builtin.
            product_id = db.singleValueSql(cursor, sql, (product, version))
            self.cache[(product, version)] = product_id
            return product_id
        except Exception:
            util.reportExceptionAndContinue(self.config['logger'])
            raise KeyError((product, version))
def getId(self, product, version):
    """Return the product_version_id for (product, version), memoizing hits
    in self.cache.

    Raises KeyError when the pair is unknown (the underlying exception is
    logged via util.reportExceptionAndContinue first).
    """
    try:
        return self.cache[(product, version)]
    except KeyError:
        connection = self.database.connection()
        cursor = connection.cursor()
        sql = """
            select
                product_version_id as id
            from product_info
            where product_name = %s
                  and version_string = %s
        """
        try:
            # 'product_id' rather than 'id' to avoid shadowing the builtin.
            product_id = db.singleValueSql(cursor, sql, (product, version))
            self.cache[(product, version)] = product_id
            return product_id
        except Exception:
            util.reportExceptionAndContinue(self.config['logger'])
            raise KeyError((product, version))
def totalNumberOfCrashesForPeriod(aCursor, databaseParameters):
    """Sum top_crashes_by_signature counts for one product over the period
    (startDate, to_date], optionally restricted to browser-only or
    plugin/hang crashes via databaseParameters["crash_type"]."""
    where = ""
    if databaseParameters["crash_type"] == 'browser':
        where = "AND tcbs.plugin_count = 0 AND tcbs.hang_count = 0"
    if databaseParameters["crash_type"] == 'plugin':
        # Parenthesized: without the parens SQL precedence made this
        # '(... AND plugin_count > 0) OR hang_count > 0', so the OR escaped
        # the date/product filter and matched every hang crash in the table.
        where = "AND (tcbs.plugin_count > 0 OR tcbs.hang_count > 0)"
    sql = """
        select
            sum(tcbs.count)
        from
            top_crashes_by_signature tcbs
        where
            '%s' < tcbs.window_end
            and tcbs.window_end <= '%s'
            and tcbs.productdims_id = %d
            %s
    """ % (databaseParameters["startDate"], databaseParameters["to_date"],
           databaseParameters["productdims_id"], where)
    # NOTE(review): dates are interpolated directly into the SQL text;
    # callers must supply trusted values, or this should be parameterized.
    #logger.debug(aCursor.mogrify(sql, databaseParameters))
    return db.singleValueSql(aCursor, sql, databaseParameters)
def testSingleValueMulti(self):
    """singleValueSql must return the first column of the first row even
    when the cursor yields multiple rows."""
    try:
        cur = TestMultiCursor(numRows=5)
        assert "Row 0, Column 0" == db.singleValueSql(cur, "")
    except Exception as e:
        # '%s' formatting: the old '"..." + e' concatenated str with an
        # exception object, raising TypeError and masking the real failure.
        assert False, "must not raise an exception for this %s" % e
def testSingleValueSingle(self):
    """singleValueSql must return the first column of the single row."""
    try:
        cur = TestSingleCursor()
        assert "Row 0, Column 0" == db.singleValueSql(cur, "")
    except Exception as e:
        # 'except ... as e' replaces the Py2-only 'except ..., e' syntax.
        assert False, "must not raise an exception for this %s" % e
def get_list(self, **kwargs):
    """
    List all crashes with a given signature and return them as a JSON-able
    dict: {"total": <count>, "hits": [<report row dict>, ...]}.

    Returns None when no signature filter was supplied.

    Optional arguments: see SearchCommon.get_parameters()
    """
    # Creating the connection to the DB
    self.connection = self.database.connection()
    cur = self.connection.cursor()

    params = search_common.get_parameters(kwargs)

    # A signature is mandatory for this service.
    if params["signature"] is None:
        return None

    # The signature becomes the (exact-match) search term.
    params["terms"] = params["signature"]
    params["search_mode"] = "is_exactly"

    # Default mode falls back to starts_with for postgres
    if params["plugin_search_mode"] == "default":
        params["plugin_search_mode"] = "starts_with"

    # Limiting to a signature
    if params["terms"]:
        params["terms"] = self.prepare_terms(params["terms"],
                                             params["search_mode"])

    # Searching for terms in plugins
    if params["report_process"] == "plugin" and params["plugin_terms"]:
        params["plugin_terms"] = " ".join(params["plugin_terms"])
        params["plugin_terms"] = self.prepare_terms(
                                            params["plugin_terms"],
                                            params["plugin_search_mode"])

    # Get information about the versions
    util_service = Util(config=self.context)
    params["versions_info"] = util_service.versions_info(**params)

    # Parsing the versions
    params["versions_string"] = params["versions"]
    (params["versions"], params["products"]) = self.parse_versions(
                                                    params["versions"],
                                                    params["products"])

    # Changing the OS ids to OS names
    for i, elem in enumerate(params["os"]):
        for platform in self.context.platforms:
            if platform["id"] == elem:
                params["os"][i] = platform["name"]

    # Creating the parameters for the sql query
    sql_params = {
    }

    # Preparing the different parts of the sql query
    sql_select = """
        SELECT r.date_processed, r.uptime, r.user_comments, r.uuid,
            r.product, r.version, r.build, r.signature, r.url, r.os_name,
            r.os_version, r.cpu_name, r.cpu_info, r.address, r.reason,
            r.last_crash, r.install_age, r.hangid, r.process_type,
            (r.client_crash_date - (r.install_age * INTERVAL '1 second'))
                AS install_time,
            rd.duplicate_of
    """

    sql_from = self.build_reports_sql_from(params)
    # Join against reports_duplicates so each hit carries its duplicate_of.
    sql_from = """%s
        LEFT OUTER JOIN reports_duplicates rd ON r.uuid = rd.uuid
    """ % sql_from

    (sql_where, sql_params) = self.build_reports_sql_where(params,
                                                           sql_params,
                                                           self.context)

    sql_order = """
        ORDER BY r.date_processed DESC
    """

    (sql_limit, sql_params) = self.build_reports_sql_limit(params,
                                                           sql_params)

    # Assembling the query
    sql_query = " ".join((
            "/* socorro.external.postgresql.report.Report.list */",
            sql_select, sql_from, sql_where, sql_order, sql_limit))

    # Query for counting the results
    sql_count_query = " ".join((
            "/* socorro.external.postgresql.report.Report.list */",
            "SELECT count(*)", sql_from, sql_where))

    # Debug
    logger.debug(sql_count_query)
    logger.debug(cur.mogrify(sql_count_query, sql_params))

    # Querying the DB — count first so the (expensive) list query can be
    # skipped when there are no matches.
    try:
        total = db.singleValueSql(cur, sql_count_query, sql_params)
    except db.SQLDidNotReturnSingleValue:
        total = 0
        util.reportExceptionAndContinue(logger)

    results = []

    # No need to call Postgres if we know there will be no results
    if total != 0:
        try:
            results = db.execute(cur, sql_query, sql_params)
        except psycopg2.Error:
            util.reportExceptionAndContinue(logger)

    json_result = {
        "total": total,
        "hits": []
    }

    # Transforming the results into what we want
    for crash in results:
        row = dict(zip((
                   "date_processed",
                   "uptime",
                   "user_comments",
                   "uuid",
                   "product",
                   "version",
                   "build",
                   "signature",
                   "url",
                   "os_name",
                   "os_version",
                   "cpu_name",
                   "cpu_info",
                   "address",
                   "reason",
                   "last_crash",
                   "install_age",
                   "hangid",
                   "process_type",
                   "install_time",
                   "duplicate_of"), crash))
        # Datetimes are not JSON-serializable; stringify them.
        for i in row:
            if isinstance(row[i], datetime.datetime):
                row[i] = str(row[i])
        json_result["hits"].append(row)

    self.connection.close()

    return json_result
def search(self, **kwargs):
    """
    Search for crashes and return them as a JSON-able dict:
    {"total": <count>, "hits": [<row tuple mapped to dict>, ...]}.

    See http://socorro.readthedocs.org/en/latest/middleware.html#search

    Optional arguments: see SearchCommon.get_parameters()
    """
    # Creating the connection to the DB
    self.connection = self.database.connection()
    cur = self.connection.cursor()

    params = search_common.get_parameters(kwargs)

    # Default mode falls back to starts_with for postgres
    if params["search_mode"] == "default":
        params["search_mode"] = "starts_with"
    if params["plugin_search_mode"] == "default":
        params["plugin_search_mode"] = "starts_with"

    # For Postgres, we never search for a list of terms
    if params["terms"]:
        params["terms"] = " ".join(params["terms"])
        params["terms"] = Search.prepare_terms(params["terms"],
                                               params["search_mode"])

    # Searching for terms in plugins
    if params["report_process"] == "plugin" and params["plugin_terms"]:
        params["plugin_terms"] = " ".join(params["plugin_terms"])
        params["plugin_terms"] = Search.prepare_terms(
                                            params["plugin_terms"],
                                            params["plugin_search_mode"])

    # Parsing the versions
    params["versions_string"] = params["versions"]
    (params["versions"], params["products"]) = Search.parse_versions(
                                                    params["versions"],
                                                    params["products"])

    # Changing the OS ids to OS names
    for i, elem in enumerate(params["os"]):
        for platform in self.context.platforms:
            if platform["id"] == elem:
                params["os"][i] = platform["name"]

    # Creating the parameters for the sql query
    sql_params = {
        "from_date": params["from_date"],
        "to_date": params["to_date"],
        "limit": params["result_number"],
        "offset": params["result_offset"]
    }
    sql_params = Search.dispatch_params(sql_params, "term",
                                        params["terms"])
    sql_params = Search.dispatch_params(sql_params, "product",
                                        params["products"])
    sql_params = Search.dispatch_params(sql_params, "os",
                                        params["os"])
    sql_params = Search.dispatch_params(sql_params, "version",
                                        params["versions"])
    sql_params = Search.dispatch_params(sql_params, "build",
                                        params["build_ids"])
    sql_params = Search.dispatch_params(sql_params, "reason",
                                        params["reasons"])
    sql_params = Search.dispatch_params(sql_params, "plugin_term",
                                        params["plugin_terms"])
    sql_params = Search.dispatch_params(sql_params, "branch",
                                        params["branches"])

    # Preparing the different parts of the sql query

    #---------------------------------------------------------------
    # SELECT
    #---------------------------------------------------------------

    sql_select = self.generate_sql_select(params)

    # Adding count for each OS
    for i in self.context.platforms:
        sql_params["os_%s" % i["id"]] = i["name"]

    #---------------------------------------------------------------
    # FROM
    #---------------------------------------------------------------

    sql_from = self.generate_sql_from(params)

    #---------------------------------------------------------------
    # WHERE
    #---------------------------------------------------------------

    sql_where = ["""
        WHERE r.date_processed BETWEEN %(from_date)s AND %(to_date)s
    """]

    ## Adding terms to where clause
    if params["terms"]:
        if params["search_mode"] == "is_exactly":
            sql_where.append("r.signature=%(term)s")
        else:
            sql_where.append("r.signature LIKE %(term)s")

    ## Adding products to where clause
    if params["products"]:
        products_list = ["r.product=%(product" + str(x) + ")s"
                         for x in range(len(params["products"]))]
        sql_where.append("(%s)" % (" OR ".join(products_list)))

    ## Adding OS to where clause
    if params["os"]:
        os_list = ["r.os_name=%(os" + str(x) + ")s"
                   for x in range(len(params["os"]))]
        sql_where.append("(%s)" % (" OR ".join(os_list)))

    ## Adding branches to where clause
    if params["branches"]:
        branches_list = ["branches.branch=%(branch" + str(x) + ")s"
                         for x in range(len(params["branches"]))]
        sql_where.append("(%s)" % (" OR ".join(branches_list)))

    ## Adding versions to where clause
    if params["versions"]:
        # Get information about the versions
        versions_service = Util(config=self.context)
        fakeparams = {
            "versions": params["versions_string"]
        }
        versions_info = versions_service.versions_info(**fakeparams)

        if isinstance(params["versions"], list):
            # versions is a flat list of alternating product, version
            # pairs; each pair becomes one AND-group, OR-ed together.
            versions_where = []

            for x in range(0, len(params["versions"]), 2):
                version_where = []
                version_where.append(str(x).join(("r.product=%(version",
                                                  ")s")))

                key = "%s:%s" % (params["versions"][x],
                                 params["versions"][x + 1])
                version_where = self.generate_version_where(
                                    key, params["versions"],
                                    versions_info, x, sql_params,
                                    version_where)

                version_where.append(str(x + 1).join((
                                         "r.version=%(version", ")s")))
                versions_where.append("(%s)" % " AND ".join(version_where))

            sql_where.append("(%s)" % " OR ".join(versions_where))
        else:
            # Original product:value
            key = "%s:%s" % (params["products"], params["versions"])

            version_where = []
            version_where = self.generate_version_where(
                                key, params["versions"], versions_info,
                                None, sql_params, version_where)

            version_where.append("r.version=%(version)s")
            sql_where.append("(%s)" % " AND ".join(version_where))

    ## Adding build id to where clause
    if params["build_ids"]:
        build_ids_list = ["r.build=%(build" + str(x) + ")s"
                          for x in range(len(params["build_ids"]))]
        sql_where.append("(%s)" % (" OR ".join(build_ids_list)))

    ## Adding reason to where clause
    if params["reasons"]:
        reasons_list = ["r.reason=%(reason" + str(x) + ")s"
                        for x in range(len(params["reasons"]))]
        sql_where.append("(%s)" % (" OR ".join(reasons_list)))

    if params["report_type"] == "crash":
        sql_where.append("r.hangid IS NULL")
    elif params["report_type"] == "hang":
        sql_where.append("r.hangid IS NOT NULL")

    ## Searching through plugins
    if params["report_process"] == "plugin":
        sql_where.append("r.process_type = 'plugin'")
        sql_where.append(("plugins_reports.date_processed BETWEEN "
                          "%(from_date)s AND %(to_date)s"))

        if params["plugin_terms"]:
            comp = "="

            if params["plugin_search_mode"] in ("contains", "starts_with"):
                comp = " LIKE "

            sql_where_plugin_in = []
            # NOTE(review): if plugin_in ever contains a value other than
            # "name"/"filename", 'field' is unbound (or stale) here —
            # presumably upstream validation restricts it; confirm.
            for f in params["plugin_in"]:
                if f == "name":
                    field = "plugins.name"
                elif f == "filename":
                    field = "plugins.filename"
                sql_where_plugin_in.append(comp.join((field,
                                                      "%(plugin_term)s")))
            sql_where.append("(%s)" % " OR ".join(sql_where_plugin_in))

    elif params["report_process"] == "browser":
        sql_where.append("r.process_type IS NULL")

    elif params["report_process"] == "content":
        sql_where.append("r.process_type = 'content'")

    sql_where = " AND ".join(sql_where)

    #---------------------------------------------------------------
    # GROUP BY
    #---------------------------------------------------------------

    sql_group = self.generate_sql_group(params)

    #---------------------------------------------------------------
    # ORDER BY
    #---------------------------------------------------------------

    sql_order = """
        ORDER BY total DESC
    """

    #---------------------------------------------------------------
    # LIMIT OFFSET
    #---------------------------------------------------------------

    sql_limit = """
        LIMIT %(limit)s
        OFFSET %(offset)s
    """

    # Assembling the query
    sql_from = " JOIN ".join(sql_from)
    sql_query = " ".join(("/* socorro.search.Search search */",
                          sql_select, sql_from, sql_where, sql_group,
                          sql_order, sql_limit))

    # Query for counting the results
    sql_count_query = " ".join((
            "/* socorro.external.postgresql.search.Search search.count */",
            "SELECT count(DISTINCT r.signature)", sql_from, sql_where))

    # Debug
    logger.debug(cur.mogrify(sql_query, sql_params))

    # Querying the DB — count first so the list query can be skipped when
    # there are no matches.
    try:
        total = db.singleValueSql(cur, sql_count_query, sql_params)
    except Exception:
        total = 0
        util.reportExceptionAndContinue(logger)

    # No need to call Postgres if we know there will be no results
    if total != 0:
        try:
            results = db.execute(cur, sql_query, sql_params)
        except Exception:
            results = []
            util.reportExceptionAndContinue(logger)
    else:
        results = []

    json_result = {
        "total": total,
        "hits": []
    }

    # Transforming the results into what we want
    for crash in results:
        if params["report_process"] == "plugin":
            # Plugin searches return three extra plugin columns.
            row = dict(zip(("signature", "count", "is_windows", "is_mac",
                            "is_linux", "numhang", "numplugin",
                            "numcontent", "pluginname", "pluginversion",
                            "pluginfilename"), crash))
        else:
            row = dict(zip(("signature", "count", "is_windows", "is_mac",
                            "is_linux", "numhang", "numplugin",
                            "numcontent"), crash))
        json_result["hits"].append(row)

    self.connection.close()

    return json_result
def testSingleValueMulti(self):
    """singleValueSql must return the first column of the first row even
    when the cursor yields multiple rows."""
    try:
        cur = TestMultiCursor(numRows=5)
        assert "Row 0, Column 0" == db.singleValueSql(cur, "")
    except Exception as e:
        # '%s' formatting: the old '"..." + e' concatenated str with an
        # exception object, raising TypeError and masking the real failure.
        assert False, "must not raise an exception for this %s" % e
def get(self, **kwargs): """ Search for crashes and return them. See http://socorro.readthedocs.org/en/latest/middleware.html#search Optional arguments: see SearchCommon.get_parameters() """ # Creating the connection to the DB self.connection = self.database.connection() cur = self.connection.cursor() params = search_common.get_parameters(kwargs) # change aliases from the web to the implementation's need if "for" in params and "terms" not in params: params["terms"] = params.get("for") if "from" in params and "from_date" not in params: params["from_date"] = params.get("from") if "to" in params and "to_date" not in params: params["to_date"] = params.get("to") if "in" in params and "fields" not in params: params["fields"] = params.get("in") # Default mode falls back to starts_with for postgres if params["search_mode"] == "default": params["search_mode"] = "starts_with" if params["plugin_search_mode"] == "default": params["plugin_search_mode"] = "starts_with" # For Postgres, we never search for a list of terms if params["terms"]: params["terms"] = " ".join(params["terms"]) params["terms"] = Search.prepare_terms(params["terms"], params["search_mode"]) # Searching for terms in plugins if params["report_process"] == "plugin" and params["plugin_terms"]: params["plugin_terms"] = " ".join(params["plugin_terms"]) params["plugin_terms"] = Search.prepare_terms( params["plugin_terms"], params["plugin_search_mode"]) # Get information about the versions util_service = Util(config=self.context) params["versions_info"] = util_service.versions_info(**params) # Parsing the versions params["versions_string"] = params["versions"] (params["versions"], params["products"]) = Search.parse_versions( params["versions"], params["products"]) if hasattr(self.context, 'webapi'): context = self.context.webapi else: # old middleware context = self.context # Changing the OS ids to OS names for i, elem in enumerate(params["os"]): for platform in context.platforms: if platform["id"] == elem: 
params["os"][i] = platform["name"] # Creating the parameters for the sql query sql_params = { } # Preparing the different parts of the sql query sql_select = self.generate_sql_select(params) # Adding count for each OS for i in context.platforms: sql_params["os_%s" % i["id"]] = i["name"] sql_from = self.build_reports_sql_from(params) (sql_where, sql_params) = self.build_reports_sql_where(params, sql_params, self.context) sql_group = self.generate_sql_group(params) sql_order = """ ORDER BY total DESC, signature """ (sql_limit, sql_params) = self.build_reports_sql_limit(params, sql_params) # Assembling the query sql_query = " ".join(("/* socorro.search.Search search */", sql_select, sql_from, sql_where, sql_group, sql_order, sql_limit)) # Query for counting the results sql_count_query = " ".join(( "/* socorro.external.postgresql.search.Search search.count */", "SELECT count(DISTINCT r.signature)", sql_from, sql_where)) # Debug logger.debug(cur.mogrify(sql_query, sql_params)) # Querying the DB try: total = db.singleValueSql(cur, sql_count_query, sql_params) except db.SQLDidNotReturnSingleValue: total = 0 util.reportExceptionAndContinue(logger) results = [] # No need to call Postgres if we know there will be no results if total != 0: try: results = db.execute(cur, sql_query, sql_params) except psycopg2.Error: util.reportExceptionAndContinue(logger) json_result = { "total": total, "hits": [] } # Transforming the results into what we want for crash in results: if params["report_process"] == "plugin": row = dict(zip(("signature", "count", "is_windows", "is_mac", "is_linux", "numhang", "numplugin", "numcontent", "pluginname", "pluginversion", "pluginfilename"), crash)) else: row = dict(zip(("signature", "count", "is_windows", "is_mac", "is_linux", "numhang", "numplugin", "numcontent"), crash)) json_result["hits"].append(row) self.connection.close() return json_result
def get_list(self, **kwargs):
    """
    List all crashes with a given signature and return them as a JSON-able
    dict: {"total": <count>, "hits": [<report row dict>, ...]}.

    Returns None when no signature filter was supplied.

    Optional arguments: see SearchCommon.get_parameters()
    """
    # Creating the connection to the DB
    self.connection = self.database.connection()
    cur = self.connection.cursor()

    params = search_common.get_parameters(kwargs)

    # A signature is mandatory for this service.
    if params["signature"] is None:
        return None

    # The signature becomes the (exact-match) search term.
    params["terms"] = params["signature"]
    params["search_mode"] = "is_exactly"

    # Default mode falls back to starts_with for postgres
    if params["plugin_search_mode"] == "default":
        params["plugin_search_mode"] = "starts_with"

    # Limiting to a signature
    if params["terms"]:
        params["terms"] = self.prepare_terms(params["terms"],
                                             params["search_mode"])

    # Searching for terms in plugins
    if params["report_process"] == "plugin" and params["plugin_terms"]:
        params["plugin_terms"] = " ".join(params["plugin_terms"])
        params["plugin_terms"] = self.prepare_terms(
                                            params["plugin_terms"],
                                            params["plugin_search_mode"])

    # Get information about the versions
    util_service = Util(config=self.context)
    params["versions_info"] = util_service.versions_info(**params)

    # Parsing the versions
    params["versions_string"] = params["versions"]
    (params["versions"],
     params["products"]) = self.parse_versions(params["versions"],
                                               params["products"])

    # Changing the OS ids to OS names
    for i, elem in enumerate(params["os"]):
        for platform in self.context.platforms:
            if platform["id"] == elem:
                params["os"][i] = platform["name"]

    # Creating the parameters for the sql query
    sql_params = {}

    # Preparing the different parts of the sql query
    sql_select = """
        SELECT r.date_processed, r.uptime, r.user_comments, r.uuid,
            r.product, r.version, r.build, r.signature, r.url, r.os_name,
            r.os_version, r.cpu_name, r.cpu_info, r.address, r.reason,
            r.last_crash, r.install_age, r.hangid, r.process_type,
            (r.client_crash_date - (r.install_age * INTERVAL '1 second'))
                AS install_time,
            rd.duplicate_of
    """

    sql_from = self.build_reports_sql_from(params)
    # Join against reports_duplicates so each hit carries its duplicate_of.
    sql_from = """%s
        LEFT OUTER JOIN reports_duplicates rd ON r.uuid = rd.uuid
    """ % sql_from

    (sql_where, sql_params) = self.build_reports_sql_where(params,
                                                           sql_params,
                                                           self.context)

    sql_order = """
        ORDER BY r.date_processed DESC
    """

    (sql_limit, sql_params) = self.build_reports_sql_limit(params,
                                                           sql_params)

    # Assembling the query
    sql_query = " ".join(
        ("/* socorro.external.postgresql.report.Report.list */",
         sql_select, sql_from, sql_where, sql_order, sql_limit))

    # Query for counting the results
    sql_count_query = " ".join(
        ("/* socorro.external.postgresql.report.Report.list */",
         "SELECT count(*)", sql_from, sql_where))

    # Debug
    logger.debug(sql_count_query)
    logger.debug(cur.mogrify(sql_count_query, sql_params))

    # Querying the DB — count first so the (expensive) list query can be
    # skipped when there are no matches.
    try:
        total = db.singleValueSql(cur, sql_count_query, sql_params)
    except db.SQLDidNotReturnSingleValue:
        total = 0
        util.reportExceptionAndContinue(logger)

    results = []

    # No need to call Postgres if we know there will be no results
    if total != 0:
        try:
            results = db.execute(cur, sql_query, sql_params)
        except psycopg2.Error:
            util.reportExceptionAndContinue(logger)

    json_result = {"total": total, "hits": []}

    # Transforming the results into what we want
    for crash in results:
        row = dict(
            zip(("date_processed", "uptime", "user_comments", "uuid",
                 "product", "version", "build", "signature", "url",
                 "os_name", "os_version", "cpu_name", "cpu_info",
                 "address", "reason", "last_crash", "install_age",
                 "hangid", "process_type", "install_time",
                 "duplicate_of"), crash))
        # Datetimes are not JSON-serializable; stringify them.
        for i in row:
            if isinstance(row[i], datetime.datetime):
                row[i] = str(row[i])
        json_result["hits"].append(row)

    self.connection.close()
    return json_result
def search(self, **kwargs):
    """
    Search for crashes and return them as a JSON-able dict:
    {"total": <count>, "hits": [<row tuple mapped to dict>, ...]}.

    See http://socorro.readthedocs.org/en/latest/middleware.html#search

    Optional arguments: see SearchCommon.get_parameters()
    """
    # Creating the connection to the DB
    self.connection = self.database.connection()
    cur = self.connection.cursor()

    params = search_common.get_parameters(kwargs)

    # Default mode falls back to starts_with for postgres
    if params["search_mode"] == "default":
        params["search_mode"] = "starts_with"
    if params["plugin_search_mode"] == "default":
        params["plugin_search_mode"] = "starts_with"

    # For Postgres, we never search for a list of terms
    if params["terms"]:
        params["terms"] = " ".join(params["terms"])
        params["terms"] = Search.prepare_terms(params["terms"],
                                               params["search_mode"])

    # Searching for terms in plugins
    if params["report_process"] == "plugin" and params["plugin_terms"]:
        params["plugin_terms"] = " ".join(params["plugin_terms"])
        params["plugin_terms"] = Search.prepare_terms(
                                            params["plugin_terms"],
                                            params["plugin_search_mode"])

    # Get information about the versions
    util_service = Util(config=self.context)
    params["versions_info"] = util_service.versions_info(**params)

    # Parsing the versions
    params["versions_string"] = params["versions"]
    (params["versions"],
     params["products"]) = Search.parse_versions(params["versions"],
                                                 params["products"])

    # Changing the OS ids to OS names
    for i, elem in enumerate(params["os"]):
        for platform in self.context.platforms:
            if platform["id"] == elem:
                params["os"][i] = platform["name"]

    # Creating the parameters for the sql query
    sql_params = {}

    # Preparing the different parts of the sql query
    sql_select = self.generate_sql_select(params)

    # Adding count for each OS
    for i in self.context.platforms:
        sql_params["os_%s" % i["id"]] = i["name"]

    sql_from = self.build_reports_sql_from(params)

    (sql_where, sql_params) = self.build_reports_sql_where(params,
                                                           sql_params,
                                                           self.context)

    sql_group = self.generate_sql_group(params)

    sql_order = """
        ORDER BY total DESC, signature
    """

    (sql_limit, sql_params) = self.build_reports_sql_limit(params,
                                                           sql_params)

    # Assembling the query
    sql_query = " ".join(
        ("/* socorro.search.Search search */",
         sql_select, sql_from, sql_where, sql_group, sql_order, sql_limit))

    # Query for counting the results
    sql_count_query = " ".join(
        ("/* socorro.external.postgresql.search.Search search.count */",
         "SELECT count(DISTINCT r.signature)", sql_from, sql_where))

    # Debug
    logger.debug(cur.mogrify(sql_query, sql_params))

    # Querying the DB — count first so the list query can be skipped when
    # there are no matches.
    try:
        total = db.singleValueSql(cur, sql_count_query, sql_params)
    except db.SQLDidNotReturnSingleValue:
        total = 0
        util.reportExceptionAndContinue(logger)

    results = []

    # No need to call Postgres if we know there will be no results
    if total != 0:
        try:
            results = db.execute(cur, sql_query, sql_params)
        except psycopg2.Error:
            util.reportExceptionAndContinue(logger)

    json_result = {"total": total, "hits": []}

    # Transforming the results into what we want
    for crash in results:
        if params["report_process"] == "plugin":
            # Plugin searches return three extra plugin columns.
            row = dict(
                zip(("signature", "count", "is_windows", "is_mac",
                     "is_linux", "numhang", "numplugin", "numcontent",
                     "pluginname", "pluginversion", "pluginfilename"),
                    crash))
        else:
            row = dict(
                zip(("signature", "count", "is_windows", "is_mac",
                     "is_linux", "numhang", "numplugin", "numcontent"),
                    crash))
        json_result["hits"].append(row)

    self.connection.close()
    return json_result
def testSingleValueSingle(self):
    """singleValueSql must return the first column of the single row."""
    try:
        cur = TestSingleCursor()
        assert "Row 0, Column 0" == db.singleValueSql(cur, "")
    except Exception as e:
        # 'except ... as e' replaces the Py2-only 'except ..., e' syntax.
        assert False, "must not raise an exception for this %s" % e
def get_comments(self, **kwargs):
    """Return a list of comments on crash reports, filtered by signatures
    and other fields, as {"total": <count>, "hits": [<row dict>, ...]}.

    See socorro.lib.search_common.get_parameters() for all filters.
    """
    # Creating the connection to the DB
    self.connection = self.database.connection()
    cur = self.connection.cursor()

    params = self.prepare_search_params(**kwargs)

    # Creating the parameters for the sql query
    sql_params = {}

    # Preparing the different parts of the sql query

    # WARNING: sensitive data is returned here (email). When there is
    # an authentication mecanism, a verification should be done here.
    # The CASE normalizes both empty-string and NULL emails to null.
    sql_select = """
        SELECT
            r.date_processed,
            r.user_comments,
            r.uuid,
            CASE
                WHEN r.email = '' THEN null
                WHEN r.email IS NULL THEN null
                ELSE r.email
            END
    """

    sql_from = self.build_reports_sql_from(params)

    (sql_where, sql_params) = self.build_reports_sql_where(params,
                                                           sql_params,
                                                           self.context)
    # Only rows that actually carry a comment.
    sql_where = "%s AND r.user_comments IS NOT NULL" % sql_where

    sql_order = "ORDER BY email ASC, r.date_processed ASC"

    # Assembling the query
    sql_query = " ".join((
            "/* external.postgresql.crashes.Crashes.get_comments */",
            sql_select, sql_from, sql_where, sql_order))

    # Query for counting the results
    sql_count_query = " ".join((
            "/* external.postgresql.crashes.Crashes.get_comments */",
            "SELECT count(*)", sql_from, sql_where))

    # Querying the DB — count first so the list query can be skipped when
    # there are no matches.
    try:
        total = db.singleValueSql(cur, sql_count_query, sql_params)
    except db.SQLDidNotReturnSingleValue:
        total = 0
        util.reportExceptionAndContinue(logger)

    results = []

    # No need to call Postgres if we know there will be no results
    if total != 0:
        try:
            results = db.execute(cur, sql_query, sql_params)
        except psycopg2.Error:
            util.reportExceptionAndContinue(logger)

    result = {
        "total": total,
        "hits": []
    }

    # Transforming the results into what we want
    for crash in results:
        row = dict(zip((
                   "date_processed",
                   "user_comments",
                   "uuid",
                   "email"), crash))
        # Datetimes are not JSON-serializable; stringify them.
        for i in row:
            if isinstance(row[i], datetime.datetime):
                row[i] = str(row[i])
        result["hits"].append(row)

    self.connection.close()

    return result
def get_comments(self, **kwargs):
    """Return a list of comments on crash reports, filtered by signatures
    and other fields.

    See socorro.lib.search_common.get_parameters() for all filters.

    Returns a dict {"total": <count>, "hits": [row, ...]} where each row
    maps date_processed, user_comments, uuid and email.
    """
    # Creating the connection to the DB
    self.connection = self.database.connection()
    cur = self.connection.cursor()
    params = self.prepare_search_params(**kwargs)

    # Creating the parameters for the sql query
    sql_params = {}

    # Preparing the different parts of the sql query
    # WARNING: sensitive data is returned here (email). When there is
    # an authentication mecanism, a verification should be done here.
    sql_select = """
        SELECT r.date_processed, r.user_comments, r.uuid,
               CASE WHEN r.email = '' THEN null
                    WHEN r.email IS NULL THEN null
                    ELSE r.email END
    """
    sql_from = self.build_reports_sql_from(params)
    (sql_where, sql_params) = self.build_reports_sql_where(params,
                                                           sql_params,
                                                           self.context)
    # Only keep reports that actually carry a user comment.
    sql_where = "%s AND r.user_comments IS NOT NULL" % sql_where

    sql_order = "ORDER BY email ASC, r.date_processed ASC"

    # Assembling the query
    sql_query = " ".join(
        ("/* external.postgresql.crashes.Crashes.get_comments */",
         sql_select, sql_from, sql_where, sql_order))

    # Query for counting the results
    sql_count_query = " ".join(
        ("/* external.postgresql.crashes.Crashes.get_comments */",
         "SELECT count(*)", sql_from, sql_where))

    # Querying the DB
    try:
        total = db.singleValueSql(cur, sql_count_query, sql_params)
    except db.SQLDidNotReturnSingleValue:
        total = 0
        util.reportExceptionAndContinue(logger)

    results = []
    # No need to call Postgres if we know there will be no results
    if total != 0:
        try:
            results = db.execute(cur, sql_query, sql_params)
        except psycopg2.Error:
            util.reportExceptionAndContinue(logger)

    result = {"total": total, "hits": []}

    # Transforming the results into what we want
    for crash in results:
        row = dict(
            zip(("date_processed", "user_comments", "uuid", "email"),
                crash))
        for i in row:
            # Datetimes are not JSON serializable; stringify them.
            if isinstance(row[i], datetime.datetime):
                row[i] = str(row[i])
        result["hits"].append(row)

    self.connection.close()
    # BUG FIX: previously a bare ``return`` was followed by the dead
    # expression ``result`` on the next line, so this method always
    # returned None and discarded the assembled result.
    return result
def search(self, types, **kwargs):
    """Search for crashes and return them.

    See https://wiki.mozilla.org/Socorro/ElasticSearch_API#Search

    Keyword arguments:
    types -- Type of data to return. Only "signatures" is supported for
             postgres.

    Optional arguments: see SearchAPI.get_parameters

    Returns a dict {"total": <count>, "hits": [row, ...]}.
    """
    # Creating the connection to the DB
    self.connection = self.database.connection()
    cur = self.connection.cursor()

    # Default dates: search the last seven days when no range is given.
    now = datetime.today()
    lastweek = now - timedelta(7)

    # Getting parameters that have default values
    terms = kwargs.get("for", "")
    products = kwargs.get("product", "Firefox")
    from_date = kwargs.get("from", lastweek)
    to_date = kwargs.get("to", now)
    # NOTE(review): "os" shadows the stdlib module name inside this method.
    os = kwargs.get("os", "_all")
    branches = kwargs.get("branches", None)
    build_id = kwargs.get("build", None)
    reason = kwargs.get("crash_reason", None)
    report_type = kwargs.get("report_type", None)
    versions_list = kwargs.get("version", "_all")
    report_process = kwargs.get("report_process", None)
    plugin_in = kwargs.get("plugin_in", None)
    plugin_search_mode = kwargs.get("plugin_search_mode", None)
    plugin_term = kwargs.get("plugin_term", "")
    search_mode = kwargs.get("search_mode", "starts_with")
    result_number = kwargs.get("result_number", 100)
    result_offset = kwargs.get("result_offset", 0)

    # Default mode falls back to starts_with for postgres
    if search_mode == "default":
        search_mode = "starts_with"
    if plugin_search_mode == "default":
        plugin_search_mode = "starts_with"

    # Handling dates
    from_date = PostgresAPI.format_date(from_date)
    to_date = PostgresAPI.format_date(to_date)

    # For Postgres, we never search for a list of terms
    if type(terms) is list:
        terms = " ".join(terms)

    # Searching for terms in signature
    # NOTE(review): terms was just flattened to a string above, so
    # is_terms_a_list is always False at this point — the list branches
    # below appear to be dead code; confirm before removing.
    is_terms_a_list = type(terms) is list
    if terms:
        terms = PostgresAPI.prepare_terms(terms, is_terms_a_list,
                                          search_mode)

    # Searching for terms in plugins
    if report_process == "plugin" and plugin_term:
        plugin_term = PostgresAPI.prepare_terms(plugin_term,
                                                (type(plugin_term) is list),
                                                plugin_search_mode)

    # Parsing the versions
    (versions, products) = PostgresAPI.parse_versions(versions_list,
                                                      products)

    # Changing the OS ids to OS names
    if type(os) is list:
        for i in xrange(len(os)):
            for platform in self.context.platforms:
                if platform["id"] == os[i]:
                    os[i] = platform["name"]
    else:
        for platform in self.context.platforms:
            if platform["id"] == os:
                os = platform["name"]

    # Creating the parameters for the sql query
    params = {
        "from_date" : from_date,
        "to_date" : to_date,
        "limit" : int(result_number),
        "offset" : int(result_offset)
    }
    params = PostgresAPI.dispatch_params(params, "term", terms)
    params = PostgresAPI.dispatch_params(params, "product", products)
    params = PostgresAPI.dispatch_params(params, "os", os)
    params = PostgresAPI.dispatch_params(params, "version", versions)
    params = PostgresAPI.dispatch_params(params, "build", build_id)
    params = PostgresAPI.dispatch_params(params, "reason", reason)
    params = PostgresAPI.dispatch_params(params, "plugin_term", plugin_term)
    params = PostgresAPI.dispatch_params(params, "branch", branches)

    # Preparing the different parts of the sql query

    #---------------------------------------------------------------
    # SELECT
    #---------------------------------------------------------------

    sql_select = self.generate_sql_select(report_process)

    # Adding count for each OS
    for i in self.context.platforms:
        params[ "os_" + i["id"] ] = i["name"]

    #---------------------------------------------------------------
    # FROM
    #---------------------------------------------------------------

    sql_from = self.generate_sql_from(report_process, branches)

    #---------------------------------------------------------------
    # WHERE
    #---------------------------------------------------------------

    sql_where = [""" WHERE r.date_processed BETWEEN %(from_date)s AND %(to_date)s """]

    ## Adding terms to where clause
    if terms:
        if not is_terms_a_list and search_mode == "is_exactly":
            sql_where.append("r.signature=%(term)s")
        elif not is_terms_a_list:
            sql_where.append("r.signature LIKE %(term)s")
        else:
            if search_mode == "is_exactly":
                comp = "="
            else:
                comp = "LIKE"
            # Expands to (r.signature<comp>%(term0)s OR r.signature<comp>%(term1)s ...)
            sql_where.append( "".join( ( "(", PostgresAPI.array_to_string(xrange(len(terms)), " OR ", "r.signature"+comp+"%(term", ")s"), ")" ) ) )

    ## Adding products to where clause
    if type(products) is list:
        sql_where.append( "".join( ( "(", PostgresAPI.array_to_string(xrange(len(products)), " OR ", "r.product=%(product", ")s"), ")" ) ) )
    else:
        sql_where.append("r.product=%(product)s" )

    ## Adding OS to where clause
    if os != "_all":
        if type(os) is list:
            sql_where.append( "".join( ( "(", PostgresAPI.array_to_string(xrange(len(os)), " OR ", "r.os_name=%(os", ")s"), ")" ) ) )
        else:
            sql_where.append("r.os_name=%(os)s")

    ## Adding branches to where clause
    if branches:
        if type(branches) is list:
            sql_where.append( "".join( ( "(", PostgresAPI.array_to_string(xrange(len(branches)), " OR ", "branches.branch=%(branch", ")s"), ")" ) ) )
        else:
            sql_where.append("branches.branch=%(branch)s")

    ## Adding versions to where clause
    if versions != "_all" and len(versions):
        contains_beta = False
        if type(versions) is list:
            # versions is a flat [product, version, product, version, ...]
            # list; pairs are consumed two at a time.
            versions_where = []
            versions_info = self.get_versions_info(cur, versions)
            for x in xrange(0, len(versions), 2):
                version_where = []
                version_where.append(str(x).join(("r.product=%(version",
                                                  ")s")))
                # Original product:value
                key = ":".join((versions[x], versions[x + 1]))
                if key in versions_info:
                    version_info = versions_info[key]
                else:
                    version_info = None
                if version_info and version_info["which_table"] == "new":
                    if "b" in version_info["version_string"]:
                        # it's a beta: match through product_versions and
                        # the build id recorded in product_version_builds.
                        version_where.append(str(x + 1).join((
                            "product_versions.version_string=%(version",
                            ")s")))
                        version_where.append("""r.version = product_versions.release_version AND r.release_channel ILIKE 'beta' AND product_versions.build_type = 'beta' AND EXISTS ( SELECT 1 FROM product_version_builds WHERE product_versions.product_version_id = product_version_builds.product_version_id AND build_numeric(r.build) = product_version_builds.build_id ) """)
                        contains_beta = True
                    else:
                        # it's a release
                        version_where.append(str(x + 1).join((
                            "r.version=%(version", ")s")))
                        version_where.append("r.release_channel NOT IN ('nightly', 'aurora', 'beta')")
                else:
                    version_where.append(str(x + 1).join((
                        "r.version=%(version", ")s")))
                versions_where.append("(%s)" % " AND ".join(version_where))
            sql_where.append("".join(("(", " OR ".join(versions_where),
                                      ")")))
        else:
            versions_info = self.get_versions_info(cur,
                                                   (products, versions))
            # Original product:value
            key = ":".join((products, versions))
            if key in versions_info:
                version_info = versions_info[key]
            else:
                version_info = None
            if version_info and version_info["which_table"] == "new":
                if "b" in version_info["version_string"]:
                    # it's a beta
                    sql_where.append("product_versions.version_string=%(version)s")
                    sql_where.append("""r.version = product_versions.release_version AND r.release_channel ILIKE 'beta' AND product_versions.build_type = 'beta' AND EXISTS ( SELECT 1 FROM product_version_builds WHERE product_versions.product_version_id = product_version_builds.product_version_id AND build_numeric(r.build) = product_version_builds.build_id ) """)
                    contains_beta = True
                else:
                    # it's a release
                    sql_where.append("r.version=%(version)s")
                    sql_where.append("r.release_channel NOT IN ('nightly', 'aurora', 'beta')")
            else:
                sql_where.append("r.version=%(version)s")
        # Beta matching above references product_versions, so the table
        # must be joined in.
        if contains_beta:
            sql_from.append("product_versions ON r.version = product_versions.release_version AND r.product = product_versions.product_name")

    ## Adding build id to where clause
    if build_id:
        if type(build_id) is list:
            sql_where.append( "".join( ( "(", PostgresAPI.array_to_string(xrange(len(build_id)), " OR ", "r.build=%(build", ")s"), ")" ) ) )
        else:
            sql_where.append("r.build=%(build)s")

    ## Adding reason to where clause
    if reason:
        if type(reason) is list:
            sql_where.append( "".join( ( "(", PostgresAPI.array_to_string(xrange(len(reason)), " OR ", "r.reason=%(reason", ")s"), ")" ) ) )
        else:
            sql_where.append("r.reason=%(reason)s")

    # A "crash" report has no hang id; a "hang" report does.
    if report_type == "crash":
        sql_where.append("r.hangid IS NULL")
    elif report_type == "hang":
        sql_where.append("r.hangid IS NOT NULL")

    ## Searching through plugins
    if report_process == "plugin":
        sql_where.append("r.process_type = 'plugin'")
        sql_where.append("plugins_reports.date_processed BETWEEN %(from_date)s AND %(to_date)s")
        if plugin_term:
            comp = "="
            if plugin_search_mode == "contains" or plugin_search_mode == "starts_with":
                comp = " LIKE "
            field = "plugins.name"
            if plugin_in == "filename":
                field = "plugins.filename"
            if type(plugin_term) is list:
                sql_where.append( "".join( ( "(", PostgresAPI.array_to_string(xrange(len(plugin_term)), " OR ", field + comp +"%(plugin_term", ")s"), ")" ) ) )
            else:
                sql_where.append( "".join( ( field, comp, "%(plugin_term)s" ) ) )
    elif report_process == "browser":
        sql_where.append("r.process_type IS NULL")

    sql_where = " AND ".join(sql_where)

    #---------------------------------------------------------------
    # GROUP BY
    #---------------------------------------------------------------

    sql_group = self.generate_sql_group(report_process)

    #---------------------------------------------------------------
    # ORDER BY
    #---------------------------------------------------------------

    sql_order = """ ORDER BY total DESC """

    #---------------------------------------------------------------
    # LIMIT OFFSET
    #---------------------------------------------------------------

    sql_limit = """ LIMIT %(limit)s OFFSET %(offset)s """

    # Assembling the query
    sql_from = " JOIN ".join(sql_from)
    sql_query = " ".join( ( "/* socorro.search.postgresAPI search */", sql_select, sql_from, sql_where, sql_group, sql_order, sql_limit ) )

    # Query for counting the results
    sql_count_query = " ".join( ( "/* socorro.search.postgresAPI search.count */ SELECT count(DISTINCT r.signature) ", sql_from, sql_where ) )

    # Querying the DB
    try:
        total = db.singleValueSql(cur, sql_count_query, params)
    except Exception:
        # Best-effort: a failed count is reported and treated as zero.
        total = 0
        util.reportExceptionAndContinue(logger)

    # No need to call Postgres if we know there will be no results
    if total != 0:
        try:
            results = db.execute(cur, sql_query, params)
        except Exception:
            results = []
            util.reportExceptionAndContinue(logger)
    else:
        results = []

    json_result = {
        "total" : total,
        "hits" : []
    }

    # Transforming the results into what we want; plugin searches return
    # three extra columns describing the plugin.
    for crash in results:
        if report_process == "plugin":
            row = dict( zip( ("signature", "count", "is_windows", "is_mac", "is_linux", "is_solaris", "numhang", "numplugin", "pluginname", "pluginversion", "pluginfilename"), crash ) )
        else:
            row = dict( zip( ("signature", "count", "is_windows", "is_mac", "is_linux", "is_solaris", "numhang", "numplugin"), crash ) )
        json_result["hits"].append(row)

    self.connection.close()

    return json_result