Example 1
def setup_query_parameters(config):
    """Build the date strings and the product/version SQL filter fragments used by dailyUrlDump."""
    # "now" is the day after config.day; "yesterday" is config.day itself
    now = config.day + dt.timedelta(1)
    now_str = now.strftime("%Y-%m-%d")
    yesterday = config.day
    yesterday_str = yesterday.strftime("%Y-%m-%d")
    logger.debug("config.day = %s; now = %s; yesterday = %s", config.day, now, yesterday)
    prod_phrase = ""
    try:
        if config.product != "":
            if "," in config.product:
                prod_list = [x.strip() for x in config.product.split(",")]
                prod_phrase = "and r.product in ('%s')" % "','".join(prod_list)
            else:
                prod_phrase = "and r.product = '%s'" % config.product
    except Exception:
        util.reportExceptionAndContinue(logger)
    ver_phrase = ""
    try:
        if config.version != "":
            if "," in config.product:
                ver_list = [x.strip() for x in config.version.split(",")]
                ver_phrase = "and r.version in ('%s')" % "','".join(ver_list)
            else:
                ver_phrase = "and r.version = '%s'" % config.version
    except Exception:
        util.reportExceptionAndContinue(logger)

    return util.DotDict(
        {"now_str": now_str, "yesterday_str": yesterday_str, "prod_phrase": prod_phrase, "ver_phrase": ver_phrase}
    )
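
A note on the filter fragments above: a comma-separated config value becomes an SQL "in (...)" list, a single value becomes an equality test, and an empty value adds no filter at all. A minimal standalone sketch of that idea, independent of the socorro util module (the function name is illustrative, and values are interpolated directly, so they must come from trusted configuration):

def build_filter_phrase(column, raw_value):
    # "" -> no filter; "Firefox" -> equality; "Firefox, Fennec" -> IN list
    if raw_value == "":
        return ""
    if "," in raw_value:
        values = [value.strip() for value in raw_value.split(",")]
        return "and %s in ('%s')" % (column, "','".join(values))
    return "and %s = '%s'" % (column, raw_value)

print(build_filter_phrase("r.product", "Firefox, Fennec"))
# and r.product in ('Firefox','Fennec')
print(build_filter_phrase("r.version", "10.0"))
# and r.version = '10.0'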
Example 2
def setup_query_parameters(config):
    now = config.day + dt.timedelta(1)
    now_str = now.strftime('%Y-%m-%d')
    yesterday = config.day
    yesterday_str = yesterday.strftime('%Y-%m-%d')
    logger.debug("config.day = %s; now = %s; yesterday = %s", config.day, now,
                 yesterday)
    prod_phrase = ''
    try:
        if config.product != '':
            if ',' in config.product:
                prod_list = [x.strip() for x in config.product.split(',')]
                prod_phrase = ("and r.product in ('%s')" %
                               "','".join(prod_list))
            else:
                prod_phrase = "and r.product = '%s'" % config.product
    except Exception:
        util.reportExceptionAndContinue(logger)
    ver_phrase = ''
    try:
        if config.version != '':
            if ',' in config.version:
                ver_list = [x.strip() for x in config.version.split(',')]
                ver_phrase = ("and r.version in ('%s')" % "','".join(ver_list))
            else:
                ver_phrase = "and r.version = '%s'" % config.version
    except Exception:
        util.reportExceptionAndContinue(logger)

    return util.DotDict({
        'now_str': now_str,
        'yesterday_str': yesterday_str,
        'prod_phrase': prod_phrase,
        'ver_phrase': ver_phrase
    })
Example 3
def cleanup(self):
  self.logger.debug("%s - killing database connections", threading.currentThread().getName())
  for name, aConnection in self.iteritems():
    try:
      aConnection.close()
      self.logger.debug("%s - connection %s closed", threading.currentThread().getName(), name)
    except psycopg2.InterfaceError:
      self.logger.debug("%s - connection %s already closed", threading.currentThread().getName(), name)
    except:
      util.reportExceptionAndContinue(self.logger)
Example 4
 def cleanup(self):
     self.logger.debug("%s - killing database connections", threading.currentThread().getName())
     for name, aConnection in self.iteritems():
         try:
             aConnection.close()
             self.logger.debug("%s - connection %s closed", threading.currentThread().getName(), name)
         except psycopg2.InterfaceError:
             self.logger.debug("%s - connection %s already closed", threading.currentThread().getName(), name)
         except:
             util.reportExceptionAndContinue(self.logger)
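
Both variants follow the same pattern: iterate over every pooled connection, close it, treat an already-closed connection as harmless, and report any other failure without aborting, so the remaining connections still get closed. A rough self-contained sketch of that shape, with stub connections and the standard logging module standing in for psycopg2 and socorro's util helpers:

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("pool_sketch")

class AlreadyClosed(Exception):
    """Stand-in for psycopg2.InterfaceError in this sketch."""

class FakeConnection(object):
    def __init__(self):
        self.closed = False
    def close(self):
        if self.closed:
            raise AlreadyClosed("already closed")
        self.closed = True

class ConnectionPool(dict):
    def cleanup(self):
        for name, connection in self.items():
            try:
                connection.close()
                log.debug("connection %s closed", name)
            except AlreadyClosed:
                log.debug("connection %s already closed", name)
            except Exception:
                # report and continue so the other connections still get closed
                log.exception("failed to close connection %s", name)

pool = ConnectionPool(primary=FakeConnection(), replica=FakeConnection())
pool["replica"].close()  # simulate a handle that was already closed elsewhere
pool.cleanup()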
Example 5
def dailyUrlDump(
    config,
    sdb=sdb,
    gzipped_csv_files=gzipped_csv_files,
    IdCache=IdCache,
    write_row=write_row,
    process_crash=process_crash,
    logger=logger,
):
    dbConnectionPool = sdb.DatabaseConnectionPool(config, logger)
    # Set the temp_buffers for this session
    databaseTempbuffers = "8MB"  # default
    if "databaseTempbuffers" in config:
        databaseTempbuffers = config.databaseTempbuffers
    try:
        try:
            db_conn, db_cursor = dbConnectionPool.connectionCursorPair()

            with gzipped_csv_files(config) as csv_file_handles_tuple:
                headers_not_yet_written = True
                id_cache = IdCache(db_cursor)
                sql_parameters = setup_query_parameters(config)
                logger.debug(
                    "config.day = %s; now = %s; yesterday = %s",
                    config.day,
                    sql_parameters.now_str,
                    sql_parameters.yesterday_str,
                )
                sql_query = sql % sql_parameters
                logger.debug("SQL is: %s", sql_query)
                db_cursor.execute(""" SET TEMP_BUFFERS = %s """, (databaseTempbuffers,))
                for crash_row in sdb.execute(db_cursor, sql_query):
                    if headers_not_yet_written:
                        write_row(csv_file_handles_tuple, [x[0] for x in db_cursor.description])
                        headers_not_yet_written = False
                    column_value_list = process_crash(crash_row, id_cache)
                    write_row(csv_file_handles_tuple, column_value_list)
                    # end for loop over each crash_row
        finally:
            dbConnectionPool.cleanup()
    except:
        util.reportExceptionAndContinue(logger)
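
The inner loop is a common cursor-to-CSV pattern: the header row is built from cursor.description just before the first data row, then every row is transformed and written. A reduced sketch with sqlite3 and the csv module (gzip handling and the socorro-specific crash processing are left out):

import csv
import sqlite3
import sys

connection = sqlite3.connect(":memory:")
connection.execute("create table reports (product text, version text)")
connection.execute("insert into reports values ('Firefox', '10.0')")
cursor = connection.execute("select product, version from reports")

writer = csv.writer(sys.stdout, delimiter="\t")
headers_not_yet_written = True
for row in cursor:
    if headers_not_yet_written:
        # cursor.description is a sequence of 7-tuples; item 0 is the column name
        writer.writerow([column[0] for column in cursor.description])
        headers_not_yet_written = False
    writer.writerow(row)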
Example 6
    def PUT(self, *args):
        """
        Call the put method defined in a subclass and return its result.

        If the subclass returns a (body, content_type) tuple, the body is
        returned raw under that content type; otherwise the result is
        JSON-encoded and returned as application/json.

        """
        try:
            result = self.put(*args)
            if isinstance(result, tuple):
                web.header('Content-Type', result[1])
                return result[0]
            web.header('Content-Type', 'application/json')
            return json.dumps(result)
        except web.HTTPError:
            raise
        except Exception:
            util.reportExceptionAndContinue(self.config.logger)
            raise
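
The dispatch in the try block is the part worth noting: a (body, content_type) tuple from the subclass is returned raw under its own content type, and anything else is JSON-encoded. A hypothetical standalone rendering of that rule (web.header is replaced by a plain callable here):

import json

def render_result(result, set_header):
    # Mirrors the dispatch above: tuple -> raw body with its own content type,
    # anything else -> JSON under application/json.
    if isinstance(result, tuple):
        set_header("Content-Type", result[1])
        return result[0]
    set_header("Content-Type", "application/json")
    return json.dumps(result)

headers = {}
print(render_result({"ok": True}, headers.__setitem__))               # {"ok": true}
print(render_result(("<status/>", "text/xml"), headers.__setitem__))  # <status/>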
Example 7
def dailyUrlDump(config,
                 sdb=sdb,
                 gzipped_csv_files=gzipped_csv_files,
                 IdCache=IdCache,
                 write_row=write_row,
                 process_crash=process_crash,
                 logger=logger):
    dbConnectionPool = sdb.DatabaseConnectionPool(config, logger)
    # Set the temp_buffers for this session
    databaseTempbuffers = '8MB'  # default
    if 'databaseTempbuffers' in config:
        databaseTempbuffers = config.databaseTempbuffers
    try:
        try:
            db_conn, db_cursor = dbConnectionPool.connectionCursorPair()

            with gzipped_csv_files(config) as csv_file_handles_tuple:
                headers_not_yet_written = True
                id_cache = IdCache(db_cursor)
                sql_parameters = setup_query_parameters(config)
                logger.debug("config.day = %s; now = %s; yesterday = %s",
                             config.day, sql_parameters.now_str,
                             sql_parameters.yesterday_str)
                sql_query = sql % sql_parameters
                logger.debug("SQL is: %s", sql_query)
                db_cursor.execute(""" SET TEMP_BUFFERS = %s """,
                                  (databaseTempbuffers, ))
                for crash_row in sdb.execute(db_cursor, sql_query):
                    if headers_not_yet_written:
                        write_row(csv_file_handles_tuple,
                                  [x[0] for x in db_cursor.description])
                        headers_not_yet_written = False
                    column_value_list = process_crash(crash_row, id_cache)
                    write_row(csv_file_handles_tuple, column_value_list)
                    # end for loop over each crash_row
        finally:
            dbConnectionPool.cleanup()
    except:
        util.reportExceptionAndContinue(logger)
Example 8
        try:
            result = self.get(*args)
            if isinstance(result, tuple):
                web.header('Content-Type', result[1])
                return result[0]
            web.header('Content-Type', 'application/json')
            return json.dumps(result)
        except web.webapi.HTTPError:
            raise
        except (DatabaseError, InsertionError), e:
            raise web.webapi.InternalError(message=str(e))
        except (MissingArgumentError, BadArgumentError), e:
            raise BadRequest(str(e))
        except Exception:
            stringLogger = util.StringLogger()
            util.reportExceptionAndContinue(stringLogger)
            try:
                util.reportExceptionAndContinue(self.config.logger)
            except (AttributeError, KeyError):
                pass
            raise Exception(stringLogger.getMessages())

    def get(self, *args):
        raise NotImplementedError(
            "The GET function has not been implemented for %s" % repr(args)
        )

    def POST(self, *args):
        """
        Call the post method defined in a subclass and return its result.
Example 9
config.logger = logger

if config.numberOfSubmissions == 'forever':
    config.iteratorFunc = sub.createInfiniteFileSystemIterator
elif config.numberOfSubmissions == 'all':
    config.iteratorFunc = sub.createFileSystemIterator
else:
    config.iteratorFunc = sub.createLimitedFileSystemIterator
    config.numberOfSubmissions = int(config.numberOfSubmissions)

if config.dryrun:
    config.submissionFunc = sub.submissionDryRun
else:
    config.submissionFunc = sub.doSubmission

config.sleep = float(config.delay) / 1000.0

config.uniqueHang = 'uniqueHangId' in config

if config.searchRoot:
    sub.submitter(config)
else:
    try:
        import json
        with open(config.jsonfile) as jsonFile:
            formData = json.load(jsonFile)
        config.submissionFunc(formData, config.dumpfile, config.url,
                              config.logger)
    except Exception, x:
        sutil.reportExceptionAndContinue(config.logger)
Example 10
if config.numberOfSubmissions == 'forever':
    config.iteratorFunc = sub.createInfiniteFileSystemIterator
elif config.numberOfSubmissions == 'all':
    config.iteratorFunc = sub.createFileSystemIterator
else:
    config.iteratorFunc = sub.createLimitedFileSystemIterator
    config.numberOfSubmissions = int(config.numberOfSubmissions)

if config.dryrun:
    config.submissionFunc = sub.submissionDryRun
else:
    config.submissionFunc = sub.doSubmission

config.sleep = float(config.delay)/1000.0

config.uniqueHang = 'uniqueHangId' in config

if config.searchRoot:
    sub.submitter(config)
else:
    try:
        import json
        with open(config.jsonfile) as jsonFile:
            formData = json.load(jsonFile)
        config.submissionFunc(formData,
                              config.dumpfile,
                              config.url,
                              config.logger)
    except Exception, x:
        sutil.reportExceptionAndContinue(config.logger)
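
Both submitter variants choose an iterator and a submission function from configuration values; anything other than 'forever' or 'all' is treated as an integer cap. A compact sketch of the same selection, with placeholder functions standing in for the sub module:

def iterate_forever(root):
    pass  # placeholder for sub.createInfiniteFileSystemIterator

def iterate_all(root):
    pass  # placeholder for sub.createFileSystemIterator

def iterate_limited(root, limit):
    pass  # placeholder for sub.createLimitedFileSystemIterator

def pick_iterator(number_of_submissions):
    # 'forever' and 'all' select dedicated iterators; any other value is an
    # integer cap on how many crashes to submit
    named = {"forever": iterate_forever, "all": iterate_all}
    if number_of_submissions in named:
        return named[number_of_submissions], None
    return iterate_limited, int(number_of_submissions)

print(pick_iterator("forever"))  # (<function iterate_forever ...>, None)
print(pick_iterator("25"))       # (<function iterate_limited ...>, 25)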