Example 1
def setup_query_parameters(config):
    now = config.day + dt.timedelta(1)
    now_str = now.strftime("%Y-%m-%d")
    yesterday = config.day
    yesterday_str = yesterday.strftime("%Y-%m-%d")
    logger.debug("config.day = %s; now = %s; yesterday = %s", config.day, now, yesterday)
    prod_phrase = ""
    try:
        if config.product != "":
            if "," in config.product:
                prod_list = [x.strip() for x in config.product.split(",")]
                prod_phrase = "and r.product in ('%s')" % "','".join(prod_list)
            else:
                prod_phrase = "and r.product = '%s'" % config.product
    except Exception:
        util.reportExceptionAndContinue(logger)
    ver_phrase = ""
    try:
        if config.version != "":
            if "," in config.product:
                ver_list = [x.strip() for x in config.version.split(",")]
                ver_phrase = "and r.version in ('%s')" % "','".join(ver_list)
            else:
                ver_phrase = "and r.version = '%s'" % config.version
    except Exception:
        util.reportExceptionAndContinue(logger)

    return util.DotDict(
        {"now_str": now_str, "yesterday_str": yesterday_str, "prod_phrase": prod_phrase, "ver_phrase": ver_phrase}
    )
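Every snippet in this collection funnels failures through reportExceptionAndContinue (imported variously as util, sutil, or socorro_util): log the active exception, then carry on. A minimal sketch of such a helper, assuming only sys.exc_info() and a stdlib-style logger; the real helper also accepts loggingLevel and ignoreFunction keyword arguments, as Example 38 shows.

import logging
import sys
import traceback

def report_exception_and_continue(logger, logging_level=logging.ERROR):
    # Log the active exception (type, message, traceback) without
    # re-raising, so the caller can swallow the failure yet keep a record.
    exc_type, exc_value, exc_tb = sys.exc_info()
    logger.log(logging_level, "caught %s", exc_type)
    logger.log(logging_level, str(exc_value))
    for line in traceback.format_tb(exc_tb):
        logger.log(logging_level, line.rstrip())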
Example 2
    def get(self, **kwargs):
        """Return a single crash report from it's UUID. """
        filters = [
            ("uuid", None, "str"),
        ]
        params = external_common.parse_arguments(filters, kwargs)

        day = int(params.uuid[-2:])
        month = int(params.uuid[-4:-2])
        # assuming we won't use this after year 2099
        year = int("20%s" % params.uuid[-6:-4])

        crash_date = datetime.date(year=year, month=month, day=day)
        logger.debug("Looking for crash %s during day %s" % (params.uuid,
                                                             crash_date))

        sql = """/* socorro.external.postgresql.crash.Crash.get */
            SELECT reports.email, reports.url, reports.addons_checked,
            (   SELECT reports_duplicates.duplicate_of
                FROM reports_duplicates
                WHERE reports_duplicates.uuid = reports.uuid
            ) as duplicate_of
            FROM reports
            WHERE reports.uuid=%(uuid)s
            AND reports.success IS NOT NULL
            AND utc_day_is( reports.date_processed,  %(crash_date)s)
        """
        sql_params = {
            "uuid": params.uuid,
            "crash_date": crash_date
        }

        results = []

        # Creating the connection to the DB
        self.connection = self.database.connection()
        cur = self.connection.cursor()

        try:
            results = db.execute(cur, sql, sql_params)
        except psycopg2.Error:
            util.reportExceptionAndContinue(logger)

        json_result = {
            "total": 0,
            "hits": []
        }

        for crash in results:
            row = dict(zip((
                       "email",
                       "url",
                       "addons_checked",
                       "duplicate_of"), crash))
            json_result["hits"].append(row)
        json_result["total"] = len(json_result["hits"])

        self.connection.close()

        return json_result
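Example 2 derives the crash date from the last six characters of the UUID, which encode it as YYMMDD (hence the "after year 2099" caveat). A standalone illustration with a made-up OOID; Example 18 calls a datetimeutil.uuid_to_date helper that presumably does the same:

import datetime

def uuid_to_date(uuid):
    # The last six characters of a Socorro OOID encode YYMMDD;
    # the century is assumed to be 20xx.
    day = int(uuid[-2:])
    month = int(uuid[-4:-2])
    year = 2000 + int(uuid[-6:-4])
    return datetime.date(year=year, month=month, day=day)

# Hypothetical OOID ending in "120405" -> 2012-04-05
assert uuid_to_date("a1b2c3d4-e5f6-47a8-a9b0-c1d2e3120405") == datetime.date(2012, 4, 5)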
Example 3
def scrapeB2G(config, cursor, product_name, urllib=urllib2, date=None):
    month = date.strftime('%m')
    b2g_url = '%s/%s/%s/' % (config.base_url, product_name, 'manifests')

    try:
        day = date.strftime('%d')
        dir_prefix = '%s-%s-%s' % (date.year, month, day)
        # I have no idea what this first level of directories means :/
        # TODO get info about that and update this search
        version_dirs = getLinks(b2g_url, startswith='1.', urllib=urllib)
        for version_dir in version_dirs:
            # /1.0.0/2013/01/2013-01-27-07/*.json
            prod_url = '%s/%s/%s/%s/' % (b2g_url, version_dir, date.year, month)
            nightlies = getLinks(prod_url, startswith=dir_prefix, urllib=urllib)
            for nightly in nightlies:
                for info in getB2G(nightly, prod_url):
                    (platform, repository, version, kvpairs) = info
                    build_id = kvpairs['buildid']
                    build_type = kvpairs['build_type']
                    buildutil.insert_build(cursor, product_name, version, platform,
                                           build_id, build_type, kvpairs.get('beta_number', None), repository,
                                           ignore_duplicates=True)

    except urllib.URLError:
        util.reportExceptionAndContinue(logger)
Example 4
 def cleanup (self):
   for name, crashStore in self.iteritems():
     try:
       crashStore.close()
       self.logger.debug("crashStore for %s closed", name)
     except:
       sutil.reportExceptionAndContinue(self.logger)
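This close-everything loop recurs in Examples 22 and 25 with database connections: no single failing close may stop the others. A generic sketch, reusing the hypothetical helper sketched after Example 1:

def close_all(resources, logger):
    # Close every named resource; report a failed close and keep going
    # so the remaining resources are still closed.
    for name, resource in resources.items():
        try:
            resource.close()
            logger.debug("resource %s closed", name)
        except Exception:
            report_exception_and_continue(logger)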
Example 5
 def doSubmission(ooidTuple):
     logger.debug("received: %s", str(ooidTuple))
     try:
         sourceStorage = crashStoragePoolForSource.crashStorage()
         destStorage = crashStoragePoolForDest.crashStorage()
         ooid = ooidTuple[0]
         try:
             logger.debug("trying to fetch %s", ooid)
             jsonContents = sourceStorage.get_meta(ooid)
         except ValueError:
             logger.warning("the json for %s is degenerate and cannot be loaded" " - saving empty json", ooid)
             jsonContents = {}
         dumpContents = sourceStorage.get_raw_dump(ooid)
         if conf.dryrun:
             logger.info("dry run - pushing %s to dest", ooid)
         else:
             logger.debug("pushing %s to dest", ooid)
             result = destStorage.save_raw(ooid, jsonContents, dumpContents)
             if result == cstore.CrashStorageSystem.ERROR:
                 return iwf.FAILURE
             elif result == cstore.CrashStorageSystem.RETRY:
                 return iwf.RETRY
             try:
                 sourceStorage.quickDelete(ooid)
             except Exception:
                sutil.reportExceptionAndContinue(logger)
         return iwf.OK
     except Exception, x:
         sutil.reportExceptionAndContinue(logger)
         return iwf.FAILURE
Example 6
def dailyUrlDump(config, sdb=sdb,
                 gzipped_csv_files=gzipped_csv_files,
                 IdCache=IdCache,
                 write_row=write_row,
                 process_crash=process_crash,
                 logger=logger):
    dbConnectionPool = sdb.DatabaseConnectionPool(config, logger)
    try:
        try:
            db_conn, db_cursor = dbConnectionPool.connectionCursorPair()

            with gzipped_csv_files(config) as csv_file_handles_tuple:
                headers_not_yet_written = True
                id_cache = IdCache(db_cursor)
                sql_parameters = setup_query_parameters(config)
                logger.debug("config.day = %s; now = %s; yesterday = %s",
                             config.day,
                             sql_parameters.now_str,
                             sql_parameters.yesterday_str)
                sql_query = sql % sql_parameters
                logger.debug("SQL is: %s", sql_query)
                for crash_row in sdb.execute(db_cursor, sql_query):
                    if headers_not_yet_written:
                        write_row(csv_file_handles_tuple,
                                  [x[0] for x in db_cursor.description])
                        headers_not_yet_written = False
                    column_value_list = process_crash(crash_row, id_cache)
                    write_row(csv_file_handles_tuple,
                              column_value_list)
                    # end for loop over each crash_row
        finally:
            dbConnectionPool.cleanup()
    except:
        util.reportExceptionAndContinue(logger)
Example 7
  def save_raw (self, uuid, jsonData, dump, currentTimestamp):
    try:
      #throttleAction = self.throttler.throttle(jsonData)
      throttleAction = jsonData.legacy_processing
      if throttleAction == LegacyThrottler.DISCARD:
        self.logger.debug("discarding %s %s", jsonData.ProductName, jsonData.Version)
        return CrashStorageSystem.DISCARDED
      elif throttleAction == LegacyThrottler.DEFER:
        self.logger.debug("deferring %s %s", jsonData.ProductName, jsonData.Version)
        fileSystemStorage = self.deferredFileSystemStorage
      else:
        self.logger.debug("not throttled %s %s", jsonData.ProductName, jsonData.Version)
        fileSystemStorage = self.standardFileSystemStorage

      jsonFileHandle, dumpFileHandle = fileSystemStorage.newEntry(uuid, self.hostname, currentTimestamp)
      try:
        dumpFileHandle.write(dump)
        json.dump(jsonData, jsonFileHandle)
      finally:
        dumpFileHandle.close()
        jsonFileHandle.close()

      return CrashStorageSystem.OK
    except:
      sutil.reportExceptionAndContinue(self.logger)
      return CrashStorageSystem.ERROR
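save_raw reports its outcome through CrashStorageSystem constants instead of exceptions, which is what lets Example 5 translate them into RETRY/FAILURE work items and Example 24 into HTTP responses. A sketch of that enumeration with hypothetical values; the examples only require the names to be distinct:

class CrashStorageSystem(object):
    # Values are placeholders; callers compare against the names only.
    OK = 0
    DISCARDED = 1
    ERROR = 2
    RETRY = 3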
Example 8
def scrapeReleases(config, cursor, product_name, urllib=urllib2):
    prod_url = "%s/%s/" % (config.base_url, product_name)

    # releases are sometimes in nightly, sometimes in candidates dir.
    # look in both.
    for directory in ("nightly", "candidates"):
        if not getLinks(prod_url, startswith=directory, urllib=urllib):
            logger.debug("Dir %s not found for %s" % (directory, product_name))
            continue

        url = "%s/%s/%s/" % (config.base_url, product_name, directory)

        try:
            releases = getLinks(url, endswith="-candidates/", urllib=urllib)
            for release in releases:
                for info in getRelease(release, url):
                    (platform, version, build_number, kvpairs) = info
                    build_type = "Release"
                    beta_number = None
                    repository = "mozilla-release"
                    if "b" in version:
                        build_type = "Beta"
                        version, beta_number = version.split("b")
                        repository = "mozilla-beta"
                    build_id = kvpairs["buildID"]
                    buildutil.insert_build(
                        cursor, product_name, version, platform, build_id, build_type, beta_number, repository
                    )
        except urllib.URLError:
            util.reportExceptionAndContinue(logger)
Example 9
def resubmit (conf, jds=jds, hbc=hbc, open=open):
  logger = conf.logger
  logger.info('creating hbase connection: host: %s, port: %s', conf.hbaseHost, conf.hbasePort)
  hbaseConnection = hbc.HBaseConnectionForCrashReports(conf.hbaseHost,
                                                       conf.hbasePort,
                                                       conf.hbaseTimeout)
  logger.info('creating json/dump store object: root: %s', conf.hbaseFallbackFS)
  fallbackStorage = jds.JsonDumpStorage(root=conf.hbaseFallbackFS,
                                        maxDirectoryEntries = conf.hbaseFallbackDumpDirCount,
                                        jsonSuffix = conf.jsonFileSuffix,
                                        dumpSuffix = conf.dumpFileSuffix,
                                        dumpGID = conf.hbaseFallbackDumpGID,
                                        dumpPermissions = conf.hbaseFallbackDumpPermissions,
                                        dirPermissions = conf.hbaseFallbackDirPermissions,
                                       )
  processedCrashList = []
  for uuid in fallbackStorage.destructiveDateWalk():
    logger.info('found uuid: %s', uuid)
    try:
      jsonFile = open(fallbackStorage.getJson(uuid))
      try:
        jsonContents = json.load(jsonFile)
      finally:
        jsonFile.close()
      dumpFile = open(fallbackStorage.getDump(uuid))
      try:
        dumpContents = dumpFile.read()
      finally:
        dumpFile.close()
      logger.debug('pushing %s to hbase', uuid)
      hbaseConnection.put_json_dump(uuid, jsonContents, dumpContents)
      processedCrashList.append(uuid)
    except Exception, x:
      sutil.reportExceptionAndContinue(logger)
Example 10
  def __init__(self,host,port,timeout,
               thrift=Thrift,
               tsocket=TSocket,
               ttrans=TTransport,
               protocol=TBinaryProtocol,
               ttp=ttypes,
               client=Client,
               column=ColumnDescriptor,
               mutation=Mutation,
               logger=utl.SilentFakeLogger()):
    self.host = host
    self.port = port
    self.timeout = timeout
    self.thriftModule = thrift
    self.tsocketModule = tsocket
    self.transportModule = ttrans
    self.protocolModule = protocol
    self.ttypesModule = ttp
    self.clientClass = client
    self.columnClass = column
    self.mutationClass = mutation
    self.logger = logger
    self.hbaseThriftExceptions = (self.ttypesModule.IOError,
                                  #self.ttypesModule.IllegalArgument,
                                  #self.ttypesModule.AlreadyExists,
                                  self.thriftModule.TException,
                                  #HBaseClientException,
                                  socket.timeout,
                                  socket.error
                                 )

    try:
      self.make_connection(timeout=self.timeout)
    except NoConnectionException:
      utl.reportExceptionAndContinue(logger=self.logger)
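Every Thrift collaborator here arrives as a constructor argument whose default is the real module, so a test can hand in fakes without patching imports. The same injection pattern, reduced to a sketch with hypothetical names:

import socket

class InjectedConnection(object):
    # Collaborators default to the real modules; tests pass stubs.
    def __init__(self, host, port, socket_module=socket, logger=None):
        self.host = host
        self.port = port
        self.socketModule = socket_module
        self.logger = logger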
Example 11
 def queuingThreadFunc (self):
   self.logger.debug('queuingThreadFunc start')
   try:
     try:
       for aJob in self.jobSourceIterator(): # may never raise StopIteration
         if aJob is None:
           self.logger.info("there is nothing to do.  Sleeping for 7 seconds")
           self.responsiveSleep(7)
           continue
         self.quitCheck()
         try:
           self.logger.debug("queuing standard job %s", aJob)
           self.workerPool.newTask(self.retryTaskFuncWrapper, (aJob,))
         except Exception:
           self.logger.warning('%s has failed', aJob)
           sutil.reportExceptionAndContinue(self.logger)
     except Exception:
       self.logger.warning('The jobSourceIterator has failed')
       sutil.reportExceptionAndContinue(self.logger)
     except KeyboardInterrupt:
       self.logger.debug('queuingThread gets quit request')
   finally:
     self.quit = True
     self.logger.debug("we're quitting queuingThread")
     self.logger.debug("waiting for standard worker threads to stop")
     self.workerPool.waitForCompletion()
     self.logger.debug("all worker threads stopped")
Example 12
 def save_raw (self, ooid, jsonData, dump, currentTimestamp):
   try:
     if jsonData.legacy_processing == LegacyThrottler.DISCARD:
       return CrashStorageSystem.DISCARDED
   except KeyError:
     pass
   try:
     #jsonDataAsString = json.dumps(jsonData)
     jsonFileHandle, dumpFileHandle = self.localFS.newEntry(ooid, self.hostname, currentTimestamp)
     try:
       dumpFileHandle.write(dump)
       json.dump(jsonData, jsonFileHandle)
     finally:
       dumpFileHandle.close()
       jsonFileHandle.close()
     self.logger.info('saved - %s', ooid)
     return CrashStorageSystem.OK
   except Exception, x:
     sutil.reportExceptionAndContinue(self.logger)
     self.logger.warning('local storage has failed: trying fallback storage for: %s', ooid)
     try:
       #jsonDataAsString = json.dumps(jsonData)
       jsonFileHandle, dumpFileHandle = self.fallbackFS.newEntry(ooid, self.hostname, currentTimestamp)
       try:
         dumpFileHandle.write(dump)
         json.dump(jsonData, jsonFileHandle)
       finally:
         dumpFileHandle.close()
         jsonFileHandle.close()
       return CrashStorageSystem.OK
     except Exception, x:
       sutil.reportExceptionAndContinue(self.logger)
       self.logger.critical('fallback storage has failed: dropping %s on the floor', ooid)
Example 13
  def load_json_transform_rules(self):
    sql = ("select predicate, predicate_args, predicate_kwargs, "
           "       action, action_args, action_kwargs "
           "from transform_rules "
           "where "
           "  category = 'processor.json_rewrite'")
    try:
      rules = sdb.transaction_execute_with_retry(self.databaseConnectionPool,
                                                 sql)
    except Exception:
      sutil.reportExceptionAndContinue(logger)
      rules = [('socorro.processor.processor.json_equal_predicate',
                    '',
                    'key="ReleaseChannel", value="esr"',
                    'socorro.processor.processor.json_reformat_action',
                    '',
                    'key="Version", format_str="%(Version)sesr"'),
               ('socorro.processor.processor.json_ProductID_predicate',
                    '',
                    '',
                    'socorro.processor.processor.json_Product_rewrite_action',
                    '',
                    '') ]

    self.json_transform_rule_system.load_rules(rules)
    self.config.logger.info('done loading rules: %s',
                            str(self.json_transform_rule_system.rules))
Example 14
    def GET(self, *args):
        """
        Call the get method defined in a subclass and return its result.

        Return a JSON dump of the returned value,
        or the raw result if a content type was returned.

        """
        try:
            result = self.get(*args)
            if isinstance(result, tuple):
                web.header("Content-Type", result[1])
                return result[0]
            web.header("Content-Type", "application/json")
            return json.dumps(result)
        except web.webapi.HTTPError:
            raise
        except Exception:
            stringLogger = util.StringLogger()
            util.reportExceptionAndContinue(stringLogger)
            try:
                util.reportExceptionAndContinue(self.context.logger)
            except (AttributeError, KeyError):
                pass
            raise Exception(stringLogger.getMessages())
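The wrapper logs the traceback twice: into a StringLogger, whose accumulated text becomes the message of the re-raised exception, and into the application logger when one is configured. A hypothetical minimal version of such a string-capturing logger, assuming it only needs the log method used by the sketch after Example 1:

class StringLogger(object):
    # Accumulate formatted messages in memory so they can be attached
    # to a re-raised exception.
    def __init__(self):
        self.messages = []

    def log(self, level, message, *args):
        self.messages.append(message % args if args else str(message))

    def getMessages(self):
        return "\n".join(self.messages)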
Example 15
 def func (paramsTuple):
     jsonFilePathName, binaryFilePathName = paramsTuple[0]
     with open(jsonFilePathName) as jsonFile:
         formData = json.load(jsonFile)
     if config.uniqueHang:
         try:
             if formData['HangId'] in existingHangIdCache:
                 formData['HangId'] = existingHangIdCache[formData['HangId']]
             else:
                 formData['HangId'] = \
                     existingHangIdCache[formData['HangId']] = uuid.uuid4()
         except Exception:
             pass
     processTimeStatistic = statsPools.processTime.getStat()
     submittedCountStatistic = statsPools.submittedCount.getStat()
     try:
         processTimeStatistic.start()
         config.submissionFunc(formData, binaryFilePathName, config.url,
                               config.logger)
         submittedCountStatistic.increment()
     except Exception:
         sutil.reportExceptionAndContinue(sutil.FakeLogger())
         failureCountStatistic = statsPools.failureCount.getStat()
         failureCountStatistic.increment()
         return iwf.OK
     finally:
         processTimeStatistic.end()
     return iwf.OK
Example 16
def handler(req):
  global persistentStorage
  try:
    x = persistentStorage
  except NameError:
    persistentStorage = socorro.collector.initializer.createPersistentInitialization(configModule)

  logger = persistentStorage["logger"]
  config = persistentStorage["config"]
  collectObject = persistentStorage["collectObject"]

  logger.debug("handler invoked using subinterpreter: %s", req.interpreter)

  if req.method == "POST":
    try:
      req.content_type = "text/plain"

      theform = util.FieldStorage(req)
      dump = theform[config.dumpField]
      if not dump.file:
        return apache.HTTP_BAD_REQUEST

      currentTimestamp = dt.datetime.now()

      jsonDataDictionary = collectObject.makeJsonDictFromForm(theform)
      jsonDataDictionary["submitted_timestamp"] = currentTimestamp.isoformat()
      try:
        throttleable = int(jsonDataDictionary["Throttleable"])
      except KeyError:
        throttleable = 2
      if not throttleable or (throttleable and not collectObject.throttle(jsonDataDictionary)):
        fileSystemStorage = persistentStorage["standardFileSystemStorage"]
      elif throttleable == 2:
        fileSystemStorage = persistentStorage["deferredFileSystemStorage"]
      else:
        req.write("Discarded=1\n")
        return apache.OK

      uuid = ooid.createNewOoid(currentTimestamp, persistentStorage["config"].storageDepth)

      jsonFileHandle, dumpFileHandle = fileSystemStorage.newEntry(uuid, persistentStorage["hostname"], dt.datetime.now())
      try:
        collectObject.storeDump(dump.file, dumpFileHandle)
        collectObject.storeJson(jsonDataDictionary, jsonFileHandle)
      finally:
        dumpFileHandle.close()
        jsonFileHandle.close()

      req.write("CrashID=%s%s\n" % (config.dumpIDPrefix, uuid))
      return apache.OK
    except:
      logger.info("mod-python subinterpreter name: %s", req.interpreter)
      sutil.reportExceptionAndContinue(logger)
      #print >>sys.stderr, "Exception: %s" % sys.exc_info()[0]
      #print >>sys.stderr, sys.exc_info()[1]
      #print >>sys.stderr
      #sys.stderr.flush()
      return apache.HTTP_INTERNAL_SERVER_ERROR
  else:
    return apache.HTTP_METHOD_NOT_ALLOWED
Example 17
def scrapeReleases(config, cursor, product_name, urllib=urllib2):
    prod_url = '%s/%s/' % (config.base_url, product_name)

    # releases are sometimes in nightly, sometimes in candidates dir.
    # look in both.
    for directory in ('nightly', 'candidates'):
        if not getLinks(prod_url, startswith=directory, urllib=urllib):
            logger.debug('Dir %s not found for %s' % (directory, product_name))
            continue

        url = '%s/%s/%s/' % (config.base_url, product_name, directory)

        try:
            releases = getLinks(url, endswith='-candidates/',
                                urllib=urllib)
            for release in releases:
                for info in getRelease(release, url):
                    (platform, version, build_number, kvpairs) = info
                    build_type = 'Release'
                    beta_number = None
                    repository = 'mozilla-release'
                    if 'b' in version:
                        build_type = 'Beta'
                        version, beta_number = version.split('b')
                        repository = 'mozilla-beta'
                    build_id = kvpairs['buildID']
                    buildutil.insert_build(cursor, product_name, version,
                                           platform, build_id, build_type,
                                           beta_number, repository)
        except urllib.URLError:
            util.reportExceptionAndContinue(logger)
Example 18
    def get(self, **kwargs):
        """Return a single crash report from it's UUID. """
        filters = [
            ("uuid", None, "str"),
        ]
        params = external_common.parse_arguments(filters, kwargs)

        if params.uuid is None:
            raise MissingOrBadArgumentException(
                        "Mandatory parameter 'uuid' is missing or empty")

        crash_date = datetimeutil.uuid_to_date(params.uuid)
        logger.debug("Looking for crash %s during day %s" % (params.uuid,
                                                             crash_date))

        sql = """/* socorro.external.postgresql.crash.Crash.get */
            SELECT reports.email, reports.url, reports.addons_checked,
            (   SELECT reports_duplicates.duplicate_of
                FROM reports_duplicates
                WHERE reports_duplicates.uuid = reports.uuid
            ) as duplicate_of
            FROM reports
            WHERE reports.uuid=%(uuid)s
            AND reports.success IS NOT NULL
            AND utc_day_is( reports.date_processed,  %(crash_date)s)
        """
        sql_params = {
            "uuid": params.uuid,
            "crash_date": crash_date
        }

        results = []

        # Creating the connection to the DB
        self.connection = self.database.connection()
        cur = self.connection.cursor()

        try:
            results = db.execute(cur, sql, sql_params)
        except psycopg2.Error:
            util.reportExceptionAndContinue(logger)

        json_result = {
            "total": 0,
            "hits": []
        }

        for crash in results:
            row = dict(zip((
                       "email",
                       "url",
                       "addons_checked",
                       "duplicate_of"), crash))
            json_result["hits"].append(row)
        json_result["total"] = len(json_result["hits"])

        self.connection.close()

        return json_result
Example 19
def disconnectPartition (databaseCursor, partitionList, masterTableName, logger):
  for anOldPartitionName in partitionList:
    try:
      databaseCursor.execute("SAVEPOINT X; ALTER TABLE %s NO INHERIT %s; RELEASE SAVEPOINT X;" % (anOldPartitionName, masterTableName))
    except:
      databaseCursor.execute("ROLLBACK TO SAVEPOINT X")
      socorro_util.reportExceptionAndContinue(logger)
  databaseCursor.connection.commit()
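Bracketing each ALTER TABLE in a savepoint lets one failed detach roll back in isolation while the enclosing transaction, and the rest of the partition list, stay usable. The pattern extracted into a sketch, assuming a psycopg2-style cursor:

def execute_with_savepoint(cursor, statement, logger):
    # On failure, roll back only this statement, not the transaction.
    try:
        cursor.execute("SAVEPOINT x")
        cursor.execute(statement)
        cursor.execute("RELEASE SAVEPOINT x")
    except Exception:
        cursor.execute("ROLLBACK TO SAVEPOINT x")
        report_exception_and_continue(logger)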
Example 20
def save_last_run_date(config, now_function=utc_now):
  try:
    f = open(config.persistentDataPathname, "w")
    try:
      return cPickle.dump(now_function(), f)
    finally:
      f.close()
  except IOError:
    util.reportExceptionAndContinue(logger)
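The nested try/finally is the pre-with idiom for guaranteed closing. The same function with a with block (and binary mode, which pickle requires on Python 3), as a sketch with the logger passed in explicitly:

import pickle

def save_last_run_date(config, now_function, logger):
    try:
        # The with block closes the file even if pickling fails.
        with open(config.persistentDataPathname, "wb") as f:
            pickle.dump(now_function(), f)
    except IOError:
        report_exception_and_continue(logger)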
Example 21
 def __init__(self, config):
     """
     Constructor
     """
     super(VersionsInfo, self).__init__(config)
     try:
         self.database = db.Database(config)
     except (AttributeError, KeyError):
         util.reportExceptionAndContinue(logger)
     logger.debug('VersionsInfo __init__')
Example 22
 def cleanup (self):
   self.logger.debug("%s - killing database connections", threading.currentThread().getName())
   for name, aConnection in self.iteritems():
     try:
       aConnection.close()
       self.logger.debug("%s - connection %s closed", threading.currentThread().getName(), name)
     except psycopg2.InterfaceError:
       self.logger.debug("%s - connection %s already closed", threading.currentThread().getName(), name)
     except:
       util.reportExceptionAndContinue(self.logger)
Example 23
 def __init__(self, config):
     """
     Set the DB and the pool up and store the config.
     """
     super(JsonServiceBase, self).__init__(config)
     try:
         self.database = db.Database(config)
         self.crashStoragePool = cs.CrashStoragePool(config, storageClass=config.hbaseStorageClass)
     except (AttributeError, KeyError):
         util.reportExceptionAndContinue(logger)
Example 24
def handler(req):
  global persistentStorage
  try:
    x = persistentStorage
  except NameError:
    persistentStorage = init.createPersistentInitialization(configModule)

  logger = persistentStorage.logger
  config = persistentStorage.config
  crashStorage = persistentStorage.crashStorage

  #logger.debug("handler invoked using subinterpreter: %s", req.interpreter)
  if req.method == "POST":
    try:
      req.content_type = "text/plain"

      theform = util.FieldStorage(req)
      dump = theform[config.dumpField]
      if not dump.file:
        return apache.HTTP_BAD_REQUEST
      dump = dump.file.read()
      #dump = cstore.RepeatableStreamReader(dump.file)

      currentTimestamp = utc_now()

      jsonDataDictionary = crashStorage.makeJsonDictFromForm(theform)
      jsonDataDictionary.submitted_timestamp = currentTimestamp.isoformat()

      #for future use when we start sunsetting products
      #if crashStorage.terminated(jsonDataDictionary):
        #req.write("Terminated=%s" % jsonDataDictionary.Version)
        #return apache.OK

      uuid = ooid.createNewOoid(currentTimestamp, config.storageDepth)
      logger.debug("    %s", uuid)

      jsonDataDictionary.legacy_processing = persistentStorage.legacyThrottler.throttle(jsonDataDictionary)

      result = crashStorage.save_raw(uuid, jsonDataDictionary, dump, currentTimestamp)

      if result == cstore.CrashStorageSystem.DISCARDED:
        req.write("Discarded=1\n")
        return apache.OK
      elif result == cstore.CrashStorageSystem.ERROR:
        return apache.HTTP_INTERNAL_SERVER_ERROR
      req.write("CrashID=%s%s\n" % (config.dumpIDPrefix, uuid))
      return apache.OK
    except:
      logger.info("mod-python subinterpreter name: %s", req.interpreter)
      sutil.reportExceptionAndContinue(logger)

      return apache.HTTP_INTERNAL_SERVER_ERROR
  else:
    return apache.HTTP_METHOD_NOT_ALLOWED
Example 25
 def cleanup (self):
   self.logger.debug("%s - killing thread database connections", threading.currentThread().getName())
   for i, aDatabaseConnectionPair in self.iteritems():
     try:
       aDatabaseConnectionPair[0].rollback()
       aDatabaseConnectionPair[0].close()
       self.logger.debug("%s - connection %s closed", threading.currentThread().getName(), i)
     except psycopg2.InterfaceError:
       self.logger.debug("%s - connection %s already closed", threading.currentThread().getName(), i)
     except:
       util.reportExceptionAndContinue(self.logger)
Example 26
 def removeDumpFile(self, ooid):
   """
   Find and remove the dump file for the given ooid.
   Quietly continue if unfound. Log problem and continue if irremovable.
   """
   try:
     filePath = self.getDumpPath(ooid)
     os.unlink(filePath)
   except OSError,x:
     if 2 != x.errno:
       socorro_util.reportExceptionAndContinue(self.logger)
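The literal 2 is errno.ENOENT ("no such file or directory"); the named constant states the intent. A sketch:

import errno
import os

def remove_quietly(path, logger):
    # A missing file is fine; any other OS error is logged and swallowed.
    try:
        os.unlink(path)
    except OSError as x:
        if x.errno != errno.ENOENT:
            report_exception_and_continue(logger)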
Example 27
 def save_raw (self, uuid, jsonData, dump, currentTimestamp=None):
   try:
     jsonDataAsString = json.dumps(jsonData)
     self.hbaseConnection.put_json_dump(uuid, jsonData, dump, number_of_retries=2)
     self.logger.info('saved - %s', uuid)
     return CrashStorageSystem.OK
   except self.exceptionsEligibleForRetry:
     sutil.reportExceptionAndContinue(self.logger)
     return CrashStorageSystem.RETRY
   except Exception, x:
     sutil.reportExceptionAndContinue(self.logger)
     return CrashStorageSystem.ERROR
Example 28
    def __init__(self, config):
        """
        Default constructor

        """
        super(PostgresAPI, self).__init__(config)
        try:
            self.database = db.Database(config)
        except (AttributeError, KeyError):
            util.reportExceptionAndContinue(logger)

        self.connection = None
Example 29
 def post(self, *args):
   convertedArgs = webapi.typeConversion([str], args)
   parameters = util.DotDict(zip(['uuid'], convertedArgs))
   connection = self.database.connection()
   sql = """INSERT INTO priorityjobs (uuid) VALUES (%s)"""
   try:
     connection.cursor().execute(sql, (parameters['uuid'],))
   except Exception:
     connection.rollback()
     util.reportExceptionAndContinue(logger)
     return False
   connection.commit()
   return True
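post shows the usual manual-transaction shape: execute, commit on success, roll back and report on failure so the connection stays usable. The same shape as a reusable sketch:

def insert_row(connection, sql, params, logger):
    # Commit only if the statement succeeds; otherwise roll back,
    # report, and signal failure to the caller.
    try:
        connection.cursor().execute(sql, params)
    except Exception:
        connection.rollback()
        report_exception_and_continue(logger)
        return False
    connection.commit()
    return True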
Example 30
    def __init__(self, *args, **kwargs):
        """
        Store the config and create a connection to the database.

        Keyword arguments:
        config -- Configuration of the application.

        """
        self.context = kwargs.get("config")
        try:
            self.database = db.Database(self.context)
        except (AttributeError, KeyError):
            util.reportExceptionAndContinue(logger)

        self.connection = None
Example 31
def scrapeReleases(config, cursor, product_name, urllib=urllib2):
    prod_url = '%s/%s/' % (config.base_url, product_name)

    # releases are sometimes in nightly, sometimes in candidates dir.
    # look in both.
    for directory in ('nightly', 'candidates'):
        if not getLinks(prod_url, startswith=directory, urllib=urllib):
            logger.debug('Dir %s not found for %s' % (directory, product_name))
            continue

        url = '%s/%s/%s/' % (config.base_url, product_name, directory)

        try:
            releases = getLinks(url, endswith='-candidates/', urllib=urllib)
            for release in releases:
                for info in getRelease(release, url):
                    (platform, version, build_number, kvpairs) = info
                    build_type = 'Release'
                    beta_number = None
                    repository = 'mozilla-release'
                    if 'b' in version:
                        build_type = 'Beta'
                        version, beta_number = version.split('b')
                        repository = 'mozilla-beta'
                    build_id = kvpairs['buildID']
                    buildutil.insert_build(cursor,
                                           product_name,
                                           version,
                                           platform,
                                           build_id,
                                           build_type,
                                           beta_number,
                                           repository,
                                           ignore_duplicates=True)
        except urllib.URLError:
            util.reportExceptionAndContinue(logger)
Example 32
def processor2008(config):
    import sys
    import logging
    import logging.handlers

    import socorro.lib.util as sutil
    import socorro.processor.externalProcessor as processor

    logger = logging.getLogger("processor")
    logger.setLevel(logging.DEBUG)

    sutil.setupLoggingHandlers(logger, config)
    sutil.echoConfig(logger, config)

    config['logger'] = logger

    try:
        try:
            p = processor.ProcessorWithExternalBreakpad(config)
            p.start()
        except:
            sutil.reportExceptionAndContinue(logger)
    finally:
        logger.info("done.")
Example 33
 def doSubmission(ooidTuple):
     logger.debug('received: %s', str(ooidTuple))
     try:
         sourceStorage = crashStoragePoolForSource.crashStorage()
         destStorage = crashStoragePoolForDest.crashStorage()
         ooid = ooidTuple[0]
         try:
             jsonContents = sourceStorage.get_meta(ooid)
         except ValueError:
             logger.warning('the json for %s is degenerate and cannot be loaded'  \
                            ' - saving empty json', ooid)
             jsonContents = {}
         dumpContents = sourceStorage.get_raw_dump(ooid)
         logger.debug('pushing %s to dest', ooid)
         result = destStorage.save_raw(ooid, jsonContents, dumpContents)
         if result == cstore.CrashStorageSystem.ERROR:
             return iwf.FAILURE
         elif result == cstore.CrashStorageSystem.RETRY:
             return iwf.RETRY
         sourceStorage.quickDelete(ooid)
         return iwf.OK
     except Exception, x:
         sutil.reportExceptionAndContinue(logger)
         return iwf.FAILURE
Example 34
    def search(self, **kwargs):
        """
        Search for crashes and return them.

        See http://socorro.readthedocs.org/en/latest/middleware.html#search

        Optional arguments: see SearchCommon.get_parameters()

        """
        # Creating the connection to the DB
        self.connection = self.database.connection()
        cur = self.connection.cursor()

        params = search_common.get_parameters(kwargs)

        # Default mode falls back to starts_with for postgres
        if params["search_mode"] == "default":
            params["search_mode"] = "starts_with"
        if params["plugin_search_mode"] == "default":
            params["plugin_search_mode"] = "starts_with"

        # For Postgres, we never search for a list of terms
        if params["terms"]:
            params["terms"] = " ".join(params["terms"])
            params["terms"] = Search.prepare_terms(params["terms"],
                                                   params["search_mode"])

        # Searching for terms in plugins
        if params["report_process"] == "plugin" and params["plugin_terms"]:
            params["plugin_terms"] = " ".join(params["plugin_terms"])
            params["plugin_terms"] = Search.prepare_terms(
                params["plugin_terms"], params["plugin_search_mode"])

        # Get information about the versions
        util_service = Util(config=self.context)
        params["versions_info"] = util_service.versions_info(**params)

        # Parsing the versions
        params["versions_string"] = params["versions"]
        (params["versions"],
         params["products"]) = Search.parse_versions(params["versions"],
                                                     params["products"])

        # Changing the OS ids to OS names
        for i, elem in enumerate(params["os"]):
            for platform in self.context.platforms:
                if platform["id"] == elem:
                    params["os"][i] = platform["name"]

        # Creating the parameters for the sql query
        sql_params = {}

        # Preparing the different parts of the sql query
        sql_select = self.generate_sql_select(params)

        # Adding count for each OS
        for i in self.context.platforms:
            sql_params["os_%s" % i["id"]] = i["name"]

        sql_from = self.build_reports_sql_from(params)

        (sql_where,
         sql_params) = self.build_reports_sql_where(params, sql_params,
                                                    self.context)

        sql_group = self.generate_sql_group(params)

        sql_order = """
            ORDER BY total DESC, signature
        """

        (sql_limit,
         sql_params) = self.build_reports_sql_limit(params, sql_params)

        # Assembling the query
        sql_query = " ".join(
            ("/* socorro.search.Search search */", sql_select, sql_from,
             sql_where, sql_group, sql_order, sql_limit))

        # Query for counting the results
        sql_count_query = " ".join(
            ("/* socorro.external.postgresql.search.Search search.count */",
             "SELECT count(DISTINCT r.signature)", sql_from, sql_where))

        # Debug
        logger.debug(cur.mogrify(sql_query, sql_params))

        # Querying the DB
        try:
            total = db.singleValueSql(cur, sql_count_query, sql_params)
        except db.SQLDidNotReturnSingleValue:
            total = 0
            util.reportExceptionAndContinue(logger)

        results = []

        # No need to call Postgres if we know there will be no results
        if total != 0:
            try:
                results = db.execute(cur, sql_query, sql_params)
            except psycopg2.Error:
                util.reportExceptionAndContinue(logger)

        json_result = {"total": total, "hits": []}

        # Transforming the results into what we want
        for crash in results:
            if params["report_process"] == "plugin":
                row = dict(
                    zip(("signature", "count", "is_windows", "is_mac",
                         "is_linux", "numhang", "numplugin", "numcontent",
                         "pluginname", "pluginversion", "pluginfilename"),
                        crash))
            else:
                row = dict(
                    zip(("signature", "count", "is_windows", "is_mac",
                         "is_linux", "numhang", "numplugin", "numcontent"),
                        crash))
            json_result["hits"].append(row)

        self.connection.close()

        return json_result
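db.execute yields each row as a plain tuple; dict(zip(column_names, row)) turns it into the JSON-ready dict, a pattern repeated across the query examples here. In isolation, with made-up sample values:

columns = ("signature", "count", "is_windows", "is_mac",
           "is_linux", "numhang", "numplugin", "numcontent")
crash = ("nsFoo::Bar()", 42, 30, 5, 7, 0, 0, 0)  # fabricated sample row
row = dict(zip(columns, crash))
# row == {"signature": "nsFoo::Bar()", "count": 42, "is_windows": 30, ...}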
Example 35
    def versions_info(self, **kwargs):
        """
        Return information about versions of a product.

        See http://socorro.readthedocs.org/en/latest/middleware.html

        Keyword arguments:
        versions - List of products and versions.

        Return:
        None if versions is null or empty;
        Otherwise a dictionary of data about a version, i.e.:
        {
            "product_name:version_string": {
                "product_version_id": integer,
                "version_string": "string",
                "product_name": "string",
                "major_version": "string" or None,
                "release_channel": "string" or None,
                "build_id": [list, of, decimals] or None
            }
        }

        """
        # Parse arguments
        filters = [("versions", None, ["list", "str"])]
        params = external_common.parse_arguments(filters, kwargs)

        if "versions" not in params or not params["versions"]:
            return None

        products_list = []
        (versions_list,
         products_list) = Util.parse_versions(params["versions"],
                                              products_list)

        if not versions_list:
            return None

        versions = []
        products = []
        for x in xrange(0, len(versions_list), 2):
            products.append(versions_list[x])
            versions.append(versions_list[x + 1])

        params = {}
        params = Util.dispatch_params(params, "product", products)
        params = Util.dispatch_params(params, "version", versions)

        where = []
        for i in range(len(products)):
            where.append(
                str(i).join(("(pi.product_name = %(product",
                             ")s AND pi.version_string = %(version", ")s)")))

        sql = """/* socorro.external.postgresql.util.Util.versions_info */
        SELECT pv.product_version_id, pi.version_string, pi.product_name,
               which_table, pv.release_version, pv.build_type, pvb.build_id
        FROM product_info pi
            LEFT JOIN product_versions pv ON
                (pv.product_version_id = pi.product_version_id)
            JOIN product_version_builds pvb ON
                (pv.product_version_id = pvb.product_version_id)
        WHERE %s
        ORDER BY pv.version_sort
        """ % " OR ".join(where)

        # Creating the connection to the DB
        self.connection = self.database.connection()
        cur = self.connection.cursor()

        try:
            results = db.execute(cur, sql, params)
        except Exception:
            results = []
            util.reportExceptionAndContinue(logger)

        res = {}
        for line in results:
            row = dict(
                zip(("product_version_id", "version_string", "product_name",
                     "which_table", "major_version", "release_channel",
                     "build_id"), line))

            key = ":".join((row["product_name"], row["version_string"]))

            if key in res:
                # That key already exists; just add the new buildid to it
                res[key]["build_id"].append(int(row["build_id"]))
            else:
                if row["which_table"] == "old":
                    row["release_channel"] = row["build_id"] = None
                del row["which_table"]

                if row["build_id"]:
                    row["build_id"] = [int(row["build_id"])]

                res[key] = row

        return res
Example 36
    def get_list(self, **kwargs):
        """
        List all crashes with a given signature and return them.

        Optional arguments: see SearchCommon.get_parameters()

        """
        # Creating the connection to the DB
        self.connection = self.database.connection()
        cur = self.connection.cursor()

        params = search_common.get_parameters(kwargs)

        if params["signature"] is None:
            return None

        params["terms"] = params["signature"]
        params["search_mode"] = "is_exactly"

        # Default mode falls back to starts_with for postgres
        if params["plugin_search_mode"] == "default":
            params["plugin_search_mode"] = "starts_with"

        # Limiting to a signature
        if params["terms"]:
            params["terms"] = self.prepare_terms(params["terms"],
                                                 params["search_mode"])

        # Searching for terms in plugins
        if params["report_process"] == "plugin" and params["plugin_terms"]:
            params["plugin_terms"] = " ".join(params["plugin_terms"])
            params["plugin_terms"] = self.prepare_terms(
                params["plugin_terms"], params["plugin_search_mode"])

        # Get information about the versions
        util_service = Util(config=self.context)
        params["versions_info"] = util_service.versions_info(**params)

        # Parsing the versions
        params["versions_string"] = params["versions"]
        (params["versions"],
         params["products"]) = self.parse_versions(params["versions"],
                                                   params["products"])

        # Changing the OS ids to OS names
        for i, elem in enumerate(params["os"]):
            for platform in self.context.platforms:
                if platform["id"] == elem:
                    params["os"][i] = platform["name"]

        # Creating the parameters for the sql query
        sql_params = {}

        # Preparing the different parts of the sql query

        sql_select = """
            SELECT
                r.date_processed,
                r.uptime,
                r.user_comments,
                r.uuid,
                r.product,
                r.version,
                r.build,
                r.signature,
                r.url,
                r.os_name,
                r.os_version,
                r.cpu_name,
                r.cpu_info,
                r.address,
                r.reason,
                r.last_crash,
                r.install_age,
                r.hangid,
                r.process_type,
                (r.client_crash_date - (r.install_age * INTERVAL '1 second'))
                    AS install_time,
                rd.duplicate_of
        """

        sql_from = self.build_reports_sql_from(params)
        sql_from = """%s
            LEFT OUTER JOIN reports_duplicates rd ON r.uuid = rd.uuid
        """ % sql_from

        (sql_where,
         sql_params) = self.build_reports_sql_where(params, sql_params,
                                                    self.context)

        sql_order = """
            ORDER BY r.date_processed DESC
        """

        (sql_limit,
         sql_params) = self.build_reports_sql_limit(params, sql_params)

        # Assembling the query
        sql_query = " ".join(
            ("/* socorro.external.postgresql.report.Report.list */",
             sql_select, sql_from, sql_where, sql_order, sql_limit))

        # Query for counting the results
        sql_count_query = " ".join(
            ("/* socorro.external.postgresql.report.Report.list */",
             "SELECT count(*)", sql_from, sql_where))

        # Debug
        logger.debug(sql_count_query)
        logger.debug(cur.mogrify(sql_count_query, sql_params))

        # Querying the DB
        try:
            total = db.singleValueSql(cur, sql_count_query, sql_params)
        except db.SQLDidNotReturnSingleValue:
            total = 0
            util.reportExceptionAndContinue(logger)

        results = []

        # No need to call Postgres if we know there will be no results
        if total != 0:
            try:
                results = db.execute(cur, sql_query, sql_params)
            except psycopg2.Error:
                util.reportExceptionAndContinue(logger)

        json_result = {"total": total, "hits": []}

        # Transforming the results into what we want
        for crash in results:
            row = dict(
                zip(("date_processed", "uptime", "user_comments", "uuid",
                     "product", "version", "build", "signature", "url",
                     "os_name", "os_version", "cpu_name", "cpu_info",
                     "address", "reason", "last_crash", "install_age",
                     "hangid", "process_type", "install_time", "duplicate_of"),
                    crash))
            for i in row:
                if isinstance(row[i], datetime.datetime):
                    row[i] = str(row[i])
            json_result["hits"].append(row)

        self.connection.close()

        return json_result
Example 37
    def builds(self, **kwargs):
        """
        Return information about nightly builds of one or several products.

        See http://socorro.readthedocs.org/en/latest/middleware.html#builds

        Keyword arguments:
        product - Concerned product
        version - Concerned version
        from_date - Retrieve builds from this date to now

        Return:
        [
            {
                "product": "string",
                "version": "string",
                "platform": "string",
                "buildid": "integer",
                "build_type": "string",
                "beta_number": "string",
                "repository": "string",
                "date": "string"
            },
            ...
        ]

        """
        # Default value for from_date
        lastweek = utc_now() - timedelta(7)

        # Parse arguments
        filters = [
            ("product", None, "str"),
            ("version", None, "str"),
            ("from_date", lastweek, "datetime"),
        ]
        params = external_common.parse_arguments(filters, kwargs)

        self._require_parameters(params, "product")

        # FIXME this will be moved to the DB in 7, see bug 740829
        if params["product"].startswith("Fennec"):
            params["release_name"] = "mobile"
        else:
            params["release_name"] = params["product"]

        params["from_date"] = params["from_date"].date()

        sql = [
            """/* socorro.external.postgresql.builds.Builds.builds */
            SELECT  version,
                    platform,
                    build_id as buildid,
                    build_type,
                    beta_number,
                    repository,
                    build_date(build_id) as date
            FROM releases_raw
            WHERE product_name = %(release_name)s
            """
        ]

        if params["version"]:
            sql.append("AND version = %(version)s")

        sql.append("""
            AND build_date(build_id) >=
                timestamp with time zone %(from_date)s
            AND repository IN ('mozilla-central', 'mozilla-1.9.2',
                               'comm-central', 'comm-1.9.2',
                               'comm-central-trunk', 'mozilla-central-android')
            ORDER BY build_date(build_id) DESC, product_name ASC, version ASC,
                     platform ASC
        """)

        sql_query = " ".join(sql)

        # Creating the connection to the DB
        self.connection = self.database.connection()
        cur = self.connection.cursor()

        try:
            logger.debug(cur.mogrify(sql_query, params))
            sql_results = db.execute(cur, sql_query, params)
        except Exception:
            sql_results = []
            util.reportExceptionAndContinue(logger)

        results = [
            dict(
                zip(("version", "platform", "buildid", "build_type",
                     "beta_number", "repository", "date"), line))
            for line in sql_results
        ]

        for i, line in enumerate(results):
            results[i]["product"] = params["product"]
            results[i]["buildid"] = int(line["buildid"])
            results[i]["date"] = line["date"].strftime("%Y-%m-%d")

        return results
Example 38
        assert ("raise TestingException" in logger.buffer[4])
        assert ("test message" in logger.buffer[5])
        logger.clear()
        try:
            raise TestingException("test message")
        except TestingException, e:
            util.reportExceptionAndContinue(logger,
                                            loggingLevel=-23,
                                            ignoreFunction=ignoreAlways)
        assert ([] == logger.buffer)
        logger.clear()
        try:
            raise TestingException("test message")
        except TestingException, e:
            util.reportExceptionAndContinue(logger,
                                            loggingLevel=-23,
                                            ignoreFunction=ignoreNever)
        assert ([
            -23,
            -23,
            -23,
            -23,
            -23,
            -23,
        ] == logger.levels)
        assert ("TestingException" in logger.buffer[0])
        assert ("test message" == str(logger.buffer[1]))
        assert ("raise TestingException" in logger.buffer[4])
        assert ("test message" in logger.buffer[5])
        logger.clear()
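This test exercises the loggingLevel and ignoreFunction keywords: when ignoreFunction approves, nothing is logged at all. Extending the sketch from Example 1 accordingly (same imports; the exact arguments handed to ignoreFunction are an assumption):

def report_exception_and_continue(logger, logging_level=logging.ERROR,
                                  ignore_function=None):
    exc_type, exc_value, exc_tb = sys.exc_info()
    # Assumed contract: ignore_function sees the exc_info triple and
    # returns True to suppress all logging.
    if ignore_function is not None and ignore_function(exc_type, exc_value, exc_tb):
        return
    logger.log(logging_level, "caught %s", exc_type)
    logger.log(logging_level, str(exc_value))
    for line in traceback.format_tb(exc_tb):
        logger.log(logging_level, line.rstrip())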
Example 39
        try:
            result = self.get(*args)
            if isinstance(result, tuple):
                web.header('Content-Type', result[1])
                return result[0]
            web.header('Content-Type', 'application/json')
            return json.dumps(result)
        except web.webapi.HTTPError:
            raise
        except (DatabaseError, InsertionError), e:
            raise web.webapi.InternalError(message=str(e))
        except (MissingArgumentError, BadArgumentError), e:
            raise BadRequest(str(e))
        except Exception:
            stringLogger = util.StringLogger()
            util.reportExceptionAndContinue(stringLogger)
            try:
                util.reportExceptionAndContinue(self.config.logger)
            except (AttributeError, KeyError):
                pass
            raise Exception(stringLogger.getMessages())

    def get(self, *args):
        raise NotImplementedError(
            "The GET function has not been implemented for %s" % repr(args)
        )

    def POST(self, *args):
        """
        Call the post method defined in a subclass and return its result.
Example 40
    def get_comments(self, **kwargs):
        """Return a list of comments on crash reports, filtered by
        signatures and other fields.

        See socorro.lib.search_common.get_parameters() for all filters.
        """
        # Creating the connection to the DB
        self.connection = self.database.connection()
        cur = self.connection.cursor()

        params = self.prepare_search_params(**kwargs)

        # Creating the parameters for the sql query
        sql_params = {}

        # Preparing the different parts of the sql query

        # WARNING: sensitive data is returned here (email). When there is
        # an authentication mechanism, a verification should be done here.
        sql_select = """
            SELECT
                r.date_processed,
                r.user_comments,
                r.uuid,
                CASE
                    WHEN r.email = '' THEN null
                    WHEN r.email IS NULL THEN null
                    ELSE r.email
                END
        """

        sql_from = self.build_reports_sql_from(params)

        (sql_where,
         sql_params) = self.build_reports_sql_where(params, sql_params,
                                                    self.context)
        sql_where = "%s AND r.user_comments IS NOT NULL" % sql_where

        sql_order = "ORDER BY email ASC, r.date_processed ASC"

        # Assembling the query
        sql_query = " ".join(
            ("/* external.postgresql.crashes.Crashes.get_comments */",
             sql_select, sql_from, sql_where, sql_order))

        # Query for counting the results
        sql_count_query = " ".join(
            ("/* external.postgresql.crashes.Crashes.get_comments */",
             "SELECT count(*)", sql_from, sql_where))

        # Querying the DB
        try:
            total = db.singleValueSql(cur, sql_count_query, sql_params)
        except db.SQLDidNotReturnSingleValue:
            total = 0
            util.reportExceptionAndContinue(logger)

        results = []

        # No need to call Postgres if we know there will be no results
        if total != 0:
            try:
                results = db.execute(cur, sql_query, sql_params)
            except psycopg2.Error:
                util.reportExceptionAndContinue(logger)

        result = {"total": total, "hits": []}

        # Transforming the results into what we want
        for crash in results:
            row = dict(
                zip(("date_processed", "user_comments", "uuid", "email"),
                    crash))
            for i in row:
                if isinstance(row[i], datetime.datetime):
                    row[i] = str(row[i])
            result["hits"].append(row)

        self.connection.close()

        return result
Example 41
import socorro.processor.externalProcessor as processor
import socorro.lib.ConfigurationManager as configurationManager
import socorro.lib.util as sutil

try:
    config = configurationManager.newConfiguration(
        configurationModule=configModule,
        applicationName="Socorro Processor 2.0")
except configurationManager.NotAnOptionError, x:
    print >> sys.stderr, x
    print >> sys.stderr, "for usage, try --help"
    sys.exit()

logger = logging.getLogger("processor")
logger.setLevel(logging.DEBUG)

sutil.setupLoggingHandlers(logger, config)
sutil.echoConfig(logger, config)

config['logger'] = logger

try:
    try:
        p = processor.ProcessorWithExternalBreakpad(config)
        p.start()
    except:
        sutil.reportExceptionAndContinue(logger)
finally:
    logger.info("done.")
Example 42
config.logger = logger

if config.numberOfSubmissions == 'forever':
    config.iteratorFunc = sub.createInfiniteFileSystemIterator
elif config.numberOfSubmissions == 'all':
    config.iteratorFunc = sub.createFileSystemIterator
else:
    config.iteratorFunc = sub.createLimitedFileSystemIterator
    config.numberOfSubmissions = int(config.numberOfSubmissions)

if config.dryrun:
    config.submissionFunc = sub.submissionDryRun
else:
    config.submissionFunc = sub.doSubmission

config.sleep = float(config.delay) / 1000.0

config.uniqueHang = 'uniqueHangId' in config

if config.searchRoot:
    sub.submitter(config)
else:
    try:
        import json
        with open(config.jsonfile) as jsonFile:
            formData = json.load(jsonFile)
        config.submissionFunc(formData, config.dumpfile, config.url,
                              config.logger)
    except Exception, x:
        sutil.reportExceptionAndContinue(config.logger)
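Selecting both the iterator and the submission function from configuration makes dry-run a pure dispatch decision with no branching inside the pipeline. That idea as a sketch with hypothetical names:

def choose_submission_func(dryrun, real_submission, logger):
    # In dry-run mode, return a stand-in with the same signature that
    # only logs; the rest of the pipeline is unchanged.
    if not dryrun:
        return real_submission

    def dry_run(form_data, dump_path, url, submission_logger):
        logger.info("dry run - would submit %s to %s", dump_path, url)

    return dry_run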