Example 1
def updateCronjobsTable(connection, cronjobName, success, lastTargetTime, failureMessage=None):
    cursor = connection.cursor()

    params = [lastTargetTime]
    if success:
        params.append(utc_now())
        sql = """
          /* socorro.cron.dailyMatviews updateCronjobsTable */
          UPDATE cronjobs
          SET
            last_target_time = %s,
            last_success = %s
          WHERE cronjob = %s
        """
    else:
        params.append(utc_now())
        params.append(failureMessage)
        sql = """
          /* socorro.cron.dailyMatviews updateCronjobsTable */
          UPDATE cronjobs
          SET
            last_target_time = %s,
            last_failure = %s,
            failure_message = %s
          WHERE cronjob = %s
        """
    params.append(cronjobName)

    cursor.execute(sql, params)
    connection.commit()
Example 2
 def _set_ongoing_job(self, class_):
     app_name = class_.app_name
     info = self.job_state_database.get(app_name)
     if info:
         # Was it already ongoing?
         if info.get('ongoing'):
             # Unless it's been ongoing for ages, raise OngoingJobError
             # total_seconds() also counts whole days; .seconds alone wraps at 24h
             age_hours = (utc_now() - info['ongoing']).total_seconds() / 3600.0
             if age_hours < self.config.crontabber.max_ongoing_age_hours:
                 raise OngoingJobError(info['ongoing'])
             else:
                 self.logger.debug(
                     '{} has been ongoing for {:.2f} hours. Ignoring it and running the app anyway.'
                     .format(app_name, age_hours)
                 )
         info['ongoing'] = utc_now()
     else:
         info = {
             'next_run': None,
             'first_run': None,
             'last_run': None,
             'last_success': None,
             'last_error': {},
             'error_count': 0,
             'depends_on': [],
             'ongoing': utc_now(),
         }
     self.job_state_database[app_name] = info
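Note on the total_seconds() fix above: timedelta.seconds only holds the
sub-day remainder, so a job that has been ongoing for more than a day would
report a misleadingly small age. A quick illustration:

    import datetime

    delta = datetime.timedelta(days=1, hours=2)
    delta.seconds / 3600.0          # 2.0  -- the whole day is silently dropped
    delta.total_seconds() / 3600.0  # 26.0 -- the age the check actually needs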
Example 3
 def _normal_jobs_iter(self):
     """
     Yields a list of job tuples pulled from the 'jobs' table for which the
     owner is this process and the started datetime is null.  This iterator
     is perpetual - it never raises the StopIteration exception
     """
     get_normal_job_sql = (
         "select"
         "    j.id,"
         "    j.uuid,"
         "    priority "
         "from"
         "    jobs j "
         "where"
         "    j.owner = %d"
         "    and j.starteddatetime is null "
         "order by queueddatetime"
         "  limit %d" % (self.processor_id,
                         self.config.batchJobLimit))
     normal_jobs_list = []
     last_query_timestamp = utc_now()
     while True:
         polling_threshold = utc_now() - self.config.pollingInterval
         if not normal_jobs_list and \
            last_query_timestamp < polling_threshold:  # get more
             normal_jobs_list = self.transaction(
                 execute_query_fetchall,
                 get_normal_job_sql
             )
             last_query_timestamp = utc_now()
         if normal_jobs_list:
             while normal_jobs_list:
                 yield normal_jobs_list.pop(0)
         else:
             yield None
Example 4
    def test_sending_many_emails(self, exacttarget_mock):
        """Test that we can send emails to a lot of users in the same run. """

        # First add a lot of emails.
        now = utc_now() - datetime.timedelta(minutes=30)

        config_manager = self._setup_storage_config()
        with config_manager.context() as config:
            storage = ElasticSearchCrashStorage(config)

            for i in range(21):
                storage.save_processed({
                    'uuid': 'fake-%s' % i,
                    'email': '*****@*****.**' % i,
                    'product': 'WaterWolf',
                    'version': '20.0',
                    'release_channel': 'Release',
                    'date_processed': now,
                })

            storage.es.refresh()

        config_manager = self._setup_simple_config()
        with config_manager.context() as config:
            job = automatic_emails.AutomaticEmailsCronApp(config, '')
            job.run(utc_now())

            et_mock = exacttarget_mock.return_value
            # Verify that we have the default 4 results + the 21 we added.
            self.assertEqual(et_mock.trigger_send.call_count, 25)
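Example 5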
    def test_no_new_crashes(self):
        new_crash_source = ESNewCrashSource(self.config)
        self.health_check()

        generator = new_crash_source.new_crashes(
            utc_now() - datetime.timedelta(days=1),
            'Firefox',
            ['43.0.1']
        )
        assert list(generator) == []

        self.index_crash(
            a_processed_crash,
            raw_crash=a_raw_crash,
            crash_id=a_processed_crash['uuid']
        )
        self.refresh_index()

        # Same test now that there is a processed crash in there
        # but notably under a different name and version.
        generator = new_crash_source.new_crashes(
            utc_now() - datetime.timedelta(days=1),
            'Firefox',
            ['43.0.1']
        )
        assert list(generator) == []
Example 6
 def time_to_run(self, class_, time_):
     """return true if it's time to run the job.
     This is true if there is no previous information about its last run
     or if the last time it ran and set its next_run to a date that is now
     past.
     """
     app_name = class_.app_name
     try:
         info = self.database[app_name]
     except KeyError:
         if time_:
             h, m = [int(x) for x in time_.split(':')]
             # only run if this hour and minute is < now
             now = utc_now()
             if now.hour > h:
                 return True
             elif now.hour == h and now.minute >= m:
                 return True
             return False
         else:
             # no past information, run now
             return True
     next_run = info['next_run']
     if next_run < utc_now():
         return True
     return False
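A quick sketch of the HH:MM gate in the KeyError branch above (a sketch
assuming, as in the example, that utc_now() returns the current UTC datetime):

    import datetime

    def hhmm_has_passed(time_, now):
        # run only once the configured hour:minute has passed today;
        # tuple comparison mirrors the hour/minute checks above
        h, m = [int(x) for x in time_.split(':')]
        return (now.hour, now.minute) >= (h, m)

    hhmm_has_passed('03:30', datetime.datetime(2012, 1, 1, 4, 0))   # True
    hhmm_has_passed('03:30', datetime.datetime(2012, 1, 1, 3, 15))  # False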
Example 7
    def test_slow_run_job(self):
        config_manager, json_file = self._setup_config_manager(
          'socorro.unittest.cron.test_crontabber.SlowJob|1h'
        )

        with config_manager.context() as config:
            tab = crontabber.CronTabber(config)
            time_before = utc_now()
            tab.run_all()
            time_after = utc_now()
            time_taken = (time_after - time_before).seconds
            #time_taken = (time_after - time_before).microseconds / 1000.0 / 1000.0
            #print time_taken
            self.assertEqual(round(time_taken), 1.0)

            # check that this was written to the JSON file
            # and that the next_run is going to be 1 hour from now
            assert os.path.isfile(json_file)
            structure = json.load(open(json_file))
            information = structure['slow-job']
            self.assertEqual(information['error_count'], 0)
            self.assertEqual(information['last_error'], {})
            self.assertTrue(information['next_run'].startswith(
                             (time_before + datetime.timedelta(hours=1))
                              .strftime('%Y-%m-%d %H:%M:%S')))
Example 8
    def run(self):
        # if this is non-zero, we use it.
        if self.config.days_into_past:
            last_run = (
                utc_now() -
                datetime.timedelta(days=self.config.days_into_past)
            )
        else:
            try:
                # KeyError if it's never run successfully
                # TypeError if self.job_information is None
                last_run = self.job_information['last_success']
            except (KeyError, TypeError):
                # basically, the "virgin run" of this job
                last_run = utc_now()

        # bugzilla runs on PST, so we need to communicate in its time zone
        PST = tz.gettz('PST8PDT')
        last_run_formatted = last_run.astimezone(PST).strftime('%Y-%m-%d')
        query = self.config.query % last_run_formatted
        for (
            bug_id,
            signature_set
        ) in self._iterator(query):
            try:
                # each run of this loop is a transaction
                self.database_transaction_executor(
                    self.inner_transaction,
                    bug_id,
                    signature_set
                )
            except NothingUsefulHappened:
                pass
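A small sketch of the timezone conversion above (assuming tz is dateutil.tz,
as the gettz call suggests):

    import datetime
    from dateutil import tz

    PST = tz.gettz('PST8PDT')
    last_run = datetime.datetime(2012, 5, 5, 3, 0, tzinfo=tz.tzutc())
    last_run.astimezone(PST).strftime('%Y-%m-%d')  # '2012-05-04'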
Example 9
    def test_delete_old_indices(self):
        # Create old indices to be deleted.
        self.index_client.create('socorro200142', {})
        self.indices.append('socorro200142')

        self.index_client.create('socorro200000', {})
        self.indices.append('socorro200000')

        # Create an old aliased index.
        self.index_client.create('socorro200201_20030101', {})
        self.indices.append('socorro200201_20030101')
        self.index_client.put_alias(
            index='socorro200201_20030101',
            name='socorro200201',
        )

        # Create a recent aliased index.
        last_week_index = self.get_index_for_date(
            utc_now() - datetime.timedelta(weeks=1)
        )
        self.index_client.create('socorro_some_aliased_index', {})
        self.indices.append('socorro_some_aliased_index')
        self.index_client.put_alias(
            index='socorro_some_aliased_index',
            name=last_week_index,
        )

        # Create a recent index that should not be deleted.
        now_index = self.get_index_for_date(utc_now())
        self.index_client.create(now_index, {})
        self.indices.append(now_index)

        # These will raise an error if an index was not correctly created.
        assert self.index_client.exists('socorro200142')
        assert self.index_client.exists('socorro200000')
        assert self.index_client.exists('socorro200201')
        assert self.index_client.exists(now_index)
        assert self.index_client.exists(last_week_index)

        api = IndexCleaner(self.config)
        api.delete_old_indices()

        # Verify the recent index is still there.
        ok_(self.index_client.exists(now_index))
        ok_(self.index_client.exists(last_week_index))

        # Verify the old indices are gone.
        ok_(not self.index_client.exists('socorro200142'))
        ok_(not self.index_client.exists('socorro200000'))
        ok_(not self.index_client.exists('socorro200201'))
Example 10
    def test_run(self):
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['suspicious-crashes']
            assert not information['suspicious-crashes']['last_error']
            assert information['suspicious-crashes']['last_success']

            cursor = self.conn.cursor()

            cursor.execute("""
                SELECT signatures.signature, scs.report_date
                FROM suspicious_crash_signatures scs
                JOIN signatures ON scs.signature_id=signatures.signature_id
            """)

            count = 0
            # despite the name, this is yesterday's date -- the report_date
            # the job is expected to have written
            today = (utc_now() - datetime.timedelta(1)).date()
            for row in cursor.fetchall():
                eq_('sig', row[0])
                eq_(today, row[1].date())
                count += 1

            eq_(1, count)
Example 11
 def transferOne(
     self, ooid, anotherJsonDumpStorage, createLinks=True, removeOld=False, webheadHostName=None, aDate=None
 ):
     """
 Transfer data from another JsonDumpStorage instance into this instance of JsonDumpStorage
 ooid - the id of the data to transfer
 anotherJsonDumpStorage - An instance of JsonDumpStorage holding the data to be transferred
 createLinks - If true, create symlinks from and to date subdir
 removeOld - If true, attempt to delete the files and symlinks in source file tree
 webheadHostName: Used if known
 aDate: Used if unable to parse date from source directories and uuid
 NOTE: Assumes that the path names and suffixes for anotherJsonDumpStorage are the same as for self
 """
     self.logger.debug("%s - transferOne %s %s", threading.currentThread().getName(), ooid, aDate)
     jsonFromFile = anotherJsonDumpStorage.getJson(ooid)
     self.logger.debug("%s - fetched json", threading.currentThread().getName())
     dumpFromFile = os.path.splitext(jsonFromFile)[0] + anotherJsonDumpStorage.dumpSuffix
     if createLinks:
         self.logger.debug("%s - fetching stamp", threading.currentThread().getName())
         stamp = anotherJsonDumpStorage.pathToDate(anotherJsonDumpStorage.lookupOoidInDatePath(None, ooid, None)[0])
     else:
         self.logger.debug("%s - not bothering to fetch stamp", threading.currentThread().getName())
         stamp = None
     self.logger.debug("%s - fetched pathToDate ", threading.currentThread().getName())
     if not stamp:
         if not aDate:
             aDate = utc_now()
         stamp = aDate
     self.logger.debug("%s - about to copyFrom ", threading.currentThread().getName())
     self.copyFrom(ooid, jsonFromFile, dumpFromFile, webheadHostName, stamp, createLinks, removeOld)
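Example 12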
    def test_test_mapping(self):
        """Much test. So meta. Wow test_test_. """
        # First test a valid mapping.
        mapping = self.api.get_mapping()
        assert self.api.test_mapping(mapping) is None

        # Insert an invalid storage mapping.
        mapping = self.api.get_mapping({
            'name': 'fake_field',
            'namespace': 'raw_crash',
            'in_database_name': 'fake_field',
            'storage_mapping': {
                'type': 'unkwown'
            }
        })
        with pytest.raises(BadArgumentError):
            self.api.test_mapping(mapping)

        # Test with a correct mapping but with data that cannot be indexed.
        self.index_crash({
            'date_processed': datetimeutil.utc_now(),
            'product': 'WaterWolf',
        })
        self.refresh_index()
        mapping = self.api.get_mapping({
            'name': 'product',
            'storage_mapping': {
                'type': 'long'
            }
        })
        with pytest.raises(BadArgumentError):
            self.api.test_mapping(mapping)
Example 13
    def setUp(self):
        super(IntegrationTestSettings, self).setUp()

        config = self.get_config_context()
        self.storage = crashstorage.ElasticSearchCrashStorage(config)

        # clear the indices cache so the index is created on every test
        self.storage.indices_cache = set()

        self.now = utc_now()

        # Create the supersearch fields.
        self.storage.es.bulk_index(
            index=config.webapi.elasticsearch_default_index,
            doc_type='supersearch_fields',
            docs=SUPERSEARCH_FIELDS.values(),
            id_field='name',
            refresh=True,
        )

        # Create the index that will be used.
        es_index = self.storage.get_index_for_crash(self.now)
        self.storage.create_socorro_index(es_index)

        self.api = SuperSearch(config=config)
Example 14
def fillProcessorTable(cursor, processorCount, stamp=None, processorMap=None, logger=None):
  """
  Puts some entries into the processor table.
  Also creates priority_jobs_NNN for each processor id, unless that table exists
  Given a map of id->timestamp, sets the lastseendatetime for each successive processor to that stamp
  (Ignores ids generated by the count or in the processorMap, and uses database's serial id generator)
  """
  if not logger:
    logger = logging.getLogger()

  if not stamp: stamp = utc_now()
  if processorMap is None: processorMap = {}  # avoid sharing a mutable default
  if not processorCount and not processorMap: return
  sql = "INSERT INTO processors (name,startdatetime,lastseendatetime) VALUES (%s,%s,%s);"
  data = []
  if processorMap:
    data.extend([('test_%d'%(id),stamp,processorMap.get(id,stamp)) for id in processorMap.keys() ])
  else:
    data.extend([('test_%d'%(x),stamp, stamp) for x in range(1,processorCount+1) ])
  try:
    cursor.executemany(sql,data)
    cursor.connection.commit()

    sql = "SELECT id from processors;"
    cursor.execute(sql)
    allIds = cursor.fetchall()
    cursor.connection.rollback()
    sql = "CREATE TABLE priority_jobs_%s (uuid varchar(50) not null primary key);"
    for tup in allIds:
      try:
        cursor.execute(sql%(tup[0]))
        cursor.connection.commit()
      except psycopg2.ProgrammingError:
        cursor.connection.rollback()
  finally:
    cursor.connection.rollback()
Example 15
    def test_update_user(self):
        config_manager = self._setup_simple_config()
        with config_manager.context() as config:
            job = automatic_emails.AutomaticEmailsCronApp(config, '')
            now = utc_now()

            report = {
                'email': '*****@*****.**'
            }
            job.update_user(report, now, self.conn)

            cursor = self.conn.cursor()
            cursor.execute("""
                SELECT last_sending FROM emails WHERE email=%(email)s
            """, report)

            self.assertEqual(cursor.rowcount, 1)
            row = cursor.fetchone()
            self.assertEqual(row[0], now)

            # Test with a non-existing user
            report = {
                'email': '*****@*****.**'
            }
            job.update_user(report, now, self.conn)

            cursor = self.conn.cursor()
            cursor.execute("""
                SELECT last_sending FROM emails WHERE email=%(email)s
            """, report)

            self.assertEqual(cursor.rowcount, 1)
            row = cursor.fetchone()
            self.assertEqual(row[0], now)
Example 16
    def test_create_release(self):
        self._insert_release_channels()
        self._insert_products()
        config_manager = self._setup_config_manager()

        with config_manager.context() as config:
            app = middleware_app.MiddlewareApp(config)
            app.main()
            server = middleware_app.application

            now = datetimeutil.utc_now()
            response = self.post(
                server,
                "/releases/release/",
                {
                    "product": "Firefox",
                    "version": "1.0",
                    "update_channel": "beta",
                    "build_id": now.strftime("%Y%m%d%H%M"),
                    "platform": "Windows",
                    "beta_number": "1",
                    "release_channel": "Beta",
                    "throttle": "1",
                },
            )
            eq_(response.data, True)
Example 17
    def get_signatures(self, **kwargs):
        """Return top crashers by signatures.

        See http://socorro.readthedocs.org/en/latest/middleware.html#tcbs
        """
        filters = [
            ("product", None, "str"),
            ("version", None, "str"),
            ("crash_type", "all", "str"),
            ("to_date", datetimeutil.utc_now(), "datetime"),
            ("duration", datetime.timedelta(7), "timedelta"),
            ("os", None, "str"),
            ("limit", 100, "int"),
            ("date_range_type", None, "str")
        ]

        params = external_common.parse_arguments(filters, kwargs)
        params.logger = logger

        # acquire the connection before the try block so that `connection`
        # always exists when the finally clause runs
        connection = self.database.connection()
        try:
            cursor = connection.cursor()
            return tcbs.twoPeriodTopCrasherComparison(cursor, params)
        finally:
            connection.close()
Example 18
    def get_signatures(self, **kwargs):
        """Return top crashers by signatures.

        See http://socorro.readthedocs.org/en/latest/middleware.html#tcbs
        """
        filters = [
            ("product", None, "str"),
            ("version", None, "str"),
            ("crash_type", "all", "str"),
            ("to_date", datetimeutil.utc_now(), "datetime"),
            ("duration", datetime.timedelta(7), "timedelta"),
            ("os", None, "str"),
            ("limit", 100, "int"),
            ("date_range_type", None, "str")
        ]

        params = external_common.parse_arguments(filters, kwargs)
        params.logger = logger

        # twoPeriodTopCrasherComparison() derives its start date by
        # subtracting `duration` from `to_date`
        if params.duration > datetime.timedelta(30):
            raise BadArgumentError('Duration too long. Max 30 days.')

        with self.get_connection() as connection:
            return tcbs.twoPeriodTopCrasherComparison(connection, params)
Example 19
    def test_cleanup_radix(self):
        self.fsrts._current_slot = lambda: ["00", "00_00"]
        self.fsrts.save_raw_crash({"test": "TEST"}, {"foo": "bar", self.fsrts.config.dump_field: "baz"}, self.CRASH_ID)
        self.fsrts._current_slot = lambda: ["10", "00_01"]

        self.assertEqual(list(self.fsrts.new_crashes()), [self.CRASH_ID])
        self.assertEqual(list(self.fsrts.new_crashes()), [])

        config_manager, json_file = self._setup_config_manager()
        with config_manager.context() as config:
            tab = crontabber.CronTabber(config)

        tab.run_all()

        self.assertEqual(os.listdir(self.fsrts.config.fs_root), [])

        future = (utc_now() + datetime.timedelta(days=10)).strftime("%Y%m%d")
        future_id = "0bba929f-8721-460c-dead-a43c%s" % future

        self.fsrts._current_slot = lambda: ["00", "00_00"]
        self.fsrts.save_raw_crash({"test": "TEST"}, {"foo": "bar", self.fsrts.config.dump_field: "baz"}, future_id)
        self.fsrts._current_slot = lambda: ["10", "00_01"]

        self.assertEqual(list(self.fsrts.new_crashes()), [future_id])
        self.assertEqual(list(self.fsrts.new_crashes()), [])

        tab.run_all()

        self.assertEqual(os.listdir(self.fsrts.config.fs_root), [future])
Example 20
        def mocked_urlopener(url, today=None):
            if today is None:
                today = utc_now()
            html_wrap = "<html><body>\n%s\n</body></html>"
            if url.endswith('/firefox/'):
                return html_wrap % """
                <a href="candidates/">candidates</a>
                <a href="nightly/">nightly</a>
                """
            if url.endswith('/firefox/nightly/'):
                return html_wrap % """
                <a href="10.0-candidates/">10.0-candidiates</a>
                """
            if url.endswith('/firefox/candidates/'):
                return html_wrap % """
                <a href="10.0b4-candidates/">10.0b4-candidiates</a>
                """
            if (url.endswith('/firefox/nightly/10.0-candidates/') or
                url.endswith('/firefox/candidates/10.0b4-candidates/')):
                return html_wrap % """
                <a href="build1/">build1</a>
                """
            if (url.endswith('/firefox/nightly/10.0-candidates/build1/') or
                url.endswith('/firefox/candidates/10.0b4-candidates/build1/')):
                return html_wrap % """
                <a href="linux_info.txt">linux_info.txt</a>
                """
            if url.endswith(today.strftime('/firefox/nightly/%Y/%m/')):
                return html_wrap % today.strftime("""
                <a href="%Y-%m-%d-trunk/">%Y-%m-%d-trunk</a>
                """)
            if url.endswith(today.strftime(
              '/firefox/nightly/%Y/%m/%Y-%m-%d-trunk/')):
                return html_wrap % """
                <a href="mozilla-nightly-15.0a1.en-US.linux-x86_64.txt">txt</a>
                <a href="mozilla-nightly-15.0a2.en-US.linux-x86_64.txt">txt</a>
                """
            if url.endswith(today.strftime(
              '/firefox/nightly/%Y/%m/%Y-%m-%d-trunk/mozilla-nightly-15.0a1.en'
              '-US.linux-x86_64.txt')):
                return (
                   "20120505030510\n"
                   "http://hg.mozilla.org/mozilla-central/rev/0a48e6561534"
                )
            if url.endswith(today.strftime(
              '/firefox/nightly/%Y/%m/%Y-%m-%d-trunk/mozilla-nightly-15.0a2.en'
              '-US.linux-x86_64.txt')):
                return (
                   "20120505443322\n"
                   "http://hg.mozilla.org/mozilla-central/rev/xxx123"
                )
            if url.endswith(
              '/firefox/nightly/10.0-candidates/build1/linux_info.txt'):
                return "buildID=20120516113045"
            if url.endswith(
              '/firefox/candidates/10.0b4-candidates/build1/linux_info.txt'):
                return "buildID=20120516114455"

            # bad testing boy!
            raise NotImplementedError(url)
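Example 21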
  def __init__(self,*args,**kwargs):
    """
    Passes appropriate kwargs to Config, pays local attention to these keys:
    updateInterval: default: '0' format: 'dd:hh:mm:ss', leading parts optional. Must be >= 0 seconds.
    updateFunction: default: noop(). Takes self as argument. Behavior: Updates default values in argument
    reEvaluateFunction: default: noop(). Takes self as argument. Behavior: Mutates values in argument
    signalNumber: default: SIGALRM (14). If 0, then signals will not be handled.
      Instances that share the same signalNumber will all be update()-ed at every signal.

    self.internal.updateFunction may be set after construction if desired: Avoids double-work at construction
    self.internal.reEvaluateFunction may be set after construction if desired, but this is not recommended.
    """
    skwargs = dict([(x,kwargs[x]) for x in socorro_config.getDefaultedConfigOptions().keys() if x in kwargs])
    for i in range(len(args)):
      skwargs[socorro_config.namedConfigOptions[i]] = args[i]
    super(DynamicConfig,self).__init__(**skwargs)
    self.internal.updateFunction = kwargs.get('updateFunction',noop)
    self.internal.reEvaluateFunction = kwargs.get('reEvaluateFunction',noop)
    self.internal.signalNumber = kwargs.get('signalNumber',14)
    self.internal.nextUpdate = None
    updateInterval = kwargs.get('updateInterval','0:0:0:0')
    self.internal.updateDelta = socorro_config.timeDeltaConverter(updateInterval)
    if self.internal.updateDelta:
      if self.internal.updateDelta < datetime.timedelta(0):
        raise ValueError("updateInterval must be non-negative, but %s"%self.internal.updateDelta)
      self.internal.nextUpdate = utc_now() + self.internal.updateDelta

    # finally: make sure we are current
    if self.internal.signalNumber:
      priorSignal = signal.signal(self.internal.signalNumber,DynamicConfig.handleAlarm)
    self.doUpdate()
    DynamicConfig.instances[id(self)] = self
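A hypothetical construction sketch based on the docstring above; my_update is
an invented placeholder, and the accepted keys and the 'dd:hh:mm:ss' interval
format are taken from the documented kwargs:

    def my_update(config):
        # hypothetical callback; receives the DynamicConfig instance
        pass

    dc = DynamicConfig(
        updateInterval='0:1:0:0',   # dd:hh:mm:ss -> update hourly
        updateFunction=my_update,
        signalNumber=0,             # 0 disables signal handling
    )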
Example 22
    def test_create_release_with_beta_number_null(self):
        self._insert_release_channels()
        service = Releases(config=self.config)

        now = datetimeutil.utc_now()
        build_id = now.strftime('%Y%m%d%H%M')
        params = dict(
            product='Firefox',
            version='1.0',
            update_channel='beta',
            build_id=build_id,
            platform='Windows',
            beta_number=None,
            release_channel='Beta',
            throttle=1
        )

        res = service.create_release(**params)
        ok_(res)

        # but...
        params['beta_number'] = 0
        assert_raises(
            MissingArgumentError,
            service.create_release,
            **params
        )
Example 23
    def test_basic_run(self):
        cur = self.conn.cursor()
        # Insert test rows so there is data to clean up.
        statement = """
            INSERT INTO raw_adi
            (date, product_name, adi_count) VALUES
            (%(first)s, 'WinterFox', 11),
            (%(second)s, 'WinterFox', 23)
        """
        second = utc_now().date()
        first = second - datetime.timedelta(days=1)
        cur.execute(statement, {'first': first, 'second': second})
        self.conn.commit()

        # Run the crontabber job to remove rows older than days_to_keep.
        config_manager = self._setup_config_manager(days_to_keep=1)
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        assert information['clean-raw-adi']
        assert not information['clean-raw-adi']['last_error']
        assert information['clean-raw-adi']['last_success']

        # Ensure test row was removed
        cur.execute("""
            SELECT date FROM raw_adi
        """)
        result, = cur.fetchall()
        report_date = result[0]
        eq_(report_date, second)
Example 24
    def test_update_user(self):
        config_manager = self._setup_simple_config()
        with config_manager.context() as config:
            job = automatic_emails.AutomaticEmailsCronApp(config, '')
            now = utc_now().isoformat()

            es = SuperS().es(
                urls=config.elasticsearch.elasticsearch_urls,
                timeout=config.elasticsearch.elasticsearch_timeout,
            )
            search = es.indexes(
                config.elasticsearch.elasticsearch_emails_index
            )
            search = search.doctypes('emails')

            connection = es.get_es()

            job.update_user('*****@*****.**', now, connection)
            connection.refresh()

            s = search.filter(_id='*****@*****.**')
            res = list(s.values_list('last_sending'))

            self.assertEqual(len(res), 1)
            self.assertEqual(res[0][0], now)

            # Test with a non-existing user
            job.update_user('*****@*****.**', now, connection)
            connection.refresh()

            s = search.filter(_id='*****@*****.**')
            res = list(s.values_list('last_sending'))

            self.assertEqual(len(res), 1)
            self.assertEqual(res[0][0], now)
Example 25
    def POST(self, *args):
        raw_crash, dumps = self._get_raw_crash_from_form()

        current_timestamp = utc_now()
        raw_crash.submitted_timestamp = current_timestamp.isoformat()
        # legacy - ought to be removed someday
        raw_crash.timestamp = time.time()

        if (not self.config.accept_submitted_crash_id
            or 'crash_id' not in raw_crash
        ):
            crash_id = createNewOoid(current_timestamp)
            raw_crash.crash_id = crash_id
            self.logger.info('%s received', crash_id)
        else:
            crash_id = raw_crash.crash_id
            self.logger.info('%s received with existing crash_id:', crash_id)

        raw_crash.type_tag = self.type_tag

        self.crash_storage.save_raw_crash(
            raw_crash,
            dumps,
            crash_id
        )
        self.logger.info('%s accepted', crash_id)
        return "CrashID=%s%s\n" % (self.type_tag, crash_id)
Example 26
 def testLookupOoidInDatePath(self):
   d = dumpStorage.DumpStorage(self.testDir)
   expected = {}
   count = 0
   for ooid,v in createJDS.jsonFileData.items():
     dateS = v[0]
     if 0 == count%2:
       nd,dd = d.newEntry(ooid,datetime.datetime(*[int(x) for x in dateS.split('-')], tzinfo=UTC))
       expected[ooid] = dd
     elif 0 == count%5:
       expected[ooid] = None
       pass
     else:
       nd,dd = d.newEntry(ooid)
       expected[ooid] = dd
     count += 1
     dateS = v[0]
   count = 0
   for ooid, v in createJDS.jsonFileData.items():
     # iterate items() so dateS belongs to this ooid instead of reusing a
     # stale v left over from the first loop
     dateS = v[0]
     if expected[ooid]:
       exEnd = datetime.datetime(*[int(x) for x in dateS.split('-')], tzinfo=UTC)
       passDate = utc_now()
       if 0 == count%3:
         passDate = None
       else:
         passDate = exEnd
       got,ignore = d.lookupOoidInDatePath(passDate,ooid)
       assert expected[ooid] == got, 'For %s: Expected %s, got %s'%(ooid,expected[ooid],got)
     count += 1
Example 27
    def POST(self, *args):
        raw_crash, dumps = \
            self._make_raw_crash_and_dumps(web.webapi.rawinput())

        current_timestamp = utc_now()
        raw_crash.submitted_timestamp = current_timestamp.isoformat()
        # legacy - ought to be removed someday
        raw_crash.timestamp = time.time()

        crash_id = createNewOoid(current_timestamp)

        raw_crash.legacy_processing = self.legacy_throttler.throttle(raw_crash)
        if raw_crash.legacy_processing == LegacyThrottler.DISCARD:
            self.logger.info('%s discarded', crash_id)
            return "Discarded=1\n"
        if raw_crash.legacy_processing == LegacyThrottler.IGNORE:
            self.logger.info('%s ignored', crash_id)
            return "Unsupported=1\n"

        crash_storage = self.context.crashStoragePool.crashStorage()
        try:
            crash_storage.save_raw(
                crash_id,
                raw_crash,
                dumps
            )
        except PolyStorageError, x:
            self.logger.error('%s storage exception: %s',
                              crash_id,
                              str(x.exceptions),  # log internal error set
                              exc_info=True)
            raise
Example 28
    def test_create_release(self):
        self._insert_release_channels()
        self._insert_products()
        config_manager = self._setup_config_manager()

        with config_manager.context() as config:
            app = middleware_app.MiddlewareApp(config)
            app.main()
            server = middleware_app.application

            now = datetimeutil.utc_now()
            response = self.post(
                server,
                '/releases/release/',
                {
                    'product': 'Firefox',
                    'version': '1.0',
                    'update_channel': 'beta',
                    'build_id': now.strftime('%Y%m%d%H%M'),
                    'platform': 'Windows',
                    'beta_number': '1',
                    'release_channel': 'Beta',
                    'throttle': '1'
                }
            )
            eq_(response.data, True)
Example 29
    def test_mapping(self, mapping):
        """Verify that a mapping is correct.

        This function does so by first creating a new, temporary index in
        elasticsearch using the mapping. It then takes some recent crash
        reports that are in elasticsearch and tries to insert them in the
        temporary index. Any failure in any of those steps will raise an
        exception. If any is raised, that means the mapping is incorrect in
        some way (either it doesn't validate against elasticsearch's rules,
        or is not compatible with the data we currently store).

        If no exception is raised, the mapping is likely correct.

        This function is to be used in any place that can change the
        `storage_mapping` field in any Super Search Field.
        Methods `create_field` and `update_field` use it, see above.
        """
        temp_index = 'socorro_mapping_test'

        es_connection = self.get_connection()

        # Import at runtime to avoid dependency circle.
        from socorro.external.es.index_creator import IndexCreator
        index_creator = IndexCreator(self.config)
        try:
            index_creator.create_index(
                temp_index,
                mapping,
            )

            now = datetimeutil.utc_now()
            last_week = now - datetime.timedelta(days=7)
            current_indices = self.generate_list_of_indexes(last_week, now)

            crashes_sample = es_connection.search(
                index=current_indices,
                doc_type=self.config.elasticsearch.elasticsearch_doctype,
                size=self.config.elasticsearch.mapping_test_crash_number,
            )
            crashes = [x['_source'] for x in crashes_sample['hits']['hits']]

            for crash in crashes:
                es_connection.index(
                    index=temp_index,
                    doc_type=self.config.elasticsearch.elasticsearch_doctype,
                    body=crash,
                )
        except elasticsearch.exceptions.ElasticsearchException as e:
            raise BadArgumentError(
                'storage_mapping',
                msg='Indexing existing data in Elasticsearch failed with the '
                    'new mapping. Error is: %s' % str(e),
            )
        finally:
            try:
                index_creator.get_index_client().delete(temp_index)
            except elasticsearch.exceptions.NotFoundError:
                # If the index does not exist (if the index creation failed
                # for example), we don't need to do anything.
                pass
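Example 30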
def testSaveCampaign():
  context = getDummyContext()

  product = 'Foobar'
  versions = '5'
  signature = 'JohnHancock'
  subject = 'email subject'
  body = 'email body'
  start_date = utc_now()
  end_date = start_date + timedelta(hours=1)
  author = '*****@*****.**'
  email_count = 0

  parameters = (product, versions, signature, subject, body, start_date, end_date, email_count, author)

  sql =  """INSERT INTO email_campaigns (product, versions, signature, subject, body, start_date, end_date, email_count, author)
                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id"""

  dummyCursor = expect.DummyObjectWithExpectations()
  dummyCursor.expect('mogrify', (sql, list(parameters)), {}, None)
  dummyCursor.expect('execute', (sql, list(parameters)), {}, None)
  dummyCursor.expect('fetchone', (), {}, ['123'])

  campaign = ecc.EmailCampaignCreate(context)
  campaignId = campaign.save_campaign(dummyCursor, product, versions, signature, subject, body, start_date, end_date, author)

  assert campaignId == '123'
Example 31
    def process_crash(self, raw_crash, raw_dumps, processed_crash):
        """Take a raw_crash and its associated raw_dumps and return a
        processed_crash.
        """
        # processor_meta_data will be used to ferry "inside information" to
        # transformation rules.  Sometimes rules need a bit more extra
        # information about the transformation process itself.
        processor_meta_data = DotDict()
        processor_meta_data.processor_notes = [
            self.config.processor_name, self.__class__.__name__
        ]
        processor_meta_data.quit_check = self.quit_check
        processor_meta_data.processor = self
        processor_meta_data.config = self.config

        if "processor_notes" in processed_crash:
            original_processor_notes = [
                x.strip() for x in processed_crash.processor_notes.split(";")
            ]
            processor_meta_data.processor_notes.append(
                "earlier processing: %s" %
                processed_crash.get("started_datetime", 'Unknown Date'))
        else:
            original_processor_notes = []

        processed_crash.success = False
        processed_crash.started_datetime = utc_now()
        # for backwards compatibility:
        processed_crash.startedDateTime = processed_crash.started_datetime
        processed_crash.signature = 'EMPTY: crash failed to process'

        crash_id = raw_crash['uuid']
        try:
            # quit_check calls ought to be scattered around the code to allow
            # the processor to be responsive to requests to shut down.
            self.quit_check()

            start_time = self.config.logger.info(
                "starting transform for crash: %s", crash_id)
            processor_meta_data.started_timestamp = start_time

            # apply_all_rules
            for rule in self.rules:
                rule.act(raw_crash, raw_dumps, processed_crash,
                         processor_meta_data)
                self.quit_check()

            # the crash made it through the processor rules with no exceptions
            # raised, call it a success.
            processed_crash.success = True

        except Exception as exception:
            self.config.logger.warning('Error while processing %s: %s',
                                       crash_id,
                                       str(exception),
                                       exc_info=True)
            processor_meta_data.processor_notes.append(
                'unrecoverable processor error: %s' % exception)

        # the processor notes are in the form of a list.  Join them all
        # together to make a single string
        processor_meta_data.processor_notes.extend(original_processor_notes)
        processed_crash.processor_notes = '; '.join(
            processor_meta_data.processor_notes)
        completed_datetime = utc_now()
        processed_crash.completed_datetime = completed_datetime
        # for backwards compatibility:
        processed_crash.completeddatetime = completed_datetime

        self.config.logger.info(
            "finishing %s transform for crash: %s",
            'successful' if processed_crash.success else 'failed', crash_id)
        return processed_crash
Example 32
def get_parameters(kwargs):
    """
    Return a dictionary of parameters with default values.

    Optional arguments:
    data_type -- Type of data to return.
        Default is None, to be determined by each service if needed.
    terms -- Terms to search for.
        Can be a string or a list of strings.
        Default is none.
    fields -- Fields to search into.
        Can be a string or a list of strings.
        Default to signature, not implemented for PostgreSQL.
    search_mode -- How to search for terms.
        Must be one of the following:
            "default", "contains", "is_exactly" or "starts_with".
        Default to "default" for ElasticSearch,
            "starts_with" for PostgreSQL.
    from_date -- Only elements after this date.
        Format must be "YYYY-mm-dd HH:ii:ss.S".
        Default is a week ago.
    to_date -- Only elements before this date.
        Format must be "YYYY-mm-dd HH:ii:ss.S".
        Default is now.
    products -- Products concerned by this search.
        Can be a string or a list of strings.
        Default is Firefox.
    os -- Restrict search to those operating systems.
        Can be a string or a list of strings.
        Default is all.
    versions -- Version of the software.
        Can be a string or a list of strings.
        Default is all.
    build_ids -- Restrict search to a particular build of the software.
        Can be a string or a list of strings.
        Default is all.
    reasons -- Restrict search to crashes caused by this reason.
        Default is all.
    release_channels -- Restrict search to crashes in these release channels.
        Default is all.
    report_type -- Restrict to a type of report.
        Can be any, crash or hang.
        Default is any.
    report_process -- How was the report processed.
        Can be any, crash or hang.
        Default is any.
    plugin_terms -- Search for terms concerning plugins.
        Can be a string or a list of strings.
        Default is none.
    plugin_in -- What field to look into.
        Can be "name" or "filename".
        Default is 'name'.
    plugin_search_mode -- How to search into plugins.
        Must be one of the following:
            "contains", "is_exactly" or "starts_with".
        Default to "contains".
    result_number -- Number of results to get.
        Default is 100.
    result_offset -- Get results from this offset.
        Default is 0.
    """
    # Default dates
    now = datetimeutil.utc_now()
    lastweek = now - datetime.timedelta(7)

    filters = [("data_type", "signatures", "str"),
               ("terms", None, ["list", "str"]), ("signature", None, "str"),
               ("fields", "signature", ["list", "str"]),
               ("search_mode", "default", "str"),
               ("from_date", lastweek, "datetime"),
               ("to_date", now, "datetime"),
               ("products", None, ["list", "str"]),
               ("versions", None, ["list", "str"]),
               ("os", None, ["list",
                             "str"]), ("reasons", None, ["list", "str"]),
               ("release_channels", None, ["list", "str"]),
               ("build_ids", None, ["list", "str"]),
               ("build_from", lastweek, "datetime"),
               ("build_to", now, "datetime"), ("report_process", "any", "str"),
               ("report_type", "any", "str"),
               ("plugin_terms", None, ["list", "str"]),
               ("plugin_in", "name", ["list", "str"]),
               ("plugin_search_mode", "default", "str"),
               ("result_number", 100, "int"), ("result_offset", 0, "int")]

    params = extern.parse_arguments(filters, kwargs)

    # To be moved into a config file?
    authorized_modes = ["default", "starts_with", "contains", "is_exactly"]
    if params["search_mode"] not in authorized_modes:
        params["search_mode"] = "default"
    if params["plugin_search_mode"] not in authorized_modes:
        params["plugin_search_mode"] = "default"

    # Do not search in the future and make sure we have dates where expected
    if params["to_date"] is None or params["to_date"] > now:
        params["to_date"] = now
    if params["from_date"] is None:
        params["from_date"] = lastweek

    if params["build_to"] is None or params["build_to"] > now:
        params["build_to"] = now
    if params["build_from"] is None:
        params["build_from"] = lastweek

    # Securing fields
    params['fields'] = restrict_fields(params['fields'], ['signature', 'dump'])
    params['plugin_in'] = restrict_fields(params['plugin_in'],
                                          ['filename', 'name'])

    return params
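A hypothetical call sketch, using only the filters defined above; omitted
keys fall back to their defaults (for instance to_date defaults to utc_now()
and from_date to a week earlier):

    params = get_parameters({
        'terms': 'SIGSEGV',
        'products': ['Firefox', 'Fennec'],
        'search_mode': 'contains',
    })
    # 'to_date' was defaulted to "now" and 'from_date' to a week before it
    assert params['from_date'] <= params['to_date']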
Example 33
    def test_email_after_delay(self, exacttarget_mock):
        """Test that a user will receive an email if he or she sends us a new
        crash report after the delay is passed (but not before). """
        config_manager = self._setup_config_manager(
            delay_between_emails=1,
            restrict_products=['EarthRaccoon']
        )
        email = '*****@*****.**'
        list_service_mock = exacttarget_mock.return_value.list.return_value
        list_service_mock.get_subscriber.return_value = {
            'token': email
        }
        trigger_send_mock = exacttarget_mock.return_value.trigger_send
        tomorrow = utc_now() + datetime.timedelta(days=1, hours=2)
        twohourslater = utc_now() + datetime.timedelta(hours=2)

        storage_config_manager = self._setup_storage_config()
        with storage_config_manager.context() as storage_config:
            storage = ElasticSearchCrashStorage(storage_config)

        with config_manager.context() as config:
            # 1. Send an email to the user and update emailing data
            tab = crontabber.CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['automatic-emails']
            assert not information['automatic-emails']['last_error']
            assert information['automatic-emails']['last_success']

            exacttarget_mock.return_value.trigger_send.assert_called_with(
                'socorro_dev_test',
                {
                    'EMAIL_ADDRESS_': email,
                    'EMAIL_FORMAT_': 'H',
                    'TOKEN': email
                }
            )
            self.assertEqual(trigger_send_mock.call_count, 1)

            # 2. Test that before 'delay' is passed user doesn't receive
            # another email

            # Insert a new crash report with the same email address
            storage.save_processed({
                'uuid': '50',
                'email': email,
                'product': 'EarthRaccoon',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': utc_now() + datetime.timedelta(hours=1)
            })
            storage.es.refresh()

            # Run crontabber with time pushed by two hours
            with mock.patch('socorro.cron.crontabber.utc_now') as cronutc_mock:
                with mock.patch('socorro.cron.base.utc_now') as baseutc_mock:
                    cronutc_mock.return_value = twohourslater
                    baseutc_mock.return_value = twohourslater
                    tab.run_all()

            information = self._load_structure()
            assert information['automatic-emails']
            assert not information['automatic-emails']['last_error']
            assert information['automatic-emails']['last_success']

            # No new email was sent
            self.assertEqual(trigger_send_mock.call_count, 1)

            # 3. Verify that, after 'delay' is passed, a new email is sent
            # to our user

            # Insert a new crash report with the same email address
            storage.save_processed({
                'uuid': '51',
                'email': email,
                'product': 'EarthRaccoon',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': utc_now() + datetime.timedelta(days=1)
            })
            storage.es.refresh()

            # Run crontabber with time pushed by a day
            with mock.patch('socorro.cron.crontabber.utc_now') as cronutc_mock:
                with mock.patch('socorro.cron.base.utc_now') as baseutc_mock:
                    cronutc_mock.return_value = tomorrow
                    baseutc_mock.return_value = tomorrow
                    tab.run_all()

            information = self._load_structure()
            assert information['automatic-emails']
            assert not information['automatic-emails']['last_error']
            assert information['automatic-emails']['last_success']

            # A new email was sent
            self.assertEqual(trigger_send_mock.call_count, 2)
Example 34
    def setUp(self):
        super(IntegrationTestSuperSearch, self).setUp()

        self.api = SuperSearch(config=self.config)
        self.now = datetimeutil.utc_now()
Example 35
    def test_get_missing_fields(self):
        config = self.get_base_config(es_index='socorro_integration_test_%W')

        fake_mappings = [
            {
                'mappings': {
                    config.elasticsearch.elasticsearch_doctype: {
                        'properties': {
                            # Add a bunch of unknown fields.
                            'field_z': {
                                'type': 'string'
                            },
                            'namespace1': {
                                'type': 'object',
                                'properties': {
                                    'field_a': {
                                        'type': 'string'
                                    },
                                    'field_b': {
                                        'type': 'long'
                                    }
                                }
                            },
                            'namespace2': {
                                'type': 'object',
                                'properties': {
                                    'subspace1': {
                                        'type': 'object',
                                        'properties': {
                                            'field_b': {
                                                'type': 'long'
                                            }
                                        }
                                    }
                                }
                            },
                            # Add a few known fields that should not appear.
                            'processed_crash': {
                                'type': 'object',
                                'properties': {
                                    'signature': {
                                        'type': 'string'
                                    },
                                    'product': {
                                        'type': 'string'
                                    },
                                }
                            }
                        }
                    }
                }
            },
            {
                'mappings': {
                    config.elasticsearch.elasticsearch_doctype: {
                        'properties': {
                            'namespace1': {
                                'type': 'object',
                                'properties': {
                                    'subspace1': {
                                        'type': 'object',
                                        'properties': {
                                            'field_d': {
                                                'type': 'long'
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            },
        ]

        now = datetimeutil.utc_now()
        indices = []

        try:
            # Using "2" here means that an index will be missing, hence testing
            # that it swallows the subsequent error.
            for i in range(2):
                date = now - datetime.timedelta(weeks=i)
                index = date.strftime(config.elasticsearch.elasticsearch_index)
                mapping = fake_mappings[i % len(fake_mappings)]

                self.index_creator.create_index(index, mapping)
                indices.append(index)

            api = SuperSearchFields(config=config)
            missing_fields = api.get_missing_fields()
            expected = [
                'field_z',
                'namespace1.field_a',
                'namespace1.field_b',
                'namespace1.subspace1.field_d',
                'namespace2.subspace1.field_b',
            ]

            eq_(missing_fields['hits'], expected)
            eq_(missing_fields['total'], 5)

        finally:
            for index in indices:
                self.index_client.delete(index=index)
Example 36
    def setUpClass(cls):
        """ Populate product_info table with fake data """
        super(IntegrationTestSignatureSummary, cls).setUpClass()

        cursor = cls.connection.cursor()

        # Insert data
        cls.now = datetimeutil.utc_now()
        now = cls.now.date()
        yesterday = now - datetime.timedelta(days=1)
        lastweek = now - datetime.timedelta(days=7)

        cursor.execute("""
            INSERT INTO products
            (product_name, sort, rapid_release_version, release_name)
            VALUES
            (
                'Firefox',
                1,
                '8.0',
                'firefox'
            ),
            (
                'Fennec',
                3,
                '11.0',
                'mobile'
            ),
            (
                'Thunderbird',
                2,
                '10.0',
                'thunderbird'
            );
        """)

        cursor.execute("""
            INSERT INTO release_channels
            (release_channel, sort)
            VALUES
            (
                'Release', 1
            ),
            (
                'Beta', 2
            );
        """)

        cursor.execute("""
            INSERT INTO product_release_channels
            (product_name, release_channel, throttle)
            VALUES
            (
                'Firefox', 'Release', '0.1'
            ),
            (
                'Fennec', 'Release', '0.1'
            ),
            (
                'Fennec', 'Beta', '1.0'
            ),
            (
                'Thunderbird', 'Release', '0.1'
            );
        """)

        # Insert versions, contains an expired version
        cursor.execute("""
            INSERT INTO product_versions
            (product_version_id,
             product_name, major_version, release_version, version_string,
             build_date, sunset_date, featured_version, build_type,
             version_sort, has_builds, is_rapid_beta)
            VALUES
            (
                1,
                'Firefox',
                '8.0',
                '8.0',
                '8.0',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '0008000',
                True,
                False
            ),
            (
                2,
                'Firefox',
                '9.0',
                '9.0',
                '9.0',
                '%(lastweek)s',
                '%(lastweek)s',
                False,
                'Nightly',
                '0009000',
                True,
                False
            ),
            (
                3,
                'Fennec',
                '11.0',
                '11.0',
                '11.0.1',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '0011001',
                True,
                False
            ),
            (
                4,
                'Fennec',
                '12.0',
                '12.0',
                '12.0b1',
                '%(now)s',
                '%(now)s',
                False,
                'Beta',
                '00120b1',
                True,
                False
            ),
            (
                5,
                'Thunderbird',
                '10.0',
                '10.0',
                '10.0.2b',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '001002b',
                True,
                False
            );
        """ % {
            'now': now,
            'lastweek': lastweek
        })

        cursor.execute("""
            INSERT INTO signatures
            (first_build, first_report, signature)
            VALUES
            ('20130701120000', '%(now)s', 'Fake Signature #1')
        """ % {'now': now})

        cursor.execute("""
            SELECT signature_id FROM signatures
            WHERE signature = 'Fake Signature #1'
        """)

        signature_id = cursor.fetchone()[0]

        cursor.execute("""
            SELECT product_version_id
            FROM product_versions
            WHERE product_name = 'Firefox' and version_string = '8.0'
        """)
        product_version_id = cursor.fetchone()[0]

        cursor.execute("""
        SELECT product_version_id
        FROM product_versions
        WHERE product_name = 'Firefox' and version_string = '9.0'
        """)
        other_product_version_id = cursor.fetchone()[0]

        cursor.execute(
            """
            INSERT INTO signature_summary_products
            (signature_id, product_version_id, product_name,
             version_string, report_date, report_count)
            VALUES
            (%(signature_id)s, %(product_version_id)s, 'Firefox',
             '8.0', '%(yesterday)s', 1)
        """ % {
                'yesterday': yesterday,
                'product_version_id': product_version_id,
                'signature_id': signature_id
            })

        cursor.execute(
            """
           INSERT INTO signature_summary_products
           (signature_id, product_version_id, product_name,
            version_string, report_date, report_count)
           VALUES
           (%(signature_id)s, %(product_version_id)s, 'Firefox',
            '9.0', '%(yesterday)s', 1)
        """ % {
                'yesterday': yesterday,
                'product_version_id': other_product_version_id,
                'signature_id': signature_id
            })

        cursor.execute(
            """
            INSERT INTO signature_summary_architecture
            (signature_id, architecture, product_version_id,
             product_name, report_date, report_count, version_string)
            VALUES
            (%(signature_id)s, 'amd64', %(product_version_id)s,
             'Firefox', '%(yesterday)s', 1, '8.0')
        """ % {
                'yesterday': yesterday,
                'product_version_id': product_version_id,
                'signature_id': signature_id
            })

        cursor.execute(
            """
           INSERT INTO signature_summary_architecture
           (signature_id, architecture, product_version_id,
            product_name, report_date, report_count, version_string)
           VALUES
           (%(signature_id)s, 'amd64', %(product_version_id)s,
            'Firefox', '%(yesterday)s', 1, '9.0')
        """ % {
                'yesterday': yesterday,
                'product_version_id': other_product_version_id,
                'signature_id': signature_id
            })

        cursor.execute(
            """
            INSERT INTO signature_summary_flash_version
            (signature_id, flash_version, product_version_id,
             product_name, report_date, report_count, version_string)
            VALUES
            (%(signature_id)s, '1.0', %(product_version_id)s,
             'Firefox', '%(yesterday)s', 1, '8.0')
        """ % {
                'yesterday': yesterday,
                'product_version_id': product_version_id,
                'signature_id': signature_id
            })

        cursor.execute(
            """
           INSERT INTO signature_summary_flash_version
           (signature_id, flash_version, product_version_id,
            product_name, report_date, report_count, version_string)
           VALUES
           (%(signature_id)s, '1.0', %(product_version_id)s,
            'Firefox', '%(yesterday)s', 1, '9.0')
        """ % {
                'yesterday': yesterday,
                'product_version_id': other_product_version_id,
                'signature_id': signature_id
            })

        cursor.execute("""
            INSERT INTO signature_summary_installations
            (signature_id, product_name, version_string,
             report_date, crash_count, install_count)
            VALUES
            (%(signature_id)s, 'Firefox', '8.0', '%(yesterday)s', 10, 8)
        """ % {
            'yesterday': yesterday,
            'signature_id': signature_id
        })

        cursor.execute(
            """
            INSERT INTO signature_summary_os
            (signature_id, os_version_string, product_version_id,
             product_name, report_date, report_count, version_string)
            VALUES
            (%(signature_id)s, 'Windows NT 6.4',
             %(product_version_id)s, 'Firefox', '%(yesterday)s', 1, '8.0')
        """ % {
                'yesterday': yesterday,
                'product_version_id': product_version_id,
                'signature_id': signature_id
            })

        cursor.execute(
            """
            INSERT INTO signature_summary_process_type
            (signature_id, process_type, product_version_id,
             product_name, report_date, report_count, version_string)
            VALUES
            (%(signature_id)s, 'plugin', %(product_version_id)s,
             'Firefox', '%(yesterday)s', 1, '8.0')
        """ % {
                'yesterday': yesterday,
                'product_version_id': product_version_id,
                'signature_id': signature_id
            })

        cursor.execute(
            """
            INSERT INTO signature_summary_uptime
            (signature_id, uptime_string, product_version_id,
             product_name, report_date, report_count, version_string)
            VALUES
            (%(signature_id)s, '15-30 minutes',
             %(product_version_id)s, 'Firefox', '%(yesterday)s', 1, '8.0')
        """ % {
                'yesterday': yesterday,
                'product_version_id': product_version_id,
                'signature_id': signature_id
            })

        cursor.execute(
            """
            INSERT INTO exploitability_reports
            (signature_id, product_version_id, product_name, version_string,
             signature, report_date, null_count, none_count, low_count,
             medium_count, high_count)
            VALUES
            (%(signature_id)s, %(product_version_id)s, 'Firefox', '8.0',
             'Fake Signature #1', '%(yesterday)s', 1, 2, 3, 4, 5)
        """ % {
                'yesterday': yesterday,
                'signature_id': signature_id,
                'product_version_id': product_version_id
            })

        cursor.execute("""
            INSERT INTO android_devices
            (android_cpu_abi, android_manufacturer,
             android_model, android_version)
            VALUES
            ('armeabi-v7a', 'samsung', 'GT-P5100', '16 (REL)')
        """)

        cursor.execute("""
            SELECT android_device_id FROM android_devices
            WHERE android_cpu_abi = 'armeabi-v7a' AND
            android_manufacturer = 'samsung' AND
            android_model = 'GT-P5100' AND
            android_version = '16 (REL)'
        """)

        device_id = cursor.fetchone()[0]

        cursor.execute(
            """
            INSERT INTO signature_summary_device
            (report_date, signature_id, product_version_id, product_name,
             version_string, android_device_id, report_count)
            VALUES
            ('%(yesterday)s', %(signature_id)s, %(product_version_id)s,
             'Firefox', '8.0', %(device_id)s, 123)
        """ % {
                'yesterday': yesterday,
                'signature_id': signature_id,
                'device_id': device_id,
                'product_version_id': product_version_id
            })

        cursor.execute(
            """
           INSERT INTO signature_summary_device
           (report_date, signature_id, product_version_id, product_name,
            version_string, android_device_id, report_count)
           VALUES
           ('%(yesterday)s', %(signature_id)s, %(product_version_id)s,
            'Firefox', '9.0', %(device_id)s, 123)
        """ % {
                'yesterday': yesterday,
                'signature_id': signature_id,
                'device_id': device_id,
                'product_version_id': other_product_version_id
            })

        cursor.execute("""
            INSERT INTO graphics_device
            (vendor_hex, adapter_hex, vendor_name, adapter_name)
            VALUES
            ('0x1234', '0x5678', 'Test Vendor', 'Test Adapter')
        """)

        cursor.execute("""
            SELECT graphics_device_id FROM graphics_device
            WHERE vendor_hex = '0x1234' AND adapter_hex = '0x5678'
        """)

        graphics_device_id = cursor.fetchone()[0]

        cursor.execute(
            """
            INSERT INTO signature_summary_graphics
            (report_date, signature_id, graphics_device_id, product_version_id,
             product_name, version_string, report_count)
            VALUES
            ('%(yesterday)s', %(signature_id)s, %(device_id)s,
             %(product_version_id)s, 'Firefox', '8.0', 123)
        """ % {
                'yesterday': yesterday,
                'signature_id': signature_id,
                'device_id': graphics_device_id,
                'product_version_id': product_version_id
            })

        cls.connection.commit()

    def add_product_version_builds(self):
        cursor = self.connection.cursor()

        cursor.execute("""
            SELECT product_version_id
            FROM product_versions
            WHERE product_name = 'Firefox' and version_string = '8.0'
        """)
        product_version_id = cursor.fetchone()[0]

        cursor.execute("""
            INSERT INTO product_version_builds
            (build_id, platform, product_version_id)
            VALUES
            (1, 'Windows NT', %(product_version_id)s)
        """ % {'product_version_id': product_version_id})

        self.connection.commit()
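
The fixtures above interpolate values into SQL with the % operator,
which is tolerable for hard-coded test constants but unsafe with any
external input. A sketch of the last insert rewritten with bound
parameters, so the database driver does the escaping (psycopg2-style
%s placeholders assumed):

# Same insert as above, but with bound parameters instead of string
# interpolation; 'cursor' and 'product_version_id' are as in the
# fixture code.
sql = """
    INSERT INTO product_version_builds
    (build_id, platform, product_version_id)
    VALUES (%s, %s, %s)
"""
cursor.execute(sql, (1, 'Windows NT', product_version_id))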
Example no. 37
0
    def setUp(self):
        """Set up this test class by populating the reports table with fake
        data. """
        super(IntegrationTestCrashes, self).setUp()

        cursor = self.connection.cursor()

        self.now = datetimeutil.utc_now()
        yesterday = self.now - datetime.timedelta(days=1)
        day_before_yesterday = self.now - datetime.timedelta(days=2)

        build_date = self.now - datetime.timedelta(days=30)
        sunset_date = self.now + datetime.timedelta(days=30)

        cursor.execute("""
            INSERT INTO products
            (product_name, sort, release_name)
            VALUES
            (
                'WaterWolf',
                1,
                'WaterWolf'
            );
        """)

        cursor.execute("""
            INSERT INTO product_versions
            (product_version_id, product_name, major_version, release_version,
             version_string, version_sort, build_date, sunset_date,
             featured_version, build_type, is_rapid_beta, rapid_beta_id)
            VALUES
            (
                1,
                'WaterWolf',
                '1.0',
                '1.0',
                '1.0',
                '10000011000',
                '%(build_date)s',
                '%(sunset_date)s',
                't',
                'Nightly',
                False,
                NULL
            ),
            (
                2,
                'WaterWolf',
                '2.0',
                '2.0',
                '2.0',
                '10000012000',
                '%(build_date)s',
                '%(sunset_date)s',
                't',
                'Nightly',
                False,
                NULL
            );
        """ % {
            "build_date": build_date,
            "sunset_date": sunset_date
        })

        cursor.execute("""
            INSERT INTO gccrashes (report_date, product_version_id, build,
                                   gc_count_madu)
            VALUES
            ('%s', '%s', '%s', '%s'),
            ('%s', '%s', '%s', '%s'),
            ('%s', '%s', '%s', '%s');
        """ % (yesterday, "1", "10000011000", "42", day_before_yesterday, "1",
               "10000011000", "42", yesterday, "2", "10000012000", "24"))

        self.connection.commit()
        cursor.close()
Example no. 38
0
    def _run_one(self, job_class, config, force=False):
        seconds = convert_frequency(config.frequency)
        time_ = config.time
        if not force:
            if not self.time_to_run(job_class, time_):
                self.logger.debug("skipping %r because it's not time to run", job_class)
                return

        self.logger.debug('about to run %r', job_class)
        app_name = job_class.app_name
        info = self.job_state_database.get(app_name)

        last_success = None
        now = utc_now()
        log_run = True

        exc_type = exc_value = exc_tb = None
        try:
            t0 = time.time()
            for last_success in self._run_job(job_class, config, info):
                t1 = time.time()
                self.logger.debug('successfully ran %r on %s', job_class, last_success)
                self._remember_success(job_class, last_success, t1 - t0)
                # _run_job() returns a generator, so we don't know how
                # many times this will loop. Reset 't0' for the next
                # iteration, if there is one.
                t0 = time.time()
        except (OngoingJobError, RowLevelLockError):
            # Not an actual runtime error; it simply means crontabber
            # cannot start this job right now.
            log_run = False
            raise
        except Exception:
            t1 = time.time()
            exc_type, exc_value, exc_tb = sys.exc_info()

            if self.config.sentry and self.config.sentry.dsn:
                client = sentry_client.get_client(self.config.sentry.dsn)
                identifier = client.get_ident(client.captureException())
                self.logger.info('Error captured in Sentry. Reference: %s', identifier)

            self.logger.debug(
                'error when running %r on %s', job_class, last_success, exc_info=True
            )
            self._remember_failure(
                job_class,
                t1 - t0,
                exc_type,
                exc_value,
                exc_tb
            )

        finally:
            if log_run:
                self._log_run(
                    job_class,
                    seconds,
                    time_,
                    last_success,
                    now,
                    exc_type, exc_value, exc_tb
                )
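
_run_one() only relies on a job class exposing an app_name and being
drivable through _run_job(), which yields one timestamp per successful
pass. A minimal sketch of a conforming job class; the class and its
run() contract are assumptions, not the exact crontabber API:

# Hypothetical job class; only the app_name attribute and the idea
# that raising signals failure are taken from the code above.
class CleanupJob(object):
    app_name = 'cleanup-job'

    def run(self):
        # Work goes here. Raising is recorded via _remember_failure();
        # returning normally ends up in _remember_success().
        pass
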
    def setUp(self):
        super(IntegrationTestCrashAduByBuildSignature, self).setUp()

        cursor = self.connection.cursor()

        # Insert data
        self.now = datetimeutil.utc_now()
        self.tomorrow = self.now + datetime.timedelta(days=1)

        tomorrow = self.tomorrow.date()
        now = self.now.date()

        cursor.execute("""
            INSERT INTO products
            (product_name, sort, rapid_release_version, release_name)
            VALUES
            (
                'Firefox',
                1,
                '8.0',
                'firefox'
            );
        """)

        cursor.execute("""
            INSERT INTO release_channels
            (release_channel, sort)
            VALUES
            (
                'Release', 1
            ),
            (
                'Beta', 2
            );
        """)

        cursor.execute("""
            INSERT INTO product_versions
            (product_version_id,
             product_name, major_version, release_version, version_string,
             build_date, sunset_date, featured_version, build_type,
             version_sort, has_builds, is_rapid_beta, build_type_enum)
            VALUES
            (
                1,
                'Firefox',
                '8.0',
                '8.0',
                '8.0',
                '%(now)s',
                '%(now)s',
                False,
                'release',
                '0008000',
                True,
                False,
                'release'
            );
        """ % {'now': now})

        cursor.execute("""
            INSERT INTO signatures
            (first_build, first_report, signature)
            VALUES
            ('20130701120000', '%(now)s', 'Fake Signature #1')
        """ % {'now': now})

        cursor.execute("""
            SELECT signature_id FROM signatures
            WHERE signature = 'Fake Signature #1'
        """)

        signature_id = cursor.fetchone()[0]

        cursor.execute("""
            SELECT product_version_id
            FROM product_versions
            WHERE product_name = 'Firefox' and version_string = '8.0'
        """)
        product_version_id = cursor.fetchone()[0]

        cursor.execute(
            """
            INSERT INTO reports_clean
            (address_id,
             build,
             date_processed,
             domain_id,
             flash_version_id,
             os_name,
             os_version_id,
             process_type,
             reason_id,
             release_channel,
             signature_id,
             uuid,
             build_type,
             product_version_id)
            VALUES
            (1,
             '%(build)s',
             '%(now)s',
             1,
             1,
             'windows',
             '9',
             'browser',
             1,
             'release',
             '%(signature_id)s',
             'a1',
             'release',
             '%(product_version_id)s')""" % {
                'now': now,
                'build': now.strftime('%Y%m%d'),
                'signature_id': signature_id,
                'product_version_id': product_version_id
            })

        cursor.execute(
            """
             INSERT INTO build_adu
                (product_version_id,
                build_date,
                adu_date,
                os_name,
                adu_count)
             VALUES
                (%(product_version_id)s,
                '%(now)s',
                '%(now)s',
                'windows',
                123),
                (%(product_version_id)s,
                '%(tomorrow)s',
                '%(tomorrow)s',
                'windows',
                321) """ % {
                'product_version_id': product_version_id,
                'now': now,
                'tomorrow': tomorrow
            })
Example no. 40
0
def mock_utc_now():
    n = utc_now()
    n = n.replace(hour=3)
    return n
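
A fixed-clock helper like this is typically swapped in with mock.patch
so the code under test sees 03:00 on every call. A sketch of that
usage; the patch target path is an assumption:

import mock

# The target string is hypothetical: point it at the module where the
# code under test imports utc_now from.
with mock.patch('crontabber.app.utc_now', new=mock_utc_now):
    pass  # run the code under test; utc_now() now reports hour 3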
Example no. 41
0
    def process_crash(self, raw_crash, raw_dumps, processed_crash):
        """Take a raw_crash and its associated raw_dumps and return a processed_crash

        If this throws an exception, the crash was not processed correctly.

        """
        # processor_meta_data is used to ferry "inside information" to
        # transformation rules. Sometimes rules need extra information
        # about the transformation process itself.
        processor_meta_data = DotDict()
        processor_meta_data.processor_notes = [
            self.config.processor_name, self.__class__.__name__
        ]
        processor_meta_data.quit_check = self.quit_check
        processor_meta_data.processor = self
        processor_meta_data.config = self.config

        if "processor_notes" in processed_crash:
            original_processor_notes = [
                x.strip() for x in processed_crash.processor_notes.split(";")
            ]
            processor_meta_data.processor_notes.append(
                "earlier processing: %s" %
                processed_crash.get("started_datetime", 'Unknown Date'))
        else:
            original_processor_notes = []

        processed_crash.success = False
        processed_crash.started_datetime = utc_now()
        # for backwards compatibility:
        processed_crash.startedDateTime = processed_crash.started_datetime
        processed_crash.signature = 'EMPTY: crash failed to process'

        crash_id = raw_crash['uuid']

        # quit_check calls ought to be scattered around the code to allow
        # the processor to be responsive to requests to shut down.
        self.quit_check()

        self.logger.info('starting transform for crash: %s', crash_id)
        # logger.info() returns None, so record the start time explicitly.
        processor_meta_data.started_timestamp = utc_now()

        # Apply rules; if a rule fails, capture the error and continue onward
        for rule in self.rules:
            try:
                rule.act(raw_crash, raw_dumps, processed_crash,
                         processor_meta_data)

            except Exception as exc:
                # If a rule throws an error, capture it and toss it in the
                # processor notes
                sentry_client.capture_error(sentry_dsn=self.sentry_dsn,
                                            logger=self.logger,
                                            extra={'crash_id': crash_id})
                # NOTE(willkg): notes are public, so we can't put exception
                # messages in them
                processor_meta_data.processor_notes.append(
                    'rule %s failed: %s' %
                    (rule.__class__.__name__, exc.__class__.__name__))

            self.quit_check()

        # The crash made it through the processor rules with no exceptions
        # raised, call it a success
        processed_crash.success = True

        # The processor notes are in the form of a list.  Join them all
        # together to make a single string
        processor_meta_data.processor_notes.extend(original_processor_notes)
        processed_crash.processor_notes = '; '.join(
            processor_meta_data.processor_notes)
        completed_datetime = utc_now()
        processed_crash.completed_datetime = completed_datetime

        # For backwards compatibility
        processed_crash.completeddatetime = completed_datetime

        self.logger.info("finishing %s transform for crash: %s",
                         'successful' if processed_crash.success else 'failed',
                         crash_id)
        return processed_crash
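
Each entry in self.rules only needs an act() method matching the
four-argument call in the loop above. A minimal sketch of such a rule;
the class itself is hypothetical:

# Hypothetical rule: records the crash id on the processed crash.
# Only the act() signature is taken from the processing loop above.
class CrashIdRule(object):
    def act(self, raw_crash, raw_dumps, processed_crash,
            processor_meta_data):
        processed_crash['crash_id'] = raw_crash['uuid']
        processor_meta_data.processor_notes.append('crash id recorded')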
Example no. 42
0
    def fix_date_parameter(self, parameters):
        """Correct the date parameter.

        If there is no date parameter, set default values. Otherwise, make
        sure there is exactly one upper bound ('<') and one lower bound
        ('>') value.
        """
        default_date_range = datetime.timedelta(
            days=self.config.search_default_date_range
        )
        maximum_date_range = datetime.timedelta(
            days=self.config.search_maximum_date_range
        )

        if not parameters.get('date'):
            now = datetimeutil.utc_now()
            lastweek = now - default_date_range

            parameters['date'] = []
            parameters['date'].append(SearchParam(
                'date', lastweek, '>=', 'datetime'
            ))
            parameters['date'].append(SearchParam(
                'date', now, '<=', 'datetime'
            ))
        else:
            lower_than = None
            greater_than = None
            for param in parameters['date']:
                if (
                    '<' in param.operator and (
                        not lower_than or
                        (lower_than and lower_than.value > param.value)
                    )
                ):
                    lower_than = param
                if (
                    '>' in param.operator and (
                        not greater_than or
                        (greater_than and greater_than.value < param.value)
                    )
                ):
                    greater_than = param

            # Remove all the existing parameters so we have exactly
            # one lower value and one greater value
            parameters['date'] = []

            if not lower_than:
                # add an upper bound ('<=') set to now
                lower_than = SearchParam(
                    'date', datetimeutil.utc_now(), '<=', 'datetime'
                )

            if not greater_than:
                # add a lower bound ('>=') set to the upper bound minus
                # the default date range
                greater_than = SearchParam(
                    'date',
                    lower_than.value - default_date_range,
                    '>=',
                    'datetime'
                )

            # Verify the date range is not too big.
            delta = lower_than.value - greater_than.value
            if delta > maximum_date_range:
                raise BadArgumentError(
                    'date',
                    msg='Date range is bigger than %s days' %
                    self.config.search_maximum_date_range
                )

            parameters['date'].append(lower_than)
            parameters['date'].append(greater_than)
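
For example, passing a single lower bound returns exactly two bounds,
with the upper bound defaulting to now. A sketch, assuming 'api' is an
object exposing fix_date_parameter and 'three_days_ago' is a datetime
inside the configured maximum range:

# Hypothetical input: one '>=' bound only.
params = {
    'date': [SearchParam('date', three_days_ago, '>=', 'datetime')]
}
api.fix_date_parameter(params)
# params['date'] now holds exactly two entries: the original '>='
# bound plus a '<=' bound set to utc_now().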
Example no. 43
0
    def setUp(self):
        """Set up this test class by populating the reports table with fake
        data. """
        super(IntegrationTestSearch, self).setUp()

        cursor = self.connection.cursor()

        # Insert data
        now = datetimeutil.utc_now()
        yesterday = now - datetime.timedelta(days=1)

        cursor.execute("""
            INSERT INTO reports
            (
                id,
                uuid,
                date_processed,
                product,
                version,
                build,
                signature,
                reason,
                os_name,
                hangid,
                process_type,
                release_channel
            )
            VALUES
            (
                1,
                '1',
                '%(yesterday)s',
                'WaterWolf',
                '1.0',
                '20001212010203',
                'sig1',
                'STACK_OVERFLOW',
                'Linux',
                1,
                'browser',
                'Release'
            ),
            (
                2,
                '2',
                '%(yesterday)s',
                'WaterWolf',
                '2.0',
                '20001212010204',
                'sig1',
                'SIGFAULT',
                'Windows NT',
                2,
                'browser',
                'Release'
            ),
            (
                3,
                '3',
                '%(yesterday)s',
                'WaterWolf',
                '1.0',
                '20001212010205',
                'sig1',
                'BIG_FAILURE',
                'Windows NT',
                null,
                'plugin',
                'Release'
            ),
            (
                4,
                '4',
                '%(yesterday)s',
                'WaterWolf',
                '1.0',
                '20001212010203',
                'sig1',
                'STACK_OVERFLOW',
                'Windows NT',
                null,
                'browser',
                'Release'
            ),
            (
                5,
                '5',
                '%(yesterday)s',
                'WaterWolf',
                '1.0',
                '20001212010203',
                'sig2',
                'STACK_OVERFLOW',
                'Linux',
                null,
                'browser',
                'Release'
            ),
            (
                6,
                '6',
                '%(yesterday)s',
                'WaterWolf',
                '3.0',
                '20001212010203',
                'sig2',
                'STACK_OVERFLOW',
                'Windows NT',
                null,
                'browser',
                'Release'
            ),
            (
                7,
                '7',
                '%(yesterday)s',
                'NightlyTrain',
                '1.0',
                '20001212010203',
                'sig2',
                'STACK_OVERFLOW',
                'Linux',
                null,
                'plugin',
                'Release'
            ),
            (
                8,
                '8',
                '%(yesterday)s',
                'WaterWolf',
                '1.0',
                '20001212010204',
                'sig3',
                'STACK_OVERFLOW',
                'Linux',
                null,
                'browser',
                'Release'
            ),
            (
                9,
                '9',
                '%(yesterday)s',
                'NightlyTrain',
                '1.0',
                '20001212010203',
                'js::functions::call::hello_world',
                'SIGFAULT',
                'Linux',
                null,
                'browser',
                'Release'
            ),
            (
                10,
                '10',
                '%(yesterday)s',
                'WindBear',
                '1.0',
                '20001212010203',
                'this/is+a=C|signature',
                'STACK_OVERFLOW',
                'Linux',
                null,
                'browser',
                'Release'
            );
        """ % {
            'yesterday': yesterday
        })

        cursor.execute("""
            INSERT INTO plugins_reports
            (
                report_id,
                plugin_id,
                date_processed,
                version
            )
            VALUES
            (
                3,
                1,
                '%(yesterday)s',
                '1.23.001'
            ),
            (
                7,
                2,
                '%(yesterday)s',
                '2.0.1'
            );
        """ % {
            'yesterday': yesterday
        })

        cursor.execute("""
            INSERT INTO plugins
            (
                id,
                filename,
                name
            )
            VALUES
            (
                1,
                'flash.dll',
                'Flash'
            ),
            (
                2,
                'NPSWF32_11_5_502_146.dll',
                'someplugin'
            );
        """)

        self.connection.commit()
Example no. 44
0
    def query(self, from_date, to_date, json_query):
        """
        Send a query directly to ElasticSearch and return the result.
        """
        # Default dates
        now = dtutil.utc_now().date()
        lastweek = now - timedelta(7)

        from_date = dtutil.string_to_datetime(from_date) or lastweek
        to_date = dtutil.string_to_datetime(to_date) or now

        # Create the indexes to use for querying.
        daterange = []
        delta_day = to_date - from_date
        for delta in range(0, delta_day.days + 1):
            day = from_date + timedelta(delta)
            index = "socorro_%s" % day.strftime("%y%m%d")
            # Cache protection for limiting the number of HTTP calls
            if index not in self.cache or not self.cache[index]:
                daterange.append(index)

        can_return = False

        # -
        # This code avoids failing queries caused by missing indexes.
        # That should not happen in production, but this guard ensures
        # users never see a 500 error if it does.
        # -

        # Iterate until we can return an actual result and not an error
        while not can_return:
            if not daterange:
                http_response = "{}"
                break

            datestring = ",".join(daterange)
            uri = "/%s/_search" % datestring

            with self.http:
                http_response = self.http.post(uri, json_query)

            # If there has been an error, we get a dict back instead of
            # a JSON string.
            if isinstance(http_response, dict):
                data = http_response["error"]["data"]

                # If an index is missing,
                # try to remove it from the list of indexes and retry.
                if (http_response["error"]["code"] == 404 and
                    data.find("IndexMissingException") >= 0):
                    index = data[data.find("[[") + 2:data.find("]")]

                    # Cache protection for limiting the number of HTTP calls
                    self.cache[index] = True

                    daterange.remove(index)
            else:
                can_return = True

        return (http_response, "text/json")
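
The per-day index names built in the loop above follow the fixed
"socorro_%y%m%d" pattern, one index per day in the range. A small
self-contained sketch of that expansion:

from datetime import date, timedelta

# Mirrors the daterange loop above: one index name per day, inclusive.
def indexes_between(from_date, to_date):
    days = (to_date - from_date).days
    return ["socorro_%s" % (from_date + timedelta(d)).strftime("%y%m%d")
            for d in range(days + 1)]

print(indexes_between(date(2013, 1, 30), date(2013, 2, 1)))
# -> ['socorro_130130', 'socorro_130131', 'socorro_130201']
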
    def test_email_cannot_be_sent_twice(self, exacttarget_mock):
        (config_manager, json_file) = self._setup_config_manager(
            restrict_products=['NightlyTrain']
        )
        et_mock = exacttarget_mock.return_value

        # Prepare failures
        _failures = []
        _email_sent = []

        class SomeRandomError(Exception):
            pass

        def trigger_send(template, fields):
            email = fields['EMAIL_ADDRESS_']
            if email == '*****@*****.**' and email not in _failures:
                _failures.append(email)
                raise SomeRandomError('This is an error. ')
            else:
                _email_sent.append(email)

        et_mock.trigger_send = trigger_send

        with config_manager.context() as config:
            tab = crontabber.CronTabber(config)
            tab.run_all()

            information = json.load(open(json_file))
            assert information['automatic-emails']
            assert information['automatic-emails']['last_error']
            self.assertEqual(
                information['automatic-emails']['last_error']['type'],
                str(SomeRandomError)
            )

            # Verify that user's data was updated, but not all of it
            self.assertEqual(_email_sent, ['*****@*****.**', '*****@*****.**'])
            cursor = self.conn.cursor()
            emails_list = (
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**'
            )
            sql = """
                SELECT email, last_sending
                FROM emails
                WHERE email IN %s
            """ % (emails_list,)
            cursor.execute(sql)
            now = utc_now()
            self.assertEqual(cursor.rowcount, 2)
            for row in cursor.fetchall():
                assert row[0] in ('*****@*****.**', '*****@*****.**')
                self.assertEqual(row[1].year, now.year)
                self.assertEqual(row[1].month, now.month)
                self.assertEqual(row[1].day, now.day)

            # Run crontabber again and verify that all users are updated,
            # and emails are not sent twice
            self._wind_clock(json_file, hours=1)

            # This forces a crontabber instance to reload the JSON file
            tab._database = None

            tab.run_all()

            information = json.load(open(json_file))
            assert information['automatic-emails']
            assert not information['automatic-emails']['last_error']
            assert information['automatic-emails']['last_success']

            # Verify that users were not sent an email twice
            self.assertEqual(_email_sent, [
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**'
            ])
Example no. 46
0
    def setUpClass(cls):
        """ Populate product_info table with fake data """
        super(IntegrationTestProducts, cls).setUpClass()

        cursor = cls.connection.cursor()

        # Insert data
        cls.now = datetimeutil.utc_now()
        now = cls.now.date()
        lastweek = now - datetime.timedelta(days=7)

        cursor.execute("""
            INSERT INTO products
            (product_name, sort, rapid_release_version, release_name)
            VALUES
            (
                'Firefox',
                1,
                '8.0',
                'firefox'
            ),
            (
                'Fennec',
                3,
                '11.0',
                'mobile'
            ),
            (
                'Thunderbird',
                2,
                '10.0',
                'thunderbird'
            );
        """)

        cursor.execute("""
            INSERT INTO release_channels
            (release_channel, sort)
            VALUES
            (
                'Release', 1
            ),
            (
                'Beta', 2
            );
        """)

        cursor.execute("""
            INSERT INTO product_release_channels
            (product_name, release_channel, throttle)
            VALUES
            (
                'Firefox', 'Release', '0.1'
            ),
            (
                'Fennec', 'Release', '0.1'
            ),
            (
                'Fennec', 'Beta', '1.0'
            ),
            (
                'Thunderbird', 'Release', '0.1'
            );
        """)

        # Insert versions, contains an expired version
        cursor.execute("""
            INSERT INTO product_versions
            (product_name, major_version, release_version, version_string,
             build_date, sunset_date, featured_version, build_type,
             version_sort)
            VALUES
            (
                'Firefox',
                '8.0',
                '8.0',
                '8.0',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '0008000'
            ),
            (
                'Firefox',
                '9.0',
                '9.0',
                '9.0',
                '%(lastweek)s',
                '%(lastweek)s',
                False,
                'Nightly',
                '0009000'
            ),
            (
                'Fennec',
                '11.0',
                '11.0',
                '11.0.1',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '0011001'
            ),
            (
                'Fennec',
                '12.0',
                '12.0',
                '12.0b1',
                '%(now)s',
                '%(now)s',
                False,
                'Beta',
                '00120b1'
            ),
            (
                'Thunderbird',
                '10.0',
                '10.0',
                '10.0.2b',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '001002b'
            );
        """ % {
            'now': now,
            'lastweek': lastweek
        })

        cls.connection.commit()
Example no. 47
0
    def setUp(self):
        super(IntegrationTestAutomaticEmails, self).setUp()
        # prep a fake table
        now = utc_now() - datetime.timedelta(minutes=30)
        last_month = now - datetime.timedelta(days=31)

        config_manager = self._setup_storage_config()
        with config_manager.context() as config:
            storage = ElasticSearchCrashStorage(config)
            # clear the indices cache so the index is created on every test
            storage.indices_cache = set()

            storage.save_processed({
                'uuid': '1',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now,
                'classifications': {
                    'support': {
                        'classification': 'unknown'
                    }
                }
            })
            storage.save_processed({
                'uuid': '2',
                'email': '"Quidam" <*****@*****.**>',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now,
                'classifications': {
                    'support': {
                        'classification': None
                    }
                }
            })
            storage.save_processed({
                'uuid': '3',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now,
                'classifications': {
                    'support': {
                        'classification': 'bitguard'
                    }
                }
            })
            storage.save_processed({
                'uuid': '4',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '5',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '6',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '7',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '8',
                'email': '*****@*****.**',
                'product': 'NightlyTrain',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '9',
                'email': '*****@*****.**',
                'product': 'EarthRaccoon',
                'version': '1.0',
                'release_channel': 'Nightly',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '18',
                'email': 'z\xc3\[email protected]',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })

            # Let's insert a duplicate
            storage.save_processed({
                'uuid': '10',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })

            # And let's insert some invalid crashes
            storage.save_processed({
                'uuid': '11',
                'email': None,
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '12',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': last_month
            })
            storage.save_processed({
                'uuid': '13',
                'email': '*****@*****.**',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '14',
                'email': '*****@*****.**',
                'product': 'WindBear',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })

            # Finally some invalid email addresses
            storage.save_processed({
                'uuid': '15',
                'email': '     ',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '16',
                'email': 'invalid@email',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })
            storage.save_processed({
                'uuid': '17',
                'email': 'i.do.not.work',
                'product': 'WaterWolf',
                'version': '20.0',
                'release_channel': 'Release',
                'date_processed': now
            })

            # Create some email addresses.
            storage.create_emails_index()
            storage.es.index(
                index=config.elasticsearch_emails_index,
                doc_type='emails',
                doc={
                    'email': '*****@*****.**',
                    'last_sending': last_month
                },
                id='*****@*****.**',
            )
            storage.es.index(
                index=config.elasticsearch_emails_index,
                doc_type='emails',
                doc={
                    'email': '"Quidam" <*****@*****.**>',
                    'last_sending': last_month
                },
                id='"Quidam" <*****@*****.**>',
            )
            storage.es.index(
                index=config.elasticsearch_emails_index,
                doc_type='emails',
                doc={
                    'email': '*****@*****.**',
                    'last_sending': now
                },
                id='*****@*****.**',
            )

            # As indexing is asynchronous, we need to force elasticsearch to
            # make the newly created content searchable before we run the
            # tests.
            storage.es.refresh()
Example no. 48
0
    def setUp(self):
        """ Populate product_info table with fake data """
        super(IntegrationTestProducts, self).setUp()

        cursor = self.connection.cursor()

        # Insert data
        self.now = datetimeutil.utc_now()
        now = self.now.date()
        lastweek = now - datetime.timedelta(days=7)

        cursor.execute("""
            INSERT INTO products
            (product_name, sort, rapid_release_version, release_name)
            VALUES
            (
                'Firefox',
                1,
                '8.0',
                'firefox'
            ),
            (
                'Fennec',
                3,
                '11.0',
                'mobile'
            ),
            (
                'Thunderbird',
                2,
                '10.0',
                'thunderbird'
            );
        """)

        cursor.execute("""
            INSERT INTO release_channels
            (release_channel, sort)
            VALUES
            (
                'Release', 1
            ),
            (
                'Beta', 2
            );
        """)

        cursor.execute("""
            INSERT INTO product_release_channels
            (product_name, release_channel, throttle)
            VALUES
            (
                'Firefox', 'Release', '0.1'
            ),
            (
                'Fennec', 'Release', '0.1'
            ),
            (
                'Fennec', 'Beta', '1.0'
            ),
            (
                'Thunderbird', 'Release', '0.1'
            );
        """)

        # Insert versions, contains an expired version
        cursor.execute("""
            INSERT INTO product_versions
            (product_name, major_version, release_version, version_string,
             build_date, sunset_date, featured_version, build_type,
             version_sort)
            VALUES
            (
                'Firefox',
                '8.0',
                '8.0',
                '8.0',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '0008000'
            ),
            (
                'Firefox',
                '9.0',
                '9.0',
                '9.0',
                '%(lastweek)s',
                '%(lastweek)s',
                False,
                'Nightly',
                '0009000'
            ),
            (
                'Fennec',
                '11.0',
                '11.0',
                '11.0.1',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '0011001'
            ),
            (
                'Fennec',
                '12.0',
                '12.0',
                '12.0b1',
                '%(now)s',
                '%(now)s',
                False,
                'Beta',
                '00120b1'
            ),
            (
                'Thunderbird',
                '10.0',
                '10.0',
                '10.0.2b',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '001002b'
            );
        """ % {
            'now': now,
            'lastweek': lastweek
        })

        # insert bixie product releases
        cursor.execute("""
            INSERT INTO bixie.raw_product_releases
            (id, product_name, version, build, build_type, platform,
             repository, stability)
            VALUES
            (
                1,
                'EmailApp',
                '0.1',
                1234567890,
                'Release',
                'mobile',
                'repo',
                'stable'
            ),
            (
                2,
                'EmailApp',
                '0.2',
                1234567890,
                'Beta',
                'mobile',
                'repo',
                'stable'
            ),
            (
                3,
                'ClockOClock',
                '1.0.18',
                1234567890,
                'Release',
                'mobile',
                'repo',
                'stable'
            )
        """)

        self.connection.commit()
Example no. 49
0
    def setUp(self):
        super(IntegrationElasticsearchSearch, self).setUp()

        config = self.get_config_context()
        self.api = Search(config=config)
        self.storage = crashstorage.ElasticSearchCrashStorage(config)

        # clear the indices cache so the index is created on every test
        self.storage.indices_cache = set()

        # Create the supersearch fields.
        self.storage.es.bulk_index(
            index=config.webapi.elasticsearch_default_index,
            doc_type='supersearch_fields',
            docs=SUPERSEARCH_FIELDS.values(),
            id_field='name',
            refresh=True,
        )

        now = datetimeutil.utc_now()

        yesterday = now - datetime.timedelta(days=1)
        yesterday = datetimeutil.date_to_string(yesterday)

        last_month = now - datetime.timedelta(weeks=4)
        last_month = datetimeutil.date_to_string(last_month)

        # insert data into elasticsearch
        default_crash_report = {
            'uuid': 100,
            'signature': 'js::break_your_browser',
            'date_processed': yesterday,
            'product': 'WaterWolf',
            'version': '1.0',
            'release_channel': 'release',
            'os_name': 'Linux',
            'build': '1234567890',
            'reason': 'MOZALLOC_WENT_WRONG',
            'hangid': None,
            'process_type': None,
        }

        self.storage.save_processed(default_crash_report)

        self.storage.save_processed(
            dict(default_crash_report, uuid=1, product='EarthRaccoon')
        )

        self.storage.save_processed(
            dict(default_crash_report, uuid=2, version='2.0')
        )

        self.storage.save_processed(
            dict(default_crash_report, uuid=3, release_channel='aurora')
        )

        self.storage.save_processed(
            dict(default_crash_report, uuid=4, os_name='Windows NT')
        )

        self.storage.save_processed(
            dict(default_crash_report, uuid=5, build='0987654321')
        )

        self.storage.save_processed(
            dict(default_crash_report, uuid=6, reason='VERY_BAD_EXCEPTION')
        )

        self.storage.save_processed(
            dict(default_crash_report, uuid=7, hangid='12')
        )

        self.storage.save_processed(
            dict(default_crash_report, uuid=8, process_type='plugin')
        )

        self.storage.save_processed(
            dict(default_crash_report, uuid=9, signature='my_bad')
        )

        self.storage.save_processed(
            dict(
                default_crash_report,
                uuid=10,
                date_processed=last_month,
                signature='my_little_signature',
            )
        )

        # for plugin terms test
        self.storage.save_processed(
            dict(
                default_crash_report,
                uuid=11,
                product='PluginSoft',
                process_type='plugin',
                PluginFilename='carly.dll',
                PluginName='Hey I just met you',
                PluginVersion='1.2',
            )
        )

        self.storage.save_processed(
            dict(
                default_crash_report,
                uuid=12,
                product='PluginSoft',
                process_type='plugin',
                PluginFilename='hey.dll',
                PluginName='Hey Plugin',
                PluginVersion='10.7.0.2a',
            )
        )

        self.storage.save_processed(
            dict(
                default_crash_report,
                uuid=13,
                product='EarlyOwl',
                version='11.0b1',
                release_channel='beta',
            )
        )

        self.storage.save_processed(
            dict(
                default_crash_report,
                uuid=14,
                product='EarlyOwl',
                version='11.0b2',
                release_channel='beta',
            )
        )

        # As indexing is asynchronous, we need to force elasticsearch to
        # make the newly created content searchable before we run the tests
        self.storage.es.refresh()
    def setUp(self):
        super(TestFunctionalAutomaticEmails, self).setUp()
        # prep a fake table
        now = utc_now() - datetime.timedelta(minutes=30)
        last_month = now - datetime.timedelta(days=31)
        cursor = self.conn.cursor()

        cursor.execute("""
            INSERT INTO reports
            (uuid, email, product, version, release_channel, date_processed)
            VALUES (
                '1',
                '*****@*****.**',
                'WaterWolf',
                '20.0',
                'Release',
                '%(now)s'
            ), (
                '2',
                '*****@*****.**',
                'WaterWolf',
                '20.0',
                'Release',
                '%(now)s'
            ), (
                '3',
                '*****@*****.**',
                'WaterWolf',
                '20.0',
                'Release',
                '%(now)s'
            ), (
                '4',
                '*****@*****.**',
                'NightlyTrain',
                '1.0',
                'Nightly',
                '%(now)s'
            ), (
                '5',
                '*****@*****.**',
                'NightlyTrain',
                '1.0',
                'Nightly',
                '%(now)s'
            ), (
                '6',
                '*****@*****.**',
                'NightlyTrain',
                '1.0',
                'Nightly',
                '%(now)s'
            ), (
                '7',
                '*****@*****.**',
                'NightlyTrain',
                '1.0',
                'Nightly',
                '%(now)s'
            ), (
                '8',
                '*****@*****.**',
                'NightlyTrain',
                '1.0',
                'Nightly',
                '%(now)s'
            )
        """ % {'now': now})

        # Let's insert a duplicate
        cursor.execute("""
            INSERT INTO reports
            (uuid, email, product, version, release_channel, date_processed)
            VALUES (
                '10',
                '*****@*****.**',
                'WaterWolf',
                '20.0',
                'Release',
                '%(now)s'
            )
        """ % {'now': now})

        # And let's insert some invalid crashes
        cursor.execute("""
            INSERT INTO reports
            (uuid, email, product, version, release_channel, date_processed)
            VALUES (
                '11',
                null,
                'WaterWolf',
                '20.0',
                'Release',
                '%(now)s'
            ), (
                '12',
                '*****@*****.**',
                'WaterWolf',
                '20.0',
                'Release',
                '%(last_month)s'
            ), (
                '13',
                '*****@*****.**',
                'WaterWolf',
                '20.0',
                'Release',
                '%(now)s'
            ), (
                '14',
                '*****@*****.**',
                'WindBear',
                '20.0',
                'Release',
                '%(now)s'
            )
        """ % {'now': now, 'last_month': last_month})

        cursor.execute("""
            INSERT INTO emails (email, last_sending)
            VALUES (
                '*****@*****.**',
                '%(last_month)s'
            ), (
                '*****@*****.**',
                '%(last_month)s'
            ), (
                '*****@*****.**',
                '%(now)s'
            )
        """ % {'now': now, 'last_month': last_month})

        self.conn.commit()
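
A caveat on the fixture above: the timestamps are spliced into the SQL with Python's % operator, which is tolerable for trusted test data but is exactly the pattern that invites SQL injection elsewhere. The parameterized equivalent under psycopg2, reusing the fixture's cursor and now (the email value is illustrative):

cursor.execute("""
    INSERT INTO emails (email, last_sending)
    VALUES (%(email)s, %(last_sending)s)
""", {'email': 'someone@example.com', 'last_sending': now})
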
Example no. 51
    def test_email_cannot_be_sent_twice(self, exacttarget_mock):
        config_manager = self._setup_config_manager(
            restrict_products=['NightlyTrain']
        )
        et_mock = exacttarget_mock.return_value

        # Prepare failures
        _failures = []
        _email_sent = []

        class SomeRandomError(Exception):
            pass

        def trigger_send(template, fields):
            email = fields['EMAIL_ADDRESS_']
            if email == '*****@*****.**' and email not in _failures:
                _failures.append(email)
                raise SomeRandomError('This is an error. ')
            else:
                _email_sent.append(email)

        et_mock.trigger_send = trigger_send

        with config_manager.context() as config:
            tab = crontabber.CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['automatic-emails']
            assert information['automatic-emails']['last_error']
            self.assertEqual(
                information['automatic-emails']['last_error']['type'],
                str(SomeRandomError)
            )

            # Verify that users' data was updated, but not all of it
            self.assertEqual(_email_sent, ['*****@*****.**', '*****@*****.**'])
            emails_list = (
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**'
            )

            conf = config.crontabber['class-AutomaticEmailsCronApp']
            es = SuperS().es(
                urls=conf.elasticsearch.elasticsearch_urls,
                timeout=conf.elasticsearch.elasticsearch_timeout,
            )
            search = es.indexes(
                conf.elasticsearch.elasticsearch_emails_index
            )
            search = search.doctypes('emails')
            es.get_es().refresh()

            search = search.filter(_id__in=emails_list)
            res = search.execute()
            self.assertEqual(res.count, 2)

            now = utc_now()
            for row in res.results:
                assert row['_id'] in ('*****@*****.**', '*****@*****.**')
                date = string_to_datetime(row['_source']['last_sending'])
                self.assertEqual(date.year, now.year)
                self.assertEqual(date.month, now.month)
                self.assertEqual(date.day, now.day)

            # Run crontabber again and verify that all users are updated,
            # and emails are not sent twice
            state = tab.database['automatic-emails']
            self._wind_clock(state, hours=1)
            tab.database['automatic-emails'] = state

            tab.run_all()

            information = self._load_structure()
            assert information['automatic-emails']
            assert not information['automatic-emails']['last_error']
            assert information['automatic-emails']['last_success']

            # Verify that users were not sent an email twice
            self.assertEqual(_email_sent, [
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**',
                '*****@*****.**'
            ])
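
What this test pins down is idempotent retry: a run that dies halfway can be re-run without emailing anyone twice, because each successful send records a last_sending timestamp and recently contacted addresses are skipped. A minimal sketch of that guard (names are hypothetical, not Socorro's):

import datetime

from socorro.lib.datetimeutil import utc_now

def should_send(email, sent_log, cooldown=datetime.timedelta(days=1)):
    """True if this address has not been emailed within the cooldown."""
    last = sent_log.get(email)
    return last is None or utc_now() - last > cooldown

def send_all(addresses, sent_log, send):
    for email in addresses:
        if should_send(email, sent_log):
            send(email)                  # may raise; prior sends stay recorded
            sent_log[email] = utc_now()  # record success immediately
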
Example no. 52
    def setUp(self):
        super(TestIntegrationFTPScraper, self).setUp()
        cursor = self.conn.cursor()

        # Insert data
        now = utc_now()
        build_date = now - datetime.timedelta(days=30)
        sunset_date = now + datetime.timedelta(days=30)

        cursor.execute("""
            TRUNCATE products CASCADE;
            INSERT INTO products
            (product_name, sort, release_name)
            VALUES
            (
            'Firefox',
            1,
            'firefox'
            ),
            (
            'Fennec',
            1,
            'mobile'
            );
        """)

        cursor.execute("""
            TRUNCATE product_versions CASCADE;
            INSERT INTO product_versions
            (product_version_id, product_name, major_version, release_version,
            version_string, version_sort, build_date, sunset_date,
            featured_version, build_type)
            VALUES
            (
                1,
                'Firefox',
                '15.0',
                '15.0',
                '15.0a1',
                '000000150a1',
                '%(build_date)s',
                '%(sunset_date)s',
                't',
                'nightly'
            )
            ,(
                2,
                'Firefox',
                '24.5',
                '24.5.0esr',
                '24.5.0esr',
                '024005000x000',
                '%(build_date)s',
                '%(sunset_date)s',
                't',
                'esr'
            )
            ;
        """ % {
            "build_date": build_date,
            "sunset_date": sunset_date
        })

        cursor.execute("""
            TRUNCATE release_channels CASCADE;
            INSERT INTO release_channels
            (release_channel, sort)
            VALUES
            ('nightly', 1),
            ('aurora', 2),
            ('beta', 3),
            ('release', 4);
        """)

        cursor.execute("""
            TRUNCATE product_release_channels CASCADE;
            INSERT INTO product_release_channels
            (product_name, release_channel, throttle)
            VALUES
            ('Firefox', 'nightly', 1),
            ('Firefox', 'aurora', 1),
            ('Firefox', 'beta', 1),
            ('Firefox', 'release', 1),
            ('Fennec', 'release', 1),
            ('Fennec', 'beta', 1);
        """)

        self.conn.commit()
        self.mocked_session = requests.Session()

        def download(url):
            return self.mocked_session.get(url).content

        def skip_json_file(url):
            return False

        self.scrapers = ftpscraper.ScrapersMixin()
        self.scrapers.download = download
        self.scrapers.skip_json_file = skip_json_file
        self.scrapers.config = DotDict({'logger': mock.Mock()})
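
The last four lines are plain attribute injection: rather than subclassing or patching, the test hangs its own download and skip_json_file callables on the ScrapersMixin instance so the scraper walks the mocked session instead of the network. The same seam works for any one-off stub, for example (markup and behaviour here are illustrative):

scrapers = ftpscraper.ScrapersMixin()
scrapers.download = lambda url: '<html><body><a href="build1/">build1</a></body></html>'
scrapers.skip_json_file = lambda url: url.endswith('mozinfo.json')
scrapers.config = DotDict({'logger': mock.Mock()})
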
Example no. 53
    def setUp(self):
        super(IntegrationTestFeaturedVersionsAutomatic, self).setUp()
        self.__truncate()

        now = utc_now()
        build_date = now - datetime.timedelta(days=30)
        sunset_date = now + datetime.timedelta(days=30)

        execute_no_results(
            self.conn, """
            INSERT INTO products
            (product_name, sort, release_name)
            VALUES
            ('Firefox', 1, 'firefox'),
            ('Fennec', 1, 'mobile')
            """)
        execute_no_results(
            self.conn, """
            INSERT INTO product_versions
            (product_version_id, product_name, major_version, release_version,
            version_string, version_sort, build_date, sunset_date,
            featured_version, build_type)
            VALUES
            (
                1,
                'Firefox',
                '15.0',
                '15.0',
                '15.0a1',
                '000000150a1',
                %(build_date)s,
                %(sunset_date)s,
                true,
                'release'
            ),
            (
                2,
                'Firefox',
                '24.5',
                '24.5.0',
                '24.5.0',
                '024005000x000',
                %(build_date)s,
                %(sunset_date)s,
                true,
                'nightly'
            ),
            (
                3,
                'Firefox',
                '49.0.1',
                '49.0.1',
                '49.0.1',
                '000000150a1',
                %(build_date)s,
                %(sunset_date)s,
                false,
                'release'
            ),
            (
                4,
                'Firefox',
                '50.0b',
                '50.0b',
                '50.0b',
                '024005000x000',
                %(build_date)s,
                %(sunset_date)s,
                false,
                'beta'
            ),
            (
                5,
                'Firefox',
                '51.0a2',
                '51.0a2',
                '51.0a2',
                '000000150a1',
                %(build_date)s,
                %(sunset_date)s,
                false,
                'aurora'
            ),
            (
                6,
                'Firefox',
                '52.0a1',
                '52.0a1',
                '52.0a1',
                '024005000x000',
                %(build_date)s,
                %(sunset_date)s,
                false,
                'nightly'
            )
            """, {
                'build_date': build_date,
                'sunset_date': sunset_date
            })
        execute_no_results(
            self.conn, """
            INSERT INTO release_channels
            (release_channel, sort)
            VALUES
            ('nightly', 1),
            ('aurora', 2),
            ('beta', 3),
            ('release', 4)
            """)
        execute_no_results(
            self.conn, """
            INSERT INTO product_release_channels
            (product_name, release_channel, throttle)
            VALUES
            ('Firefox', 'nightly', 1),
            ('Firefox', 'aurora', 1),
            ('Firefox', 'beta', 1),
            ('Firefox', 'release', 1),
            ('Fennec', 'release', 1),
            ('Fennec', 'beta', 1)
            """)
Example no. 54
    def test_scrape_json_releases(self, requests_mocker):
        today = utc_now()

        requests_mocker.get(BASE_URL + 'firefox/',
                            text="""<html><body>
            <a href="candidates/">candidates</a>
            <a href="nightly/">nightly</a>
            </body></html>""")
        requests_mocker.get(BASE_URL + 'firefox/candidates/',
                            text="""<html><body>
            <a href="28.0-candidates/">28.0-candidiates</a>
            <a href="10.0b4-candidates/">10.0b4-candidiates</a>
            <a href="None-candidates/">None-candidiates</a>
            </body></html>""")
        requests_mocker.get(BASE_URL + 'firefox/candidates/28.0-candidates/',
                            text="""<html><body>
            <a href="build1/">build1</a>
            </body></html>""")
        requests_mocker.get(BASE_URL +
                            'firefox/candidates/28.0-candidates/build1/',
                            text="""<html><body>
            <a href="linux-i686/">linux-i686</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL + 'firefox/candidates/28.0-candidates/build1/linux-i686/',
            text="""<html><body>
            <a href="en-US/">en-US</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL +
            'firefox/candidates/28.0-candidates/build1/linux-i686/en-US/',
            text="""<html><body>
            <a href="firefox-28.0.json">firefox-28.0.json</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL +
            'firefox/candidates/28.0-candidates/build1/linux-i686/en-US/firefox-28.0.json',
            json={
                'buildid': '20140113161827',
                'moz_app_maxversion': '28.0.*',
                'moz_app_name': 'firefox',
                'moz_app_vendor': 'Mozilla',
                'moz_app_version': '28.0',
                'moz_pkg_platform': 'linux-i686',
                'moz_source_repo':
                'http://hg.mozilla.org/releases/mozilla-release',
                'moz_update_channel': 'release'
            })
        requests_mocker.get(BASE_URL + 'firefox/candidates/10.0b4-candidates/',
                            text="""<html><body>
            <a href="build1/">build1</a>
            </body></html>""")
        requests_mocker.get(BASE_URL +
                            'firefox/candidates/10.0b4-candidates/build1/',
                            text="""<html><body>
            <a href="linux-i686/">linux-i686</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL +
            'firefox/candidates/10.0b4-candidates/build1/linux-i686/',
            text="""<html><body>
            <a href="en-US/">en-US</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL +
            'firefox/candidates/10.0b4-candidates/build1/linux-i686/en-US/',
            text="""<html><body>
            <a href="firefox-10.0b4.json">firefox-10.0b4.json</a>
            <a href="firefox-10.0b4.en-US.linux-i686.mozinfo.json">
            firefox-10.0b4.en-US.linux-i686.mozinfo.json</a>
            <a href="JUNK.json">JUNK.json</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL +
            'firefox/candidates/10.0b4-candidates/build1/linux-i686/en-US/firefox-10.0b4.json',
            json={
                'buildid': '20140113161826',
                'moz_app_maxversion': '10.0.*',
                'moz_app_name': 'firefox',
                'moz_app_vendor': 'Mozilla',
                'moz_app_version': '27.0',
                'moz_pkg_platform': 'linux-i686',
                'moz_source_repo':
                'http://hg.mozilla.org/releases/mozilla-beta',
                'moz_update_channel': 'beta'
            })
        # Ignore unrecognized JSON files, see bug 1065071
        requests_mocker.get(
            BASE_URL +
            'firefox/candidates/10.0b4-candidates/build1/linux-i686/en-US/JUNK.json',
            json={
                'something': 'unexpected',
                'nothing': 'else'
            })

        requests_mocker.get(BASE_URL + 'firefox/candidates/None-candidates/',
                            text="""<html><body>
            <a href="build1/">build1</a>
            </body></html>""")
        requests_mocker.get(BASE_URL +
                            'firefox/candidates/None-candidates/build1/',
                            text="""<html><body>
            <a href="linux-i686/">linux-i686</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL + 'firefox/candidates/None-candidates/build1/linux-i686/',
            text="""<html><body>
            <a href="en-US/">en-US</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL +
            'firefox/candidates/None-candidates/build1/linux-i686/en-US/',
            text="""<html><body>
            <a href="None.json">None.json</a>
            </body></html>""")
        requests_mocker.get(
            BASE_URL +
            'firefox/candidates/None-candidates/build1/linux-i686/en-US/None.json',
            text=""" """)

        requests_mocker.get(BASE_URL + 'firefox/nightly/',
                            text="""<html><body>
            <a href="%(year)s/">%(year)s</a>
            </body></html>""" % {'year': today.strftime('%Y')})
        requests_mocker.get(today.strftime(BASE_URL + 'firefox/nightly/%Y/'),
                            text="""<html><body>
            <a href="%(month)s/">%(month)s</a>
            </body></html>""" % {'month': today.strftime('%m')})
        requests_mocker.get(today.strftime(BASE_URL +
                                           'firefox/nightly/%Y/%m/'),
                            text="""<html><body>
            <a href="%(date)s-03-02-03-mozilla-central/">txt</a>
            <a href="%(date)s-03-02-04-mozilla-central/">txt</a>
            """ % {'date': today.strftime('%Y-%m-%d')})
        requests_mocker.get(today.strftime(
            BASE_URL +
            'firefox/nightly/%Y/%m/%Y-%m-%d-03-02-03-mozilla-central/'),
                            text="""<html><body>
            <a href="firefox-30.0a1.en-US.linux-i686.json">txt</a>
            </body></html>""")
        requests_mocker.get(today.strftime(
            BASE_URL +
            'firefox/nightly/%Y/%m/%Y-%m-%d-03-02-03-mozilla-central/' +
            'firefox-30.0a1.en-US.linux-i686.json'),
                            json={
                                'as': '$(CC)',
                                'buildid': '20140205030203',
                                'cc': '/usr/bin/ccache ',
                                'cxx': '/usr/bin/ccache stuff',
                                'host_alias': 'x86_64-unknown-linux-gnu',
                                'host_cpu': 'x86_64',
                                'host_os': 'linux-gnu',
                                'host_vendor': 'unknown',
                                'ld': 'ld',
                                'moz_app_id':
                                '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
                                'moz_app_maxversion': '30.0a1',
                                'moz_app_name': 'firefox',
                                'moz_app_vendor': 'Mozilla',
                                'moz_app_version': '30.0a1',
                                'moz_pkg_platform': 'linux-i686',
                                'moz_source_repo':
                                'https://hg.mozilla.org/mozilla-central',
                                'moz_source_stamp': '1f170f9fead0',
                                'moz_update_channel': 'nightly',
                                'target_alias': 'i686-pc-linux',
                                'target_cpu': 'i686',
                                'target_os': 'linux-gnu',
                                'target_vendor': 'pc'
                            })
        requests_mocker.get(today.strftime(
            BASE_URL +
            'firefox/nightly/%Y/%m/%Y-%m-%d-03-02-04-mozilla-central/'),
                            text="""<html><body>
            <a href="firefox-30.0a2.en-US.linux-i686.json">txt</a>
            </body></html>""")
        requests_mocker.get(today.strftime(
            BASE_URL +
            'firefox/nightly/%Y/%m/%Y-%m-%d-03-02-04-mozilla-central/' +
            'firefox-30.0a2.en-US.linux-i686.json'),
                            json={
                                'as': '$(CC)',
                                'buildid': '20140205030204',
                                'cc': '/usr/bin/ccache stuff',
                                'cxx': '/usr/bin/ccache stuff',
                                'host_alias': 'x86_64-unknown-linux-gnu',
                                'host_cpu': 'x86_64',
                                'host_os': 'linux-gnu',
                                'host_vendor': 'unknown',
                                'ld': 'ld',
                                'moz_app_id':
                                '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
                                'moz_app_maxversion': '30.0a2',
                                'moz_app_name': 'firefox',
                                'moz_app_vendor': 'Mozilla',
                                'moz_app_version': '30.0a2',
                                'moz_pkg_platform': 'linux-i686',
                                'moz_source_repo':
                                'https://hg.mozilla.org/mozilla-central',
                                'moz_source_stamp': '1f170f9fead0',
                                'moz_update_channel': 'nightly',
                                'target_alias': 'i686-pc-linux',
                                'target_cpu': 'i686',
                                'target_os': 'linux-gnu',
                                'target_vendor': 'pc'
                            })

        config_manager = self._setup_config_manager_firefox()
        with config_manager.context() as config:
            tab = CronTabberApp(config)
            tab.run_all()

            information = self._load_structure()
            assert information['ftpscraper']
            assert not information['ftpscraper']['last_error']
            assert information['ftpscraper']['last_success']

            config.logger.warning.assert_any_call(
                'Unable to JSON parse content %r', ' ', exc_info=True)

            config.logger.warning.assert_any_call(
                'warning, unsupported JSON file: %s',
                BASE_URL + 'firefox/candidates/'
                '10.0b4-candidates/build1/linux-i686/en-US/JUNK.json')

        cursor = self.conn.cursor()
        columns = 'product_name', 'build_id', 'build_type'
        cursor.execute("""
            select %s
            from releases_raw
        """ % ','.join(columns))
        builds = [dict(zip(columns, row)) for row in cursor.fetchall()]
        build_ids = dict((str(x['build_id']), x) for x in builds)

        assert '20140113161827' in build_ids
        assert '20140113161826' in build_ids
        assert '20140205030203' in build_ids
        assert len(build_ids) == 4
        expected = [{
            'build_id': 20140113161827,
            'product_name': 'firefox',
            'build_type': 'release'
        }, {
            'build_id': 20140113161827,
            'product_name': 'firefox',
            'build_type': 'beta'
        }, {
            'build_id': 20140113161826,
            'product_name': 'firefox',
            'build_type': 'beta'
        }, {
            'build_id': 20140205030203,
            'product_name': 'firefox',
            'build_type': 'nightly'
        }, {
            'build_id': 20140205030204,
            'product_name': 'firefox',
            'build_type': 'aurora'
        }]
        assert builds == expected
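
One detail worth noticing in the mock setup above: the nightly URLs are produced by calling strftime on the whole URL template, so the %Y/%m/%d directives expand in place while the literal parts pass through untouched. For example (the base URL here is illustrative):

import datetime

BASE_URL = 'https://archive.example.com/pub/'
today = datetime.datetime(2014, 2, 5)
url = today.strftime(BASE_URL + 'firefox/nightly/%Y/%m/')
# -> 'https://archive.example.com/pub/firefox/nightly/2014/02/'
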
Example no. 55
    def _run_one(self, job_class, config, force=False):
        _debug = self.config.logger.debug
        seconds = convert_frequency(config.frequency)
        time_ = config.time
        if not force:
            if not self.time_to_run(job_class, time_):
                _debug("skipping %r because it's not time to run", job_class)
                return
            ok, dependency_error = self.check_dependencies(job_class)
            if not ok:
                _debug("skipping %r dependencies aren't met [%s]", job_class,
                       dependency_error)
                return

        _debug('about to run %r', job_class)
        app_name = job_class.app_name
        info = self.database.get(app_name)

        last_success = None
        now = utc_now()
        try:
            t0 = time.time()
            for last_success in self._run_job(job_class, config, info):
                t1 = time.time()
                _debug('successfully ran %r on %s', job_class, last_success)
                self._remember_success(job_class, last_success, t1 - t0)
                # _run_job() returns a generator, so we don't know how
                # many times this will loop. Anyway, we need to reset the
                # 't0' for the next loop if there is one.
                t0 = time.time()
            exc_type = exc_value = exc_tb = None
        except:
            t1 = time.time()
            exc_type, exc_value, exc_tb = sys.exc_info()

            # When debugging tests that mock logging, uncomment the raise
            # below; otherwise exc_info=True won't compute and record what
            # the exception was.
            #raise

            if self.config.sentry and self.config.sentry.dsn:
                try:
                    client = raven.Client(dsn=self.config.sentry.dsn)
                    identifier = client.get_ident(client.captureException())
                    self.config.logger.info(
                        'Error captured in Sentry. Reference: %s' % identifier)
                except Exception:
                    # A catch-all except like this is evil, but failing to
                    # send the exception to Sentry matters far less than
                    # letting crontabber carry on. This is especially true
                    # considering that raven depends on network I/O.
                    _debug('Failed to capture and send error to Sentry',
                           exc_info=True)

            _debug('error when running %r on %s',
                   job_class,
                   last_success,
                   exc_info=True)
            self._remember_failure(job_class, t1 - t0, exc_type, exc_value,
                                   exc_tb)

        finally:
            self._log_run(job_class, seconds, time_, last_success, now,
                          exc_type, exc_value, exc_tb)
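
The try block above times each unit of work separately because _run_job is a generator: every yield marks one completed chunk, and t0 is reset so the next chunk gets its own duration. The same pattern in isolation (the work generator is hypothetical):

import time

def work():
    for chunk in ('a', 'b', 'c'):
        # ... process the chunk ...
        yield chunk

t0 = time.time()
for finished in work():
    duration = time.time() - t0
    print('finished %r in %.3fs' % (finished, duration))
    t0 = time.time()  # reset the clock for the next chunk
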
Example no. 56
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os

from django.conf import settings

from crashstats.crashstats.models import MissingProcessedCrash
from crashstats.crashstats.management.commands.verifyprocessed import Command
from socorro.lib.datetimeutil import utc_now
from socorro.lib.ooid import create_new_ooid, date_from_ooid

TODAY = utc_now().strftime("%Y%m%d")
BUCKET_NAME = os.environ.get("resource.boto.bucket_name")


def get_small_entropy(self):
    """Returns small entropy so we're not spending ages cycling through things."""
    yield from ["000", "111", "222"]


class TestVerifyProcessed:
    def fetch_crashids(self):
        return MissingProcessedCrash.objects.order_by("crash_id").values_list(
            "crash_id", flat=True)

    def create_raw_crash_in_s3(self, boto_helper, crash_id):
        boto_helper.upload_fileobj(
            bucket_name=BUCKET_NAME,
            key="v2/raw_crash/%s/%s/%s" % (crash_id[0:3], TODAY, crash_id),
Example no. 57
    def test_basic_run_no_errors(self):
        # a mutable where commands sent are stored
        commands_sent = []
        self.Popen.side_effect = functools.partial(
            mocked_Popen,
            _commands_sent=commands_sent,
            _exit_code=0,
            _stdout='Bla bla',
            _stderr='',
        )

        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = crontabber.CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['modulelist']
            #print information['modulelist']['last_error']
            #print information['modulelist']['last_error']['traceback']
            if information['modulelist']['last_error']:
                raise AssertionError(information['modulelist']['last_error'])

            assert len(commands_sent) == 3
            first = commands_sent[0]
            second = commands_sent[1]
            third = commands_sent[2]
            yesterday = utc_now()
            yesterday -= datetime.timedelta(days=1)
            yesterday_fmt = yesterday.strftime('%Y%m%d')
            self.assertTrue(
                'PIG_CLASSPATH=/some/place pig' in first
            )
            self.assertTrue(
                '-param start_date=%s' % yesterday_fmt in first
            )
            self.assertTrue(
                '-param end_date=%s' % yesterday_fmt in first
            )
            self.assertTrue(
                '/some/place/modulelist.pig' in first
            )

            self.assertTrue(
                'PIG_CLASSPATH=/some/place hadoop fs -getmerge' in second
            )
            self.assertTrue(
                'modulelist-%s-%s' % (yesterday_fmt, yesterday_fmt) in second
            )
            self.assertTrue(
                '/some/other/place/%s-modulelist.txt' % (yesterday_fmt,)
                in second
            )

            self.assertTrue(
                'PIG_CLASSPATH=/some/place hadoop fs ' in third
            )
            self.assertTrue(
                'modulelist-%s-%s' % (yesterday_fmt, yesterday_fmt) in third
            )

            # note that all jobs spew out 'Bla bla' on stdout
            config.logger.info.assert_called_with('Bla bla')
Example no. 58
    def setUp(self):
        """ Populate tables with fake data """
        super(TestUtil, self).setUp()

        cursor = self.connection.cursor()

        self.now = datetimeutil.utc_now()
        now = self.now.date()

        cursor.execute("""
            INSERT INTO products
            (product_name, sort, rapid_release_version, release_name)
            VALUES
            (
                'Firefox',
                1,
                '8.0',
                'firefox'
            ),
            (
                'Fennec',
                2,
                '11.0',
                'mobile'
            ),
            (
                'Thunderbird',
                3,
                '10.0',
                'thunderbird'
            ),
            (
                'WaterWolf',
                4,
                '100.0',
                'waterwolf'
            );
        """)

        cursor.execute("""
            INSERT INTO release_channels
            (release_channel, sort)
            VALUES
            (
                'Release', 1
            ),
            (
                'Beta', 2
            );
        """)

        cursor.execute("""
            INSERT INTO product_release_channels
            (product_name, release_channel, throttle)
            VALUES
            (
                'Firefox', 'Release', '0.1'
            ),
            (
                'Fennec', 'Release', '0.1'
            ),
            (
                'Fennec', 'Beta', '1.0'
            ),
            (
                'Thunderbird', 'Release', '0.1'
            ),
            (
                'WaterWolf', 'Beta', '0.1'
            );
        """)

        cursor.execute("""
            INSERT INTO product_versions
            (product_version_id,
             product_name, major_version, release_version, version_string,
             build_date, sunset_date, featured_version, build_type,
             version_sort, is_rapid_beta, rapid_beta_id)
            VALUES
            (
                1,
                'Firefox',
                '8.0',
                '8.0',
                '8.0',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '0008000',
                False,
                NULL
            ),
            (
                2,
                'Fennec',
                '11.0',
                '11.0',
                '11.0.1',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '0011001',
                False,
                NULL
            ),
            (
                3,
                'Fennec',
                '12.0',
                '12.0',
                '12.0b1',
                '%(now)s',
                '%(now)s',
                False,
                'Beta',
                '00120b1',
                False,
                NULL
            ),
            (
                4,
                'Thunderbird',
                '10.0',
                '10.0',
                '10.0.2b',
                '%(now)s',
                '%(now)s',
                False,
                'Release',
                '001002b',
                False,
                NULL
            ),
            (
                5,
                'WaterWolf',
                '3.0b',
                '3.0b',
                '3.0b',
                '%(now)s',
                '%(now)s',
                False,
                'Beta',
                '003002b000',
                True,
                4
            ),
            (
                6,
                'WaterWolf',
                '3.0b',
                '3.0b',
                '3.0b1',
                '%(now)s',
                '%(now)s',
                False,
                'Beta',
                '003002b001',
                False,
                5
            ),
            (
                7,
                'WaterWolf',
                '3.0b',
                '3.0b',
                '3.0b2',
                '%(now)s',
                '%(now)s',
                False,
                'Beta',
                '003002b002',
                False,
                5
            );
        """ % {'now': now})

        cursor.execute("""
            INSERT INTO product_version_builds
            (build_id, platform, product_version_id)
            VALUES
            (1, 'Linux', 1),
            (2, 'Linux', 2),
            (3, 'Linux', 3),
            (4, 'Linux', 4),
            (5, 'Linux', 5),
            (6, 'Linux', 6),
            (7, 'Linux', 7);
        """)

        self.connection.commit()
Example no. 59
def main(argv=None):
    parser = argparse.ArgumentParser(formatter_class=WrappedTextHelpFormatter,
                                     description=DESCRIPTION.strip())
    parser.add_argument("--host",
                        default=DEFAULT_HOST,
                        help="host for system to fetch crashids from")
    parser.add_argument(
        "--date",
        default="",
        help=
        ('date to pull crash ids from as YYYY-MM-DD, "yesterday", "today", or "now"; '
         'defaults to "yesterday"'),
    )
    parser.add_argument(
        "--signature-contains",
        default="",
        dest="signature",
        help="signature contains this string",
    )
    parser.add_argument(
        "--product",
        default="Firefox",
        help='Product to fetch for or "all" for all; defaults to "Firefox"',
    )
    parser.add_argument("--url",
                        default="",
                        help="Super Search url to base query on")
    parser.add_argument(
        "--num",
        default=100,
        help='number of crash ids you want or "all" for all of them',
    )
    parser.add_argument("-v",
                        "--verbose",
                        action="store_true",
                        help="increase verbosity of output")

    if argv is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(argv)

    host = args.host.rstrip("/")

    # Start with params from --url value or empty dict
    if args.url:
        params = extract_params(args.url)
    else:
        params = {}

    params["_columns"] = "uuid"
    params["product"] = params.get("product", args.product)
    if params["product"] == "all":
        # If the user specified "all", don't pass a product filter at all
        del params["product"]

    # Override with date if specified
    if "date" not in params or args.date:
        datestamp = args.date or "yesterday"

        if datestamp == "now":
            # Create a start -> end window with wiggle room on either side
            # to absorb time differences between client and server, yet
            # big enough to pick up results even on stage, where little
            # gets processed
            enddate = utc_now() + datetime.timedelta(hours=1)
            startdate = enddate - datetime.timedelta(hours=12)

            # For "now", we want precision so we don't hit cache and we want to
            # sort by reverse date so that we get the most recent crashes
            startdate = startdate.strftime("%Y-%m-%dT%H:%M:%S.000Z")
            enddate = enddate.strftime("%Y-%m-%dT%H:%M:%S.000Z")
            params["_sort"] = "-date"

        else:
            if datestamp == "today":
                startdate = utc_now()
            elif datestamp == "yesterday":
                startdate = utc_now() - datetime.timedelta(days=1)
            else:
                startdate = datetime.datetime.strptime(datestamp, "%Y-%m-%d")

            enddate = startdate + datetime.timedelta(days=1)

            # For "today", "yesterday", and other dates, we want a day
            # precision so that Socorro can cache it
            startdate = startdate.strftime("%Y-%m-%d")
            enddate = enddate.strftime("%Y-%m-%d")

        params["date"] = [">=%s" % startdate, "<%s" % enddate]

    # Override with signature-contains if specified
    sig = args.signature
    if sig:
        params["signature"] = "~" + sig

    num_results = args.num
    if num_results == "all":
        num_results = INFINITY

    else:
        try:
            num_results = int(num_results)
        except ValueError:
            print('num needs to be an integer or "all"')
            return 1

    if args.verbose:
        print("Params: %s" % params)

    for crashid in fetch_crashids(host, params, num_results):
        print(crashid)

    return 0
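
For the "now" branch above, the resulting window runs from eleven hours in the past to one hour in the future, which absorbs client/server clock skew while staying wide enough to find results. Checking the arithmetic with an illustrative fixed clock:

import datetime

now = datetime.datetime(2024, 5, 1, 12, 0, 0)
enddate = now + datetime.timedelta(hours=1)          # 2024-05-01 13:00
startdate = enddate - datetime.timedelta(hours=12)   # 2024-05-01 01:00
assert enddate - startdate == datetime.timedelta(hours=12)
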
Example no. 60
def main(argv=None):
    parser = argparse.ArgumentParser(
        formatter_class=WrappedTextHelpFormatter,
        description=DESCRIPTION.strip(),
    )
    parser.add_argument('--host',
                        default=DEFAULT_HOST,
                        help='host for system to fetch crashids from')
    parser.add_argument(
        '--date',
        default='',
        help=
        ('date to pull crash ids from as YYYY-MM-DD, "yesterday", "today", or "now"; '
         'defaults to "yesterday"'))
    parser.add_argument('--signature-contains',
                        default='',
                        dest='signature',
                        help='signature contains this string')
    parser.add_argument('--url',
                        default='',
                        help='Super Search url to base query on')
    parser.add_argument(
        '--num',
        default=100,
        help='number of crash ids you want or "all" for all of them')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='increase verbosity of output')

    if argv is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(argv)

    host = args.host.rstrip('/')

    # Start with params from --url value or product=Firefox
    if args.url:
        params = extract_params(args.url)
    else:
        params = {'product': 'Firefox'}

    params['_columns'] = 'uuid'

    # Override with date if specified
    if 'date' not in params or args.date:
        datestamp = args.date or 'yesterday'

        if datestamp == 'now':
            # Create a start -> end window with wiggle room on either side
            # to absorb time differences between client and server, yet
            # big enough to pick up results even on stage, where little
            # gets processed
            enddate = utc_now() + datetime.timedelta(hours=1)
            startdate = enddate - datetime.timedelta(hours=12)

            # For "now", we want precision so we don't hit cache and we want to
            # sort by reverse date so that we get the most recent crashes
            startdate = startdate.strftime('%Y-%m-%dT%H:%M:%S.000Z')
            enddate = enddate.strftime('%Y-%m-%dT%H:%M:%S.000Z')
            params['_sort'] = '-date'

        else:
            if datestamp == 'today':
                startdate = utc_now()
            elif datestamp == 'yesterday':
                startdate = utc_now() - datetime.timedelta(days=1)
            else:
                startdate = datetime.datetime.strptime(datestamp, '%Y-%m-%d')

            enddate = startdate + datetime.timedelta(days=1)

            # For "today", "yesterday", and other dates, we want a day
            # precision so that Socorro can cache it
            startdate = startdate.strftime('%Y-%m-%d')
            enddate = enddate.strftime('%Y-%m-%d')

        params['date'] = ['>=%s' % startdate, '<%s' % enddate]

    # Override with signature-contains if specified
    sig = args.signature
    if sig:
        params['signature'] = '~' + sig

    num_results = args.num
    if num_results == 'all':
        num_results = INFINITY

    else:
        try:
            num_results = int(num_results)
        except ValueError:
            print('num needs to be an integer or "all"')
            return 1

    if args.verbose:
        print('Params: %s' % params)

    for crashid in fetch_crashids(host, params, num_results):
        print(crashid)

    return 0