Example #1
    def test_run_job_with_mocked_data_with_wrong_products(self):
        config_manager = self._setup_config_manager(
            product='Thunderbird,SeaMonkey',
            version='1.0,2.0',
            public_output_path=False
        )
        self._insert_waterwolf_mock_data()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['daily-url']
            assert not information['daily-url']['last_error']
            assert information['daily-url']['last_success']

            # this should only have created the private .csv.gz file
            now = datetime.datetime.utcnow() - datetime.timedelta(days=1)

            private = now.strftime('%Y%m%d-crashdata.csv.gz')
            public = now.strftime('%Y%m%d-pub-crashdata.csv.gz')
            ok_(private in os.listdir(self.tempdir))
            ok_(public not in os.listdir(self.tempdir))

            private_path = os.path.join(self.tempdir, private)
            f = gzip.open(private_path)
            try:
                eq_(f.read(), '')
            finally:
                f.close()
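
All of these examples assert against the dict returned by the test helper _load_structure(), which reads crontabber's recorded job state. A minimal sketch of the assumed shape of that structure, with placeholder values; the field names are inferred from how the examples use them, not taken from crontabber's actual schema:

# Sketch only: an illustration of the state dict the assertions above expect.
example_structure = {
    'daily-url': {
        'last_error': {},          # falsy when the last run succeeded
        'last_success': '2014-01-01T00:00:00+00:00',
        'next_run': '2014-01-02T00:00:00+00:00',
    },
}

assert example_structure['daily-url']
assert not example_structure['daily-url']['last_error']
assert example_structure['daily-url']['last_success']
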
Example #2
    def test_run(self):
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['suspicious-crashes']
            assert not information['suspicious-crashes']['last_error']
            assert information['suspicious-crashes']['last_success']

            cursor = self.conn.cursor()

            cursor.execute("""
                SELECT signatures.signature, scs.report_date
                FROM suspicious_crash_signatures scs
                JOIN signatures ON scs.signature_id=signatures.signature_id
            """)

            count = 0
            today = (utc_now() - datetime.timedelta(1)).date()
            for row in cursor.fetchall():
                eq_('sig', row[0])
                eq_(today, row[1].date())
                count += 1

            eq_(1, count)
Example #3
    def test_basic_run_job_without_reports(self):
        config_manager = self._setup_config_manager(3)

        cursor = self.conn.cursor()
        cursor.execute('select count(*) from reports')
        count, = cursor.fetchone()
        assert count == 0, "reports table not cleaned"
        cursor.execute('select count(*) from bugs')
        count, = cursor.fetchone()
        assert count == 0, "'bugs' table not cleaned"
        cursor.execute('select count(*) from bug_associations')
        count, = cursor.fetchone()
        assert count == 0, "'bug_associations' table not cleaned"

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['bugzilla-associations']
            assert not information['bugzilla-associations']['last_error']
            assert information['bugzilla-associations']['last_success']

        # now, because there were no matching signatures in the reports
        # table, all bugs are rejected
        cursor.execute('select count(*) from bugs')
        count, = cursor.fetchone()
        ok_(not count)
        cursor.execute('select count(*) from bug_associations')
        count, = cursor.fetchone()
        ok_(not count)
Example #4
    def test_run_job_with_mocked_data(self):
        config_manager = self._setup_config_manager()
        self._insert_waterwolf_mock_data()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['daily-url']
            assert not information['daily-url']['last_error']
            assert information['daily-url']['last_success']

            # this should have created two .csv.gz files
            now = datetime.datetime.utcnow() - datetime.timedelta(days=1)

            private = now.strftime('%Y%m%d-crashdata.csv.gz')
            public = now.strftime('%Y%m%d-pub-crashdata.csv.gz')
            ok_(private in os.listdir(self.tempdir))
            ok_(public in os.listdir(self.tempdir))

            private_path = os.path.join(self.tempdir, private)
            f = gzip.open(private_path)
            try:
                content = f.read()
                ok_(content)
                lines = content.splitlines()
                header = lines[0]
                payload = lines[1:]
                eq_(header.split('\t')[0], 'signature')
                eq_(header.split('\t')[1], 'url')
                urls = [x.split('\t')[1] for x in payload]
                ok_('http://p**n.xxx' in urls)
                signatures = [x.split('\t')[0] for x in payload]
                eq_(sorted(signatures), [
                    'FakeSignature1', 'FakeSignature2', 'FakeSignature3',
                    'FakeSignature4'
                ])
            finally:
                f.close()

            public_path = os.path.join(self.tempdir, public)
            f = gzip.open(public_path)
            try:
                content = f.read()
                ok_(content)
                lines = content.splitlines()
                header = lines[0]
                payload = lines[1:]
                eq_(header.split('\t')[0], 'signature')
                eq_(header.split('\t')[1], 'URL (removed)')
                urls = [x.split('\t')[1] for x in payload]
                ok_('http://p**n.xxx' not in urls)
                signatures = [x.split('\t')[0] for x in payload]
                eq_(sorted(signatures), [
                    'FakeSignature1', 'FakeSignature2', 'FakeSignature3',
                    'FakeSignature4'
                ])
            finally:
                f.close()
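
The generated files are plain tab-separated values inside a gzip container, so the same checks can also be written with the csv module instead of splitting each line on '\t' by hand. A sketch under that assumption; the path is a placeholder and the original tests do not use csv:

import csv
import gzip

# Placeholder path; in the tests above the real file lives in self.tempdir.
path = '/tmp/20140101-crashdata.csv.gz'
f = gzip.open(path)
try:
    reader = csv.reader(f, delimiter='\t')
    header = next(reader)               # e.g. ['signature', 'url', ...]
    rows = list(reader)
    signatures = sorted(row[0] for row in rows)
    urls = [row[1] for row in rows]
finally:
    f.close()
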
Example #5
    def test_basic_run(self):
        cur = self.conn.cursor()
        # Insert test rows.
        statement = """
            INSERT INTO raw_adi
            (date, product_name, adi_count) VALUES
            (%(first)s, 'WinterFox', 11),
            (%(second)s, 'WinterFox', 23)
        """
        second = utc_now().date()
        first = second - datetime.timedelta(days=1)
        cur.execute(statement, {'first': first, 'second': second})
        self.conn.commit()

        # Run the crontabber job to remove old rows.
        config_manager = self._setup_config_manager(days_to_keep=1)
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        assert information['clean-raw-adi']
        assert not information['clean-raw-adi']['last_error']
        assert information['clean-raw-adi']['last_success']

        # Ensure test row was removed
        cur.execute("""
            SELECT date FROM raw_adi
        """)
        result, = cur.fetchall()
        report_date = result[0]
        eq_(report_date, second)
Example #6
    def test_reprocessing_exception(self):
        config_manager = self._setup_config_manager()

        cursor = self.conn.cursor()

        # Test exception handling
        cursor.execute("""
            alter table reprocessing_jobs RENAME TO test_reprocessing_jobs
        """)
        # Need to commit this in order to test the exception handling
        # because the crontabber invocation happens in a different Pg
        # transaction.
        self.conn.commit()

        try:
            with config_manager.context() as config:
                tab = CronTabber(config)
                tab.run_all()

            state = tab.job_state_database['reprocessing-jobs']
            res_expected = "<class 'psycopg2.ProgrammingError'>"
            res = state['last_error']['type']
            eq_(res, res_expected)

        finally:
            # Change table name back
            cursor.execute("""
                alter table test_reprocessing_jobs RENAME TO reprocessing_jobs
            """)
            self.conn.commit()
Example #7
    def test_run_job_with_no_data_with_ssh_errors(self):
        config_manager = self._setup_config_manager(
          public_output_path='',
          private_user='******',
          private_server='secure.mozilla.org',
          private_location='/var/data/',
          private_ssh_command='chmod 0640 /var/data/*',
        )
        self._insert_waterwolf_mock_data()

        # any mutable object, so we can keep track of the number of times
        # the side_effect function is called
        calls = []

        def comm():
            if calls:
                # some errors
                return '', "CRAP!"
            else:
                calls.append(1)
                return '', ''

        self.Popen().communicate.side_effect = comm

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['daily-url']
            assert not information['daily-url']['last_error']
            assert information['daily-url']['last_success']

            ok_(config.logger.warn.called)
Example #8
    def test_run_job_with_reports_with_existing_bugs_different(self, requests_mocker):
        """Verify that an association to a signature that no longer is part
        of the crash signatures list gets removed.
        """
        requests_mocker.get(BUGZILLA_BASE_URL, json=SAMPLE_BUGZILLA_RESULTS)
        config_manager = self._setup_config_manager(3)
        cursor = self.conn.cursor()

        cursor.execute("""
            insert into bug_associations (bug_id, signature)
            values (8, '@different');
        """)
        self.conn.commit()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['bugzilla-associations']
            assert not information['bugzilla-associations']['last_error']
            assert information['bugzilla-associations']['last_success']

        cursor.execute(
            'select signature from bug_associations where bug_id = 8'
        )
        associations = cursor.fetchall()
        # The previous association, to signature '@different' that is not in
        # crash signatures, is now missing.
        assert ('@different',) not in associations
Example #9
    def test_reprocessing(self):
        """ Simple test of reprocessing"""
        config_manager = self._setup_config_manager()

        cursor = self.conn.cursor()

        # Queue one crash ID in the reprocessing_jobs table
        cursor.execute("""
            INSERT into reprocessing_jobs
              (crash_id)
            VALUES
             ('13c4a348-5d04-11e3-8118-d231feb1dc81')
        """)

        # We have to do this here to accommodate separate crontabber processes
        self.conn.commit()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = tab.job_state_database['reprocessing-jobs']
            assert not information['last_error']
            assert information['last_success']

        cursor = self.conn.cursor()
        cursor.execute('select count(*) from reprocessing_jobs')

        res_expected = 0
        res, = cursor.fetchone()
        eq_(res, res_expected)
Example #10
    def test_failing_pig_job(self):
        # a mutable list where the commands sent are stored
        commands_sent = []
        self.Popen.side_effect = functools.partial(
            mocked_Popen,
            _commands_sent=commands_sent,
            _exit_code=1,
            _stdout='',
            _stderr='First command failed :(',
        )

        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['modulelist']
            assert information['modulelist']['last_error']
            _traceback = information['modulelist']['last_error']['traceback']
            ok_('pig run failed' in _traceback)
            # the other two were cancelled
            eq_(len(commands_sent), 1)
            config.logger.error.assert_has_calls(
                [mock.call('First command failed :(')])
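
The mocked_Popen helper that self.Popen.side_effect is bound to above is defined elsewhere in the test module and is not part of this snippet. A self-contained sketch of what such a helper could look like, given how functools.partial is used at the call site; the keyword arguments mirror that call and everything else is an assumption:

import mock

def mocked_Popen(command, _commands_sent=None, _exit_code=0,
                 _stdout='', _stderr='', **kwargs):
    # Record the command the job tried to run so the test can count them.
    if _commands_sent is not None:
        _commands_sent.append(command)
    # Return a process-like mock: communicate() yields the configured
    # stdout/stderr and the exit code is exposed the way subprocess does it.
    process = mock.MagicMock()
    process.communicate.return_value = (_stdout, _stderr)
    process.returncode = _exit_code
    process.wait.return_value = _exit_code
    return process
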
Example #11
    def test_reprocessing_exception(self):
        config_manager = self._setup_config_manager()

        cursor = self.conn.cursor()

        # Test exception handling
        cursor.execute("""
            alter table reprocessing_jobs RENAME TO test_reprocessing_jobs
        """)
        # Need to commit this in order to test the exception handling
        # because the crontabber invocation happens in a different Pg
        # transaction.
        self.conn.commit()

        try:
            with config_manager.context() as config:
                tab = CronTabber(config)
                tab.run_all()

            state = tab.job_state_database['reprocessing-jobs']
            res_expected = "<class 'psycopg2.ProgrammingError'>"
            res = state['last_error']['type']
            eq_(res, res_expected)

        finally:
            # Change table name back
            cursor.execute("""
                alter table test_reprocessing_jobs RENAME TO reprocessing_jobs
            """)
            self.conn.commit()
Example #12
    def test_symbols_unpack_zip_file(self):

        source_file = os.path.join(
            self.temp_source_directory,
            os.path.basename(ZIP_FILE)
        )
        shutil.copy(ZIP_FILE, source_file)

        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        information = self._load_structure()
        assert information['symbols-unpack']
        assert not information['symbols-unpack']['last_error']
        assert information['symbols-unpack']['last_success']

        ok_(os.listdir(self.temp_destination_directory), 'empty')
        # there should now be a directory named `sample-<today's date>`
        destination_dir = self.temp_destination_directory
        ok_(os.path.isdir(destination_dir))
        # and it should contain files
        ok_(os.listdir(destination_dir))
        # and the original should now have been deleted
        ok_(not os.path.isfile(source_file))
Example #13
    def test_other_indices_are_not_deleted(self):
        """Verify that non-week-based indices are not removed. For example,
        the socorro_email index should not be deleted by the cron job.
        """
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            # clear the indices cache so the index is created on every test
            self.storage.indices_cache = set()

            es = self.storage.es

            # Create the socorro emails index.
            self.storage.create_emails_index()

            # This will raise an error if the index was not correctly created.
            es.status('socorro_emails')

            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['elasticsearch-cleanup']
            assert not information['elasticsearch-cleanup']['last_error']
            assert information['elasticsearch-cleanup']['last_success']

            # Verify the email index is still there. This will raise an error
            # if the index does not exist.
            es.status('socorro_emails')
Example #14
    def test_symbols_unpack_other_files(self):

        source_file = os.path.join(
            self.temp_source_directory,
            os.path.basename(TAR_FILE)
        )
        shutil.copy(TAR_FILE, source_file)

        source_file = os.path.join(
            self.temp_source_directory,
            os.path.basename(TGZ_FILE)
        )
        shutil.copy(TGZ_FILE, source_file)

        source_file = os.path.join(
            self.temp_source_directory,
            os.path.basename(TARGZ_FILE)
        )
        shutil.copy(TARGZ_FILE, source_file)

        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        information = self._load_structure()
        assert information['symbols-unpack']
        assert not information['symbols-unpack']['last_error']
        assert information['symbols-unpack']['last_success']

        ok_(os.listdir(self.temp_destination_directory), 'empty')
Example #15
    def test_other_indices_are_not_deleted(self):
        """Verify that non-week-based indices are not removed. For example,
        the socorro_email index should not be deleted by the cron job.
        """
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            # clear the indices cache so the index is created on every test
            self.storage.indices_cache = set()

            es = self.storage.es

            # Create the socorro emails index.
            self.storage.create_emails_index()

            # This will raise an error if the index was not correctly created.
            es.status('socorro_emails')

            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['elasticsearch-cleanup']
            assert not information['elasticsearch-cleanup']['last_error']
            assert information['elasticsearch-cleanup']['last_success']

            # Verify the email index is still there. This will raise an error
            # if the index does not exist.
            es.status('socorro_emails')
Example #16
    def test_run_drop_old_partitions(self):
        cur = self.conn.cursor()

        # Ensure test table is present.
        statement = """
        SELECT COUNT(*) FROM information_schema.tables
        WHERE table_name = 'phrotest_20120102';
        """
        cur.execute(statement)
        result = cur.fetchone()
        eq_(result[0], 1)

        # Run the crontabber job to remove the test table.
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        assert information['drop-old-partitions']
        assert not information['drop-old-partitions']['last_error']
        assert information['drop-old-partitions']['last_success']

        # Ensure test table was removed.
        statement = """
        SELECT COUNT(*) FROM information_schema.tables
        WHERE table_name = 'phrotest_20120102';
        """
        cur.execute(statement)
        result = cur.fetchone()
        eq_(result[0], 0)
Example #17
    def test_symbols_unpack_subdirectories(self):
        root_dir = self.temp_source_directory
        foo_dir = os.path.join(root_dir, 'foo')
        os.mkdir(foo_dir)
        bar_dir = os.path.join(foo_dir, 'bar')
        os.mkdir(bar_dir)
        source_file = os.path.join(bar_dir, os.path.basename(ZIP_FILE))
        shutil.copy(ZIP_FILE, source_file)

        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        information = self._load_structure()
        assert information['symbols-unpack']
        assert not information['symbols-unpack']['last_error']
        assert information['symbols-unpack']['last_success']

        ok_(os.listdir(self.temp_destination_directory), 'empty')
        # there should now be a directory named `sample-<today's date>`
        destination_dir = self.temp_destination_directory
        ok_(os.path.isdir(destination_dir))
        # and it should contain files
        ok_(os.listdir(destination_dir))
        # and the original should now have been deleted
        ok_(not os.path.isfile(source_file))

        # because there was nothing else in that directory, or its parent
        # those directories should be removed now
        ok_(not os.path.isdir(bar_dir))
        ok_(not os.path.isdir(foo_dir))
        assert os.path.isdir(root_dir)
Example #18
    def test_run_drop_old_partitions(self):
        cur = self.conn.cursor()

        # Ensure test table is present.
        statement = """
        SELECT COUNT(*) FROM information_schema.tables
        WHERE table_name = 'phrotest_20120102';
        """
        cur.execute(statement)
        result = cur.fetchone()
        assert result[0] == 1

        # Run the crontabber job to remove the test table.
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        assert information['drop-old-partitions']
        assert not information['drop-old-partitions']['last_error']
        assert information['drop-old-partitions']['last_success']

        # Ensure test table was removed.
        statement = """
        SELECT COUNT(*) FROM information_schema.tables
        WHERE table_name = 'phrotest_20120102';
        """
        cur.execute(statement)
        result = cur.fetchone()
        assert result[0] == 0
Example #19
    def test_run_job_with_reports_with_existing_bugs_same(self, requests_mocker):
        requests_mocker.get(BUGZILLA_BASE_URL, json=SAMPLE_BUGZILLA_RESULTS)
        config_manager = self._setup_config_manager(3)
        cursor = self.conn.cursor()

        cursor.execute("""
            insert into bug_associations (bug_id, signature)
            values (8, 'legitimate(sig)');
        """)
        self.conn.commit()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['bugzilla-associations']
            assert not information['bugzilla-associations']['last_error']
            assert information['bugzilla-associations']['last_success']

        cursor.execute(
            'select signature from bug_associations where bug_id = 8'
        )
        associations = cursor.fetchall()
        # New signatures have correctly been inserted.
        assert len(associations) == 2
        assert ('another::legitimate(sig)',) in associations
        assert ('legitimate(sig)',) in associations
Example #20
    def test_basic_run_job_without_reports(self):
        config_manager = self._setup_config_manager(3)

        cursor = self.conn.cursor()
        cursor.execute('select count(*) from reports')
        count, = cursor.fetchone()
        assert count == 0, "reports table not cleaned"
        cursor.execute('select count(*) from bugs')
        count, = cursor.fetchone()
        assert count == 0, "'bugs' table not cleaned"
        cursor.execute('select count(*) from bug_associations')
        count, = cursor.fetchone()
        assert count == 0, "'bug_associations' table not cleaned"

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['bugzilla-associations']
            assert not information['bugzilla-associations']['last_error']
            assert information['bugzilla-associations']['last_success']

        # now, because there were no matching signatures in the reports
        # table, all bugs are rejected
        cursor.execute('select count(*) from bugs')
        count, = cursor.fetchone()
        ok_(not count)
        cursor.execute('select count(*) from bug_associations')
        count, = cursor.fetchone()
        ok_(not count)
Example #21
    def test_basic_run_job_no_data(self):
        config_manager = self._setup_config_manager()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['daily-url']
            assert not information['daily-url']['last_error']
            assert information['daily-url']['last_success']

            # this should have created two .csv.gz files
            now = datetime.datetime.utcnow() - datetime.timedelta(days=1)

            private = now.strftime('%Y%m%d-crashdata.csv.gz')
            public = now.strftime('%Y%m%d-pub-crashdata.csv.gz')
            ok_(private in os.listdir(self.tempdir))
            ok_(public in os.listdir(self.tempdir))

            private_path = os.path.join(self.tempdir, private)
            f = gzip.open(private_path)
            try:
                eq_(f.read(), '')
            finally:
                f.close()

            public_path = os.path.join(self.tempdir, public)
            f = gzip.open(public_path)
            try:
                eq_(f.read(), '')
            finally:
                f.close()
Example #22
    def test_run_drop_old_partitions(self):
        cur = self.conn.cursor()

        # Ensure test rows are present.
        statement = """
        SELECT COUNT(*) FROM raw_crashes_20120102;
        """
        cur.execute(statement)
        result = cur.fetchone()
        eq_(result[0], 2L)
        # need to get out of this transaction to
        # allow crontabber to acquire a lock
        self.conn.commit()

        # Run the crontabber job to truncate the test partition.
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        print information['truncate-partitions']['last_error']
        assert information['truncate-partitions']
        assert not information['truncate-partitions']['last_error']
        assert information['truncate-partitions']['last_success']

        # Ensure the test partition was truncated.
        statement = """
        SELECT COUNT(*) FROM raw_crashes_20120102;
        """
        cur.execute(statement)
        result = cur.fetchone()
        eq_(result[0], 0)
Example #23
    def test_run_job_with_mocked_data_with_wrong_products(self):
        config_manager = self._setup_config_manager(
            product='Thunderbird,SeaMonkey',
            version='1.0,2.0',
            public_output_path=False)
        self._insert_waterwolf_mock_data()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['daily-url']
            assert not information['daily-url']['last_error']
            assert information['daily-url']['last_success']

            # this should only have created the private .csv.gz file
            now = datetime.datetime.utcnow() - datetime.timedelta(days=1)

            private = now.strftime('%Y%m%d-crashdata.csv.gz')
            public = now.strftime('%Y%m%d-pub-crashdata.csv.gz')
            ok_(private in os.listdir(self.tempdir))
            ok_(public not in os.listdir(self.tempdir))

            private_path = os.path.join(self.tempdir, private)
            f = gzip.open(private_path)
            try:
                eq_(f.read(), '')
            finally:
                f.close()
Example #24
    def test_run_drop_old_partitions(self):
        cur = self.conn.cursor()

        # Ensure test rows are present.
        statement = """
        SELECT COUNT(*) FROM raw_crashes_20120102;
        """
        cur.execute(statement)
        result = cur.fetchone()
        eq_(result[0], 2L)
        # need to get out of this transaction to
        # allow crontabber to acquire a lock
        self.conn.commit()

        # Run the crontabber job to truncate the test partition.
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        print information['truncate-partitions']['last_error']
        assert information['truncate-partitions']
        assert not information['truncate-partitions']['last_error']
        assert information['truncate-partitions']['last_success']

        # Ensure the test partition was truncated.
        statement = """
        SELECT COUNT(*) FROM raw_crashes_20120102;
        """
        cur.execute(statement)
        result = cur.fetchone()
        eq_(result[0], 0)
Example #25
    def test_basic_run(self):
        cur = self.conn.cursor()
        # Insert test rows.
        statement = """
            INSERT INTO raw_adi
            (date, product_name, adi_count) VALUES
            (%(first)s, 'WinterFox', 11),
            (%(second)s, 'WinterFox', 23)
        """
        second = utc_now().date()
        first = second - datetime.timedelta(days=1)
        cur.execute(statement, {'first': first, 'second': second})
        self.conn.commit()

        # Run the crontabber job to remove old rows.
        config_manager = self._setup_config_manager(days_to_keep=1)
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        assert information['clean-raw-adi']
        assert not information['clean-raw-adi']['last_error']
        assert information['clean-raw-adi']['last_success']

        # Ensure test row was removed
        cur.execute("""
            SELECT date FROM raw_adi
        """)
        result, = cur.fetchall()
        report_date = result[0]
        eq_(report_date, second)
Example #26
    def test_failing_pig_job(self):
        # a mutable list where the commands sent are stored
        commands_sent = []
        self.Popen.side_effect = functools.partial(
            mocked_Popen,
            _commands_sent=commands_sent,
            _exit_code=1,
            _stdout='',
            _stderr='First command failed :(',
        )

        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['modulelist']
            assert information['modulelist']['last_error']
            _traceback = information['modulelist']['last_error']['traceback']
            ok_('pig run failed' in _traceback)
            # the other two were cancelled
            eq_(len(commands_sent), 1)
            config.logger.error.assert_has_calls([
                mock.call('First command failed :(')
            ])
Example #27
    def test_run_job_with_no_data_with_ssh_errors(self):
        config_manager = self._setup_config_manager(
            public_output_path='',
            private_user='******',
            private_server='secure.mozilla.org',
            private_location='/var/data/',
            private_ssh_command='chmod 0640 /var/data/*',
        )
        self._insert_waterwolf_mock_data()

        # any mutable object, so we can keep track of the number of times
        # the side_effect function is called
        calls = []

        def comm():
            if calls:
                # some errors
                return '', "CRAP!"
            else:
                calls.append(1)
                return '', ''

        self.Popen().communicate.side_effect = comm

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['daily-url']
            assert not information['daily-url']['last_error']
            assert information['daily-url']['last_success']

            ok_(config.logger.warn.called)
Example #28
    def test_basic_run_job(self, requests_mocker):
        requests_mocker.get(BUGZILLA_BASE_URL, json=SAMPLE_BUGZILLA_RESULTS)
        config_manager = self._setup_config_manager(3)

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['bugzilla-associations']
            assert not information['bugzilla-associations']['last_error']
            assert information['bugzilla-associations']['last_success']

        cursor = self.conn.cursor()
        cursor.execute('select bug_id from bug_associations order by bug_id')
        associations = cursor.fetchall()

        # Verify we have the expected number of associations.
        assert len(associations) == 8
        bug_ids = [x[0] for x in associations]

        # Verify bugs with no crash signatures are missing.
        assert 6 not in bug_ids

        cursor.execute(
            'select signature from bug_associations where bug_id = 8'
        )
        associations = cursor.fetchall()
        # New signatures have correctly been inserted.
        assert len(associations) == 2
        assert ('another::legitimate(sig)',) in associations
        assert ('legitimate(sig)',) in associations
Example #29
    def test_basic_run(self):
        cur = self.conn.cursor()
        # Insert test rows.
        statement = """
            INSERT INTO missing_symbols
            (date_processed, debug_file, debug_id, code_file, code_id)
            VALUES
            (%(first)s, 'foo.pdb', '0420', 'foo.py', '123'),
            (%(second)s, 'bar.pdb', '65EA9', 'bar.py', null)
        """
        second = utc_now().date()
        first = second - datetime.timedelta(days=1)
        cur.execute(statement, {'first': first, 'second': second})
        self.conn.commit()

        # Run the crontabber job to remove old rows.
        config_manager = self._setup_config_manager(days_to_keep=1)
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        assert information['clean-missing-symbols']
        assert not information['clean-missing-symbols']['last_error']
        assert information['clean-missing-symbols']['last_success']

        # Ensure expected test row was removed
        cur.execute("""
            SELECT date_processed FROM missing_symbols
        """)
        first, = cur.fetchall()
        date_processed = first[0]
        eq_(date_processed, second)
Example #30
    def test_run(self):
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['suspicious-crashes']
            assert not information['suspicious-crashes']['last_error']
            assert information['suspicious-crashes']['last_success']

            cursor = self.conn.cursor()

            cursor.execute("""
                SELECT signatures.signature, scs.report_date
                FROM suspicious_crash_signatures scs
                JOIN signatures ON scs.signature_id=signatures.signature_id
            """)

            count = 0
            today = (utc_now() - datetime.timedelta(1)).date()
            for row in cursor.fetchall():
                eq_('sig', row[0])
                eq_(today, row[1].date())
                count += 1

            eq_(1, count)
Example #31
    def test_basic_run(self):
        cur = self.conn.cursor()
        # Insert test rows.
        statement = """
            INSERT INTO missing_symbols
            (date_processed, debug_file, debug_id, code_file, code_id)
            VALUES
            (%(first)s, 'foo.pdb', '0420', 'foo.py', '123'),
            (%(second)s, 'bar.pdb', '65EA9', 'bar.py', null)
        """
        second = utc_now().date()
        first = second - datetime.timedelta(days=1)
        cur.execute(statement, {'first': first, 'second': second})
        self.conn.commit()

        # Run the crontabber job to remove old rows.
        config_manager = self._setup_config_manager(days_to_keep=1)
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        assert information['clean-missing-symbols']
        assert not information['clean-missing-symbols']['last_error']
        assert information['clean-missing-symbols']['last_success']

        # Ensure expected test row was removed
        cur.execute("""
            SELECT date_processed FROM missing_symbols
        """)
        first, = cur.fetchall()
        date_processed = first[0]
        eq_(date_processed, second)
Example #32
    def test_reprocessing(self):
        """ Simple test of reprocessing"""
        config_manager = self._setup_config_manager()
        c = config_manager.get_config()

        cursor = self.conn.cursor()

        # Queue one crash ID in the reprocessing_jobs table
        cursor.execute("""
            INSERT into reprocessing_jobs
              (crash_id)
            VALUES
             ('13c4a348-5d04-11e3-8118-d231feb1dc81')
        """)

        # We have to do this here to accommodate separate crontabber processes
        self.conn.commit()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = tab.job_state_database['reprocessing-jobs']
            assert not information['last_error']
            assert information['last_success']

        cursor = self.conn.cursor()
        cursor.execute('select count(*) from reprocessing_jobs')

        res_expected = 0
        res, = cursor.fetchone()
        eq_(res, res_expected)
Example #33
    def test_symbols_unpack_other_files(self):

        source_file = os.path.join(self.temp_source_directory,
                                   os.path.basename(TAR_FILE))
        shutil.copy(TAR_FILE, source_file)

        source_file = os.path.join(self.temp_source_directory,
                                   os.path.basename(TGZ_FILE))
        shutil.copy(TGZ_FILE, source_file)

        source_file = os.path.join(self.temp_source_directory,
                                   os.path.basename(TARGZ_FILE))
        shutil.copy(TARGZ_FILE, source_file)

        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        information = self._load_structure()
        assert information['symbols-unpack']
        assert not information['symbols-unpack']['last_error']
        assert information['symbols-unpack']['last_success']

        ok_(os.listdir(self.temp_destination_directory), 'empty')
Example #34
    def test_basic_run_job_no_data(self):
        config_manager = self._setup_config_manager()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['daily-url']
            assert not information['daily-url']['last_error']
            assert information['daily-url']['last_success']

            # this should have created two .csv.gz files
            now = datetime.datetime.utcnow() - datetime.timedelta(days=1)

            private = now.strftime('%Y%m%d-crashdata.csv.gz')
            public = now.strftime('%Y%m%d-pub-crashdata.csv.gz')
            ok_(private in os.listdir(self.tempdir))
            ok_(public in os.listdir(self.tempdir))

            private_path = os.path.join(self.tempdir, private)
            f = gzip.open(private_path)
            try:
                eq_(f.read(), '')
            finally:
                f.close()

            public_path = os.path.join(self.tempdir, public)
            f = gzip.open(public_path)
            try:
                eq_(f.read(), '')
            finally:
                f.close()
Example #35
    def test_all_matviews(self, mocked_utc_now):

        # Pretend it's 03AM UTC
        def mock_utc_now():
            n = utc_now()
            n = n.replace(hour=3)
            return n

        mocked_utc_now.side_effect = mock_utc_now

        config_manager = self._setup_config_manager(
          'socorro.unittest.cron.jobs.test_matviews.ReportsCleanJob|1d\n'
          'socorro.unittest.cron.jobs.test_matviews.FTPScraperJob|1d\n'
          ''
          'socorro.cron.jobs.matviews.ProductVersionsCronApp|1d\n'
          'socorro.cron.jobs.matviews.SignaturesCronApp|1d|02:00\n'
          'socorro.cron.jobs.matviews.TCBSCronApp|1d\n'
          'socorro.cron.jobs.matviews.ADUCronApp|1d\n'
          'socorro.cron.jobs.matviews.NightlyBuildsCronApp|1d\n'
          'socorro.cron.jobs.matviews.BuildADUCronApp|1d|02:00\n'
          'socorro.cron.jobs.matviews.CrashesByUserCronApp|1d|02:00\n'
          'socorro.cron.jobs.matviews.CrashesByUserBuildCronApp|1d|02:00\n'
          'socorro.cron.jobs.matviews.HomePageGraphCronApp|1d|02:00\n'
          'socorro.cron.jobs.matviews.HomePageGraphBuildCronApp|1d|02:00\n'
          'socorro.cron.jobs.matviews.TCBSBuildCronApp|1d|02:00\n'
          'socorro.cron.jobs.matviews.ExplosivenessCronApp|1d|02:00\n'
          'socorro.cron.jobs.matviews.GraphicsDeviceCronApp|1d|02:00\n'
        )

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()

            for app_name in ('product-versions-matview',
                             'signatures-matview',
                             'tcbs-matview',
                             'adu-matview',
                             'nightly-builds-matview',
                             'build-adu-matview',
                             'crashes-by-user-matview',
                             'crashes-by-user-build-matview',
                             'home-page-graph-matview',
                             'home-page-graph-matview-build',
                             'tcbs-build-matview',
                             'explosiveness-matview',
                             'graphics-device-matview',):

                ok_(app_name in information, app_name)
                ok_(
                    not information[app_name]['last_error'],
                    app_name
                )
                ok_(
                    information[app_name]['last_success'],
                    app_name
                )
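
The mocked_utc_now argument implies the test method is wrapped in a mock.patch decorator that sits outside this snippet. A self-contained illustration of the pattern of pinning "now" through a side_effect, using a placeholder namespace instead of the module that really defines utc_now:

import datetime
import mock

class fake_clock(object):
    # Placeholder standing in for wherever the cron jobs import utc_now from.
    @staticmethod
    def utc_now():
        return datetime.datetime.utcnow()

# Patch the name where it is looked up and pretend it's 03:00 UTC, mirroring
# the mock_utc_now side_effect in the example above.
with mock.patch.object(fake_clock, 'utc_now') as mocked_utc_now:
    mocked_utc_now.side_effect = lambda: datetime.datetime(2014, 1, 1, 3, 0, 0)
    assert fake_clock.utc_now().hour == 3
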
Example #36
    def test_run(self, connection_context):
        with self._setup_config_manager().context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            ok_(information['elasticsearch-cleanup'])
            ok_(not information['elasticsearch-cleanup']['last_error'])
            ok_(information['elasticsearch-cleanup']['last_success'])
Example #37
    def test_run(self, pyes_mock):
        with self._setup_config_manager().context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            ok_(information['elasticsearch-cleanup'])
            ok_(not information['elasticsearch-cleanup']['last_error'])
            ok_(information['elasticsearch-cleanup']['last_success'])
Example #38
    def test_basic_run_job(self, rget):
        config_manager = self._setup_config_manager()

        def mocked_get(url):
            return Response({
                'hits': [
                    {
                        'product': 'Firefox',
                        'is_featured': True,
                        'version': '24.5.0'
                    },
                ],
                'total':
                1
            })

        rget.side_effect = mocked_get

        rows = execute_query_fetchall(
            self.conn, 'select product_name, version_string, featured_version '
            'from product_versions')
        eq_(sorted(rows), [('Firefox', '15.0a1', True),
                           ('Firefox', '24.5.0', False)])
        # and the view `product_info`...
        rows = execute_query_fetchall(
            self.conn, 'select product_name, version_string, is_featured '
            'from product_info')
        eq_(sorted(rows), [('Firefox', '15.0a1', True),
                           ('Firefox', '24.5.0', False)])
        # This is necessary so we get a new cursor when we do other
        # selects after the crontabber app has run.
        self.conn.commit()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['featured-versions-sync']
            assert not information['featured-versions-sync']['last_error']
            assert information['featured-versions-sync']['last_success']

            config.logger.info.assert_called_with(
                'Set featured versions for Firefox %r' % ([u'24.5.0'], ))

        rows = execute_query_fetchall(
            self.conn, 'select product_name, version_string, featured_version '
            'from product_versions')
        eq_(sorted(rows), [('Firefox', '15.0a1', False),
                           ('Firefox', '24.5.0', True)])
        # and the view `product_info`...
        rows = execute_query_fetchall(
            self.conn, 'select product_name, version_string, is_featured '
            'from product_info')
        eq_(sorted(rows), [('Firefox', '15.0a1', False),
                           ('Firefox', '24.5.0', True)])
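
The mocked_get helper above wraps its payload in a Response object that the snippet does not define. A minimal, hypothetical stand-in consistent with how requests-style responses are usually consumed; the real test module's class may well differ:

class Response(object):
    # Hypothetical stand-in only: the constructor signature matches the
    # call site above, the rest is an assumption.
    def __init__(self, payload, status_code=200):
        self.payload = payload
        self.status_code = status_code

    def json(self):
        return self.payload
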
Example #39
    def test_symbols_unpack_empty(self):
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        information = self._load_structure()
        assert information['symbols-unpack']
        assert not information['symbols-unpack']['last_error']
        assert information['symbols-unpack']['last_success']
Example #40
    def test_run_weekly_reports_partitions(self):
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['weekly-reports-partitions']
            assert not information['weekly-reports-partitions']['last_error']
            assert information['weekly-reports-partitions']['last_success']
Example #41
    def test_symbols_unpack_empty(self):
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        information = self._load_structure()
        assert information['symbols-unpack']
        assert not information['symbols-unpack']['last_error']
        assert information['symbols-unpack']['last_success']
Example #42
    def test_cron_job(self, exacttarget_mock):
        config_manager = self._setup_config_manager()
        et_mock = exacttarget_mock.return_value

        # Make get_subscriber raise an exception
        list_service = et_mock.list.return_value = mock.Mock()
        list_service.get_subscriber = mock.Mock(
            side_effect=exacttarget.NewsletterException()
        )

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['automatic-emails']
            assert not information['automatic-emails']['last_error']
            assert information['automatic-emails']['last_success']
            eq_(et_mock.trigger_send.call_count, 4)

            last_email = u'z\[email protected]'

            # Verify the last call to trigger_send
            fields = {
                'EMAIL_ADDRESS_': last_email,
                'EMAIL_FORMAT_': 'H',
                'TOKEN': last_email
            }

            et_mock.trigger_send.assert_called_with('socorro_dev_test', fields)

            # Verify that user's data was updated
            conf = config.crontabber['class-AutomaticEmailsCronApp']
            es = SuperS().es(
                urls=conf.elasticsearch.elasticsearch_urls,
                timeout=conf.elasticsearch.elasticsearch_timeout,
            )
            search = es.indexes(conf.elasticsearch.elasticsearch_emails_index)
            search = search.doctypes('emails')
            es.get_es().refresh()

            emails_list = (
                '*****@*****.**',
                '"Quidam" <*****@*****.**>',
                '*****@*****.**'
            )
            search = search.filter(_id__in=emails_list)
            res = search.values_list('last_sending')
            eq_(len(res), 3)
            now = utc_now()
            for row in res:
                date = string_to_datetime(row[0])
                eq_(date.year, now.year)
                eq_(date.month, now.month)
                eq_(date.day, now.day)
Example #43
    def test_basic_run_job_with_reports_with_existing_bugs_different(self):
        config_manager = self._setup_config_manager(3)

        cursor = self.conn.cursor()
        cursor.execute('select count(*) from bugs')
        count, = cursor.fetchone()
        assert not count, count
        cursor.execute('select count(*) from bug_associations')
        count, = cursor.fetchone()
        assert not count, count
        cursor.execute('select count(*) from reports')
        count, = cursor.fetchone()
        assert not count, count

        # these are matching the SAMPLE_CSV above
        cursor.execute("""insert into reports
        (uuid,signature)
        values
        ('123', 'legitimate(sig)');
        """)
        cursor.execute("""insert into reports
        (uuid,signature)
        values
        ('456', 'MWSBAR.DLL@0x2589f');
        """)
        cursor.execute("""insert into bugs
        (id,status,resolution,short_desc)
        values
        (8, 'CLOSED', 'RESOLVED', 'Different');
        """)
        cursor.execute("""insert into bug_associations
        (bug_id,signature)
        values
        (8, '@different');
        """)
        self.conn.commit()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['bugzilla-associations']
            assert not information['bugzilla-associations']['last_error']
            assert information['bugzilla-associations']['last_success']

        cursor.execute('select id, short_desc from bugs where id = 8')
        bug = cursor.fetchone()
        eq_(bug[1], 'newlines in sigs')

        cursor.execute(
          'select signature from bug_associations where bug_id = 8')
        association = cursor.fetchone()
        eq_(association[0], 'legitimate(sig)')
Example #44
    def test_basic_run_job_with_reports_with_existing_bugs_different(self):
        config_manager = self._setup_config_manager(3)

        cursor = self.conn.cursor()
        cursor.execute('select count(*) from bugs')
        count, = cursor.fetchone()
        assert not count, count
        cursor.execute('select count(*) from bug_associations')
        count, = cursor.fetchone()
        assert not count, count
        cursor.execute('select count(*) from reports')
        count, = cursor.fetchone()
        assert not count, count

        # these are matching the SAMPLE_CSV above
        cursor.execute("""insert into reports
        (uuid,signature)
        values
        ('123', 'legitimate(sig)');
        """)
        cursor.execute("""insert into reports
        (uuid,signature)
        values
        ('456', 'MWSBAR.DLL@0x2589f');
        """)
        cursor.execute("""insert into bugs
        (id,status,resolution,short_desc)
        values
        (8, 'CLOSED', 'RESOLVED', 'Different');
        """)
        cursor.execute("""insert into bug_associations
        (bug_id,signature)
        values
        (8, '@different');
        """)
        self.conn.commit()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['bugzilla-associations']
            assert not information['bugzilla-associations']['last_error']
            assert information['bugzilla-associations']['last_success']

        cursor.execute('select id, short_desc from bugs where id = 8')
        bug = cursor.fetchone()
        eq_(bug[1], 'newlines in sigs')

        cursor.execute(
          'select signature from bug_associations where bug_id = 8')
        association = cursor.fetchone()
        eq_(association[0], 'legitimate(sig)')
Example #45
    def test_mocked_fetch_with_secondary_destination(self, fake_hive):
        class MockedPGConnectionContext:
            connection = mock.MagicMock()

            def __init__(self, config):
                self.config = config

            @contextlib.contextmanager
            def __call__(self):
                yield self.connection

        config_manager = self._setup_config_manager(
            overrides={
                'crontabber.class-FetchADIFromHiveCronApp.'
                'secondary_destination.'
                'database_class':
                MockedPGConnectionContext,
            })

        yesterday = (datetime.datetime.utcnow() -
                     datetime.timedelta(days=1)).date()

        def return_test_data(fake):
            yield [
                yesterday, 'WinterWolf', 'Ginko', '2.3.1', '10.0.4',
                'nightly-ww3v20', 'nightly', 'a-guid', 1
            ]

        fake_hive.connect.return_value \
            .cursor.return_value.__iter__ = return_test_data

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['fetch-adi-from-hive']
            assert not information['fetch-adi-from-hive']['last_error']

        fake_hive.connect.assert_called_with(
            database='default',
            authMechanism='PLAIN',
            host='localhost',
            user='******',
            password='******',
            port=10000,
            timeout=1800000,
        )

        # Critical test here.
        # We make sure the secondary database class gets used
        # for a `cursor.copy_from()` call.
        ok_(MockedPGConnectionContext.connection.cursor().copy_from.called)
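
The MockedPGConnectionContext above, a callable class whose instances yield a shared MagicMock connection, is what lets the final assertion inspect cursor().copy_from afterwards. A self-contained sketch of the same idea with illustrative names, not Socorro's real database_class API:

import contextlib
import mock

class MockedConnectionContext(object):
    # Shared, class-level mock so assertions can be made on it afterwards.
    connection = mock.MagicMock()

    def __init__(self, config):
        self.config = config

    @contextlib.contextmanager
    def __call__(self):
        yield self.connection

# Usage mirrors `with database_class(config)() as conn:` in production code.
ctx = MockedConnectionContext(config=None)
with ctx() as conn:
    conn.cursor().copy_from('fake-file-object', 'raw_adi')

assert MockedConnectionContext.connection.cursor().copy_from.called
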
Example #46
    def test_duplicates(self):
        config_manager = self._setup_config_manager(
            'socorro.cron.jobs.matviews.DuplicatesCronApp|1d')

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['duplicates']
            assert not information['duplicates']['last_error']
            assert information['duplicates']['last_success']
Example #47
    def test_cron_job(self, exacttarget_mock):
        config_manager = self._setup_config_manager()
        et_mock = exacttarget_mock.return_value

        # Make get_subscriber raise an exception
        list_service = et_mock.list.return_value = mock.Mock()
        list_service.get_subscriber = mock.Mock(
            side_effect=exacttarget.NewsletterException())

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['automatic-emails']
            assert not information['automatic-emails']['last_error']
            assert information['automatic-emails']['last_success']
            eq_(et_mock.trigger_send.call_count, 4)

            last_email = u'z\[email protected]'

            # Verify the last call to trigger_send
            fields = {
                'EMAIL_ADDRESS_': last_email,
                'EMAIL_FORMAT_': 'H',
                'TOKEN': last_email
            }

            et_mock.trigger_send.assert_called_with('socorro_dev_test', fields)

            # Verify that user's data was updated
            conf = config.crontabber['class-AutomaticEmailsCronApp']
            es = SuperS().es(
                urls=conf.elasticsearch.elasticsearch_urls,
                timeout=conf.elasticsearch.elasticsearch_timeout,
            )
            search = es.indexes(conf.elasticsearch.elasticsearch_emails_index)
            search = search.doctypes('emails')
            es.get_es().refresh()

            emails_list = ('*****@*****.**',
                           '"Quidam" <*****@*****.**>',
                           '*****@*****.**')
            search = search.filter(_id__in=emails_list)
            res = search.values_list('last_sending')
            eq_(len(res), 3)
            now = utc_now()
            for row in res:
                date = string_to_datetime(row[0])
                eq_(date.year, now.year)
                eq_(date.month, now.month)
                eq_(date.day, now.day)
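What makes this test work is mock's side_effect: assigning an exception to a mocked method makes every call raise instead of return, which is how get_subscriber is forced to fail with NewsletterException above. A tiny standalone sketch of that pattern; FakeNewsletterException and the address used are made up for the example.

from unittest import mock


class FakeNewsletterException(Exception):
    """Stand-in for exacttarget.NewsletterException in this sketch."""


# Assigning an exception (class or instance) to side_effect makes the mocked
# call raise it instead of returning a value.
list_service = mock.Mock()
list_service.get_subscriber = mock.Mock(
    side_effect=FakeNewsletterException('no such subscriber')
)

try:
    list_service.get_subscriber('someone@example.com')
except FakeNewsletterException:
    raised = True
else:
    raised = False

assert raised
assert list_service.get_subscriber.call_count == 1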
Example #48
0
    def test_duplicates(self):
        config_manager = self._setup_config_manager(
          'socorro.cron.jobs.matviews.DuplicatesCronApp|1d'
        )

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['duplicates']
            assert not information['duplicates']['last_error']
            assert information['duplicates']['last_success']
Example #49
0
    def test_mocked_fetch(self):
        config_manager = self._setup_config_manager()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['fetch-adi-from-hive']
            assert not information['fetch-adi-from-hive']['last_error']

            config.logger.info.assert_called_with(
                'Faking the fetching of ADI from Hive :)')
Example #50
0
    def test_reports_clean_with_dependency(self):
        config_manager = self._setup_config_manager(
            'socorro.cron.jobs.matviews.DuplicatesCronApp|1h\n'
            'socorro.cron.jobs.matviews.ReportsCleanCronApp|1h')

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['reports-clean']
            assert not information['reports-clean']['last_error']
            assert information['reports-clean']['last_success']
Example #51
0
    def test_mocked_fetch(self):
        config_manager = self._setup_config_manager()

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['fetch-adi-from-hive']
            assert not information['fetch-adi-from-hive']['last_error']

            config.logger.info.assert_called_with(
                'Faking the fetching of ADI from Hive :)'
            )
Example #52
0
    def test_reports_clean_with_dependency(self):
        config_manager = self._setup_config_manager(
          'socorro.cron.jobs.matviews.DuplicatesCronApp|1h\n'
          'socorro.cron.jobs.matviews.ReportsCleanCronApp|1h'
        )

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['reports-clean']
            assert not information['reports-clean']['last_error']
            assert information['reports-clean']['last_success']
Example #53
0
    def test_basic_run(self):
        # We need to prepare to return a size for the new key
        self.mock_key.size = 123456789
        self.mock_key.generate_url.return_value = (
            'https://s3.example.com/latest.csv'
        )

        # Run the crontabber job.
        config_manager = self._setup_config_manager()
        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

        # Basic assertion test of stored procedure.
        information = self._load_structure()
        assert information['missing-symbols']
        assert not information['missing-symbols']['last_error']
        assert information['missing-symbols']['last_success']

        self.mock_boto_class()._connect.assert_called_with()
        self.mock_boto_class.close.assert_called_with()
        self.mock_bucket.new_key.assert_called_with('latest.csv')
        content = StringIO()
        writer = csv.writer(content)
        writer.writerow((
            'debug_file',
            'debug_id',
            'code_file',
            'code_id',
        ))
        writer.writerow((
            'nvwgf2um.pdb',
            '9D492B844FF34800B34320464AA1E7E41',
            'nvwgf2um.dll',
            '561D1D4Ff58000',
        ))
        self.mock_key.set_contents_from_string.assert_called_with(
            content.getvalue()
        )

        # this is because 123456789 bytes is 117.74 Mb
        tab.config.logger.info.assert_called_with(
            'Generated https://s3.example.com/latest.csv '
            '(123,456,789 bytes, 117.74 Mb)'
        )
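The asserted log line combines the raw byte count (with thousands separators) and a human-readable size: 123456789 bytes divided by 1024 twice is roughly 117.7376, which rounds to the 117.74 in the comment. A quick sketch of that formatting; show_size is a hypothetical helper, not necessarily how the crontabber app builds the message.

def show_size(num_bytes):
    """Format a byte count the way the asserted log line reads (hypothetical)."""
    megabytes = num_bytes / 1024.0 / 1024.0
    return '{:,} bytes, {:.2f} Mb'.format(num_bytes, megabytes)


# 123456789 / 1048576 is roughly 117.7376, rounded to two decimals.
assert show_size(123456789) == '123,456,789 bytes, 117.74 Mb'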
Example #54
0
    def test_download_error(self, rget):
        config_manager = self._setup_config_manager()

        def mocked_get(url):
            return Response('not here', status_code=404)

        rget.side_effect = mocked_get

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['featured-versions-automatic']
            assert information['featured-versions-automatic']['last_error']
            error = information['featured-versions-automatic']['last_error']
            ok_('DownloadError' in error['type'])
            ok_('404' in error['value'])
Example #55
0
    def test_run_job_no_data_but_scped(self):
        config_manager = self._setup_config_manager(
          public_output_path='',
          private_user='******',
          private_server='secure.mozilla.org',
          private_location='/var/data/',
          private_ssh_command='chmod 0640 /var/data/*',
        )

        def comm():
            # no errors
            return '', ''

        self.Popen().communicate.side_effect = comm

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['daily-url']
            assert not information['daily-url']['last_error']
            assert information['daily-url']['last_success']

        # even though the files created are empty they should nevertheless
        # be scp'ed; since the config is known, we can assert the exact
        # commands that were run
        now = datetime.datetime.utcnow() - datetime.timedelta(days=1)
        private = now.strftime('%Y%m%d-crashdata.csv.gz')
        private_path = os.path.join(self.tempdir, private)
        assert os.path.isfile(private_path)
        scp_command = 'scp "%s" "[email protected]:/var/data/"' % private_path
        ssh_command = 'ssh "*****@*****.**" "chmod 0640 /var/data/*"'
        self.Popen.assert_any_call(
          scp_command,
          stdin=PIPE, stderr=PIPE, stdout=PIPE,
          shell=True
        )

        self.Popen.assert_any_call(
          ssh_command,
          stdin=PIPE, stderr=PIPE, stdout=PIPE,
          shell=True
        )