Example 1
def set_up_to_succeed(app, caplog):
    """Generator helper: stages source attachments with matching destination copies, then yields to the test."""
    (bucket, source_prefix, dest_prefix) = get_s3_refs(app)
    caplog.set_level(logging.INFO)
    with capture_app_logs(app):
        with mock_s3(app, bucket=bucket) as m3:
            m3.Object(bucket,
                      f'{source_prefix}/2017/01/18/12345678_00012_1.pdf').put(
                          Body=b'a note attachment')
            m3.Object(bucket,
                      f'{source_prefix}/2018/12/22/23456789_00003_1.png').put(
                          Body=b'another note attachment')
            m3.Object(bucket,
                      f'{source_prefix}/2019/08/29/34567890_00014_2.xls').put(
                          Body=b'yet another note attachment')
            m3.Object(bucket,
                      f'{dest_prefix}/12345678/12345678_00012_1.pdf').put(
                          Body=b'a note attachment')
            m3.Object(bucket,
                      f'{dest_prefix}/23456789/23456789_00003_1.png').put(
                          Body=b'another note attachment')
            m3.Object(bucket,
                      f'{dest_prefix}/34567890/34567890_00014_2.xls').put(
                          Body=b'yet another note attachment')
            yield
    assert 'No attachments missing on S3 when compared against the view.' in caplog.text
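These setup helpers depend on get_s3_refs and object_exists, which none of the examples define. A minimal sketch of what they plausibly look like, assuming hypothetical config key names and the boto3 resource that mock_s3 yields:

def get_s3_refs(app):
    # Hypothetical config keys; the real suite defines its own.
    bucket = app.config['LOCH_S3_BUCKET']
    source_prefix = app.config['LOCH_S3_SIS_ATTACHMENT_SOURCE_PREFIX']
    dest_prefix = app.config['LOCH_S3_SIS_ATTACHMENT_DEST_PREFIX']
    return (bucket, source_prefix, dest_prefix)

def object_exists(m3, bucket, key):
    # m3 is the boto3 S3 resource yielded by mock_s3; Object.load() raises
    # botocore.exceptions.ClientError when the key is absent.
    from botocore.exceptions import ClientError
    try:
        m3.Object(bucket, key).load()
        return True
    except ClientError:
        return False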
Example 2
    def test_run_with_all_param(self, app, caplog):
        """When 'all' is provided, copies all files."""
        (bucket, source_prefix, dest_prefix) = get_s3_refs(app)
        datestamp = 'all'

        caplog.set_level(logging.INFO)
        with capture_app_logs(app):
            with mock_s3(app, bucket=bucket) as m3:
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/28/12345678_00012_1.pdf').put(
                        Body=b'a note attachment')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/28/23456789_00003_1.png').put(
                        Body=b'another note attachment')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/29/34567890_00014_2.xls').put(
                        Body=b'ok to copy me')

                response = MigrateSisAdvisingNoteAttachments().run(
                    datestamp=datestamp)

                assert 'Will copy files from /sis-data/sis-sftp/incremental/advising-notes/attachment-files.' in caplog.text
                assert 'Copied 3 attachments to the destination folder.' in caplog.text
                assert response == (
                    'SIS advising note attachment migration complete for sis-data/sis-sftp/incremental/advising-notes/attachment-files.'
                )
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/12345678/12345678_00012_1.pdf')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/23456789/23456789_00003_1.png')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/34567890/34567890_00014_2.xls')
Example 3
    def test_s3_nonexistent_object(self, app, caplog, bad_bucket):
        """Returns false on S3 checks for nonexistent objects."""
        with capture_app_logs(app):
            key = app.config['LOCH_S3_PREFIX_TESTEXT'] + '/00001/sonnet-xlv.html'
            response = s3.object_exists(key)
            assert response is False
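The module-level s3.object_exists checked here takes only a key, so the bucket must come from configuration. A plausible sketch, assuming it wraps boto3's head_object, which raises ClientError for missing or forbidden keys:

import boto3
from botocore.exceptions import ClientError

def object_exists(key, bucket='mock-bucket'):  # bucket name is illustrative
    client = boto3.client('s3')
    try:
        client.head_object(Bucket=bucket, Key=key)
        return True
    except ClientError:
        return False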
Example 4
    def test_sync_canvas_snapshots(self, app, metadata_db, caplog):
        """Dispatches a complete sync job against fixtures."""
        caplog.set_level(logging.INFO)
        with capture_app_logs(app):
            with mock_s3(app):
                result = SyncCanvasSnapshots().run_wrapped()
            assert 'Canvas snapshot sync job dispatched to workers' in result
            assert_background_job_status('sync')
            assert 'Dispatched S3 sync of snapshot quiz_dim-00000-0ab80c7c.gz' in caplog.text
            assert 'Dispatched S3 sync of snapshot requests-00098-b14782f5.gz' in caplog.text
            assert '311 successful dispatches, 0 failures' in caplog.text

            schema = app.config['RDS_SCHEMA_METADATA']

            count_results = rds.fetch(f'SELECT count(*) FROM {schema}.canvas_sync_job_status')
            assert count_results[0]['count'] == 311

            canvas_status_results = rds.fetch(f'SELECT DISTINCT status FROM {schema}.canvas_sync_job_status')
            assert len(canvas_status_results) == 1
            assert canvas_status_results[0]['status'] == 'created'

            sync_results = rds.fetch(f'SELECT * FROM {schema}.canvas_sync_job_status LIMIT 1')
            assert sync_results[0]['job_id'].startswith('sync_')
            assert sync_results[0]['filename'] == 'account_dim-00000-5eb7ee9e.gz'
            assert sync_results[0]['canvas_table'] == 'account_dim'
            assert 'account_dim/part-00505-5c40f1f3-b611-4f64-a007-67b775e984fe.c000.txt.gz' in sync_results[0]['source_url']
            assert sync_results[0]['destination_url'] is None
            assert sync_results[0]['details'] is None
            assert sync_results[0]['created_at']
            assert sync_results[0]['updated_at']
Example 5
    def test_malformed_filenames(self, app, caplog):
        """Copies attachments whose filenames deviate from the expected pattern, normalizing the destination names."""
        (bucket, source_prefix, dest_prefix) = get_s3_refs(app)
        datestamp = 'all'

        caplog.set_level(logging.INFO)
        with capture_app_logs(app):
            with mock_s3(app, bucket=bucket) as m3:
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/28/12345678_00012_1_May_7_2019_email.pdf'
                ).put(Body=b'extra chars in my name lol')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/28/23456789_00052_1.png.png'
                ).put(Body=b'somehow i got a redundant .ext')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/29/23456789_00053_1._DEGREE_COMPLETION_LETTER'
                ).put(Body=b'original file name mistaken for the .ext')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/29/34567890_00014_2..7.19_(2)-edited_(1)-2_(1)_(1).xls'
                ).put(Body=b'is this a versioning scheme?')

                MigrateSisAdvisingNoteAttachments().run(datestamp=datestamp)

                assert 'Copied 4 attachments to the destination folder.' in caplog.text
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/12345678/12345678_00012_1.pdf')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/23456789/23456789_00052_1.png')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/23456789/23456789_00053_1')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/34567890/34567890_00014_2.xls')
Example 6
    def test_remove_obsolete_files(self, app, caplog, cleanup_s3):
        """Removes files from S3 following prefix and whitelist rules."""
        caplog.set_level(logging.INFO)
        with capture_app_logs(app):
            prefix1 = app.config['LOCH_S3_PREFIX_TESTEXT'] + '/001'
            prefix2 = app.config['LOCH_S3_PREFIX_TESTEXT'] + '/002'

            assert s3.upload_from_url(
                'http://shakespeare.mit.edu/Poetry/sonnet.XX.html',
                prefix1 + '/xx/sonnet-xx.html')
            assert s3.upload_from_url(
                'http://shakespeare.mit.edu/Poetry/sonnet.XXI.html',
                prefix1 + '/xxi/sonnet-xxi.html')
            assert s3.upload_from_url(
                'http://shakespeare.mit.edu/Poetry/sonnet.XXII.html',
                prefix1 + '/xxii/sonnet-xxii.html')
            assert s3.upload_from_url(
                'http://shakespeare.mit.edu/Poetry/sonnet.XLV.html',
                prefix2 + '/xlv/sonnet-xlv.html')

            whitelist = ['sonnet-xxi.html', 'sonnet-xxii.html']
            assert s3.delete_objects_with_prefix(prefix1, whitelist) is True

            assert f'3 key(s) matching prefix "{prefix1}"' in caplog.text
            assert '2 key(s) in whitelist' in caplog.text
            assert 'will delete 1 object(s)' in caplog.text

            assert s3.object_exists(prefix1 + '/xx/sonnet-xx.html') is False
            assert s3.object_exists(prefix1 + '/xxi/sonnet-xxi.html') is True
            assert s3.object_exists(prefix1 + '/xxii/sonnet-xxii.html') is True
            assert s3.object_exists(prefix2 + '/xlv/sonnet-xlv.html') is True
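The counts asserted above suggest how delete_objects_with_prefix behaves: list every key under the prefix, spare any whose filename appears in the whitelist, and delete the rest. A hedged sketch with boto3, where the bucket name and exact log wording are illustrative:

import boto3
import logging

logger = logging.getLogger(__name__)

def delete_objects_with_prefix(prefix, whitelist=(), bucket='mock-bucket'):
    client = boto3.client('s3')
    paginator = client.get_paginator('list_objects_v2')
    keys = []
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
        keys += [o['Key'] for o in page.get('Contents', [])]
    logger.info(f'{len(keys)} key(s) matching prefix "{prefix}", {len(whitelist)} key(s) in whitelist')
    to_delete = [k for k in keys if k.split('/')[-1] not in whitelist]
    logger.info(f'will delete {len(to_delete)} object(s)')
    if to_delete:
        # delete_objects accepts at most 1000 keys per call.
        client.delete_objects(Bucket=bucket, Delete={'Objects': [{'Key': k} for k in to_delete]})
    return True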
Example 7
    def test_import_student_photos(self, app, metadata_db, student_tables, caplog):
        """Imports student photos to S3 and tracks per-SID status in the metadata schema."""
        from nessie.jobs.import_student_photos import ImportStudentPhotos
        caplog.set_level(logging.DEBUG)
        with capture_app_logs(app):
            with mock_s3(app):
                result = ImportStudentPhotos().run_wrapped()
                assert result == 'Student photo import completed: 1 succeeded, 9 had no photo available, 0 failed.'
                response = s3.get_keys_with_prefix('cal1card-data/photos')
                assert len(response) == 1
                assert response[0] == 'cal1card-data/photos/61889.jpg'

            success_rows = rds.fetch(
                f"SELECT * FROM {app.config['RDS_SCHEMA_METADATA']}.photo_import_status WHERE status = 'success'"
            )
            assert len(success_rows) == 1
            assert success_rows[0]['sid'] == '11667051'

            failure_rows = rds.fetch(
                f"SELECT * FROM {app.config['RDS_SCHEMA_METADATA']}.photo_import_status WHERE status = 'failure'"
            )
            assert len(failure_rows) == 0

            not_found_rows = rds.fetch(
                f"SELECT * FROM {app.config['RDS_SCHEMA_METADATA']}.photo_import_status WHERE status = 'photo_not_found'"
            )
            assert len(not_found_rows) == 9
Example 8
def set_up_to_fail(app, caplog):
    """Generator helper: stages one source attachment without a destination copy, plus an unexpected destination file, then yields inside a pytest.raises block."""
    (bucket, source_prefix, dest_prefix) = get_s3_refs(app)
    caplog.set_level(logging.INFO)
    with capture_app_logs(app):
        with mock_s3(app, bucket=bucket) as m3:
            m3.Object(bucket,
                      f'{source_prefix}/2017/01/18/12345678_00012_1.pdf').put(
                          Body=b'a note attachment')
            m3.Object(bucket,
                      f'{source_prefix}/2018/12/22/23456789_00003_1.png').put(
                          Body=b'another note attachment')
            m3.Object(bucket,
                      f'{dest_prefix}/12345678/12345678_00012_1.pdf').put(
                          Body=b'a note attachment')
            m3.Object(bucket,
                      f'{dest_prefix}/34567890/34567890_00014_2.xls').put(
                          Body=b'yet another note attachment')
            m3.Object(
                bucket, f'{dest_prefix}/45678901/45678901_00192_4.xls'
            ).put(
                Body=b'bamboozled by a completely unexpected note attachment')
            with pytest.raises(BackgroundJobError) as e:
                yield
    assert 'Attachments verification found missing attachments or sync failures:' in str(e.value)
    assert '\'attachment_sync_failure_count\': 1' in str(e.value)
    assert '\'missing_s3_attachments_count\': 1' in str(e.value)
    assert '\'attachment_sync_failures\': [\'sis-data/sis-sftp/incremental/advising-notes/attachment-files/2018/12/22/23456789_00003_1.png\']' in str(e.value)
    assert '\'missing_s3_attachments\': [\'23456789_00003_1.png\']' in str(e.value)
    assert 'Attachments missing on S3 when compared against SIS notes views: 1' in caplog.text
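Because set_up_to_succeed and set_up_to_fail are plain generators that yield mid-scenario, something must drive them. One plausible wiring, assuming pytest fixtures delegate to them:

@pytest.fixture
def will_fail(app, caplog):
    # Hypothetical fixture name; the setup above runs before the test body,
    # and the post-yield assertions run after it.
    yield from set_up_to_fail(app, caplog)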
Example 9
    def test_import_registrations(self, app, metadata_db, student_tables, caplog):
        """Imports registrations, term GPAs, and demographics from the student API fixtures."""
        from nessie.jobs.import_registrations import ImportRegistrations
        rows = redshift.fetch('SELECT * FROM student_test.student_term_gpas')
        assert len(rows) == 0
        rows = redshift.fetch('SELECT * FROM student_test.student_last_registrations')
        assert len(rows) == 0
        caplog.set_level(logging.DEBUG)
        with capture_app_logs(app):
            with mock_s3(app):
                result = ImportRegistrations().run_wrapped()
            assert result == 'Registrations import completed: 2 succeeded, 8 failed.'
            rows = redshift.fetch('SELECT * FROM student_test.student_term_gpas ORDER BY sid')
            assert len(rows) == 11
            for row in rows[0:6]:
                assert row['sid'] == '11667051'
            for row in rows[7:10]:
                assert row['sid'] == '1234567890'
            row_2168 = next(r for r in rows if r['term_id'] == '2168')
            assert row_2168['gpa'] == Decimal('3.000')
            assert row_2168['units_taken_for_gpa'] == Decimal('8.0')

            rows = redshift.fetch('SELECT * FROM student_test.student_last_registrations ORDER BY sid')
            assert len(rows) == 2
            assert rows[0]['sid'] == '11667051'
            assert rows[1]['sid'] == '1234567890'
            feed = json.loads(rows[1]['feed'], strict=False)
            assert feed['term']['id'] == '2172'
            assert feed['academicLevels'][0]['level']['description'] == 'Sophomore'

            rows = redshift.fetch('SELECT * FROM student_test.student_api_demographics ORDER BY sid')
            assert len(rows) == 2
            assert rows[0]['sid'] == '11667051'
            assert rows[1]['sid'] == '1234567890'
            feed = json.loads(rows[1]['feed'], strict=False)
            assert feed['gender']['genderOfRecord']['description'] == 'Female'
Example 10
    def test_server_error(self, app, caplog):
        """Logs unexpected errors from the Canvas API."""
        with capture_app_logs(app):
            canvas_error = MockResponse(429, {}, '{"message": "Easy, tiger."}')
            with register_mock(canvas_data.get_snapshots, canvas_error):
                response = canvas_data.get_snapshots()
                assert '429 Client Error: Too Many Requests' in caplog.text
                assert not response
Example 11
    def test_server_error(self, app, caplog):
        """Logs unexpected errors from the dispatcher."""
        with capture_app_logs(app):
            canvas_error = MockResponse(401, {}, '{"message": "Unauthorized."}')
            with register_mock(dispatcher.dispatch, canvas_error):
                response = dispatcher.dispatch('create_canvas_schema')
                assert '401 Client Error: Unauthorized' in caplog.text
                assert not response
Example 12
    def test_s3_upload_error_handling(self, app, caplog, bad_bucket):
        """Handles and logs connection errors on S3 upload."""
        with capture_app_logs(app):
            url = 'http://shakespeare.mit.edu/Poetry/sonnet.XLV.html'
            key = app.config['LOCH_S3_PREFIX_TESTEXT'] + '/00001/sonnet-xlv.html'
            with pytest.raises(ValueError):
                s3.upload_from_url(url, key)
            # The log assertions must sit outside the pytest.raises block:
            # statements after the raising call would never execute inside it.
            assert 'Error on S3 upload' in caplog.text
            assert 'the bucket \'not-a-bucket-nohow\' does not exist, or is forbidden for access' in caplog.text
Example 13
    def test_run_with_no_param(self, mock_datetime, app, caplog, metadata_db,
                               prior_job_status):
        """When no parameter is provided, copies new files since the last succesful run."""
        (bucket, source_prefix, dest_prefix) = get_s3_refs(app)
        mock_datetime.utcnow.return_value = datetime(year=2019, month=8, day=29, hour=5, minute=21)

        caplog.set_level(logging.INFO)
        with capture_app_logs(app):
            with mock_s3(app, bucket=bucket) as m3:
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/25/45678912_00027_1.pdf').put(
                        Body=b'i\'ve already been copied')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/26/12345678_00012_1.pdf').put(
                        Body=b'a note attachment')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/28/23456789_00003_1.png').put(
                        Body=b'another note attachment')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/29/34567890_00014_2.xls').put(
                        Body=b'don\'t copy me')

                response = MigrateSisAdvisingNoteAttachments().run()

                assert 'Will copy files from /sis-data/sis-sftp/incremental/advising-notes/attachment-files/2019/08/25.' not in caplog.text
                assert 'Will copy files from /sis-data/sis-sftp/incremental/advising-notes/attachment-files/2019/08/26.' in caplog.text
                assert 'Will copy files from /sis-data/sis-sftp/incremental/advising-notes/attachment-files/2019/08/27.' in caplog.text
                assert 'Will copy files from /sis-data/sis-sftp/incremental/advising-notes/attachment-files/2019/08/28.' in caplog.text
                assert 'Will copy files from /sis-data/sis-sftp/incremental/advising-notes/attachment-files/2019/08/29.' not in caplog.text
                assert 'Copied 1 attachments to the destination folder.' in caplog.text
                assert 'Copied 0 attachments to the destination folder.' in caplog.text
                assert response == (
                    'SIS advising note attachment migration complete for sis-data/sis-sftp/incremental/advising-notes/attachment-files/2019/08/26, \
sis-data/sis-sftp/incremental/advising-notes/attachment-files/2019/08/27, \
sis-data/sis-sftp/incremental/advising-notes/attachment-files/2019/08/28.')
                assert not object_exists(
                    m3, bucket, f'{dest_prefix}/45678912/45678912_00027_1.xls')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/12345678/12345678_00012_1.pdf')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/23456789/23456789_00003_1.png')
                assert not object_exists(
                    m3, bucket, f'{dest_prefix}/34567890/34567890_00014_2.xls')
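The assertions imply a date-window rule: given a prior successful run (presumably recorded on 2019/08/25 by the prior_job_status fixture) and a mocked current time of 2019/08/29, the job copies 08/26 through 08/28 only. A sketch of that rule as the test implies it; the helper name is hypothetical:

from datetime import date, timedelta

def datestamps_to_copy(last_run_date, today):
    # Copy from the day after the last successful run up to,
    # but excluding, today.
    d = last_run_date + timedelta(days=1)
    while d < today:
        yield d.strftime('%Y/%m/%d')
        d += timedelta(days=1)

# list(datestamps_to_copy(date(2019, 8, 25), date(2019, 8, 29)))
# -> ['2019/08/26', '2019/08/27', '2019/08/28']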
Example 14
    def test_schema_creation_drop(self, app, caplog, ensure_drop_schema):
        """Can create and drop schemata on a real Redshift instance."""
        schema_name = app.config['REDSHIFT_SCHEMA_BOAC']
        schema = psycopg2.sql.Identifier(schema_name)
        with capture_app_logs(app):
            result = redshift.execute('CREATE SCHEMA {schema}', schema=schema)
            assert result == 'CREATE SCHEMA'

            result = redshift.execute('CREATE SCHEMA {schema}', schema=schema)
            assert result is None
            assert f'Schema "{schema_name}" already exists' in caplog.text

            result = redshift.execute('DROP SCHEMA {schema}', schema=schema)
            assert result == 'DROP SCHEMA'
Example 15
    def test_aborts_on_missing_term(self, app, caplog):
        """Raises BackgroundJobError when an expected enrollments file is missing from S3."""
        from nessie.jobs.create_sis_schema import CreateSisSchema
        with mock_s3(app):
            daily_path = get_s3_sis_daily_path()
            historical_path = app.config['LOCH_S3_SIS_DATA_PATH'] + '/historical'
            self._upload_data_to_s3(daily_path, historical_path)
            s3.delete_objects([f'{daily_path}/enrollments/enrollments-2178.gz'])
            with capture_app_logs(app):
                with pytest.raises(BackgroundJobError) as e:
                    CreateSisSchema().update_manifests()
                assert 'Expected filename enrollments-2178.gz not found in S3, aborting' in str(e.value)
Example 16
    def test_import_term_gpas(self, app, metadata_db, student_tables, caplog):
        """Imports term GPAs from fixtures and loads them into Redshift."""
        from nessie.jobs.import_term_gpas import ImportTermGpas
        caplog.set_level(logging.DEBUG)
        with capture_app_logs(app):
            with mock_s3(app):
                result = ImportTermGpas().run_wrapped()
            assert result == 'Term GPA import completed: 1 succeeded, 0 returned no registrations, 7 failed.'
            rows = redshift.fetch('SELECT * FROM student_test.student_term_gpas')
            assert len(rows) == 7
            for row in rows:
                assert row['sid'] == '11667051'
            row_2178 = next(r for r in rows if r['term_id'] == '2178')
            assert row_2178['gpa'] == Decimal('3.000')
            assert row_2178['units_taken_for_gpa'] == Decimal('8.0')
Example 17
    def test_file_upload_and_skip(self, app, caplog, cleanup_s3):
        """Uploads files to real S3, skipping duplicates."""
        url = 'http://shakespeare.mit.edu/Poetry/sonnet.XLV.html'
        key = app.config['LOCH_S3_PREFIX_TESTEXT'] + '/00001/sonnet-xlv.html'

        caplog.set_level(logging.DEBUG)
        with capture_app_logs(app):
            result = SyncFileToS3().run(url=url, key=key)
            assert result is True
            assert f'Key {key} does not exist, starting upload' in caplog.text
            assert 'S3 upload complete' in caplog.text

            result = SyncFileToS3().run(url=url, key=key)
            assert result is False
            assert f'Key {key} exists, skipping upload' in caplog.text
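The asserted log lines pin down the skip logic inside SyncFileToS3.run; a hypothetical condensation of it (the log strings are taken verbatim from the assertions above):

def sync_file_to_s3(url, key):
    if s3.object_exists(key):
        logger.info(f'Key {key} exists, skipping upload')
        return False
    logger.info(f'Key {key} does not exist, starting upload')
    s3.upload_from_url(url, key)
    logger.info('S3 upload complete')
    return True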
Example 18
    def test_affiliations_conflict(self, app, caplog):
        """Logs a conflict between affiliations and academicStatuses while merging the SIS profile."""
        feed = {
            'academicStatuses': [
                _active_grad_academic_status(),
            ],
            'affiliations': [
                _active_ucbx_affiliation(),
            ],
        }
        profile = {}
        with capture_app_logs(app):
            merge_sis_profile_academic_status(feed, profile)
            assert profile['academicCareer'] == 'GRAD'
            assert profile.get('academicCareerStatus') is None
            assert profile['plans'][0]['description'] == 'On-Campus/Online Prfsnl MPH'
            assert profile['plans'][0]['status'] == 'Active'
            assert 'Conflict between affiliations and academicStatuses' in caplog.text
Example 19
    def test_source_url_error_handling(self, app, caplog):
        """Handles and logs connection errors to the source URL."""
        with capture_app_logs(app):
            url = 'http://shakespeare.mit.edu/Poetry/sonnet.XLV.html'
            key = app.config['LOCH_S3_PREFIX_TESTEXT'] + '/00001/sonnet-xlv.html'
            responses.add(responses.GET, url, status=500, body='{"message": "Internal server error."}')
            with pytest.raises(ConnectionError):
                s3.upload_from_url(url, key)
            # As in the upload-error test, assertions placed after the raising
            # call inside the pytest.raises block would be dead code.
            assert 'Received unexpected status code, aborting S3 upload' in caplog.text
            assert 'status=500' in caplog.text
            assert 'body={"message": "Internal server error."}' in caplog.text
            assert f'url={url}' in caplog.text
            assert f'key={key}' in caplog.text
Example 20
    def test_first_time_run_with_no_param(self, mock_datetime, app, caplog,
                                          metadata_db):
        """When no parameter is provided and there is no prior successful run, copies all files."""
        (bucket, source_prefix, dest_prefix) = get_s3_refs(app)
        mock_datetime.utcnow.return_value = datetime(year=2019, month=8, day=29, hour=5, minute=21)

        caplog.set_level(logging.INFO)
        with capture_app_logs(app):
            with mock_s3(app, bucket=bucket) as m3:
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/28/12345678_00012_1.pdf').put(
                        Body=b'a note attachment')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/28/23456789_00003_1.png').put(
                        Body=b'another note attachment')
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/29/34567890_00014_2.xls').put(
                        Body=b'ok to copy me')

                response = MigrateSisAdvisingNoteAttachments().run()

                assert 'Will copy files from /sis-data/sis-sftp/incremental/advising-notes/attachment-files/.' in caplog.text
                assert 'Copied 3 attachments to the destination folder.' in caplog.text
                assert response == (
                    'SIS advising note attachment migration complete for sis-data/sis-sftp/incremental/advising-notes/attachment-files/.'
                )
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/12345678/12345678_00012_1.pdf')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/23456789/23456789_00003_1.png')
                assert object_exists(
                    m3, bucket, f'{dest_prefix}/34567890/34567890_00014_2.xls')
Example 21
    def test_run_with_invalid_param(self, app, caplog):
        """When invalid value is provided, job completes but copies zero files."""
        (bucket, source_prefix, dest_prefix) = get_s3_refs(app)
        datestamp = 'wrong!#$&'

        caplog.set_level(logging.INFO)
        with capture_app_logs(app):
            with mock_s3(app, bucket=bucket) as m3:
                m3.Object(
                    bucket,
                    f'{source_prefix}/2019/08/28/12345678_00012_1.pdf').put(
                        Body=b'a note attachment')

                response = MigrateSisAdvisingNoteAttachments().run(
                    datestamp=datestamp)

                assert 'Will copy files from /sis-data/sis-sftp/incremental/advising-notes/attachment-files/wrong!#$&.' in caplog.text
                assert 'Copied 0 attachments to the destination folder.' in caplog.text
                assert response == (
                    'SIS advising note attachment migration complete for sis-data/sis-sftp/incremental/advising-notes/attachment-files/wrong!#$&.'
                )
                assert not object_exists(
                    m3, bucket, f'{dest_prefix}/12345678/12345678_00012_1.pdf')
Example 22
    def test_metadata_tracked(self, app, metadata_db, student_tables, caplog):
        """Tracks per-SID import status in the metadata schema and respects load_mode."""
        from nessie.jobs.import_registrations import ImportRegistrations
        rows = rds.fetch('SELECT * FROM nessie_metadata_test.registration_import_status')
        assert len(rows) == 0
        caplog.set_level(logging.DEBUG)
        with capture_app_logs(app):
            with mock_s3(app):
                ImportRegistrations().run_wrapped()
                rows = rds.fetch('SELECT * FROM nessie_metadata_test.registration_import_status')
                assert len(rows) == 10
                assert len([r for r in rows if r['status'] == 'failure']) == 8
                assert next(r['status'] for r in rows if r['sid'] == '11667051') == 'success'
                result = ImportRegistrations().run_wrapped()
                assert result == 'Registrations import completed: 0 succeeded, 8 failed.'
                result = ImportRegistrations().run_wrapped(load_mode='all')
                assert result == 'Registrations import completed: 2 succeeded, 8 failed.'
                rds.execute("DELETE FROM nessie_metadata_test.registration_import_status WHERE sid = '11667051'")
                result = ImportRegistrations().run_wrapped()
                assert result == 'Registrations import completed: 1 succeeded, 8 failed.'
                assert next(r['status'] for r in rows if r['sid'] == '11667051') == 'success'
                rds.execute("UPDATE nessie_metadata_test.registration_import_status SET status='failure' WHERE sid = '11667051'")
                result = ImportRegistrations().run_wrapped()
                assert result == 'Registrations import completed: 1 succeeded, 8 failed.'
                assert next(r['status'] for r in rows if r['sid'] == '11667051') == 'success'
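The run-to-run counts above imply the load-mode rule: SIDs already marked 'success' are skipped on a normal run and retried only when load_mode='all' or after their status row is deleted. A sketch of that selection, with hypothetical names:

def sids_to_import(all_sids, status_rows, load_mode='new'):
    # In 'all' mode, reimport everything; otherwise skip SIDs whose last
    # import succeeded (failures are always retried).
    if load_mode == 'all':
        return list(all_sids)
    done = {r['sid'] for r in status_rows if r['status'] == 'success'}
    return [sid for sid in all_sids if sid not in done]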
Example 23
    def test_course_not_found(self, app, caplog):
        """Logs 404 for unknown course."""
        with capture_app_logs(app):
            response = canvas_api.get_course_enrollments(9999999)
            assert '404 Client Error' in caplog.text
            assert not response
Example 24
    def test_resync_canvas_snapshots(self, app, metadata_db, caplog):
        """Dispatches a complete resync job against fixtures."""
        caplog.set_level(logging.INFO)
        snapshots = canvas_data.get_snapshots()['files']

        def mock_metadata(job_id, snapshot, status, destination_size):
            metadata.create_canvas_sync_status(job_id, snapshot['filename'],
                                               snapshot['table'],
                                               snapshot['url'])
            key = '/'.join([
                get_s3_canvas_daily_path(), snapshot['table'],
                snapshot['filename']
            ])
            metadata.update_canvas_sync_status(
                job_id,
                key,
                status,
                source_size=1048576,
                destination_size=destination_size)

        old_sync_job = 'sync_152550000'
        latest_sync_job = 'sync_152560000'

        # The older job should be ignored by the resync.
        for snapshot in snapshots[0:5]:
            mock_metadata(old_sync_job, snapshot, 'complete', 1048576)
        for snapshot in snapshots[5:10]:
            mock_metadata(old_sync_job, snapshot, 'error', None)

        # The latest job synced five files successfully and ran into three problems.
        for snapshot in snapshots[10:15]:
            mock_metadata(latest_sync_job, snapshot, 'complete', 1048576)
        stalled = snapshots[15]
        errored = snapshots[16]
        size_discrepancy = snapshots[17]
        mock_metadata(latest_sync_job, stalled, 'streaming', None)
        mock_metadata(latest_sync_job, errored, 'error', None)
        mock_metadata(latest_sync_job, size_discrepancy, 'complete', 65536)

        schema = app.config['RDS_SCHEMA_METADATA']

        with capture_app_logs(app):
            assert rds.fetch(
                f'SELECT count(*) FROM {schema}.canvas_sync_job_status'
            )[0]['count'] == 18
            with mock_s3(app):
                result = ResyncCanvasSnapshots().run_wrapped()
            assert 'Canvas snapshot resync job dispatched to workers' in result
            assert_background_job_status('resync')
            assert f"Dispatched S3 resync of snapshot {stalled['filename']}" in caplog.text
            assert f"Dispatched S3 resync of snapshot {errored['filename']}" in caplog.text
            assert f"Dispatched S3 resync of snapshot {size_discrepancy['filename']}" in caplog.text
            assert '3 successful dispatches, 0 failures' in caplog.text

        assert rds.fetch(
            f'SELECT count(*) FROM {schema}.canvas_sync_job_status'
        )[0]['count'] == 21
        resync_results = rds.fetch(
            f"SELECT * FROM {schema}.canvas_sync_job_status WHERE job_id LIKE 'resync%'"
        )
        assert len(resync_results) == 3

        urls = []
        for r in resync_results:
            assert r['job_id'].startswith('resync_')
            assert r['filename']
            assert r['canvas_table']
            assert r['created_at']
            assert r['updated_at']
            urls.append(r['source_url'])
        assert stalled['url'] in urls
        assert errored['url'] in urls
        assert size_discrepancy['url'] in urls
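The fixture setup encodes the resync selection rule: only the most recent sync job is inspected, and a snapshot is re-dispatched if it stalled mid-stream, errored, or completed with a destination size that does not match the source. A sketch of that predicate over one status row, with hypothetical field handling:

def needs_resync(row):
    # 'streaming' rows stalled mid-transfer; 'error' rows failed outright;
    # 'complete' rows are suspect only on a size mismatch.
    if row['status'] in ('streaming', 'error'):
        return True
    return row['status'] == 'complete' and row['destination_size'] != row['source_size']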
Example 25
    def test_connection_error_handling(self, app, caplog):
        """Handles and logs connection errors."""
        with capture_app_logs(app):
            with override_config(app, 'REDSHIFT_HOST', 'H.C. Earwicker'):
                redshift.execute('SELECT 1')
                assert 'could not translate host name "H.C. Earwicker" to address' in caplog.text
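override_config, used here to point Redshift at an unresolvable host, is a small context manager; a plausible implementation:

from contextlib import contextmanager

@contextmanager
def override_config(app, key, value):
    # Swap in a config value for the duration of the block,
    # restoring the original on exit.
    original = app.config.get(key)
    app.config[key] = value
    try:
        yield
    finally:
        app.config[key] = original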