Example #1
    def test_s3_file_upload_and_download_with_path_object(self):
        s3_file = S3File.from_local_file(local_path=self.S3_FILE_UPLOAD_PATH,
                                         s3_path=self.PATH_OBJECT)
        s3_file.download(destination_path=self.S3_FILE_DOWNLOAD_PATH)

        with open(self.S3_FILE_DOWNLOAD_PATH, 'r') as downloaded_file:
            self.assertEqual(downloaded_file.read(), self.S3_FILE_CONTENTS)
Example #2
    def test_upsert_audit(self):
        s3_file = S3File.from_local_file(local_path=self.LOCAL_FILE_PATH,
                                         s3_path=self.S3_PATH)

        s3_to_redshift(
            s3_file,
            RedshiftTable(self.DB_CONNECTION, self.TABLE,
                          self.UPSERT_UNIQUENESS_KEY))

        recorded_audit_data = self.DB_CONNECTION.fetch(
            self.AUDIT_TABLE_CONTENTS_QUERY)
        self.assertEqual(recorded_audit_data, self.EXPECTED_AUDIT_DATA)
Example #3
    def test_data_in_redshift(self):
        '''Because the destination database, `BasicRedshiftButActuallyPostgres`,
        is only pretending to be Redshift, s3_to_redshift should successfully
        move a local .csv file into it.'''

        s3_file = S3File.from_local_file(local_path=self.LOCAL_FILE_PATH,
                                         s3_path=self.S3_PATH)

        s3_to_redshift(
            s3_file,
            RedshiftTable(self.DB_CONNECTION, self.TABLE,
                          self.UPSERT_UNIQUENESS_KEY))

        current_data_in_table = self.DB_CONNECTION.fetch(
            self.DB_SELECT_ALL_QUERY)
        self.assertEqual(current_data_in_table, self.FILE_CONTENTS)
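The `BasicRedshiftButActuallyPostgres` double referenced in the docstring is not shown on this page. One plausible shape for it, with every name and the rewrite rule assumed purely for illustration, is a wrapper that translates Redshift's S3 `COPY` into a form a local Postgres can run:

class BasicRedshiftButActuallyPostgres:
    '''Hypothetical sketch: wraps a real Postgres connection and rewrites
    the Redshift-only COPY-from-S3 statement before executing it.'''

    def __init__(self, postgres_connection, staged_file_path):
        self._connection = postgres_connection
        self._staged_file_path = staged_file_path

    def execute(self, statement):
        if statement.lstrip().upper().startswith('COPY'):
            # Redshift COPY reads from S3; Postgres reads the locally
            # staged copy of the same file instead.
            table_name = statement.split()[1]
            statement = "COPY {} FROM '{}' CSV;".format(
                table_name, self._staged_file_path)
        return self._connection.execute(statement)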
Example #4
    def test_vacuum_errors_are_swallowed(self, database_execute):
        # The first two mocked execute calls return normally; the third,
        # the VACUUM, raises NotSupportedError.
        database_execute.side_effect = [
            'pre_upsert_audit_table_insert_statement',
            'post_upsert_audit_table_update_statement',
            NotSupportedError(
                "VACUUM is running. HINT: re-execute after other vacuum finished"
            )
        ]

        s3_file = S3File.from_local_file(local_path=self.LOCAL_FILE_PATH,
                                         s3_path=self.S3_PATH)

        try:
            s3_to_redshift(
                s3_file,
                RedshiftTable(self.DB_CONNECTION, self.TABLE,
                              self.UPSERT_UNIQUENESS_KEY))
        except BaseException:
            self.fail('nothing should have errored here unexpectedly!')
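The production code under test is not shown here. A minimal sketch of the swallowing pattern this test exercises, assuming a hypothetical `_vacuum` helper and DB-API-style attribute names, could look like this:

def _vacuum(table):
    # Redshift rejects a VACUUM while another vacuum is already running;
    # the test above asserts this expected error is silently ignored.
    try:
        table.connection.execute('VACUUM {};'.format(table.name))
    except NotSupportedError:
        pass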
Example #5
    def test_s3_file_size(self):
        s3_file = S3File.from_local_file(local_path=self.S3_FILE_UPLOAD_PATH,
                                         s3_path=self.S3_PATH)
        expected_file_size = os.path.getsize(self.S3_FILE_UPLOAD_PATH)
        self.assertEqual(s3_file.file_size, expected_file_size)
Example #6
    def test_s3_file_factory(self):
        s3_file = S3File.from_local_file(local_path=self.S3_FILE_UPLOAD_PATH,
                                         s3_path=self.S3_PATH)
        self.assertIsInstance(s3_file, S3File)
Example #7
def from_local_file(file_path, destination):
    '''Assumes `file_path` points to a CSV; stages it at a transient
    S3 path, then hands it to `from_s3_file`.'''
    s3_path = _transient_s3_path(destination) + '.csv'
    s3_file = S3File.from_local_file(file_path, s3_path)

    from_s3_file(s3_file, destination)
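As a hypothetical usage sketch (the path, table name, and uniqueness key below are placeholders, not values taken from the examples above):

from_local_file('/tmp/users.csv',
                RedshiftTable(db_connection, 'users', 'id'))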