Code example #1
    def test_transfer_bucket(self):

        # Create a destination bucket
        # TODO maybe pull this from an env var as well
        destination_bucket = f"{self.test_bucket}-test"
        self.s3.create_bucket(destination_bucket)

        # Copy
        self.s3.transfer_bucket(self.test_bucket, self.test_key,
                                destination_bucket)

        # Test that object made it
        path = self.s3.get_file(destination_bucket, self.test_key)
        result_tbl = Table.from_csv(path)
        assert_matching_tables(self.tbl, result_tbl)
        # Test that original still exists in original bucket
        self.assertTrue(self.s3.key_exists(self.test_bucket, self.test_key))

        # Transfer and delete the original (the trailing positional arguments
        # presumably leave the optional parameters at their defaults; the final
        # True asks transfer_bucket to remove the original key, as checked below)
        self.s3.transfer_bucket(self.test_bucket, self.test_key_2,
                                destination_bucket, None, None, None, None,
                                None, False, True)
        path_2 = self.s3.get_file(destination_bucket, self.test_key_2)
        result_tbl_2 = Table.from_csv(path_2)
        assert_matching_tables(self.tbl_2, result_tbl_2)
        self.assertFalse(self.s3.key_exists(self.test_bucket, self.test_key_2))
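
For context, a copy-then-delete transfer like the one above typically reduces to two calls when using boto3 directly. The sketch below is an assumption about what the connector wraps, not parsons' actual implementation; the bucket and key names are placeholders.

import boto3

s3 = boto3.client("s3")

# Copy the object into the destination bucket under the same key
s3.copy_object(
    Bucket="destination-bucket",
    Key="my-data.csv",
    CopySource={"Bucket": "origin-bucket", "Key": "my-data.csv"},
)

# Remove the original, mirroring the "transfer and delete" case in the test
s3.delete_object(Bucket="origin-bucket", Key="my-data.csv")
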
Code example #2
File: test_etl.py  Project: bxjw/parsons
    def test_to_csv_zip(self):

        try:
            # Test using the to_csv() method
            self.tbl.to_csv('myzip.zip')
            tmp = zip_archive.unzip_archive('myzip.zip')
            assert_matching_tables(self.tbl, Table.from_csv(tmp))

            # Test using the to_zip_csv() method
            self.tbl.to_zip_csv('myzip.zip')
            tmp = zip_archive.unzip_archive('myzip.zip')
            assert_matching_tables(self.tbl, Table.from_csv(tmp))
        finally:
            os.unlink('myzip.zip')
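
For readers unfamiliar with the helpers above, here is a minimal standard-library sketch of the same zip round trip. It only illustrates the idea behind to_zip_csv and unzip_archive, not parsons' implementation; the file names are placeholders.

import csv
import zipfile

rows = [["first", "last"], ["Bob", "Smith"]]

# Write a CSV and pack it into a zip archive
with open("example.csv", "w", newline="") as f:
    csv.writer(f).writerows(rows)
with zipfile.ZipFile("example.zip", "w") as zf:
    zf.write("example.csv")

# Unpack the archive and read the CSV back
with zipfile.ZipFile("example.zip") as zf:
    zf.extract("example.csv", path="unzipped")
with open("unzipped/example.csv", newline="") as f:
    assert list(csv.reader(f)) == rows
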
Code example #3
    def test_get_url(self):

        # Test that you can download from URL
        url = self.s3.get_url(self.test_bucket, self.test_key)
        csv_table = Table.from_csv(url)
        assert_matching_tables(self.tbl, csv_table)

        # Test that the url expires
        url_short = self.s3.get_url(self.test_bucket,
                                    self.test_key,
                                    expires_in=1)
        time.sleep(2)
        with self.assertRaises(urllib.error.HTTPError) as cm:
            Table.from_csv(url_short)
        self.assertEqual(cm.exception.code, 403)
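
The short-lived URL tested above behaves like an S3 presigned URL. As a point of reference, this is a minimal sketch of generating one with boto3 directly (assuming parsons wraps boto3 here; the bucket and key names are placeholders):

import boto3

s3 = boto3.client("s3")
url = s3.generate_presigned_url(
    "get_object",
    Params={"Bucket": "my-bucket", "Key": "my-data.csv"},
    ExpiresIn=1,  # seconds; requests after this window fail with HTTP 403
)
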
Code example #4
    def test_put_and_get_file(self):

        # put_file is part of setup, so just testing getting it here

        path = self.s3.get_file(self.test_bucket, self.test_key)
        result_tbl = Table.from_csv(path)
        assert_matching_tables(self.tbl, result_tbl)
Code example #5
    def test_get_url(self):

        file_name = 'delete_me.csv'
        input_tbl = Table([['a'], ['1']])
        self.cloud.upload_table(input_tbl, TEMP_BUCKET_NAME, file_name)
        url = self.cloud.get_url(TEMP_BUCKET_NAME, file_name)
        download_tbl = Table.from_csv(url)
        assert_matching_tables(input_tbl, download_tbl)
Code example #6
File: test_etl.py  Project: bxjw/parsons
    def test_append_csv_compressed(self):
        path = self.tbl.to_csv(temp_file_compression='gzip')
        append_tbl = Table([{'first': 'Mary', 'last': 'Nichols'}])
        append_tbl.append_csv(path)

        result_tbl = Table.from_csv(path)
        # Combine tables, so we can check the resulting file
        self.tbl.concat(append_tbl)
        assert_matching_tables(self.tbl, result_tbl)
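
Below is a minimal sketch of appending a row to a gzip-compressed CSV with the standard library, to illustrate the behaviour append_csv is being tested for here. This is an assumption about the helper rather than parsons' implementation, and the file name is a placeholder.

import csv
import gzip

# Appending in text mode adds a new member to the gzip stream; readers
# transparently concatenate the members on decompression.
with gzip.open("people.csv.gz", "at", newline="") as f:
    csv.writer(f).writerow(["Mary", "Nichols"])

with gzip.open("people.csv.gz", "rt", newline="") as f:
    print(list(csv.reader(f)))
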
Code example #7
    def setUp(self):

        self.temp_schema = TEMP_SCHEMA
        self.db = Postgres()

        # Create a schema.
        setup_sql = f"""
                     DROP SCHEMA IF EXISTS {self.temp_schema} CASCADE;
                     CREATE SCHEMA {self.temp_schema};
                     """
        self.db.query(setup_sql)

        # Load dummy data to parsons tables
        self.table1 = Table.from_csv(f'{_dir}/test_data/sample_table_1.csv')
        self.table2 = Table.from_csv(f'{_dir}/test_data/sample_table_2.csv')

        # Create source table
        self.db.copy(self.table1, f'{self.temp_schema}.source')

        # Create DB Sync object
        self.db_sync = DBSync(self.db, self.db)
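
The schema reset in setUp boils down to two SQL statements. For anyone running them outside parsons, here is a minimal sketch with psycopg2 (the connection parameters and schema name below are placeholders; parsons' Postgres connector normally manages the connection itself):

import psycopg2

conn = psycopg2.connect(host="localhost", dbname="test", user="postgres", password="secret")
with conn, conn.cursor() as cur:
    cur.execute("DROP SCHEMA IF EXISTS my_temp_schema CASCADE;")
    cur.execute("CREATE SCHEMA my_temp_schema;")
conn.close()
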
Code example #8
File: test_sftp_ssh.py  Project: jekoule/parsons
def assert_file_matches_table(local_path, table):
    downloaded_tbl = Table.from_csv(local_path)
    assert_matching_tables(table, downloaded_tbl)
Code example #9
File: test_etl.py  Project: bxjw/parsons
    def _assert_expected_csv(self, path, orig_tbl):
        result_tbl = Table.from_csv(path)
        assert_matching_tables(orig_tbl, result_tbl)