# Imports used by the test modules below (the BigQuery/BigQueryError import
# path is an assumption; adjust it to this repo's actual package name).
import os
import re
import signal
import sys
import time
import unittest
from pprint import pprint

from bigquery import BigQuery, BigQueryError  # placeholder import path


class BigQueryTest(unittest.TestCase):

    def setUp(self):
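        """Create a fresh dataset and a table with nested and repeated fields.

        Table and view names get a timestamp suffix so repeated runs don't collide.
        """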
        self.project_id = os.getenv('PROJECT_ID')
        self.dataset_id = os.getenv('DATASET_ID', 'test_dataset')
        self.table_id = os.getenv('TABLE_ID', 'test_table') + '_' + str(int(time.time()))
        self.view_id = os.getenv('VIEW_ID', 'test_view') + '_' + str(int(time.time()))
        if self.project_id is None:
            print('PROJECT_ID is not defined.')
            sys.exit(1)
        self.bq = BigQuery(self.project_id)
        if self.bq.exists_dataset(self.dataset_id):
            self.bq.drop_dataset(self.dataset_id, delete_contents=True)
        self.bq.create_dataset(self.dataset_id)
        self.bq.dataset_id = self.dataset_id    # Set default datasetId
        schema = [
            { 'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED' },
            { 'name': 'name', 'type': 'STRING', 'mode': 'REQUIRED' },
            { 'name': 'birth', 'type': 'RECORD', 'mode': 'NULLABLE', 'fields': [
                { 'name': 'year', 'type': 'INTEGER', 'mode': 'REQUIRED' },
                { 'name': 'month', 'type': 'INTEGER', 'mode': 'REQUIRED' },
                { 'name': 'day', 'type': 'INTEGER', 'mode': 'REQUIRED' },
            ]},
            { 'name': 'url', 'type': 'STRING', 'mode': 'REPEATED' },
        ]
        self.bq.create_table(self.table_id, schema=schema)

    def tearDown(self):
        self.bq.drop_table(self.table_id)
        self.bq.drop_dataset(self.dataset_id, delete_contents=True)

    def test_error_invalid_rows(self):
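        """Rows that violate the schema make load() raise and load nothing."""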
        rows = [
            { 'id': 1, 'name': 'foo' },
            { 'id': 2 },
            { 'id': 'three', 'name': 'baz' },
        ]
        with self.assertRaises(BigQueryError):
            self.bq.load(self.table_id, rows)

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(0, len(res))
        pprint(res)

    def test_error_unknown_values(self):
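        """A field that is not in the schema is rejected by default."""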
        rows = [
            { 'id': 1, 'name': 'foo', 'unknown_field': 'unknown_value' },
        ]
        with self.assertRaises(BigQueryError):
            self.bq.load(self.table_id, rows)

    def test_normal_unknown_values(self):
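        """ignore_unknown_values=True drops the unknown field and loads the row."""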
        rows = [
            { 'id': 1, 'name': 'foo', 'unknown_field': 'unknown_value' },
        ]
        self.bq.load(self.table_id, rows, ignore_unknown_values=True)

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(1, len(res))
        pprint(res)

    def test_normal_async(self):
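        """An async load returns a job id; poll info_job() until the job is DONE."""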
        filepath = os.path.dirname(os.path.abspath(__file__)) + '/data.json'
        # NOTE: 'async' is a reserved word as of Python 3.7; on newer
        # interpreters the library would need a different keyword name.
        job_id = self.bq.load(self.table_id, filepath, async=True)
        self.assertTrue(re.match(r'job_', job_id))
        print(job_id)

        while True:
            res = self.bq.info_job(job_id)
            state = res['status']['state']
            print(state)
            if state == 'DONE':
                break
            time.sleep(2)

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(3, len(res))
        pprint(res)

    def test_normal_from_csv(self):
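        """Load from CSV with an explicit schema, skipping the header row."""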
        filepath = os.path.dirname(os.path.abspath(__file__)) + '/data.csv'
        schema = [
            { 'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED' },
            { 'name': 'name', 'type': 'STRING', 'mode': 'REQUIRED' },
        ]
        res = self.bq.load(self.table_id, filepath, schema=schema, skip_leading_rows=1)
        self.assertTrue(bool(res))

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(3, len(res))
        pprint(res)

    def test_normal_from_json(self):
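        """Load from a JSON file using the schema the table was created with."""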
        filepath = os.path.dirname(os.path.abspath(__file__)) + '/data.json'
        res = self.bq.load(self.table_id, filepath)
        self.assertTrue(bool(res))

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(3, len(res))
        pprint(res)

    def test_normal_from_obj(self):
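        """Load Python dicts directly, including a nested RECORD and a REPEATED field."""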
        rows = [
            { 'id': 1, 'name': 'foo' },
            { 'id': 2, 'name': 'bar', 'birth': { 'year': 2015, 'month': 10, 'day': 28 } },
            { 'id': 3, 'name': 'baz', 'url': [
                'http://www.yahoo.co.jp/',
                'http://www.google.co.jp/',
            ]}
        ]
        res = self.bq.load(self.table_id, rows)
        self.assertTrue(bool(res))

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(3, len(res))
        pprint(res)


class BigQueryTest(unittest.TestCase):

    def setUp(self):
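        """Same fixture as the load tests, except each test creates its own table."""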
        self.project_id = os.getenv('PROJECT_ID')
        self.dataset_id = os.getenv('DATASET_ID', 'test_dataset')
        self.table_id = os.getenv('TABLE_ID', 'test_table') + '_' + str(int(time.time()))
        self.view_id = os.getenv('VIEW_ID', 'test_view') + '_' + str(int(time.time()))
        if self.project_id is None:
            print('PROJECT_ID is not defined.')
            sys.exit(1)
        self.bq = BigQuery(self.project_id)
        if self.bq.exists_dataset(self.dataset_id):
            self.bq.drop_dataset(self.dataset_id, delete_contents=True)
        self.bq.create_dataset(self.dataset_id)
        self.bq.dataset_id = self.dataset_id    # Set default datasetId
        #schema = [
        #    { 'name': 'title', 'type': 'STRING', 'mode': 'REQUIRED' },
        #    { 'name': 'count', 'type': 'INTEGER', 'mode': 'REQUIRED' }
        #]
        #self.bq.create_table(self.table_id, schema=schema)

    def tearDown(self):
        self.bq.drop_table(self.table_id)
        self.bq.drop_dataset(self.dataset_id, delete_contents=True)

#    def test_error_allow_large_results(self):
#        schema = [
#            { 'name': 'word', 'type': 'STRING', 'mode': 'REQUIRED' },
#            { 'name': 'word_count', 'type': 'INTEGER', 'mode': 'REQUIRED' },
#        ]
#        self.bq.create_table(self.table_id, schema=schema)
#
#        query = 'SELECT word,word_count FROM [publicdata:samples.shakespeare] LIMIT 10'
#
#        # Cannot append to REQUIRED fields when allowLargeResults is True
#        with self.assertRaises(BigQueryError):
#            res = self.bq.insert_from_select(self.table_id, query, allow_large_results=True)

    def test_error_no_required_field(self):
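        """The query result has no value for a REQUIRED column, so the copy fails."""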
        schema = [
            { 'name': 'title', 'type': 'STRING', 'mode': 'REQUIRED' },
            { 'name': 'unique_words', 'type': 'INTEGER', 'mode': 'REQUIRED' },
            { 'name': 'required_field', 'type': 'STRING', 'mode': 'REQUIRED' },
        ]
        self.bq.create_table(self.table_id, schema=schema)

        query = 'SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words ' \
            + 'FROM [publicdata:samples.shakespeare]'

        # The query result provides no value for required_field
        with self.assertRaises(BigQueryError):
            self.bq.insert_from_select(self.table_id, query)

    def test_error_schema_mismatch(self):
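        """Query output columns are NULLABLE, which conflicts with the REQUIRED schema."""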
        schema = [
            { 'name': 'title', 'type': 'STRING', 'mode': 'REQUIRED' },
            { 'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED' },
        ]
        self.bq.create_table(self.table_id, schema=schema)

        query = 'SELECT title,id FROM [publicdata:samples.wikipedia] LIMIT 10'

        # Can't change mode from NULLABLE to REQUIRED (query results are NULLABLE)
        with self.assertRaises(BigQueryError):
            self.bq.insert_from_select(self.table_id, query)

    def test_normal_allow_large_results(self):
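        """allow_large_results succeeds when the destination columns are NULLABLE."""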
        schema = [
            { 'name': 'word', 'type': 'STRING', 'mode': 'NULLABLE' },
            { 'name': 'word_count', 'type': 'INTEGER', 'mode': 'NULLABLE' },
        ]
        self.bq.create_table(self.table_id, schema=schema)

        query = 'SELECT word,word_count FROM [publicdata:samples.shakespeare] LIMIT 10'
        res = self.bq.insert_from_select(self.table_id, query, allow_large_results=True)
        self.assertTrue(bool(res))
        pprint(res)

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(10, len(res))
        pprint(res)

    def test_normal_insert_into_exists_table(self):
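        """Append query results into an existing table with a matching schema."""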
        schema = [
            { 'name': 'word', 'type': 'STRING', 'mode': 'REQUIRED' },
            { 'name': 'word_count', 'type': 'INTEGER', 'mode': 'REQUIRED' },
        ]
        self.bq.create_table(self.table_id, schema=schema)

        query = 'SELECT word,word_count FROM [publicdata:samples.shakespeare] LIMIT 10'
        res = self.bq.insert_from_select(self.table_id, query)
        self.assertTrue(bool(res))
        pprint(res)

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(10, len(res))
        pprint(res)

    def test_normal_with_args(self):
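        """Spell out source and destination project/dataset/table as keyword arguments."""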
        query = 'SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words FROM shakespeare'
        res = self.bq.insert_from_select(
            dest_project_id=self.project_id, dest_dataset_id=self.dataset_id, dest_table_id=self.table_id, query=query,
            src_project_id='publicdata', src_dataset_id='samples'
        )
        self.assertTrue(bool(res))
        pprint(res)

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(10, len(res))
        pprint(res)

    def test_normal_async(self):
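        """An async query returns a job id; wait_job() blocks until it finishes."""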
        query = 'SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words ' \
            + 'FROM [publicdata:samples.shakespeare]'
        # NOTE: 'async' is a reserved word as of Python 3.7 (see the load tests).
        res = self.bq.insert_from_select(self.table_id, query, async=True)
        self.assertTrue(re.match(r'job_', res))
        print(res)

        self.bq.wait_job(res)

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(10, len(res))
        pprint(res)

    def test_normal(self):
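        """Synchronous insert_from_select; the destination table is created by the job."""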
        query = 'SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words ' \
            + 'FROM [publicdata:samples.shakespeare]'
        res = self.bq.insert_from_select(self.table_id, query)
        self.assertTrue(bool(res))
        pprint(res)

        res = self.bq.dump_table(self.table_id)
        self.assertEqual(10, len(res))
        pprint(res)


class BigQueryTest(unittest.TestCase):

    def setUp(self):
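        """Same fixture as the load tests: fresh dataset plus the nested-schema table."""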
        self.project_id = os.getenv('PROJECT_ID')
        self.dataset_id = os.getenv('DATASET_ID', 'test_dataset')
        self.table_id = os.getenv('TABLE_ID', 'test_table') + '_' + str(int(time.time()))
        self.view_id = os.getenv('VIEW_ID', 'test_view') + '_' + str(int(time.time()))
        if self.project_id is None:
            print('PROJECT_ID is not defined.')
            sys.exit(1)
        self.bq = BigQuery(self.project_id)
        if self.bq.exists_dataset(self.dataset_id):
            self.bq.drop_dataset(self.dataset_id, delete_contents=True)
        self.bq.create_dataset(self.dataset_id)
        self.bq.dataset_id = self.dataset_id    # Set default datasetId
        schema = [
            { 'name': 'id', 'type': 'INTEGER', 'mode': 'REQUIRED' },
            { 'name': 'name', 'type': 'STRING', 'mode': 'REQUIRED' },
            { 'name': 'birth', 'type': 'RECORD', 'mode': 'NULLABLE', 'fields': [
                { 'name': 'year', 'type': 'INTEGER', 'mode': 'REQUIRED' },
                { 'name': 'month', 'type': 'INTEGER', 'mode': 'REQUIRED' },
                { 'name': 'day', 'type': 'INTEGER', 'mode': 'REQUIRED' },
            ]},
            { 'name': 'url', 'type': 'STRING', 'mode': 'REPEATED' },
        ]
        self.bq.create_table(self.table_id, schema=schema)

    def tearDown(self):
        self.bq.drop_table(self.table_id)
        self.bq.drop_dataset(self.dataset_id, delete_contents=True)

    @staticmethod
    def sigalrm_handler(signum, frame):
        # SIGALRM watchdog used by wait_insert(); aborts a stuck poll loop.
        raise Exception("Timeout")

    def wait_insert(self):
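        """Poll until streamed rows become visible, with a 120-second watchdog.

        Streaming inserts can take a while to show up in table reads, so the
        tests poll dump_table() instead of asserting immediately.
        """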
        signal.signal(signal.SIGALRM, BigQueryTest.sigalrm_handler)
        signal.alarm(120)
        while True:
            res = self.bq.dump_table(self.table_id)
            if res:
                signal.alarm(0)
                return res
            print('sleep...')
            time.sleep(2)

    def test_error(self):
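        """Bad rows still raise with ignore_unknown_values on, and unknown
        fields still raise with skip_invalid_rows on."""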
        rows = [
            { 'id': 1, 'name': 'foo' },         # normal
            { 'id': 2 },                        # missing required field
            { 'id': 'three', 'name': 'baz' },   # invalid data type
        ]
        with self.assertRaises(BigQueryError):
            self.bq.insert(self.table_id, rows, ignore_unknown_values=True)

        rows = [
            { 'id': 1, 'name': 'foo' },
            { 'id': 2, 'name': 'bar', 'unknown_fields': 'unknown_value' },
        ]
        with self.assertRaises(BigQueryError):
            self.bq.insert(self.table_id, rows, skip_invalid_rows=True)

    def test_normal_skip_invalid_rows(self):
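        """skip_invalid_rows drops the two bad rows and streams only the valid one."""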
        rows = [
            { 'id': 1, 'name': 'foo' },         # normal
            { 'id': 2 },                        # missing required field
            { 'id': 'three', 'name': 'baz' },   # invalid data type
        ]
        res = self.bq.insert(self.table_id, rows, skip_invalid_rows=True)
        self.assertTrue(bool(res))

        res = self.wait_insert()
        self.assertEqual(1, len(res))
        pprint(res)

    def test_normal_ignore_unknown_values(self):
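        """ignore_unknown_values strips the unknown field and streams both rows."""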
        rows = [
            { 'id': 1, 'name': 'foo' },         # normal
            { 'id': 2, 'name': 'bar', 'unknown_field': 'unknown_value' },
        ]
        res = self.bq.insert(self.table_id, rows, ignore_unknown_values=True)
        self.assertTrue(bool(res))

        res = self.wait_insert()
        self.assertEqual(2, len(res))
        pprint(res)

    def test_normal(self):
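        """Plain streaming insert, including nested RECORD and REPEATED values."""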
        rows = [
            { 'id': 1, 'name': 'foo' },
            { 'id': 2, 'name': 'bar', 'birth': { 'year': 2015, 'month': 10, 'day': 28 } },
            { 'id': 3, 'name': 'baz', 'url': [
                'http://www.yahoo.co.jp/',
                'http://www.google.co.jp/',
            ]}
        ]
        res = self.bq.insert(self.table_id, rows)
        self.assertTrue(bool(res))

        res = self.wait_insert()
        self.assertEqual(3, len(res))
        pprint(res)