Code example #1
    def test_invalid_sql_provided(self):
        """
        Test that the converter fails when provided with a .sql file
        missing the CREATE TABLE statement.
        """

        invalid_sql_case = TEST_DATA_PATH + "/invalid_sql_case.sql"
        with self.assertRaises(ValueError) as ctx:
            converter.convert(invalid_sql_case, None, None)

        expected = f"File {TEST_DATA_PATH}/invalid_sql_case.sql does not contain a CREATE TABLE STATEMENT"
        self.assertEqual(str(ctx.exception), expected)
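A minimal sketch of reproducing this failure outside the test suite; the fixture body below is hypothetical, and the converter module is assumed to be importable:

    # Sketch: write a hypothetical .sql fixture that lacks a CREATE TABLE
    # statement and confirm that converter.convert() rejects it.
    import os
    import tempfile

    import converter  # the module under test, assumed importable

    sql_body = "INSERT INTO users (id) VALUES (1);"  # no CREATE TABLE here
    with tempfile.NamedTemporaryFile("w", suffix=".sql", delete=False) as tmp:
        tmp.write(sql_body)
    try:
        converter.convert(tmp.name, None, None)
    except ValueError as err:
        print(err)  # "File ... does not contain a CREATE TABLE STATEMENT"
    finally:
        os.unlink(tmp.name)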
Code example #2
    def test_invalid_type_mappings_provided(self):
        """
        Test that the converter fails when the custom type map contains an
        invalid data type. The map is the one passed with
        '--extra-type-mappings'.
        """

        type_mappings_case = TEST_DATA_PATH + "/type_mappings_case.sql"
        with self.assertRaises(ValueError) as ctx:
            converter.convert(type_mappings_case, INVALID_TYPES_MAP_PATH,
                              None)
        expected = "The provided data types are not valid in BigQuery: \n[{'type': 'sTRING', 'name': 'created_at', 'mode': 'REQUIRED'}]\n"
        self.assertEqual(str(ctx.exception), expected)
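The format of the file at INVALID_TYPES_MAP_PATH is not shown in the test; a plausible sketch, assuming the map keys MySQL column types to BigQuery types (the miscased 'sTRING' in the error message points to a casing mistake in the map):

    # Hypothetical contents of INVALID_TYPES_MAP_PATH; the key format is an
    # assumption -- the error only tells us the map yielded the invalid
    # BigQuery type 'sTRING' for the 'created_at' column.
    import json

    invalid_type_map = {"datetime": "sTRING"}  # should be "STRING"
    with open("invalid_types_map.json", "w") as map_file:
        json.dump(invalid_type_map, map_file, indent=4)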
Code example #3
    def test_output_correctly_generated_virtual(self):
        """
        Test that the converted MySQL schema matches the expected BigQuery
        JSON one when VIRTUAL fields are dropped (fourth argument True).
        """

        virtual_case_sql = TEST_DATA_PATH + "/virtual_case.sql"
        big_query_json = TEST_DATA_PATH + "/virtual_case.json"
        with open(big_query_json) as json_file:
            bigquery_data = json.load(json_file)

        _, big_query_list = converter.convert(virtual_case_sql, None, None,
                                              True)
        self.assertEqual(
            bigquery_data,
            big_query_list,
            msg=f"\nFAILED AT FOLLOWING SCHEMA: {virtual_case_sql}")
Code example #4
    def test_output_correctly_generated_default(self):
        """
        Test that the converted MySQL schema matches the expected BigQuery
        JSON one for the default case.
        """

        # Point to the default `.sql` case and its corresponding `.json`
        default_case_sql = TEST_DATA_PATH + "/default_case.sql"
        big_query_json = TEST_DATA_PATH + "/default_case.json"
        with open(big_query_json) as json_file:
            bigquery_data = json.load(json_file)

        _, big_query_list = converter.convert(default_case_sql, None, None)
        self.assertEqual(
            bigquery_data,
            big_query_list,
            msg=f"\nFAILED AT FOLLOWING SCHEMA: {default_case_sql}")
Code example #5
    def test_output_field_mappings_case(self):
        """
        Test that the converted schema is correct when a custom field type
        mapping is provided. The map is passed via the CLI '-f',
        '--extra-field-mappings' option.
        """

        # Point to the field_mappings_case `.sql` case and its corresponding `.json`
        field_mappings_case_sql = TEST_DATA_PATH + "/field_mappings_case.sql"
        big_query_json = TEST_DATA_PATH + "/field_mappings_case.json"
        with open(big_query_json) as json_file:
            bigquery_data = json.load(json_file)

        _, big_query_list = converter.convert(field_mappings_case_sql, None,
                                              FIELD_MAP_PATH)
        self.assertEqual(
            bigquery_data,
            big_query_list,
            msg=f"\nFAILED AT FOLLOWING SCHEMA: {field_mappings_case_sql}")
Code example #6
    )
    parser.add_argument(
        '-f',
        '--extra-field-mappings',
        action='store',
        help='Path to a .json file used to assign a type to a specific field.')
    parser.add_argument(
        '-d',
        '--drop-virtual-fields',
        # A boolean switch: type=bool with action='store' would treat any
        # non-empty string (even "False") as True, so use a flag instead.
        action='store_true',
        help='The generated .json file will not contain VIRTUAL fields.')

    args = parser.parse_args()
    filepath = args.Path
    output_path = args.output_path
    extra_type_mappings = args.extra_type_mappings
    extra_field_mappings = args.extra_field_mappings

    table_name, big_query_list = converter.convert(filepath,
                                                   extra_type_mappings,
                                                   extra_field_mappings,
                                                   args.drop_virtual_fields)

    if output_path is None:
        print(f'\nSchema processed for table: {table_name}\n')
        print(json.dumps(big_query_list, indent=4))
    else:
        with open(f'{output_path}/{table_name}.json', 'w') as outfile:
            json.dump(big_query_list, outfile, indent=4)
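The CLI above is a thin wrapper around converter.convert; a sketch of driving the same conversion programmatically, assuming the converter module is importable (the input path is hypothetical):

    import json

    import converter  # assumed importable, as in the tests above

    table_name, big_query_list = converter.convert(
        "schemas/users.sql",  # hypothetical input .sql file
        None,                 # no extra type mappings
        None,                 # no extra field mappings
        False)                # keep VIRTUAL fields
    print(f"Schema processed for table: {table_name}")
    print(json.dumps(big_query_list, indent=4))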