    def test_invalid_path_cannotwrite(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        A = read_csv_metadata(path_a)
        # the 'temp' subdirectory is never created, so writing to this path should fail
        p = os.sep.join([sndbx_path, 'temp', 'A_saved.pkl'])
        creat_dir_ifnot_exists(sndbx_path)
        with self.assertRaises(AssertionError):
            save_object(A, p)
    def test_valid_object_1(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        A = read_csv_metadata(path_a)
        p = os.sep.join([sndbx_path, 'A_saved.pkl'])
        creat_dir_ifnot_exists(sndbx_path)
        save_object(A, p)

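        # reload the pickled dataframe and verify it equals the original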
        A1 = load_object(p)
        self.assertEqual(A.equals(A1), True)
    def test_valid_path_df_chk_catalog_1(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        A = read_csv_metadata(path_a)

        p = os.sep.join([sndbx_path, 'A_saved.csv'])

        creat_dir_ifnot_exists(sndbx_path)
        to_csv_metadata(A, p)

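        # re-read the saved CSV and verify the key recorded in the catalog is preserved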
        A1 = read_csv_metadata(p)

        self.assertEqual(cm.get_key(A1), cm.get_key(A),
                         'The keys in the catalog are not the same')
    def test_valid_path_df_overwrite(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        A = read_csv_metadata(path_a)

        p = os.sep.join([sndbx_path, 'A_saved.csv'])

        creat_dir_ifnot_exists(sndbx_path)
        to_csv_metadata(A, p)
        to_csv_metadata(A, p)

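        # the second write overwrites the first; the catalog key should still round-trip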
        A1 = read_csv_metadata(p)

        self.assertEqual(cm.get_key(A1), cm.get_key(A),
                         'The keys in the catalog are not the same')
    def test_valid_path_df_chk_catalog_2(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        A = read_csv_metadata(path_a)
        B = read_csv_metadata(path_b, key='ID')

        C = read_csv_metadata(path_c, ltable=A, rtable=B)

        p = os.sep.join([sndbx_path, 'C_saved.csv'])
        creat_dir_ifnot_exists(sndbx_path)
        to_csv_metadata(C, p)

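        # re-read the saved candidate set with its ltable/rtable and compare all catalog properties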
        C1 = read_csv_metadata(p, ltable=A, rtable=B)

        self.assertEqual(cm.get_all_properties(C1), cm.get_all_properties(C),
                         'The properties in the catalog are not the same')
    def test_valid_path_df_chk_metadatafile_3(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        A = read_csv_metadata(path_a)

        p = os.sep.join([sndbx_path, 'A_saved.csv'])
        creat_dir_ifnot_exists(sndbx_path)
        to_csv_metadata(A, p, metadata_extn='mdx')

        # the metadata file should use the custom 'mdx' extension and match the expected reference file
        p_meta_1 = os.sep.join([sndbx_path, 'A_saved.mdx'])
        m1 = _get_metadata_from_file(p_meta_1)

        p_meta_2 = os.sep.join([io_datasets_path, 'expected_A.metadata'])
        m2 = _get_metadata_from_file(p_meta_2)

        self.assertEqual(m1, m2, 'The metadata information is not the same.')
    def test_valid_object_2(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        A = read_csv_metadata(path_a)
        B = read_csv_metadata(path_b, key='ID')
        feature_table = get_features_for_blocking(A, B, validate_inferred_attr_types=False)
        rb = RuleBasedBlocker()
        rb.add_rule('zipcode_zipcode_exm(ltuple, rtuple) != 1', feature_table)
        C = rb.block_tables(A, B, show_progress=False)
        self.assertEqual(len(C), 15)
        p = os.sep.join([sndbx_path, 'C.pkl'])
        creat_dir_ifnot_exists(sndbx_path)
        save_object(rb, p)

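        # reload the pickled blocker and verify it produces the same candidate set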
        rb1 = load_object(p)
        C1 = rb1.block_tables(A, B, show_progress=False)
        self.assertEqual(C.equals(C1), True)
    def test_valid_path_df_chk_metadatafile_2(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        A = read_csv_metadata(path_a)
        B = read_csv_metadata(path_b, key='ID')
        C = read_csv_metadata(path_c, ltable=A, rtable=B)

        p = os.sep.join([sndbx_path, 'C_saved.csv'])
        creat_dir_ifnot_exists(sndbx_path)
        to_csv_metadata(C, p)

        # compare the metadata file written alongside the CSV against the expected reference file
        p_meta_1 = os.sep.join([sndbx_path, 'C_saved.metadata'])
        m1 = _get_metadata_from_file(p_meta_1)

        p_meta_2 = os.sep.join([io_datasets_path, 'expected_C.metadata'])
        m2 = _get_metadata_from_file(p_meta_2)

        self.assertEqual(m1, m2, 'The metadata information is not the same.')
    def test_invalid_path_df(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        creat_dir_ifnot_exists(sndbx_path)
        # neither a dataframe nor a path is given, so the call should be rejected
        with self.assertRaises(AssertionError):
            to_csv_metadata(None, None)
    def test_invalid_df_2(self):
        cm.del_catalog()
        del_files_in_dir(sndbx_path)
        p = os.sep.join([sndbx_path, 'A_saved.csv'])
        creat_dir_ifnot_exists(sndbx_path)
        # a valid path but no dataframe should be rejected
        with self.assertRaises(AssertionError):
            to_csv_metadata(None, p)