def test_valid_mapping_array_of_scalar(self):
    """An _ARRAY field whose destination table maps the foreign key and a
    scalar value column passes validation without raising."""
    spec = {
        'testdb': {
            'testcol': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                'a': {'type': '_ARRAY', 'fk': 'id_testcol', 'dest': 'testcol_a'},
            },
            # Destination table: carries its own pk, the back-reference to
            # the parent, and the scalar payload column.
            'testcol_a': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                'id_testcol': {'type': 'INT'},
                'scalar': {'type': 'INT'},
            },
        },
    }
    mappings.validate_mapping(spec)
def __init__(self, url, unique_key='_id', auto_commit_interval=None,
             chunk_size=100, **kwargs):
    """Open the PostgreSQL and MongoDB connections, load and validate the
    collection-to-table mapping file, and initialise the target schema.

    Keyword arguments:
        mongoUrl    -- mandatory MongoDB connection string.
        quiet       -- optional flag stored on the instance (default False).
        mappingFile -- optional path to the mapping JSON file; falls back
                       to DEFAULT_MAPPINGS_JSON_FILE_NAME.

    Raises InvalidConfiguration when 'mongoUrl' is missing or the mapping
    file does not exist.
    """
    # EAFP lookup of the one mandatory keyword argument.
    try:
        mongo_url = kwargs['mongoUrl']
    except KeyError:
        raise InvalidConfiguration("The MongoUrl parameter is mandatory.")
    self.url = url
    self.unique_key = unique_key
    self.auto_commit_interval = auto_commit_interval
    self.chunk_size = chunk_size
    self._formatter = DocumentFlattener()
    self.pgsql = psycopg2.connect(url)
    self.insert_accumulator = {}
    self.client = MongoClient(mongo_url)
    self.quiet = kwargs.get('quiet', False)
    mapping_path = kwargs.get('mappingFile', DEFAULT_MAPPINGS_JSON_FILE_NAME)
    # Teach psycopg2 how to serialise Mongo ObjectId values.
    register_adapter(ObjectId, object_id_adapter)
    if not os.path.isfile(mapping_path):
        raise InvalidConfiguration("no mapping file found at " + mapping_path)
    with open(mapping_path) as mappings_file:
        self.mappings = json.load(mappings_file)
    validate_mapping(self.mappings)
    self.pgsql.set_session(deferrable=True)
    self._init_schema()
def test_valid_mapping_array_nested(self):
    """An array nested inside another array's destination table validates
    when the grandchild foreign key is SERIAL (matching the auto-generated
    parent key)."""
    spec = {
        'testdb': {
            'testcol': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                'a': {'type': '_ARRAY', 'fk': 'id_testcol', 'dest': 'testcol_a'},
            },
            # First-level destination itself contains a nested array field.
            'testcol_a': {
                'pk': '_id',
                'id_testcol': {'type': 'INT'},
                'b': {'type': '_ARRAY', 'fk': 'id_testcol_a', 'dest': 'testcol_b'},
            },
            'testcol_b': {
                'pk': '_id',
                'id_testcol_a': {'type': 'SERIAL'},
            },
        },
    }
    mappings.validate_mapping(spec)
def test_invalid_mapping_array_of_scalar_value_field_not_mapped(self):
    """An _ARRAY_OF_SCALARS whose declared valueField ('scalar') has no
    column mapping in the destination table must be rejected."""
    spec = {
        'testdb': {
            'testcol': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                'a': {
                    'type': '_ARRAY_OF_SCALARS',
                    'fk': 'id_testcol',
                    'dest': 'testcol_a',
                    'valueField': 'scalar',
                },
            },
            # Note: no 'scalar' column mapped here — that is the defect.
            'testcol_a': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                'id_testcol': {'type': 'INT'},
            },
        },
    }
    self.assertRaises(mappings.InvalidConfiguration,
                      mappings.validate_mapping, spec)
def test_invalid_mapping_array_foreign_key_type_mismatch(self):
    """A foreign-key column whose type (TEXT) disagrees with the parent
    primary-key type (INT) must be rejected."""
    spec = {
        'testdb': {
            'testcol': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                'a': {'type': '_ARRAY', 'fk': 'id_testcol', 'dest': 'testcol_a'},
            },
            'testcol_a': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                # TEXT here cannot reference the parent's INT pk.
                'id_testcol': {'type': 'TEXT'},
            },
        },
    }
    self.assertRaises(mappings.InvalidConfiguration,
                      mappings.validate_mapping, spec)
def __init__(self, url, unique_key='_id', auto_commit_interval=None,
             chunk_size=100, **kwargs):
    """Open the PostgreSQL connection, load and validate the mapping
    file, and initialise the target schema.

    This variant is mid-migration to a plain id + jsonb target: the
    mandatory-'mongoUrl' check and the MongoClient connection are
    intentionally disabled (see TODOs below).

    Raises InvalidConfiguration when the mapping file does not exist.
    """
    # FIX: `print kwargs` is Python 2 print-statement syntax and a
    # SyntaxError under Python 3; the print() function works on both.
    print(kwargs)
    self.url = url
    self.unique_key = unique_key
    self.auto_commit_interval = auto_commit_interval
    self.chunk_size = chunk_size
    self._formatter = DocumentFlattener()
    self.pgsql = psycopg2.connect(url)
    self.insert_accumulator = {}
    # NOTE(review): the MongoClient connection (kwargs['mongoUrl']) was
    # deliberately disabled here pending the jsonb rewrite.
    register_adapter(ObjectId, object_id_adapter)
    # TODO - remove mapping checks as we are just going to
    # translate to jsonb with an id column
    if not os.path.isfile(MAPPINGS_JSON_FILE_NAME):
        raise InvalidConfiguration("no mapping file found")
    with open(MAPPINGS_JSON_FILE_NAME) as mappings_file:
        self.mappings = json.load(mappings_file)
    validate_mapping(self.mappings)
    # TODO - this should create a table with an id pk column and
    # also a jsonb 'jdoc' column only
    self._init_schema()
def test_valid_mapping_pk(self):
    """The minimal valid mapping: one collection whose declared pk field
    is itself mapped to a column type."""
    spec = {
        'testdb': {
            'testcol': {
                'pk': '_id',
                '_id': {'type': 'INT'},
            },
        },
    }
    mappings.validate_mapping(spec)
def test_invalid_mapping_array_missing_dest(self):
    """An _ARRAY field without a 'dest' destination table must be
    rejected."""
    spec = {
        'testdb': {
            'testcol': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                # 'dest' is deliberately absent.
                'a': {'type': '_ARRAY', 'fk': 'id_testcol'},
            },
        },
    }
    self.assertRaises(mappings.InvalidConfiguration,
                      mappings.validate_mapping, spec)
def test_invalid_mapping_array_nested(self):
    """The nested-array counterpart to the valid case: the grandchild
    foreign key is INT instead of SERIAL, which must be rejected."""
    spec = {
        'testdb': {
            'testcol': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                'a': {'type': '_ARRAY', 'fk': 'id_testcol', 'dest': 'testcol_a'},
            },
            'testcol_a': {
                'pk': '_id',
                'id_testcol': {'type': 'INT'},
                'b': {'type': '_ARRAY', 'fk': 'id_testcol_a', 'dest': 'testcol_b'},
            },
            'testcol_b': {
                'pk': '_id',
                # INT does not match testcol_a's auto-generated key type.
                'id_testcol_a': {'type': 'INT'},
            },
        },
    }
    self.assertRaises(mappings.InvalidConfiguration,
                      mappings.validate_mapping, spec)
def test_valid_mapping_pk_auto_generated(self):
    """A destination table whose declared pk ('id') has no explicit column
    mapping is still valid — the key is auto-generated."""
    spec = {
        'testdb': {
            'testcol': {
                'pk': '_id',
                '_id': {'type': 'INT'},
                'a': {'type': '_ARRAY', 'dest': 'col_array', 'fk': 'id_testcol'},
            },
            'col_array': {
                'pk': 'id',
                'id_testcol': {'type': 'INT'},
            },
        },
    }
    mappings.validate_mapping(spec)
def test_invalid_mapping_missing_pk_field(self):
    """A collection that declares a pk but maps no columns at all must be
    rejected."""
    spec = {'testdb': {'testcol': {'pk': '_id'}}}
    self.assertRaises(mappings.InvalidConfiguration,
                      mappings.validate_mapping, spec)