def test_get_metadata(self):
    """ META (CLIENTS) : Gets all metadata for the given did """
    # Fresh DID so the test is isolated from any pre-existing metadata.
    tmp_name = 'name_%s' % generate_uuid()
    self.did_client.add_did(scope=self.tmp_scope, name=tmp_name, type="DATASET")

    # Test JSON case: generic (arbitrary-key) metadata stored by the JSON plugin.
    # Skipped when the backend database has no JSON support.
    if json_implemented(session=self.session):
        value1 = "value_" + str(generate_uuid())
        value2 = "value_" + str(generate_uuid())
        self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key1", value=value1)
        self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key2", value=value2)
        metadata = self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name, plugin="JSON")
        # Only the two JSON keys are expected from the JSON plugin alone.
        assert len(metadata) == 2
        assert metadata['key1'] == value1
        assert metadata['key2'] == value2

    # Test DID_COLUMNS case: 'project' is a fixed column on the DID table,
    # handled by the column-based plugin rather than the JSON one.
    self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key='project', value='data12_14TeV')
    assert self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name)['project'] == 'data12_14TeV'

    # Test Mixed case: plugin="ALL" should merge column-based and JSON metadata.
    if json_implemented(session=self.session):
        all_metadata = self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name, plugin="ALL")
        assert all_metadata['key1'] == value1
        assert all_metadata['key2'] == value2
        assert all_metadata['project'] == "data12_14TeV"
def test_OperatorsEqualNotEqual(self, session=None):
    """Check FilterEngine '=' and '!=' operators against both metadata backends."""
    # Plugin: DID (column-based metadata on the DataIdentifier model)
    #
    # did3 gets no run_number, so its value stays NULL.
    did_name1 = self._create_tmp_DID()
    did_name2 = self._create_tmp_DID()
    did_name3 = self._create_tmp_DID()
    set_metadata(scope=self.tmp_scope, name=did_name1, key='run_number', value=1)
    set_metadata(scope=self.tmp_scope, name=did_name2, key='run_number', value=2)

    dids = []
    q = FilterEngine('run_number=1', model_class=models.DataIdentifier).create_sqla_query(
        additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3), dids)).count(True), 1)

    dids = []
    q = FilterEngine('run_number!=1', model_class=models.DataIdentifier).create_sqla_query(
        additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3), dids)).count(True), 2)  # 2, 3 (NULL counted in not equals)

    # Plugin: JSON (arbitrary keys in the DidMeta json column)
    #
    if json_implemented(session=session):
        did_name1 = self._create_tmp_DID()
        did_name2 = self._create_tmp_DID()
        did_name3 = self._create_tmp_DID()
        set_metadata(scope=self.tmp_scope, name=did_name1, key='testkeyint1', value=1)
        set_metadata(scope=self.tmp_scope, name=did_name2, key='testkeyint2', value=2)
        set_metadata(scope=self.tmp_scope, name=did_name3, key='testkeyint3', value=2)

        dids = []
        q = FilterEngine('testkeyint1=1', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3), dids)).count(True), 1)

    # '!=' on a JSON key is not supported on Oracle, hence the extra dialect guard.
    if session.bind.dialect.name != 'oracle' and json_implemented(session=session):
        dids = []
        q = FilterEngine('testkeyint1!=1', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3), dids)).count(True), 2)
def test_set_metadata(self):
    """ META (CLIENTS) : Adds a fully set json column to a did, updates if some keys present """
    tmp_name = 'name_%s' % generate_uuid()
    self.did_client.add_did(scope=self.tmp_scope, name=tmp_name, type="DATASET")

    # Test JSON case: three arbitrary keys set one-by-one should all be
    # retrievable from the JSON plugin afterwards.
    if json_implemented(session=self.session):
        value1 = "value_" + str(generate_uuid())
        value2 = "value_" + str(generate_uuid())
        value3 = "value_" + str(generate_uuid())
        self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key1", value=value1)
        self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key2", value=value2)
        self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key="key3", value=value3)
        metadata = self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name, plugin="JSON")
        assert len(metadata) == 3
        assert metadata['key1'] == value1
        assert metadata['key2'] == value2
        assert metadata['key3'] == value3

    # Test DID_COLUMNS case: 'project' is served by the column-based plugin.
    self.did_client.set_metadata(scope=self.tmp_scope, name=tmp_name, key='project', value='data12_12TeV')
    assert self.did_client.get_metadata(scope=self.tmp_scope, name=tmp_name)['project'] == 'data12_12TeV'
def test_AndGroups(self, session=None):
    """Check FilterEngine comma-separated (AND-ed) filter groups on both backends."""
    # Plugin: DID
    #
    # did1: run_number only; did2: project only; did3: both.
    did_name1 = self._create_tmp_DID()
    did_name2 = self._create_tmp_DID()
    did_name3 = self._create_tmp_DID()
    set_metadata(scope=self.tmp_scope, name=did_name1, key='run_number', value='1')
    set_metadata(scope=self.tmp_scope, name=did_name2, key='project', value="test")
    set_metadata(scope=self.tmp_scope, name=did_name3, key='run_number', value='1')
    set_metadata(scope=self.tmp_scope, name=did_name3, key='project', value="test")

    dids = []
    q = FilterEngine('run_number = 1, project = test', model_class=models.DataIdentifier).create_sqla_query(
        additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3), dids)).count(True), 1)  # 3

    dids = []
    q = FilterEngine('run_number = 1, project != test', model_class=models.DataIdentifier).create_sqla_query(
        additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3), dids)).count(True), 1)  # 1

    # Plugin: JSON
    #
    # Oracle excluded: '!=' on JSON keys is not supported there.
    if session.bind.dialect.name != 'oracle' and json_implemented(session=session):
        did_name1 = self._create_tmp_DID()
        did_name2 = self._create_tmp_DID()
        did_name3 = self._create_tmp_DID()
        set_metadata(scope=self.tmp_scope, name=did_name1, key='testkeyint1', value='1')
        set_metadata(scope=self.tmp_scope, name=did_name2, key='testkeystr1', value="test")
        set_metadata(scope=self.tmp_scope, name=did_name3, key='testkeyint1', value='1')
        set_metadata(scope=self.tmp_scope, name=did_name3, key='testkeystr1', value="test")

        dids = []
        q = FilterEngine('testkeyint1 = 1, testkeystr1 = test', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3), dids)).count(True), 1)  # 3

        dids = []
        q = FilterEngine('testkeyint1 = 1, testkeystr1 != test', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3), dids)).count(True), 1)  # 1
def set_metadata_bulk(self, scope, name, meta, recursive=False, session=None):
    """
    Add or update multiple metadata keys in the DID's JSON metadata column.

    :param scope: The scope of the data identifier.
    :param name: The name of the data identifier.
    :param meta: Dictionary of key/value pairs to merge into the existing metadata.
    :param recursive: Unused here; accepted for interface compatibility — TODO confirm intended semantics.
    :param session: The database session in use.
    :raises NotImplementedError: if the backend database has no JSON support.
    :raises DataIdentifierNotFound: if the DID does not exist.
    """
    if not json_implemented(session=session):
        raise NotImplementedError

    # The DID itself must exist before metadata can be attached to it.
    if session.query(models.DataIdentifier).filter_by(
            scope=scope, name=name).one_or_none() is None:
        raise exception.DataIdentifierNotFound(
            "Data identifier '%s:%s' not found" % (scope, name))

    row_did_meta = session.query(models.DidMeta).filter_by(
        scope=scope, name=name).scalar()
    if row_did_meta is None:
        # Add metadata column to new table (if not already present)
        row_did_meta = models.DidMeta(scope=scope, name=name)
        row_did_meta.save(session=session, flush=False)

    existing_meta = {}
    if hasattr(row_did_meta, 'meta'):
        if row_did_meta.meta:
            existing_meta = row_did_meta.meta

    # Oracle returns a string instead of a dict
    if session.bind.dialect.name in ['oracle', 'sqlite'] and existing_meta:
        existing_meta = json_lib.loads(existing_meta)

    # Merge: new keys override existing ones with the same name.
    for key, value in meta.items():
        existing_meta[key] = value

    # Null out the column and flush first so the subsequent reassignment is
    # detected as a change by the ORM — do not reorder these statements.
    row_did_meta.meta = None
    session.flush()

    # Oracle insert takes a string as input
    if session.bind.dialect.name in ['oracle', 'sqlite']:
        existing_meta = json_lib.dumps(existing_meta)

    row_did_meta.meta = existing_meta
    row_did_meta.save(session=session, flush=True)
def get_metadata(self, scope, name, session=None):
    """
    Get data identifier metadata (JSON)

    :param scope: The scope name.
    :param name: The data identifier name.
    :param session: The database session in use.
    :returns: Dict of metadata; empty dict when no metadata row exists.
    :raises NotImplementedError: if the backend database has no JSON support.
    """
    if not json_implemented(session=session):
        raise NotImplementedError
    try:
        row = session.query(models.DidMeta).filter_by(scope=scope, name=name).one()
    except NoResultFound:
        # No metadata row at all for this DID.
        return {}
    meta = row.meta
    # Oracle/SQLite store the JSON column as a string; decode before returning.
    if session.bind.dialect.name in ['oracle', 'sqlite']:
        return json_lib.loads(meta)
    return meta
def list_dids(self, scope, filters, did_type='collection', ignore_case=False, limit=None, offset=None, long=False, recursive=False, session=None):
    """
    Yield data identifiers whose JSON metadata matches all key/value filters.

    :param scope: Scope to restrict the search to, or None for all scopes.
    :param filters: Dict of metadata key -> value pairs that must all match.
    :param long: If True, yield dicts with scope/name plus placeholder fields;
                 otherwise yield bare names.
    :param session: The database session in use.
    :raises NotImplementedError: if the backend database has no JSON support.
    """
    # Currently for sqlite only add, get and delete is implemented.
    if not json_implemented(session=session):
        raise NotImplementedError
    query = session.query(models.DidMeta)
    if scope is not None:
        query = query.filter(models.DidMeta.scope == scope)
    # 'name' is not a JSON key; drop it so it is not matched against the meta column.
    # NOTE: this mutates the caller's filters dict.
    filters.pop('name', None)
    for k, v in filters.items():  # py3 idiom: avoids the six.iteritems shim
        if session.bind.dialect.name == 'oracle':
            # NOTE(review): key and value are interpolated directly into raw SQL.
            # If filters can come from untrusted input this is an injection risk;
            # prefer bind parameters with json_exists' PASSING clause.
            query = query.filter(
                text("json_exists(meta,'$?(@.{} == \"{}\")')".format(k, v)))
        else:
            query = query.filter(
                cast(models.DidMeta.meta[k], String) == type_coerce(v, JSON))
    if long:
        for row in query.yield_per(5):
            yield {
                'scope': row.scope,
                'name': row.name,
                'did_type': 'Info not available in JSON Plugin',
                'bytes': 'Info not available in JSON Plugin',
                'length': 'Info not available in JSON Plugin'
            }
    else:
        for row in query.yield_per(5):
            yield row.name
def delete_metadata(self, scope, name, key, session=None):
    """
    Delete a key from the metadata column

    :param scope: the scope of did
    :param name: the name of the did
    :param key: the key to be deleted
    :param session: the database session in use
    :raises NotImplementedError: if the backend database has no JSON support.
    :raises KeyNotFound: if the key (or any metadata at all) is absent.
    :raises DataIdentifierNotFound: if no metadata row exists for the DID.
    """
    if not json_implemented(session=session):
        raise NotImplementedError
    try:
        row = session.query(models.DidMeta).filter_by(scope=scope, name=name).one()
        existing_meta = getattr(row, 'meta')
        # Oracle returns a string instead of a dict
        if session.bind.dialect.name in ['oracle', 'sqlite'] and existing_meta is not None:
            existing_meta = json_lib.loads(existing_meta)

        # Guard the NULL-column case explicitly: 'key in None' would raise a
        # TypeError on dialects that return the column as a dict.
        if existing_meta is None or key not in existing_meta:
            raise exception.KeyNotFound(key)

        existing_meta.pop(key, None)

        # Null out and flush first so the reassignment below registers as a
        # change on the JSON column — do not reorder.
        row.meta = None
        session.flush()

        # Oracle insert takes a string as input
        if session.bind.dialect.name in ['oracle', 'sqlite']:
            existing_meta = json_lib.dumps(existing_meta)

        row.meta = existing_meta
    except NoResultFound:
        raise exception.DataIdentifierNotFound(
            "Key not found for data identifier '%(scope)s:%(name)s'" % locals())
def manages_key(self, key, session=None):
    """
    Report whether this plugin can handle the given metadata key.

    The JSON plugin accepts any key, so this reduces to whether the backend
    database supports JSON metadata at all.

    :param key: The metadata key (unused; any key is acceptable).
    :param session: The database session in use.
    :returns: True if JSON metadata is supported, False otherwise.
    """
    json_supported = json_implemented(session=session)
    return json_supported
def test_undertaker(self):
    """ UNDERTAKER (CORE): Test the undertaker. """
    tmp_scope = InternalScope('mock', **self.vo)
    jdoe = InternalAccount('jdoe', **self.vo)
    root = InternalAccount('root', **self.vo)

    nbdatasets = 5
    nbfiles = 5
    rse = 'MOCK'
    rse_id = get_rse_id('MOCK', **self.vo)

    # Unlimited quota for jdoe on the target RSE so rule creation cannot fail on limits.
    set_local_account_limit(jdoe, rse_id, -1)

    # Two batches of expired datasets (lifetime=-1): dsns1 without rules at
    # creation time, dsns2 with a replication rule attached up front.
    dsns1 = [{
        'name': 'dsn_%s' % generate_uuid(),
        'scope': tmp_scope,
        'type': 'DATASET',
        'lifetime': -1
    } for _ in range(nbdatasets)]
    dsns2 = [{
        'name': 'dsn_%s' % generate_uuid(),
        'scope': tmp_scope,
        'type': 'DATASET',
        'lifetime': -1,
        'rules': [{
            'account': jdoe,
            'copies': 1,
            'rse_expression': rse,
            'grouping': 'DATASET'
        }]
    } for _ in range(nbdatasets)]

    add_dids(dids=dsns1 + dsns2, account=root)

    # arbitrary keys do not work without JSON support (sqlite, Oracle < 12)
    if json_implemented():
        # Add generic metadata on did
        set_metadata(tmp_scope, dsns1[0]['name'], "test_key", "test_value")

    # Attach nbfiles files (with far-future tombstones) to every dataset.
    replicas = list()
    for dsn in dsns1 + dsns2:
        files = [{
            'scope': tmp_scope,
            'name': 'file_%s' % generate_uuid(),
            'bytes': 1,
            'adler32': '0cc737eb',
            'tombstone': datetime.utcnow() + timedelta(weeks=2),
            'meta': {
                'events': 10
            }
        } for _ in range(nbfiles)]
        attach_dids(scope=tmp_scope, name=dsn['name'], rse_id=rse_id, dids=files, account=root)
        replicas += files

    add_rules(dids=dsns1, rules=[{
        'account': jdoe,
        'copies': 1,
        'rse_expression': rse,
        'grouping': 'DATASET'
    }])

    # Run the undertaker twice — presumably the second pass handles work
    # uncovered by the first (TODO confirm against undertaker semantics).
    undertaker(worker_number=1, total_workers=1, once=True)
    undertaker(worker_number=1, total_workers=1, once=True)

    # After deletion of the expired datasets, every file replica should have
    # been given a tombstone.
    for replica in replicas:
        assert get_replica(scope=replica['scope'], name=replica['name'], rse_id=rse_id)['tombstone'] is not None
def list_dids(self, scope, filters, did_type='collection', ignore_case=False, limit=None, offset=None, long=False, recursive=False, ignore_dids=None, session=None):
    """
    Yield DIDs whose JSON metadata satisfies the given filter expression(s).

    :param scope: The scope to search within.
    :param filters: A filter dict, or a list of filter dicts OR-ed together.
    :param limit: Maximum number of rows to query.
    :param long: If True, yield dicts (did_type/bytes/length unavailable here);
                 otherwise yield bare names.
    :param recursive: If True, also descend into matching containers/datasets.
    :param ignore_dids: Set of "scope:name" strings already yielded (used to
                        deduplicate across OR groups and recursion).
    :param session: The database session in use.
    :raises NotImplementedError: if the backend database has no JSON support.
    :raises InvalidMetadata: if the query fails due to inconsistent key datatypes.
    """
    if not json_implemented(session=session):
        raise NotImplementedError

    if not ignore_dids:
        ignore_dids = set()

    # backwards compatability for filters as single {}.
    if isinstance(filters, dict):
        filters = [filters]

    # instantiate fe and create sqla query, note that coercion to a model keyword
    # is not appropriate here as the filter words are stored in a single json column.
    fe = FilterEngine(filters, model_class=models.DidMeta, strict_coerce=False)
    query = fe.create_sqla_query(additional_model_attributes=[
        models.DidMeta.scope, models.DidMeta.name
    ], additional_filters=[(models.DidMeta.scope, operator.eq, scope)],
        json_column=models.DidMeta.meta)

    if limit:
        query = query.limit(limit)

    if recursive:
        # Local import — presumably to avoid a circular import at module load
        # time (TODO confirm).
        from rucio.core.did import list_content

        # Get attached DIDs and save in list because query has to be finished before starting a new one in the recursion
        collections_content = []
        for did in query.yield_per(100):
            if (did.did_type == DIDType.CONTAINER or did.did_type == DIDType.DATASET):
                collections_content += [d for d in list_content(scope=did.scope, name=did.name)]

        # Replace any name filtering with recursed DID names.
        for did in collections_content:
            for or_group in filters:
                or_group['name'] = did['name']
            for result in self.list_dids(scope=did['scope'], filters=filters, recursive=True, did_type=did_type, limit=limit, offset=offset, long=long, ignore_dids=ignore_dids, session=session):
                yield result

    try:
        for did in query.yield_per(5):  # don't unpack this as it makes it dependent on query return order!
            if long:
                did_full = "{}:{}".format(did.scope, did.name)
                if did_full not in ignore_dids:  # concatenating results of OR clauses may contain duplicate DIDs if query result sets not mutually exclusive.
                    ignore_dids.add(did_full)
                    yield {
                        'scope': did.scope,
                        'name': did.name,
                        'did_type': None,  # not available with JSON plugin
                        'bytes': None,  # not available with JSON plugin
                        'length': None  # not available with JSON plugin
                    }
            else:
                did_full = "{}:{}".format(did.scope, did.name)
                if did_full not in ignore_dids:  # concatenating results of OR clauses may contain duplicate DIDs if query result sets not mutually exclusive.
                    ignore_dids.add(did_full)
                    yield did.name
    except DataError as e:
        raise exception.InvalidMetadata(
            "Database query failed: {}. This can be raised when the datatype of a key is inconsistent between dids."
            .format(e))
def skip_without_json():
    """Skip the current pytest test when the database lacks JSON metadata support."""
    if json_implemented():
        return
    pytest.skip("JSON support is not implemented in this database")
def test_list_dids_extended(self):
    """ META (CLIENTS) : Get all dids matching the values of the provided metadata keys """

    # Test did Columns use case: three datasets sharing 'project' but
    # differing in run_number / stream_name / datatype.
    dsns = []
    tmp_scope = 'mock'
    tmp_dsn1 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn1)

    dataset_meta = {'project': 'data12_8TeV',
                    'run_number': 400000,
                    'stream_name': 'physics_CosmicCalo',
                    'prod_step': 'merge',
                    'datatype': 'NTUP_TRIG',
                    'version': 'f392_m920',
                    }

    self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn1, meta=dataset_meta)
    tmp_dsn2 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn2)
    dataset_meta['run_number'] = 400001
    self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn2, meta=dataset_meta)

    tmp_dsn3 = 'dsn_%s' % generate_uuid()
    dsns.append(tmp_dsn3)
    dataset_meta['stream_name'] = 'physics_Egamma'
    dataset_meta['datatype'] = 'NTUP_SMWZ'
    self.did_client.add_dataset(scope=tmp_scope, name=tmp_dsn3, meta=dataset_meta)

    # All three datasets share project+version.
    dids = self.did_client.list_dids_extended(tmp_scope, {'project': 'data12_8TeV', 'version': 'f392_m920'})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert dsn in results
    dsns.remove(tmp_dsn1)

    # Only dsn2 and dsn3 have run_number 400001.
    dids = self.did_client.list_dids_extended(tmp_scope, {'project': 'data12_8TeV', 'run_number': 400001})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert dsn in results
    dsns.remove(tmp_dsn2)

    # Only dsn3 has the Egamma/NTUP_SMWZ combination.
    dids = self.did_client.list_dids_extended(tmp_scope, {'project': 'data12_8TeV', 'stream_name': 'physics_Egamma', 'datatype': 'NTUP_SMWZ'})
    results = []
    for d in dids:
        results.append(d)
    for dsn in dsns:
        assert dsn in results

    # Test JSON use case: four DIDs with unique keys, overlapping values.
    if json_implemented(session=self.session):
        did1 = 'name_%s' % generate_uuid()
        did2 = 'name_%s' % generate_uuid()
        did3 = 'name_%s' % generate_uuid()
        did4 = 'name_%s' % generate_uuid()

        key1 = 'key_1_%s' % generate_uuid()
        key2 = 'key_2_%s' % generate_uuid()
        key3 = 'key_3_%s' % generate_uuid()

        value1 = 'value_1_%s' % generate_uuid()
        value2 = 'value_2_%s' % generate_uuid()
        value3 = 'value_3_%s' % generate_uuid()
        value_not_1 = 'value_not_1_%s' % generate_uuid()
        # NOTE: the prefix reuses 'value_not_1_' although the variable is
        # value_not_2 — harmless, since only uniqueness matters here.
        value_not_2 = 'value_not_1_%s' % generate_uuid()
        value_unique = 'value_unique_%s' % generate_uuid()

        self.did_client.add_did(scope=tmp_scope, name=did1, type="DATASET")
        self.did_client.add_did(scope=tmp_scope, name=did2, type="DATASET")
        self.did_client.add_did(scope=tmp_scope, name=did3, type="DATASET")
        self.did_client.add_did(scope=tmp_scope, name=did4, type="DATASET")

        self.did_client.set_metadata(scope=tmp_scope, name=did1, key=key1, value=value1)
        self.did_client.set_metadata(scope=tmp_scope, name=did1, key=key2, value=value2)

        self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key1, value=value1)
        self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key2, value=value_not_2)
        self.did_client.set_metadata(scope=tmp_scope, name=did2, key=key3, value=value3)

        self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key1, value=value_not_1)
        self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key2, value=value2)
        self.did_client.set_metadata(scope=tmp_scope, name=did3, key=key3, value=value3)

        self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key1, value=value1)
        self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key2, value=value2)
        self.did_client.set_metadata(scope=tmp_scope, name=did4, key=key3, value=value_unique)

        # Key not there
        dids = self.did_client.list_dids_extended(tmp_scope, {'key45': 'value'})
        results = []
        for d in dids:
            results.append(d)
        assert len(results) == 0

        # Value not there
        dids = self.did_client.list_dids_extended(tmp_scope, {key1: 'value_not_there'})
        results = []
        for d in dids:
            results.append(d)
        assert len(results) == 0

        # key1 = value1
        dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1})
        results = []
        for d in dids:
            results.append(d)
        assert len(results) == 3
        assert did1 in results
        assert did2 in results
        assert did4 in results

        # key1, key2
        dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1, key2: value2})
        results = []
        for d in dids:
            results.append(d)
        assert len(results) == 2
        assert did1 in results
        assert did4 in results

        # key1, key2, key 3
        dids = self.did_client.list_dids_extended(tmp_scope, {key1: value1, key2: value2, key3: value3})
        results = []
        for d in dids:
            results.append(d)
        assert len(results) == 0

        # key3 = unique value
        dids = self.did_client.list_dids_extended(tmp_scope, {key3: value_unique})
        results = []
        for d in dids:
            results.append(d)
        assert len(results) == 1
        assert did4 in results
def test_Wildcards(self, session=None):
    """Check FilterEngine wildcard ('*') matching with '=' and '!=' on both backends."""
    # Plugin: DID
    #
    # did5 gets no 'project' value, so it stays NULL.
    did_name1 = self._create_tmp_DID()
    did_name2 = self._create_tmp_DID()
    did_name3 = self._create_tmp_DID()
    did_name4 = self._create_tmp_DID()
    did_name5 = self._create_tmp_DID()
    set_metadata(scope=self.tmp_scope, name=did_name1, key='project', value="test1")
    set_metadata(scope=self.tmp_scope, name=did_name2, key='project', value="test2")
    set_metadata(scope=self.tmp_scope, name=did_name3, key='project', value="anothertest1")
    set_metadata(scope=self.tmp_scope, name=did_name4, key='project', value="anothertest2")

    dids = []
    q = FilterEngine('project = test*', model_class=models.DataIdentifier).create_sqla_query(additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3, did_name4, did_name5), dids)).count(True), 2)  # 1, 2

    dids = []
    q = FilterEngine('project = *test*', model_class=models.DataIdentifier).create_sqla_query(additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3, did_name4, did_name5), dids)).count(True), 4)  # 1, 2, 3, 4

    dids = []
    q = FilterEngine('project != *anothertest*', model_class=models.DataIdentifier).create_sqla_query(additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3, did_name4, did_name5), dids)).count(True), 3)  # 1, 2, 5 (NULL counted in not equals)

    dids = []
    q = FilterEngine('project != *test*', model_class=models.DataIdentifier).create_sqla_query(additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3, did_name4, did_name5), dids)).count(True), 1)  # 5 (NULL counted in not equals)

    # Plugin: JSON
    #
    # Oracle excluded: wildcard '!=' on JSON keys is not supported there.
    # did5 has no DidMeta row, so it never appears in JSON-plugin results.
    if session.bind.dialect.name != 'oracle' and json_implemented(session=session):
        did_name1 = self._create_tmp_DID()
        did_name2 = self._create_tmp_DID()
        did_name3 = self._create_tmp_DID()
        did_name4 = self._create_tmp_DID()
        did_name5 = self._create_tmp_DID()
        set_metadata(scope=self.tmp_scope, name=did_name1, key='testkeystr1', value="test1")
        set_metadata(scope=self.tmp_scope, name=did_name2, key='testkeystr1', value="test2")
        set_metadata(scope=self.tmp_scope, name=did_name3, key='testkeystr1', value="anothertest1")
        set_metadata(scope=self.tmp_scope, name=did_name4, key='testkeystr1', value="anothertest2")

        dids = []
        q = FilterEngine('testkeystr1 = test*', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3, did_name4, did_name5), dids)).count(True), 2)  # 1, 2

        dids = []
        q = FilterEngine('testkeystr1 = *test*', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3, did_name4, did_name5), dids)).count(True), 4)  # 1, 2, 3, 4

        dids = []
        q = FilterEngine('testkeystr1 != *anothertest*', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3, did_name4, did_name5), dids)).count(True), 2)  # 1, 2

        dids = []
        q = FilterEngine('testkeystr1 != *test*', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name in (did_name1, did_name2, did_name3, did_name4, did_name5), dids)).count(True), 0)
def test_CompoundInequality(self, session=None):
    """Check FilterEngine compound inequalities (a < key < b) on both backends."""
    # Plugin: DID
    #
    did_name = self._create_tmp_DID()
    set_metadata(scope=self.tmp_scope, name=did_name, key='run_number', value=1)

    # 0 < 1 < 2: matches.
    dids = []
    q = FilterEngine('0 < run_number < 2', model_class=models.DataIdentifier).create_sqla_query(
        additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name == did_name, dids)).count(True), 1)

    # 0 < 1 <= 1: matches (inclusive upper bound).
    dids = []
    q = FilterEngine('0 < run_number <= 1', model_class=models.DataIdentifier).create_sqla_query(
        additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertEqual(list(map(lambda did: did.name == did_name, dids)).count(True), 1)

    # 0 <= 1 < 1: does not match (exclusive upper bound).
    dids = []
    q = FilterEngine('0 <= run_number < 1', model_class=models.DataIdentifier).create_sqla_query(
        additional_model_attributes=[models.DataIdentifier.name])
    dids += [did for did in q.yield_per(5)]
    dids = set(dids)
    self.assertNotEqual(list(map(lambda did: did.name == did_name, dids)).count(True), 1)

    # Plugin: JSON
    #
    # Oracle excluded: inequality operators on JSON keys are not supported there.
    if session.bind.dialect.name != 'oracle' and json_implemented(session=session):
        did_name = self._create_tmp_DID()
        set_metadata(scope=self.tmp_scope, name=did_name, key='testkeyint1', value=1)

        dids = []
        q = FilterEngine('0 < testkeyint1 < 2', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name == did_name, dids)).count(True), 1)

        dids = []
        q = FilterEngine('0 < testkeyint1 <= 1', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertEqual(list(map(lambda did: did.name == did_name, dids)).count(True), 1)

        dids = []
        q = FilterEngine('0 <= testkeyint1 < 1', model_class=models.DidMeta, strict_coerce=False).create_sqla_query(
            additional_model_attributes=[
                models.DidMeta.scope,
                models.DidMeta.name
            ],
            json_column=models.DidMeta.meta)
        dids += [did for did in q.yield_per(5)]
        dids = set(dids)
        self.assertNotEqual(list(map(lambda did: did.name == did_name, dids)).count(True), 1)