def create_datafile(client, data, df):
    """Create a Datafile in ICAT from the description in *df*.

    Look up the Dataset and the DatafileFormat referenced by *df*,
    create the Datafile with any parameters listed under
    ``df['parameters']``, and return the newly created object.

    :param client: the ICAT client session.
    :param data: reference data; provides the ``datafile_formats`` and
        ``parameter_types`` lookup tables.
    :param df: dict describing the datafile to create.
    :return: the created Datafile entity object.
    """
    query = Query(client, "Dataset", conditions={
        "name": "= '%s'" % df['dataset'],
        "investigation.name": "= '%s'" % df['investigation']
    })
    dataset = client.assertedSearch(query)[0]
    dff = data['datafile_formats'][df['format']]
    query = Query(client, "DatafileFormat", conditions={
        "name": "= '%s'" % dff['name'],
        "version": "= '%s'" % dff['version'],
    })
    datafile_format = client.assertedSearch(query)[0]
    datafile = client.new("datafile")
    initobj(datafile, df)
    datafile.dataset = dataset
    datafile.datafileFormat = datafile_format
    if 'parameters' in df:
        for p in df['parameters']:
            param = client.new('datafileParameter')
            initobj(param, p)
            ptdata = data['parameter_types'][p['type']]
            # Build a Query object here for consistency with the other
            # searches in this function; the original used a legacy
            # concise query string ("ParameterType [name=... AND units=...]").
            query = Query(client, "ParameterType", conditions={
                "name": "= '%s'" % ptdata['name'],
                "units": "= '%s'" % ptdata['units'],
            })
            param.type = client.assertedSearch(query)[0]
            datafile.parameters.append(param)
    datafile.create()
    return datafile
def test_query_datafile(client):
    """Query a datafile by its name, dataset name, and investigation name.
    """
    dfdata = {
        'name': "e208945.nxs",
        'dataset': "e208945",
        'investigation': "12100409-ST",
    }
    conditions = {
        "name": "= '%s'" % dfdata['name'],
        "dataset.name": "= '%s'" % dfdata['dataset'],
        "dataset.investigation.name": "= '%s'" % dfdata['investigation'],
    }
    query = Query(client, "Datafile", conditions=conditions)
    print(str(query))
    qstr = str(query)
    res = client.search(query)
    assert len(res) == 1
    df = res[0]
    assert df.BeanName == "Datafile"
    assert df.name == dfdata['name']

    # Same example, but use placeholders in the query string now.  The
    # rendered query must be identical to the one above after
    # substituting the values.
    conditions = {
        "name": "= '%(name)s'",
        "dataset.name": "= '%(dataset)s'",
        "dataset.investigation.name": "= '%(investigation)s'",
    }
    query = Query(client, "Datafile", conditions=conditions)
    print(str(query))
    print(str(query) % dfdata)
    assert str(query) % dfdata == qstr
    res = client.search(str(query) % dfdata)
    assert len(res) == 1
    assert res[0] == df
def test_putData_datafileCreateTime(tmpdirsec, client):
    """Call client.putData() with a datafile having datafileCreateTime set.

    Issue #10.
    """
    case = testdatafiles[0]
    query = Query(client, "Dataset", conditions={
        "name": "= '%s'" % case['dsname'],
        "investigation.name": "= '%s'" % case['invname'],
    })
    dataset = client.assertedSearch(query)[0]
    datafileformat = client.assertedSearch("DatafileFormat [name='raw']")[0]
    tzinfo = UtcTimezone() if UtcTimezone else None
    createTime = datetime.datetime(2008, 6, 18, 9, 31, 11, tzinfo=tzinfo)

    # First round: set datafileCreateTime to a datetime object.
    dfname = "test_datafileCreateTime_dt.dat"
    f = DummyDatafile(tmpdirsec.dir, dfname, case['size'])
    datafile = client.new("datafile", name=f.name, dataset=dataset,
                          datafileFormat=datafileformat)
    datafile.datafileCreateTime = createTime
    client.putData(f.fname, datafile)
    query = Query(client, "Datafile", conditions={
        "name": "= '%s'" % dfname,
        "dataset.name": "= '%s'" % case['dsname'],
        "dataset.investigation.name": "= '%s'" % case['invname'],
    })
    df = client.assertedSearch(query)[0]
    assert df.datafileCreateTime is not None
    # The handling of date values in original Suds is buggy, so we
    # cannot expect to be able to reliably compare date values.  If
    # UtcTimezone is set, we have the jurko fork and then this bug in
    # Suds is fixed.
    if tzinfo is not None:
        assert df.datafileCreateTime == createTime

    # Second round: the same, but datafileCreateTime set to a string.
    dfname = "test_datafileCreateTime_str.dat"
    f = DummyDatafile(tmpdirsec.dir, dfname, case['size'])
    datafile = client.new("datafile", name=f.name, dataset=dataset,
                          datafileFormat=datafileformat)
    datafile.datafileCreateTime = createTime.isoformat()
    client.putData(f.fname, datafile)
    query = Query(client, "Datafile", conditions={
        "name": "= '%s'" % dfname,
        "dataset.name": "= '%s'" % case['dsname'],
        "dataset.investigation.name": "= '%s'" % case['invname'],
    })
    df = client.assertedSearch(query)[0]
    assert df.datafileCreateTime is not None
    if tzinfo is not None:
        assert df.datafileCreateTime == createTime
def test_query_condition_greaterthen(client):
    """Other relations than equal may be used in the conditions too.
    """
    condition = {"datafileCreateTime": ">= '2012-01-01'"}
    query = Query(client, "Datafile", conditions=condition)
    print(str(query))
    res = client.search(query)
    assert len(res) == 4

    condition = {"datafileCreateTime": "< '2012-01-01'"}
    query = Query(client, "Datafile", conditions=condition)
    print(str(query))
    res = client.search(query)
    assert len(res) == 6
def wipe_all(client):
    """Delete all content from ICAT.
    """
    require_icat_version("4.4.0", "Need extended root permission")
    # Wipe the datasets first so that their files get removed from IDS,
    # then delete everything else table by table in pages of 200.
    wipe_data(client, Query(client, "Dataset"))
    tables = ["Investigation", "Facility"] + client.getEntityNames()
    for t in tables:
        query = Query(client, t, limit=(0, 200))
        while True:
            objs = client.search(query)
            if not objs:
                break
            client.deleteMany(objs)
def __init__(self, invid):
    """Look up the investigation and the 'other' dataset type.

    The investigation size is taken from the investigationSize
    attribute if size attributes are available, zero otherwise.
    """
    self.investigation = self._get_investigation(invid)
    self.inv_name = "Investigation(%s)" % invid
    if have_size_attrs and self.investigation.investigationSize:
        self.inv_size = self.investigation.investigationSize
    else:
        self.inv_size = 0
    query = Query(client, "DatasetType", conditions={"name": "= 'other'"})
    self.ds_type = client.assertedSearch(query)[0]
def cleanup():
    """Remove the test datasets created by this module and log out.
    """
    query = Query(client, "Dataset", conditions={
        "name": "LIKE '%s-%%'" % testDatasetName
    })
    wipe_data(client, query)
    client.deleteMany(client.search(query))
    client.logout()
def getInvestigation():
    """Return the investigation named in the configuration.
    """
    query = Query(client, "Investigation", conditions={
        "name": "= '%s'" % conf.investigation,
    })
    return client.assertedSearch(query)[0]
def test_assertedSearch_range_exact_query(client):
    """Check that Query objects also work with assertedSearch().
    """
    query = Query(client, "User", limit=(0, 3))
    objs = client.assertedSearch(query, assertmin=3, assertmax=3)
    assert len(objs) == 3
    assert objs[0].BeanName == "User"
def test_ingest_datafiles_upload(tmpdirsec, client, dataset, cmdargs):
    """Upload datafiles to IDS from icatingest.

    Same as last test, but set the --upload-datafiles flag so that
    icatingest will not create the datafiles as objects in the ICAT,
    but upload the files to IDS instead.
    """
    dummyfiles = [
        DummyDatafile(tmpdirsec.dir, f['dfname'], f['size'], f['mtime'])
        for f in testdatafiles
    ]
    args = cmdargs + ["-i", datafiles, "--upload-datafiles",
                      "--datafile-dir", tmpdirsec.dir]
    callscript("icatingest.py", args)
    # Verify that the datafiles have been uploaded: each must now have
    # a location and the size, checksum, and (if given) modification
    # time recorded in ICAT must match the file on disk.
    dataset = client.searchMatching(dataset)
    for f in dummyfiles:
        query = Query(client, "Datafile", conditions={
            "name": "= '%s'" % f.name,
            "dataset.id": "= %d" % dataset.id,
        })
        df = client.assertedSearch(query)[0]
        assert df.location is not None
        assert df.fileSize == f.size
        assert df.checksum == f.crc32
        if f.mtime:
            assert df.datafileModTime == f.mtime
def test_query_investigation_includes(client):
    """Query lots of information about one single investigation.
    """
    includes = {
        "facility", "type.facility", "investigationInstruments",
        "investigationInstruments.instrument.facility", "shifts",
        "keywords", "publications", "investigationUsers",
        "investigationUsers.user", "investigationGroups",
        "investigationGroups.grouping", "parameters",
        "parameters.type.facility",
    }
    query = Query(client, "Investigation",
                  conditions={"id": "= %d" % investigation.id},
                  includes=includes)
    print(str(query))
    res = client.search(query)
    assert len(res) == 1
    inv = res[0]
    assert inv.BeanName == "Investigation"
    assert inv.id == investigation.id
    assert inv.name == investigation.name
    assert inv.facility.BeanName == "Facility"
    assert inv.type.facility.BeanName == "Facility"
    assert len(inv.investigationInstruments) > 0
    assert len(inv.investigationUsers) > 0
    assert len(inv.investigationGroups) > 0
def getinvestigation(invid):
    """Search the investigation for a proposal number string.
    """
    proposal = ProposalNo.parse(invid)
    query = Query(client, "Investigation",
                  conditions=proposal.as_conditions(),
                  includes={"facility"})
    return client.assertedSearch(query)[0]
def test_query_metaattr(client):
    """Test adding a condition on a meta attribute.  Issue #6
    """
    query = Query(client, "Datafile", conditions={"modId": "= 'jdoe'"})
    print(str(query))
    res = client.search(query)
    assert len(res) == 0
def verify_dataset_params(client, dataset, params):
    """Assert that the parameters stored for dataset match params.

    params is a set of (name, numericValue, units) tuples.
    """
    query = Query(client, "DatasetParameter",
                  conditions={"dataset.id": "= %d" % dataset.id},
                  includes={"type"})
    found = client.search(query)
    assert len(found) == len(params)
    values = {(p.type.name, p.numericValue, p.type.units) for p in found}
    assert values == params
def test_query_relateddatafile(client):
    """RelatedDatafile is the entity type with the most complicated
    natural order.
    """
    query = Query(client, "RelatedDatafile", order=True)
    print(str(query))
    res = client.search(query)
    assert len(res) == 1
def test_query_rule_order(client):
    """Rule does not have a constraint, id is included in the natural order.
    """
    query = Query(client, "Rule", order=True)
    print(str(query))
    assert "id" in query.order
    res = client.search(query)
    assert len(res) == 104
def test_query_datacollection(client):
    """There is no sensible order for DataCollection, fall back to id.
    """
    query = Query(client, "DataCollection", order=True)
    print(str(query))
    assert "id" in query.order
    res = client.search(query)
    assert len(res) == 2
def test_query_condition_list(client):
    """We may also add a list of conditions on a single attribute.
    """
    condition = {"datafileCreateTime": [">= '2012-01-01'", "< '2013-01-01'"]}
    query = Query(client, "Datafile", conditions=condition)
    print(str(query))
    qstr = str(query)
    res = client.search(query)
    assert len(res) == 3

    # The last example also works by adding the conditions separately;
    # the resulting query string must be the same.
    query = Query(client, "Datafile")
    query.addConditions({"datafileCreateTime": ">= '2012-01-01'"})
    query.addConditions({"datafileCreateTime": "< '2013-01-01'"})
    print(str(query))
    assert str(query) == qstr
    res = client.search(query)
    assert len(res) == 3
def get_datafile(client, df):
    """Search the datafile described by df from the server.
    """
    query = Query(client, "Datafile", conditions={
        "name": "= '%s'" % df['name'],
        "dataset.name": "= '%s'" % df['dataset'],
        "dataset.investigation.name": "= '%s'" % df['investigation'],
    })
    return client.assertedSearch(query)[0]
def searchUniqueKey(self, key, objindex=None):
    """Search the object that belongs to a unique key.

    This is in a sense the inverse method to
    :meth:`icat.entity.Entity.getUniqueKey`, the key must previously
    have been generated by it.  This method searches the Entity
    object that the key has been generated for from the server.

    If objindex is not :const:`None`, it is used as a cache of
    previously retrieved objects.  It must be a dict that maps keys
    to Entity objects.  The object retrieved by this method call
    will be added to this index.

    This method uses the JPQL inspired query syntax introduced with
    ICAT 4.3.0.  It won't work with older ICAT servers.

    :param key: the unique key of the object to search for.
    :type key: :class:`str`
    :param objindex: cache of Entity objects.
    :type objindex: :class:`dict`
    :return: the object corresponding to the key.
    :rtype: :class:`icat.entity.Entity`
    :raise SearchResultError: if the object has not been found.
    :raise ValueError: if the key is not well formed.
    :raise VersionMethodError: if connected to an ICAT server older
        then 4.3.0.
    """
    # NOTE(review): this compares apiversion against a plain string;
    # presumably apiversion is a version object with proper ordering
    # semantics rather than a str — confirm, since lexicographic
    # string comparison would misorder e.g. '4.10' vs '4.3'.
    if self.apiversion < '4.3':
        raise VersionMethodError("searchUniqueKey", self.apiversion)

    # Serve from the cache if the caller provided one.
    if objindex is not None and key in objindex:
        return objindex[key]

    # A key has the form "<BeanName>_<attr=val_attr=val...>"; split at
    # the first underscore and parse the attribute/value part.
    us = key.index('_')
    beanname = key[:us]
    av = parse_attr_val(key[us+1:])

    # Build a query with one condition per attribute found in the key.
    info = self.getEntityInfo(beanname)
    query = Query(self, beanname)
    for f in info.fields:
        if f.name in av.keys():
            attr = f.name
            if f.relType == "ATTRIBUTE":
                # Plain attribute: match the unquoted value directly.
                cond = "= '%s'" % simpleqp_unquote(av[attr])
                query.addConditions({attr:cond})
            elif f.relType == "ONE":
                # Many-to-one relation: the value is itself a unique
                # key of the related object; resolve it recursively
                # (using the same cache) and match on its id.
                rk = str("%s_%s" % (f.type, av[attr]))
                ro = self.searchUniqueKey(rk, objindex)
                query.addConditions({"%s.id" % attr:"= %d" % ro.id})
            else:
                # Keys may only encode attributes and to-one relations.
                raise ValueError("malformed '%s': invalid attribute '%s'"
                                 % (key, attr))

    obj = self.assertedSearch(query)[0]
    # Populate the cache for subsequent lookups.
    if objindex is not None:
        objindex[key] = obj
    return obj
def test_query_include_1(client):
    """Test adding an "INCLUDE 1" clause.
    """
    query = Query(client, "Investigation", includes="1")
    print(str(query))
    res = client.search(query)
    assert len(res) > 0
    inv = res[0]
    assert inv.BeanName == "Investigation"
    assert inv.facility.BeanName == "Facility"
    assert inv.type.BeanName == "InvestigationType"
def test_query_limit(client):
    """Add a LIMIT clause to the last example.
    """
    query = Query(client, "Rule", order=['grouping', 'what', 'id'],
                  conditions={"grouping": "IS NOT NULL"})
    query.setLimit((0, 10))
    print(str(query))
    res = client.search(query)
    assert len(res) == 10
def test_query_nullable_warning(client, recwarn):
    """Ordering on nullable relations emits a warning.
    """
    recwarn.clear()
    query = Query(client, "Rule", order=['grouping', 'what', 'id'])
    w = recwarn.pop(icat.QueryNullableOrderWarning)
    assert issubclass(w.category, icat.QueryNullableOrderWarning)
    assert "grouping" in str(w.message)
    print(str(query))
    res = client.search(query)
    assert len(res) == 44
def run_dataset(self, ds_name):
    """Update the fileSize of all datafiles in one dataset.

    Return the dataset and the elapsed wall-clock time for the
    updates.
    """
    query = Query(client, "Dataset", conditions={
        "investigation.id": "= %d" % self.investigation.id,
        "name": "= '%s'" % ds_name,
    })
    dataset = client.assertedSearch(query)[0]
    # Keep the cached investigation size consistent: this dataset's
    # size will be accounted for separately.
    if have_size_attrs:
        self.inv_size -= dataset.datasetSize
    query = Query(client, "Datafile", conditions={
        "dataset.id": "= %d" % dataset.id
    }, includes="1")
    datafiles = client.search(query)
    assert len(datafiles) == self.Num_Datafiles
    # Time only the update calls themselves.
    start_time = timer()
    for datafile in datafiles:
        datafile.fileSize = self.FileSize
        datafile.update()
    end_time = timer()
    elapsed = end_time - start_time
    return dataset, elapsed
def createDatasets(client, testConfig):
    """Create one test dataset per content type in the test investigation.
    """
    query = Query(client, "Investigation", conditions={
        "name": "= '%s'" % testInvestigation,
    })
    inv = client.assertedSearch(query)[0]
    testFCount = 10
    # Split the configured base size evenly over the three datasets of
    # testFCount files each.
    testFSize = MemorySpace(testConfig.baseSize // (10*testFCount))
    for data in ['random', 'zero', 'urandom']:
        name = "%s-%s" % (testDatasetName, data)
        testDatasets.append(DatasetBase(client, inv, name,
                                        testFCount, testFSize, data))
def cleanup():
    """Remove the test datasets in pages of 500 and log out.
    """
    query = Query(client, "Dataset",
                  conditions={"name": "LIKE '%s-%%'" % testDatasetName})
    wipe_data(client, query)
    query.setLimit((0, 500))
    while True:
        objs = client.search(query)
        if not objs:
            break
        client.deleteMany(objs)
    client.logout()
def test_query_nullable_warning_suppressed(client, recwarn):
    """The warning can be suppressed by making the condition explicit.
    """
    recwarn.clear()
    query = Query(client, "Rule", order=['grouping', 'what', 'id'],
                  conditions={"grouping": "IS NOT NULL"})
    assert len(recwarn.list) == 0
    print(str(query))
    res = client.search(query)
    assert len(res) == 44
def _searchByReference(client, element, objtype, objindex):
    """Search for a referenced object.

    The element either carries a 'ref' attribute with a unique key, or
    identifies the object by its other attributes.
    """
    ref = element.get('ref')
    if ref:
        # Object is referenced by key.
        return client.searchUniqueKey(ref, objindex)
    # Object is referenced by attributes: build one equality condition
    # for each attribute except 'id'.
    attrs = set(element.keys()) - {'id'}
    conditions = {a: "= '%s'" % element.get(a) for a in attrs}
    query = Query(client, objtype, conditions=conditions)
    return client.assertedSearch(query)[0]
def test_query_datafiles_datafileformat(client, recwarn):
    """Datafiles ordered by format.

    Note: this raises a QueryNullableOrderWarning, see below.
    """
    recwarn.clear()
    query = Query(client, "Datafile",
                  order=['datafileFormat', 'dataset', 'name'])
    w = recwarn.pop(icat.QueryNullableOrderWarning)
    assert issubclass(w.category, icat.QueryNullableOrderWarning)
    assert "datafileFormat" in str(w.message)
    print(str(query))
    res = client.search(query)
    assert len(res) == 10
def test_query_in_operator(client):
    """Using "id in (i)" rather then "id = i" also works.

    (This may be needed to work around ICAT Issue 128.)
    """
    query = Query(client, "Investigation",
                  conditions={"id": "in (%d)" % investigation.id})
    print(str(query))
    res = client.search(query)
    assert len(res) == 1
    inv = res[0]
    assert inv.BeanName == "Investigation"
    assert inv.id == investigation.id
    assert inv.name == investigation.name