def run(self, service, inputTable, queryMeta):
    with base.getTableConn() as conn:
        ssaTable = rsc.TableForDef(self.queriedTable, connection=conn)
        try:
            # XXX TODO: Figure out why the unquote here is required.
            accref = urllib.unquote(inputTable.getParam("accref"))
            res = list(ssaTable.iterQuery(ssaTable.tableDef,
                "accref=%(accref)s", {"accref": accref}))
            if not res:
                raise svcs.UnknownURI(
                    "No spectrum with accref %s known here"%
                        inputTable.getParam("accref"))
            ssaRow = res[0]
        finally:
            ssaTable.close()

    resData = makeSDMDataForSSARow(ssaRow, self.sdmDD)

    votContextArgs = {}
    if queryMeta["tdEnc"]:
        votContextArgs["tablecoding"] = "td"

    # This is for VOSpec, in particular the tablecoding; I guess once
    # we actually support the sed DM, this should go, and the
    # specview links should use sed dcc sourcePaths.
    if inputTable.getParam("dm")=="sed":
        hackSDMToSED(resData)
        votContextArgs["tablecoding"] = "td"

    return (base.votableType,
        votable.asString(makeSDMVOT(resData, **votContextArgs)))

def __getFromDB(self, metaKey):
    try:
        # try to use cached data
        if self.__dbRecord is None:
            raise base.NoMetaKey(metaKey, carrier=self)
        return self.__dbRecord[metaKey]
    except AttributeError:
        # nothing cached yet; fetch data from DB
        pass

    res = None
    if self.rd:
        # We're not going through servicelist since we don't want to depend
        # on the registry subpackage.
        with base.getTableConn() as conn:
            res = list(conn.query(
                "SELECT dateUpdated, recTimestamp, setName"
                " FROM dc.resources_join WHERE sourceRD=%(rdId)s"
                " AND resId=%(id)s",
                {"rdId": self.rd.sourceId, "id": self.id}))

    if res:
        self.__dbRecord = {
            "sets": list(set(row[2] for row in res)),
            "recTimestamp": res[0][1].strftime(utils.isoTimestampFmt)}
    else:
        self.__dbRecord = {
            "sets": ["unpublished"],
            "recTimestamp": datetime.datetime.utcnow().strftime(
                utils.isoTimestampFmt)}

    return self.__getFromDB(metaKey)

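# Editor's note: the AttributeError dance above is a small caching pattern --
# a missing __dbRecord attribute means "nothing cached yet", and the method
# recurses once after filling the cache.  A minimal self-contained sketch of
# the same idea (all names here are hypothetical, not DaCHS API):

class _CachedRecordSketch(object):
    def getRecord(self):
        try:
            return self.__record    # raises AttributeError until first fetch
        except AttributeError:
            self.__record = {"sets": ["unpublished"]}    # expensive fetch here
            return self.getRecord()
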
def main():
    args = parseCommandLine()
    jobId = args.jobId

    with base.getTableConn() as conn:
        svcId = list(conn.query(
            "SELECT jobclass FROM uws.userjobs WHERE jobId=%(jobId)s",
            {"jobId": jobId}))[0][0]
    service = base.resolveCrossId(svcId)

    try:
        job = service.getUWS().getJob(jobId)
        with job.getWritable() as wjob:
            wjob.change(phase=uws.EXECUTING,
                startTime=datetime.datetime.utcnow())

        service = base.resolveCrossId(job.jobClass)
        inputTable = rsc.TableForDef(service.core.inputTable)
        inputTable.job = job
        for parName, value in job.parameters.iteritems():
            inputTable.setParam(parName, value)

        data = service._runWithInputTable(
            service.core, inputTable, None).original

        # Our cores either return a table, a pair of mime and data,
        # or None (in which case they added the results themselves)
        if isinstance(data, tuple):
            mime, payload = data
            with job.openResult(mime, "result") as destF:
                destF.write(payload)

        elif isinstance(data, rsc.Data):
            destFmt = inputTable.getParam("responseformat",
                "application/x-votable+xml")
            with job.openResult(destFmt, "result") as destF:
                formats.formatData(destFmt, data, destF, False)

        elif data is None:
            pass

        else:
            raise NotImplementedError("Cannot handle a service %s result yet."%
                repr(data))

        with job.getWritable() as wjob:
            wjob.change(phase=uws.COMPLETED)

    except SystemExit:
        pass
    except uws.JobNotFound:
        base.ui.notifyInfo("Giving up non-existing UWS job %s."%jobId)
    except Exception, ex:
        base.ui.notifyError("UWS runner %s major failure"%jobId)
        # try to push job into the error state -- this may well fail given
        # that we're quite hosed, but it's worth the try
        service.getUWS().changeToPhase(jobId, uws.ERROR, ex)
        raise

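# Editor's note: a condensed sketch of the result-dispatch convention used in
# main() above -- cores return a (mime, payload) tuple, a formattable data
# object, or None when they have written their results themselves.  The
# writeResult helper is hypothetical; job.openResult and formats.formatData
# are used exactly as in the code above.

from gavo import formats

def writeResult(job, data, destFmt="application/x-votable+xml"):
    if isinstance(data, tuple):
        mime, payload = data
        with job.openResult(mime, "result") as destF:
            destF.write(payload)
    elif data is None:
        pass    # the core already wrote its own results
    else:
        with job.openResult(destFmt, "result") as destF:
            formats.formatData(destFmt, data, destF, False)
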
def renderHTTP(self, ctx):
    request = inevow.IRequest(ctx)
    queryArgs = dict((key, value[0])
        for key, value in request.args.iteritems())

    with base.getTableConn() as conn:
        res = list(conn.queryToDicts(self.query, queryArgs))

    request.setHeader("content-type", "text/json")
    return json.dumps(res)

def getMatchingRows(pars, rscTableDef, getSetFilter):
    """returns rows in rscTableDef matching the OAI parameters pars.

    The last element of the list could be an OAI.resumptionToken element.

    pars is a dictionary mapping any of the following keys to values:

    - from
    - until -- these give a range for which changed records are being
      returned
    - set -- maps to a sequence of set names to be matched.
    - resumptionToken -- some magic value (see OAI.resumptionToken)
    - maxRecords -- an integer literal that specifies the maximum number
      of records returned, defaulting to [ivoa]oaipmhPageSize

    maxRecords is not part of OAI-PMH; it is used internally to turn
    paging on when we think it's a good idea, and for testing.

    rscTableDef has to be a table with a column recTimestamp giving the
    resource record's updated time.

    getSetFilter(pars, fillers) is a function receiving the PMH parameters
    dictionary and a dictionary of query fillers, returning, as appropriate,
    a condition that implements any conditions on sets within pars.
    """
    maxRecords = int(
        pars.get("maxRecords", base.getConfig("ivoa", "oaipmhPagesize")))
    offset = pars.get("resumptionToken", 0)
    frag, fillers = _parseOAIPars(pars)
    frag = " AND ".join(
        f for f in [getSetFilter(pars, fillers), frag] if f)

    try:
        with base.getTableConn() as conn:
            srvTable = rsc.TableForDef(rscTableDef, connection=conn)
            res = list(srvTable.iterQuery(rscTableDef, frag, fillers,
                limits=("LIMIT %(maxRecords)s OFFSET %(offset)s", locals())))

        if len(res)==maxRecords:
            # there's probably more data, request a resumption token
            res.append(OAI.resumptionToken[
                makeResumptionToken(pars, offset+len(res))])
            res[-1].addChild = lambda: 0

    except base.DBError:
        raise base.ui.logOldExc(
            common.BadArgument("Bad syntax in some parameter value"))
    except KeyError, msg:
        raise base.ui.logOldExc(
            base.Error("Internal error, missing key: %s"%msg))

    return res

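# Editor's note: a hedged usage sketch for getMatchingRows; rscTableDef and
# the no-op set filter are illustrative stand-ins for what the OAI-PMH
# endpoint would actually pass in, and the date literal is made up.

def _exampleListRecords(rscTableDef):
    def noSetFilter(pars, fillers):
        # hypothetical: impose no condition on sets
        return None

    pars = {"from": "2020-01-01", "maxRecords": 50}
    return getMatchingRows(pars, rscTableDef, noSetFilter)
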
def _evaluateFromDB(self, ctx):
    if not getattr(ctx, "doQueries", True):
        return
    try:
        with base.getTableConn() as conn:
            for row in conn.query(self.parent.parent.expand(
                    "SELECT DISTINCT %s"%(self.fromdb))):
                self._options.feedObject(self,
                    base.makeStruct(Option, content_=row[0]))
    except base.DBError:
        # Table probably doesn't exist yet, ignore.
        base.ui.notifyWarning("Values fromdb '%s' failed, ignoring"%
            self.fromdb)

def quote(self):
    """returns an estimate of when the job will be completed.

    This currently is very naive: we give each job that's going to run
    before this one EST_TIME_PER_JOB (ten minutes).  This method needs
    to be changed when the dequeueing algorithm is changed.
    """
    with base.getTableConn() as conn:
        nBefore = self.uws.runCanned("countQueuedBefore",
            {"dt": self.destructionTime}, conn)[0]["count"]
    return datetime.datetime.utcnow()+nBefore*EST_TIME_PER_JOB

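# Editor's note: a worked example of the estimate above; EST_TIME_PER_JOB is
# assumed to be the ten minutes mentioned in the docstring, and nBefore is
# made up.

import datetime

EST_TIME_PER_JOB = datetime.timedelta(minutes=10)
nBefore = 3    # three jobs queued ahead of ours
eta = datetime.datetime.utcnow()+nBefore*EST_TIME_PER_JOB
# eta now lies about 30 minutes in the future
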
def data_publishedRDs(self, ctx, data):
    td = base.caches.getRD("//services").getById("resources")
    with base.getTableConn() as conn:
        table = rsc.TableForDef(td, connection=conn)
        try:
            return [row["sourceRD"] for row in table.iterQuery(
                    [td.getColumnByName("sourceRD")], "",
                    distinct=True, limits=("ORDER BY sourceRD", {}))]
        finally:
            table.close()

def _run_getTargetNames(self, service, inputTable, queryMeta):
    with base.getTableConn() as conn:
        table = rsc.TableForDef(self.queriedTable, create=False,
            connection=conn)
        destTD = base.makeStruct(outputdef.OutputTableDef,
            parent_=self.queriedTable.parent,
            id="result",
            onDisk=False,
            columns=[self.queriedTable.getColumnByName("ssa_targname")])
        res = rsc.TableForDef(destTD,
            rows=table.iterQuery(destTD, "", distinct=True))
        res.noPostprocess = True
        return res

def validateTables(rd, args):
    """does some sanity checks on the (top-level) tables within rd.
    """
    valid = True
    identifierSymbol = adql.getSymbols()["identifier"]

    for td in rd.tables:
        for col in td:
            try:
                if col.unit:
                    parsedUnit = api.parseUnit(col.unit)
                    if parsedUnit.isUnknown and not args.acceptFreeUnits:
                        outputWarning(rd.sourceId,
                            "Column %s.%s: Unit %s is not interoperable"%(
                                td.getQName(), col.name, col.unit))
            except api.BadUnit:
                valid = False
                outputError(rd.sourceId,
                    "Bad unit in table %s, column %s: %s"%(
                        td.getQName(), col.name, repr(col.unit)))

            try:
                identifierSymbol.parseString(str(col.name), parseAll=True)
            except base.ParseException, msg:
                outputWarning(rd.sourceId,
                    "Column %s.%s: Name is not a regular"
                    " ADQL identifier."%(td.id, col.name))

        if td.onDisk and args.compareDB:
            with base.getTableConn() as conn:
                q = base.UnmanagedQuerier(conn)
                if q.tableExists(td.getQName()):
                    t = api.TableForDef(td, connection=conn)
                    try:
                        t.ensureOnDiskMatches()
                    except api.DataError, msg:
                        outputError(rd.sourceId,
                            utils.makeEllipsis(utils.safe_str(msg), 160))

    return valid

def querySubjectsList(setName=None):
    """returns a list of local services chunked by subjects.

    This is mainly for the root page (see web.root).  Results are queried
    through the cache under the __system__/services key, so the cache is
    cleared when the services change.
    """
    setName = setName or "local"
    svcsForSubjs = {}
    td = common.getServicesRD().getById("subjects_join")
    otd = svcs.OutputTableDef.fromTableDef(td, None)

    with base.getTableConn() as conn:
        for row in rsc.TableForDef(td, connection=conn).iterQuery(otd,
                "setName=%(setName)s AND subject IS NOT NULL",
                {"setName": setName}):
            svcsForSubjs.setdefault(row["subject"], []).append(row)

    for s in svcsForSubjs.values():
        s.sort(key=lambda a: a["title"])

    res = [{"subject": subject, "chunk": s}
        for subject, s in svcsForSubjs.iteritems()]
    res.sort(lambda a, b: cmp(a["subject"], b["subject"]))
    return res

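# Editor's note: a hedged sketch of consuming querySubjectsList's return
# value, a list of {"subject": ..., "chunk": [service rows]} dicts sorted
# by subject; the row keys used here are the ones sorted on above.

for entry in querySubjectsList():
    print(entry["subject"])
    for svc in entry["chunk"]:
        print("  %s"%svc["title"])
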
def makeSDMDataForSSARow(ssaRow, spectrumData,
        sdmVersion=base.getConfig("ivoa", "sdmVersion")):
    """returns a rsc.Data instance containing an SDM compliant spectrum
    for the spectrum described by ssaRow.

    spectrumData is a data element making a primary table containing
    the spectrum data from an SSA row (typically, this is going to be
    the tablesource property of an SSA service).

    You'll usually use this via //datalink#sdm_genData
    """
    with base.getTableConn() as conn:
        resData = rsc.makeData(spectrumData, forceSource=ssaRow,
            connection=conn)

    resTable = resData.getPrimaryTable()
    resTable.setMeta("description",
        "Spectrum from %s"%products.makeProductLink(ssaRow["accref"]))
    # fudge accref into a full URL
    resTable.setParam("accref",
        products.makeProductLink(resTable.getParam("accref")))
    resData.DACHS_SDM_VERSION = sdmVersion

    # fudge spoint params into 2-arrays
    for param in resTable.iterParams():
        # Bad, bad: In-place changes; we should think how such things
        # can be done better in a rewrite
        if param.type=="spoint":
            val = param.value
            param.type = "double precision(2)"
            param.xtype = None
            param.unit = "deg"
            if val:
                param.set([val.x/utils.DEG, val.y/utils.DEG])

    if sdmVersion=="2":
        hackSDM1ToSDM2(resData)

    return resData

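# Editor's note: a standalone illustration of the spoint fudge above,
# assuming utils.DEG is the size of one degree in radians (i.e. spoints
# keep their coordinates in radians); _FakeSpoint is made up.

import math

DEG = math.pi/180.    # assumed to match utils.DEG

class _FakeSpoint(object):
    x, y = math.pi/2, -math.pi/4    # radians

val = _FakeSpoint()
print([val.x/DEG, val.y/DEG])    # roughly [90.0, -45.0]
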
def makeSDMDataForPUBDID(pubDID, ssaTD, spectrumData,
        sdmVersion=base.getConfig("ivoa", "sdmVersion")):
    """returns a rsc.Data instance containing an SDM compliant spectrum
    for pubDID from ssaTable.

    ssaTD is the definition of a table containing the SSA metadata;
    spectrumData is a data element making a primary table containing
    the spectrum data from an SSA row (typically, this is going to be
    the tablesource property of an SSA service).
    """
    with base.getTableConn() as conn:
        ssaTable = rsc.TableForDef(ssaTD, connection=conn)
        matchingRows = list(ssaTable.iterQuery(ssaTable.tableDef,
            "ssa_pubdid=%(pubdid)s", {"pubdid": pubDID}))
        if not matchingRows:
            raise svcs.UnknownURI("No spectrum with pubdid %s known here"%
                pubDID)
    return makeSDMDataForSSARow(matchingRows[0], spectrumData,
        sdmVersion=sdmVersion)

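# Editor's note: a hedged call sketch; the RD id and element ids are
# hypothetical and depend on the data centre's resource descriptors.

def _exampleGetSpectrum(pubDID):
    rd = base.caches.getRD("myspectra/q")    # hypothetical RD id
    return makeSDMDataForPUBDID(pubDID,
        rd.getById("spectra"),               # the SSA metadata table def
        rd.getById("build_sdm_data"))        # the spectrum-building data item
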
def _runQuery(self, resultTableDef, fragment, pars, queryMeta, **kwargs):
    with base.getTableConn() as conn:
        queriedTable = rsc.TableForDef(self.queriedTable, nometa=True,
            create=False, connection=conn)
        queriedTable.setTimeout(queryMeta["timeout"])

        if fragment and pars:
            resultTableDef.addMeta("info", repr(pars),
                infoName="queryPars", infoValue=fragment)

        iqArgs = {"limits": queryMeta.asSQL(), "distinct": self.distinct,
            "groupBy": self.groupBy}
        iqArgs.update(kwargs)

        try:
            try:
                return self._makeTable(
                    queriedTable.iterQuery(resultTableDef, fragment, pars,
                        **iqArgs), resultTableDef, queryMeta)
            except:
                mapDBErrors(*sys.exc_info())
        finally:
            queriedTable.close()

def _makeCapability(self, publication):
    res = CapabilityMaker._makeCapability(self, publication)

    with base.getTableConn() as conn:
        from gavo.protocols import tap
        from gavo.adql import ufunctions

        res[[
            TR.dataModel(ivoId=dmivoid)[dmname]
            for dmname, dmivoid in conn.query(
                "select dmname, dmivorn from tap_schema.supportedmodels")]]

    res[
        # Once we support more than one language, we'll have to
        # revisit this -- the optional features must then become
        # a property of the language.
        [TR.language[
                TR.name[langName],
                TR.version(ivoId=ivoId)[version],
                TR.description[description],
                TR.languageFeatures(
                    type="ivo://ivoa.net/std/TAPRegExt#features-udf")[[
                        TR.feature[
                            TR.form[udf.adqlUDF_signature],
                            TR.description[udf.adqlUDF_doc]]
                        for udf in ufunctions.UFUNC_REGISTRY.values()]],
                TR.languageFeatures(
                    type="ivo://ivoa.net/std/TAPRegExt#features-adqlgeo")[[
                        TR.feature[TR.form[funcName]]
                        # take this from adql.grammar somehow?
                        for funcName in ("BOX", "POINT", "CIRCLE", "POLYGON",
                            "REGION", "CENTROID", "COORD1", "COORD2",
                            "DISTANCE", "CONTAINS", "INTERSECTS", "AREA")]]]
            for langName, version, description, ivoId
                in tap.getSupportedLanguages()],
        [TR.outputFormat(ivoId=ivoId)[
                TR.mime[mime],
                [TR.alias[alias] for alias in aliases]]
            for mime, aliases, description, ivoId
                in tap.getSupportedOutputFormats()],
        [TR.uploadMethod(ivoId="ivo://ivoa.net/std/TAPRegExt#%s"%proto)
            for proto in tap.UPLOAD_METHODS],
        TR.retentionPeriod[
            TR.default[str(base.getConfig("async", "defaultLifetime"))]],
        TR.executionDuration[
            TR.default[str(base.getConfig("async", "defaultExecTime"))]],
        TR.outputLimit[
            TR.default(unit="row")[
                str(base.getConfig("async", "defaultMAXREC"))],
            TR.hard(unit="row")[
                str(base.getConfig("async", "hardMAXREC"))]],
        TR.uploadLimit[
            TR.hard(unit="byte")[
                str(base.getConfig("web", "maxUploadSize"))]]]

    return res

def findPublishedRDs():
    """returns the ids of all RDs which have been published before.
    """
    with base.getTableConn() as conn:
        return [r["sourcerd"] for r in conn.queryToDicts(
            "select distinct sourcerd from dc.resources where not deleted")]

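# Editor's note: a trivial usage sketch; this assumes a DaCHS environment
# in which the gavo packages and the dc.resources table are available.

for rdId in findPublishedRDs():
    print(rdId)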