Example #1
    def u_000_updateObscore(cls, connection):
        """update obscore to work even when the table is empty"""
        rsc.TableForDef(base.caches.getRD("//obscore").getById("emptyobscore"),
                        connection=connection,
                        create=True)
        rsc.makeData(base.caches.getRD("//obscore").getById("create"),
                     connection=connection,
                     runCommit=False)
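The u_NNN classmethods in this and several of the later examples are stepwise database upgraders; as the test-environment example further down shows, they are normally driven by gavo.user.upgrade.upgrade() rather than called by hand. A minimal sketch of that driver call, assuming a working DaCHS installation:

# Sketch: run all pending u_NNN upgraders through the regular upgrade
# machinery (the test-environment example below does exactly this).
from gavo import rscdesc  #noflake: caches registration
from gavo.user import upgrade

upgrade.upgrade()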
	def u_000_siapDateObsToMJD(cls, connection):
		"""change SIAP and SSAP dateObs columns to MJD"""
		mth = base.caches.getMTH(None)
		connection.execute("DROP VIEW IF EXISTS ivoa.obscore")

		for tableName, fieldName in connection.query(
				"SELECT tableName, fieldName FROM dc.columnmeta"
				" WHERE type='timestamp' AND"
				" fieldName LIKE '%%dateObs'"):
			cls._upgradeTable(mth.getTableDefForTable(tableName), fieldName,
				connection)

		from gavo import rsc
		rsc.makeData(base.caches.getRD("//obscore").getById("create"),
			connection=connection, runCommit=False)
Example #3
    def _getMetadataData(self, queryMeta):
        """returns a SIAP-style metadata data item.
		"""
        # XXX TODO: build VOTable directly (rather than from data)
        inputFields = []
        for param in self.service.getInputKeysFor(self):
            # Caution: UPLOAD mangling is a *renderer* thing -- the core
            # doesn't know anything about it.  Hence, parameter adaption
            # is *not* done by adapting the core.  Instead:
            if param.type == "file":
                inputFields.append(dali.getUploadKeyFor(param))
            else:
                inputFields.append(param.change(name="INPUT:" + param.name))
        outputTable = MS(rscdef.TableDef,
                         columns=self.service.getCurOutputFields(queryMeta),
                         id="result")

        nullRowmaker = MS(rscdef.RowmakerDef)
        dataDesc = MS(
            svcs.InputDescriptor,
            makes=[MS(rscdef.Make, table=outputTable, rowmaker=nullRowmaker)],
            params=inputFields,
            parent_=self.service.rd)

        data = rsc.makeData(dataDesc)
        data.tables["result"].votCasts = self._outputTableCasts
        data.setMeta("_type", "results")
        data.addMeta("info", "OK", infoName="QUERY_STATUS", infoValue="OK")
        data.addMeta("info",
                     base.getMetaText(self.service, "title") or "Unnamed",
                     infoName="serviceInfo",
                     infoValue=str(self.service.getURL(self.name)))

        return data
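A hedged usage sketch for the data item built above: such a metadata response is typically serialized as a VOTable, which a later example (_makeMetadata) does via votablewrite.getAsVOTable. Here, core and queryMeta stand for objects supplied by the surrounding service machinery, and the votablewrite import path is an assumption.

# Sketch only: serialize the SIAP-style metadata data item to a VOTable
# string; `core` and `queryMeta` are assumed to come from the service layer.
from gavo.formats import votablewrite  # import path is an assumption

data = core._getMetadataData(queryMeta)
payload = votablewrite.getAsVOTable(data)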
Example #4
	def getPreviewData(self, accref):
		table = rsc.makeData(self.sdmDD, forceSource={
			"accref": accref}).getPrimaryTable()
		data = [(r["spectral"], r["flux"]) for r in table.rows]
		data.sort()

		fig = pyplot.figure(figsize=(4,2))
		ax = fig.add_axes([0,0,1,1], frameon=False)

		if self.linearFluxes:
			plotter = ax.plot
		else:
			plotter = ax.semilogy
		
		plotter(
			[r[0] for r in data], 
			[r[1] for r in data],
			color="black")
		ax.xaxis.set_major_locator(matplotlib.ticker.NullLocator())
		ax.yaxis.set_major_locator(matplotlib.ticker.NullLocator())
		ax.yaxis.set_minor_locator(matplotlib.ticker.NullLocator())

		rendered = StringIO()
		pyplot.savefig(rendered, format="png", dpi=50)
		pyplot.close()

		rendered = StringIO(rendered.getvalue())
		im = Image.open(rendered)
		im = im.convert("L")
		im = im.convert("P", palette=Image.ADAPTIVE, colors=8)
		compressed = StringIO()
		im.save(compressed, format="png", bits=3)
		return compressed.getvalue()
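getPreviewData returns the 8-colour preview PNG as a byte string; a minimal usage sketch, where previewCore and the accref value are placeholders:

# Sketch: write the generated preview to a local file for inspection.
# `previewCore` and the accref value are placeholders, not real objects.
png = previewCore.getPreviewData("mydata/spec1.fits")
with open("preview.png", "wb") as f:
    f.write(png)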
	def u_020_updatePublished(cls, connection):
		"""ingesting column_index for TAP-published tables."""
		toDo = [r[0] for r in 
			connection.query(
				"SELECT DISTINCT sourceRD FROM TAP_SCHEMA.columns")]
		dd = base.resolveCrossId("//tap#importColumnsFromRD")

		for rdId in toDo:
			try:
				rd = base.caches.getRD(rdId)
			except Exception as msg:
				base.ui.notifyWarning("RD %s couldn't be loaded (%s)."
					"  Fix and run gavo imp -m on it to get"
					" column indices in TAP_SCHEMA"%(rdId, msg))
				continue
			rsc.makeData(dd, forceSource=rd, connection=connection)
Example #6
    def _makeMetadata(self, service):
        metaTD = self.outputTable.change(id="results")
        for param in metaTD.params:
            param.name = "OUTPUT:" + param.name
        dd = base.makeStruct(rscdef.DataDescriptor,
                             parent_=self.rd,
                             makes=[
                                 base.makeStruct(rscdef.Make,
                                                 table=metaTD,
                                                 rowmaker=base.makeStruct(
                                                     rscdef.RowmakerDef))
                             ])
        dd.setMetaParent(service)

        for inP in self.inputTable.params:
            dd.feedObject("param", inP.change(name="INPUT:" + inP.name))

        dd.setMeta("_type", "meta")
        dd.addMeta("info", "", infoName="QUERY_STATUS", infoValue="OK")
        dd.addMeta("info",
                   "SSAP",
                   infoName="SERVICE_PROTOCOL",
                   infoValue="1.04")

        data = rsc.makeData(dd)

        return base.votableType, votablewrite.getAsVOTable(data)
def updateServiceList(rds, metaToo=False, connection=None, onlyWarn=True,
		keepTimestamp=False):
	"""updates the services defined in rds in the services table in the database.
	"""
	recordsWritten = 0
	parseOptions = rsc.getParseOptions(validateRows=True, batchSize=20)
	if connection is None:
		connection = base.getDBConnection("admin")
	dd = common.getServicesRD().getById("tables")
	dd.grammar = _rdRscRecGrammar
	dd.grammar.keepTimestamp = keepTimestamp
	depDD = common.getServicesRD().getById("deptable")
	msg = None
	for rd in rds:
		if rd.sourceId.startswith("/"):
			raise base.Error("Resource descriptor ID must not be absolute, but"
				" '%s' seems to be."%rd.sourceId)

		deletedUpdater = getDeletedIdentifiersUpdater(connection, rd)

		try:
			data = rsc.makeData(dd, forceSource=rd, parseOptions=parseOptions,
				connection=connection)
			recordsWritten += data.nAffected
			rsc.makeData(depDD, forceSource=rd, connection=connection)

			if metaToo:
				from gavo.protocols import tap
				tap.unpublishFromTAP(rd, connection)
				for dependentDD in rd:
					rsc.Data.create(dependentDD, connection=connection).updateMeta()
				tap.publishToTAP(rd, connection)

			deletedUpdater()

		except base.MetaValidationError as ex:
			msg = ("Aborting publication of rd '%s' since meta structure of"
				" %s (id='%s') is invalid:\n * %s")%(
				rd.sourceId, repr(ex.carrier), ex.carrier.id, "\n * ".join(ex.failures))
		except base.NoMetaKey as ex:
			msg = ("Aborting publication of '%s' at service '%s': Resource"
				" record generation failed: %s"%(
				rd.sourceId, ex.carrier.id, str(ex)))
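updateServiceList is also exercised in the test-environment example further down; a minimal sketch of republishing a single resource descriptor, assuming the function lives in gavo.registry.publication as that example suggests. The RD id is a placeholder.

# Sketch: republish the service records of one RD (placeholder RD id).
from gavo import base
from gavo import rscdesc  #noflake: caches registration
from gavo.registry import publication

publication.updateServiceList([base.caches.getRD("data/myresource")])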
    def run(self, service, inputTable, queryMeta):
        """returns a data set containing product sources for the keys mentioned in
		inputTable.
		"""
        authGroups = self._getGroups(queryMeta["user"], queryMeta["password"])

        dd = MS(rscdef.DataDescriptor,
                grammar=MS(ProductsGrammar, groups=authGroups),
                make=[MS(rscdef.Make, table=self.outputTable)])

        return rsc.makeData(dd, forceSource=self._getRAccrefs(inputTable))
    def _makeInputTableFor(self, renderer, args, core=None):
        """returns an input table for this service  through renderer, filled 
		from contextData.
		"""
        if isinstance(args, PreparsedInput) and not self.inputDD:
            return self._hackInputTableFromPreparsed(renderer, args, core=core)
        else:
            return rsc.makeData(
                self.getInputDDFor(renderer, core=core),
                parseOptions=rsc.parseValidating,
                forceSource=args,
                connection=base.NullConnection()).getPrimaryTable()
def publishToTAP(rd, connection):
    """publishes info for all ADQL-enabled tables of rd to the TAP_SCHEMA.
	"""
    # first check if we have any adql tables at all, and don't attempt
    # anything if we don't (this is cheap optimizing and keeps TAP_SCHEMA
    # from being created on systems that don't do ADQL.
    for table in rd.tables:
        if table.adql:
            break
    else:
        return
    tapRD = base.caches.getRD(RD_ID)
    for ddId in [
            "importTablesFromRD", "importDMsFromRD", "importColumnsFromRD",
            "importFkeysFromRD", "importGroupsFromRD"
    ]:
        dd = tapRD.getById(ddId)
        rsc.makeData(dd,
                     forceSource=rd,
                     parseOptions=rsc.parseValidating,
                     connection=connection)
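Elsewhere in this collection publishToTAP is invoked as tap.publishToTAP(rd, connection) inside updateServiceList; a minimal sketch of calling it on its own, with a placeholder RD id and an admin connection obtained as in the upload example below:

# Sketch: push one RD's ADQL-enabled tables into TAP_SCHEMA
# (the RD id is a placeholder).
from gavo import base
from gavo import rscdesc  #noflake: caches registration
from gavo.protocols import tap

with base.getWritableAdminConn() as conn:
    tap.publishToTAP(base.caches.getRD("data/myresource"), conn)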
    def _importData(self, sourcePath, mode):
        """parses the input file at sourcePath and writes the result to the DB.
		"""
        base.ui.notifyInfo("Web upload ingesting %s in %s mode" %
                           (sourcePath, mode))
        try:
            parseOptions = rsc.getParseOptions(validateRows=True,
                                               updateMode=True,
                                               doTableUpdates=mode == "u")
            with base.getWritableAdminConn() as conn:
                res = rsc.makeData(self.destDD,
                                   parseOptions=parseOptions,
                                   forceSource=sourcePath,
                                   connection=conn)
        except Exception as msg:
            raise base.ui.logOldExc(
                base.ValidationError(
                    "Cannot enter %s in"
                    " database: %s" % (os.path.basename(sourcePath), str(msg)),
                    "File"))
def makeSDMDataForSSARow(ssaRow,
                         spectrumData,
                         sdmVersion=base.getConfig("ivoa", "sdmVersion")):
    """returns a rsc.Data instance containing an SDM compliant spectrum
	for the spectrum described by ssaRow.

	spectrumData is a data element making a primary table containing
	the spectrum data from an SSA row (typically, this is going to be
	the tablesource property of an SSA service).

	You'll usually use this via //datalink#sdm_genData
	"""
    with base.getTableConn() as conn:
        resData = rsc.makeData(spectrumData,
                               forceSource=ssaRow,
                               connection=conn)
    resTable = resData.getPrimaryTable()
    resTable.setMeta(
        "description",
        "Spectrum from %s" % products.makeProductLink(ssaRow["accref"]))

    # fudge accref  into a full URL
    resTable.setParam("accref",
                      products.makeProductLink(resTable.getParam("accref")))
    resData.DACHS_SDM_VERSION = sdmVersion

    # fudge spoint params into 2-arrays
    for param in resTable.iterParams():
        # Bad, bad: In-place changes; we should think how such things
        # can be done better in a rewrite
        if param.type == "spoint":
            val = param.value
            param.type = "double precision(2)"
            param.xtype = None
            param.unit = "deg"
            if val:
                param.set([val.x / utils.DEG, val.y / utils.DEG])

    if sdmVersion == "2":
        hackSDM1ToSDM2(resData)
    return resData
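A hedged sketch of consuming the result: the SDM data returned here can be serialized like any other rsc.Data, e.g. with votablewrite.getAsVOTable as in the SSAP metadata example above. ssaRow and spectrumData are assumed to come from the SSA machinery, and the votablewrite import path is an assumption.

# Sketch: build SDM data for one SSA row and serialize it as a VOTable.
from gavo.formats import votablewrite  # import path is an assumption

sdmData = makeSDMDataForSSARow(ssaRow, spectrumData)
payload = votablewrite.getAsVOTable(sdmData)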
    def getDatalinksResource(self, ctx, service):
        """returns a VOTable RESOURCE element with the data links.

		This does not contain the actual service definition elements, but it
		does contain references to them.

		You must pass in a VOTable context object ctx (for the management
		of ids).  If this is the entire content of the VOTable, use
		votablewrite.VOTableContext() there.
		"""
        internalLinks = []

        internalLinks.extend(
            LinkDef(s.pubDID,
                    service.getURL(s.rendName),
                    serviceType=ctx.getOrMakeIdFor(s),
                    semantics="#proc") for s in self.datalinkEndpoints)

        for d in self.descriptors:
            # for all descriptors that are products, make a full dataset
            # available through the data access, possibly also adding a preview.
            if not isinstance(d, ProductDescriptor):
                continue
            if hasattr(d, "suppressAutoLinks"):
                continue

            # if the accref is a datalink document, go through dlget itself.
            if d.mime == "application/x-votable+xml;content=datalink":
                internalLinks.append(
                    LinkDef(d.pubDID,
                            service.getURL("dlget") +
                            "?ID=%s" % urllib.quote(d.pubDID),
                            description="The full dataset.",
                            contentType=products.guessMediaType(d.accref),
                            contentLength=d.estimateSize(),
                            semantics="#this"))

            else:
                internalLinks.append(
                    LinkDef(d.pubDID,
                            products.makeProductLink(d.accref),
                            description="The full dataset.",
                            contentType=d.mime,
                            contentLength=d.estimateSize(),
                            semantics="#this"))

            if getattr(d, "preview", None):
                if d.preview.startswith("http"):
                    previewLink = d.preview
                else:
                    previewLink = products.makeProductLink(
                        products.RAccref(d.accref, inputDict={"preview": True}))
                # TODO: preview mime is None for AUTO previews, and there's
                # not much we can do about it.  Or is there?
                internalLinks.append(
                    LinkDef(d.pubDID,
                            previewLink,
                            description="A preview for the dataset.",
                            contentType=d.previewMime,
                            semantics="#preview"))

        data = rsc.makeData(
            base.caches.getRD("//datalink").getById("make_response"),
            forceSource=self.datalinkLinks + internalLinks + self.errors)
        data.setMeta("_type", "results")

        return votablewrite.makeResource(
            votablewrite.VOTableContext(tablecoding="td"), data)
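A hedged usage sketch following the docstring above: when the RESOURCE is the whole document, a fresh VOTableContext is passed in. dlCore and service are assumed objects, and the votablewrite import path is an assumption.

# Sketch: obtain the datalink RESOURCE for a standalone VOTable.
from gavo.formats import votablewrite  # import path is an assumption

ctx = votablewrite.VOTableContext(tablecoding="td")
resource = dlCore.getDatalinksResource(ctx, service)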
	def run(self, service, inputTable, queryMeta):
		"""starts the computing process if this is a computed data set.
		"""
		res = rsc.makeData(self.resultParse,
			forceSource=StringIO(self._runAndCapture(inputTable)))
		return res.getPrimaryTable()
Example #15
    def u_010_createDLAsyncTable(cls, connection):
        """import job table for async datalink"""
        from gavo import rsc
        rsc.makeData(base.caches.getRD("//datalink").getById("import"),
                     connection=connection,
                     runCommit=False)
Example #16
    def u_010_updateObscore(cls, connection):
        """upgrade ivoa.obscore to obscore 1.1.
		"""
        dd = base.caches.getRD(
            "//obscore", doQueries=False).getById("refreshAfterSchemaUpdate")
        rsc.makeData(dd, connection=connection)
Example #17
    def u_000_remetaObscore(cls, connection):
        """update obscore metadata to fix the erroneous id"""
        rsc.makeData(base.caches.getRD("//obscore").getById("create"),
                     connection=connection,
                     runCommit=False,
                     parseOptions=rsc.getParseOptions(metaOnly=True))
			os.symlink(os.path.join(TEST_BASE, "test_data"),
				os.path.join(base.getConfig("inputsDir"), "data"))
			os.rmdir(os.path.join(base.getConfig("inputsDir"), "__system"))
			os.symlink(os.path.join(TEST_BASE, "test_data", "__system"),
				os.path.join(base.getConfig("inputsDir"), "__system"))
			os.mkdir(os.path.join(base.getConfig("inputsDir"), "test"))
			initdachs.initDB(dsn)

			from gavo.registry import publication
			from gavo import rsc
			from gavo import rscdesc #noflake: caches registration
			from gavo import base
			publication.updateServiceList([base.caches.getRD("//services")])

			# Import some resources necessary in trial tests
			rsc.makeData(
				base.caches.getRD("data/ssatest").getById("test_import"))
			rsc.makeData(
				base.caches.getRD("//obscore").getById("create"))
			rsc.makeData(
				base.resolveCrossId("//uws#enable_useruws"))
		except:
			traceback.print_exc()
			sys.stderr.write("Creation of test environment failed.  Remove %s\n"
				" before trying again.\n"%(base.getConfig("rootDir")))
			sys.exit(1)

	else:
		# run any pending upgrades (that's a test for them, too... of sorts)
		from gavo.user import upgrade
		upgrade.upgrade()
Example #19
    def u_000_adqlfunctions(cls, connection):
        """update ADQL GAVO-defined functions for the postgres planner's benefit"""
        rsc.makeData(base.caches.getRD("//adql").getById("make_udfs"),
                     connection=connection,
                     runCommit=False)
Example #20
    def u_000_tapSchema(cls, connection):
        """add supportedmodels table to tap_schema"""
        rsc.makeData(base.caches.getRD("//tap").getById("createSchema"),
                     connection=connection,
                     runCommit=False)
Example #21
    def u_000_update_funcs(cls, connection):
        """update GAVO server-side functions"""
        rsc.makeData(base.caches.getRD("//adql").getById("make_udfs"),
                     connection=connection,
                     runCommit=False)