Example #1
File: upgrade.py Project: GMBarra/Docker
    def u_000_findMixedinTables(cls, connection):
        """inform about tables with non-trivial mixins."""
        # in reality, the mixins that really give us a headache here
        # are the ones mixin in products.  Hence, we simply look
        # for tables that have both accref and embargo; that's
        # probably a certain indication.

        print(
            "\n!! Important: column sequences"
            " of tables with some mixins have changed.")
        print "!! If this affects you, below commands are shown that will re-import"
        print "!! the affected tables.  Some services on top of these tables may"
        print "!! be *broken* until these commands have run."
        print "!! Sorry for this inconvenience; we hope it won't happen again.\n"

        from gavo import registry
        for rdId in registry.findAllRDs():
            if rdId.startswith("__system"):
                continue

            try:
                rd = base.caches.getRD(rdId)
            except:
                # ignore broken RDs -- services there are broken anyway
                continue

            ids = set()

            for td in rd.tables:
                try:
                    td.getColumnByName("accref") and td.getColumnByName(
                        "embargo")
                except base.NotFoundError:
                    continue  # table not affected
                else:

                    if not rsc.TableForDef(
                            td, connection=connection, create=False).exists():
                        continue

                    # table needs re-importing; see if you can find a corresponding
                    # data element
                    for dd in rd.dds:
                        for make in dd.makes:
                            if make.table == td:
                                ids.add(dd.id)
            if ids:
                print "gavo imp '%s' %s" % (rd.sourceId, " ".join(
                    "'%s'" % id for id in ids))

        sys.stderr.write("\nEnd of scan of mixin-affected tables...")
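The exists() probe in the loop above is the general way to check whether a table definition is actually materialised in the database. Below is a minimal standalone sketch of that check, assuming the usual gavo.base and gavo.rsc imports; the RD id and table id arguments are placeholders.

from gavo import base, rsc

def tableIsMaterialised(rdId, tableId, connection):
    # Resolve the table definition through the RD cache, then probe the
    # database without creating anything (create=False).
    td = base.caches.getRD(rdId).getById(tableId)
    return rsc.TableForDef(td, connection=connection, create=False).exists()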
Example #2
def getAccessibleTables():
    """returns a list of qualified table names for the TAP-published tables.
    """
    tapRD = base.caches.getRD(RD_ID)
    td = tapRD.getById("tables")
    table = rsc.TableForDef(td)
    res = [
        r["table_name"]
        for r in table.iterQuery([td.getColumnByName("table_name")],
                                 "",
                                 limits=("order by table_name", {}))
    ]
    table.close()
    return res
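A hypothetical call site for getAccessibleTables, for instance in a small maintenance script that just lists the TAP-published tables:

# Hypothetical usage: print one qualified table name per line.
for tableName in getAccessibleTables():
    print(tableName)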
Example #3
    def data_publishedRDs(self, ctx, data):
        td = base.caches.getRD("//services").getById("resources")
        with base.getTableConn() as conn:
            table = rsc.TableForDef(td, connection=conn)
            try:
                return [
                    row["sourceRD"] for row in table.iterQuery(
                        [td.getColumnByName("sourceRD")],
                        "",
                        distinct=True,
                        limits=("ORDER BY sourceRD", {}))
                ]
            finally:
                table.close()
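The connection handling above (getTableConn, TableForDef, iterQuery, close in a finally clause) generalises to any single-column lookup. Here is a sketch of that pattern as a helper, assuming the usual gavo.base and gavo.rsc imports; the table definition and column name are whatever the caller supplies.

from gavo import base, rsc

def distinctColumnValues(td, columnName):
    # Fetch one column with DISTINCT and an ORDER BY, making sure the
    # table object is closed again even if iteration fails.
    with base.getTableConn() as conn:
        table = rsc.TableForDef(td, connection=conn)
        try:
            return [row[columnName]
                for row in table.iterQuery(
                    [td.getColumnByName(columnName)],
                    "",
                    distinct=True,
                    limits=("ORDER BY %s" % columnName, {}))]
        finally:
            table.close()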
Example #4
    def _hackInputTableFromPreparsed(self, renderer, args, core=None):
        """returns an input table from dictionaries as produced by nevow formal.

        This is a shortcut to bypass the relatively expensive makeData.
        And is probably a bad idea.
        """
        args = utils.CaseSemisensitiveDict(args)
        inputDD = self.getInputDDFor(renderer, core=core)
        inputTable = rsc.TableForDef(inputDD.makes[0].table)

        for ik in inputDD.grammar.iterInputKeys():
            if ik.name in args:
                if args[ik.name] is not None:
                    inputTable.setParam(ik.name, args[ik.name])
            else:
                inputTable.setParam(ik.name, ik.value)

        inputTable.validateParams()
        return inputTable
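A hypothetical call of the shortcut above; service, renderer and the "RA"/"DEC" keys are placeholders for whatever the real service declares, the point being that a plain dictionary (e.g. straight from a nevow formal form) is enough.

# Hypothetical: keys are matched case-semisensitively against the input
# keys; anything missing falls back to the input key's default value.
inputTable = service._hackInputTableFromPreparsed(
    renderer, {"RA": 10.68, "DEC": 41.27})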
Example #5
	def deliverProductTar(self, coreResult, request, queryMeta):
		"""causes a tar containing all accrefs mentioned in coreResult
		to be streamed out via request.
		"""
		table = coreResult.original.getPrimaryTable()
		productColumns = table.tableDef.getProductColumns()
		if not productColumns:
			raise base.ValidationError("This query does not select any"
				" columns with access references", "_OUTPUT")
		
		inputTableRows = []
		for row in table:
			for colName in productColumns:
				inputTableRows.append({"accref": row[colName]})
		inputTable = rsc.TableForDef(self.rd.getById("forTar").inputTable, 
			rows=inputTableRows)

		prods = self.core.run(coreResult.service, inputTable, queryMeta)
		return self._streamOutTar(prods, request, queryMeta)
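The rows= keyword used above fills the input table directly from in-memory dictionaries instead of going through makeData. A minimal sketch of that construction, with a placeholder table definition td and made-up accrefs:

# Sketch: an in-memory table pre-filled with literal rows, one per accref.
inputTable = rsc.TableForDef(td,
    rows=[{"accref": "data/obj1.fits"}, {"accref": "data/obj2.fits"}])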
Example #6
	def _makeTable(self, rowIter, resultTableDef, queryMeta):
		"""returns a table from the row iterator rowIter, updating queryMeta
		as necessary.
		"""
		rows = list(rowIter)
		isOverflowed = len(rows) > queryMeta.get("dbLimit", 1e10)
		if isOverflowed:
			del rows[-1]
		queryMeta["Matched"] = len(rows)
		res = rsc.TableForDef(resultTableDef, rows=rows)
		if isOverflowed:
			queryMeta["Overflow"] = True
			res.addMeta("_warning", "The query limit was reached.  Increase it"
				" to retrieve more matches.  Note that unsorted truncated queries"
				" are not reproducible (i.e., might return a different result set"
				" at a later time).")
			res.addMeta("_queryStatus", "Overflowed")
		else:
			res.addMeta("_queryStatus", "Ok")
		return res
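The overflow test implies that the underlying query fetches up to one row more than dbLimit; under that assumption the logic reduces to the following self-contained sketch with plain Python values:

dbLimit = 3
rows = [{"id": i} for i in range(dbLimit + 1)]  # pretend the iterator yielded one extra row
isOverflowed = len(rows) > dbLimit              # True: the limit was hit
if isOverflowed:
    del rows[-1]                                # drop the sentinel row again
# len(rows) is now exactly dbLimit, so "Matched" would be reported as 3.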
Example #7
def querySubjectsList(setName=None):
	"""returns a list of local services chunked by subjects.

	This is mainly for the root page (see web.root).  Query the
	cache using the __system__/services key to clear the cache on services
	"""
	setName = setName or 'local'
	svcsForSubjs = {}
	td = common.getServicesRD().getById("subjects_join")
	otd = svcs.OutputTableDef.fromTableDef(td, None)
	with base.getTableConn() as conn:
		for row in rsc.TableForDef(td, connection=conn).iterQuery(otd, 
				"setName=%(setName)s AND subject IS NOT NULL", {"setName": setName}):
			svcsForSubjs.setdefault(row["subject"], []).append(row)
	for s in svcsForSubjs.values():
		s.sort(key=lambda a: a["title"])
	res = [{"subject": subject, "chunk": s}
		for subject, s in svcsForSubjs.iteritems()]
	res.sort(lambda a,b: cmp(a["subject"], b["subject"]))
	return res
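A hypothetical consumer of the structure returned above, e.g. a plain-text rendering of the root page; only the keys visible in the function ("subject", "chunk", "title") are assumed.

# Hypothetical usage: one heading per subject, services sorted by title.
for block in querySubjectsList():
    print(block["subject"])
    for svc in block["chunk"]:
        print("  " + svc["title"])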
Example #8
def makeSDMDataForPUBDID(pubDID,
                         ssaTD,
                         spectrumData,
                         sdmVersion=base.getConfig("ivoa", "sdmVersion")):
    """returns a rsc.Data instance containing an SDM compliant spectrum
	for pubDID from ssaTable.

	ssaTD is the definition of a table containg the SSA metadata, 
	spectrumData is a data element making a primary table containing
	the spectrum data from an SSA row (typically, this is going to be
	the tablesource property of an SSA service).
	"""
    with base.getTableConn() as conn:
        ssaTable = rsc.TableForDef(ssaTD, connection=conn)
        matchingRows = list(
            ssaTable.iterQuery(ssaTable.tableDef, "ssa_pubdid=%(pubdid)s",
                               {"pubdid": pubDID}))
        if not matchingRows:
            raise svcs.UnknownURI("No spectrum with pubdid %s known here" %
                                  pubDID)
    return makeSDMDataForSSARow(matchingRows[0],
                                spectrumData,
                                sdmVersion=sdmVersion)
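A hypothetical call of the function above; the publisher DID is a placeholder, and ssaTD and spectrumData would come from the SSA service's resource descriptor as described in the docstring.

# Hypothetical: raises svcs.UnknownURI if no SSA row matches the pubDID.
sdmData = makeSDMDataForPUBDID(
    "ivo://example.org/spectra?id=42", ssaTD, spectrumData)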
Example #9
	def _runQuery(self, resultTableDef, fragment, pars, queryMeta,
			**kwargs):
		with base.getTableConn() as conn:
			queriedTable = rsc.TableForDef(self.queriedTable, nometa=True,
				create=False, connection=conn)
			queriedTable.setTimeout(queryMeta["timeout"])

			if fragment and pars:
				resultTableDef.addMeta("info", repr(pars),
					infoName="queryPars", infoValue=fragment)

			iqArgs = {"limits": queryMeta.asSQL(), "distinct": self.distinct,
				"groupBy": self.groupBy}
			iqArgs.update(kwargs)

			try:
				try:
					return self._makeTable(
						queriedTable.iterQuery(resultTableDef, fragment, pars,
							**iqArgs), resultTableDef, queryMeta)
				except:
					mapDBErrors(*sys.exc_info())
			finally:
				queriedTable.close()
Example #10
File: upgrade.py Project: GMBarra/Docker
	def u_010_makeMetastore(cls, connection):
		"""create the meta store"""
		td = base.caches.getRD("//dc_tables").getById("metastore")
		rsc.TableForDef(td, create=True, connection=connection)
	def u_000_updateObscore(cls, connection):
		"""update obscore to work even when the table is empty"""
		rsc.TableForDef(base.caches.getRD("//obscore").getById("emptyobscore"),
			connection=connection, create=True)
		rsc.makeData(base.caches.getRD("//obscore").getById("create"),
			connection=connection, runCommit=False)