def __iter__(self):
        """actually runs the query and returns rows (dictionaries).

		You can only iterate once.  At exhaustion, the connection will
		be closed.
		"""
        if self.connection is None:
            raise base.ReportableError("QueryTable already exhausted.")

        nRows = 0
        cursor = self.connection.cursor("cursor" + hex(id(self)))
        cursor.execute(self.query)
        while True:
            nextRows = cursor.fetchmany(1000)
            if not nextRows:
                break
            for row in nextRows:
                nRows += 1
                yield self.tableDef.makeRowFromTuple(row)
        cursor.close()

        if self.matchLimit and self.matchLimit == nRows:
            self.setMeta("_queryStatus", "OVERFLOW")
        else:
            self.setMeta("_queryStatus", "OK")
        self.cleanup()
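
# Usage sketch (hedged): assumes a QueryTable built as in the
# constructor shown further down; handleRow is a hypothetical callback.
qt = QueryTable(tableDef, "SELECT * FROM demo.main", connection,
    matchLimit=1000)
for row in qt:
    # each row is a dictionary keyed by column name
    handleRow(row)
# Iterating again raises "QueryTable already exhausted."; the
# _queryStatus meta says whether matchLimit cut the result off.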
    def _getColDescs(self):
        """returns a C initializer for an array of FITSColDescs.
		"""
        res = []
        for index, fcd in enumerate(self.fitsTable.columns):
            col = self.colForFITSIndex[index]
            if col is None:
                # table column not part of FITS table; suppress reading
                # by having .cSize=0
                res.append("{.cSize = 0, .fitsType = 0, .index=0}")
                continue

            length, typecode = self._parseFITSFormat(fcd.format, fcd.name)

            if typecode == "A":
                # special handling for strings, as we need their size
                # var length strings have been rejected above
                res.append("{.cSize = %d, .fitsType = TSTRING, .index=%d}" %
                           (length, index + 1))

            else:
                if length != 1:
                    raise base.ReportableError(
                        "Column %s: Arrays not supported"
                        " by gavo mkboost." % fcd.name)
                res.append("{.cSize = sizeof(%s), .fitsType = %s, .index=%d}" %
                           (self.fitsTypes[typecode][1],
                            self.fitsTypes[typecode][0], index + 1))

        return res
Example 3
def processSource(data, source, feeder, opts, connection=None):
    """ingests source into the Data instance data.

	If you pass in a connection, you can set opts.keepGoing to true
	and make the system continue importing even if a particular source 
	has caused an error.  In that case, everything contributed by
	the bad source is rolled back.
	"""
    if not opts.keepGoing:
        # simple shortcut if we don't want to recover from bad sources
        _processSourceReal(data, source, feeder, opts)

    else:  # recover from bad sources, be more careful
        if connection is None:
            raise base.ReportableError(
                "Can only ignore source errors"
                " with an explicit connection",
                hint="This is a programming error.")
        try:
            with connection.savepoint():
                _processSourceReal(data, source, feeder, opts)
                feeder.flush()
        except Exception, ex:
            feeder.reset()
            if not isinstance(ex, base.ExecutiveAction):
                base.ui.notifyError(
                    "Error while importing source; changes from"
                    " this source will be rolled back, processing will continue."
                    " (%s)" % utils.safe_str(ex))
    def __init__(self, tableDef, **kwargs):
        self.suppressIndex = kwargs.pop("suppressIndex", False)
        self.tableUpdates = kwargs.pop("tableUpdates", False)
        self.exclusive = kwargs.pop("exclusive", False)
        self.commitAfterMeta = kwargs.pop("commitAfterMeta", False)
        table.BaseTable.__init__(self, tableDef, **kwargs)

        self._makeConnection(kwargs)

        if self.tableDef.rd is None and not self.tableDef.temporary:
            raise base.ReportableError(
                "TableDefs without resource descriptor"
                " cannot be used to access database tables")
        self.tableName = self.tableDef.getQName()
        self.nometa = (kwargs.get("nometa", False) or self.tableDef.temporary
                       or tableDef.rd.schema == "dc")

        self.newlyCreated = False
        if kwargs.get("create", False):
            self.createIfNecessary()
        if not self.tableUpdates:
            self.addCommand = (
                "INSERT INTO %s (%s) VALUES (%s)" % (self.tableName, ", ".join(
                    [str(c.name) for c in self.tableDef.columns]), ", ".join(
                        ["%%(%s)s" % c.key for c in self.tableDef.columns])))
        else:
            self.addCommand = "UPDATE %s SET %s WHERE %s" % (
                self.tableName, ", ".join("%s=%%(%s)s" % (f.name, f.key)
                                          for f in self.tableDef),
                " AND ".join("%s=%%(%s)s" % (n, n)
                             for n in self.tableDef.primary))
        if "rows" in kwargs:
            self.feedRows(kwargs["rows"])
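
# Standalone illustration of the insert template built above: for a
# hypothetical table demo.obs with columns ra and dec, addCommand
# becomes a psycopg2-style parameterized statement.
names = ["ra", "dec"]
cmd = "INSERT INTO %s (%s) VALUES (%s)" % ("demo.obs",
    ", ".join(names), ", ".join("%%(%s)s" % n for n in names))
# cmd == "INSERT INTO demo.obs (ra, dec) VALUES (%(ra)s, %(dec)s)"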
def getRDInputStream(srcId):
    """returns a read-open stream for the XML source of the resource
	descriptor with srcId.

	srcId is already normalized; that means that absolute paths must
	point to a file (possibly sans .rd), relative paths are relative
	to inputsDir or pkg_resources(/resources/inputs).

	This function prefers files with .rd to those without, and
	inputsDir to pkg_resources (the latter allowing the user to
	override built-in system RDs).
	"""
    for fName in _getFilenamesForId(srcId):
        if os.path.isfile(fName):
            # We don't want RDs from outside of inputs and config, as
            # these make referencing really messy.
            filePath = os.path.abspath(fName)
            if not (filePath.startswith(base.getConfig("inputsDir"))
                    or filePath.startswith(base.getConfig("configDir"))):
                raise base.ReportableError(
                    "%s: Only RDs below inputsDir (%s) are"
                    " allowed." % (fName, base.getConfig("inputsDir")))

            return fName, open(fName)

        if (pkg_resources.resource_exists('gavo', fName)
                and not pkg_resources.resource_isdir('gavo', fName)):
            return (PkgResourcePath(fName),
                    pkg_resources.resource_stream('gavo', fName))
    raise base.RDNotFound(srcId)
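
# Usage sketch (hedged: "myres/q" is an illustrative RD id).  The
# caller owns the returned stream and must close it:
srcName, inFile = getRDInputStream("myres/q")
try:
    rdSource = inFile.read()
finally:
    inFile.close()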
def declaredel(querier, args):
    import datetime

    from gavo import registry
    from gavo import rsc

    authority, path = registry.parseIdentifier(args.svcId)
    if authority != base.getConfig("ivoa", "authority"):
        raise base.ReportableError("You can only declare ivo ids from your"
                                   " own authority as deleted.")
    idParts = path.split("/")
    svcsRD = base.caches.getRD("//services")

    # mark in resources table
    resTable = rsc.TableForDef(svcsRD.getById("resources"),
                               connection=querier.connection)
    newRow = resTable.tableDef.getDefaults()
    newRow["sourceRD"] = "/".join(idParts[:-1])
    newRow["resId"] = idParts[-1]
    newRow["deleted"] = True
    newRow["title"] = "Ex " + args.svcId
    newRow["dateUpdated"] = newRow["recTimestamp"] = datetime.datetime.utcnow()
    resTable.addRow(newRow)

    # mark in sets table
    resTable = rsc.TableForDef(svcsRD.getById("sets"),
                               connection=querier.connection)
    newRow = resTable.tableDef.getDefaults()
    newRow["sourceRD"] = "/".join(idParts[:-1])
    newRow["renderer"] = "null"
    newRow["resId"] = idParts[-1]
    newRow["setName"] = "ivo_managed"
    newRow["deleted"] = True
    resTable.addRow(newRow)
Example 7
    def gatherUploadFiles(self, request):
        """creates a files attribute on request, containing all uploaded
		files.

		The upload files are removed from args, which is good since we
		don't want to serialize those in the parameters dictionary.

		This method inspects all upload parameters and converts the
		referenced arguments to cgi-like files as necessary.  Missing
		uploads will be noticed here, and the request will be rejected.

		Of course, all that hurts if someone manages to upload from REQUEST --
		but that's their fault then.
		"""
        request.files = {}
        for uploadSpec in request.args.get("upload", []):
            if uploadSpec:
                for tableName, upload in tap.parseUploadString(uploadSpec):
                    if upload.startswith("param:"):
                        paramName = upload[6:]
                        if paramName not in request.args or not request.args[
                                paramName]:
                            raise base.ReportableError(
                                "No parameter for upload"
                                " table %s" % tableName)

                        item = request.args.pop(paramName)[0]
                        # fix if it doesn't already look like a file
                        if getattr(item, "file", None) is None:
                            item = _FakeUploadedFile(
                                "unnamed_inline_upload_%s" % paramName, item)
                        request.files[paramName] = item
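
# Illustration (hedged, following the TAP UPLOAD convention): a spec
# like "mytable,param:t1" makes gatherUploadFiles pop the multipart
# argument t1 from request.args and expose it as request.files["t1"],
# a cgi-like file object.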
Example 8
def getPertainingDDs(rd, selectedIds):
    """returns a list of dds on which imp or drop should operate.

	By default, that's the "auto" dds of rd.  If selectedIds is not empty,
	it is validated that all ids mentioned actually exist.

	Finally, if no DDs are selected but DDs are available, an error is raised.
	"""
    if selectedIds:
        dds = _getSelectedDDIds(rd, selectedIds)
    else:
        dds = _getAutoDDIds(rd)
    if not dds:
        if not rd.dds:
            base.ui.notifyWarning("There is no data element"
                                  " in the RD %s; is that all right?" %
                                  rd.sourceId)
        else:
            raise base.ReportableError(
                "Neither automatic not manual data selected from RD %s " %
                rd.sourceId,
                hint="All data elements have auto=False.  You have to"
                " explicitely name one or more data to import (names"
                " available: %s)" % (", ".join(dd.id or "(anon)"
                                               for dd in rd.dds)))
    return dds
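
# Usage sketch (hedged: the ids are illustrative).  An empty selection
# falls back to the RD's auto data elements:
dds = getPertainingDDs(rd, ["main", "extras"])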
Example 9
def _combineRowIntoOne(ssaRows):
    """makes a "total row" from ssaRows.

	In the resulting row, minima and maxima are representative of the
	whole result set, and enumerated columns are set-valued.

	This is useful when generating parameter metadata.
	"""
    if not ssaRows:
        raise base.ReportableError(
            "Datalink meta needs at least one result row")

    totalRow = ssaRows[0].copy()
    totalRow["mime"] = set([totalRow["mime"]])
    # seed with the first row's value; the loop below only sees
    # ssaRows[1:]
    calibs = set([ssaRows[0].get("ssa_fluxcalib")])

    for row in ssaRows[1:]:
        if row["ssa_specstart"] < totalRow["ssa_specstart"]:
            totalRow["ssa_specstart"] = row["ssa_specstart"]
        if row["ssa_specend"] > totalRow["ssa_specend"]:
            totalRow["ssa_specend"] = row["ssa_specend"]
        totalRow["mime"].add(row["mime"])
        calibs.add(row.get("ssa_fluxcalib", None))

    totalRow["collect_calibs"] = set(c for c in calibs if c is not None)
    return totalRow
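
# Usage sketch (hedged: minimal rows carrying just the keys touched
# above):
rows = [
    {"ssa_specstart": 4e-7, "ssa_specend": 7e-7,
        "mime": "image/fits", "ssa_fluxcalib": "ABSOLUTE"},
    {"ssa_specstart": 3e-7, "ssa_specend": 8e-7,
        "mime": "text/csv", "ssa_fluxcalib": None}]
total = _combineRowIntoOne(rows)
# total["ssa_specstart"] == 3e-07, total["ssa_specend"] == 8e-07,
# total["mime"] == set(["image/fits", "text/csv"]),
# total["collect_calibs"] == set(["ABSOLUTE"])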
def _startServer():
    """runs a detached server, dropping privileges and all.
	"""
    try:
        reactor.listenTCP(int(base.getConfig("web", "serverPort")),
                          root.site,
                          interface=base.getConfig("web", "bindAddress"))
    except CannotListenError:
        raise base.ReportableError(
            "Someone already listens on the"
            " configured port %s." % base.getConfig("web", "serverPort"),
            hint="This could mean that a DaCHS server is already running."
            " You would have to manually kill it then since its PID file"
            " got lost somehow.  It's more likely that some"
            " other server is already taking up this port; you may want to change"
            " the [web] serverPort setting in that case.")
    _dropPrivileges()
    root.site.webLog = _configureTwistedLog()

    PIDManager.setPID()
    try:
        setupServer(root)
        signal.signal(signal.SIGHUP,
                      lambda sig, stack: reactor.callLater(0, _reloadConfig))
        _preloadRDs()
        reactor.run()
    finally:
        PIDManager.clearPID()
    def runForData(self, service, inputTable, queryMeta):
        """returns a data set processed according to inputTable's parameters.
		"""
        try:
            args = inputTable.getParamDict()
            if not self.dataFunctions:
                raise base.DataError(
                    "This datalink service cannot process data")

            descriptor = self.descriptors[-1]
            self.dataFunctions[0].compile(self)(descriptor, args)

            if descriptor.data is None:
                raise base.ReportableError(
                    "Internal Error: a first data function did"
                    " not create data.")

            for func in self.dataFunctions[1:]:
                try:
                    func.compile(self)(descriptor, args)
                except FormatNow:
                    break
                except DeliverNow:
                    return descriptor.data

            res = self.dataFormatter.compile(self)(descriptor, args)
            return res
        finally:
            self.finalize()
    def _build(self):
        callArgs = {}
        if self.silence_for_test:
            # test instrumentation -- don't worry if the file remains open
            callArgs["stdout"] = open("/dev/null", "w")
        if subprocess.call("make", **callArgs):
            raise base.ReportableError("Booster build failed, messages above.")
Example 13
    def add(roleRow):
        role, row = roleRow
        if role not in addersDict:
            raise base.ReportableError(
                "Grammar tries to feed to role '%s',"
                " but there is no corresponding make" % role)
        for adder in addersDict[role]:
            adder(row)
    def _parseFITSFormat(self, format, colName):
        """returns length and typecode for the supported FITS table types.

		All others raise errors.
		"""
        mat = re.match(r"(\d*)(.)$", format)
        if not mat:
            raise base.ReportableError(
                "FITS type code '%s' of %s not handled"
                " by gavo mkboost; add handling if you can." %
                (format, colName))
        if not mat.group(2) in self.fitsTypes:
            raise base.ReportableError(
                "FITS type '%s' of %s not handled"
                " by gavo mkboost; add handling if you can." %
                (mat.group(2), colName))
        return int(mat.group(1) or "1"), mat.group(2)
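
# Standalone illustration of the parse above: an optional repeat count
# (defaulting to 1) followed by a single FITS type code letter.
import re
assert re.match(r"(\d*)(.)$", "10A").groups() == ("10", "A")
assert re.match(r"(\d*)(.)$", "E").groups() == ("", "E")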
Example 15
    def __getitem__(self, key):
        matches = [s for s in self.keys() if s.startswith(key)]
        if len(matches) == 0:
            raise KeyError(key)
        elif len(matches) == 1:
            return dict.__getitem__(self, matches[0])
        else:
            raise base.ReportableError("Ambiguous subcommand specification;"
                                       " choose between %s." % repr(matches))
Example 16
def main():

	# we want to preserve group-writability in all our operations;  hence
	# this prominent place for overriding a user decision...
	os.umask(002)

	if len(sys.argv)>1 and sys.argv[1]=="init":  
		# Special case: initial setup, no api working yet
		del sys.argv[1]
		from gavo.user import initdachs
		sys.exit(initdachs.main())

	opts, module, funcName = _parseCLArgs()
	from gavo import base
	from gavo import utils
	from gavo.user import errhandle
	from gavo.user import plainui
	from gavo.user import useless

	interfaces = {
		"deluge": useless.DelugeUI,
		"null": useless.NullUI,
		"stingy": plainui.StingyPlainUI,
		"semistingy": plainui.SemiStingyPlainUI,
		"plain": plainui.PlainUI,
	}

	if not (opts.suppressLog or os.environ.get("GAVO_LOG")=="no"):
		from gavo.user import logui
		logui.LoggingUI(base.ui)

	if opts.uiName=="module-dependent":
		opts.uiName = {"registry.publication": "semistingy",
			"user.dropping": "stingy",
			"user.serve": "null",
			}.get(module, "plain")
	if opts.uiName not in interfaces:
		raise base.ReportableError("UI %s does not exist.  Choose one of"
			" %s"%(opts.uiName, ", ".join(interfaces)))
	interfaces[opts.uiName](base.ui)

	if opts.enablePDB:
		_enablePDB()
	funcToRun = utils.loadInternalObject(module, funcName)

	if opts.profilePath:
		import cProfile
		cProfile.runctx("funcToRun()", globals(), locals(), opts.profilePath)
		return

	try:
		funcToRun()
	except Exception:
		if opts.alwaysTracebacks:
			traceback.print_exc()
		sys.exit(errhandle.raiseAndCatch(opts))
Example 17
    def getAnnotation(self, roleName, container):
        if self.parent == container:
            return dm.ColumnAnnotation(roleName, self)
        else:
            raise base.ReportableError(
                "You cannot use columns from"
                " other tables in your DM annotations directly.",
                hint="If you really need something like this, you need to"
                " define a datatype corresponding to what's in the other table"
                " and reference a corresponding dm declaration.")
Example 18
def getGrammar(grammarName):
    if grammarName not in GRAMMAR_REGISTRY:
        raise base.NotFoundError(grammarName, "grammar", "defined grammars")
    grammarClass = utils.loadInternalObject(*GRAMMAR_REGISTRY[grammarName])
    if grammarClass.name_ != grammarName:
        raise base.ReportableError(
            "Internal Error: Grammar %s is registred"
            " under the wrong name." % grammarName,
            hint="This is probably a typo in grammars.__init__; it needs"
            " to be fixed there")
    return grammarClass
Example 19
def _makeHeaderSequence(keyTpl, commentTpl):
	try:
		return [
			(keyTpl%ind, commentTpl%numeral) 
			for ind, numeral in [
				(1, "1st"),
				(2, "2nd"),
				(3, "3rd"),]]
	except TypeError:
		raise base.ReportableError("Invalid header sequence templates: %r %r"%(
			keyTpl, commentTpl))
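
# Standalone illustration (hypothetical templates):
print _makeHeaderSequence("CRVAL%d", "WCS reference value on %s axis")
# [('CRVAL1', 'WCS reference value on 1st axis'),
#  ('CRVAL2', 'WCS reference value on 2nd axis'),
#  ('CRVAL3', 'WCS reference value on 3rd axis')]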
Example 20
def getCore(name):
    if name not in CORE_REGISTRY:
        raise base.NotFoundError(name, "core", "registered cores")
    cls = utils.loadInternalObject(*CORE_REGISTRY[name])
    if cls.name_ != name:
        raise base.ReportableError(
            "Internal Error: Core %s is registred"
            " under the wrong name." % name,
            hint="This is probably a typo in svcs.core; it needs"
            " to be fixed there")
    return cls
Example 21
    def _iterRows(self):
        try:
            from pds.core.parser import Parser
            from pds.core.common import open_pds
        except ImportError:
            raise base.ReportableError(
                "PDSGrammar needs the external PyPDS python"
                " package.  You can obtain it from"
                " git://github.com/RyanBalfanz/PyPDS.git or from the python"
                " package index.")
        yield Parser().parse(open_pds(self.sourceToken))
def getRenderer(name):
    if name not in RENDERER_REGISTRY:
        raise base.NotFoundError(name, "renderer", "registered renderers")
    cls = utils.loadInternalObject(*RENDERER_REGISTRY[name])
    if cls.name != name:
        raise base.ReportableError(
            "Internal Error: Renderer %s is registred"
            " under the wrong name." % name,
            hint="This is probably a typo in svcs.renderers; it needs"
            " to be fixed there")
    return cls
Example 23
def getTypeForFTColumn(fitsCol):
    """returns a DaCHS type for FITS table column.

	This currently ignores array sizes and such.  Well, people can always
	fix things manually.
	"""
    mat = re.match(r"(\d*)(.)$", fitsCol.format)
    if not mat or not mat.group(2) in FT_TYPE_MAP:
        raise base.ReportableError("FITS type code '%s' of %s not handled"
                                   " by gavo mkrd; add handling if you can." %
                                   (fitsCol.format, fitsCol.name))
    return FT_TYPE_MAP[mat.group(2)]
    def __init__(self, tableDef, query, connection, **kwargs):
        self.connection = connection
        self.autoClose = kwargs.pop("autoClose", False)
        if "rows" in kwargs:
            raise base.ReportableError("QueryTables cannot be constructed"
                                       " with rows.")
        self.matchLimit = kwargs.pop("matchLimit", None)
        self.query = query
        table.BaseTable.__init__(self,
                                 tableDef,
                                 connection=connection,
                                 **kwargs)
def expireRDs(args):
    pw = base.getConfig("web", "adminpasswd")
    if pw == '':
        raise base.ReportableError(
            "expireRDs needs [web]adminpasswd config item.")

    for rdId in args.rdIds:
        if rdId.startswith("//"):
            rdId = "__system__" + rdId[1:]

        try:
            f = utils.urlopenRemote(base.makeAbsoluteURL("/seffe/" +
                                                         urllib.quote(rdId)),
                                    urllib.urlencode({
                                        "__nevow_form__": "adminOps",
                                        "submit": "Reload RD"
                                    }),
                                    creds=("gavoadmin", pw))
            ignored = f.read()  #noflake: don't care enough to check at this point.
        except IOError, msg:
            raise base.ReportableError("Failed to reload %s: %s" % (rdId, msg))
Example 26
def _getSelectedDDIds(rd, selectedIds):
    """helps getPertainingDDs
	"""
    res = []
    ddDict = dict((dd.id, dd) for dd in rd.dds)
    for ddId in selectedIds:
        if ddId not in ddDict:
            raise base.ReportableError(
                "The DD '%s' you are trying to import is not defined within"
                " the RD '%s'." % (ddId, rd.sourceId),
                hint="Data elements available in %s include %s" %
                (rd.sourceId, ", ".join(ddDict) or '(None)'))
        res.append(ddDict[ddId])
    return res
Example 27
def getTemplateNameFromHistory(hdr):
	"""returns the template name used for generating hdr.

	A ReportableError is raised if the template signature is missing.
	"""
	for card in hdr.get_history():
		mat = re.search(r"GAVO DaCHS template used: (\w+)", card)
		if mat:
			return mat.group(1)
	raise base.ReportableError("DaCHS template signature not found.",
		hint="This means that a function needed to figure out which"
		" FITS template DaCHS used to generate that header, and no"
		" such information was found in the Header's HISTORY cards."
		"  Either this file hasn't been written by DaCHS FITS templating"
		" engine, or some intervening thing hosed the history.")
def _getRunnerForSingle(testId, runnerArgs):
	from gavo import api

	testElement = common.getReferencedElement(testId, doQueries=False)
	
	if isinstance(testElement, api.RD):
		runner = TestRunner.fromRD(testElement, **runnerArgs)
	elif isinstance(testElement, RegTestSuite):
		runner = TestRunner.fromSuite(testElement, **runnerArgs)
	elif isinstance(testElement, RegTest):
		runner = TestRunner.fromTest(testElement, **runnerArgs)
	else:
		raise base.ReportableError("%s is not a testable element."%testId,
			hint="Only RDs, regSuites, or regTests are eligible for testing.")
	return runner
def getCapabilityElement(publication):
    """returns the appropriate capability definition for a publication object.
	"""
    if publication.auxiliary:
        return getAuxiliaryCapability(publication)
    else:
        try:
            maker = _getCapabilityMaker(publication.render)
        except KeyError:
            raise base.ui.logOldExc(
                base.ReportableError(
                    "Do not know how to"
                    " produce a capability for the '%s' renderer" %
                    publication.render))
        return maker(publication)
Example 30
	def _iterRows(self):
		try:
			from spacepy import pycdf
		except ImportError:
			raise base.ReportableError("cdfHeaderGrammar needs the external"
				" spacepy package.  You can obtain it from"
				" http://spacepy.lanl.gov.")
		
		cdfStruct = pycdf.CDF(self.sourceToken)

		res = {}
		for key, value in cdfStruct.attrs.iteritems():
			if self.grammar.autoAtomize and value.max_idx()==0:
				res[key] = value[0]
			else:
				res[key] = value[:]
		yield res