Example #1
class Script(base.Structure, base.RestrictionMixin):
    """A script, i.e., some executable item within a resource descriptor.

	The content of scripts is given by their type -- usually, they are
	either python scripts or SQL with special rules for breaking the
	script into individual statements (which are basically like python's).

	The special language AC_SQL is like SQL, but execution errors are
	ignored.  This is not what you want for most data RDs (it's intended
	for housekeeping scripts).

	See `Scripting`_.
	"""
    name_ = "script"
    typeDesc_ = "Embedded executable code with a type definition"

    _lang = base.EnumeratedUnicodeAttribute(
        "lang",
        default=base.Undefined,
        description="Language of the script.",
        validValues=["SQL", "python", "AC_SQL"],
        copyable=True)
    _type = base.EnumeratedUnicodeAttribute(
        "type",
        default=base.Undefined,
        description="Point of time at which script is to run.",
        validValues=[
            "preImport", "newSource", "preIndex", "postCreation", "beforeDrop",
            "sourceDone"
        ],
        copyable=True)
    _name = base.UnicodeAttribute(
        "name",
        default="anonymous",
        description="A human-consumable designation of the script.",
        copyable=True)
    _notify = base.BooleanAttribute(
        "notify",
        default=True,
        description="Send out a notification when running this"
        " script.",
        copyable=True)
    _content = base.DataContent(copyable=True, description="The script body.")
    _original = base.OriginalAttribute()

    def getSource(self):
        """returns the content with all macros expanded.
		"""
        return self.parent.getExpander().expand(self.content_)
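
# A minimal usage sketch, assuming the generic base.makeStruct constructor
# used elsewhere in this code; the SQL body and its macro are hypothetical.
# The attribute names (lang, type, name, content_) follow the definitions above.
def _exampleScript():
    script = base.makeStruct(Script,
        lang="SQL", type="preIndex", name="cluster table",
        content_="CLUSTER \\someIndexMacro ON mytable")
    # getSource() would expand macros like \someIndexMacro, but only once the
    # script is embedded in a parent that provides getExpander().
    return script
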
class ContextGrammar(grammars.Grammar):
    """A grammar for web inputs.

	These are almost exclusively in InputDDs.  They hold InputKeys
	defining what they take from the context.

	For DBCores, the InputDDs are generally defined implicitly
	via CondDescs.	Thus, only for other cores will you ever need
	to bother with ContextGrammars (unless you're going for special
	effects).

	The source tokens for context grammars are dictionaries; these
	are either typed dictionaries from nevow, where the values
	usually are atomic, or, preferably, the dictionaries of lists
	from request.args.

	ContextGrammars only yield rows if there's a rowKey defined.
	In that case, an outer join of all other parameters is returned;
	with rowKey defined, the input keys are obtained from the table's
	columns.

	In normal usage, they just yield a single parameter row,
	corresponding to the source dictionary possibly completed with
	defaults, where non-required input keys get None defaults where not
	given.  Missing required parameters yield errors.

	Since most VO protocols require case-insensitive matching of parameter
	names, matching of input key names and the keys of the input dictionary
	is attempted first literally, then disregarding case.
	"""
    name_ = "contextGrammar"

    _inputTable = base.ReferenceAttribute(
        "inputTable",
        default=base.NotGiven,
        description="The table that is to be built using this grammar",
        copyable=True)

    _inputKeys = rscdef.ColumnListAttribute(
        "inputKeys",
        childFactory=InputKey,
        description="Input keys this context grammar should parse."
        "  These must not be given if there is an input table defined.")

    _rowKey = base.UnicodeAttribute(
        "rowKey",
        default=base.NotGiven,
        description="The name of a key that is used to generate"
        " rows from the input",
        copyable=True)

    _rejectExtras = base.BooleanAttribute(
        "rejectExtras",
        default=False,
        description="If true, the grammar will reject extra input parameters."
        "  Note that for form-based services, there *are* extra parameters"
        " not declared in the services' input tables.  Right now,"
        " contextGrammar does not ignore those.",
        copyable=True)

    _original = base.OriginalAttribute("original")

    rowIterator = ContextRowIterator

    def onElementComplete(self):
        if self.inputTable is not base.NotGiven:
            if self.inputKeys != []:
                raise base.StructureError(
                    "InputKeys and inputTable must not"
                    " both be given in a context grammar")
            else:
                if self.rowKey:
                    self.inputKeys = [
                        InputKey.fromColumn(c) for c in self.inputTable.columns
                    ]
                else:
                    self.inputKeys = self.inputTable.params

        else:
            columns = []
            if self.rowKey:
                columns = self.inputKeys
            self.inputTable = MS(InputTable,
                                 params=self.inputKeys,
                                 columns=columns)

        self.defaults = {}
        for ik in self.iterInputKeys():
            if not ik.required:
                self.defaults[ik.name] = None
            if ik.value is not None:
                self.defaults[ik.name] = ik.value
        self._onElementCompleteNext(ContextGrammar)

    def iterInputKeys(self):
        for ik in self.inputKeys:
            yield ik
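
# A minimal sketch of the defaults logic in onElementComplete above: every
# non-required input key gets a None default, and an explicit value wins.
# The dummy _Key class only mimics the two attributes the loop looks at.
def _exampleContextDefaults():
    class _Key(object):
        def __init__(self, name, required=False, value=None):
            self.name, self.required, self.value = name, required, value

    keys = [_Key("ra", required=True), _Key("maxrec", value=100), _Key("band")]
    defaults = {}
    for ik in keys:
        if not ik.required:
            defaults[ik.name] = None
        if ik.value is not None:
            defaults[ik.name] = ik.value
    return defaults   # {'maxrec': 100, 'band': None}
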
class RowmakerDef(base.Structure, RowmakerMacroMixin):
	"""A definition of the mapping between grammar input and finished rows
	ready for shipout.

	Rowmakers consist of variables, procedures and mappings.  They
	result in a python callable doing the mapping.

	RowmakerDefs double as macro packages for the expansion of various
	macros.  The standard macros will need to be quoted; the rowmaker macros
	above yield python expressions.

	Within map and var bodies as well as late apply pars and apply bodies, 
	you can refer to the grammar input as vars["name"] or, shorter, @name.

	To add output keys, use map or, in apply bodies, add keys to the
	result dictionary.
	"""
	name_ = "rowmaker"

	_maps = base.StructListAttribute("maps", childFactory=MapRule,
		description="Mapping rules.", copyable=True)
	_vars = base.StructListAttribute("vars", childFactory=VarDef,
		description="Definitions of intermediate variables.",
		copyable=True)
	_apps = base.StructListAttribute("apps",
		childFactory=ApplyDef, description="Procedure applications.",
		copyable=True)
	_rd = common.RDAttribute()
	_idmaps = base.StringListAttribute("idmaps", description="List of"
		' column names that are just "mapped through" (like map with key'
		" only); you can use shell patterns to select multiple colums at once.",
		copyable=True)
	_simplemaps = base.IdMapAttribute("simplemaps", description=
		"Abbreviated notation for <map source>; each pair is destination:source", 
		copyable=True)
	_ignoreOn = base.StructAttribute("ignoreOn", default=None,
		childFactory=rowtriggers.IgnoreOn, description="Conditions on the"
		" input record coming from the grammar to cause the input"
		" record to be dropped by the rowmaker, i.e., for this specific"
		" table.  If you need to drop a row for all tables being fed,"
		" use a trigger on the grammar.", copyable=True)
	_original = base.OriginalAttribute()

	@classmethod
	def makeIdentityFromTable(cls, table, **kwargs):
		"""returns a rowmaker that just maps input names to column names.
		"""
		if "id" not in kwargs:
			kwargs["id"] = "autogenerated rowmaker for table %s"%table.id
		return base.makeStruct(cls, idmaps=[c.key for c in table], **kwargs)

	@classmethod
	def makeTransparentFromTable(cls, table, **kwargs):
		"""returns a rowmaker that maps input names to column names without
		touching them.

		This is for crazy cases in which the source actually provides
		pre-parsed data that any treatment would ruin.
		"""
		if "id" not in kwargs:
			kwargs["id"] = "autogenerated rowmaker for table %s"%table.id
		return base.makeStruct(cls, maps=[
				base.makeStruct(MapRule, key=c.name, content_="vars[%s]"%repr(c.name))
					for c in table],
			**kwargs)

	def completeElement(self, ctx):
		if self.simplemaps:
			for k,v in self.simplemaps.iteritems():
				nullExcs = base.NotGiven
				if v.startswith("@"):
					v = v[1:]
					nullExcs = "KeyError,"
				self.feedObject("maps", base.makeStruct(MapRule, 
					key=k, source=v, nullExcs=nullExcs))
		self._completeElementNext(RowmakerDef, ctx)

	def _getSourceFromColset(self, columns):
		"""returns the source code for a mapper to a column set.
		"""
		lineMap, line = {}, 0
		source = []

		def appendToSource(srcLine, line, lineMarker):
			source.append(srcLine)
			line += 1
			lineMap[line] = lineMarker
			line += source[-1].count("\n")
			return line

		if self.ignoreOn:
			line = appendToSource("if checkTrigger(vars):\n"
				"  raise IgnoreThisRow(vars)",
				line, "Checking ignore")
		for v in self.vars:
			line = appendToSource(v.getCode(columns), line, "assigning "+v.key)
		for a in self.apps:
			line = appendToSource(
				"%s(vars, result, targetTable, _self)"%a.name, 
				line, "executing "+a.name)
		for m in self.maps:
			line = appendToSource(m.getCode(columns), line, "building "+m.key)
		return "\n".join(source), lineMap

	def _getSource(self, tableDef):
		"""returns the source code for a mapper to tableDef's columns.
		"""
		return self._getSourceFromColset(tableDef.columns)

	def _getGlobals(self, tableDef):
		globals = {}
		for a in self.apps:
			globals[a.name] = a.compile()
		if self.ignoreOn:
			globals["checkTrigger"] = self.ignoreOn
		globals["tableDef_"] = tableDef
		globals["rd_"] = tableDef.rd
		globals["curDD_"] = tableDef.parent
		return globals

	def _resolveIdmaps(self, columns):
		"""adds mappings for self's idmap within column set.
		"""
		existingMaps = set(m.key for m in self.maps)
		baseNames = [c.key for c in columns]
		for colName in self.idmaps:
			matching = fnmatch.filter(baseNames, colName)
			if not matching:
				raise base.NotFoundError(colName, "columns matching", "unknown")
			for dest in matching:
				if dest not in existingMaps:
					self.maps.append(MapRule(self, key=dest).finishElement(None))
		self.idmaps = []

	def _checkTable(self, columns, id):
		"""raises a LiteralParseError if we try to map to non-existing
		columns.
		"""
		for map in self.maps:
			try:
				columns.getColumnByName(map.key)
			except KeyError:
				raise base.ui.logOldExc(base.LiteralParseError(self.name_, map.key, 
					"Cannot map to '%s' since it does not exist in %s"%(
						map.key, id)))

	def _buildForTable(self, tableDef):
		"""returns a RowmakerDef with everything expanded and checked for
		tableDef.

		This may raise LiteralParseErrors if self's output is incompatible
		with tableDef.
		"""
		res = self.copyShallowly()
		try:
			res._resolveIdmaps(tableDef.columns)
			res._checkTable(tableDef.columns, tableDef.id)
		except base.NotFoundError, ex:
			ex.within = "table %s's columns"%tableDef.id
			raise
		return res
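
# A minimal sketch of how idmaps shell patterns are resolved (cf. _resolveIdmaps
# above): fnmatch.filter selects matching column keys, and a mapping is added
# for each one that is not already mapped explicitly.  Column names are made up.
def _exampleResolveIdmaps():
    import fnmatch
    columnKeys = ["ra", "dec", "mag_g", "mag_r", "mag_i"]
    idmaps = ["ra", "dec", "mag_*"]
    alreadyMapped = set(["dec"])
    toMap = []
    for pattern in idmaps:
        for dest in fnmatch.filter(columnKeys, pattern):
            if dest not in alreadyMapped:
                toMap.append(dest)
    return toMap   # ['ra', 'mag_g', 'mag_r', 'mag_i']
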
Example #4
class ColumnBase(base.Structure, base.MetaMixin):
    """A base class for columns, parameters, output fields, etc.

	Actually, right now there's far too much cruft in here that
	should go into Column proper or somewhere else entirely.  Hence:
	XXX TODO: Refactor.

	See also Column for a docstring that still applies to everything
	defined here.
	"""
    _name = ParamNameAttribute("name",
                               default=base.Undefined,
                               description="Name of the param",
                               copyable=True,
                               before="type")
    _type = TypeNameAttribute(
        "type",
        default="real",
        description="datatype for the column (SQL-like type system)",
        copyable=True,
        before="unit")
    _unit = base.UnicodeAttribute("unit",
                                  default="",
                                  description="Unit of the values",
                                  copyable=True,
                                  before="ucd",
                                  strip=True)
    _ucd = base.UnicodeAttribute("ucd",
                                 default="",
                                 description="UCD of the column",
                                 copyable=True,
                                 before="description")
    _description = base.NWUnicodeAttribute(
        "description",
        default="",
        copyable=True,
        description=
        "A short (one-line) description of the values in this column.")
    _tablehead = base.UnicodeAttribute(
        "tablehead",
        default=None,
        description="Terse phrase to put into table headers for this"
        " column",
        copyable=True)
    _utype = base.UnicodeAttribute("utype",
                                   default=None,
                                   description="utype for this column",
                                   copyable=True)
    _required = base.BooleanAttribute(
        "required",
        default=False,
        description="Record becomes invalid when this column is NULL",
        copyable=True)
    _displayHint = DisplayHintAttribute(
        "displayHint",
        description="Suggested presentation; the format is "
        " <kw>=<value>{,<kw>=<value>}, where what is interpreted depends"
        " on the output format.  See, e.g., documentation on HTML renderers"
        " and the formatter child of outputFields.",
        copyable=True)
    _verbLevel = base.IntAttribute(
        "verbLevel",
        default=20,
        description="Minimal verbosity level at which to include this column",
        copyable=True)
    _values = base.StructAttribute("values",
                                   default=None,
                                   childFactory=Values,
                                   description="Specification of legal values",
                                   copyable=True)
    _fixup = base.UnicodeAttribute(
        "fixup",
        description=
        "A python expression the value of which will replace this column's"
        " value on database reads.  Write a ___ to access the original"
        ' value.  You can use macros for the embedding table.'
        ' This is for, e.g., simple URL generation'
        ' (fixup="\'\\internallink{/this/svc}\'+___").'
        ' It will *only* kick in when tuples are deserialized from the'
        " database, i.e., *not* for values taken from tables in memory.",
        default=None,
        copyable=True)
    _note = base.UnicodeAttribute(
        "note",
        description="Reference to a note meta"
        " on this table explaining more about this column",
        default=None,
        copyable=True)
    _xtype = base.UnicodeAttribute("xtype",
                                   description="VOTable xtype giving"
                                   " the serialization form",
                                   default=None,
                                   copyable=True)
    _stc = TableManagedAttribute(
        "stc",
        description="Internally used"
        " STC information for this column (do not assign to unless instructed"
        " to do so)",
        default=None,
        copyable=True)
    _stcUtype = TableManagedAttribute(
        "stcUtype",
        description="Internally used"
        " STC information for this column (do not assign to)",
        default=None,
        copyable=True)
    _properties = base.PropertyAttribute(copyable=True)
    _original = base.OriginalAttribute()

    restrictedMode = False

    def __repr__(self):
        return "<Column %s>" % repr(self.name)

    def setMetaParent(self, parent):
        # columns should *not* take part in meta inheritance.  The reason is
        # that there are usually many columns to a table, and there's no
        # way I can see that any piece of metadata should be repeated in
        # all of them.  On the other hand, for votlinks (to name an example),
        # meta inheritance would have disastrous consequences.
        # So, we bend the rules a bit.
        raise base.StructureError(
            "Columns may not have meta parents.",
            hint="The rationale for this is explained in rscdef/column.py,"
            " look for setMetaParent.")

    def onParentComplete(self):
        # we need to resolve note on construction since columns are routinely
        # copied to other tables and meta info does not necessarily follow.
        if isinstance(self.note, basestring):
            try:
                self.note = self.parent.getNote(self.note)
            except base.NotFoundError:  # non-existing notes silently ignored
                self.note = None

    def completeElement(self, ctx):
        self.restrictedMode = getattr(ctx, "restricted", False)
        if isinstance(self.name, utils.QuotedName):
            self.key = self.name.name
            if ')' in self.key:
                # No '()' allowed in key because that breaks the %()s syntax (sigh!).
                # Work around with the following quick hack, which would break
                # if someone deliberately chose colliding names.  Anyone using
                # delimited ids in SQL deserves a good spanking anyway.
                self.key = self.key.replace(')', "__").replace('(', "__")
        else:
            self.key = self.name
        self._completeElementNext(ColumnBase, ctx)

    def isEnumerated(self):
        return self.values and self.values.options

    def validate(self):
        self._validateNext(ColumnBase)
        if self.restrictedMode and self.fixup:
            raise base.RestrictedElement("fixup")

    def validateValue(self, value):
        """raises a ValidationError if value does not match the constraints
		given here.
		"""
        if value is None:
            if self.required:
                raise base.ValidationError(
                    "Field %s is empty but non-optional" % self.name,
                    self.name)
            return

        # Only validate these if we're not a database column
        if not isinstance(self, Column):
            vals = self.values
            if vals:
                if vals.options:
                    if value and not vals.validateOptions(value):
                        raise base.ValidationError(
                            "Value %s not consistent with"
                            " legal values %s" % (value, vals.options),
                            self.name)
                else:
                    if vals.min and value < vals.min:
                        raise base.ValidationError(
                            "%s too small (must be at least %s)" %
                            (value, vals.min), self.name)
                    if vals.max and value > vals.max:
                        raise base.ValidationError(
                            "%s too large (must be less than %s)" %
                            (value, vals.max), self.name)

    def isIndexed(self):
        """returns a guess as to whether this column is part of an index.

		This may return True, False, or None (unknown).
		"""
        if self.parent and hasattr(self.parent, "indexedColumns"):
            # parent is something like a TableDef
            if self.name in self.parent.indexedColumns:
                return True
            else:
                return False

    def isPrimary(self):
        """returns a guess as to whether this column is a primary key of the
		embedding table.

		This may return True, False, or None (unknown).
		"""
        if self.parent and hasattr(self.parent, "primary"):
            # parent is something like a TableDef
            if self.name in self.parent.primary:
                return True
            else:
                return False

    _indexedCleartext = {
        True: "indexed",
        False: "notIndexed",
        None: "unknown",
    }

    def asInfoDict(self):
        """returns a dictionary of certain, "user-interesting" properties
		of the data field, in a dict of strings.
		"""
        return {
            "name": unicode(self.name),
            "description": self.description or "N/A",
            "tablehead": self.getLabel(),
            "unit": self.unit or "N/A",
            "ucd": self.ucd or "N/A",
            "verbLevel": self.verbLevel,
            "indexState": self._indexedCleartext[self.isIndexed()],
            "note": self.note,
        }

    def getDDL(self):
        """returns an SQL fragment describing this column ready for 
		inclusion in a DDL statement.
		"""
        type = self.type
        # we have one "artificial" type, and it shouldn't become more than
        # one; so, a simple hack should do it.
        if type.upper() == "UNICODE":
            type = "TEXT"

        # The "str" does magic for delimited identifiers, so it's important.
        items = [str(self.name), type]
        if self.required:
            items.append("NOT NULL")
        return " ".join(items)

    def getDisplayHintAsString(self):
        return self._displayHint.unparse(self.displayHint)

    def getLabel(self):
        """returns a short label for this column.

		The label is either the tablehead or, missing it, the capitalized
		column name.
		"""
        if self.tablehead is not None:
            return self.tablehead
        return str(self.name).capitalize()

    def _getVOTableType(self):
        """returns the VOTable type, arraysize and xtype for this
		column-like thing.
		"""
        type, arraysize, xtype = base.sqltypeToVOTable(self.type)

        if self.type == "date":
            xtype = "dachs:DATE"

        return type, arraysize, xtype
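
# A minimal sketch of the DDL assembly in getDDL above: the (possibly quoted)
# name and the type are joined, unicode maps to TEXT, and required columns get
# NOT NULL.  This standalone helper only mirrors that logic.
def _exampleDDLFragment(name, type, required):
    if type.upper() == "UNICODE":
        type = "TEXT"
    items = [str(name), type]
    if required:
        items.append("NOT NULL")
    return " ".join(items)

# _exampleDDLFragment("mag", "real", True) -> 'mag real NOT NULL'
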
Example #5
class Values(base.Structure):
    """Information on a column's values, in particular its domain.

	This is quite like the values element in a VOTable.  In particular,
	to accommodate VOTable usage, we require nullLiteral to be a valid literal
	for the parent's type.

	Note that DaCHS does not validate against constraints from values on
	table import.  This is mainly because, before ``gavo values`` has run,
	semiautomatically filled values may not yet represent the new dataset.

	With HTTP parameters, values validation does take place (but again,
	that's mostly not too helpful because there are query languages
	sitting in between most of the time).

	Hence, the main utility of values is metadata declaration, both
	in the form render (where they become placeholders) and in datalink
	(where they are communicated as VOTable values).
	"""
    name_ = "values"

    _min = base.UnicodeAttribute("min",
                                 default=None,
                                 description="Minimum acceptable"
                                 " value as a datatype literal",
                                 copyable=True)
    _max = base.UnicodeAttribute("max",
                                 default=None,
                                 description="Maximum acceptable"
                                 " value as a datatype literal",
                                 copyable=True)
    _options = base.StructListAttribute(
        "options",
        childFactory=Option,
        description="List of acceptable values (if set)",
        copyable=True)
    _default = base.UnicodeAttribute(
        "default",
        default=None,
        description="A default"
        " value (currently only used for options).",
        copyable=True)
    _nullLiteral = base.UnicodeAttribute(
        "nullLiteral",
        default=None,
        description=
        "An appropriate value representing a NULL for this column in VOTables"
        " and similar places.  You usually should only set it for integer"
        " types and chars.  Note that rowmakers make no use of this nullLiteral,"
        " i.e., you can and should choose null values independently of"
        " your source.  Again, for reals, floats and (mostly) text you probably"
        " do not want to do this.",
        copyable=True)
    _multiOk = base.BooleanAttribute("multiOk",
                                     False, "Deprecated, use"
                                     " multiplicity=multiple instead.",
                                     copyable=True)
    _fromDB = base.ActionAttribute(
        "fromdb",
        "_evaluateFromDB",
        description=
        "A query fragment returning just one column to fill options from (will"
        " add to options if some are given).  Do not write SELECT or anything,"
        " just the column name and the where clause.")
    _original = base.OriginalAttribute()

    validValues = None

    @classmethod
    def fromOptions(cls, labels):
        """returns Values with the elements of labels as valid options.
		"""
        return base.makeStruct(
            cls, options=[base.makeStruct(Option, content_=l) for l in labels])

    def makePythonVal(self, literal, sqltype):
        return typesystems.sqltypeToPython(sqltype)(literal)

    def _evaluateFromDB(self, ctx):
        if not getattr(ctx, "doQueries", True):
            return
        try:
            with base.getTableConn() as conn:
                for row in conn.query(
                        self.parent.parent.expand("SELECT DISTINCT %s" %
                                                  (self.fromdb))):
                    self._options.feedObject(
                        self, base.makeStruct(Option, content_=row[0]))
        except base.DBError:  # Table probably doesn't exist yet, ignore.
            base.ui.notifyWarning("Values fromdb '%s' failed, ignoring" %
                                  self.fromdb)

    def onParentComplete(self):
        """converts min, max, and options from string literals to python
		objects.
		"""
        dataField = self.parent
        # It would be nicer if we could handle this in properties for min etc, but
        # unfortunately parent might not be complete then.  The only
        # way around that would have been delegations from Column, and that's
        # not very attractive either.
        if self.min:
            self.min = self.makePythonVal(self.min, dataField.type)
        if self.max:
            self.max = self.makePythonVal(self.max, dataField.type)

        if self.options:
            dbt = dataField.type
            for opt in self.options:
                opt.content_ = self.makePythonVal(opt.content_, dbt)
            self.validValues = set([o.content_ for o in self.options])

        if self.nullLiteral:
            try:
                self.makePythonVal(self.nullLiteral, dataField.type)
            except ValueError:
                raise base.LiteralParseError(
                    "nullLiteral",
                    self.nullLiteral,
                    hint="If you want to *parse* whatever you gave into a NULL,"
                    " use the parseWithNull function in a rowmaker.  The null"
                    " literal gives what value will be used for null values"
                    " when serializing to VOTables and the like.")

        if self.default and isinstance(self.default, basestring):
            type, arraysize, xtype = dataField._getVOTableType()
            self.default = paramval.getVOTParser(type, arraysize,
                                                 xtype)(self.default)

    def validateOptions(self, value):
        """returns false if value isn't either in options or doesn't consist of
		items in options.

		Various null values always validate here; non-null checking is done
		by the column on its required attribute.
		"""
        if value == "None":
            assert False, "Literal 'None' passed as a value to validateOptions"

        if self.validValues is None:
            return True
        if isinstance(value, (list, tuple, set)):
            for val in value:
                if val and not val in self.validValues:
                    return False
        else:
            return value in self.validValues or value is None
        return True
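
# A minimal sketch of the validateOptions semantics above: with no validValues
# everything passes; sequences pass if every non-null item is a valid value;
# scalars pass if they are a valid value or None.
def _exampleValidateOptions(value, validValues):
    if validValues is None:
        return True
    if isinstance(value, (list, tuple, set)):
        for val in value:
            if val and val not in validValues:
                return False
        return True
    return value in validValues or value is None

# _exampleValidateOptions(["g", "r"], set(["g", "r", "i"])) -> True
# _exampleValidateOptions("z", set(["g", "r", "i"]))        -> False
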
Example #6
class SourceSpec(base.Structure):
	"""A Specification of a data descriptor's inputs.

	This will typically be files taken from a file system.  If so, DaCHS will,
	in each directory, process the files in alphabetical order.  No guarantees
	are made as to the sequence directories are processed in.

	Multiple patterns are processed in the order given in the RD.
	"""
	name_ = "sources"

	_patterns = base.ListOfAtomsAttribute("patterns", description=
		"Paths to the source files.  You can use shell patterns here.",
		itemAttD=base.UnicodeAttribute("pattern", description="Shell pattern"
			" for source file(s), relative to resource directory."),
		copyable=True)
	_items = base.ListOfAtomsAttribute("items", description=
		"String literals to pass to grammars.  In contrast to patterns,"
		" they are not interpreted as file names but passed to the"
		" grammar verbatim.  Normal grammars do not like this. It is"
		" mainly intended for use with custom or null grammars.",
		itemAttD=base.UnicodeAttribute("item", 
			description="Grammar-specific string"), copyable=True)
	_recurse = base.BooleanAttribute("recurse", default=False,
		description="Search for pattern(s) recursively in their directory"
			" part(s)?", copyable=True)
	_ignore = base.StructAttribute("ignoredSources", childFactory=
		IgnoreSpec, description="Specification of sources that should not"
			" be processed although they match patterns.  Typically used"
			" in update-type data descriptors.", copyable=True)
	_file = base.DataContent(description="A single"
		" file name (this is for convenience)", copyable="True")
	_original = base.OriginalAttribute()

	def __iter__(self):
		return self.iterSources()

	def completeElement(self, ctx):
		if self.ignoredSources is base.Undefined:
			self.ignoredSources = base.makeStruct(IgnoreSpec)
		self._completeElementNext(SourceSpec, ctx)

	def _expandDirParts(self, dirParts, ignoreDotDirs=True):
		"""expands a list of directories into a list of them and all their
		descendants.

		It follows symbolic links but doesn't do any bookkeeping, so bad
		things will happen if the directory graph contains cycles.
		"""
		res = []
		for root in dirParts:
			for root, dirs, files in os.walk(root):
				if ignoreDotDirs:
					if os.path.basename(root).startswith("."):
						continue
					dirs = [dir for dir in dirs if not dir.startswith(".")]
				dirs = (os.path.join(root, dir) for dir in dirs)
				res.extend(dir for dir in dirs if os.path.isdir(dir))
				for child in files:
					destName = os.path.join(root, child)
					if os.path.islink(destName) and not os.path.isfile(destName):
						res.extend(self._expandDirParts([destName]))
		return res

	def iterSources(self, connection=None):
		self.ignoredSources.prepare(connection)
		for item in self.items:
			if not self.ignoredSources.isIgnored(item):
				yield item

		baseDir = ""
		if self.parent.rd:
			baseDir = self.parent.rd.resdir

		for pattern in self.patterns:
			dirPart, baseName = os.path.split(pattern)
			if self.parent.rd:
				dirParts = [os.path.join(baseDir, dirPart)]
			else:
				dirParts = [dirPart]
			if self.recurse:
				dirParts = dirParts+self._expandDirParts(dirParts)
			for dir in sorted(dirParts):
				for name in sorted(glob.glob(os.path.join(dir, baseName))):
					fullName = os.path.abspath(name)
					if not self.ignoredSources.isIgnored(fullName):
						yield fullName
		if self.content_:
			yield os.path.abspath(os.path.join(baseDir, self.content_))
	
	def __nonzero__(self):
		return (not not self.patterns) or (not not self.items
			) or (not not self.content_)
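
# A minimal sketch of the non-recursive part of iterSources above: each pattern
# is split into a directory part and a glob expression, matches per directory
# are sorted, and absolute paths come out.  Ignore handling, recursion, and the
# items/content_ branches are left out; the base directory is an assumption.
def _exampleIterPatterns(baseDir, patterns):
    import glob
    import os
    for pattern in patterns:
        dirPart, baseName = os.path.split(pattern)
        fullDir = os.path.join(baseDir, dirPart)
        for name in sorted(glob.glob(os.path.join(fullDir, baseName))):
            yield os.path.abspath(name)

# list(_exampleIterPatterns("/var/gavo/inputs/myres", ["data/*.fits"]))
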
Example #7
class DataDescriptor(base.Structure, base.ComputedMetaMixin, 
		common.IVOMetaMixin, tabledef.PublishableDataMixin):
	"""A description of how to process data from a given set of sources.

	Data descriptors bring together a grammar, a source specification and
	"makes", each giving a table and a rowmaker to feed the table from the
	grammar output.

	They are the "executable" parts of a resource descriptor.  Their ids
	are used as arguments to gavoimp for partial imports.
	"""
	name_ = "data"
	resType = "data"

	_rowmakers = base.StructListAttribute("rowmakers",
		childFactory=rmkdef.RowmakerDef, 
		description="Embedded build rules (usually rowmakers are defined toplevel)",
		copyable=True,
		before="makes")

	_tables = base.StructListAttribute("tables",
		childFactory=tabledef.TableDef, 
		description="Embedded table definitions (usually, tables are defined"
			" toplevel)", 
		copyable=True,
		before="makes")

	_grammar = base.MultiStructAttribute("grammar", 
		default=None,
		childFactory=builtingrammars.getGrammar,
		childNames=builtingrammars.GRAMMAR_REGISTRY.keys(),
		description="Grammar used to parse this data set.", 
		copyable=True,
		before="makes")
	
	_sources = base.StructAttribute("sources", 
		default=None, 
		childFactory=SourceSpec,
		description="Specification of sources that should be fed to the grammar.",
		copyable=True,
		before="grammar")

	_dependents = base.ListOfAtomsAttribute("dependents",
		itemAttD=base.UnicodeAttribute("recreateAfter"),
		description="A data ID to recreate when this resource is"
			" remade; use # syntax to reference in other RDs.")

	_auto = base.BooleanAttribute("auto", 
		default=True, 
		description="Import this data set if not explicitly"
			" mentioned on the command line?")

	_updating = base.BooleanAttribute("updating", 
		default=False,
		description="Keep existing tables on import?  You usually want this"
			" False unless you have some kind of sources management,"
			" e.g., via a sources ignore specification.", 
		copyable=True)

	_makes = base.StructListAttribute("makes", 
		childFactory=Make,
		copyable=True, 
		description="Specification of a target table and the rowmaker"
			" to feed them.")
	
	_params = common.ColumnListAttribute("params",
		childFactory=column.Param, 
		description='Param ("global columns") for this data (mostly for'
		 ' VOTable serialization).', 
		copyable=True)

	_properties = base.PropertyAttribute()

	_rd = common.RDAttribute()

	_original = base.OriginalAttribute()

	metaModel = ("title(1), creationDate(1), description(1),"
		"subject, referenceURL(1)")

	def __repr__(self):
		return "<data descriptor with id %s>"%self.id

	def validate(self):
		self._validateNext(DataDescriptor)
		if self.registration and self.id is None:
			raise base.StructureError("Published data needs an assigned id.")

	def onElementComplete(self):
		self._onElementCompleteNext(DataDescriptor)
		for t in self.tables:
			t.setMetaParent(self)
		if self.registration:
			self.registration.register()

	# since we want to be able to create DDs dynamically, they must find their
	# meta parent themselves.  We do this while the DD is being adopted;
	# the rules here are: if the parent is a meta mixin itself, it's the
	# meta parent, if it has an rd attribute, use that, else give up.
	# TODO: For DDs on cores, it would be *desirable* to come up
	# with some magic that makes the current service their meta parent.

	def _getParent(self):
		return self.__parent
	
	def _setParent(self, value):
		self.__parent = value
		if isinstance(value, base.MetaMixin):
			self.setMetaParent(value)
		elif hasattr(value, "rd"):
			self.setMetaParent(value.rd)
	
	parent = property(_getParent, _setParent)

	def iterSources(self, connection=None):
		if self.sources:
			return self.sources.iterSources(connection)
		else:
			return iter([])

	def __iter__(self):
		for m in self.makes:
			yield m.table

	def iterTableDefs(self):
		"""iterates over the definitions of all the tables built by this DD.
		"""
		for m in self.makes:
			yield m.table

	def getTableDefById(self, id):
		for td in self.iterTableDefs():
			if td.id==id:
				return td
		raise base.StructureError("No table name %s will be built"%id)

	def getTableDefWithRole(self, role):
		for m in self.makes:
			if m.role==role:
				return m.table
		raise base.StructureError("No table def with role '%s'"%role)

	def getPrimary(self):
		"""returns the "primary" table definition in the data descriptor.

		"primary" means the only table in a one-table dd, the table with the
		role "primary" if there are more.  If no matching table is found, a
		StructureError is raised.
		"""
		if len(self.makes)==1:
			return self.makes[0].table
		else:
			try:
				return self.getTableDefWithRole("primary")
			except base.StructureError: # raise more telling message
				pass
		raise base.StructureError("Ambiguous request for primary table")

	def copyShallowly(self):
		"""returns a shallow copy of self.

		Sources are not copied.
		"""
		return DataDescriptor(self.parent, rowmakers=self.rowmakers[:],
			tables=self.tables[:], grammar=self.grammar, makes=self.makes[:])
	
	def getURL(self, rendName, absolute=True):
		# there's no sensible URL for DDs; thus, let people browse
		# the RD info.  There, they should at least find links to any
		# tables defined here.
		basePath = "%sbrowse/%s"%(
			base.getConfig("web", "nevowRoot"),
			self.rd.sourceId)
		if absolute:
			return base.getConfig("web", "serverURL")+basePath
		return basePath
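
# A minimal sketch of the primary-table selection in getPrimary above: a single
# make wins outright, otherwise the make with role "primary" is picked, and
# anything else is ambiguous.  This works on plain (role, table) tuples rather
# than Make structs.
def _examplePickPrimary(makes):
    if len(makes) == 1:
        return makes[0][1]
    for role, table in makes:
        if role == "primary":
            return table
    raise ValueError("Ambiguous request for primary table")

# _examplePickPrimary([("primary", "main"), (None, "aux")]) -> 'main'
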
Example #8
class MixinDef(activetags.ReplayBase):
	"""A definition for a resource mixin.

	Resource mixins are resource descriptor fragments typically rooted
	in tables (though it's conceivable that other structures could
	grow mixin attributes as well).

	They are used to define and implement certain behaviours that components
	of the DC software want to see:

	- products want to be added into their table, and certain fields are
	  required within tables describing products,
	- tables containing positions need some basic machinery to support SCS,
	- SIAP needs quite a bunch of fields.

	Mixins consist of events that are played back on the structure
	mixing in before anything else happens (much like original) and
	two procedure definitions, viz, processEarly and processLate.
	These can access the structure that has the mixin as substrate.

	processEarly is called as part of the substrate's completeElement
	method.  processLate is executed just before the parser exits.  This
	is the place to fix up anything that uses the table mixed in.  Note,
	however, that you should be as conservative as possible here -- you
	should think of DC structures as immutable as long as possible.

	Programmatically, you can check if a certain table mixes in 
	something by calling its mixesIn method.

	Recursive application of mixins, even to separate objects, will deadlock.
	"""
	name_ = "mixinDef"

	_doc = base.UnicodeAttribute("doc", description="Documentation for"
		" this mixin", strip=False)
	_events = base.StructAttribute("events", 
		childFactory=activetags.EmbeddedStream,
		description="Events to be played back into the structure mixing"
		" this in at mixin time.", copyable=True,
		default=base.NotGiven)
	_lateEvents = base.StructAttribute("lateEvents", 
		childFactory=LateEvents,
		description="Events to be played back into the structure mixing"
		" this in at completion time.", copyable=True,
		default=base.NotGiven)
	_processEarly = base.StructAttribute("processEarly", 
		default=None, 
		childFactory=ProcessEarly,
		description="Code executed at element fixup.",
		copyable=True)
	_processLate = base.StructAttribute("processLate", 
		default=None, 
		childFactory=ProcessLate,
		description="Code executed resource fixup.",
		copyable=True)
	_pars = base.UniquedStructListAttribute("pars",
		childFactory=MixinPar,
		uniqueAttribute="key",
		description="Parameters available for this mixin.",
		copyable=True)
	_original = base.OriginalAttribute()

	def completeElement(self, ctx):
		# we want to double-expand macros in mixins.  Thus, reset all
		# value/expanded events to plain values
		if self.events:
			self.events.unexpandMacros()
		if self.lateEvents:
			self.lateEvents.unexpandMacros()

		# This lock protects against multiple uses of applyTo.  This is
		# necessary because during replay, we have macroExpansions and
		# macroParent reflect a concrete application's context.
		self.applicationLock = threading.Lock()
		self._completeElementNext(MixinDef, ctx)

	def _defineMacros(self, fillers, destination):
		"""creates attributes macroExpansions and parentMacroPackage used by
		execMacros.

		Within mixins, you can use macros filled by mixin parameters or
		expanded by the substrate.  This information is local to a concrete
		mixin application.  Hence, applyTo calls this method, and the
		attributes created are invalid for any subsequent or parallel applyTo
		calls.  Therefore, applyTo acquires the applicationLock before
		calling this.
		"""
		self.parentMacroPackage = None
		if hasattr(destination, "execMacro"):
			self.parentMacroPackage = destination

		self.macroExpansions = {}
		for p in self.pars:
			if p.key in fillers:
				self.macroExpansions[p.key] = fillers.pop(p.key)
			elif p.isDefaulted():
				self.macroExpansions[p.key] = p.content_
			else:
				raise base.StructureError("Mixin parameter %s mandatory"%p.key)
		if fillers:
			raise base.StructureError("The attribute(s) %s is/are not allowed"
				" on this mixin"%(",".join(fillers)))

	def execMacro(self, macName, args):
		if macName in self.macroExpansions:
			return self.macroExpansions[macName]
		try:
			if self.parentMacroPackage:
				return self.parentMacroPackage.execMacro(macName, args)
		except base.MacroError:
			raise base.MacroError(
				"No macro \\%s available in this mixin or substrate."%(macName), 
				macName)

	def applyTo(self, destination, ctx, fillers={}):
		"""replays the stored events on destination and arranges for processEarly
		and processLate to be run.
		"""
		with self.applicationLock:
			self._defineMacros(fillers.copy(), destination)
			if self.events:
				self.replay(self.events.events_, destination, ctx)

			if self.processEarly is not None:
				self.processEarly.compile(destination)(ctx, destination, 
					self.macroExpansions)

			if self.processLate is not None:
				def procLate(rootStruct, parseContext):
					self.processLate.compile(destination)(
						destination, rootStruct, parseContext)
				ctx.addExitFunc(procLate)

		if self.lateEvents:
			origComplete = destination.completeElement
			def newComplete(ctx):
				with self.applicationLock:
					self._defineMacros(fillers.copy(), destination)
					self.replay(self.lateEvents.events_, destination, ctx)
				origComplete(ctx)
			destination.completeElement = newComplete

	def applyToFinished(self, destination):
		"""applies the mixin to an object already parsed.

		Late callbacks will only be executed if destination has an rd
		attribute; if that is the case, this rd's idmap will be amended
		with anything the mixin comes up with.
		"""
		rd = None
		if hasattr(destination, "rd"):
			rd = destination.rd

		ctx = base.ParseContext()
		if rd is not None:
			ctx.idmap = destination.rd.idmap
		self.applyTo(destination, ctx)

		# we don't keep the application lock for this; applyToFinished
		# is more of a debugging thing, so we don't worry too much.
		if self.lateEvents:
			self.replay(self.lateEvents.events_, destination, ctx)

		if rd is not None:
			ctx.runExitFuncs(rd)
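
# A minimal sketch of the parameter resolution in _defineMacros above: explicit
# fillers win, defaulted pars fall back to their content, missing mandatory
# pars and leftover fillers are errors.  pars is modelled as (key, default)
# pairs with None meaning "no default".
def _exampleResolveMixinPars(pars, fillers):
    fillers = dict(fillers)
    expansions = {}
    for key, default in pars:
        if key in fillers:
            expansions[key] = fillers.pop(key)
        elif default is not None:
            expansions[key] = default
        else:
            raise ValueError("Mixin parameter %s mandatory" % key)
    if fillers:
        raise ValueError("Extra mixin parameters: %s" % ",".join(fillers))
    return expansions

# _exampleResolveMixinPars([("table", None), ("unit", "deg")], {"table": "x"})
# -> {'table': 'x', 'unit': 'deg'}
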
class Service(base.Structure, base.ComputedMetaMixin, base.StandardMacroMixin,
              rscdef.IVOMetaMixin):
    """A service definition.

	A service is a combination of a core and one or more renderers.  They
	can be published, and they carry the metadata published into the VO.

	You can set the defaultSort property on the service to a name of an
	output column to preselect a sort order.  Note again that this will
	slow down responses for all but the smallest tables unless there is
	an index on the corresponding column.

	Properties evaluated:

	* defaultSort -- a key to sort on by default with the form renderer.  
	  This differs from the dbCore's sortKey in that this does not suppress the
	  widget itself, it just sets a default for its value.  Don't use this unless
	  you have to; the combination of sort and limit can have disastrous effects
	  on the run time of queries.
	* votableRespectsOutputTable -- usually, VOTable output puts in
	  all columns from the underlying database table with low enough
	  verbLevel (essentially).  When this property is "True" (case-sensitive),
	  that's not done and only the service's output table is evaluated.
	  [Note that column selection is such a mess it needs to be fixed
	  before version 1.0 anyway.]
	"""
    name_ = "service"

    _core = CoreAttribute()
    _templates = base.DictAttribute(
        "templates",
        description="Custom"
        ' nevow templates for this service; use key "form" to replace the Form'
        " renderer's standard template.  Start the path with two slashes to"
        " access system templates.",
        itemAttD=rscdef.ResdirRelativeAttribute(
            "template",
            description="resdir-relative path to a nevow template"
            " used for the function given in key."),
        copyable=True)
    _publications = base.StructListAttribute(
        "publications",
        childFactory=Publication,
        description="Sets and renderers this service"
        " is published with.")
    _limitTo = base.UnicodeAttribute(
        "limitTo",
        default=None,
        description="Limit access to the group given; the empty default disables"
        " access control.",
        copyable="True")
    _customPage = rscdef.ResdirRelativeAttribute(
        "customPage",
        default=None,
        description="resdir-relative path to custom page code.  It is used"
        " by the 'custom' renderer",
        copyable="True")
    _allowedRenderers = base.StringSetAttribute(
        "allowed",
        description="Names of renderers allowed on this service; leave emtpy"
        " to allow the form renderer only.",
        copyable=True)
    _customRF = base.StructListAttribute(
        "customRFs",
        description="Custom render functions for use in custom templates.",
        childFactory=CustomRF,
        copyable=True)
    _customDF = base.StructListAttribute(
        "customDFs",
        description="Custom data functions for use in custom templates.",
        childFactory=CustomDF,
        copyable=True)
    _inputData = base.StructAttribute(
        "inputDD",
        default=base.NotGiven,
        childFactory=inputdef.InputDescriptor,
        description="A data descriptor"
        " for obtaining the core's input, usually based on a contextGrammar."
        "  For many cores (e.g., DBCores), you do not want to give this"
        " but rather want to let service figure this out from the core.",
        copyable=True)
    _outputTable = base.StructAttribute(
        "outputTable",
        default=base.NotGiven,
        childFactory=outputdef.OutputTableDef,
        copyable=True,
        description="The output fields of this service.")
    _serviceKeys = base.StructListAttribute(
        "serviceKeys",
        childFactory=inputdef.InputKey,
        description="Input widgets for"
        " processing by the service, e.g. output sets.",
        copyable=True)
    _defaultRenderer = base.UnicodeAttribute(
        "defaultRenderer",
        default=None,
        description="A name of a renderer used when"
        " none is provided in the URL (lets you have shorter URLs).")
    _rd = rscdef.RDAttribute()
    _props = base.PropertyAttribute()
    _original = base.OriginalAttribute()

    metaModel = ("title(1), creationDate(1), description(1),"
                 "subject, referenceURL(1), shortName(!)")

    # formats that should query the same fields as HTML (the others behave
    # like VOTables and offer a "verbosity" widget in forms).
    htmlLikeFormats = ["HTML", "tar"]

    ####################### Housekeeping methods

    def __repr__(self):
        return "<Service at %x>" % id(self)

    def completeElement(self, ctx):
        self._completeElementNext(Service, ctx)
        if not self.allowed:
            self.allowed.add("form")

        if self.core is base.Undefined:
            # undefined cores are only allowed with custom pages
            # (Deprecated)
            if self.customPage:
                self.core = core.getCore("nullCore")(
                    self.rd).finishElement(None)
                base.ui.notifyWarning(
                    "Custom page service %s without nullCore."
                    "  This is deprecated, please fix" % self.id)
            else:
                raise base.StructureError(
                    "Services must have cores (add <nullCore/>"
                    " if you really do not want a core, e.g., with fixed renderers)."
                )

        # if there's only one renderer on this service, make it the default
        if self.defaultRenderer is None and len(self.allowed) == 1:
            self.defaultRenderer = list(self.allowed)[0]
        # empty output tables are filled from the core
        if self.outputTable is base.NotGiven:
            self.outputTable = self.core.outputTable

        # cache all kinds of things expensive to create and parse
        self._coresCache = {}
        self._inputDDCache = {}
        self._loadedTemplates = {}

        # Schedule the capabilities to be added when the parse is
        # done (i.e., the RD is complete)
        ctx.addExitFunc(lambda rd, ctx: self._addAutomaticCapabilities())

    def onElementComplete(self):
        self._onElementCompleteNext(Service)

        # Index custom render/data functions
        self.nevowRenderers = {}
        for customRF in self.customRFs:
            self.nevowRenderers[customRF.name] = customRF.func
        self.nevowDataFunctions = {}
        for customDF in self.customDFs:
            self.nevowDataFunctions[customDF.name] = customDF.func

        self._compileCustomPage()

        self._computeResourceType()

    def _compileCustomPage(self):
        if self.customPage:
            try:
                modNs, moddesc = utils.loadPythonModule(self.customPage)
                modNs.RD = self.rd
                getattr(modNs, "initModule", lambda: None)()
                page = modNs.MainPage
            except ImportError:
                raise base.ui.logOldExc(
                    base.LiteralParseError(
                        "customPage",
                        self.customPage,
                        hint=
                        "This means that an exception was raised while DaCHS"
                        " tried to import the renderer module.  If DaCHS ran"
                        " with --debug, the original traceback is available"
                        " in the logs."))
            self.customPageCode = page, (os.path.basename(
                self.customPage), ) + moddesc

    def getTemplate(self, key):
        """returns the nevow template for the function key on this service.
		"""
        if key not in self._loadedTemplates:
            from nevow import loaders
            tp = self.templates[key]
            if tp.startswith("//"):
                self._loadedTemplates[key] = common.loadSystemTemplate(tp[2:])
            else:
                self._loadedTemplates[key] = loaders.xmlfile(
                    os.path.join(self.rd.resdir, tp))
        return self._loadedTemplates[key]

    def getUWS(self):
        """returns a user UWS instance for this service.

		This is a service for the UWSAsyncRenderer.
		"""
        if not hasattr(self, "uws"):
            from gavo.protocols import useruws
            self.uws = useruws.makeUWSForService(self)
        return self.uws

    ################### Registry and related methods.

    @property
    def isVOPublished(self, renderer=None):
        """is true if there is any ivo_managed publication on this
		service.

		If renderer is non-None, only publications with this renderer name
		count.
		"""
        for pub in self.publications:
            if "ivo_managed" in pub.sets:
                if renderer:
                    if pub.render == renderer:
                        return True
                else:
                    return True
        return False

    def _computeResourceType(self):
        """sets the resType attribute.

		Services are resources, and the registry code wants to know what kind.
		This method ventures a guess.  You can override this decision by setting
		the resType meta item.
		"""
        if (self.outputTable.columns or self.outputTable.verbLevel
                or "tap" in self.allowed):
            self.resType = "catalogService"
        else:  # no output table defined, we're a plain service
            self.resType = "nonTabularService"

    def _addAutomaticCapabilities(self):
        """adds some publications that are automatic for certain types
		of services.

		For services with ivo_managed publications and with useful cores
		(this keeps out doc-like publications, which shouldn't have VOSI
		resources), artificial VOSI publications are added.

		If there is _example meta, an examples publication is added.

		If this service exposes a table (i.e., a DbCore with a queriedTable)
		and that table is adql-readable, also add an auxiliary TAP publication
		if going to the VO.

		This is being run as an exit function from the parse context as
		we want the RD to be complete at this point (e.g., _examples
		meta might come from it).  This also lets us liberally resolve
		references anywhere.
		"""
        if not self.isVOPublished:
            return
        vosiSet = set(["ivo_managed"])

        # All actual services get VOSI caps
        if not isinstance(self.core, core.getCore("nullCore")):
            self._publications.feedObject(
                self,
                base.makeStruct(Publication,
                                render="availability",
                                sets=vosiSet,
                                parent_=self))
            self._publications.feedObject(
                self,
                base.makeStruct(Publication,
                                render="capabilities",
                                sets=vosiSet,
                                parent_=self))
            self._publications.feedObject(
                self,
                base.makeStruct(Publication,
                                render="tableMetadata",
                                sets=vosiSet,
                                parent_=self))

        # things querying tables get a TAP relationship if
        # their table is adql-queriable
        if isinstance(self.core, core.getCore("dbCore")):
            if self.core.queriedTable.adql:
                tapService = base.resolveCrossId("//tap#run")
                self._publications.feedObject(
                    self,
                    base.makeStruct(Publication,
                                    render="tap",
                                    sets=vosiSet,
                                    auxiliary=True,
                                    service=tapService,
                                    parent_=self))
                # and they need a servedBy, too.
                # According to the "discovering dependent" note, we don't
                # do the reverse relationship lest the TAP service
                # gets too related...
                self.addMeta("servedBy",
                             base.getMetaText(tapService, "title"),
                             ivoId=base.getMetaText(tapService, "identifier"))

        # things with examples meta get an examples capability
        try:
            self.getMeta("_example", raiseOnFail=True)
            self._publications.feedObject(
                self,
                base.makeStruct(Publication,
                                render="examples",
                                sets=utils.AllEncompassingSet(),
                                parent_=self))
        except base.NoMetaKey:
            pass

    def getPublicationsForSet(self, names):
        """returns publications for set names in names.

		names must be a set.  
		"""
        additionals = []
        # for ivo_managed, also return datalink endpoints if they're
        # there; the specs imply that might be useful some day.
        if self.getProperty("datalink", None):
            dlSvc = self.rd.getById(self.getProperty("datalink"))
            if "dlget" in dlSvc.allowed:
                additionals.append(
                    base.makeStruct(Publication,
                                    render="dlget",
                                    sets="ivo_managed",
                                    service=dlSvc))

            if "dlasync" in dlSvc.allowed:
                additionals.append(
                    base.makeStruct(Publication,
                                    render="dlasync",
                                    sets="ivo_managed",
                                    service=dlSvc))

            if "dlmeta" in dlSvc.allowed:
                additionals.append(
                    base.makeStruct(Publication,
                                    render="dlmeta",
                                    sets="ivo_managed",
                                    service=dlSvc))

        return [pub
                for pub in self.publications if pub.sets & names] + additionals

    def getURL(self, rendName, absolute=True, **kwargs):
        """returns the full canonical access URL of this service together 
		with renderer.

		rendName is the name of the intended renderer in the registry
		of renderers.

		With absolute=True, a fully qualified URL is returned.

		Further keyword arguments are translated into URL parameters in the
		query part.
		"""
        basePath = "%s%s/%s" % (base.getConfig(
            "web", "nevowRoot"), self.rd.sourceId, self.id)
        if absolute:
            basePath = base.getConfig("web", "serverURL") + basePath
        res = renderers.getRenderer(rendName).makeAccessURL(basePath)

        if kwargs:
            res = res + "?" + urllib.urlencode(kwargs)
        return res

    # used by getBrowserURL; keep external higher than form as long as
    # we have mess like Potsdam CdC.
    _browserScores = {
        "form": 10,
        "external": 12,
        "fixed": 15,
        "custom": 3,
        "img.jpeg": 2,
        "static": 1
    }

    def getBrowserURL(self, fq=True):
        """returns a published URL that's suitable for a web browser or None if
		no such URL can be guessed.

		If you pass fq=False, you will get a path rather than a URL.
		"""
        # There can be multiple candidates for browser URLs (like when a service
        # has form, static, and external renderers).  If so, we select
        # by plain scores.
        browseables = []
        for rendName in self.allowed:
            if self.isBrowseableWith(rendName):
                browseables.append((self._browserScores.get(rendName,
                                                            -1), rendName))
        if browseables:
            return self.getURL(max(browseables)[1], absolute=fq)
        else:
            return None

    def isBrowseableWith(self, rendName):
        """returns true if rendering this service through rendName results 
		in something pretty in a web browser.
		"""
        try:
            return bool(renderers.getRenderer(rendName).isBrowseable(self))
        except base.NotFoundError:  # renderer name not known
            return False

    def getTableSet(self):
        """returns a list of table definitions that have something to do with
		this service.

		This is for VOSI-type queries.  Usually, that's just the core's
		queried table or an output table, except when there is a TAP renderer on
		the service.

		All this is a bit heuristic; but then again, there's no rigorous 
		definition for what's to be in a tables endpoint either.
		"""
        tables = []

        # output our own outputTable if it sounds reasonable; if so,
        # add the core's queried table, too, if it has one.
        if self.outputTable and self.outputTable.columns:
            tables.append(self.outputTable)
            tables.append(getattr(self.core, "queriedTable", None))

        else:
            # if our outputTable is no good, just use the one of the core
            qt = getattr(self.core, "queriedTable", None)
            if qt is None:
                qt = getattr(self.core, "outputTable", None)
            if qt is not None:
                tables.append(qt)

        # XXX TODO: This stinks big time.  It's because we got TAP factorization
        # wrong.  Sync and async should be renderers, and there should
        # be a core that then could say this kind of thing.  That's not
        # yet the case, so:
        if "tap" in self.allowed:
            # tap never has "native" tables, so start afresh
            tables = []

            mth = base.caches.getMTH(None)
            for tableName in mth.getTAPTables():
                try:
                    tables.append(mth.getTableDefForTable(tableName))
                except:
                    base.ui.notifyError(
                        "Failure trying to retrieve table definition"
                        " for table %s.  Please fix the corresponding RD." %
                        tableName)

        return [t for t in tables if t is not None and t.rd is not None]

    def declareServes(self, data):
        """adds meta to self and data indicating that data is served by
		service.

		This is used by table/@adql and the publish element on data.
		"""
        if data.registration:
            self.addMeta("serviceFor",
                         base.getMetaText(data, "title", default="Anonymous"),
                         ivoId=base.getMetaText(data, "identifier"))
            data.addMeta("servedBy",
                         base.getMetaText(self, "title"),
                         ivoId=base.getMetaText(self, "identifier"))

            # Since this is always initiated by the data, the dependency
            # must show up in its RD to be properly added on publication
            # and to be removed when the data is removed.
            data.rd.addDependency(self.rd, data.rd)

    ########################## Output field selection (ouch!)

    def _getVOTableOutputFields(self, queryMeta):
        """returns a list of OutputFields suitable for a VOTable response 
		described by queryMeta.

		This contains the fields given for HTML whose verbLevel is low
		enough and that have no noxml displayHint.

		In addition, more columns are added from outputTable's parent (which
		usually will be the database table itself) if their verbLevel is low
		enough.  This may be suppressed by setting the
		votableRespectsOutputTable property to "True".
		"""
        verbLevel = queryMeta.get("verbosity", 20)
        fields = [
            f for f in self.getHTMLOutputFields(queryMeta) if
            f.verbLevel <= verbLevel and f.displayHint.get("noxml") != "true"
        ]

        if (verbLevel != "HTML" and self.getProperty(
                "votableRespectsOutputTable", None) != "True"):
            htmlNames = set(f.name for f in fields)

            for field in self.outputTable.parentTable:
                if field.name in htmlNames:
                    continue
                if (field.displayHint.get("type") == "suppress"
                        or field.displayHint.get("noxml") == "true"):
                    continue
                if field.verbLevel <= verbLevel:
                    fields.append(field)

        return rscdef.ColumnList(fields)

    _allSet = set(["ALL"])

    def getHTMLOutputFields(self,
                            queryMeta,
                            ignoreAdditionals=False,
                            raiseOnUnknown=True):
        """returns a list of OutputFields suitable for an HTML response described
		by queryMeta.

		raiseOnUnknown is used by customwidgets to avoid exceptions because of
		bad additional fields during form construction (when they aren't
		properly caught).
		"""
        requireSet = queryMeta["columnSet"]
        res = rscdef.ColumnList()

        # add "normal" output fields
        if requireSet:
            res.extend([
                f for f in self.outputTable
                if f.sets == self._allSet or requireSet & f.sets
            ])
        else:
            res.extend([
                f for f in self.outputTable
                if f.displayHint.get("type") != "suppress"
            ])

        # add user-selected fields
        if not ignoreAdditionals and queryMeta["additionalFields"]:
            cofs = self.core.outputTable.columns
            try:
                for fieldName in queryMeta["additionalFields"]:
                    col = cofs.getColumnByName(fieldName)
                    if isinstance(col, outputdef.OutputField):
                        res.append(col)
                    else:
                        res.append(outputdef.OutputField.fromColumn(col))
            except base.NotFoundError, msg:
                if raiseOnUnknown:
                    raise base.ValidationError(
                        "The additional field %s you requested"
                        " does not exist" % repr(msg.lookedFor),
                        colName="_OUTPUT")
        return res
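
# Illustrative usage sketch, not part of the excerpt above.  It assumes a
# DaCHS installation and an RD "myres/q" that defines a service "cone"
# allowing the form renderer; both identifiers are made up, and api.getRD
# is assumed to be available for loading the RD.
from gavo import api

svc = api.getRD("myres/q").getById("cone")
print(svc.getURL("form", absolute=True, RA=10.5, DEC=-3.2))
print(svc.getBrowserURL(fq=False))
# getPublicationsForSet expects a set of publication set names
for pub in svc.getPublicationsForSet(set(["ivo_managed"])):
    print(pub.render)
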
Example #10
class Core(base.Structure):
    """A definition of the "active" part of a service.

	Cores receive their input in tables the structure of which is
	defined by their inputTable attribute.

	The abstract core element will never occur in resource descriptors.  See 
	`Cores Available`_ for concrete cores.  Use the names of the concrete
	cores in RDs.

	You can specify an input table in an inputTableXML class attribute and
	an output table in an outputTableXML class attribute.
	"""
    name_ = "core"

    inputTableXML = None
    outputTableXML = None

    # the cached prototype of the output table, filled in by
    # _OutputTableFactory
    _ot_prototype = None

    _rd = rscdef.RDAttribute()
    _inputTable = base.StructAttribute(
        "inputTable",
        default=base.NotGiven,
        childFactory=inputdef.InputTable,
        description="Description of the input data",
        copyable=True)

    _outputTable = base.StructAttribute(
        "outputTable",
        default=base.NotGiven,
        childFactory=_OutputTableFactory(),
        description="Table describing what fields are available from this core",
        copyable=True)

    _original = base.OriginalAttribute()

    _properties = base.PropertyAttribute()

    def __init__(self, parent, **kwargs):
        if self.inputTableXML is not None:
            if "inputTable" not in kwargs:
                kwargs["inputTable"] = base.parseFromString(
                    inputdef.InputTable, self.inputTableXML)

        base.Structure.__init__(self, parent, **kwargs)

    def __repr__(self):
        return "<%s at %s>" % (self.__class__.__name__, id(self))

    def __str__(self):
        return repr(self)

    def completeElement(self, ctx):
        self._completeElementNext(Core, ctx)
        if self.inputTable is base.NotGiven:
            self.inputTable = base.makeStruct(inputdef.InputTable)
        if self.outputTable is base.NotGiven:
            self.outputTable = self._outputTable.childFactory(self)

    def adaptForRenderer(self, renderer):
        """returns a core object tailored for renderer.
		"""
        newIT = self.inputTable.adaptForRenderer(renderer)
        if newIT is self.inputTable:
            return self
        else:
            return self.change(inputTable=newIT)

    def run(self, service, inputData, queryMeta):
        raise NotImplementedError("%s cores are missing the run method" %
                                  self.__class__.__name__)

    def makeUserDoc(self):
        return ("Polymorphous core element.  May contain any of the cores"
                " mentioned in `Cores Available`_ .")
class ProcSetup(base.Structure):
	"""Prescriptions for setting up a namespace for a procedure application.

	You can add names to this namespace using par(ameter)s.
	If a parameter has no default and a procedure application does
	not provide it, an error is raised.

	You can also add names by providing a code attribute containing
	a python function body.  Within it, the parameters are
	available.  The procedure application's parent can be accessed
	as parent.  All names you define in the code are available as
	globals to the procedure body.

	Caution: Macros are expanded within the code; this means you
	need double backslashes if you want a single backslash in python
	code.
	"""
	name_ = "setup"

	_code = base.ListOfAtomsAttribute("codeFrags",
		description="Python function bodies setting globals for the function"
		" application.  Macros are expanded in the context"
		" of the procedure's parent.", 
		itemAttD=base.UnicodeAttribute("code", description="Python function"
			" bodies setting globals for the function application.  Macros"
			" are expanded in the context of the procedure's parent.",
			copyable=True),
		copyable=True)
	_pars = base.StructListAttribute("pars", ProcPar,
		description="Names to add to the procedure's global namespace.", 
		copyable=True)
	_original = base.OriginalAttribute()

	def _getParSettingCode(self, useLate, indent, bindings):
		"""returns code that sets our parameters.

		If useLate is true, generate for late bindings.  Indent the
		code by indent.  Bindings is a dictionary overriding
		the defaults or setting parameter values.
		"""
		parCode = []
		for p in self.pars:
			if p.late==useLate:
				val = bindings.get(p.key, base.NotGiven)
				if val is base.NotGiven:
					val = p.content_
				parCode.append("%s%s = %s"%(indent, p.key, val))
		return "\n".join(parCode)

	def getParCode(self, bindings):
		"""returns code doing setup bindings un-indented.
		"""
		return self._getParSettingCode(False, "", bindings)

	def getLateCode(self, bindings):
		"""returns code doing late (in-function) bindings indented with two
		spaces.
		"""
		return self._getParSettingCode(True, "  ", bindings)

	def getBodyCode(self):
		"""returns the body code un-indented.
		"""
		collectedCode = []
		for frag in self.codeFrags:
			collectedCode.append(
				utils.fixIndentation(frag, "", governingLine=1))
		return "\n".join(collectedCode)
class ProcDef(base.Structure, base.RestrictionMixin):
	"""An embedded procedure.

	Embedded procedures are python code fragments with some interface defined
	by their type.  They can occur at various places (generically called
	procedure applications), e.g., as row generators in grammars, as applys in
	rowmakers, or as SQL phrase makers in condDescs.

	They consist of the actual code and, optionally, definitions like
	the namespace setup, configuration parameters, or documentation.

	The procedure applications compile into python functions with special
	global namespaces.  The signatures of the functions are determined by
	the type attribute.

	ProcDefs are referred to by procedure applications using their id.
	"""
	name_ = "procDef"

	_code = base.UnicodeAttribute("code", default=base.NotGiven,
		copyable=True, description="A python function body.")
	_setup = base.StructListAttribute("setups", ProcSetup,
		description="Setup of the namespace the function will run in", 
		copyable=True)
	_doc = base.UnicodeAttribute("doc", default="", description=
		"Human-readable docs for this proc (may be interpreted as restructured"
		" text).", copyable=True)
	_type = base.EnumeratedUnicodeAttribute("type", default=None, description=
		"The type of the procedure definition.  The procedure applications"
		" will in general require certain types of definitions.",
		validValues=["t_t", "apply", "rowfilter", "sourceFields", "mixinProc",
			"phraseMaker", "descriptorGenerator", "dataFunction", "dataFormatter",
			"metaMaker", "regTest", "iterator", "pargetter"], 
			copyable=True,
		strip=True)
	_deprecated = base.UnicodeAttribute("deprecated", default=None,
		copyable=True, description="A deprecation message.  This will"
			" be shown if this procDef is being compiled.")
	_original = base.OriginalAttribute()


	def getCode(self):
		"""returns the body code indented with two spaces.
		"""
		if self.code is base.NotGiven:
			return ""
		else:
			return utils.fixIndentation(self.code, "  ", governingLine=1)

	@utils.memoized
	def getSetupPars(self):
		"""returns all parameters used by setup items, where lexically
		later items override earlier items of the same name.
		"""
		return unionByKey(*[s.pars for s in self.setups])

	def getLateSetupCode(self, boundNames):
		return "\n".join(s.getLateCode(boundNames) for s in self.setups)

	def getParSetupCode(self, boundNames):
		return "\n".join(s.getParCode(boundNames) for s in self.setups)

	def getBodySetupCode(self, boundNames):
		return "\n".join(s.getBodyCode() for s in self.setups)
class TableDef(base.Structure, base.ComputedMetaMixin, common.PrivilegesMixin,
               common.IVOMetaMixin, base.StandardMacroMixin,
               PublishableDataMixin):
    """A definition of a table, both on-disk and internal.

	Some attributes are ignored for in-memory tables, e.g., roles or adql.

	Properties for tables:

	* supportsModel -- a short name of a data model supported through this 
	  table (for TAPRegExt dataModel); you can give multiple names separated
	  by commas.
	* supportsModelURI -- a URI of a data model supported through this table.
	  You can give multiple URIs separated by blanks.
	
	If you give multiple data model names or URIs, the sequences of names and
	URIs must run in parallel (in particular, each name needs a URI).
	"""
    name_ = "table"

    resType = "table"

    # We don't want to force people to come up with an id for all their
    # internal tables but want to avoid writing default-named tables to
    # the db.  Thus, the default is not a valid sql identifier.
    _id = base.IdAttribute(
        "id",
        default=base.NotGiven,
        description="Name of the table (must be SQL-legal for onDisk tables)")

    _cols = common.ColumnListAttribute(
        "columns",
        childFactory=column.Column,
        description="Columns making up this table.",
        copyable=True)

    _params = common.ColumnListAttribute(
        "params",
        childFactory=column.Param,
        description='Param ("global columns") for this table.',
        copyable=True)

    _viewStatement = base.UnicodeAttribute(
        "viewStatement",
        default=None,
        description="A single SQL statement to create a view.  Setting this"
        " makes this table a view.  The statement will typically be something"
        " like CREATE VIEW \\\\curtable AS (SELECT \\\\colNames FROM...).",
        copyable=True)

    # onDisk must not be copyable since queries might copy the tds and havoc
    # would result if the queries were to end up on disk.
    _onDisk = base.BooleanAttribute(
        "onDisk",
        default=False,
        description="Table in the database rather than in memory?")

    _temporary = base.BooleanAttribute(
        "temporary",
        default=False,
        description="If this is an onDisk table, make it temporary?"
        "  This is mostly useful for custom cores and such.",
        copyable=True)

    _adql = ADQLVisibilityAttribute(
        "adql",
        default=False,
        description="Should this table be available for ADQL queries?  In"
        " addition to True/False, this can also be 'hidden' for tables"
        " readable from the TAP machinery but not published in the"
        " metadata; this is useful for, e.g., tables contributing to a"
        " published view.  Warning: adql=hidden is incompatible with setting"
        " readProfiles manually.")

    _system = base.BooleanAttribute(
        "system",
        default=False,
        description="Is this a system table?  If it is, it will not be"
        " dropped on normal imports, and accesses to it will not be logged.")

    _forceUnique = base.BooleanAttribute(
        "forceUnique",
        default=False,
        description="Enforce dupe policy for primary key (see dupePolicy)?")

    _dupePolicy = base.EnumeratedUnicodeAttribute(
        "dupePolicy",
        default="check",
        validValues=["check", "drop", "overwrite", "dropOld"],
        description="Handle duplicate rows with identical primary keys manually"
        " by raising an error if existing and new rows are not identical (check),"
        " dropping the new one (drop), updating the old one (overwrite), or"
        " dropping the old one and inserting the new one (dropOld)?")

    _primary = ColumnTupleAttribute(
        "primary",
        default=(),
        description=
        "Comma separated names of columns making up the primary key.",
        copyable=True)

    _indices = base.StructListAttribute(
        "indices",
        childFactory=DBIndex,
        description="Indices defined on this table",
        copyable=True)

    _foreignKeys = base.StructListAttribute(
        "foreignKeys",
        childFactory=ForeignKey,
        description="Foreign keys used in this table",
        copyable=False)

    _groups = base.StructListAttribute(
        "groups",
        childFactory=group.Group,
        description="Groups for columns and params of this table",
        copyable=True)

    # this actually induces an attribute annotations holding the DM
    # annotation instances
    _annotations = base.StructListAttribute(
        "dm",
        childFactory=dm.DataModelRoles,
        description="Annotations for data models.",
        copyable=True)

    _properties = base.PropertyAttribute()

    # don't copy stc -- columns just keep the reference to the original
    # stc on copy, and nothing should rely on column stc actually being
    # defined in the parent tableDefs.
    _stcs = base.StructListAttribute("stc",
                                     description="STC-S definitions"
                                     " of coordinate systems.",
                                     childFactory=STCDef)

    _rd = common.RDAttribute()
    _mixins = mixins.MixinAttribute()
    _original = base.OriginalAttribute()
    _namePath = common.NamePathAttribute()

    fixupFunction = None

    metaModel = ("title(1), creationDate(1), description(1),"
                 "subject, referenceURL(1)")

    @classmethod
    def fromColumns(cls, columns, **kwargs):
        """returns a TableDef from a sequence of columns.

		You can give additional constructor arguments.  makeStruct is used
		to build the instance, and the mixin hack is applied.

		Columns with identical names will be disambiguated.
		"""
        res = MS(cls,
                 columns=common.ColumnList(cls.disambiguateColumns(columns)),
                 **kwargs)
        return res

    def __iter__(self):
        return iter(self.columns)

    def __contains__(self, name):
        try:
            self.columns.getColumnByName(name)
        except base.NotFoundError:
            return False
        return True

    def __repr__(self):
        try:
            return "<Table definition of %s>" % self.getQName()
        except base.Error:
            return "<Non-RD table %s>" % self.id

    def completeElement(self, ctx):
        # we want a meta parent as soon as possible, and we always let it
        # be our struct parent
        if (not self.getMetaParent() and self.parent
                and hasattr(self.parent, "_getMeta")):
            self.setMetaParent(self.parent)

        # Make room for DM annotations (these are currently filled by
        # gavo.dm.dmrd.DataModelRoles, but we might reconsider this)
        self.annotations = []

        if self.viewStatement and getattr(ctx, "restricted", False):
            raise base.RestrictedElement(
                "table",
                hint="tables with"
                " view creation statements are not allowed in restricted mode")

        if self.registration and self.id is base.NotGiven:
            raise base.StructureError("Published tables need an assigned id.")
        if not self.id:
            self._id.feed(ctx, self, utils.intToFunnyWord(id(self)))

        # allow iterables to be passed in for columns and convert them
        # to a ColumnList here
        if not isinstance(self.columns, common.ColumnList):
            self.columns = common.ColumnList(self.columns)
        self._resolveSTC()
        self._completeElementNext(TableDef, ctx)
        self.columns.withinId = self.params.tableName = "table " + self.id

    def validate(self):
        if self.id.upper() in adql.allReservedWords:
            raise base.StructureError(
                "Reserved word %s is not allowed as a table"
                " name" % self.id)
        self._validateNext(TableDef)

    def onElementComplete(self):
        if self.adql:
            self.readProfiles = (self.readProfiles
                                 | base.getConfig("db", "adqlProfiles"))
        self.dictKeys = [c.key for c in self]

        self.indexedColumns = set()
        for index in self.indices:
            for col in index.columns:
                if "\\" in col:
                    try:
                        self.indexedColumns.add(self.expand(col))
                    except (base.Error,
                            ValueError):  # cannot expand yet, ignore
                        pass
                else:
                    self.indexedColumns.add(col)
        if self.primary:
            self.indexedColumns |= set(self.primary)

        self._defineFixupFunction()

        self._onElementCompleteNext(TableDef)

        if self.registration:
            self.registration.register()

    def getElementForName(self, name):
        """returns the first of column and param having name name.

		The function raises a NotFoundError if neither a column nor a param
		with name exists.
		"""
        try:
            try:
                return self.columns.getColumnByName(name)
            except base.NotFoundError:
                return self.params.getColumnByName(name)
        except base.NotFoundError, ex:
            ex.within = "table %s" % self.id
            raise
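
# Illustrative sketch, not part of the excerpt above: building an in-memory
# table definition through the fromColumns classmethod shown above.  It uses
# base.makeStruct as the rest of this module does; the table id and the
# column names are made up.
_demoCols = [
    base.makeStruct(column.Column, name=name, type="double precision")
    for name in ["ra", "dec"]]
demoTD = TableDef.fromColumns(_demoCols, id="demo")
print("ra" in demoTD)               # __contains__ resolves columns by name
print([c.name for c in demoTD])     # __iter__ runs over the columns
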
Example #14
class CondDesc(base.Structure):
	"""A query specification for cores talking to the database.
	
	CondDescs define inputs as a sequence of InputKeys (see `Element InputKey`_).
	Internally, the values in the InputKeys can be translated to SQL.
	"""
	name_ = "condDesc"

	_inputKeys = rscdef.ColumnListAttribute("inputKeys", 
		childFactory=inputdef.InputKey, 
		description="One or more InputKeys defining the condition's input.",
		copyable=True)

	_silent = base.BooleanAttribute("silent", 
		default=False,
		description="Do not produce SQL from this CondDesc.  This"
			" can be used to convey meta information to the core.  However,"
			" in general, a service is a more appropriate place to deal with"
			" such information, and thus you should prefer service InputKeys"
			" to silent CondDescs.",
		copyable=True)

	_required = base.BooleanAttribute("required", 
		default=False,
		description="Reject queries not filling the InputKeys of this CondDesc",
		copyable=True)

	_fixedSQL = base.UnicodeAttribute("fixedSQL", 
		default=None,
		description="Always insert this SQL statement into the query.  Deprecated.",
		copyable=True)

	_buildFrom = base.ReferenceAttribute("buildFrom", 
		description="A reference to a column or an InputKey to define"
			" this CondDesc",
		default=None)

	_phraseMaker = base.StructAttribute("phraseMaker", 
		default=None,
		description="Code to generate custom SQL from the input keys", 
		childFactory=PhraseMaker, 
		copyable=True)

	_combining = base.BooleanAttribute("combining", 
		default=False,
		description="Allow some input keys to be missing when others are given?"
			" (you want this for pseudo-condDescs just collecting random input"
			" keys)",   # (and I wish I had a better idea)
		copyable="True")

	_group = base.StructAttribute("group",
		default=None,
		childFactory=rscdef.Group,
		description="Group child input keys in the input table (primarily"
			" interesting for web forms, where this grouping is shown graphically;"
			" Set the style property to compact to have a one-line group there)")

	_joiner = base.UnicodeAttribute("joiner",
		default="OR",
		description="When yielding multiple fragments, join them"
			" using this operator (probably the only thing besides OR is"
			" AND).",
		copyable=True)

	_original = base.OriginalAttribute()
	
	def __init__(self, parent, **kwargs):
		base.Structure.__init__(self, parent, **kwargs)
		# copy parent's resolveName if present for buildFrom resolution
		if hasattr(self.parent, "resolveName"):
			self.resolveName = self.parent.resolveName

	def __repr__(self):
		return "<CondDesc %s>"%",".join(ik.name for ik in self.inputKeys)

	@classmethod
	def fromInputKey(cls, ik, **kwargs):
		return base.makeStruct(CondDesc, inputKeys=[ik], **kwargs)

	@classmethod
	def fromColumn(cls, col, **kwargs):
		return base.makeStruct(cls, buildFrom=col, **kwargs)

	@property
	def name(self):
		"""returns some key for uniqueness of condDescs.
		"""
		# This is necessary for ColumnLists that are used
		# for CondDescs as well.  Ideally, we'd do this on an
		# InputKeys basis and yield their names (because that's what
		# formal counts on), but it's probably not worth the effort.
		return "+".join([f.name for f in self.inputKeys])

	def completeElement(self, ctx):
		if self.buildFrom and not self.inputKeys:
			# use the column as input key; special renderers may want
			# to do type mapping, but the default is to have plain input
			self.inputKeys = [inputdef.InputKey.fromColumn(self.buildFrom)]
		self._completeElementNext(CondDesc, ctx)

	def expand(self, *args, **kwargs):
		"""hands macro expansion requests (from phraseMakers) upwards.

		This is to the queried table if the parent has one (i.e., we're
		part of a core), or to the RD if not (i.e., we're defined within
		an rd).
		"""
		if hasattr(self.parent, "queriedTable"):
			return self.parent.queriedTable.expand(*args, **kwargs)
		else:
			return self.parent.rd.expand(*args, **kwargs)

	def _makePhraseDefault(self, ignored, inputKeys, inPars, outPars, core):
		# the default phrase maker uses whatever the individual input keys
		# come up with.
		for ik in self.inputKeys:
			yield base.getSQLForField(ik, inPars, outPars)

	# We only want to compile the phraseMaker if actually necessary.
	# condDescs may be defined within resource descriptors (e.g., in
	# scs.rd), and they can't be compiled there (since macros may
	# be missing); thus, we dispatch on the first call.
	def _getPhraseMaker(self):
		try:
			return self.__compiledPhraseMaker
		except AttributeError:
			if self.phraseMaker is not None:
				val = self.phraseMaker.compile()
			else:
				val = self._makePhraseDefault
			self.__compiledPhraseMaker = val
		return self.__compiledPhraseMaker
	makePhrase = property(_getPhraseMaker)

	def _isActive(self, inPars):
		"""returns True if the dict inPars contains input to all our input keys.
		"""
		for f in self.inputKeys:
			if f.name not in inPars:
				return False
		return True

	def inputReceived(self, inPars, queryMeta):
		"""returns True if all inputKeys can be filled from inPars.

		As a side effect, inPars will receive defaults from the input keys
		if there are any.
		"""
		if not self._isActive(inPars):
			return False
		keysFound, keysMissing = [], []
		for f in self.inputKeys:
			if inPars.get(f.name) is None:
				keysMissing.append(f)
			else:
				if f.value!=inPars.get(f.name): # non-defaulted
					keysFound.append(f)
		if not keysMissing:
			return True

		# keys are missing.  That's ok if none were found and we're not required
		if not self.required and not keysFound:
			return False
		if self.required:
			raise base.ValidationError("is mandatory but was not provided.", 
				colName=keysMissing[0].name)

		# we're optional, but a value was given and others are missing
		if not self.combining:
			raise base.ValidationError("When you give a value for %s,"
				" you must give value(s) for %s, too"%(keysFound[0].getLabel(), 
						", ".join(k.name for k in keysMissing)),
					colName=keysMissing[0].name)
		return True

	def asSQL(self, inPars, sqlPars, queryMeta):
		if self.silent or not self.inputReceived(inPars, queryMeta):
			return ""
		res = list(self.makePhrase(
			self, self.inputKeys, inPars, sqlPars, self.parent))
		sql = base.joinOperatorExpr(self.joiner, res)
		if self.fixedSQL:
			sql = base.joinOperatorExpr(self.joiner, [sql, self.fixedSQL])
		return sql

	def adaptForRenderer(self, renderer):
		"""returns a changed version of self if renderer suggests such a
		change.

		This only happens if buildFrom is non-None.  The method must
		return a "defused" version that has buildFrom None (or self,
		which will do because core.adaptForRenderer stops adapting if
		the condDescs are stable).

		The adaptors may also raise a Replace exception carrying a
		full CondDesc; this is done, e.g., for spoints for the form
		renderer, since they need two input keys and a completely modified
		phrase.
		"""
		if not self.buildFrom:
			return self
		adaptor = inputdef.getRendererAdaptor(renderer)
		if adaptor is None:
			return self

		try:
			newInputKeys = []
			for ik in self.inputKeys:
				newInputKeys.append(adaptor(ik))
			if self.inputKeys==newInputKeys:
				return self
			else:
				return self.change(inputKeys=newInputKeys, buildFrom=None)
		except base.Replace, ex:
			return ex.newOb
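
# Illustrative sketch, not part of the excerpt above: a condDesc built from a
# single column, turning request parameters into an SQL fragment.  The column
# is made up, and the exact SQL produced depends on base.getSQLForField; the
# values shown in the comments are only indicative.
_raColumn = base.makeStruct(rscdef.Column, name="ra", type="double precision")
_cd = CondDesc.fromColumn(_raColumn)

_inPars = {"ra": 10.5}      # what the request supplied
_sqlPars = {}               # filled with query parameters as a side effect
print(_cd.asSQL(_inPars, _sqlPars, queryMeta={}))  # e.g. "ra=%(ra0)s"
print(_sqlPars)                                    # e.g. {"ra0": 10.5}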